using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
#if NET40_OR_GREATER || NETCOREAPP
using System.Threading.Tasks;
#endif
using SabreTools.DatFiles;
using SabreTools.DatItems;
using SabreTools.IO;
using SabreTools.IO.Logging;

namespace SabreTools.DatTools
{
    /// <summary>
    /// Helper methods for updating and converting DatFiles
    /// </summary>
    public static class DatFileTool
    {
        #region Logging

        /// <summary>
        /// Logging object
        /// </summary>
        private static readonly Logger logger = new();

        #endregion

        /// <summary>
        /// Apply SuperDAT naming logic to a merged DatFile
        /// </summary>
        /// <param name="datFile">Current DatFile object to run operations on</param>
        /// <param name="inputs">List of inputs to use for renaming</param>
        public static void ApplySuperDAT(DatFile datFile, List<ParentablePath> inputs)
        {
            List<string> keys = [.. datFile.Items.Keys];
#if NET452_OR_GREATER || NETCOREAPP
            Parallel.ForEach(keys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
            Parallel.ForEach(keys, key =>
#else
            foreach (var key in keys)
#endif
            {
                List<DatItem>? items = datFile.Items[key];
                if (items == null)
#if NET40_OR_GREATER || NETCOREAPP
                    return;
#else
                    continue;
#endif

                List<DatItem> newItems = [];
                foreach (DatItem item in items)
                {
                    DatItem newItem = item;

                    // Items with no source cannot be renamed relative to an input path
                    var source = newItem.GetFieldValue<Source?>(DatItem.SourceKey);
                    if (source == null)
                        continue;

                    string filename = inputs[source.Index].CurrentPath;
                    string rootpath = inputs[source.Index].ParentPath ?? string.Empty;

                    // Ensure the root path ends with a directory separator before trimming
                    if (rootpath.Length > 0
#if NETFRAMEWORK
                        && !rootpath.EndsWith(Path.DirectorySeparatorChar.ToString())
                        && !rootpath.EndsWith(Path.AltDirectorySeparatorChar.ToString()))
#else
                        && !rootpath.EndsWith(Path.DirectorySeparatorChar)
                        && !rootpath.EndsWith(Path.AltDirectorySeparatorChar))
#endif
                    {
                        rootpath += Path.DirectorySeparatorChar.ToString();
                    }

                    filename = filename.Remove(0, rootpath.Length);

                    var machine = newItem.GetFieldValue<Machine?>(DatItem.MachineKey);
                    if (machine == null)
                        continue;

                    // Prefix the machine name with the relative input path pieces
                    machine.SetFieldValue<string?>(Models.Metadata.Machine.NameKey, Path.GetDirectoryName(filename)
                        + Path.DirectorySeparatorChar
                        + Path.GetFileNameWithoutExtension(filename)
                        + Path.DirectorySeparatorChar
                        + machine.GetStringFieldValue(Models.Metadata.Machine.NameKey));

                    newItems.Add(newItem);
                }

                datFile.Items.Remove(key);
                datFile.Items.AddRange(key, newItems);
#if NET40_OR_GREATER || NETCOREAPP
            });
#else
            }
#endif
        }

        /// <summary>
        /// Apply SuperDAT naming logic to a merged DatFile
        /// </summary>
        /// <param name="datFile">Current DatFile object to run operations on</param>
        /// <param name="inputs">List of inputs to use for renaming</param>
        public static void ApplySuperDATDB(DatFile datFile, List<ParentablePath> inputs)
        {
            List<string> keys = [.. datFile.ItemsDB.SortedKeys];
#if NET452_OR_GREATER || NETCOREAPP
            Parallel.ForEach(keys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
            Parallel.ForEach(keys, key =>
#else
            foreach (var key in keys)
#endif
            {
                var items = datFile.ItemsDB.GetItemsForBucket(key);
                if (items == null)
#if NET40_OR_GREATER || NETCOREAPP
                    return;
#else
                    continue;
#endif

                foreach (var item in items)
                {
                    var source = datFile.ItemsDB.GetSourceForItem(item.Key);
                    if (source.Value == null)
                        continue;

                    var machine = datFile.ItemsDB.GetMachineForItem(item.Key);
                    if (machine.Value == null)
                        continue;

                    string filename = inputs[source.Value.Index].CurrentPath;
                    string rootpath = inputs[source.Value.Index].ParentPath ?? string.Empty;

                    // Ensure the root path ends with a directory separator before trimming
                    if (rootpath.Length > 0
#if NETFRAMEWORK
                        && !rootpath!.EndsWith(Path.DirectorySeparatorChar.ToString())
                        && !rootpath!.EndsWith(Path.AltDirectorySeparatorChar.ToString()))
#else
                        && !rootpath.EndsWith(Path.DirectorySeparatorChar)
                        && !rootpath.EndsWith(Path.AltDirectorySeparatorChar))
#endif
                    {
                        rootpath += Path.DirectorySeparatorChar.ToString();
                    }

                    filename = filename.Remove(0, rootpath.Length);

                    // Prefix the machine name with the relative input path pieces
                    machine.Value.SetFieldValue<string?>(Models.Metadata.Machine.NameKey, Path.GetDirectoryName(filename)
                        + Path.DirectorySeparatorChar
                        + Path.GetFileNameWithoutExtension(filename)
                        + Path.DirectorySeparatorChar
                        + machine.Value.GetStringFieldValue(Models.Metadata.Machine.NameKey));
                }
#if NET40_OR_GREATER || NETCOREAPP
            });
#else
            }
#endif
        }

        /// <summary>
        /// Replace item values from the base set represented by the current DAT
        /// </summary>
        /// <param name="datFile">Current DatFile object to use for updating</param>
        /// <param name="intDat">DatFile to replace the values in</param>
        /// <param name="machineFieldNames">List of machine field names representing what should be updated</param>
        /// <param name="itemFieldNames">List of item field names representing what should be updated</param>
        /// <param name="onlySame">True if descriptions should only be replaced if the game name is the same, false otherwise</param>
        public static void BaseReplace(
            DatFile datFile,
            DatFile intDat,
            List<string> machineFieldNames,
            Dictionary<string, List<string>> itemFieldNames,
            bool onlySame)
        {
            InternalStopwatch watch = new($"Replacing items in '{intDat.Header.GetStringFieldValue(DatHeader.FileNameKey)}' from the base DAT");

            // If we are matching based on DatItem fields of any sort
            if (itemFieldNames.Count > 0)
            {
                // For comparison's sake, we want to use CRC as the base bucketing
                datFile.Items.BucketBy(ItemKey.CRC, DedupeType.Full);
                intDat.Items.BucketBy(ItemKey.CRC, DedupeType.None);

                // Then we do a hashwise comparison against the base DAT
#if NET452_OR_GREATER || NETCOREAPP
                Parallel.ForEach(intDat.Items.Keys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
                Parallel.ForEach(intDat.Items.Keys, key =>
#else
                foreach (var key in intDat.Items.Keys)
#endif
                {
                    List<DatItem>? datItems = intDat.Items[key];
                    if (datItems == null)
#if NET40_OR_GREATER || NETCOREAPP
                        return;
#else
                        continue;
#endif

                    List<DatItem> newDatItems = [];
                    foreach (DatItem datItem in datItems)
                    {
                        List<DatItem> dupes = datFile.Items.GetDuplicates(datItem, sorted: true);
                        if (datItem.Clone() is not DatItem newDatItem)
                            continue;

                        // Replace fields from the first duplicate, if we have one
                        if (dupes.Count > 0)
                            Replacer.ReplaceFields(newDatItem, dupes[0], itemFieldNames);

                        newDatItems.Add(newDatItem);
                    }

                    // Now add the new list to the key
                    intDat.Items.Remove(key);
                    intDat.Items.AddRange(key, newDatItems);
#if NET40_OR_GREATER || NETCOREAPP
                });
#else
                }
#endif
            }

            // If we are matching based on Machine fields of any sort
            if (machineFieldNames.Count > 0)
            {
                // For comparison's sake, we want to use Machine Name as the base bucketing
                datFile.Items.BucketBy(ItemKey.Machine, DedupeType.Full);
                intDat.Items.BucketBy(ItemKey.Machine, DedupeType.None);

                // Then we do a namewise comparison against the base DAT
#if NET452_OR_GREATER || NETCOREAPP
                Parallel.ForEach(intDat.Items.Keys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
                Parallel.ForEach(intDat.Items.Keys, key =>
#else
                foreach (var key in intDat.Items.Keys)
#endif
                {
                    List<DatItem>? datItems = intDat.Items[key];
                    if (datItems == null)
#if NET40_OR_GREATER || NETCOREAPP
                        return;
#else
                        continue;
#endif

                    List<DatItem> newDatItems = [];
                    foreach (DatItem datItem in datItems)
                    {
                        if (datItem.Clone() is not DatItem newDatItem)
                            continue;

                        if (!datFile.Items.TryGetValue(key, out var list) || list == null)
                            continue;

                        // NOTE: TryGetValue above already proves the key exists, so the
                        // previous ContainsKey re-check was a redundant double lookup
                        if (list.Count > 0)
                            Replacer.ReplaceFields(newDatItem.GetFieldValue<Machine>(DatItem.MachineKey)!, list[0].GetFieldValue<Machine>(DatItem.MachineKey)!, machineFieldNames, onlySame);

                        newDatItems.Add(newDatItem);
                    }

                    // Now add the new list to the key
                    intDat.Items.Remove(key);
                    intDat.Items.AddRange(key, newDatItems);
#if NET40_OR_GREATER || NETCOREAPP
                });
#else
                }
#endif
            }

            watch.Stop();
        }

        /// <summary>
        /// Replace item values from the base set represented by the current DAT
        /// </summary>
        /// <param name="datFile">Current DatFile object to use for updating</param>
        /// <param name="intDat">DatFile to replace the values in</param>
        /// <param name="machineFieldNames">List of machine field names representing what should be updated</param>
        /// <param name="itemFieldNames">List of item field names representing what should be updated</param>
        /// <param name="onlySame">True if descriptions should only be replaced if the game name is the same, false otherwise</param>
        public static void BaseReplaceDB(
            DatFile datFile,
            DatFile intDat,
            List<string> machineFieldNames,
            Dictionary<string, List<string>> itemFieldNames,
            bool onlySame)
        {
            InternalStopwatch watch = new($"Replacing items in '{intDat.Header.GetStringFieldValue(DatHeader.FileNameKey)}' from the base DAT");

            // If we are matching based on DatItem fields of any sort
            if (itemFieldNames.Count > 0)
            {
                // For comparison's sake, we want to use CRC as the base bucketing
                datFile.ItemsDB.BucketBy(ItemKey.CRC, DedupeType.Full);
                intDat.ItemsDB.BucketBy(ItemKey.CRC, DedupeType.None);

                // Then we do a hashwise comparison against the base DAT
#if NET452_OR_GREATER || NETCOREAPP
                Parallel.ForEach(intDat.ItemsDB.SortedKeys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
                Parallel.ForEach(intDat.ItemsDB.SortedKeys, key =>
#else
                foreach (var key in intDat.ItemsDB.SortedKeys)
#endif
                {
                    var datItems =
intDat.ItemsDB.GetItemsForBucket(key);
                    if (datItems == null)
#if NET40_OR_GREATER || NETCOREAPP
                        return;
#else
                        continue;
#endif

                    foreach (var datItem in datItems)
                    {
                        var dupes = datFile.ItemsDB.GetDuplicates(datItem, sorted: true);

                        // NOTE(review): the clone below only acts as a type/null guard;
                        // fields are replaced on the stored item itself, not on the clone
                        if (datItem.Value.Clone() is not DatItem newDatItem)
                            continue;

                        // Replace fields from the first duplicate, if we have one
                        if (dupes.Count > 0)
                            Replacer.ReplaceFields(datItem.Value, dupes.First().Value, itemFieldNames);
                    }
#if NET40_OR_GREATER || NETCOREAPP
                });
#else
                }
#endif
            }

            // If we are matching based on Machine fields of any sort
            if (machineFieldNames.Count > 0)
            {
                // For comparison's sake, we want to use Machine Name as the base bucketing
                datFile.ItemsDB.BucketBy(ItemKey.Machine, DedupeType.Full);
                intDat.ItemsDB.BucketBy(ItemKey.Machine, DedupeType.None);

                // Then we do a namewise comparison against the base DAT
#if NET452_OR_GREATER || NETCOREAPP
                Parallel.ForEach(intDat.ItemsDB.SortedKeys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
                Parallel.ForEach(intDat.ItemsDB.SortedKeys, key =>
#else
                foreach (var key in intDat.ItemsDB.SortedKeys)
#endif
                {
                    var datItems = intDat.ItemsDB.GetItemsForBucket(key);
                    if (datItems == null)
#if NET40_OR_GREATER || NETCOREAPP
                        return;
#else
                        continue;
#endif

                    foreach (var datItem in datItems)
                    {
                        var datMachine = datFile.ItemsDB.GetMachineForItem(datFile.ItemsDB.GetItemsForBucket(key)!.First().Key);
                        var intMachine = intDat.ItemsDB.GetMachineForItem(datItem.Key);
                        if (datMachine.Value != null && intMachine.Value != null)
                            Replacer.ReplaceFields(intMachine.Value, datMachine.Value, machineFieldNames, onlySame);
                    }
#if NET40_OR_GREATER || NETCOREAPP
                });
#else
                }
#endif
            }

            watch.Stop();
        }

        /// <summary>
        /// Output diffs against a base set represented by the current DAT
        /// </summary>
        /// <param name="datFile">Current DatFile object to use for updating</param>
        /// <param name="intDat">DatFile to replace the values in</param>
        /// <param name="useGames">True to diff using games, false to use hashes</param>
        public static void DiffAgainst(DatFile datFile, DatFile intDat, bool useGames)
        {
            // For comparison's sake, we want to use a base ordering
            if (useGames)
                datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None);
            else
                datFile.Items.BucketBy(ItemKey.CRC, DedupeType.None);

            InternalStopwatch watch = new($"Comparing '{intDat.Header.GetStringFieldValue(DatHeader.FileNameKey)}' to base DAT");

            // For comparison's sake, we want to use the base bucketing
            if (useGames)
                intDat.Items.BucketBy(ItemKey.Machine, DedupeType.None);
            else
                intDat.Items.BucketBy(ItemKey.CRC, DedupeType.Full);

            // Then we compare against the base DAT
            List<string> keys = [.. intDat.Items.Keys];
#if NET452_OR_GREATER || NETCOREAPP
            Parallel.ForEach(keys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
            Parallel.ForEach(keys, key =>
#else
            foreach (var key in keys)
#endif
            {
                // Game Against uses game names
                if (useGames)
                {
                    // If the key is null, keep it
                    if (!intDat.Items.TryGetValue(key, out var intList) || intList == null)
#if NET40_OR_GREATER || NETCOREAPP
                        return;
#else
                        continue;
#endif

                    // If the base DAT doesn't contain the key, keep it
                    if (!datFile.Items.TryGetValue(key, out var list) || list == null)
#if NET40_OR_GREATER || NETCOREAPP
                        return;
#else
                        continue;
#endif

                    // If the number of items is different, then keep it
                    if (list.Count != intList.Count)
#if NET40_OR_GREATER || NETCOREAPP
                        return;
#else
                        continue;
#endif

                    // Otherwise, compare by name and hash the remaining files
                    bool exactMatch = true;
                    foreach (DatItem item in intList)
                    {
                        // TODO: Make this granular to name as well
                        if (!list.Contains(item))
                        {
                            exactMatch = false;
                            break;
                        }
                    }

                    // If we have an exact match, remove the game
                    if (exactMatch)
                        intDat.Items.Remove(key);
                }

                // Standard Against uses hashes
                else
                {
                    List<DatItem>? datItems = intDat.Items[key];
                    if (datItems == null)
#if NET40_OR_GREATER || NETCOREAPP
                        return;
#else
                        continue;
#endif

                    List<DatItem> keepDatItems = [];
                    foreach (DatItem datItem in datItems)
                    {
                        if (!datFile.Items.HasDuplicates(datItem, true))
                            keepDatItems.Add(datItem);
                    }

                    // Now add the new list to the key
                    intDat.Items.Remove(key);
                    intDat.Items.AddRange(key, keepDatItems);
                }
#if NET40_OR_GREATER || NETCOREAPP
            });
#else
            }
#endif

            watch.Stop();
        }

        /// <summary>
        /// Output cascading diffs
        /// </summary>
        /// <param name="datFile">Current DatFile object to use for updating</param>
        /// <param name="datHeaders">Dat headers used optionally</param>
        /// <returns>List of DatFiles representing the individually indexed items</returns>
        public static List<DatFile> DiffCascade(DatFile datFile, List<DatHeader> datHeaders)
        {
            // Create a list of DatData objects representing output files
            List<DatFile> outDats = [];

            // Ensure the current DatFile is sorted optimally
            datFile.Items.BucketBy(ItemKey.CRC, DedupeType.None);

            // Loop through each of the inputs and get or create a new DatData object
            InternalStopwatch watch = new("Initializing and filling all output DATs");

            // Create the DatFiles from the set of headers
            DatFile[] outDatsArray = new DatFile[datHeaders.Count];
#if NET452_OR_GREATER || NETCOREAPP
            Parallel.For(0, datHeaders.Count, Core.Globals.ParallelOptions, j =>
#elif NET40_OR_GREATER
            Parallel.For(0, datHeaders.Count, j =>
#else
            for (int j = 0; j < datHeaders.Count; j++)
#endif
            {
                DatFile diffData = DatFile.Create(datHeaders[j]);
                diffData.ResetDictionary();
                FillWithSourceIndex(datFile, diffData, j);
                //FillWithSourceIndexDB(datFile, diffData, j);
                outDatsArray[j] = diffData;
#if NET40_OR_GREATER || NETCOREAPP
            });
#else
            }
#endif

            outDats = [..
outDatsArray];

            watch.Stop();
            return outDats;
        }

        /// <summary>
        /// Output duplicate item diff
        /// </summary>
        /// <param name="datFile">Current DatFile object to use for updating</param>
        /// <param name="inputs">List of inputs to write out from</param>
        public static DatFile DiffDuplicates(DatFile datFile, List<string> inputs)
        {
            List<ParentablePath> paths = inputs.ConvertAll(i => new ParentablePath(i));
            return DiffDuplicates(datFile, paths);
            //return DiffDuplicatesDB(datFile, paths);
        }

        /// <summary>
        /// Output duplicate item diff
        /// </summary>
        /// <param name="datFile">Current DatFile object to use for updating</param>
        /// <param name="inputs">List of inputs to write out from</param>
        public static DatFile DiffDuplicates(DatFile datFile, List<ParentablePath> inputs)
        {
            InternalStopwatch watch = new("Initializing duplicate DAT");

            // Fill in any information not in the base DAT
            if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
                datFile.Header.SetFieldValue<string?>(DatHeader.FileNameKey, "All DATs");

            if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
                datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, "datFile.All DATs");

            if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
                datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, "datFile.All DATs");

            string post = " (Duplicates)";
            DatFile dupeData = DatFile.Create(datFile.Header);
            dupeData.Header.SetFieldValue<string?>(DatHeader.FileNameKey, dupeData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
            dupeData.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
            dupeData.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
            dupeData.ResetDictionary();

            watch.Stop();

            // Now, loop through the dictionary and populate the correct DATs
            watch.Start("Populating duplicate DAT");

#if NET452_OR_GREATER || NETCOREAPP
            Parallel.ForEach(datFile.Items.Keys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
            Parallel.ForEach(datFile.Items.Keys, key =>
#else
            foreach (var key in datFile.Items.Keys)
#endif
            {
                List<DatItem> items = DatItem.Merge(datFile.Items[key]);

                // If the rom list is empty or null, just skip it
                if (items == null || items.Count == 0)
#if NET40_OR_GREATER || NETCOREAPP
                    return;
#else
                    continue;
#endif

                // Loop through and add the items correctly
                foreach (DatItem item in items)
                {
#if NETFRAMEWORK
                    if ((item.GetFieldValue<DupeType>(DatItem.DupeTypeKey) & DupeType.External) != 0)
#else
                    if (item.GetFieldValue<DupeType>(DatItem.DupeTypeKey).HasFlag(DupeType.External))
#endif
                    {
                        if (item.Clone() is not DatItem newrom)
                            continue;

                        // Tag the machine name with the originating input file
                        if (item.GetFieldValue<Source?>(DatItem.SourceKey) != null)
                            newrom.GetFieldValue<Machine>(DatItem.MachineKey)!.SetFieldValue<string?>(Models.Metadata.Machine.NameKey, newrom.GetFieldValue<Machine>(DatItem.MachineKey)!.GetStringFieldValue(Models.Metadata.Machine.NameKey)
                                + $" ({Path.GetFileNameWithoutExtension(inputs[item.GetFieldValue<Source?>(DatItem.SourceKey)!.Index].CurrentPath)})");

                        dupeData.Items.Add(key, newrom);
                    }
                }
#if NET40_OR_GREATER || NETCOREAPP
            });
#else
            }
#endif

            watch.Stop();
            return dupeData;
        }

        /// <summary>
        /// Output duplicate item diff
        /// </summary>
        /// <param name="datFile">Current DatFile object to use for updating</param>
        /// <param name="inputs">List of inputs to write out from</param>
        public static DatFile DiffDuplicatesDB(DatFile datFile, List<ParentablePath> inputs)
        {
            var watch = new InternalStopwatch("Initializing duplicate DAT");

            // Fill in any information not in the base DAT
            if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
                datFile.Header.SetFieldValue<string?>(DatHeader.FileNameKey, "All DATs");

            if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
                datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, "datFile.All DATs");

            if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
                datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, "datFile.All DATs");

            string post = " (Duplicates)";
            DatFile dupeData = DatFile.Create(datFile.Header);
            dupeData.Header.SetFieldValue<string?>(DatHeader.FileNameKey, dupeData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
            dupeData.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
            dupeData.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
            dupeData.ResetDictionary();

            watch.Stop();

            // Now, loop through the dictionary and populate the correct DATs
            watch.Start("Populating duplicate DAT");

            // Get all current items, machines, and mappings
            var datItems = datFile.ItemsDB.GetItems();
            var machines = datFile.ItemsDB.GetMachines();
            var sources = datFile.ItemsDB.GetSources();
            var itemMachineMappings = datFile.ItemsDB.GetItemMachineMappings();
            var itemSourceMappings = datFile.ItemsDB.GetItemSourceMappings();

            // Create mappings from old index to new index
            var machineRemapping = new Dictionary<long, long>();
            var sourceRemapping = new Dictionary<long, long>();

            // Loop through and add all sources
            foreach (var source in sources)
            {
                long newSourceIndex = dupeData.ItemsDB.AddSource(source.Value);
                sourceRemapping[source.Key] = newSourceIndex;
            }

            // Loop through and add all machines
            foreach (var machine in machines)
            {
                long newMachineIndex = dupeData.ItemsDB.AddMachine(machine.Value);
                machineRemapping[machine.Key] = newMachineIndex;
            }

            // Loop through and add the items
#if NET452_OR_GREATER || NETCOREAPP
            Parallel.ForEach(datItems, Core.Globals.ParallelOptions, item =>
#elif NET40_OR_GREATER
            Parallel.ForEach(datItems, item =>
#else
            foreach (var item in datItems)
#endif
            {
                // Get the machine and source index for this item
                long machineIndex = itemMachineMappings[item.Key];
                long sourceIndex = itemSourceMappings[item.Key];

                // If the current item isn't an external duplicate
#if NETFRAMEWORK
                if ((item.Value.GetFieldValue<DupeType>(DatItem.DupeTypeKey) & DupeType.External) == 0)
#else
                if (!item.Value.GetFieldValue<DupeType>(DatItem.DupeTypeKey).HasFlag(DupeType.External))
#endif
#if NET40_OR_GREATER || NETCOREAPP
                    return;
#else
                    continue;
#endif

                // Get the current source and machine
                var currentSource = sources[sourceIndex];
                string? currentMachineName = machines[machineIndex].GetStringFieldValue(Models.Metadata.Machine.NameKey);
                var currentMachine = datFile.ItemsDB.GetMachine(currentMachineName);
                if (currentMachine.Value == null)
#if NET40_OR_GREATER || NETCOREAPP
                    return;
#else
                    continue;
#endif

                // Get the source-specific machine
                string? renamedMachineName = $"{currentMachineName} ({Path.GetFileNameWithoutExtension(inputs[currentSource!.Index].CurrentPath)})";
                var renamedMachine = datFile.ItemsDB.GetMachine(renamedMachineName);
                if (renamedMachine.Value == null)
                {
                    var newMachine = currentMachine.Value.Clone() as Machine;
                    newMachine!.SetFieldValue<string?>(Models.Metadata.Machine.NameKey, renamedMachineName);
                    long newMachineIndex = dupeData.ItemsDB.AddMachine(newMachine!);
                    renamedMachine = new KeyValuePair<long, Machine?>(newMachineIndex, newMachine);
                }

                dupeData.ItemsDB.AddItem(item.Value, renamedMachine.Key, sourceRemapping[sourceIndex], statsOnly: false);
#if NET40_OR_GREATER || NETCOREAPP
            });
#else
            }
#endif

            watch.Stop();
            return dupeData;
        }

        /// <summary>
        /// Output non-cascading diffs
        /// </summary>
        /// <param name="datFile">Current DatFile object to use for updating</param>
        /// <param name="inputs">List of inputs to write out from</param>
        public static List<DatFile> DiffIndividuals(DatFile datFile, List<string> inputs)
        {
            List<ParentablePath> paths = inputs.ConvertAll(i => new ParentablePath(i));
            return DiffIndividuals(datFile, paths);
            //return DiffIndividualsDB(datFile, paths);
        }

        /// <summary>
        /// Output non-cascading diffs
        /// </summary>
        /// <param name="datFile">Current DatFile object to use for updating</param>
        /// <param name="inputs">List of inputs to write out from</param>
        public static List<DatFile> DiffIndividuals(DatFile datFile, List<ParentablePath> inputs)
        {
            InternalStopwatch watch = new("Initializing all individual DATs");

            // Fill in any information not in the base DAT
            if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
                datFile.Header.SetFieldValue<string?>(DatHeader.FileNameKey, "All DATs");

            if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
                datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, "All DATs");

            if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
                datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, "All DATs");

            // Loop through each of the inputs and get or create a new DatData object
            DatFile[] outDatsArray = new DatFile[inputs.Count];

#if NET452_OR_GREATER || NETCOREAPP
            Parallel.For(0, inputs.Count, Core.Globals.ParallelOptions, j =>
#elif NET40_OR_GREATER
            Parallel.For(0, inputs.Count, j =>
#else
            for (int j = 0; j < inputs.Count; j++)
#endif
            {
                string innerpost = $" ({j} - {inputs[j].GetNormalizedFileName(true)} Only)";
                DatFile diffData = DatFile.Create(datFile.Header);
                diffData.Header.SetFieldValue<string?>(DatHeader.FileNameKey, diffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + innerpost);
                diffData.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + innerpost);
                diffData.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + innerpost);
                diffData.ResetDictionary();
                outDatsArray[j] = diffData;
#if NET40_OR_GREATER || NETCOREAPP
            });
#else
            }
#endif

            // Create a list of DatData objects representing individual output files
            List<DatFile> outDats = [.. outDatsArray];

            watch.Stop();

            // Now, loop through the dictionary and populate the correct DATs
            watch.Start("Populating all individual DATs");

#if NET452_OR_GREATER || NETCOREAPP
            Parallel.ForEach(datFile.Items.Keys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
            Parallel.ForEach(datFile.Items.Keys, key =>
#else
            foreach (var key in datFile.Items.Keys)
#endif
            {
                List<DatItem> items = DatItem.Merge(datFile.Items[key]);

                // If the rom list is empty or null, just skip it
                if (items == null || items.Count == 0)
#if NET40_OR_GREATER || NETCOREAPP
                    return;
#else
                    continue;
#endif

                // Loop through and add the items correctly
                foreach (DatItem item in items)
                {
                    if (item.GetFieldValue<Source?>(DatItem.SourceKey) == null)
                        continue;

#if NETFRAMEWORK
                    if ((item.GetFieldValue<DupeType>(DatItem.DupeTypeKey) & DupeType.Internal) != 0 || item.GetFieldValue<DupeType>(DatItem.DupeTypeKey) == 0x00)
#else
                    if (item.GetFieldValue<DupeType>(DatItem.DupeTypeKey).HasFlag(DupeType.Internal) || item.GetFieldValue<DupeType>(DatItem.DupeTypeKey) == 0x00)
#endif
                        outDats[item.GetFieldValue<Source?>(DatItem.SourceKey)!.Index].Items.Add(key, item);
                }
#if NET40_OR_GREATER || NETCOREAPP
            });
#else
            }
#endif

            watch.Stop();
            return [..
outDats];
        }

        /// <summary>
        /// Output non-cascading diffs
        /// </summary>
        /// <param name="datFile">Current DatFile object to use for updating</param>
        /// <param name="inputs">List of inputs to write out from</param>
        public static List<DatFile> DiffIndividualsDB(DatFile datFile, List<ParentablePath> inputs)
        {
            InternalStopwatch watch = new("Initializing all individual DATs");

            // Fill in any information not in the base DAT
            if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
                datFile.Header.SetFieldValue<string?>(DatHeader.FileNameKey, "All DATs");

            if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
                datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, "All DATs");

            if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
                datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, "All DATs");

            // Loop through each of the inputs and get or create a new DatData object
            DatFile[] outDatsArray = new DatFile[inputs.Count];

#if NET452_OR_GREATER || NETCOREAPP
            Parallel.For(0, inputs.Count, Core.Globals.ParallelOptions, j =>
#elif NET40_OR_GREATER
            Parallel.For(0, inputs.Count, j =>
#else
            for (int j = 0; j < inputs.Count; j++)
#endif
            {
                string innerpost = $" ({j} - {inputs[j].GetNormalizedFileName(true)} Only)";
                DatFile diffData = DatFile.Create(datFile.Header);
                diffData.Header.SetFieldValue<string?>(DatHeader.FileNameKey, diffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + innerpost);
                diffData.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + innerpost);
                diffData.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + innerpost);
                diffData.ResetDictionary();
                outDatsArray[j] = diffData;
#if NET40_OR_GREATER || NETCOREAPP
            });
#else
            }
#endif

            // Create a list of DatData objects representing individual output files
            List<DatFile> outDats = [.. outDatsArray];

            watch.Stop();

            // Now, loop through the dictionary and populate the correct DATs
            watch.Start("Populating all individual DATs");

            // Get all current items, machines, and mappings
            var datItems = datFile.ItemsDB.GetItems();
            var machines = datFile.ItemsDB.GetMachines();
            var sources = datFile.ItemsDB.GetSources();
            var itemMachineMappings = datFile.ItemsDB.GetItemMachineMappings();
            var itemSourceMappings = datFile.ItemsDB.GetItemSourceMappings();

            // Create mappings from old index to new index
            var machineRemapping = new Dictionary<long, long>();
            var sourceRemapping = new Dictionary<long, long>();

            // Loop through and add all sources; every output DAT gets every source
            foreach (var source in sources)
            {
                long newSourceIndex = outDats[0].ItemsDB.AddSource(source.Value);
                sourceRemapping[source.Key] = newSourceIndex;
                for (int i = 1; i < outDats.Count; i++)
                {
                    _ = outDats[i].ItemsDB.AddSource(source.Value);
                }
            }

            // Loop through and add all machines; every output DAT gets every machine
            foreach (var machine in machines)
            {
                long newMachineIndex = outDats[0].ItemsDB.AddMachine(machine.Value);
                machineRemapping[machine.Key] = newMachineIndex;
                for (int i = 1; i < outDats.Count; i++)
                {
                    _ = outDats[i].ItemsDB.AddMachine(machine.Value);
                }
            }

            // Loop through and add the items
#if NET452_OR_GREATER || NETCOREAPP
            Parallel.ForEach(datItems, Core.Globals.ParallelOptions, item =>
#elif NET40_OR_GREATER
            Parallel.ForEach(datItems, item =>
#else
            foreach (var item in datItems)
#endif
            {
                // Get the machine and source index for this item
                long machineIndex = itemMachineMappings[item.Key];
                long sourceIndex = itemSourceMappings[item.Key];

                // Get the source associated with the item
                var source = datFile.ItemsDB.GetSource(sourceIndex);
                if (source == null)
#if NET40_OR_GREATER || NETCOREAPP
                    return;
#else
                    continue;
#endif

#if NETFRAMEWORK
                if ((item.Value.GetFieldValue<DupeType>(DatItem.DupeTypeKey) & DupeType.Internal) != 0 || item.Value.GetFieldValue<DupeType>(DatItem.DupeTypeKey) == 0x00)
#else
                if (item.Value.GetFieldValue<DupeType>(DatItem.DupeTypeKey).HasFlag(DupeType.Internal) || item.Value.GetFieldValue<DupeType>(DatItem.DupeTypeKey) == 0x00)
#endif
                    outDats[source.Index].ItemsDB.AddItem(item.Value, machineRemapping[machineIndex], sourceRemapping[sourceIndex], statsOnly: false);
#if NET40_OR_GREATER || NETCOREAPP
            });
#else
            }
#endif

            watch.Stop();
            return [.. outDats];
        }

        /// <summary>
        /// Output non-duplicate item diff
        /// </summary>
        /// <param name="datFile">Current DatFile object to use for updating</param>
        /// <param name="inputs">List of inputs to write out from</param>
        public static DatFile DiffNoDuplicates(DatFile datFile, List<string> inputs)
        {
            List<ParentablePath> paths = inputs.ConvertAll(i => new ParentablePath(i));
            return DiffNoDuplicates(datFile, paths);
            //return DiffNoDuplicatesDB(datFile, paths);
        }

        /// <summary>
        /// Output non-duplicate item diff
        /// </summary>
        /// <param name="datFile">Current DatFile object to use for updating</param>
        /// <param name="inputs">List of inputs to write out from</param>
        public static DatFile DiffNoDuplicates(DatFile datFile, List<ParentablePath> inputs)
        {
            InternalStopwatch watch = new("Initializing no duplicate DAT");

            // Fill in any information not in the base DAT
            if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
                datFile.Header.SetFieldValue<string?>(DatHeader.FileNameKey, "All DATs");

            if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
                datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, "All DATs");

            if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
                datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, "All DATs");

            string post = " (No Duplicates)";
            DatFile outerDiffData = DatFile.Create(datFile.Header);
            outerDiffData.Header.SetFieldValue<string?>(DatHeader.FileNameKey, outerDiffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
            outerDiffData.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
            outerDiffData.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
outerDiffData.ResetDictionary();

            watch.Stop();

            // Now, loop through the dictionary and populate the correct DATs
            watch.Start("Populating no duplicate DAT");

#if NET452_OR_GREATER || NETCOREAPP
            Parallel.ForEach(datFile.Items.Keys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
            Parallel.ForEach(datFile.Items.Keys, key =>
#else
            foreach (var key in datFile.Items.Keys)
#endif
            {
                List<DatItem> items = DatItem.Merge(datFile.Items[key]);

                // If the rom list is empty or null, just skip it
                if (items == null || items.Count == 0)
#if NET40_OR_GREATER || NETCOREAPP
                    return;
#else
                    continue;
#endif

                // Loop through and add the items correctly
                foreach (DatItem item in items)
                {
#if NETFRAMEWORK
                    if ((item.GetFieldValue<DupeType>(DatItem.DupeTypeKey) & DupeType.Internal) != 0 || item.GetFieldValue<DupeType>(DatItem.DupeTypeKey) == 0x00)
#else
                    if (item.GetFieldValue<DupeType>(DatItem.DupeTypeKey).HasFlag(DupeType.Internal) || item.GetFieldValue<DupeType>(DatItem.DupeTypeKey) == 0x00)
#endif
                    {
                        if (item.Clone() is not DatItem newrom || newrom.GetFieldValue<Source?>(DatItem.SourceKey) == null)
                            continue;

                        // Tag the machine name with the originating input file
                        newrom.GetFieldValue<Machine>(DatItem.MachineKey)!.SetFieldValue<string?>(Models.Metadata.Machine.NameKey, newrom.GetFieldValue<Machine>(DatItem.MachineKey)!.GetStringFieldValue(Models.Metadata.Machine.NameKey)
                            + $" ({Path.GetFileNameWithoutExtension(inputs[newrom.GetFieldValue<Source?>(DatItem.SourceKey)!.Index].CurrentPath)})");
                        outerDiffData.Items.Add(key, newrom);
                    }
                }
#if NET40_OR_GREATER || NETCOREAPP
            });
#else
            }
#endif

            watch.Stop();
            return outerDiffData;
        }

        /// <summary>
        /// Output non-duplicate item diff
        /// </summary>
        /// <param name="datFile">Current DatFile object to use for updating</param>
        /// <param name="inputs">List of inputs to write out from</param>
        public static DatFile DiffNoDuplicatesDB(DatFile datFile, List<ParentablePath> inputs)
        {
            var watch = new InternalStopwatch("Initializing no duplicate DAT");

            // Fill in any information not in the base DAT
            if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
                datFile.Header.SetFieldValue<string?>(DatHeader.FileNameKey, "All DATs");

            if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
                datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, "All DATs");

            if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
                datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, "All DATs");

            string post = " (No Duplicates)";
            DatFile outerDiffData = DatFile.Create(datFile.Header);
            outerDiffData.Header.SetFieldValue<string?>(DatHeader.FileNameKey, outerDiffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
            outerDiffData.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
            outerDiffData.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
            outerDiffData.ResetDictionary();

            watch.Stop();

            // Now, loop through the dictionary and populate the correct DATs
            watch.Start("Populating no duplicate DAT");

            // Get all current items, machines, and mappings
            var datItems = datFile.ItemsDB.GetItems();
            var machines = datFile.ItemsDB.GetMachines();
            var sources = datFile.ItemsDB.GetSources();
            var itemMachineMappings = datFile.ItemsDB.GetItemMachineMappings();
            var itemSourceMappings = datFile.ItemsDB.GetItemSourceMappings();

            // Create mappings from old index to new index
            var machineRemapping = new Dictionary<long, long>();
            var sourceRemapping = new Dictionary<long, long>();

            // Loop through and add all sources
            foreach (var source in sources)
            {
                long newSourceIndex = outerDiffData.ItemsDB.AddSource(source.Value);
                sourceRemapping[source.Key] = newSourceIndex;
            }

            // Loop through and add all machines
            foreach (var machine in machines)
            {
                long newMachineIndex = outerDiffData.ItemsDB.AddMachine(machine.Value);
                machineRemapping[machine.Key] = newMachineIndex;
            }

            // Loop through and add the items
#if NET452_OR_GREATER || NETCOREAPP
            Parallel.ForEach(datItems, Core.Globals.ParallelOptions, item =>
#elif NET40_OR_GREATER
            Parallel.ForEach(datItems, item =>
#else
            foreach (var item in datItems)
#endif
            {
                // Get the machine and source index for this item
                long machineIndex = itemMachineMappings[item.Key];
                long sourceIndex = itemSourceMappings[item.Key];

                // If the current item isn't a duplicate
#if NETFRAMEWORK
                if ((item.Value.GetFieldValue<DupeType>(DatItem.DupeTypeKey) & DupeType.Internal) == 0 && item.Value.GetFieldValue<DupeType>(DatItem.DupeTypeKey) != 0x00)
#else
                if (!item.Value.GetFieldValue<DupeType>(DatItem.DupeTypeKey).HasFlag(DupeType.Internal) && item.Value.GetFieldValue<DupeType>(DatItem.DupeTypeKey) != 0x00)
#endif
#if NET40_OR_GREATER || NETCOREAPP
                    return;
#else
                    continue;
#endif

                // Get the current source and machine
                var currentSource = sources[sourceIndex];
                string? currentMachineName = machines[machineIndex].GetStringFieldValue(Models.Metadata.Machine.NameKey);
                var currentMachine = datFile.ItemsDB.GetMachine(currentMachineName);
                if (currentMachine.Value == null)
#if NET40_OR_GREATER || NETCOREAPP
                    return;
#else
                    continue;
#endif

                // Get the source-specific machine
                string? renamedMachineName = $"{currentMachineName} ({Path.GetFileNameWithoutExtension(inputs[currentSource!.Index].CurrentPath)})";
                var renamedMachine = datFile.ItemsDB.GetMachine(renamedMachineName);
                if (renamedMachine.Value == null)
                {
                    var newMachine = currentMachine.Value.Clone() as Machine;
                    newMachine!.SetFieldValue<string?>(Models.Metadata.Machine.NameKey, renamedMachineName);
                    long newMachineIndex = outerDiffData.ItemsDB.AddMachine(newMachine);
                    renamedMachine = new KeyValuePair<long, Machine?>(newMachineIndex, newMachine);
                }

                outerDiffData.ItemsDB.AddItem(item.Value, renamedMachine.Key, sourceRemapping[sourceIndex], statsOnly: false);
#if NET40_OR_GREATER || NETCOREAPP
            });
#else
            }
#endif

            watch.Stop();
            return outerDiffData;
        }

        /// <summary>
        /// Populate from multiple paths while returning the individual headers
        /// </summary>
        /// <param name="datFile">Current DatFile object to use for updating</param>
        /// <param name="inputs">Paths to DATs to parse</param>
        /// <returns>List of DatHeader objects representing headers</returns>
        public static List<DatHeader> PopulateUserData(DatFile datFile, List<string> inputs)
        {
            List<ParentablePath> paths = inputs.ConvertAll(i => new ParentablePath(i));
            return PopulateUserData(datFile, paths);
        }

        /// <summary>
        /// Populate from multiple paths while returning the individual headers
        /// </summary>
        /// <param name="datFile">Current DatFile object to use for updating</param>
        /// <param name="inputs">Paths to DATs to parse</param>
        /// <returns>List of DatHeader objects representing headers</returns>
        public static List<DatHeader> PopulateUserData(DatFile datFile, List<ParentablePath> inputs)
        {
            DatFile[] datFiles = new DatFile[inputs.Count];
            InternalStopwatch watch = new("Processing individual DATs");

            // Parse all of the DATs into their own DatFiles in the array
#if NET452_OR_GREATER || NETCOREAPP
            Parallel.For(0, inputs.Count, Core.Globals.ParallelOptions, i =>
#elif NET40_OR_GREATER
            Parallel.For(0, inputs.Count, i =>
#else
            for (int i = 0; i < inputs.Count; i++)
#endif
            {
                var input = inputs[i];
                logger.User($"Adding DAT: {input.CurrentPath}");
                datFiles[i] = DatFile.Create(datFile.Header.CloneFiltering());
                Parser.ParseInto(datFiles[i], input, i, keep: true);
#if NET40_OR_GREATER || NETCOREAPP
            });
#else
            }
#endif

            watch.Stop();
watch.Start("Populating internal DAT"); for (int i = 0; i < inputs.Count; i++) { AddFromExisting(datFile, datFiles[i], true); //AddFromExistingDB(datFile, datFiles[i], true); } watch.Stop(); return [.. Array.ConvertAll(datFiles, d => d.Header)]; } /// /// Add items from another DatFile to the existing DatFile /// /// DatFile to add to /// DatFile to add from /// If items should be deleted from the source DatFile private static void AddFromExisting(DatFile addTo, DatFile addFrom, bool delete = false) { // Get the list of keys from the DAT List keys = [.. addFrom.Items.Keys]; foreach (string key in keys) { // Add everything from the key to the internal DAT addTo.Items.AddRange(key, addFrom.Items[key]); // Now remove the key from the source DAT if (delete) addFrom.Items.Remove(key); } // Now remove the file dictionary from the source DAT if (delete) addFrom.ResetDictionary(); } /// /// Add items from another DatFile to the existing DatFile /// /// DatFile to add to /// DatFile to add from /// If items should be deleted from the source DatFile private static void AddFromExistingDB(DatFile addTo, DatFile addFrom, bool delete = false) { // Get all current items, machines, and mappings var datItems = addFrom.ItemsDB.GetItems(); var machines = addFrom.ItemsDB.GetMachines(); var sources = addFrom.ItemsDB.GetSources(); var itemMachineMappings = addFrom.ItemsDB.GetItemMachineMappings(); var itemSourceMappings = addFrom.ItemsDB.GetItemSourceMappings(); // Create mappings from old index to new index var machineRemapping = new Dictionary(); var sourceRemapping = new Dictionary(); // Loop through and add all sources foreach (var source in sources) { long newSourceIndex = addTo.ItemsDB.AddSource(source.Value); sourceRemapping[source.Key] = newSourceIndex; } // Loop through and add all machines foreach (var machine in machines) { long newMachineIndex = addTo.ItemsDB.AddMachine(machine.Value); machineRemapping[machine.Key] = newMachineIndex; } // Loop through and add the items #if 
NET452_OR_GREATER || NETCOREAPP Parallel.ForEach(datItems, Core.Globals.ParallelOptions, item => #elif NET40_OR_GREATER Parallel.ForEach(datItems, item => #else foreach (var item in datItems) #endif { // Get the machine and source index for this item long machineIndex = itemMachineMappings[item.Key]; long sourceIndex = itemSourceMappings[item.Key]; addTo.ItemsDB.AddItem(item.Value, machineRemapping[machineIndex], sourceRemapping[sourceIndex], statsOnly: false); // Now remove the key from the source DAT if (delete) addFrom.ItemsDB.RemoveItem(item.Key); #if NET40_OR_GREATER || NETCOREAPP }); #else } #endif // Now remove the file dictionary from the source DAT if (delete) addFrom.ResetDictionary(); } /// /// Fill a DatFile with all items with a particular source index ID /// /// Current DatFile object to use for updating /// DatFile to add found items to /// Source index ID to retrieve items for /// DatFile containing all items with the source index ID/returns> private static void FillWithSourceIndex(DatFile datFile, DatFile indexDat, int index) { // Loop through and add the items for this index to the output #if NET452_OR_GREATER || NETCOREAPP Parallel.ForEach(datFile.Items.Keys, Core.Globals.ParallelOptions, key => #elif NET40_OR_GREATER Parallel.ForEach(datFile.Items.Keys, key => #else foreach (var key in datFile.Items.Keys) #endif { List items = DatItem.Merge(datFile.Items[key]); // If the rom list is empty or null, just skip it if (items == null || items.Count == 0) #if NET40_OR_GREATER || NETCOREAPP return; #else continue; #endif foreach (DatItem item in items) { var source = item.GetFieldValue(DatItem.SourceKey); if (source != null && source.Index == index) indexDat.Items.Add(key, item); } #if NET40_OR_GREATER || NETCOREAPP }); #else } #endif } /// /// Fill a DatFile with all items with a particular source index ID /// /// Current DatFile object to use for updating /// DatFile to add found items to /// Source index ID to retrieve items for /// DatFile 
containing all items with the source index ID/returns> private static void FillWithSourceIndexDB(DatFile datFile, DatFile indexDat, int index) { // Get all current items, machines, and mappings var datItems = datFile.ItemsDB.GetItems(); var machines = datFile.ItemsDB.GetMachines(); var sources = datFile.ItemsDB.GetSources(); var itemMachineMappings = datFile.ItemsDB.GetItemMachineMappings(); var itemSourceMappings = datFile.ItemsDB.GetItemSourceMappings(); // Create mappings from old index to new index var machineRemapping = new Dictionary(); var sourceRemapping = new Dictionary(); // Loop through and add all sources foreach (var source in sources) { long newSourceIndex = indexDat.ItemsDB.AddSource(source.Value); sourceRemapping[source.Key] = newSourceIndex; } // Loop through and add all machines foreach (var machine in machines) { long newMachineIndex = indexDat.ItemsDB.AddMachine(machine.Value); machineRemapping[machine.Key] = newMachineIndex; } // Loop through and add the items #if NET452_OR_GREATER || NETCOREAPP Parallel.ForEach(datItems, Core.Globals.ParallelOptions, item => #elif NET40_OR_GREATER Parallel.ForEach(datItems, item => #else foreach (var item in datItems) #endif { // Get the machine and source index for this item long machineIndex = itemMachineMappings[item.Key]; long sourceIndex = itemSourceMappings[item.Key]; // Get the source associated with the item var source = datFile.ItemsDB.GetSource(sourceIndex); if (source != null && source.Index == index) indexDat.ItemsDB.AddItem(item.Value, machineRemapping[machineIndex], sourceRemapping[sourceIndex], statsOnly: false); #if NET40_OR_GREATER || NETCOREAPP }); #else } #endif } } }