diff --git a/SabreTools.DatTools/DatFileTool.cs b/SabreTools.DatTools/DatFileTool.cs
index e2416f34..c050fe85 100644
--- a/SabreTools.DatTools/DatFileTool.cs
+++ b/SabreTools.DatTools/DatFileTool.cs
@@ -90,6 +90,66 @@ namespace SabreTools.DatTools
#endif
}
+ /// <summary>
+ /// Apply SuperDAT naming logic to a merged DatFile
+ /// </summary>
+ /// <param name="datFile">Current DatFile object to run operations on</param>
+ /// <param name="inputs">List of inputs to use for renaming</param>
+ public static void ApplySuperDATDB(DatFile datFile, List<ParentablePath> inputs)
+ {
+ List<string> keys = [.. datFile.ItemsDB.SortedKeys];
+#if NET452_OR_GREATER || NETCOREAPP
+ Parallel.ForEach(keys, Globals.ParallelOptions, key =>
+#elif NET40_OR_GREATER
+ Parallel.ForEach(keys, key =>
+#else
+ foreach (var key in keys)
+#endif
+ {
+ var items = datFile.ItemsDB.GetItemsForBucket(key);
+ if (items == null)
+#if NET40_OR_GREATER || NETCOREAPP
+ return;
+#else
+ continue;
+#endif
+
+ foreach ((long, DatItem) item in items)
+ {
+ if (item.Item2.GetFieldValue(DatItem.SourceKey) == null)
+ continue;
+
+ var machine = datFile.ItemsDB.GetMachineForItem(item.Item1);
+ if (machine.Item2 == null)
+ continue;
+
+ string filename = inputs[item.Item2.GetFieldValue(DatItem.SourceKey)!.Index].CurrentPath;
+ string? rootpath = inputs[item.Item2.GetFieldValue(DatItem.SourceKey)!.Index].ParentPath;
+
+ if (!string.IsNullOrEmpty(rootpath)
+#if NETFRAMEWORK
+ && !rootpath!.EndsWith(Path.DirectorySeparatorChar.ToString())
+ && !rootpath!.EndsWith(Path.AltDirectorySeparatorChar.ToString()))
+#else
+ && !rootpath.EndsWith(Path.DirectorySeparatorChar)
+ && !rootpath.EndsWith(Path.AltDirectorySeparatorChar))
+#endif
+ {
+ rootpath += Path.DirectorySeparatorChar.ToString();
+ }
+
+ filename = filename.Remove(0, rootpath?.Length ?? 0);
+ machine.Item2.SetFieldValue(Models.Metadata.Machine.NameKey, Path.GetDirectoryName(filename) + Path.DirectorySeparatorChar
+ + Path.GetFileNameWithoutExtension(filename) + Path.DirectorySeparatorChar
+ + machine.Item2.GetStringFieldValue(Models.Metadata.Machine.NameKey));
+ }
+#if NET40_OR_GREATER || NETCOREAPP
+ });
+#else
+ }
+#endif
+ }
+
/// <summary>
/// Replace item values from the base set represented by the current DAT
/// </summary>
@@ -207,6 +267,105 @@ namespace SabreTools.DatTools
watch.Stop();
}
+ /// <summary>
+ /// Replace item values from the base set represented by the current DAT
+ /// </summary>
+ /// <param name="datFile">Current DatFile object to use for updating</param>
+ /// <param name="intDat">DatFile to replace the values in</param>
+ /// <param name="machineFieldNames">List of machine field names representing what should be updated</param>
+ /// <param name="itemFieldNames">List of item field names representing what should be updated</param>
+ /// <param name="onlySame">True if descriptions should only be replaced if the game name is the same, false otherwise</param>
+ public static void BaseReplaceDB(
+ DatFile datFile,
+ DatFile intDat,
+ List<string> machineFieldNames,
+ Dictionary<string, List<string>> itemFieldNames,
+ bool onlySame)
+ {
+ InternalStopwatch watch = new($"Replacing items in '{intDat.Header.GetStringFieldValue(DatHeader.FileNameKey)}' from the base DAT");
+
+ // If we are matching based on DatItem fields of any sort
+ if (itemFieldNames.Any())
+ {
+ // For comparison's sake, we want to use CRC as the base bucketing
+ datFile.ItemsDB.BucketBy(ItemKey.CRC, DedupeType.Full);
+ intDat.ItemsDB.BucketBy(ItemKey.CRC, DedupeType.None);
+
+ // Then we do a hashwise comparison against the base DAT
+#if NET452_OR_GREATER || NETCOREAPP
+ Parallel.ForEach(intDat.ItemsDB.SortedKeys, Globals.ParallelOptions, key =>
+#elif NET40_OR_GREATER
+ Parallel.ForEach(intDat.ItemsDB.SortedKeys, key =>
+#else
+ foreach (var key in intDat.ItemsDB.SortedKeys)
+#endif
+ {
+ var datItems = intDat.ItemsDB.GetItemsForBucket(key);
+ if (datItems == null)
+#if NET40_OR_GREATER || NETCOREAPP
+ return;
+#else
+ continue;
+#endif
+
+ foreach ((long, DatItem) datItem in datItems)
+ {
+ var dupes = datFile.ItemsDB.GetDuplicates(datItem.Item2, sorted: true);
+ if (datItem.Item2.Clone() is not DatItem newDatItem)
+ continue;
+
+ // Replace fields from the first duplicate, if we have one
+ if (dupes.Count > 0)
+ Replacer.ReplaceFields(datItem.Item2, dupes.First().Item2, itemFieldNames);
+ }
+#if NET40_OR_GREATER || NETCOREAPP
+ });
+#else
+ }
+#endif
+ }
+
+ // If we are matching based on Machine fields of any sort
+ if (machineFieldNames.Any())
+ {
+ // For comparison's sake, we want to use Machine Name as the base bucketing
+ datFile.ItemsDB.BucketBy(ItemKey.Machine, DedupeType.Full);
+ intDat.ItemsDB.BucketBy(ItemKey.Machine, DedupeType.None);
+
+ // Then we do a namewise comparison against the base DAT
+#if NET452_OR_GREATER || NETCOREAPP
+ Parallel.ForEach(intDat.ItemsDB.SortedKeys, Globals.ParallelOptions, key =>
+#elif NET40_OR_GREATER
+ Parallel.ForEach(intDat.ItemsDB.SortedKeys, key =>
+#else
+ foreach (var key in intDat.ItemsDB.SortedKeys)
+#endif
+ {
+ var datItems = intDat.ItemsDB.GetItemsForBucket(key);
+ if (datItems == null)
+#if NET40_OR_GREATER || NETCOREAPP
+ return;
+#else
+ continue;
+#endif
+
+ foreach ((long, DatItem) datItem in datItems)
+ {
+ var datMachine = datFile.ItemsDB.GetMachineForItem(datFile.ItemsDB.GetItemsForBucket(key)![0].Item1);
+ var intMachine = intDat.ItemsDB.GetMachineForItem(datItem.Item1);
+ if (datMachine.Item2 != null && intMachine.Item2 != null)
+ Replacer.ReplaceFields(intMachine.Item2, datMachine.Item2, machineFieldNames, onlySame);
+ }
+#if NET40_OR_GREATER || NETCOREAPP
+ });
+#else
+ }
+#endif
+ }
+
+ watch.Stop();
+ }
+
///
/// Output diffs against a base set represented by the current DAT
///
@@ -344,6 +503,7 @@ namespace SabreTools.DatTools
DatFile diffData = DatFile.Create(datHeaders[j]);
diffData.ResetDictionary();
FillWithSourceIndex(datFile, diffData, j);
+ //FillWithSourceIndexDB(datFile, diffData, j);
outDatsArray[j] = diffData;
#if NET40_OR_GREATER || NETCOREAPP
});
@@ -366,6 +526,7 @@ namespace SabreTools.DatTools
{
List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
return DiffDuplicates(datFile, paths);
+ //return DiffDuplicatesDB(datFile, paths);
}
///
@@ -446,6 +607,95 @@ namespace SabreTools.DatTools
return dupeData;
}
+ /// <summary>
+ /// Output duplicate item diff
+ /// </summary>
+ /// <param name="datFile">Current DatFile object to use for updating</param>
+ /// <param name="inputs">List of inputs to write out from</param>
+ public static DatFile DiffDuplicatesDB(DatFile datFile, List<ParentablePath> inputs)
+ {
+ InternalStopwatch watch = new("Initializing duplicate DAT");
+
+ // Fill in any information not in the base DAT
+ if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
+ datFile.Header.SetFieldValue(DatHeader.FileNameKey, "All DATs");
+
+ if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
+ datFile.Header.SetFieldValue(Models.Metadata.Header.NameKey, "datFile.All DATs");
+
+ if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
+ datFile.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, "datFile.All DATs");
+
+ string post = " (Duplicates)";
+ DatFile dupeData = DatFile.Create(datFile.Header);
+ dupeData.Header.SetFieldValue(DatHeader.FileNameKey, dupeData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
+ dupeData.Header.SetFieldValue(Models.Metadata.Header.NameKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
+ dupeData.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
+ dupeData.ResetDictionary();
+
+ watch.Stop();
+
+ // Now, loop through the dictionary and populate the correct DATs
+ watch.Start("Populating duplicate DAT");
+
+ // Get all current items, machines, and mappings
+ var datItems = datFile.ItemsDB.GetItems().ToDictionary(m => m.Item1, m => m.Item2);
+ var machines = datFile.ItemsDB.GetMachines().ToDictionary(m => m.Item1, m => m.Item2);
+ var mappings = datFile.ItemsDB.GetItemMappings().ToDictionary(m => m.Item1, m => m.Item2);
+
+ // Create a mapping from old machine index to new machine index
+ var machineRemapping = new Dictionary<long, long>();
+
+ // Loop through and add all machines
+ foreach (var machine in machines)
+ {
+ // TODO: Figure out how we can reintroduce the source to this name
+ var machineValue = machine.Value;
+ //machineValue.SetFieldValue(Models.Metadata.Machine.NameKey, machineValue.GetStringFieldValue(Models.Metadata.Machine.NameKey) + $" ({Path.GetFileNameWithoutExtension(inputs[newrom.GetFieldValue(DatItem.SourceKey)!.Index].CurrentPath)})");
+ machineValue.SetFieldValue(Models.Metadata.Machine.NameKey, machineValue.GetStringFieldValue(Models.Metadata.Machine.NameKey));
+
+ long newMachineIndex = dupeData.ItemsDB.AddMachine(machineValue);
+ machineRemapping[machine.Key] = newMachineIndex;
+ }
+
+ // Loop through and add the items
+#if NET452_OR_GREATER || NETCOREAPP
+ Parallel.ForEach(datItems, Globals.ParallelOptions, item =>
+#elif NET40_OR_GREATER
+ Parallel.ForEach(datItems, item =>
+#else
+ foreach (var item in datItems)
+#endif
+ {
+ // Get the machine index for this item
+ long machineIndex = mappings[item.Key];
+
+#if NETFRAMEWORK
+ if ((item.Value.GetFieldValue(DatItem.DupeTypeKey) & DupeType.External) != 0)
+#else
+ if (item.Value.GetFieldValue(DatItem.DupeTypeKey).HasFlag(DupeType.External))
+#endif
+ {
+ if (item.Value.Clone() is not DatItem newrom)
+#if NET40_OR_GREATER || NETCOREAPP
+ return;
+#else
+ continue;
+#endif
+
+ dupeData.ItemsDB.AddItem(newrom, machineRemapping[machineIndex], false);
+ }
+#if NET40_OR_GREATER || NETCOREAPP
+ });
+#else
+ }
+#endif
+
+ watch.Stop();
+
+ return dupeData;
+ }
+
/// <summary>
/// Output non-cascading diffs
/// </summary>
@@ -455,6 +705,7 @@ namespace SabreTools.DatTools
{
List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
return DiffIndividuals(datFile, paths);
+ //return DiffIndividualsDB(datFile, paths);
}
///
@@ -550,6 +801,113 @@ namespace SabreTools.DatTools
return [.. outDats];
}
+ /// <summary>
+ /// Output non-cascading diffs
+ /// </summary>
+ /// <param name="datFile">Current DatFile object to use for updating</param>
+ /// <param name="inputs">List of inputs to write out from</param>
+ public static List<DatFile> DiffIndividualsDB(DatFile datFile, List<ParentablePath> inputs)
+ {
+ InternalStopwatch watch = new("Initializing all individual DATs");
+
+ // Fill in any information not in the base DAT
+ if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
+ datFile.Header.SetFieldValue(DatHeader.FileNameKey, "All DATs");
+
+ if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
+ datFile.Header.SetFieldValue(Models.Metadata.Header.NameKey, "All DATs");
+
+ if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
+ datFile.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, "All DATs");
+
+ // Loop through each of the inputs and get or create a new DatData object
+ DatFile[] outDatsArray = new DatFile[inputs.Count];
+
+#if NET452_OR_GREATER || NETCOREAPP
+ Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
+#elif NET40_OR_GREATER
+ Parallel.For(0, inputs.Count, j =>
+#else
+ for (int j = 0; j < inputs.Count; j++)
+#endif
+ {
+ string innerpost = $" ({j} - {inputs[j].GetNormalizedFileName(true)} Only)";
+ DatFile diffData = DatFile.Create(datFile.Header);
+ diffData.Header.SetFieldValue(DatHeader.FileNameKey, diffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + innerpost);
+ diffData.Header.SetFieldValue(Models.Metadata.Header.NameKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + innerpost);
+ diffData.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + innerpost);
+ diffData.ResetDictionary();
+ outDatsArray[j] = diffData;
+#if NET40_OR_GREATER || NETCOREAPP
+ });
+#else
+ }
+#endif
+
+ // Create a list of DatData objects representing individual output files
+ List<DatFile> outDats = [.. outDatsArray];
+
+ watch.Stop();
+
+ // Now, loop through the dictionary and populate the correct DATs
+ watch.Start("Populating all individual DATs");
+
+ // Get all current items, machines, and mappings
+ var datItems = datFile.ItemsDB.GetItems().ToDictionary(m => m.Item1, m => m.Item2);
+ var machines = datFile.ItemsDB.GetMachines().ToDictionary(m => m.Item1, m => m.Item2);
+ var mappings = datFile.ItemsDB.GetItemMappings().ToDictionary(m => m.Item1, m => m.Item2);
+
+ // Create a mapping from old machine index to new machine index
+ var machineRemapping = new Dictionary<long, long>();
+
+ // Loop through and add all machines
+ foreach (var machine in machines)
+ {
+ long newMachineIndex = outDats[0].ItemsDB.AddMachine(machine.Value);
+ machineRemapping[machine.Key] = newMachineIndex;
+
+ for (int i = 1; i < outDats.Count; i++)
+ {
+ _ = outDats[i].ItemsDB.AddMachine(machine.Value);
+ }
+ }
+
+ // Loop through and add the items
+#if NET452_OR_GREATER || NETCOREAPP
+ Parallel.ForEach(datItems, Globals.ParallelOptions, item =>
+#elif NET40_OR_GREATER
+ Parallel.ForEach(datItems, item =>
+#else
+ foreach (var item in datItems)
+#endif
+ {
+ // Get the machine index for this item
+ long machineIndex = mappings[item.Key];
+
+ if (item.Value.GetFieldValue(DatItem.SourceKey) == null)
+#if NET40_OR_GREATER || NETCOREAPP
+ return;
+#else
+ continue;
+#endif
+
+#if NETFRAMEWORK
+ if ((item.Value.GetFieldValue(DatItem.DupeTypeKey) & DupeType.Internal) != 0 || item.Value.GetFieldValue(DatItem.DupeTypeKey) == 0x00)
+#else
+ if (item.Value.GetFieldValue(DatItem.DupeTypeKey).HasFlag(DupeType.Internal) || item.Value.GetFieldValue(DatItem.DupeTypeKey) == 0x00)
+#endif
+ outDats[item.Value.GetFieldValue(DatItem.SourceKey)!.Index].ItemsDB.AddItem(item.Value, machineRemapping[machineIndex], false);
+#if NET40_OR_GREATER || NETCOREAPP
+ });
+#else
+ }
+#endif
+
+ watch.Stop();
+
+ return [.. outDats];
+ }
+
/// <summary>
/// Output non-duplicate item diff
/// </summary>
@@ -559,6 +917,7 @@ namespace SabreTools.DatTools
{
List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
return DiffNoDuplicates(datFile, paths);
+ //return DiffNoDuplicatesDB(datFile, paths);
}
///
@@ -637,6 +996,96 @@ namespace SabreTools.DatTools
return outerDiffData;
}
+ /// <summary>
+ /// Output non-duplicate item diff
+ /// </summary>
+ /// <param name="datFile">Current DatFile object to use for updating</param>
+ /// <param name="inputs">List of inputs to write out from</param>
+ public static DatFile DiffNoDuplicatesDB(DatFile datFile, List<ParentablePath> inputs)
+ {
+ InternalStopwatch watch = new("Initializing no duplicate DAT");
+
+ // Fill in any information not in the base DAT
+ if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
+ datFile.Header.SetFieldValue(DatHeader.FileNameKey, "All DATs");
+
+ if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
+ datFile.Header.SetFieldValue(Models.Metadata.Header.NameKey, "All DATs");
+
+ if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
+ datFile.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, "All DATs");
+
+ string post = " (No Duplicates)";
+ DatFile outerDiffData = DatFile.Create(datFile.Header);
+ outerDiffData.Header.SetFieldValue(DatHeader.FileNameKey, outerDiffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
+ outerDiffData.Header.SetFieldValue(Models.Metadata.Header.NameKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
+ outerDiffData.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
+ outerDiffData.ResetDictionary();
+
+ watch.Stop();
+
+ // Now, loop through the dictionary and populate the correct DATs
+ watch.Start("Populating no duplicate DAT");
+
+ // Get all current items, machines, and mappings
+ var datItems = datFile.ItemsDB.GetItems().ToDictionary(m => m.Item1, m => m.Item2);
+ var machines = datFile.ItemsDB.GetMachines().ToDictionary(m => m.Item1, m => m.Item2);
+ var mappings = datFile.ItemsDB.GetItemMappings().ToDictionary(m => m.Item1, m => m.Item2);
+
+ // Create a mapping from old machine index to new machine index
+ var machineRemapping = new Dictionary<long, long>();
+
+ // Loop through and add all machines
+ foreach (var machine in machines)
+ {
+ // TODO: Figure out how we can reintroduce the source to this name
+ var machineValue = machine.Value;
+ //machineValue.SetFieldValue(Models.Metadata.Machine.NameKey, machineValue.GetStringFieldValue(Models.Metadata.Machine.NameKey) + $" ({Path.GetFileNameWithoutExtension(inputs[newrom.GetFieldValue(DatItem.SourceKey)!.Index].CurrentPath)})");
+ machineValue.SetFieldValue(Models.Metadata.Machine.NameKey, machineValue.GetStringFieldValue(Models.Metadata.Machine.NameKey));
+
+ long newMachineIndex = outerDiffData.ItemsDB.AddMachine(machineValue);
+ machineRemapping[machine.Key] = newMachineIndex;
+ }
+
+ // Loop through and add the items
+#if NET452_OR_GREATER || NETCOREAPP
+ Parallel.ForEach(datItems, Globals.ParallelOptions, item =>
+#elif NET40_OR_GREATER
+ Parallel.ForEach(datItems, item =>
+#else
+ foreach (var item in datItems)
+#endif
+ {
+ // Get the machine index for this item
+ long machineIndex = mappings[item.Key];
+
+#if NETFRAMEWORK
+ if ((item.Value.GetFieldValue(DatItem.DupeTypeKey) & DupeType.Internal) != 0 || item.Value.GetFieldValue(DatItem.DupeTypeKey) == 0x00)
+#else
+ if (item.Value.GetFieldValue(DatItem.DupeTypeKey).HasFlag(DupeType.Internal) || item.Value.GetFieldValue(DatItem.DupeTypeKey) == 0x00)
+#endif
+ {
+ if (item.Value.Clone() is not DatItem newrom || newrom.GetFieldValue(DatItem.SourceKey) == null)
+#if NET40_OR_GREATER || NETCOREAPP
+ return;
+#else
+ continue;
+#endif
+
+ newrom.GetFieldValue(DatItem.MachineKey)!.SetFieldValue(Models.Metadata.Machine.NameKey, newrom.GetFieldValue(DatItem.MachineKey)!.GetStringFieldValue(Models.Metadata.Machine.NameKey) + $" ({Path.GetFileNameWithoutExtension(inputs[newrom.GetFieldValue(DatItem.SourceKey)!.Index].CurrentPath)})");
+ outerDiffData.ItemsDB.AddItem(newrom, machineRemapping[machineIndex], false);
+ }
+#if NET40_OR_GREATER || NETCOREAPP
+ });
+#else
+ }
+#endif
+
+ watch.Stop();
+
+ return outerDiffData;
+ }
+
/// <summary>
/// Populate from multiple paths while returning the invividual headers
/// </summary>
@@ -685,6 +1134,7 @@ namespace SabreTools.DatTools
for (int i = 0; i < inputs.Count; i++)
{
AddFromExisting(datFile, datFiles[i], true);
+ //AddFromExistingDB(datFile, datFiles[i], true);
}
watch.Stop();
@@ -717,6 +1167,57 @@ namespace SabreTools.DatTools
addFrom.ResetDictionary();
}
+ /// <summary>
+ /// Add items from another DatFile to the existing DatFile
+ /// </summary>
+ /// <param name="addTo">DatFile to add to</param>
+ /// <param name="addFrom">DatFile to add from</param>
+ /// <param name="delete">If items should be deleted from the source DatFile</param>
+ private static void AddFromExistingDB(DatFile addTo, DatFile addFrom, bool delete = false)
+ {
+ // Get all current items, machines, and mappings
+ var datItems = addFrom.ItemsDB.GetItems().ToDictionary(m => m.Item1, m => m.Item2);
+ var machines = addFrom.ItemsDB.GetMachines().ToDictionary(m => m.Item1, m => m.Item2);
+ var mappings = addFrom.ItemsDB.GetItemMappings().ToDictionary(m => m.Item1, m => m.Item2);
+
+ // Create a mapping from old machine index to new machine index
+ var machineRemapping = new Dictionary<long, long>();
+
+ // Loop through and add all machines
+ foreach (var machine in machines)
+ {
+ long newMachineIndex = addTo.ItemsDB.AddMachine(machine.Value);
+ machineRemapping[machine.Key] = newMachineIndex;
+ }
+
+ // Loop through and add the items
+#if NET452_OR_GREATER || NETCOREAPP
+ Parallel.ForEach(datItems, Globals.ParallelOptions, item =>
+#elif NET40_OR_GREATER
+ Parallel.ForEach(datItems, item =>
+#else
+ foreach (var item in datItems)
+#endif
+ {
+ // Get the machine index for this item
+ long machineIndex = mappings[item.Key];
+ addTo.ItemsDB.AddItem(item.Value, machineRemapping[machineIndex], false);
+
+ // Now remove the key from the source DAT
+ if (delete)
+ addFrom.ItemsDB.RemoveItem(item.Key);
+
+#if NET40_OR_GREATER || NETCOREAPP
+ });
+#else
+ }
+#endif
+
+ // Now remove the file dictionary from the source DAT
+ if (delete)
+ addFrom.ResetDictionary();
+ }
+
/// <summary>
/// Fill a DatFile with all items with a particular source index ID
/// </summary>
@@ -754,6 +1255,51 @@ namespace SabreTools.DatTools
});
#else
}
+#endif
+ }
+
+ /// <summary>
+ /// Fill a DatFile with all items with a particular source index ID
+ /// </summary>
+ /// <param name="datFile">Current DatFile object to use for updating</param>
+ /// <param name="indexDat">DatFile to add found items to</param>
+ /// <param name="index">Source index ID to retrieve items for</param>
+ /// <returns>DatFile containing all items with the source index ID</returns>
+ private static void FillWithSourceIndexDB(DatFile datFile, DatFile indexDat, int index)
+ {
+ // Get all current items, machines, and mappings
+ var datItems = datFile.ItemsDB.GetItems().ToDictionary(m => m.Item1, m => m.Item2);
+ var machines = datFile.ItemsDB.GetMachines().ToDictionary(m => m.Item1, m => m.Item2);
+ var mappings = datFile.ItemsDB.GetItemMappings().ToDictionary(m => m.Item1, m => m.Item2);
+
+ // Create a mapping from old machine index to new machine index
+ var machineRemapping = new Dictionary<long, long>();
+
+ // Loop through and add all machines
+ foreach (var machine in machines)
+ {
+ long newMachineIndex = indexDat.ItemsDB.AddMachine(machine.Value);
+ machineRemapping[machine.Key] = newMachineIndex;
+ }
+
+ // Loop through and add the items
+#if NET452_OR_GREATER || NETCOREAPP
+ Parallel.ForEach(datItems, Globals.ParallelOptions, item =>
+#elif NET40_OR_GREATER
+ Parallel.ForEach(datItems, item =>
+#else
+ foreach (var item in datItems)
+#endif
+ {
+ // Get the machine index for this item
+ long machineIndex = mappings[item.Key];
+
+ if (item.Value.GetFieldValue(DatItem.SourceKey) != null && item.Value.GetFieldValue(DatItem.SourceKey)!.Index == index)
+ indexDat.ItemsDB.AddItem(item.Value, machineRemapping[machineIndex], false);
+#if NET40_OR_GREATER || NETCOREAPP
+ });
+#else
+ }
#endif
}
}
diff --git a/SabreTools/Features/Update.cs b/SabreTools/Features/Update.cs
index 2168fe16..71f10b6d 100644
--- a/SabreTools/Features/Update.cs
+++ b/SabreTools/Features/Update.cs
@@ -227,6 +227,7 @@ namespace SabreTools.Features
if (updateMode.HasFlag(UpdateMode.DiffDupesOnly))
{
DatFile dupeData = DatFileTool.DiffDuplicates(userInputDat, inputPaths);
+ //DatFile dupeData = DatFileTool.DiffDuplicatesDB(userInputDat, inputPaths);
InternalStopwatch watch = new("Outputting duplicate DAT");
Writer.Write(dupeData, OutputDir, overwrite: false);
@@ -237,6 +238,7 @@ namespace SabreTools.Features
if (updateMode.HasFlag(UpdateMode.DiffNoDupesOnly))
{
DatFile outerDiffData = DatFileTool.DiffNoDuplicates(userInputDat, inputPaths);
+ //DatFile outerDiffData = DatFileTool.DiffNoDuplicatesDB(userInputDat, inputPaths);
InternalStopwatch watch = new("Outputting no duplicate DAT");
Writer.Write(outerDiffData, OutputDir, overwrite: false);
@@ -248,6 +250,7 @@ namespace SabreTools.Features
{
// Get all of the output DatFiles
List<DatFile> datFiles = DatFileTool.DiffIndividuals(userInputDat, inputPaths);
+ //List<DatFile> datFiles = DatFileTool.DiffIndividualsDB(userInputDat, inputPaths);
// Loop through and output the new DatFiles
InternalStopwatch watch = new("Outputting all individual DATs");
@@ -392,6 +395,7 @@ namespace SabreTools.Features
// Now replace the fields from the base DatFile
DatFileTool.BaseReplace(
+ //DatFileTool.BaseReplaceDB(
userInputDat,
repDat,
updateMachineFieldNames,
@@ -415,6 +419,7 @@ namespace SabreTools.Features
// If we're in SuperDAT mode, prefix all games with their respective DATs
if (string.Equals(userInputDat.Header.GetStringFieldValue(Models.Metadata.Header.TypeKey), "SuperDAT", StringComparison.OrdinalIgnoreCase))
DatFileTool.ApplySuperDAT(userInputDat, inputPaths);
+ //DatFileTool.ApplySuperDATDB(userInputDat, inputPaths);
Writer.Write(userInputDat, OutputDir);
}