using System.Collections.Generic;
using System.IO;
#if NET40_OR_GREATER || NETCOREAPP
using System.Threading.Tasks;
#endif
using SabreTools.DatFiles;
using SabreTools.DatItems;
using SabreTools.IO;
using SabreTools.IO.Logging;
namespace SabreTools.DatTools
{
/// <summary>
/// This class contains all methods for diffing DatFiles
/// </summary>
public class Diffing
{
#region Against
/// <summary>
/// Output diffs against a base set represented by the current DAT
/// </summary>
/// <param name="datFile">Current DatFile object representing the base set</param>
/// <param name="intDat">DatFile to remove matched entries from</param>
/// <param name="useGames">True to diff using games, false to use hashes</param>
public static void Against(DatFile datFile, DatFile intDat, bool useGames)
{
    InternalStopwatch comparisonWatch = new($"Comparing '{intDat.Header.GetStringFieldValue(DatHeader.FileNameKey)}' to base DAT");

    // Rebucket the intermediate DAT so its layout matches the base DAT
    if (!useGames)
    {
        intDat.BucketBy(ItemKey.CRC);
        intDat.Deduplicate();
    }
    else
    {
        intDat.BucketBy(ItemKey.Machine);
    }

    // Run both the item-based and the database-based comparisons
    AgainstImpl(datFile, intDat, useGames);
    AgainstDBImpl(datFile, intDat, useGames);

    comparisonWatch.Stop();
}
/// <summary>
/// Compare the item-based dictionary of an intermediate DAT against the base DAT,
/// removing matched entries so only unmatched ones remain
/// </summary>
/// <param name="datFile">Current DatFile object to use as the base set</param>
/// <param name="intDat">DatFile to remove matched entries from</param>
/// <param name="useGames">True to diff using games, false to use hashes</param>
private static void AgainstImpl(DatFile datFile, DatFile intDat, bool useGames)
{
// Then we compare against the base DAT
#if NET452_OR_GREATER || NETCOREAPP
Parallel.ForEach(intDat.Items.SortedKeys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
Parallel.ForEach(intDat.Items.SortedKeys, key =>
#else
foreach (var key in intDat.Items.SortedKeys)
#endif
{
// Game Against uses game names
if (useGames)
{
// If the key is null, keep it
var intList = intDat.GetItemsForBucket(key);
if (intList.Count == 0)
#if NET40_OR_GREATER || NETCOREAPP
return;
#else
continue;
#endif
// If the base DAT doesn't contain the key, keep it
var list = datFile.GetItemsForBucket(key);
if (list.Count == 0)
#if NET40_OR_GREATER || NETCOREAPP
return;
#else
continue;
#endif
// If the number of items is different, then keep it
if (list.Count != intList.Count)
#if NET40_OR_GREATER || NETCOREAPP
return;
#else
continue;
#endif
// Otherwise, compare by name and hash the remaining files
bool exactMatch = true;
foreach (DatItem item in intList)
{
// TODO: Make this granular to name as well
if (!list.Contains(item))
{
exactMatch = false;
break;
}
}
// If we have an exact match, remove the game
if (exactMatch)
intDat.RemoveBucket(key);
}
// Standard Against uses hashes
else
{
List datItems = intDat.GetItemsForBucket(key);
if (datItems.Count == 0)
#if NET40_OR_GREATER || NETCOREAPP
return;
#else
continue;
#endif
// Keep only the items with no duplicate in the base DAT
List keepDatItems = [];
foreach (DatItem datItem in datItems)
{
if (!datFile.HasDuplicates(datItem, true))
keepDatItems.Add(datItem);
}
// Now add the new list to the key
intDat.RemoveBucket(key);
keepDatItems.ForEach(item => intDat.AddItem(item, statsOnly: false));
}
#if NET40_OR_GREATER || NETCOREAPP
});
#else
}
#endif
}
/// <summary>
/// Compare the database-backed dictionary of an intermediate DAT against the base DAT,
/// removing matched entries so only unmatched ones remain
/// </summary>
/// <param name="datFile">Current DatFile object to use as the base set</param>
/// <param name="intDat">DatFile to remove matched entries from</param>
/// <param name="useGames">True to diff using games, false to use hashes</param>
private static void AgainstDBImpl(DatFile datFile, DatFile intDat, bool useGames)
{
// Then we compare against the base DAT
#if NET452_OR_GREATER || NETCOREAPP
Parallel.ForEach(intDat.ItemsDB.SortedKeys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
Parallel.ForEach(intDat.ItemsDB.SortedKeys, key =>
#else
foreach (var key in intDat.ItemsDB.SortedKeys)
#endif
{
// Game Against uses game names
if (useGames)
{
// If the key is null, keep it
var intList = intDat.GetItemsForBucketDB(key);
if (intList.Count == 0)
#if NET40_OR_GREATER || NETCOREAPP
return;
#else
continue;
#endif
// If the base DAT doesn't contain the key, keep it
List list = [.. datFile.GetItemsForBucketDB(key).Values];
if (list.Count == 0)
#if NET40_OR_GREATER || NETCOREAPP
return;
#else
continue;
#endif
// If the number of items is different, then keep it
if (list.Count != intList.Count)
#if NET40_OR_GREATER || NETCOREAPP
return;
#else
continue;
#endif
// Otherwise, compare by name and hash the remaining files
bool exactMatch = true;
foreach (KeyValuePair item in intList)
{
// TODO: Make this granular to name as well
if (!list.Contains(item.Value))
{
exactMatch = false;
break;
}
}
// If we have an exact match, remove the game
if (exactMatch)
intDat.RemoveBucket(key);
}
// Standard Against uses hashes
else
{
// NOTE(review): the item-based variant (AgainstImpl) checks Count == 0 here while this
// checks only for null -- confirm GetItemsForBucketDB never returns an empty dictionary
Dictionary datItems = intDat.GetItemsForBucketDB(key);
if (datItems == null)
#if NET40_OR_GREATER || NETCOREAPP
return;
#else
continue;
#endif
// Keep only the items with no duplicate in the base DAT
List> keepDatItems = [];
foreach (KeyValuePair datItem in datItems)
{
if (!datFile.HasDuplicates(datItem, true))
keepDatItems.Add(datItem);
}
// Get all existing mappings for the kept items
List currentMappings = keepDatItems.ConvertAll(item =>
{
return new ItemMappings(
item.Value,
intDat.GetMachineForItemDB(item.Key).Key,
intDat.GetSourceForItemDB(item.Key).Key
);
});
// Now add the new list to the key
intDat.RemoveBucketDB(key);
currentMappings.ForEach(map =>
intDat.AddItemDB(map.Item, map.MachineId, map.SourceId, statsOnly: false));
}
#if NET40_OR_GREATER || NETCOREAPP
});
#else
}
#endif
}
#endregion
#region Cascade
/// <summary>
/// Output cascading diffs
/// </summary>
/// <param name="datFile">Current DatFile object to use for updating</param>
/// <param name="datHeaders">Dat headers used optionally</param>
/// <returns>List of DatFiles representing the individually indexed items</returns>
public static List Cascade(DatFile datFile, List datHeaders)
{
// Create a list of DatData objects representing output files
List outDats = [];
// Ensure the current DatFile is sorted optimally
datFile.BucketBy(ItemKey.CRC, norename: false);
datFile.Deduplicate();
// Loop through each of the inputs and get or create a new DatData object
InternalStopwatch watch = new("Initializing and filling all output DATs");
// Create the DatFiles from the set of headers
DatFile[] outDatsArray = new DatFile[datHeaders.Count];
#if NET452_OR_GREATER || NETCOREAPP
Parallel.For(0, datHeaders.Count, Core.Globals.ParallelOptions, j =>
#elif NET40_OR_GREATER
Parallel.For(0, datHeaders.Count, j =>
#else
for (int j = 0; j < datHeaders.Count; j++)
#endif
{
DatFile diffData = Parser.CreateDatFile(datHeaders[j], new DatModifiers());
diffData.ResetDictionary();
// Fill this output DAT with only the items whose source index matches j
FillWithSourceIndex(datFile, diffData, j);
FillWithSourceIndexDB(datFile, diffData, j);
outDatsArray[j] = diffData;
#if NET40_OR_GREATER || NETCOREAPP
});
#else
}
#endif
outDats = [.. outDatsArray];
watch.Stop();
return outDats;
}
/// <summary>
/// Fill a DatFile with all items with a particular source index ID
/// </summary>
/// <param name="datFile">Current DatFile object to use for updating</param>
/// <param name="indexDat">DatFile to add found items to</param>
/// <param name="index">Source index ID to retrieve items for</param>
private static void FillWithSourceIndex(DatFile datFile, DatFile indexDat, int index)
{
// Loop through and add the items for this index to the output
#if NET452_OR_GREATER || NETCOREAPP
Parallel.ForEach(datFile.Items.SortedKeys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
Parallel.ForEach(datFile.Items.SortedKeys, key =>
#else
foreach (var key in datFile.Items.SortedKeys)
#endif
{
List items = datFile.GetItemsForBucket(key);
// If the rom list is empty or null, just skip it
if (items == null || items.Count == 0)
#if NET40_OR_GREATER || NETCOREAPP
return;
#else
continue;
#endif
// Copy over any item whose source matches the requested index
foreach (DatItem item in items)
{
var source = item.GetFieldValue(DatItem.SourceKey);
if (source != null && source.Index == index)
indexDat.AddItem(item, statsOnly: false);
}
#if NET40_OR_GREATER || NETCOREAPP
});
#else
}
#endif
}
/// <summary>
/// Fill a DatFile with all database-backed items with a particular source index ID
/// </summary>
/// <param name="datFile">Current DatFile object to use for updating</param>
/// <param name="indexDat">DatFile to add found items to</param>
/// <param name="index">Source index ID to retrieve items for</param>
private static void FillWithSourceIndexDB(DatFile datFile, DatFile indexDat, int index)
{
// Get all current items, machines, and mappings
var datItems = datFile.ItemsDB.GetItems();
var machines = datFile.GetMachinesDB();
var sources = datFile.ItemsDB.GetSources();
// Create mappings from old index to new index
var machineRemapping = new Dictionary();
var sourceRemapping = new Dictionary();
// Loop through and add all sources
foreach (var source in sources)
{
long newSourceIndex = indexDat.AddSourceDB(source.Value);
sourceRemapping[source.Key] = newSourceIndex;
}
// Loop through and add all machines
foreach (var machine in machines)
{
long newMachineIndex = indexDat.AddMachineDB(machine.Value);
machineRemapping[machine.Key] = newMachineIndex;
}
// Loop through and add the items
#if NET452_OR_GREATER || NETCOREAPP
Parallel.ForEach(datItems, Core.Globals.ParallelOptions, item =>
#elif NET40_OR_GREATER
Parallel.ForEach(datItems, item =>
#else
foreach (var item in datItems)
#endif
{
// Get the machine and source index for this item
long machineIndex = datFile.GetMachineForItemDB(item.Key).Key;
long sourceIndex = datFile.GetSourceForItemDB(item.Key).Key;
// Get the source associated with the item
var source = datFile.ItemsDB.GetSource(sourceIndex);
// Only copy items whose source matches the requested index, remapping IDs as we go
if (source != null && source.Index == index)
indexDat.AddItemDB(item.Value, machineRemapping[machineIndex], sourceRemapping[sourceIndex], statsOnly: false);
#if NET40_OR_GREATER || NETCOREAPP
});
#else
}
#endif
}
#endregion
#region Duplicates
/// <summary>
/// Output duplicate item diff
/// </summary>
/// <param name="datFile">Current DatFile object to use for updating</param>
/// <param name="inputs">List of inputs to write out from</param>
public static DatFile Duplicates(DatFile datFile, List inputs)
{
    // Wrap each raw input path before delegating to the path-based overload
    var wrappedPaths = inputs.ConvertAll(input => new ParentablePath(input));
    return Duplicates(datFile, wrappedPaths);
}
/// <summary>
/// Output duplicate item diff
/// </summary>
/// <param name="datFile">Current DatFile object to use for updating</param>
/// <param name="inputs">List of inputs to write out from</param>
public static DatFile Duplicates(DatFile datFile, List inputs)
{
    // Build the output DAT shell first
    DatFile duplicateDat = DuplicatesInit(datFile);

    // Then populate it from both item dictionaries
    var populateWatch = new InternalStopwatch("Populating duplicate DAT");
    DuplicatesImpl(datFile, inputs, duplicateDat);
    DuplicatesDBImpl(datFile, inputs, duplicateDat);
    populateWatch.Stop();

    return duplicateDat;
}
/// <summary>
/// Create a duplicate data DatFile based on an input DatFile
/// </summary>
/// <param name="datFile">DatFile containing header and modifier information</param>
/// <returns>New, empty DatFile whose header fields are suffixed with " (Duplicates)"</returns>
private static DatFile DuplicatesInit(DatFile datFile)
{
    var watch = new InternalStopwatch("Initializing duplicate DAT");

    // Fill in any information not in the base DAT
    // FIX: fallback Name/Description were "datFile.All DATs"; use "All DATs" for
    // consistency with NoDuplicatesInit and IndividualsInit
    if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
        datFile.Header.SetFieldValue(DatHeader.FileNameKey, "All DATs");
    if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
        datFile.Header.SetFieldValue(Models.Metadata.Header.NameKey, "All DATs");
    if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
        datFile.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, "All DATs");

    // Clone the header/modifiers and tag the identifying fields
    string post = " (Duplicates)";
    DatFile dupeData = Parser.CreateDatFile(datFile.Header, datFile.Modifiers);
    dupeData.Header.SetFieldValue(DatHeader.FileNameKey, dupeData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
    dupeData.Header.SetFieldValue(Models.Metadata.Header.NameKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
    dupeData.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
    dupeData.ResetDictionary();

    watch.Stop();
    return dupeData;
}
/// <summary>
/// Populate duplicates data from inputs using the item-based dictionary
/// </summary>
/// <param name="datFile">Source DatFile to process</param>
/// <param name="inputs">Set of input paths for naming</param>
/// <param name="dupeData">Duplicate data DatFile</param>
private static void DuplicatesImpl(DatFile datFile, List inputs, DatFile dupeData)
{
#if NET452_OR_GREATER || NETCOREAPP
Parallel.ForEach(datFile.Items.SortedKeys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
Parallel.ForEach(datFile.Items.SortedKeys, key =>
#else
foreach (var key in datFile.Items.SortedKeys)
#endif
{
List items = ItemDictionary.Merge(datFile.GetItemsForBucket(key));
// If the rom list is empty or null, just skip it
if (items == null || items.Count == 0)
#if NET40_OR_GREATER || NETCOREAPP
return;
#else
continue;
#endif
// Loop through and add the items correctly
foreach (DatItem item in items)
{
// Only keep items flagged as external duplicates
#if NET20 || NET35
if ((item.GetFieldValue(DatItem.DupeTypeKey) & DupeType.External) != 0)
#else
if (item.GetFieldValue(DatItem.DupeTypeKey).HasFlag(DupeType.External))
#endif
{
if (item.Clone() is not DatItem newrom)
continue;
// Tag the machine name with the originating input file name
if (item.GetFieldValue(DatItem.SourceKey) != null)
newrom.GetMachine()!.SetName(newrom.GetMachine()!.GetName() + $" ({Path.GetFileNameWithoutExtension(inputs[item.GetFieldValue(DatItem.SourceKey)!.Index].CurrentPath)})");
dupeData.AddItem(newrom, statsOnly: false);
}
}
#if NET40_OR_GREATER || NETCOREAPP
});
#else
}
#endif
}
/// <summary>
/// Populate duplicates data from inputs using the database-backed dictionary
/// </summary>
/// <param name="datFile">Source DatFile to process</param>
/// <param name="inputs">Set of input paths for naming</param>
/// <param name="dupeData">Duplicate data DatFile</param>
private static void DuplicatesDBImpl(DatFile datFile, List inputs, DatFile dupeData)
{
// Get all current items, machines, and mappings
var datItems = datFile.ItemsDB.GetItems();
var machines = datFile.GetMachinesDB();
var sources = datFile.ItemsDB.GetSources();
// Create mappings from old index to new index
var machineRemapping = new Dictionary();
var sourceRemapping = new Dictionary();
// Loop through and add all sources
foreach (var source in sources)
{
long newSourceIndex = dupeData.AddSourceDB(source.Value);
sourceRemapping[source.Key] = newSourceIndex;
}
// Loop through and add all machines
foreach (var machine in machines)
{
long newMachineIndex = dupeData.AddMachineDB(machine.Value);
machineRemapping[machine.Key] = newMachineIndex;
}
// Loop through and add the items
#if NET452_OR_GREATER || NETCOREAPP
Parallel.ForEach(datItems, Core.Globals.ParallelOptions, item =>
#elif NET40_OR_GREATER
Parallel.ForEach(datItems, item =>
#else
foreach (var item in datItems)
#endif
{
// Get the machine and source index for this item
long machineIndex = datFile.GetMachineForItemDB(item.Key).Key;
long sourceIndex = datFile.GetSourceForItemDB(item.Key).Key;
// If the current item isn't an external duplicate
#if NET20 || NET35
if ((item.Value.GetFieldValue(DatItem.DupeTypeKey) & DupeType.External) == 0)
#else
if (!item.Value.GetFieldValue(DatItem.DupeTypeKey).HasFlag(DupeType.External))
#endif
#if NET40_OR_GREATER || NETCOREAPP
return;
#else
continue;
#endif
// Get the current source and machine
var currentSource = sources[sourceIndex];
string? currentMachineName = machines[machineIndex].GetName();
var currentMachine = datFile.ItemsDB.GetMachine(currentMachineName);
if (currentMachine.Value == null)
#if NET40_OR_GREATER || NETCOREAPP
return;
#else
continue;
#endif
// Get the source-specific machine, named after the originating input file
string? renamedMachineName = $"{currentMachineName} ({Path.GetFileNameWithoutExtension(inputs[currentSource!.Index].CurrentPath)})";
var renamedMachine = datFile.ItemsDB.GetMachine(renamedMachineName);
// Create and register the renamed machine if it doesn't exist yet
if (renamedMachine.Value == null)
{
var newMachine = currentMachine.Value.Clone() as Machine;
newMachine!.SetName(renamedMachineName);
long newMachineIndex = dupeData.AddMachineDB(newMachine!);
renamedMachine = new KeyValuePair(newMachineIndex, newMachine);
}
dupeData.AddItemDB(item.Value, renamedMachine.Key, sourceRemapping[sourceIndex], statsOnly: false);
#if NET40_OR_GREATER || NETCOREAPP
});
#else
}
#endif
}
#endregion
#region Individuals
/// <summary>
/// Output non-cascading diffs
/// </summary>
/// <param name="datFile">Current DatFile object to use for updating</param>
/// <param name="inputs">List of inputs to write out from</param>
public static List Individuals(DatFile datFile, List inputs)
{
    // Wrap each raw input path before delegating to the path-based overload
    var wrappedPaths = inputs.ConvertAll(input => new ParentablePath(input));
    return Individuals(datFile, wrappedPaths);
}
/// <summary>
/// Output non-cascading diffs
/// </summary>
/// <param name="datFile">Current DatFile object to use for updating</param>
/// <param name="inputs">List of inputs to write out from</param>
public static List Individuals(DatFile datFile, List inputs)
{
    // Create one output DAT shell per input
    DatFile[] individualDats = IndividualsInit(datFile, inputs);

    // Then populate each from both item dictionaries
    var populateWatch = new InternalStopwatch("Populating all individual DATs");
    IndividualsImpl(datFile, individualDats);
    IndividualsDBImpl(datFile, individualDats);
    populateWatch.Stop();

    return [.. individualDats];
}
/// <summary>
/// Create individual data DatFiles based on an input DatFile
/// </summary>
/// <param name="datFile">DatFile containing header and modifier information</param>
/// <param name="inputs">List of inputs to write out from</param>
/// <returns>One new, empty DatFile per input, each tagged with its input name</returns>
private static DatFile[] IndividualsInit(DatFile datFile, List inputs)
{
var watch = new InternalStopwatch("Initializing all individual DATs");
// Fill in any information not in the base DAT
if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
datFile.Header.SetFieldValue(DatHeader.FileNameKey, "All DATs");
if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
datFile.Header.SetFieldValue(Models.Metadata.Header.NameKey, "All DATs");
if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
datFile.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, "All DATs");
// Loop through each of the inputs and get or create a new DatData object
DatFile[] outDatsArray = new DatFile[inputs.Count];
#if NET452_OR_GREATER || NETCOREAPP
Parallel.For(0, inputs.Count, Core.Globals.ParallelOptions, j =>
#elif NET40_OR_GREATER
Parallel.For(0, inputs.Count, j =>
#else
for (int j = 0; j < inputs.Count; j++)
#endif
{
// Tag each output's header fields with the input index and normalized name
string innerpost = $" ({j} - {inputs[j].GetNormalizedFileName(true)} Only)";
DatFile diffData = Parser.CreateDatFile(datFile.Header, datFile.Modifiers);
diffData.Header.SetFieldValue(DatHeader.FileNameKey, diffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + innerpost);
diffData.Header.SetFieldValue(Models.Metadata.Header.NameKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + innerpost);
diffData.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + innerpost);
diffData.ResetDictionary();
outDatsArray[j] = diffData;
#if NET40_OR_GREATER || NETCOREAPP
});
#else
}
#endif
watch.Stop();
return outDatsArray;
}
/// <summary>
/// Populate individuals data from inputs using the item-based dictionary
/// </summary>
/// <param name="datFile">Source DatFile to process</param>
/// <param name="outDats">Individual data DatFiles, indexed by source</param>
private static void IndividualsImpl(DatFile datFile, DatFile[] outDats)
{
#if NET452_OR_GREATER || NETCOREAPP
Parallel.ForEach(datFile.Items.SortedKeys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
Parallel.ForEach(datFile.Items.SortedKeys, key =>
#else
foreach (var key in datFile.Items.SortedKeys)
#endif
{
List items = ItemDictionary.Merge(datFile.GetItemsForBucket(key));
// If the rom list is empty or null, just skip it
if (items == null || items.Count == 0)
#if NET40_OR_GREATER || NETCOREAPP
return;
#else
continue;
#endif
// Loop through and add the items correctly
foreach (DatItem item in items)
{
// Items without a source cannot be routed to an output DAT
if (item.GetFieldValue(DatItem.SourceKey) == null)
continue;
// Keep internal duplicates and non-duplicates, routed by source index
#if NET20 || NET35
if ((item.GetFieldValue(DatItem.DupeTypeKey) & DupeType.Internal) != 0 || item.GetFieldValue(DatItem.DupeTypeKey) == 0x00)
#else
if (item.GetFieldValue(DatItem.DupeTypeKey).HasFlag(DupeType.Internal) || item.GetFieldValue(DatItem.DupeTypeKey) == 0x00)
#endif
outDats[item.GetFieldValue(DatItem.SourceKey)!.Index].AddItem(item, statsOnly: false);
}
#if NET40_OR_GREATER || NETCOREAPP
});
#else
}
#endif
}
/// <summary>
/// Populate individuals data from inputs using the database-backed dictionary
/// </summary>
/// <param name="datFile">Source DatFile to process</param>
/// <param name="outDats">Individual data DatFiles, indexed by source</param>
private static void IndividualsDBImpl(DatFile datFile, DatFile[] outDats)
{
// Get all current items, machines, and mappings
var datItems = datFile.ItemsDB.GetItems();
var machines = datFile.GetMachinesDB();
var sources = datFile.ItemsDB.GetSources();
// Create mappings from old index to new index
var machineRemapping = new Dictionary();
var sourceRemapping = new Dictionary();
// Loop through and add all sources
// NOTE(review): the remapping only records indices from outDats[0]; this assumes all
// output DATs assign the same IDs for the same inputs -- confirm AddSourceDB's contract
foreach (var source in sources)
{
long newSourceIndex = outDats[0].AddSourceDB(source.Value);
sourceRemapping[source.Key] = newSourceIndex;
for (int i = 1; i < outDats.Length; i++)
{
_ = outDats[i].AddSourceDB(source.Value);
}
}
// Loop through and add all machines
foreach (var machine in machines)
{
long newMachineIndex = outDats[0].AddMachineDB(machine.Value);
machineRemapping[machine.Key] = newMachineIndex;
for (int i = 1; i < outDats.Length; i++)
{
_ = outDats[i].AddMachineDB(machine.Value);
}
}
// Loop through and add the items
#if NET452_OR_GREATER || NETCOREAPP
Parallel.ForEach(datItems, Core.Globals.ParallelOptions, item =>
#elif NET40_OR_GREATER
Parallel.ForEach(datItems, item =>
#else
foreach (var item in datItems)
#endif
{
// Get the machine and source index for this item
long machineIndex = datFile.GetMachineForItemDB(item.Key).Key;
long sourceIndex = datFile.GetSourceForItemDB(item.Key).Key;
// Get the source associated with the item
var source = datFile.ItemsDB.GetSource(sourceIndex);
if (source == null)
#if NET40_OR_GREATER || NETCOREAPP
return;
#else
continue;
#endif
// Keep internal duplicates and non-duplicates, routed by source index
#if NET20 || NET35
if ((item.Value.GetFieldValue(DatItem.DupeTypeKey) & DupeType.Internal) != 0 || item.Value.GetFieldValue(DatItem.DupeTypeKey) == 0x00)
#else
if (item.Value.GetFieldValue(DatItem.DupeTypeKey).HasFlag(DupeType.Internal) || item.Value.GetFieldValue(DatItem.DupeTypeKey) == 0x00)
#endif
outDats[source.Index].AddItemDB(item.Value, machineRemapping[machineIndex], sourceRemapping[sourceIndex], statsOnly: false);
#if NET40_OR_GREATER || NETCOREAPP
});
#else
}
#endif
}
#endregion
#region NoDuplicates
/// <summary>
/// Output non-duplicate item diff
/// </summary>
/// <param name="datFile">Current DatFile object to use for updating</param>
/// <param name="inputs">List of inputs to write out from</param>
public static DatFile NoDuplicates(DatFile datFile, List inputs)
{
    // Wrap each raw input path before delegating to the path-based overload
    var wrappedPaths = inputs.ConvertAll(input => new ParentablePath(input));
    return NoDuplicates(datFile, wrappedPaths);
}
/// <summary>
/// Output non-duplicate item diff
/// </summary>
/// <param name="datFile">Current DatFile object to use for updating</param>
/// <param name="inputs">List of inputs to write out from</param>
public static DatFile NoDuplicates(DatFile datFile, List inputs)
{
    // Build the output DAT shell first
    DatFile diffDat = NoDuplicatesInit(datFile);

    // Then populate it from both item dictionaries
    var populateWatch = new InternalStopwatch("Populating no duplicate DAT");
    NoDuplicatesImpl(datFile, inputs, diffDat);
    NoDuplicatesDBImpl(datFile, inputs, diffDat);
    populateWatch.Stop();

    return diffDat;
}
/// <summary>
/// Create a no duplicate data DatFile based on an input DatFile
/// </summary>
/// <param name="datFile">DatFile containing header and modifier information</param>
/// <returns>New, empty DatFile whose header fields are suffixed with " (No Duplicates)"</returns>
private static DatFile NoDuplicatesInit(DatFile datFile)
{
    var initWatch = new InternalStopwatch("Initializing no duplicate DAT");

    // Backfill any missing identifying fields on the source header
    if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
        datFile.Header.SetFieldValue(DatHeader.FileNameKey, "All DATs");
    if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
        datFile.Header.SetFieldValue(Models.Metadata.Header.NameKey, "All DATs");
    if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
        datFile.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, "All DATs");

    // Clone the header/modifiers and tag the identifying fields
    string suffix = " (No Duplicates)";
    DatFile diffDat = Parser.CreateDatFile(datFile.Header, datFile.Modifiers);
    diffDat.Header.SetFieldValue(DatHeader.FileNameKey, diffDat.Header.GetStringFieldValue(DatHeader.FileNameKey) + suffix);
    diffDat.Header.SetFieldValue(Models.Metadata.Header.NameKey, diffDat.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + suffix);
    diffDat.Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, diffDat.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + suffix);
    diffDat.ResetDictionary();

    initWatch.Stop();
    return diffDat;
}
/// <summary>
/// Populate no duplicates data from inputs using the item-based dictionary
/// </summary>
/// <param name="datFile">Source DatFile to process</param>
/// <param name="inputs">Set of input paths for naming</param>
/// <param name="outerDiffData">No duplicate data DatFile</param>
private static void NoDuplicatesImpl(DatFile datFile, List inputs, DatFile outerDiffData)
{
#if NET452_OR_GREATER || NETCOREAPP
Parallel.ForEach(datFile.Items.SortedKeys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
Parallel.ForEach(datFile.Items.SortedKeys, key =>
#else
foreach (var key in datFile.Items.SortedKeys)
#endif
{
List items = ItemDictionary.Merge(datFile.GetItemsForBucket(key));
// If the rom list is empty or null, just skip it
if (items == null || items.Count == 0)
#if NET40_OR_GREATER || NETCOREAPP
return;
#else
continue;
#endif
// Loop through and add the items correctly
foreach (DatItem item in items)
{
// Only keep internal duplicates and non-duplicates
#if NET20 || NET35
if ((item.GetFieldValue(DatItem.DupeTypeKey) & DupeType.Internal) != 0 || item.GetFieldValue(DatItem.DupeTypeKey) == 0x00)
#else
if (item.GetFieldValue(DatItem.DupeTypeKey).HasFlag(DupeType.Internal) || item.GetFieldValue(DatItem.DupeTypeKey) == 0x00)
#endif
{
if (item.Clone() is not DatItem newrom || newrom.GetFieldValue(DatItem.SourceKey) == null)
continue;
// Tag the machine name with the originating input file name
newrom.GetMachine()!.SetName(newrom.GetMachine()!.GetName() + $" ({Path.GetFileNameWithoutExtension(inputs[newrom.GetFieldValue(DatItem.SourceKey)!.Index].CurrentPath)})");
outerDiffData.AddItem(newrom, statsOnly: false);
}
}
#if NET40_OR_GREATER || NETCOREAPP
});
#else
}
#endif
}
/// <summary>
/// Populate no duplicates data from inputs using the database-backed dictionary
/// </summary>
/// <param name="datFile">Source DatFile to process</param>
/// <param name="inputs">Set of input paths for naming</param>
/// <param name="outerDiffData">No duplicate data DatFile</param>
private static void NoDuplicatesDBImpl(DatFile datFile, List inputs, DatFile outerDiffData)
{
// Get all current items, machines, and mappings
var datItems = datFile.ItemsDB.GetItems();
var machines = datFile.GetMachinesDB();
var sources = datFile.ItemsDB.GetSources();
// Create mappings from old index to new index
var machineRemapping = new Dictionary();
var sourceRemapping = new Dictionary();
// Loop through and add all sources
foreach (var source in sources)
{
long newSourceIndex = outerDiffData.AddSourceDB(source.Value);
sourceRemapping[source.Key] = newSourceIndex;
}
// Loop through and add all machines
foreach (var machine in machines)
{
long newMachineIndex = outerDiffData.AddMachineDB(machine.Value);
machineRemapping[machine.Key] = newMachineIndex;
}
// Loop through and add the items
#if NET452_OR_GREATER || NETCOREAPP
Parallel.ForEach(datItems, Core.Globals.ParallelOptions, item =>
#elif NET40_OR_GREATER
Parallel.ForEach(datItems, item =>
#else
foreach (var item in datItems)
#endif
{
// Get the machine and source index for this item
long machineIndex = datFile.GetMachineForItemDB(item.Key).Key;
long sourceIndex = datFile.GetSourceForItemDB(item.Key).Key;
// If the current item isn't a duplicate
#if NET20 || NET35
if ((item.Value.GetFieldValue(DatItem.DupeTypeKey) & DupeType.Internal) == 0 && item.Value.GetFieldValue(DatItem.DupeTypeKey) != 0x00)
#else
if (!item.Value.GetFieldValue(DatItem.DupeTypeKey).HasFlag(DupeType.Internal) && item.Value.GetFieldValue(DatItem.DupeTypeKey) != 0x00)
#endif
#if NET40_OR_GREATER || NETCOREAPP
return;
#else
continue;
#endif
// Get the current source and machine
var currentSource = sources[sourceIndex];
string? currentMachineName = machines[machineIndex].GetName();
var currentMachine = datFile.ItemsDB.GetMachine(currentMachineName);
if (currentMachine.Value == null)
#if NET40_OR_GREATER || NETCOREAPP
return;
#else
continue;
#endif
// Get the source-specific machine, named after the originating input file
string? renamedMachineName = $"{currentMachineName} ({Path.GetFileNameWithoutExtension(inputs[currentSource!.Index].CurrentPath)})";
var renamedMachine = datFile.ItemsDB.GetMachine(renamedMachineName);
// Create and register the renamed machine if it doesn't exist yet
if (renamedMachine.Value == null)
{
var newMachine = currentMachine.Value.Clone() as Machine;
newMachine!.SetName(renamedMachineName);
long newMachineIndex = outerDiffData.AddMachineDB(newMachine);
renamedMachine = new KeyValuePair(newMachineIndex, newMachine);
}
outerDiffData.AddItemDB(item.Value, renamedMachine.Key, sourceRemapping[sourceIndex], statsOnly: false);
#if NET40_OR_GREATER || NETCOREAPP
});
#else
}
#endif
}
#endregion
}
}