2024-11-12 21:12:06 -05:00
|
|
|
using System;
|
2020-12-10 14:47:38 -08:00
|
|
|
using System.Collections.Generic;
|
|
|
|
|
using System.IO;
|
|
|
|
|
using System.Linq;
|
2024-03-05 03:04:47 -05:00
|
|
|
#if NET40_OR_GREATER || NETCOREAPP
|
2020-12-10 14:47:38 -08:00
|
|
|
using System.Threading.Tasks;
|
2024-03-05 03:04:47 -05:00
|
|
|
#endif
|
2025-01-07 15:40:05 -05:00
|
|
|
using SabreTools.Core.Tools;
|
2020-12-10 14:47:38 -08:00
|
|
|
using SabreTools.DatItems;
|
2025-01-07 15:40:05 -05:00
|
|
|
using SabreTools.DatItems.Formats;
|
2020-12-10 14:47:38 -08:00
|
|
|
using SabreTools.IO;
|
2024-10-24 00:36:44 -04:00
|
|
|
using SabreTools.IO.Logging;
|
2025-01-07 15:40:05 -05:00
|
|
|
using SabreTools.Matching.Compare;
|
2020-12-10 10:39:39 -08:00
|
|
|
|
2025-01-07 15:31:28 -05:00
|
|
|
namespace SabreTools.DatFiles
|
2020-12-10 10:39:39 -08:00
|
|
|
{
|
2020-12-21 11:38:56 -08:00
|
|
|
/// <summary>
|
|
|
|
|
/// Helper methods for updating and converting DatFiles
|
|
|
|
|
/// </summary>
|
2021-02-01 11:43:38 -08:00
|
|
|
public static class DatFileTool
|
2020-12-10 10:39:39 -08:00
|
|
|
{
|
|
|
|
|
        #region Logging

        /// <summary>
        /// Shared logging object for the static helpers in this class
        /// </summary>
        /// <remarks>
        /// NOTE(review): not referenced by any method visible in this file —
        /// confirm it is actually used before removing.
        /// </remarks>
        private static readonly Logger _staticLogger = new();

        #endregion
|
2020-12-10 14:47:38 -08:00
|
|
|
|
2025-01-07 15:40:05 -05:00
|
|
|
#region Sorting and Merging
|
|
|
|
|
|
|
|
|
|
/// <summary>
|
|
|
|
|
/// Merge an arbitrary set of DatItems based on the supplied information
|
|
|
|
|
/// </summary>
|
|
|
|
|
/// <param name="items">List of DatItem objects representing the items to be merged</param>
|
|
|
|
|
/// <returns>A List of DatItem objects representing the merged items</returns>
|
|
|
|
|
public static List<DatItem> Merge(List<DatItem>? items)
|
|
|
|
|
{
|
|
|
|
|
// Check for null or blank inputs first
|
|
|
|
|
if (items == null || items.Count == 0)
|
|
|
|
|
return [];
|
|
|
|
|
|
|
|
|
|
// Create output list
|
|
|
|
|
List<DatItem> output = [];
|
|
|
|
|
|
|
|
|
|
// Then deduplicate them by checking to see if data matches previous saved roms
|
|
|
|
|
int nodumpCount = 0;
|
|
|
|
|
foreach (DatItem datItem in items)
|
|
|
|
|
{
|
|
|
|
|
// If we don't have a Disk, File, Media, or Rom, we skip checking for duplicates
|
|
|
|
|
if (datItem is not Disk && datItem is not DatItems.Formats.File && datItem is not Media && datItem is not Rom)
|
|
|
|
|
continue;
|
|
|
|
|
|
|
|
|
|
// If it's a nodump, add and skip
|
|
|
|
|
if (datItem is Rom rom && rom.GetStringFieldValue(Models.Metadata.Rom.StatusKey).AsEnumValue<ItemStatus>() == ItemStatus.Nodump)
|
|
|
|
|
{
|
|
|
|
|
output.Add(datItem);
|
|
|
|
|
nodumpCount++;
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
else if (datItem is Disk disk && disk.GetStringFieldValue(Models.Metadata.Disk.StatusKey).AsEnumValue<ItemStatus>() == ItemStatus.Nodump)
|
|
|
|
|
{
|
|
|
|
|
output.Add(datItem);
|
|
|
|
|
nodumpCount++;
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// If it's the first non-nodump item in the list, don't touch it
|
|
|
|
|
if (output.Count == nodumpCount)
|
|
|
|
|
{
|
|
|
|
|
output.Add(datItem);
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Find the index of the first duplicate, if one exists
|
|
|
|
|
int pos = output.FindIndex(lastItem => datItem.GetDuplicateStatus(lastItem) != 0x00);
|
|
|
|
|
if (pos < 0)
|
|
|
|
|
{
|
|
|
|
|
output.Add(datItem);
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Get the duplicate item
|
|
|
|
|
DatItem savedItem = output[pos];
|
|
|
|
|
DupeType dupetype = datItem.GetDuplicateStatus(savedItem);
|
|
|
|
|
|
|
|
|
|
// Disks, File, Media, and Roms have more information to fill
|
|
|
|
|
if (datItem is Disk diskItem && savedItem is Disk savedDisk)
|
|
|
|
|
savedDisk.FillMissingInformation(diskItem);
|
|
|
|
|
else if (datItem is DatItems.Formats.File fileItem && savedItem is DatItems.Formats.File savedFile)
|
|
|
|
|
savedFile.FillMissingInformation(fileItem);
|
|
|
|
|
else if (datItem is Media mediaItem && savedItem is Media savedMedia)
|
|
|
|
|
savedMedia.FillMissingInformation(mediaItem);
|
|
|
|
|
else if (datItem is Rom romItem && savedItem is Rom savedRom)
|
|
|
|
|
savedRom.FillMissingInformation(romItem);
|
|
|
|
|
|
|
|
|
|
// Set the duplicate type on the saved item
|
|
|
|
|
savedItem.SetFieldValue<DupeType>(DatItem.DupeTypeKey, dupetype);
|
|
|
|
|
|
|
|
|
|
// Get the sources associated with the items
|
|
|
|
|
var savedSource = savedItem.GetFieldValue<Source?>(DatItem.SourceKey);
|
|
|
|
|
var itemSource = datItem.GetFieldValue<Source?>(DatItem.SourceKey);
|
|
|
|
|
|
|
|
|
|
// Get the machines associated with the items
|
|
|
|
|
var savedMachine = savedItem.GetFieldValue<Machine>(DatItem.MachineKey);
|
|
|
|
|
var itemMachine = datItem.GetFieldValue<Machine>(DatItem.MachineKey);
|
|
|
|
|
|
|
|
|
|
// If the current source has a lower ID than the saved, use the saved source
|
|
|
|
|
if (itemSource?.Index < savedSource?.Index)
|
|
|
|
|
{
|
|
|
|
|
datItem.SetFieldValue<Source?>(DatItem.SourceKey, savedSource.Clone() as Source);
|
|
|
|
|
savedItem.CopyMachineInformation(datItem);
|
|
|
|
|
savedItem.SetName(datItem.GetName());
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// If the saved machine is a child of the current machine, use the current machine instead
|
|
|
|
|
if (savedMachine?.GetStringFieldValue(Models.Metadata.Machine.CloneOfKey) == itemMachine?.GetStringFieldValue(Models.Metadata.Machine.NameKey)
|
|
|
|
|
|| savedMachine?.GetStringFieldValue(Models.Metadata.Machine.RomOfKey) == itemMachine?.GetStringFieldValue(Models.Metadata.Machine.NameKey))
|
|
|
|
|
{
|
|
|
|
|
savedItem.CopyMachineInformation(datItem);
|
|
|
|
|
savedItem.SetName(datItem.GetName());
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Replace the original item in the list
|
|
|
|
|
output.RemoveAt(pos);
|
|
|
|
|
output.Insert(pos, savedItem);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Then return the result
|
|
|
|
|
return output;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// <summary>
|
|
|
|
|
/// Sort a list of DatItem objects by SourceID, Game, and Name (in order)
|
|
|
|
|
/// </summary>
|
|
|
|
|
/// <param name="items">List of DatItem objects representing the items to be sorted</param>
|
|
|
|
|
/// <param name="norename">True if files are not renamed, false otherwise</param>
|
|
|
|
|
/// <returns>True if it sorted correctly, false otherwise</returns>
|
|
|
|
|
public static bool Sort(ref List<DatItem> items, bool norename)
|
|
|
|
|
{
|
|
|
|
|
items.Sort(delegate (DatItem x, DatItem y)
|
|
|
|
|
{
|
|
|
|
|
try
|
|
|
|
|
{
|
|
|
|
|
var nc = new NaturalComparer();
|
|
|
|
|
|
|
|
|
|
// If machine names don't match
|
|
|
|
|
string? xMachineName = x.GetFieldValue<Machine>(DatItem.MachineKey)?.GetStringFieldValue(Models.Metadata.Machine.NameKey);
|
|
|
|
|
string? yMachineName = y.GetFieldValue<Machine>(DatItem.MachineKey)?.GetStringFieldValue(Models.Metadata.Machine.NameKey);
|
|
|
|
|
if (xMachineName != yMachineName)
|
|
|
|
|
return nc.Compare(xMachineName, yMachineName);
|
|
|
|
|
|
|
|
|
|
// If types don't match
|
|
|
|
|
string? xType = x.GetStringFieldValue(Models.Metadata.DatItem.TypeKey);
|
|
|
|
|
string? yType = y.GetStringFieldValue(Models.Metadata.DatItem.TypeKey);
|
|
|
|
|
if (xType != yType)
|
|
|
|
|
return xType.AsEnumValue<ItemType>() - yType.AsEnumValue<ItemType>();
|
|
|
|
|
|
|
|
|
|
// If directory names don't match
|
|
|
|
|
string? xDirectoryName = Path.GetDirectoryName(TextHelper.RemovePathUnsafeCharacters(x.GetName() ?? string.Empty));
|
|
|
|
|
string? yDirectoryName = Path.GetDirectoryName(TextHelper.RemovePathUnsafeCharacters(y.GetName() ?? string.Empty));
|
|
|
|
|
if (xDirectoryName != yDirectoryName)
|
|
|
|
|
return nc.Compare(xDirectoryName, yDirectoryName);
|
|
|
|
|
|
|
|
|
|
// If item names don't match
|
|
|
|
|
string? xName = Path.GetFileName(TextHelper.RemovePathUnsafeCharacters(x.GetName() ?? string.Empty));
|
|
|
|
|
string? yName = Path.GetFileName(TextHelper.RemovePathUnsafeCharacters(y.GetName() ?? string.Empty));
|
|
|
|
|
if (xName != yName)
|
|
|
|
|
return nc.Compare(xName, yName);
|
|
|
|
|
|
|
|
|
|
// Otherwise, compare on machine or source, depending on the flag
|
|
|
|
|
int? xSourceIndex = x.GetFieldValue<Source?>(DatItem.SourceKey)?.Index;
|
|
|
|
|
int? ySourceIndex = y.GetFieldValue<Source?>(DatItem.SourceKey)?.Index;
|
|
|
|
|
return (norename ? nc.Compare(xMachineName, yMachineName) : (xSourceIndex - ySourceIndex) ?? 0);
|
|
|
|
|
}
|
|
|
|
|
catch
|
|
|
|
|
{
|
|
|
|
|
// Absorb the error
|
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// <summary>
|
|
|
|
|
/// Sort a list of DatItem objects by SourceID, Game, and Name (in order)
|
|
|
|
|
/// </summary>
|
|
|
|
|
/// <param name="mappings">List of item ID to DatItem mappings representing the items to be sorted</param>
|
|
|
|
|
/// <param name="norename">True if files are not renamed, false otherwise</param>
|
|
|
|
|
/// <returns>True if it sorted correctly, false otherwise</returns>
|
|
|
|
|
public static bool SortDB(ref List<KeyValuePair<long, DatItem>> mappings, bool norename)
|
|
|
|
|
{
|
|
|
|
|
mappings.Sort(delegate (KeyValuePair<long, DatItem> x, KeyValuePair<long, DatItem> y)
|
|
|
|
|
{
|
|
|
|
|
try
|
|
|
|
|
{
|
|
|
|
|
var nc = new NaturalComparer();
|
|
|
|
|
|
|
|
|
|
// TODO: Fix this since DB uses an external map for machines
|
|
|
|
|
|
|
|
|
|
// If machine names don't match
|
|
|
|
|
string? xMachineName = x.Value.GetFieldValue<Machine>(DatItem.MachineKey)?.GetStringFieldValue(Models.Metadata.Machine.NameKey);
|
|
|
|
|
string? yMachineName = y.Value.GetFieldValue<Machine>(DatItem.MachineKey)?.GetStringFieldValue(Models.Metadata.Machine.NameKey);
|
|
|
|
|
if (xMachineName != yMachineName)
|
|
|
|
|
return nc.Compare(xMachineName, yMachineName);
|
|
|
|
|
|
|
|
|
|
// If types don't match
|
|
|
|
|
string? xType = x.Value.GetStringFieldValue(Models.Metadata.DatItem.TypeKey);
|
|
|
|
|
string? yType = y.Value.GetStringFieldValue(Models.Metadata.DatItem.TypeKey);
|
|
|
|
|
if (xType != yType)
|
|
|
|
|
return xType.AsEnumValue<ItemType>() - yType.AsEnumValue<ItemType>();
|
|
|
|
|
|
|
|
|
|
// If directory names don't match
|
|
|
|
|
string? xDirectoryName = Path.GetDirectoryName(TextHelper.RemovePathUnsafeCharacters(x.Value.GetName() ?? string.Empty));
|
|
|
|
|
string? yDirectoryName = Path.GetDirectoryName(TextHelper.RemovePathUnsafeCharacters(y.Value.GetName() ?? string.Empty));
|
|
|
|
|
if (xDirectoryName != yDirectoryName)
|
|
|
|
|
return nc.Compare(xDirectoryName, yDirectoryName);
|
|
|
|
|
|
|
|
|
|
// If item names don't match
|
|
|
|
|
string? xName = Path.GetFileName(TextHelper.RemovePathUnsafeCharacters(x.Value.GetName() ?? string.Empty));
|
|
|
|
|
string? yName = Path.GetFileName(TextHelper.RemovePathUnsafeCharacters(y.Value.GetName() ?? string.Empty));
|
|
|
|
|
if (xName != yName)
|
|
|
|
|
return nc.Compare(xName, yName);
|
|
|
|
|
|
|
|
|
|
// Otherwise, compare on machine or source, depending on the flag
|
|
|
|
|
int? xSourceIndex = x.Value.GetFieldValue<Source?>(DatItem.SourceKey)?.Index;
|
|
|
|
|
int? ySourceIndex = y.Value.GetFieldValue<Source?>(DatItem.SourceKey)?.Index;
|
|
|
|
|
return (norename ? nc.Compare(xMachineName, yMachineName) : (xSourceIndex - ySourceIndex) ?? 0);
|
|
|
|
|
}
|
|
|
|
|
catch
|
|
|
|
|
{
|
|
|
|
|
// Absorb the error
|
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#endregion
|
|
|
|
|
|
|
|
|
|
#region SuperDAT
|
|
|
|
|
|
2021-02-01 11:43:38 -08:00
|
|
|
        /// <summary>
        /// Apply SuperDAT naming logic to a merged DatFile
        /// </summary>
        /// <param name="datFile">Current DatFile object to run operations on</param>
        /// <param name="inputs">List of inputs to use for renaming; indexed by each item's Source.Index</param>
        /// <remarks>
        /// Rewrites each machine name to "{input dir}\{input file stem}\{machine name}",
        /// where the input path is taken relative to its parent path. Items with no
        /// Source or no Machine are dropped from the bucket (they are not re-added).
        /// Buckets are processed in parallel on NET40+/NETCOREAPP builds.
        /// </remarks>
        public static void ApplySuperDAT(DatFile datFile, List<ParentablePath> inputs)
        {
            // Snapshot the keys so bucket removal below doesn't invalidate iteration
            List<string> keys = [.. datFile.Items.Keys];
#if NET452_OR_GREATER || NETCOREAPP
            Parallel.ForEach(keys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
            Parallel.ForEach(keys, key =>
#else
            foreach (var key in keys)
#endif
            {
                List<DatItem>? items = datFile.Items[key];
                if (items == null)
#if NET40_OR_GREATER || NETCOREAPP
                    return;
#else
                    continue;
#endif

                List<DatItem> newItems = [];
                foreach (DatItem item in items)
                {
                    DatItem newItem = item;

                    // Without a source we can't resolve which input path to prefix with
                    var source = newItem.GetFieldValue<Source?>(DatItem.SourceKey);
                    if (source == null)
                        continue;

                    string filename = inputs[source.Index].CurrentPath;
                    string rootpath = inputs[source.Index].ParentPath ?? string.Empty;

                    // Ensure the root path ends with a separator before stripping it
                    if (rootpath.Length > 0
#if NETFRAMEWORK
                        && !rootpath.EndsWith(Path.DirectorySeparatorChar.ToString())
                        && !rootpath.EndsWith(Path.AltDirectorySeparatorChar.ToString()))
#else
                        && !rootpath.EndsWith(Path.DirectorySeparatorChar)
                        && !rootpath.EndsWith(Path.AltDirectorySeparatorChar))
#endif
                    {
                        rootpath += Path.DirectorySeparatorChar.ToString();
                    }

                    // Make the filename relative to the root path
                    filename = filename.Remove(0, rootpath.Length);

                    var machine = newItem.GetFieldValue<Machine>(DatItem.MachineKey);
                    if (machine == null)
                        continue;

                    // Prefix the machine name with "{relative dir}\{file stem}\"
                    machine.SetFieldValue<string?>(Models.Metadata.Machine.NameKey, Path.GetDirectoryName(filename)
                        + Path.DirectorySeparatorChar
                        + Path.GetFileNameWithoutExtension(filename)
                        + Path.DirectorySeparatorChar
                        + machine.GetStringFieldValue(Models.Metadata.Machine.NameKey));

                    newItems.Add(newItem);
                }

                // Replace the bucket contents with the renamed items
                datFile.Items.Remove(key);
                datFile.Items.AddRange(key, newItems);
#if NET40_OR_GREATER || NETCOREAPP
            });
#else
            }
#endif
        }
|
|
|
|
|
|
2024-03-20 00:36:47 -04:00
|
|
|
        /// <summary>
        /// Apply SuperDAT naming logic to a merged DatFile
        /// </summary>
        /// <param name="datFile">Current DatFile object to run operations on</param>
        /// <param name="inputs">List of inputs to use for renaming; indexed by each item's source index</param>
        /// <remarks>
        /// Database-backed counterpart of <see cref="ApplySuperDAT"/>. Unlike the
        /// list-based version, machines are mutated in place via the external
        /// machine map, so no bucket rebuilding is needed. Items with no resolvable
        /// source or machine are simply skipped (left unrenamed).
        /// </remarks>
        public static void ApplySuperDATDB(DatFile datFile, List<ParentablePath> inputs)
        {
            List<string> keys = [.. datFile.ItemsDB.SortedKeys];
#if NET452_OR_GREATER || NETCOREAPP
            Parallel.ForEach(keys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
            Parallel.ForEach(keys, key =>
#else
            foreach (var key in keys)
#endif
            {
                var items = datFile.ItemsDB.GetItemsForBucket(key);
                if (items == null)
#if NET40_OR_GREATER || NETCOREAPP
                    return;
#else
                    continue;
#endif

                foreach (var item in items)
                {
                    // Source and machine come from the DB's external maps, keyed by item ID
                    var source = datFile.ItemsDB.GetSourceForItem(item.Key);
                    if (source.Value == null)
                        continue;

                    var machine = datFile.ItemsDB.GetMachineForItem(item.Key);
                    if (machine.Value == null)
                        continue;

                    string filename = inputs[source.Value.Index].CurrentPath;
                    string rootpath = inputs[source.Value.Index].ParentPath ?? string.Empty;

                    // Ensure the root path ends with a separator before stripping it
                    if (rootpath.Length > 0
#if NETFRAMEWORK
                        // NOTE(review): the null-forgiving '!' is unnecessary here —
                        // rootpath is a non-nullable local
                        && !rootpath!.EndsWith(Path.DirectorySeparatorChar.ToString())
                        && !rootpath!.EndsWith(Path.AltDirectorySeparatorChar.ToString()))
#else
                        && !rootpath.EndsWith(Path.DirectorySeparatorChar)
                        && !rootpath.EndsWith(Path.AltDirectorySeparatorChar))
#endif
                    {
                        rootpath += Path.DirectorySeparatorChar.ToString();
                    }

                    // Make the filename relative to the root path
                    filename = filename.Remove(0, rootpath.Length);

                    // Prefix the machine name with "{relative dir}\{file stem}\"
                    machine.Value.SetFieldValue<string?>(Models.Metadata.Machine.NameKey, Path.GetDirectoryName(filename) + Path.DirectorySeparatorChar
                        + Path.GetFileNameWithoutExtension(filename) + Path.DirectorySeparatorChar
                        + machine.Value.GetStringFieldValue(Models.Metadata.Machine.NameKey));
                }
#if NET40_OR_GREATER || NETCOREAPP
            });
#else
            }
#endif
        }
|
|
|
|
|
|
2025-01-07 15:40:05 -05:00
|
|
|
#endregion
|
|
|
|
|
|
|
|
|
|
#region Replacement
|
|
|
|
|
|
2020-12-10 14:47:38 -08:00
|
|
|
/// <summary>
|
|
|
|
|
/// Replace item values from the base set represented by the current DAT
|
|
|
|
|
/// </summary>
|
|
|
|
|
/// <param name="datFile">Current DatFile object to use for updating</param>
|
|
|
|
|
/// <param name="intDat">DatFile to replace the values in</param>
|
2024-03-05 20:07:38 -05:00
|
|
|
/// <param name="machineFieldNames">List of machine field names representing what should be updated</param>
|
|
|
|
|
/// <param name="itemFieldNames">List of item field names representing what should be updated</param>
|
2020-12-10 14:47:38 -08:00
|
|
|
/// <param name="onlySame">True if descriptions should only be replaced if the game name is the same, false otherwise</param>
|
2020-12-13 13:22:06 -08:00
|
|
|
public static void BaseReplace(
|
|
|
|
|
DatFile datFile,
|
|
|
|
|
DatFile intDat,
|
2024-03-05 20:07:38 -05:00
|
|
|
List<string> machineFieldNames,
|
|
|
|
|
Dictionary<string, List<string>> itemFieldNames,
|
2020-12-13 13:22:06 -08:00
|
|
|
bool onlySame)
|
2020-12-10 14:47:38 -08:00
|
|
|
{
|
2024-03-11 15:46:44 -04:00
|
|
|
InternalStopwatch watch = new($"Replacing items in '{intDat.Header.GetStringFieldValue(DatHeader.FileNameKey)}' from the base DAT");
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
// If we are matching based on DatItem fields of any sort
|
2024-10-19 21:41:08 -04:00
|
|
|
if (itemFieldNames.Count > 0)
|
2020-12-10 14:47:38 -08:00
|
|
|
{
|
|
|
|
|
// For comparison's sake, we want to use CRC as the base bucketing
|
2020-12-14 15:43:01 -08:00
|
|
|
datFile.Items.BucketBy(ItemKey.CRC, DedupeType.Full);
|
|
|
|
|
intDat.Items.BucketBy(ItemKey.CRC, DedupeType.None);
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
// Then we do a hashwise comparison against the base DAT
|
2024-02-28 22:54:56 -05:00
|
|
|
#if NET452_OR_GREATER || NETCOREAPP
|
2024-10-24 05:58:03 -04:00
|
|
|
Parallel.ForEach(intDat.Items.Keys, Core.Globals.ParallelOptions, key =>
|
2024-02-28 22:54:56 -05:00
|
|
|
#elif NET40_OR_GREATER
|
|
|
|
|
Parallel.ForEach(intDat.Items.Keys, key =>
|
|
|
|
|
#else
|
|
|
|
|
foreach (var key in intDat.Items.Keys)
|
|
|
|
|
#endif
|
2020-12-10 14:47:38 -08:00
|
|
|
{
|
2024-10-30 10:59:04 -04:00
|
|
|
List<DatItem>? datItems = intDat.Items[key];
|
2024-02-28 19:19:50 -05:00
|
|
|
if (datItems == null)
|
2024-03-05 02:52:53 -05:00
|
|
|
#if NET40_OR_GREATER || NETCOREAPP
|
2024-02-28 19:19:50 -05:00
|
|
|
return;
|
2024-03-05 02:52:53 -05:00
|
|
|
#else
|
|
|
|
|
continue;
|
|
|
|
|
#endif
|
2024-02-28 19:19:50 -05:00
|
|
|
|
2024-10-30 10:59:04 -04:00
|
|
|
List<DatItem> newDatItems = [];
|
2020-12-10 14:47:38 -08:00
|
|
|
foreach (DatItem datItem in datItems)
|
|
|
|
|
{
|
2024-10-30 10:59:04 -04:00
|
|
|
List<DatItem> dupes = datFile.Items.GetDuplicates(datItem, sorted: true);
|
2024-02-28 19:19:50 -05:00
|
|
|
if (datItem.Clone() is not DatItem newDatItem)
|
|
|
|
|
continue;
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
// Replace fields from the first duplicate, if we have one
|
|
|
|
|
if (dupes.Count > 0)
|
2024-10-19 21:41:08 -04:00
|
|
|
Replacer.ReplaceFields(newDatItem, dupes[0], itemFieldNames);
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
newDatItems.Add(newDatItem);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Now add the new list to the key
|
|
|
|
|
intDat.Items.Remove(key);
|
|
|
|
|
intDat.Items.AddRange(key, newDatItems);
|
2024-02-28 21:59:13 -05:00
|
|
|
#if NET40_OR_GREATER || NETCOREAPP
|
2020-12-10 14:47:38 -08:00
|
|
|
});
|
2024-02-28 21:59:13 -05:00
|
|
|
#else
|
|
|
|
|
}
|
|
|
|
|
#endif
|
2020-12-10 14:47:38 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// If we are matching based on Machine fields of any sort
|
2024-10-19 21:41:08 -04:00
|
|
|
if (machineFieldNames.Count > 0)
|
2020-12-10 14:47:38 -08:00
|
|
|
{
|
|
|
|
|
// For comparison's sake, we want to use Machine Name as the base bucketing
|
2020-12-14 15:43:01 -08:00
|
|
|
datFile.Items.BucketBy(ItemKey.Machine, DedupeType.Full);
|
|
|
|
|
intDat.Items.BucketBy(ItemKey.Machine, DedupeType.None);
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
// Then we do a namewise comparison against the base DAT
|
2024-02-28 22:54:56 -05:00
|
|
|
#if NET452_OR_GREATER || NETCOREAPP
|
2024-10-24 05:58:03 -04:00
|
|
|
Parallel.ForEach(intDat.Items.Keys, Core.Globals.ParallelOptions, key =>
|
2024-02-28 22:54:56 -05:00
|
|
|
#elif NET40_OR_GREATER
|
|
|
|
|
Parallel.ForEach(intDat.Items.Keys, key =>
|
|
|
|
|
#else
|
|
|
|
|
foreach (var key in intDat.Items.Keys)
|
|
|
|
|
#endif
|
2020-12-10 14:47:38 -08:00
|
|
|
{
|
2024-10-30 10:59:04 -04:00
|
|
|
List<DatItem>? datItems = intDat.Items[key];
|
2024-02-28 19:19:50 -05:00
|
|
|
if (datItems == null)
|
2024-03-05 02:52:53 -05:00
|
|
|
#if NET40_OR_GREATER || NETCOREAPP
|
2024-02-28 19:19:50 -05:00
|
|
|
return;
|
2024-03-05 02:52:53 -05:00
|
|
|
#else
|
|
|
|
|
continue;
|
|
|
|
|
#endif
|
2024-02-28 19:19:50 -05:00
|
|
|
|
2024-10-30 10:59:04 -04:00
|
|
|
List<DatItem> newDatItems = [];
|
2020-12-10 14:47:38 -08:00
|
|
|
foreach (DatItem datItem in datItems)
|
|
|
|
|
{
|
2024-02-28 19:19:50 -05:00
|
|
|
if (datItem.Clone() is not DatItem newDatItem)
|
|
|
|
|
continue;
|
|
|
|
|
|
|
|
|
|
if (!datFile.Items.TryGetValue(key, out var list) || list == null)
|
|
|
|
|
continue;
|
|
|
|
|
|
|
|
|
|
if (datFile.Items.ContainsKey(key) && list.Count > 0)
|
2024-03-10 16:49:07 -04:00
|
|
|
Replacer.ReplaceFields(newDatItem.GetFieldValue<Machine>(DatItem.MachineKey)!, list[index: 0].GetFieldValue<Machine>(DatItem.MachineKey)!, machineFieldNames, onlySame);
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
newDatItems.Add(newDatItem);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Now add the new list to the key
|
|
|
|
|
intDat.Items.Remove(key);
|
|
|
|
|
intDat.Items.AddRange(key, newDatItems);
|
2024-02-28 21:59:13 -05:00
|
|
|
#if NET40_OR_GREATER || NETCOREAPP
|
2020-12-10 14:47:38 -08:00
|
|
|
});
|
2024-02-28 21:59:13 -05:00
|
|
|
#else
|
|
|
|
|
}
|
|
|
|
|
#endif
|
2020-12-10 14:47:38 -08:00
|
|
|
}
|
2021-02-02 14:09:49 -08:00
|
|
|
|
|
|
|
|
watch.Stop();
|
2020-12-10 14:47:38 -08:00
|
|
|
}
|
|
|
|
|
|
2024-03-20 00:36:47 -04:00
|
|
|
        /// <summary>
        /// Replace item values from the base set represented by the current DAT
        /// </summary>
        /// <param name="datFile">Current DatFile object to use for updating</param>
        /// <param name="intDat">DatFile to replace the values in</param>
        /// <param name="machineFieldNames">List of machine field names representing what should be updated</param>
        /// <param name="itemFieldNames">List of item field names representing what should be updated</param>
        /// <param name="onlySame">True if descriptions should only be replaced if the game name is the same, false otherwise</param>
        /// <remarks>
        /// Database-backed counterpart of <see cref="BaseReplace"/>. Items and
        /// machines are mutated in place through the DB maps rather than cloned
        /// and re-added.
        /// </remarks>
        public static void BaseReplaceDB(
            DatFile datFile,
            DatFile intDat,
            List<string> machineFieldNames,
            Dictionary<string, List<string>> itemFieldNames,
            bool onlySame)
        {
            InternalStopwatch watch = new($"Replacing items in '{intDat.Header.GetStringFieldValue(DatHeader.FileNameKey)}' from the base DAT");

            // If we are matching based on DatItem fields of any sort
            if (itemFieldNames.Count > 0)
            {
                // For comparison's sake, we want to use CRC as the base bucketing
                datFile.ItemsDB.BucketBy(ItemKey.CRC, DedupeType.Full);
                intDat.ItemsDB.BucketBy(ItemKey.CRC, DedupeType.None);

                // Then we do a hashwise comparison against the base DAT
#if NET452_OR_GREATER || NETCOREAPP
                Parallel.ForEach(intDat.ItemsDB.SortedKeys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
                Parallel.ForEach(intDat.ItemsDB.SortedKeys, key =>
#else
                foreach (var key in intDat.ItemsDB.SortedKeys)
#endif
                {
                    var datItems = intDat.ItemsDB.GetItemsForBucket(key);
                    if (datItems == null)
#if NET40_OR_GREATER || NETCOREAPP
                        return;
#else
                        continue;
#endif

                    foreach (var datItem in datItems)
                    {
                        var dupes = datFile.ItemsDB.GetDuplicates(datItem, sorted: true);
                        // NOTE(review): newDatItem is cloned here only as a type check and
                        // never used afterward — ReplaceFields below mutates datItem.Value
                        // in place. Confirm the clone isn't meant to be the target.
                        if (datItem.Value.Clone() is not DatItem newDatItem)
                            continue;

                        // Replace fields from the first duplicate, if we have one
                        if (dupes.Count > 0)
                            Replacer.ReplaceFields(datItem.Value, dupes.First().Value, itemFieldNames);
                    }
#if NET40_OR_GREATER || NETCOREAPP
                });
#else
                }
#endif
            }

            // If we are matching based on Machine fields of any sort
            if (machineFieldNames.Count > 0)
            {
                // For comparison's sake, we want to use Machine Name as the base bucketing
                datFile.ItemsDB.BucketBy(ItemKey.Machine, DedupeType.Full);
                intDat.ItemsDB.BucketBy(ItemKey.Machine, DedupeType.None);

                // Then we do a namewise comparison against the base DAT
#if NET452_OR_GREATER || NETCOREAPP
                Parallel.ForEach(intDat.ItemsDB.SortedKeys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
                Parallel.ForEach(intDat.ItemsDB.SortedKeys, key =>
#else
                foreach (var key in intDat.ItemsDB.SortedKeys)
#endif
                {
                    var datItems = intDat.ItemsDB.GetItemsForBucket(key);
                    if (datItems == null)
#if NET40_OR_GREATER || NETCOREAPP
                        return;
#else
                        continue;
#endif

                    foreach (var datItem in datItems)
                    {
                        // Take the machine from the first base item in the same bucket.
                        // NOTE(review): GetItemsForBucket(key) on the base DAT is
                        // null-forgiven here — if the base DAT has no bucket for this
                        // key, First() would throw; confirm that cannot happen.
                        var datMachine = datFile.ItemsDB.GetMachineForItem(datFile.ItemsDB.GetItemsForBucket(key)!.First().Key);
                        var intMachine = intDat.ItemsDB.GetMachineForItem(datItem.Key);
                        if (datMachine.Value != null && intMachine.Value != null)
                            Replacer.ReplaceFields(intMachine.Value, datMachine.Value, machineFieldNames, onlySame);
                    }
#if NET40_OR_GREATER || NETCOREAPP
                });
#else
                }
#endif
            }

            watch.Stop();
        }
|
|
|
|
|
|
2025-01-07 15:40:05 -05:00
|
|
|
#endregion
|
|
|
|
|
|
|
|
|
|
#region Diffing
|
|
|
|
|
|
2020-12-10 14:47:38 -08:00
|
|
|
        /// <summary>
        /// Output diffs against a base set represented by the current DAT
        /// </summary>
        /// <param name="datFile">Current DatFile object to use for updating</param>
        /// <param name="intDat">DatFile to replace the values in</param>
        /// <param name="useGames">True to diff using games, false to use hashes</param>
        /// <remarks>
        /// Removes from <paramref name="intDat"/> everything already present in the
        /// base DAT: whole machines whose item sets match exactly (useGames), or
        /// individual items that have a hash duplicate in the base (otherwise).
        /// Re-bucketing mutates both DATs' orderings as a side effect.
        /// </remarks>
        public static void DiffAgainst(DatFile datFile, DatFile intDat, bool useGames)
        {
            // For comparison's sake, we want to use a base ordering
            if (useGames)
                datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None);
            else
                datFile.Items.BucketBy(ItemKey.CRC, DedupeType.None);

            InternalStopwatch watch = new($"Comparing '{intDat.Header.GetStringFieldValue(DatHeader.FileNameKey)}' to base DAT");

            // For comparison's sake, we want to a the base bucketing
            if (useGames)
                intDat.Items.BucketBy(ItemKey.Machine, DedupeType.None);
            else
                intDat.Items.BucketBy(ItemKey.CRC, DedupeType.Full);

            // Then we compare against the base DAT
            List<string> keys = [.. intDat.Items.Keys];
#if NET452_OR_GREATER || NETCOREAPP
            Parallel.ForEach(keys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
            Parallel.ForEach(keys, key =>
#else
            foreach (var key in keys)
#endif
            {
                // Game Against uses game names
                if (useGames)
                {
                    // If the key is null, keep it
                    if (!intDat.Items.TryGetValue(key, out var intList) || intList == null)
#if NET40_OR_GREATER || NETCOREAPP
                        return;
#else
                        continue;
#endif

                    // If the base DAT doesn't contain the key, keep it
                    if (!datFile.Items.TryGetValue(key, out var list) || list == null)
#if NET40_OR_GREATER || NETCOREAPP
                        return;
#else
                        continue;
#endif

                    // If the number of items is different, then keep it
                    if (list.Count != intList.Count)
#if NET40_OR_GREATER || NETCOREAPP
                        return;
#else
                        continue;
#endif

                    // Otherwise, compare by name and hash the remaining files
                    bool exactMatch = true;
                    foreach (DatItem item in intList)
                    {
                        // TODO: Make this granular to name as well
                        if (!list.Contains(item))
                        {
                            exactMatch = false;
                            break;
                        }
                    }

                    // If we have an exact match, remove the game
                    if (exactMatch)
                        intDat.Items.Remove(key);
                }

                // Standard Against uses hashes
                else
                {
                    List<DatItem>? datItems = intDat.Items[key];
                    if (datItems == null)
#if NET40_OR_GREATER || NETCOREAPP
                        return;
#else
                        continue;
#endif

                    // Keep only the items with no hash duplicate in the base DAT
                    List<DatItem> keepDatItems = [];
                    foreach (DatItem datItem in datItems)
                    {
                        if (!datFile.Items.HasDuplicates(datItem, true))
                            keepDatItems.Add(datItem);
                    }

                    // Now add the new list to the key
                    intDat.Items.Remove(key);
                    intDat.Items.AddRange(key, keepDatItems);
                }
#if NET40_OR_GREATER || NETCOREAPP
            });
#else
            }
#endif

            watch.Stop();
        }
|
|
|
|
|
|
|
|
|
|
/// <summary>
|
|
|
|
|
/// Output cascading diffs
|
|
|
|
|
/// </summary>
|
|
|
|
|
/// <param name="datFile">Current DatFile object to use for updating</param>
|
|
|
|
|
/// <param name="datHeaders">Dat headers used optionally</param>
|
|
|
|
|
/// <returns>List of DatFiles representing the individually indexed items</returns>
|
|
|
|
|
public static List<DatFile> DiffCascade(DatFile datFile, List<DatHeader> datHeaders)
|
|
|
|
|
{
|
|
|
|
|
// Create a list of DatData objects representing output files
|
2024-02-28 19:19:50 -05:00
|
|
|
List<DatFile> outDats = [];
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
// Ensure the current DatFile is sorted optimally
|
2020-12-14 15:43:01 -08:00
|
|
|
datFile.Items.BucketBy(ItemKey.CRC, DedupeType.None);
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
// Loop through each of the inputs and get or create a new DatData object
|
2023-04-19 16:39:58 -04:00
|
|
|
InternalStopwatch watch = new("Initializing and filling all output DATs");
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
// Create the DatFiles from the set of headers
|
|
|
|
|
DatFile[] outDatsArray = new DatFile[datHeaders.Count];
|
2024-02-28 22:54:56 -05:00
|
|
|
#if NET452_OR_GREATER || NETCOREAPP
|
2024-10-24 05:58:03 -04:00
|
|
|
Parallel.For(0, datHeaders.Count, Core.Globals.ParallelOptions, j =>
|
2024-02-28 22:54:56 -05:00
|
|
|
#elif NET40_OR_GREATER
|
|
|
|
|
Parallel.For(0, datHeaders.Count, j =>
|
|
|
|
|
#else
|
|
|
|
|
for (int j = 0; j < datHeaders.Count; j++)
|
|
|
|
|
#endif
|
2020-12-10 14:47:38 -08:00
|
|
|
{
|
|
|
|
|
DatFile diffData = DatFile.Create(datHeaders[j]);
|
2024-03-19 18:40:24 -04:00
|
|
|
diffData.ResetDictionary();
|
2020-12-10 14:47:38 -08:00
|
|
|
FillWithSourceIndex(datFile, diffData, j);
|
2024-03-20 00:36:47 -04:00
|
|
|
//FillWithSourceIndexDB(datFile, diffData, j);
|
2020-12-10 14:47:38 -08:00
|
|
|
outDatsArray[j] = diffData;
|
2024-02-28 22:54:56 -05:00
|
|
|
#if NET40_OR_GREATER || NETCOREAPP
|
2020-12-10 14:47:38 -08:00
|
|
|
});
|
2024-02-28 22:54:56 -05:00
|
|
|
#else
|
|
|
|
|
}
|
|
|
|
|
#endif
|
2020-12-10 14:47:38 -08:00
|
|
|
|
2024-02-28 19:19:50 -05:00
|
|
|
outDats = [.. outDatsArray];
|
2020-12-10 14:47:38 -08:00
|
|
|
watch.Stop();
|
|
|
|
|
|
|
|
|
|
return outDats;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// <summary>
|
|
|
|
|
/// Output duplicate item diff
|
|
|
|
|
/// </summary>
|
|
|
|
|
/// <param name="datFile">Current DatFile object to use for updating</param>
|
|
|
|
|
/// <param name="inputs">List of inputs to write out from</param>
|
|
|
|
|
public static DatFile DiffDuplicates(DatFile datFile, List<string> inputs)
|
|
|
|
|
{
|
2024-11-12 21:12:06 -05:00
|
|
|
List<ParentablePath> paths = inputs.ConvertAll(i => new ParentablePath(i));
|
2020-12-10 14:47:38 -08:00
|
|
|
return DiffDuplicates(datFile, paths);
|
2024-03-20 00:36:47 -04:00
|
|
|
//return DiffDuplicatesDB(datFile, paths);
|
2020-12-10 14:47:38 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// <summary>
|
|
|
|
|
/// Output duplicate item diff
|
|
|
|
|
/// </summary>
|
|
|
|
|
/// <param name="datFile">Current DatFile object to use for updating</param>
|
|
|
|
|
/// <param name="inputs">List of inputs to write out from</param>
|
|
|
|
|
public static DatFile DiffDuplicates(DatFile datFile, List<ParentablePath> inputs)
|
|
|
|
|
{
|
2023-04-19 16:39:58 -04:00
|
|
|
InternalStopwatch watch = new("Initializing duplicate DAT");
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
// Fill in any information not in the base DAT
|
2024-03-11 15:46:44 -04:00
|
|
|
if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
|
2024-03-10 21:41:49 -04:00
|
|
|
datFile.Header.SetFieldValue<string?>(DatHeader.FileNameKey, "All DATs");
|
2020-12-10 14:47:38 -08:00
|
|
|
|
2024-03-11 15:46:44 -04:00
|
|
|
if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
|
2024-03-10 04:10:37 -04:00
|
|
|
datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, "datFile.All DATs");
|
2020-12-10 14:47:38 -08:00
|
|
|
|
2024-03-11 15:46:44 -04:00
|
|
|
if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
|
2024-03-10 04:10:37 -04:00
|
|
|
datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, "datFile.All DATs");
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
string post = " (Duplicates)";
|
|
|
|
|
DatFile dupeData = DatFile.Create(datFile.Header);
|
2024-03-11 15:46:44 -04:00
|
|
|
dupeData.Header.SetFieldValue<string?>(DatHeader.FileNameKey, dupeData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
|
|
|
|
|
dupeData.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
|
|
|
|
|
dupeData.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
|
2024-03-19 18:40:24 -04:00
|
|
|
dupeData.ResetDictionary();
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
watch.Stop();
|
|
|
|
|
|
|
|
|
|
// Now, loop through the dictionary and populate the correct DATs
|
|
|
|
|
watch.Start("Populating duplicate DAT");
|
|
|
|
|
|
2024-02-28 22:54:56 -05:00
|
|
|
#if NET452_OR_GREATER || NETCOREAPP
|
2024-10-24 05:58:03 -04:00
|
|
|
Parallel.ForEach(datFile.Items.Keys, Core.Globals.ParallelOptions, key =>
|
2024-02-28 22:54:56 -05:00
|
|
|
#elif NET40_OR_GREATER
|
|
|
|
|
Parallel.ForEach(datFile.Items.Keys, key =>
|
|
|
|
|
#else
|
|
|
|
|
foreach (var key in datFile.Items.Keys)
|
|
|
|
|
#endif
|
2020-12-10 14:47:38 -08:00
|
|
|
{
|
2025-01-07 15:40:05 -05:00
|
|
|
List<DatItem> items = Merge(datFile.Items[key]);
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
// If the rom list is empty or null, just skip it
|
|
|
|
|
if (items == null || items.Count == 0)
|
2024-02-29 00:14:16 -05:00
|
|
|
#if NET40_OR_GREATER || NETCOREAPP
|
2020-12-10 14:47:38 -08:00
|
|
|
return;
|
2024-02-29 00:14:16 -05:00
|
|
|
#else
|
|
|
|
|
continue;
|
|
|
|
|
#endif
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
// Loop through and add the items correctly
|
|
|
|
|
foreach (DatItem item in items)
|
|
|
|
|
{
|
2024-12-28 20:15:32 -05:00
|
|
|
#if NET20 || NET35
|
2024-03-10 16:49:07 -04:00
|
|
|
if ((item.GetFieldValue<DupeType>(DatItem.DupeTypeKey) & DupeType.External) != 0)
|
2024-02-28 22:54:56 -05:00
|
|
|
#else
|
2024-03-10 16:49:07 -04:00
|
|
|
if (item.GetFieldValue<DupeType>(DatItem.DupeTypeKey).HasFlag(DupeType.External))
|
2024-02-28 22:54:56 -05:00
|
|
|
#endif
|
2020-12-10 14:47:38 -08:00
|
|
|
{
|
2024-02-28 19:19:50 -05:00
|
|
|
if (item.Clone() is not DatItem newrom)
|
|
|
|
|
continue;
|
|
|
|
|
|
2024-03-10 16:49:07 -04:00
|
|
|
if (item.GetFieldValue<Source?>(DatItem.SourceKey) != null)
|
2024-03-11 15:46:44 -04:00
|
|
|
newrom.GetFieldValue<Machine>(DatItem.MachineKey)!.SetFieldValue<string?>(Models.Metadata.Machine.NameKey, newrom.GetFieldValue<Machine>(DatItem.MachineKey)!.GetStringFieldValue(Models.Metadata.Machine.NameKey) + $" ({Path.GetFileNameWithoutExtension(inputs[item.GetFieldValue<Source?>(DatItem.SourceKey)!.Index].CurrentPath)})");
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
dupeData.Items.Add(key, newrom);
|
|
|
|
|
}
|
|
|
|
|
}
|
2024-02-28 21:59:13 -05:00
|
|
|
#if NET40_OR_GREATER || NETCOREAPP
|
2020-12-10 14:47:38 -08:00
|
|
|
});
|
2024-02-28 21:59:13 -05:00
|
|
|
#else
|
|
|
|
|
}
|
|
|
|
|
#endif
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
watch.Stop();
|
|
|
|
|
|
|
|
|
|
return dupeData;
|
|
|
|
|
}
|
|
|
|
|
|
2024-03-20 00:36:47 -04:00
|
|
|
/// <summary>
|
|
|
|
|
/// Output duplicate item diff
|
|
|
|
|
/// </summary>
|
|
|
|
|
/// <param name="datFile">Current DatFile object to use for updating</param>
|
|
|
|
|
/// <param name="inputs">List of inputs to write out from</param>
|
|
|
|
|
public static DatFile DiffDuplicatesDB(DatFile datFile, List<ParentablePath> inputs)
|
|
|
|
|
{
|
2024-03-20 10:42:27 -04:00
|
|
|
var watch = new InternalStopwatch("Initializing duplicate DAT");
|
2024-03-20 00:36:47 -04:00
|
|
|
|
|
|
|
|
// Fill in any information not in the base DAT
|
|
|
|
|
if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
|
|
|
|
|
datFile.Header.SetFieldValue<string?>(DatHeader.FileNameKey, "All DATs");
|
|
|
|
|
|
|
|
|
|
if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
|
|
|
|
|
datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, "datFile.All DATs");
|
|
|
|
|
|
|
|
|
|
if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
|
|
|
|
|
datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, "datFile.All DATs");
|
|
|
|
|
|
|
|
|
|
string post = " (Duplicates)";
|
|
|
|
|
DatFile dupeData = DatFile.Create(datFile.Header);
|
|
|
|
|
dupeData.Header.SetFieldValue<string?>(DatHeader.FileNameKey, dupeData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
|
|
|
|
|
dupeData.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
|
|
|
|
|
dupeData.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, dupeData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
|
|
|
|
|
dupeData.ResetDictionary();
|
|
|
|
|
|
|
|
|
|
watch.Stop();
|
|
|
|
|
|
|
|
|
|
// Now, loop through the dictionary and populate the correct DATs
|
|
|
|
|
watch.Start("Populating duplicate DAT");
|
|
|
|
|
|
|
|
|
|
// Get all current items, machines, and mappings
|
2024-12-06 23:16:09 -05:00
|
|
|
var datItems = datFile.ItemsDB.GetItems();
|
|
|
|
|
var machines = datFile.ItemsDB.GetMachines();
|
|
|
|
|
var sources = datFile.ItemsDB.GetSources();
|
|
|
|
|
var itemMachineMappings = datFile.ItemsDB.GetItemMachineMappings();
|
|
|
|
|
var itemSourceMappings = datFile.ItemsDB.GetItemSourceMappings();
|
2024-03-20 00:36:47 -04:00
|
|
|
|
2024-03-20 01:29:59 -04:00
|
|
|
// Create mappings from old index to new index
|
2024-03-20 00:36:47 -04:00
|
|
|
var machineRemapping = new Dictionary<long, long>();
|
2024-03-20 01:29:59 -04:00
|
|
|
var sourceRemapping = new Dictionary<long, long>();
|
|
|
|
|
|
|
|
|
|
// Loop through and add all sources
|
|
|
|
|
foreach (var source in sources)
|
|
|
|
|
{
|
|
|
|
|
long newSourceIndex = dupeData.ItemsDB.AddSource(source.Value);
|
|
|
|
|
sourceRemapping[source.Key] = newSourceIndex;
|
|
|
|
|
}
|
2024-03-20 00:36:47 -04:00
|
|
|
|
|
|
|
|
// Loop through and add all machines
|
|
|
|
|
foreach (var machine in machines)
|
|
|
|
|
{
|
2024-03-20 10:42:27 -04:00
|
|
|
long newMachineIndex = dupeData.ItemsDB.AddMachine(machine.Value);
|
2024-03-20 00:36:47 -04:00
|
|
|
machineRemapping[machine.Key] = newMachineIndex;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Loop through and add the items
|
|
|
|
|
#if NET452_OR_GREATER || NETCOREAPP
|
2024-10-24 05:58:03 -04:00
|
|
|
Parallel.ForEach(datItems, Core.Globals.ParallelOptions, item =>
|
2024-03-20 00:36:47 -04:00
|
|
|
#elif NET40_OR_GREATER
|
|
|
|
|
Parallel.ForEach(datItems, item =>
|
|
|
|
|
#else
|
|
|
|
|
foreach (var item in datItems)
|
|
|
|
|
#endif
|
|
|
|
|
{
|
2024-03-20 01:29:59 -04:00
|
|
|
// Get the machine and source index for this item
|
|
|
|
|
long machineIndex = itemMachineMappings[item.Key];
|
|
|
|
|
long sourceIndex = itemSourceMappings[item.Key];
|
2024-03-20 00:36:47 -04:00
|
|
|
|
2024-03-20 10:42:27 -04:00
|
|
|
// If the current item isn't an external duplicate
|
2024-12-28 20:15:32 -05:00
|
|
|
#if NET20 || NET35
|
2024-03-20 10:42:27 -04:00
|
|
|
if ((item.Value.GetFieldValue<DupeType>(DatItem.DupeTypeKey) & DupeType.External) == 0)
|
2024-03-20 00:36:47 -04:00
|
|
|
#else
|
2024-03-20 10:42:27 -04:00
|
|
|
if (!item.Value.GetFieldValue<DupeType>(DatItem.DupeTypeKey).HasFlag(DupeType.External))
|
2024-03-20 00:36:47 -04:00
|
|
|
#endif
|
|
|
|
|
#if NET40_OR_GREATER || NETCOREAPP
|
2024-03-20 10:42:27 -04:00
|
|
|
return;
|
2024-03-20 00:36:47 -04:00
|
|
|
#else
|
2024-03-20 10:42:27 -04:00
|
|
|
continue;
|
2024-03-20 00:36:47 -04:00
|
|
|
#endif
|
|
|
|
|
|
2024-03-20 10:42:27 -04:00
|
|
|
// Get the current source and machine
|
|
|
|
|
var currentSource = sources[sourceIndex];
|
|
|
|
|
string? currentMachineName = machines[machineIndex].GetStringFieldValue(Models.Metadata.Machine.NameKey);
|
|
|
|
|
var currentMachine = datFile.ItemsDB.GetMachine(currentMachineName);
|
2024-12-06 23:16:09 -05:00
|
|
|
if (currentMachine.Value == null)
|
2024-03-20 10:42:27 -04:00
|
|
|
#if NET40_OR_GREATER || NETCOREAPP
|
|
|
|
|
return;
|
|
|
|
|
#else
|
|
|
|
|
continue;
|
|
|
|
|
#endif
|
|
|
|
|
|
|
|
|
|
// Get the source-specific machine
|
|
|
|
|
string? renamedMachineName = $"{currentMachineName} ({Path.GetFileNameWithoutExtension(inputs[currentSource!.Index].CurrentPath)})";
|
|
|
|
|
var renamedMachine = datFile.ItemsDB.GetMachine(renamedMachineName);
|
2024-12-06 23:16:09 -05:00
|
|
|
if (renamedMachine.Value == null)
|
2024-03-20 10:42:27 -04:00
|
|
|
{
|
2024-12-06 23:16:09 -05:00
|
|
|
var newMachine = currentMachine.Value.Clone() as Machine;
|
2024-03-20 10:42:27 -04:00
|
|
|
newMachine!.SetFieldValue<string?>(Models.Metadata.Machine.NameKey, renamedMachineName);
|
|
|
|
|
long newMachineIndex = dupeData.ItemsDB.AddMachine(newMachine!);
|
2024-12-06 23:16:09 -05:00
|
|
|
renamedMachine = new KeyValuePair<long, Machine?>(newMachineIndex, newMachine);
|
2024-03-20 00:36:47 -04:00
|
|
|
}
|
2024-03-20 10:42:27 -04:00
|
|
|
|
2024-12-06 23:16:09 -05:00
|
|
|
dupeData.ItemsDB.AddItem(item.Value, renamedMachine.Key, sourceRemapping[sourceIndex], statsOnly: false);
|
2024-03-20 00:36:47 -04:00
|
|
|
#if NET40_OR_GREATER || NETCOREAPP
|
|
|
|
|
});
|
|
|
|
|
#else
|
|
|
|
|
}
|
|
|
|
|
#endif
|
|
|
|
|
|
|
|
|
|
watch.Stop();
|
|
|
|
|
|
|
|
|
|
return dupeData;
|
|
|
|
|
}
|
|
|
|
|
|
2020-12-10 14:47:38 -08:00
|
|
|
/// <summary>
|
|
|
|
|
/// Output non-cascading diffs
|
|
|
|
|
/// </summary>
|
|
|
|
|
/// <param name="datFile">Current DatFile object to use for updating</param>
|
|
|
|
|
/// <param name="inputs">List of inputs to write out from</param>
|
|
|
|
|
public static List<DatFile> DiffIndividuals(DatFile datFile, List<string> inputs)
|
|
|
|
|
{
|
2024-11-12 21:12:06 -05:00
|
|
|
List<ParentablePath> paths = inputs.ConvertAll(i => new ParentablePath(i));
|
2020-12-10 14:47:38 -08:00
|
|
|
return DiffIndividuals(datFile, paths);
|
2024-03-20 00:36:47 -04:00
|
|
|
//return DiffIndividualsDB(datFile, paths);
|
2020-12-10 14:47:38 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// <summary>
|
|
|
|
|
/// Output non-cascading diffs
|
|
|
|
|
/// </summary>
|
|
|
|
|
/// <param name="datFile">Current DatFile object to use for updating</param>
|
|
|
|
|
/// <param name="inputs">List of inputs to write out from</param>
|
|
|
|
|
public static List<DatFile> DiffIndividuals(DatFile datFile, List<ParentablePath> inputs)
|
|
|
|
|
{
|
2023-04-19 16:39:58 -04:00
|
|
|
InternalStopwatch watch = new("Initializing all individual DATs");
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
// Fill in any information not in the base DAT
|
2024-03-11 15:46:44 -04:00
|
|
|
if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
|
2024-03-10 21:41:49 -04:00
|
|
|
datFile.Header.SetFieldValue<string?>(DatHeader.FileNameKey, "All DATs");
|
2020-12-10 14:47:38 -08:00
|
|
|
|
2024-03-11 15:46:44 -04:00
|
|
|
if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
|
2024-03-10 04:10:37 -04:00
|
|
|
datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, "All DATs");
|
2020-12-10 14:47:38 -08:00
|
|
|
|
2024-03-11 15:46:44 -04:00
|
|
|
if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
|
2024-03-10 04:10:37 -04:00
|
|
|
datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, "All DATs");
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
// Loop through each of the inputs and get or create a new DatData object
|
|
|
|
|
DatFile[] outDatsArray = new DatFile[inputs.Count];
|
|
|
|
|
|
2024-02-28 22:54:56 -05:00
|
|
|
#if NET452_OR_GREATER || NETCOREAPP
|
2024-10-24 05:58:03 -04:00
|
|
|
Parallel.For(0, inputs.Count, Core.Globals.ParallelOptions, j =>
|
2024-02-28 22:54:56 -05:00
|
|
|
#elif NET40_OR_GREATER
|
|
|
|
|
Parallel.For(0, inputs.Count, j =>
|
|
|
|
|
#else
|
|
|
|
|
for (int j = 0; j < inputs.Count; j++)
|
|
|
|
|
#endif
|
2020-12-10 14:47:38 -08:00
|
|
|
{
|
|
|
|
|
string innerpost = $" ({j} - {inputs[j].GetNormalizedFileName(true)} Only)";
|
|
|
|
|
DatFile diffData = DatFile.Create(datFile.Header);
|
2024-03-11 15:46:44 -04:00
|
|
|
diffData.Header.SetFieldValue<string?>(DatHeader.FileNameKey, diffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + innerpost);
|
|
|
|
|
diffData.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + innerpost);
|
|
|
|
|
diffData.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + innerpost);
|
2024-03-19 18:40:24 -04:00
|
|
|
diffData.ResetDictionary();
|
2020-12-10 14:47:38 -08:00
|
|
|
outDatsArray[j] = diffData;
|
2024-02-28 22:54:56 -05:00
|
|
|
#if NET40_OR_GREATER || NETCOREAPP
|
2020-12-10 14:47:38 -08:00
|
|
|
});
|
2024-02-28 22:54:56 -05:00
|
|
|
#else
|
|
|
|
|
}
|
|
|
|
|
#endif
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
// Create a list of DatData objects representing individual output files
|
2024-02-28 19:19:50 -05:00
|
|
|
List<DatFile> outDats = [.. outDatsArray];
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
watch.Stop();
|
|
|
|
|
|
|
|
|
|
// Now, loop through the dictionary and populate the correct DATs
|
|
|
|
|
watch.Start("Populating all individual DATs");
|
|
|
|
|
|
2024-02-28 22:54:56 -05:00
|
|
|
#if NET452_OR_GREATER || NETCOREAPP
|
2024-10-24 05:58:03 -04:00
|
|
|
Parallel.ForEach(datFile.Items.Keys, Core.Globals.ParallelOptions, key =>
|
2024-02-28 22:54:56 -05:00
|
|
|
#elif NET40_OR_GREATER
|
|
|
|
|
Parallel.ForEach(datFile.Items.Keys, key =>
|
|
|
|
|
#else
|
|
|
|
|
foreach (var key in datFile.Items.Keys)
|
|
|
|
|
#endif
|
2020-12-10 14:47:38 -08:00
|
|
|
{
|
2025-01-07 15:40:05 -05:00
|
|
|
List<DatItem> items = Merge(datFile.Items[key]);
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
// If the rom list is empty or null, just skip it
|
|
|
|
|
if (items == null || items.Count == 0)
|
2024-02-29 00:14:16 -05:00
|
|
|
#if NET40_OR_GREATER || NETCOREAPP
|
2020-12-10 14:47:38 -08:00
|
|
|
return;
|
2024-02-29 00:14:16 -05:00
|
|
|
#else
|
|
|
|
|
continue;
|
|
|
|
|
#endif
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
// Loop through and add the items correctly
|
|
|
|
|
foreach (DatItem item in items)
|
|
|
|
|
{
|
2024-03-10 16:49:07 -04:00
|
|
|
if (item.GetFieldValue<Source?>(DatItem.SourceKey) == null)
|
2024-02-28 19:19:50 -05:00
|
|
|
continue;
|
|
|
|
|
|
2024-12-28 20:15:32 -05:00
|
|
|
#if NET20 || NET35
|
2024-03-10 16:49:07 -04:00
|
|
|
if ((item.GetFieldValue<DupeType>(DatItem.DupeTypeKey) & DupeType.Internal) != 0 || item.GetFieldValue<DupeType>(DatItem.DupeTypeKey) == 0x00)
|
2024-02-28 22:54:56 -05:00
|
|
|
#else
|
2024-03-10 16:49:07 -04:00
|
|
|
if (item.GetFieldValue<DupeType>(DatItem.DupeTypeKey).HasFlag(DupeType.Internal) || item.GetFieldValue<DupeType>(DatItem.DupeTypeKey) == 0x00)
|
2024-02-28 22:54:56 -05:00
|
|
|
#endif
|
2024-03-10 16:49:07 -04:00
|
|
|
outDats[item.GetFieldValue<Source?>(DatItem.SourceKey)!.Index].Items.Add(key, item);
|
2020-12-10 14:47:38 -08:00
|
|
|
}
|
2024-02-28 21:59:13 -05:00
|
|
|
#if NET40_OR_GREATER || NETCOREAPP
|
2020-12-10 14:47:38 -08:00
|
|
|
});
|
2024-02-28 21:59:13 -05:00
|
|
|
#else
|
|
|
|
|
}
|
|
|
|
|
#endif
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
watch.Stop();
|
|
|
|
|
|
2024-02-28 19:19:50 -05:00
|
|
|
return [.. outDats];
|
2020-12-10 14:47:38 -08:00
|
|
|
}
|
|
|
|
|
|
2024-03-20 00:36:47 -04:00
|
|
|
/// <summary>
|
|
|
|
|
/// Output non-cascading diffs
|
|
|
|
|
/// </summary>
|
|
|
|
|
/// <param name="datFile">Current DatFile object to use for updating</param>
|
|
|
|
|
/// <param name="inputs">List of inputs to write out from</param>
|
|
|
|
|
public static List<DatFile> DiffIndividualsDB(DatFile datFile, List<ParentablePath> inputs)
|
|
|
|
|
{
|
|
|
|
|
InternalStopwatch watch = new("Initializing all individual DATs");
|
|
|
|
|
|
|
|
|
|
// Fill in any information not in the base DAT
|
|
|
|
|
if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
|
|
|
|
|
datFile.Header.SetFieldValue<string?>(DatHeader.FileNameKey, "All DATs");
|
|
|
|
|
|
|
|
|
|
if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
|
|
|
|
|
datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, "All DATs");
|
|
|
|
|
|
|
|
|
|
if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
|
|
|
|
|
datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, "All DATs");
|
|
|
|
|
|
|
|
|
|
// Loop through each of the inputs and get or create a new DatData object
|
|
|
|
|
DatFile[] outDatsArray = new DatFile[inputs.Count];
|
|
|
|
|
|
|
|
|
|
#if NET452_OR_GREATER || NETCOREAPP
|
2024-10-24 05:58:03 -04:00
|
|
|
Parallel.For(0, inputs.Count, Core.Globals.ParallelOptions, j =>
|
2024-03-20 00:36:47 -04:00
|
|
|
#elif NET40_OR_GREATER
|
|
|
|
|
Parallel.For(0, inputs.Count, j =>
|
|
|
|
|
#else
|
|
|
|
|
for (int j = 0; j < inputs.Count; j++)
|
|
|
|
|
#endif
|
|
|
|
|
{
|
|
|
|
|
string innerpost = $" ({j} - {inputs[j].GetNormalizedFileName(true)} Only)";
|
|
|
|
|
DatFile diffData = DatFile.Create(datFile.Header);
|
|
|
|
|
diffData.Header.SetFieldValue<string?>(DatHeader.FileNameKey, diffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + innerpost);
|
|
|
|
|
diffData.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + innerpost);
|
|
|
|
|
diffData.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, diffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + innerpost);
|
|
|
|
|
diffData.ResetDictionary();
|
|
|
|
|
outDatsArray[j] = diffData;
|
|
|
|
|
#if NET40_OR_GREATER || NETCOREAPP
|
|
|
|
|
});
|
|
|
|
|
#else
|
|
|
|
|
}
|
|
|
|
|
#endif
|
|
|
|
|
|
|
|
|
|
// Create a list of DatData objects representing individual output files
|
|
|
|
|
List<DatFile> outDats = [.. outDatsArray];
|
|
|
|
|
|
|
|
|
|
watch.Stop();
|
|
|
|
|
|
|
|
|
|
// Now, loop through the dictionary and populate the correct DATs
|
|
|
|
|
watch.Start("Populating all individual DATs");
|
|
|
|
|
|
|
|
|
|
// Get all current items, machines, and mappings
|
2024-12-06 23:16:09 -05:00
|
|
|
var datItems = datFile.ItemsDB.GetItems();
|
|
|
|
|
var machines = datFile.ItemsDB.GetMachines();
|
|
|
|
|
var sources = datFile.ItemsDB.GetSources();
|
|
|
|
|
var itemMachineMappings = datFile.ItemsDB.GetItemMachineMappings();
|
|
|
|
|
var itemSourceMappings = datFile.ItemsDB.GetItemSourceMappings();
|
2024-03-20 00:36:47 -04:00
|
|
|
|
2024-03-20 01:29:59 -04:00
|
|
|
// Create mappings from old index to new index
|
2024-03-20 00:36:47 -04:00
|
|
|
var machineRemapping = new Dictionary<long, long>();
|
2024-03-20 01:29:59 -04:00
|
|
|
var sourceRemapping = new Dictionary<long, long>();
|
|
|
|
|
|
|
|
|
|
// Loop through and add all sources
|
|
|
|
|
foreach (var source in sources)
|
|
|
|
|
{
|
|
|
|
|
long newSourceIndex = outDats[0].ItemsDB.AddSource(source.Value);
|
|
|
|
|
sourceRemapping[source.Key] = newSourceIndex;
|
|
|
|
|
|
|
|
|
|
for (int i = 1; i < outDats.Count; i++)
|
|
|
|
|
{
|
|
|
|
|
_ = outDats[i].ItemsDB.AddSource(source.Value);
|
|
|
|
|
}
|
|
|
|
|
}
|
2024-03-20 00:36:47 -04:00
|
|
|
|
|
|
|
|
// Loop through and add all machines
|
|
|
|
|
foreach (var machine in machines)
|
|
|
|
|
{
|
|
|
|
|
long newMachineIndex = outDats[0].ItemsDB.AddMachine(machine.Value);
|
|
|
|
|
machineRemapping[machine.Key] = newMachineIndex;
|
|
|
|
|
|
|
|
|
|
for (int i = 1; i < outDats.Count; i++)
|
|
|
|
|
{
|
|
|
|
|
_ = outDats[i].ItemsDB.AddMachine(machine.Value);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Loop through and add the items
|
|
|
|
|
#if NET452_OR_GREATER || NETCOREAPP
|
2024-10-24 05:58:03 -04:00
|
|
|
Parallel.ForEach(datItems, Core.Globals.ParallelOptions, item =>
|
2024-03-20 00:36:47 -04:00
|
|
|
#elif NET40_OR_GREATER
|
|
|
|
|
Parallel.ForEach(datItems, item =>
|
|
|
|
|
#else
|
|
|
|
|
foreach (var item in datItems)
|
|
|
|
|
#endif
|
|
|
|
|
{
|
2024-03-20 01:29:59 -04:00
|
|
|
// Get the machine and source index for this item
|
|
|
|
|
long machineIndex = itemMachineMappings[item.Key];
|
|
|
|
|
long sourceIndex = itemSourceMappings[item.Key];
|
2024-03-20 00:36:47 -04:00
|
|
|
|
2024-03-20 01:50:08 -04:00
|
|
|
// Get the source associated with the item
|
|
|
|
|
var source = datFile.ItemsDB.GetSource(sourceIndex);
|
|
|
|
|
if (source == null)
|
2024-03-20 00:36:47 -04:00
|
|
|
#if NET40_OR_GREATER || NETCOREAPP
|
|
|
|
|
return;
|
|
|
|
|
#else
|
|
|
|
|
continue;
|
|
|
|
|
#endif
|
|
|
|
|
|
2024-12-28 20:15:32 -05:00
|
|
|
#if NET20 || NET35
|
2024-03-20 00:36:47 -04:00
|
|
|
if ((item.Value.GetFieldValue<DupeType>(DatItem.DupeTypeKey) & DupeType.Internal) != 0 || item.Value.GetFieldValue<DupeType>(DatItem.DupeTypeKey) == 0x00)
|
|
|
|
|
#else
|
|
|
|
|
if (item.Value.GetFieldValue<DupeType>(DatItem.DupeTypeKey).HasFlag(DupeType.Internal) || item.Value.GetFieldValue<DupeType>(DatItem.DupeTypeKey) == 0x00)
|
|
|
|
|
#endif
|
2024-03-20 01:50:08 -04:00
|
|
|
outDats[source.Index].ItemsDB.AddItem(item.Value, machineRemapping[machineIndex], sourceRemapping[sourceIndex], statsOnly: false);
|
2024-03-20 00:36:47 -04:00
|
|
|
#if NET40_OR_GREATER || NETCOREAPP
|
|
|
|
|
});
|
|
|
|
|
#else
|
|
|
|
|
}
|
|
|
|
|
#endif
|
|
|
|
|
|
|
|
|
|
watch.Stop();
|
|
|
|
|
|
|
|
|
|
return [.. outDats];
|
|
|
|
|
}
|
|
|
|
|
|
2020-12-10 14:47:38 -08:00
|
|
|
/// <summary>
|
|
|
|
|
/// Output non-duplicate item diff
|
|
|
|
|
/// </summary>
|
|
|
|
|
/// <param name="datFile">Current DatFile object to use for updating</param>
|
|
|
|
|
/// <param name="inputs">List of inputs to write out from</param>
|
|
|
|
|
public static DatFile DiffNoDuplicates(DatFile datFile, List<string> inputs)
|
|
|
|
|
{
|
2024-11-12 21:12:06 -05:00
|
|
|
List<ParentablePath> paths = inputs.ConvertAll(i => new ParentablePath(i));
|
2020-12-10 14:47:38 -08:00
|
|
|
return DiffNoDuplicates(datFile, paths);
|
2024-03-20 00:36:47 -04:00
|
|
|
//return DiffNoDuplicatesDB(datFile, paths);
|
2020-12-10 14:47:38 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// <summary>
|
|
|
|
|
/// Output non-duplicate item diff
|
|
|
|
|
/// </summary>
|
|
|
|
|
/// <param name="datFile">Current DatFile object to use for updating</param>
|
|
|
|
|
/// <param name="inputs">List of inputs to write out from</param>
|
|
|
|
|
public static DatFile DiffNoDuplicates(DatFile datFile, List<ParentablePath> inputs)
|
|
|
|
|
{
|
2023-04-19 16:39:58 -04:00
|
|
|
InternalStopwatch watch = new("Initializing no duplicate DAT");
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
// Fill in any information not in the base DAT
|
2024-03-11 15:46:44 -04:00
|
|
|
if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
|
2024-03-10 21:41:49 -04:00
|
|
|
datFile.Header.SetFieldValue<string?>(DatHeader.FileNameKey, "All DATs");
|
2020-12-10 14:47:38 -08:00
|
|
|
|
2024-03-11 15:46:44 -04:00
|
|
|
if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
|
2024-03-10 04:10:37 -04:00
|
|
|
datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, "All DATs");
|
2020-12-10 14:47:38 -08:00
|
|
|
|
2024-03-11 15:46:44 -04:00
|
|
|
if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
|
2024-03-10 04:10:37 -04:00
|
|
|
datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, "All DATs");
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
string post = " (No Duplicates)";
|
|
|
|
|
DatFile outerDiffData = DatFile.Create(datFile.Header);
|
2024-03-11 15:46:44 -04:00
|
|
|
outerDiffData.Header.SetFieldValue<string?>(DatHeader.FileNameKey, outerDiffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
|
|
|
|
|
outerDiffData.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
|
|
|
|
|
outerDiffData.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
|
2024-03-19 18:40:24 -04:00
|
|
|
outerDiffData.ResetDictionary();
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
watch.Stop();
|
|
|
|
|
|
|
|
|
|
// Now, loop through the dictionary and populate the correct DATs
|
|
|
|
|
watch.Start("Populating no duplicate DAT");
|
|
|
|
|
|
2024-02-28 22:54:56 -05:00
|
|
|
#if NET452_OR_GREATER || NETCOREAPP
|
2024-10-24 05:58:03 -04:00
|
|
|
Parallel.ForEach(datFile.Items.Keys, Core.Globals.ParallelOptions, key =>
|
2024-02-28 22:54:56 -05:00
|
|
|
#elif NET40_OR_GREATER
|
|
|
|
|
Parallel.ForEach(datFile.Items.Keys, key =>
|
|
|
|
|
#else
|
|
|
|
|
foreach (var key in datFile.Items.Keys)
|
|
|
|
|
#endif
|
2020-12-10 14:47:38 -08:00
|
|
|
{
|
2025-01-07 15:40:05 -05:00
|
|
|
List<DatItem> items = Merge(datFile.Items[key]);
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
// If the rom list is empty or null, just skip it
|
|
|
|
|
if (items == null || items.Count == 0)
|
2024-02-29 00:14:16 -05:00
|
|
|
#if NET40_OR_GREATER || NETCOREAPP
|
2020-12-10 14:47:38 -08:00
|
|
|
return;
|
2024-02-29 00:14:16 -05:00
|
|
|
#else
|
|
|
|
|
continue;
|
|
|
|
|
#endif
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
// Loop through and add the items correctly
|
|
|
|
|
foreach (DatItem item in items)
|
|
|
|
|
{
|
2024-12-28 20:15:32 -05:00
|
|
|
#if NET20 || NET35
|
2024-03-10 16:49:07 -04:00
|
|
|
if ((item.GetFieldValue<DupeType>(DatItem.DupeTypeKey) & DupeType.Internal) != 0 || item.GetFieldValue<DupeType>(DatItem.DupeTypeKey) == 0x00)
|
2024-02-28 22:54:56 -05:00
|
|
|
#else
|
2024-03-10 16:49:07 -04:00
|
|
|
if (item.GetFieldValue<DupeType>(DatItem.DupeTypeKey).HasFlag(DupeType.Internal) || item.GetFieldValue<DupeType>(DatItem.DupeTypeKey) == 0x00)
|
2024-02-28 22:54:56 -05:00
|
|
|
#endif
|
2020-12-10 14:47:38 -08:00
|
|
|
{
|
2024-03-10 16:49:07 -04:00
|
|
|
if (item.Clone() is not DatItem newrom || newrom.GetFieldValue<Source?>(DatItem.SourceKey) == null)
|
2024-02-28 19:19:50 -05:00
|
|
|
continue;
|
|
|
|
|
|
2024-03-11 15:46:44 -04:00
|
|
|
newrom.GetFieldValue<Machine>(DatItem.MachineKey)!.SetFieldValue<string?>(Models.Metadata.Machine.NameKey, newrom.GetFieldValue<Machine>(DatItem.MachineKey)!.GetStringFieldValue(Models.Metadata.Machine.NameKey) + $" ({Path.GetFileNameWithoutExtension(inputs[newrom.GetFieldValue<Source?>(DatItem.SourceKey)!.Index].CurrentPath)})");
|
2020-12-10 14:47:38 -08:00
|
|
|
outerDiffData.Items.Add(key, newrom);
|
|
|
|
|
}
|
|
|
|
|
}
|
2024-02-28 21:59:13 -05:00
|
|
|
#if NET40_OR_GREATER || NETCOREAPP
|
2020-12-10 14:47:38 -08:00
|
|
|
});
|
2024-02-28 21:59:13 -05:00
|
|
|
#else
|
|
|
|
|
}
|
|
|
|
|
#endif
|
2020-12-10 14:47:38 -08:00
|
|
|
|
|
|
|
|
watch.Stop();
|
|
|
|
|
|
|
|
|
|
return outerDiffData;
|
|
|
|
|
}
|
|
|
|
|
|
2024-03-20 00:36:47 -04:00
|
|
|
/// <summary>
/// Output non-duplicate item diff
/// </summary>
/// <param name="datFile">Current DatFile object to use for updating</param>
/// <param name="inputs">List of inputs to write out from</param>
/// <returns>New DatFile containing only the items that are not duplicates across inputs</returns>
public static DatFile DiffNoDuplicatesDB(DatFile datFile, List<ParentablePath> inputs)
{
    var watch = new InternalStopwatch("Initializing no duplicate DAT");

    // Fill in any information not in the base DAT
    if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)))
        datFile.Header.SetFieldValue<string?>(DatHeader.FileNameKey, "All DATs");

    if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)))
        datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, "All DATs");

    if (string.IsNullOrEmpty(datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)))
        datFile.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, "All DATs");

    // Create the output DAT with "(No Duplicates)" appended to the identifying header fields
    string post = " (No Duplicates)";
    DatFile outerDiffData = DatFile.Create(datFile.Header);
    outerDiffData.Header.SetFieldValue<string?>(DatHeader.FileNameKey, outerDiffData.Header.GetStringFieldValue(DatHeader.FileNameKey) + post);
    outerDiffData.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + post);
    outerDiffData.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, outerDiffData.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + post);
    outerDiffData.ResetDictionary();

    watch.Stop();

    // Now, loop through the dictionary and populate the correct DATs
    watch.Start("Populating no duplicate DAT");

    // Get all current items, machines, and mappings
    var datItems = datFile.ItemsDB.GetItems();
    var machines = datFile.ItemsDB.GetMachines();
    var sources = datFile.ItemsDB.GetSources();
    var itemMachineMappings = datFile.ItemsDB.GetItemMachineMappings();
    var itemSourceMappings = datFile.ItemsDB.GetItemSourceMappings();

    // Create mappings from old index to new index
    // NOTE(review): machineRemapping is populated below but never read in this method
    // (items are added via renamedMachine.Key instead) — confirm whether it can be removed
    var machineRemapping = new Dictionary<long, long>();
    var sourceRemapping = new Dictionary<long, long>();

    // Loop through and add all sources
    foreach (var source in sources)
    {
        long newSourceIndex = outerDiffData.ItemsDB.AddSource(source.Value);
        sourceRemapping[source.Key] = newSourceIndex;
    }

    // Loop through and add all machines
    foreach (var machine in machines)
    {
        long newMachineIndex = outerDiffData.ItemsDB.AddMachine(machine.Value);
        machineRemapping[machine.Key] = newMachineIndex;
    }

    // Loop through and add the items
#if NET452_OR_GREATER || NETCOREAPP
    Parallel.ForEach(datItems, Core.Globals.ParallelOptions, item =>
#elif NET40_OR_GREATER
    Parallel.ForEach(datItems, item =>
#else
    foreach (var item in datItems)
#endif
    {
        // Get the machine and source index for this item
        long machineIndex = itemMachineMappings[item.Key];
        long sourceIndex = itemSourceMappings[item.Key];

        // If the current item isn't a duplicate
        // (keep only internal duplicates and items with no dupe flags at all)
#if NET20 || NET35
        if ((item.Value.GetFieldValue<DupeType>(DatItem.DupeTypeKey) & DupeType.Internal) == 0 && item.Value.GetFieldValue<DupeType>(DatItem.DupeTypeKey) != 0x00)
#else
        if (!item.Value.GetFieldValue<DupeType>(DatItem.DupeTypeKey).HasFlag(DupeType.Internal) && item.Value.GetFieldValue<DupeType>(DatItem.DupeTypeKey) != 0x00)
#endif
#if NET40_OR_GREATER || NETCOREAPP
            return;
#else
            continue;
#endif

        // Get the current source and machine
        var currentSource = sources[sourceIndex];
        string? currentMachineName = machines[machineIndex].GetStringFieldValue(Models.Metadata.Machine.NameKey);
        var currentMachine = datFile.ItemsDB.GetMachine(currentMachineName);
        if (currentMachine.Value == null)
#if NET40_OR_GREATER || NETCOREAPP
            return;
#else
            continue;
#endif

        // Get the source-specific machine, named "<machine> (<input file name>)"
        string? renamedMachineName = $"{currentMachineName} ({Path.GetFileNameWithoutExtension(inputs[currentSource!.Index].CurrentPath)})";
        // NOTE(review): this looks up the renamed machine in datFile, but renamed machines
        // are only ever added to outerDiffData below — confirm the lookup target is intended,
        // otherwise every item re-adds a renamed machine and the found-case index belongs to
        // the wrong ItemsDB. Also note the check-then-add is not atomic under Parallel.ForEach.
        var renamedMachine = datFile.ItemsDB.GetMachine(renamedMachineName);
        if (renamedMachine.Value == null)
        {
            var newMachine = currentMachine.Value.Clone() as Machine;
            newMachine!.SetFieldValue<string?>(Models.Metadata.Machine.NameKey, renamedMachineName);
            long newMachineIndex = outerDiffData.ItemsDB.AddMachine(newMachine);
            renamedMachine = new KeyValuePair<long, Machine?>(newMachineIndex, newMachine);
        }

        // Add the item to the output under the renamed machine, remapping its source index
        outerDiffData.ItemsDB.AddItem(item.Value, renamedMachine.Key, sourceRemapping[sourceIndex], statsOnly: false);
#if NET40_OR_GREATER || NETCOREAPP
    });
#else
    }
#endif

    watch.Stop();

    return outerDiffData;
}
|
|
|
|
|
|
2025-01-07 15:40:05 -05:00
|
|
|
#endregion
|
|
|
|
|
|
|
|
|
|
#region Population
|
|
|
|
|
|
2020-12-10 14:47:38 -08:00
|
|
|
/// <summary>
/// Populate from multiple paths while returning the individual headers
/// </summary>
/// <param name="datFile">Current DatFile object to use for updating</param>
/// <param name="inputs">Paths to DATs to parse</param>
/// <returns>List of DatHeader objects representing headers</returns>
public static List<DatHeader> PopulateUserData(DatFile datFile, List<string> inputs)
{
    // Wrap each raw path before delegating to the ParentablePath overload
    var parentablePaths = new List<ParentablePath>(inputs.Count);
    foreach (string input in inputs)
    {
        parentablePaths.Add(new ParentablePath(input));
    }

    return PopulateUserData(datFile, parentablePaths);
}
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Populate from multiple paths while returning the individual headers
/// </summary>
/// <param name="datFile">Current DatFile object to use for updating</param>
/// <param name="inputs">Paths to DATs to parse</param>
/// <returns>List of DatHeader objects representing headers</returns>
public static List<DatHeader> PopulateUserData(DatFile datFile, List<ParentablePath> inputs)
{
    // One parsed DatFile per input, indexed to match the input list
    DatFile[] datFiles = new DatFile[inputs.Count];
    InternalStopwatch watch = new("Processing individual DATs");

    // Parse all of the DATs into their own DatFiles in the array
#if NET452_OR_GREATER || NETCOREAPP
    Parallel.For(0, inputs.Count, Core.Globals.ParallelOptions, i =>
#elif NET40_OR_GREATER
    Parallel.For(0, inputs.Count, i =>
#else
    for (int i = 0; i < inputs.Count; i++)
#endif
    {
        var input = inputs[i];
        _staticLogger.User($"Adding DAT: {input.CurrentPath}");
        // Each per-input DAT starts from a filtered clone of the shared header
        datFiles[i] = DatFile.Create(datFile.Header.CloneFiltering());
        Parser.ParseInto(datFiles[i], input, i, keep: true);
#if NET40_OR_GREATER || NETCOREAPP
    });
#else
    }
#endif

    watch.Stop();

    watch.Start("Populating internal DAT");
    // Merge every parsed DAT into the main DatFile, emptying each source as we go
    for (int i = 0; i < inputs.Count; i++)
    {
        AddFromExisting(datFile, datFiles[i], true);
        //AddFromExistingDB(datFile, datFiles[i], true);
    }

    watch.Stop();

    // Return the headers in input order (bodies were consumed by AddFromExisting above)
    return [.. Array.ConvertAll(datFiles, d => d.Header)];
}
|
2024-02-28 22:54:56 -05:00
|
|
|
|
2020-12-10 14:47:38 -08:00
|
|
|
/// <summary>
/// Add items from another DatFile to the existing DatFile
/// </summary>
/// <param name="addTo">DatFile to add to</param>
/// <param name="addFrom">DatFile to add from</param>
/// <param name="delete">If items should be deleted from the source DatFile</param>
private static void AddFromExisting(DatFile addTo, DatFile addFrom, bool delete = false)
{
    // Snapshot the key set so removing entries mid-loop is safe
    string[] sourceKeys = [.. addFrom.Items.Keys];

    foreach (string sourceKey in sourceKeys)
    {
        // Copy every item under this key into the destination
        addTo.Items.AddRange(sourceKey, addFrom.Items[sourceKey]);

        // Drop the key from the source as we go, if requested
        if (delete)
        {
            addFrom.Items.Remove(sourceKey);
        }
    }

    // Clear out the source's dictionary entirely, if requested
    if (delete)
    {
        addFrom.ResetDictionary();
    }
}
|
2021-02-02 14:09:49 -08:00
|
|
|
|
2024-03-20 00:36:47 -04:00
|
|
|
/// <summary>
/// Add items from another DatFile to the existing DatFile
/// </summary>
/// <param name="addTo">DatFile to add to</param>
/// <param name="addFrom">DatFile to add from</param>
/// <param name="delete">If items should be deleted from the source DatFile</param>
private static void AddFromExistingDB(DatFile addTo, DatFile addFrom, bool delete = false)
{
    // Get all current items, machines, and mappings
    var datItems = addFrom.ItemsDB.GetItems();
    var machines = addFrom.ItemsDB.GetMachines();
    var sources = addFrom.ItemsDB.GetSources();
    var itemMachineMappings = addFrom.ItemsDB.GetItemMachineMappings();
    var itemSourceMappings = addFrom.ItemsDB.GetItemSourceMappings();

    // Create mappings from old index to new index
    var machineRemapping = new Dictionary<long, long>();
    var sourceRemapping = new Dictionary<long, long>();

    // Loop through and add all sources
    foreach (var source in sources)
    {
        long newSourceIndex = addTo.ItemsDB.AddSource(source.Value);
        sourceRemapping[source.Key] = newSourceIndex;
    }

    // Loop through and add all machines
    foreach (var machine in machines)
    {
        long newMachineIndex = addTo.ItemsDB.AddMachine(machine.Value);
        machineRemapping[machine.Key] = newMachineIndex;
    }

    // Loop through and add the items
#if NET452_OR_GREATER || NETCOREAPP
    Parallel.ForEach(datItems, Core.Globals.ParallelOptions, item =>
#elif NET40_OR_GREATER
    Parallel.ForEach(datItems, item =>
#else
    foreach (var item in datItems)
#endif
    {
        // Get the machine and source index for this item
        long machineIndex = itemMachineMappings[item.Key];
        long sourceIndex = itemSourceMappings[item.Key];

        // Add the item to the destination under its remapped machine and source
        addTo.ItemsDB.AddItem(item.Value, machineRemapping[machineIndex], sourceRemapping[sourceIndex], statsOnly: false);

        // Now remove the key from the source DAT
        // NOTE(review): under the parallel branches this removes from addFrom.ItemsDB while
        // datItems (obtained from it above) is being enumerated — confirm GetItems returns
        // a snapshot, otherwise this can race
        if (delete)
            addFrom.ItemsDB.RemoveItem(item.Key);
#if NET40_OR_GREATER || NETCOREAPP
    });
#else
    }
#endif

    // Now remove the file dictionary from the source DAT
    if (delete)
        addFrom.ResetDictionary();
}
|
|
|
|
|
|
2021-02-02 14:09:49 -08:00
|
|
|
/// <summary>
/// Fill a DatFile with all items with a particular source index ID
/// </summary>
/// <param name="datFile">Current DatFile object to use for updating</param>
/// <param name="indexDat">DatFile to add found items to</param>
/// <param name="index">Source index ID to retrieve items for</param>
/// <remarks>Matching items are added to <paramref name="indexDat"/>; nothing is returned</remarks>
private static void FillWithSourceIndex(DatFile datFile, DatFile indexDat, int index)
{
    // Loop through and add the items for this index to the output
#if NET452_OR_GREATER || NETCOREAPP
    Parallel.ForEach(datFile.Items.Keys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
    Parallel.ForEach(datFile.Items.Keys, key =>
#else
    foreach (var key in datFile.Items.Keys)
#endif
    {
        // Merge the items under this key before filtering by source
        List<DatItem> items = Merge(datFile.Items[key]);

        // If the rom list is empty or null, just skip it
        if (items == null || items.Count == 0)
#if NET40_OR_GREATER || NETCOREAPP
            return;
#else
            continue;
#endif

        // Keep only items whose source index matches the requested one
        foreach (DatItem item in items)
        {
            var source = item.GetFieldValue<Source?>(DatItem.SourceKey);
            if (source != null && source.Index == index)
                indexDat.Items.Add(key, item);
        }
#if NET40_OR_GREATER || NETCOREAPP
    });
#else
    }
#endif
}
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Fill a DatFile with all items with a particular source index ID
/// </summary>
/// <param name="datFile">Current DatFile object to use for updating</param>
/// <param name="indexDat">DatFile to add found items to</param>
/// <param name="index">Source index ID to retrieve items for</param>
/// <remarks>Matching items are added to <paramref name="indexDat"/>; nothing is returned</remarks>
private static void FillWithSourceIndexDB(DatFile datFile, DatFile indexDat, int index)
{
    // Get all current items, machines, and mappings
    var datItems = datFile.ItemsDB.GetItems();
    var machines = datFile.ItemsDB.GetMachines();
    var sources = datFile.ItemsDB.GetSources();
    var itemMachineMappings = datFile.ItemsDB.GetItemMachineMappings();
    var itemSourceMappings = datFile.ItemsDB.GetItemSourceMappings();

    // Create mappings from old index to new index
    // NOTE(review): all sources/machines are copied into indexDat up front, even those
    // that end up owning no matching items — confirm that is intended
    var machineRemapping = new Dictionary<long, long>();
    var sourceRemapping = new Dictionary<long, long>();

    // Loop through and add all sources
    foreach (var source in sources)
    {
        long newSourceIndex = indexDat.ItemsDB.AddSource(source.Value);
        sourceRemapping[source.Key] = newSourceIndex;
    }

    // Loop through and add all machines
    foreach (var machine in machines)
    {
        long newMachineIndex = indexDat.ItemsDB.AddMachine(machine.Value);
        machineRemapping[machine.Key] = newMachineIndex;
    }

    // Loop through and add the items
#if NET452_OR_GREATER || NETCOREAPP
    Parallel.ForEach(datItems, Core.Globals.ParallelOptions, item =>
#elif NET40_OR_GREATER
    Parallel.ForEach(datItems, item =>
#else
    foreach (var item in datItems)
#endif
    {
        // Get the machine and source index for this item
        long machineIndex = itemMachineMappings[item.Key];
        long sourceIndex = itemSourceMappings[item.Key];

        // Get the source associated with the item
        var source = datFile.ItemsDB.GetSource(sourceIndex);

        // Only copy the item when its source index matches the requested one
        if (source != null && source.Index == index)
            indexDat.ItemsDB.AddItem(item.Value, machineRemapping[machineIndex], sourceRemapping[sourceIndex], statsOnly: false);
#if NET40_OR_GREATER || NETCOREAPP
    });
#else
    }
#endif
}
|
2025-01-07 15:40:05 -05:00
|
|
|
|
|
|
|
|
#endregion
|
2020-12-10 10:39:39 -08:00
|
|
|
}
|
|
|
|
|
}
|