#if NET40_OR_GREATER || NETCOREAPP
using System.Collections.Concurrent;
#endif
using System.Collections.Generic;
using System.IO;
using System.Linq;
#if NET40_OR_GREATER || NETCOREAPP
using System.Threading;
using System.Threading.Tasks;
#endif
using System.Xml.Serialization;
using Newtonsoft.Json;
using SabreTools.Core.Tools;
using SabreTools.DatItems;
using SabreTools.DatItems.Formats;
using SabreTools.Hashing;
using SabreTools.IO.Logging;
using SabreTools.Matching.Compare;

/*
 * Planning Notes:
 *
 * In order for this in-memory "database" design to work, there need to be a few things:
 * - Feature parity with all existing item dictionary operations
 * - A way to transition between the two item dictionaries (a flag?)
 * - Helper methods that target the "database" version instead of assuming the standard dictionary
 *
 * Notable changes include:
 * - Separation of Machine from DatItem, leading to a mapping instead
 * + Should DatItem include an index reference to the machine? Or should that be all external?
 * - Adding machines to the dictionary distinctly from the items
 * - Having a separate "bucketing" system that only reorders indices and not full items; quicker?
 * - Non-key-based add/remove of values; use explicit methods instead of dictionary-style accessors
 */

namespace SabreTools.DatFiles
{
    /// <summary>
    /// Item dictionary with statistics, bucketing, and sorting
    /// </summary>
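    /// <example>
    /// A minimal usage sketch (illustrative only; it assumes <c>source</c>, <c>machine</c>, and
    /// <c>rom</c> instances have been constructed elsewhere):
    /// <code>
    /// var db = new ItemDictionaryDB();
    /// long sourceIndex = db.AddSource(source);
    /// long machineIndex = db.AddMachine(machine);
    /// long itemIndex = db.AddItem(rom, machineIndex, sourceIndex, statsOnly: false);
    /// db.BucketBy(ItemKey.CRC);
    /// var firstBucket = db.GetItemsForBucket(db.SortedKeys[0]);
    /// </code>
    /// </example>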
    [JsonObject("items"), XmlRoot("items")]
    public class ItemDictionaryDB
    {
        #region Private instance variables

        /// <summary>
        /// Internal dictionary for all items
        /// </summary>
        [JsonIgnore, XmlIgnore]
#if NET40_OR_GREATER || NETCOREAPP
        private readonly ConcurrentDictionary<long, DatItem> _items = [];
#else
        private readonly Dictionary<long, DatItem> _items = [];
#endif

        /// <summary>
        /// Current highest available item index
        /// </summary>
        [JsonIgnore, XmlIgnore]
        private long _itemIndex = 0;

        /// <summary>
        /// Internal dictionary for all machines
        /// </summary>
        [JsonIgnore, XmlIgnore]
#if NET40_OR_GREATER || NETCOREAPP
        private readonly ConcurrentDictionary<long, Machine> _machines = [];
#else
        private readonly Dictionary<long, Machine> _machines = [];
#endif

        /// <summary>
        /// Current highest available machine index
        /// </summary>
        [JsonIgnore, XmlIgnore]
        private long _machineIndex = 0;

        /// <summary>
        /// Internal dictionary for all sources
        /// </summary>
        [JsonIgnore, XmlIgnore]
#if NET40_OR_GREATER || NETCOREAPP
        private readonly ConcurrentDictionary<long, Source> _sources = [];
#else
        private readonly Dictionary<long, Source> _sources = [];
#endif

        /// <summary>
        /// Current highest available source index
        /// </summary>
        [JsonIgnore, XmlIgnore]
        private long _sourceIndex = 0;

        /// <summary>
        /// Internal dictionary for item to machine mappings
        /// </summary>
        [JsonIgnore, XmlIgnore]
#if NET40_OR_GREATER || NETCOREAPP
        private readonly ConcurrentDictionary<long, long> _itemToMachineMapping = [];
#else
        private readonly Dictionary<long, long> _itemToMachineMapping = [];
#endif

        /// <summary>
        /// Internal dictionary for item to source mappings
        /// </summary>
        [JsonIgnore, XmlIgnore]
#if NET40_OR_GREATER || NETCOREAPP
        private readonly ConcurrentDictionary<long, long> _itemToSourceMapping = [];
#else
        private readonly Dictionary<long, long> _itemToSourceMapping = [];
#endif

        /// <summary>
        /// Internal dictionary representing the current buckets
        /// </summary>
        [JsonIgnore, XmlIgnore]
#if NET40_OR_GREATER || NETCOREAPP
        private readonly ConcurrentDictionary<string, List<long>> _buckets = [];
#else
        private readonly Dictionary<string, List<long>> _buckets = [];
#endif

        /// <summary>
        /// Current bucketed by value
        /// </summary>
        private ItemKey _bucketedBy = ItemKey.NULL;

        /// <summary>
        /// Logging object
        /// </summary>
        private readonly Logger _logger;

        #endregion

        #region Fields

        /// <summary>
        /// Get the keys in sorted order from the file dictionary
        /// </summary>
        /// <returns>List of the keys in sorted order</returns>
        [JsonIgnore, XmlIgnore]
        public string[] SortedKeys
        {
            get
            {
                List<string> keys = [.. _buckets.Keys];
                keys.Sort(new NaturalComparer());
                return [.. keys];
            }
        }

        /// <summary>
        /// DAT statistics
        /// </summary>
        [JsonIgnore, XmlIgnore]
        public DatStatistics DatStatistics { get; } = new DatStatistics();

        #endregion

        #region Constructors

        /// <summary>
        /// Generic constructor
        /// </summary>
        public ItemDictionaryDB()
        {
            _logger = new Logger(this);
        }

        #endregion

        #region Accessors

        /// <summary>
        /// Add a DatItem to the dictionary after validation
        /// </summary>
        /// <param name="item">Item data to validate</param>
        /// <param name="machineIndex">Index of the machine related to the item</param>
        /// <param name="sourceIndex">Index of the source related to the item</param>
        /// <param name="statsOnly">True to only add item statistics while parsing, false otherwise</param>
        /// <returns>The index for the added item, -1 on error</returns>
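        /// <remarks>
        /// Disk, File, Media, and Rom items are normalized on the way in: Disk and Rom entries
        /// without usable hashes are flagged as nodump, and Roms with a missing or zero size whose
        /// hashes match a zero-byte file have the standard zero-byte hash values filled in.
        /// </remarks>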
        public long AddItem(DatItem item, long machineIndex, long sourceIndex, bool statsOnly)
        {
            // If we have a Disk, File, Media, or Rom, clean the hash data
            if (item is Disk disk)
            {
                // If the file has absolutely no hashes, skip and log
                if (disk.GetStringFieldValue(Models.Metadata.Disk.StatusKey).AsItemStatus() != ItemStatus.Nodump
                    && string.IsNullOrEmpty(disk.GetStringFieldValue(Models.Metadata.Disk.MD5Key))
                    && string.IsNullOrEmpty(disk.GetStringFieldValue(Models.Metadata.Disk.SHA1Key)))
                {
                    _logger.Verbose($"Incomplete entry for '{disk.GetName()}' will be output as nodump");
                    disk.SetFieldValue<string?>(Models.Metadata.Disk.StatusKey, ItemStatus.Nodump.AsStringValue());
                }

                item = disk;
            }
            else if (item is DatItems.Formats.File file)
            {
                // If the file has absolutely no hashes, skip and log
                if (string.IsNullOrEmpty(file.CRC)
                    && string.IsNullOrEmpty(file.MD5)
                    && string.IsNullOrEmpty(file.SHA1)
                    && string.IsNullOrEmpty(file.SHA256))
                {
                    _logger.Verbose($"Incomplete entry for '{file.GetName()}' will be output as nodump");
                }

                item = file;
            }
            else if (item is Media media)
            {
                // If the file has absolutely no hashes, skip and log
                if (string.IsNullOrEmpty(media.GetStringFieldValue(Models.Metadata.Media.MD5Key))
                    && string.IsNullOrEmpty(media.GetStringFieldValue(Models.Metadata.Media.SHA1Key))
                    && string.IsNullOrEmpty(media.GetStringFieldValue(Models.Metadata.Media.SHA256Key))
                    && string.IsNullOrEmpty(media.GetStringFieldValue(Models.Metadata.Media.SpamSumKey)))
                {
                    _logger.Verbose($"Incomplete entry for '{media.GetName()}' will be output as nodump");
                }

                item = media;
            }
            else if (item is Rom rom)
            {
                long? size = rom.GetInt64FieldValue(Models.Metadata.Rom.SizeKey);

                // If we have the case where there is SHA-1 and nothing else, we don't fill in any other part of the data
                if (size == null && !string.IsNullOrEmpty(rom.GetStringFieldValue(Models.Metadata.Rom.SHA1Key)))
                {
                    // No-op, just catch it so it doesn't go further
                    //logger.Verbose($"{Header.GetStringFieldValue(DatHeader.FileNameKey)}: Entry with only SHA-1 found - '{rom.GetName()}'");
                }

                // If we have a rom and it's missing size AND the hashes match a 0-byte file, fill in the rest of the info
                else if ((size == 0 || size == null)
                    && (string.IsNullOrEmpty(rom.GetStringFieldValue(Models.Metadata.Rom.CRCKey)) || rom.HasZeroHash()))
                {
                    rom.SetFieldValue<string?>(Models.Metadata.Rom.SizeKey, Constants.SizeZero.ToString());
                    rom.SetFieldValue<string?>(Models.Metadata.Rom.CRCKey, ZeroHash.CRC32Str);
                    rom.SetFieldValue<string?>(Models.Metadata.Rom.MD2Key, null); // ZeroHash.GetString(HashType.MD2)
                    rom.SetFieldValue<string?>(Models.Metadata.Rom.MD4Key, null); // ZeroHash.GetString(HashType.MD4)
                    rom.SetFieldValue<string?>(Models.Metadata.Rom.MD5Key, ZeroHash.MD5Str);
                    rom.SetFieldValue<string?>(Models.Metadata.Rom.SHA1Key, ZeroHash.SHA1Str);
                    rom.SetFieldValue<string?>(Models.Metadata.Rom.SHA256Key, null); // ZeroHash.SHA256Str;
                    rom.SetFieldValue<string?>(Models.Metadata.Rom.SHA384Key, null); // ZeroHash.SHA384Str;
                    rom.SetFieldValue<string?>(Models.Metadata.Rom.SHA512Key, null); // ZeroHash.SHA512Str;
                    rom.SetFieldValue<string?>(Models.Metadata.Rom.SpamSumKey, null); // ZeroHash.SpamSumStr;
                }

                // If the file has no size and it's not the above case, skip and log
                else if (rom.GetStringFieldValue(Models.Metadata.Rom.StatusKey).AsItemStatus() != ItemStatus.Nodump && (size == 0 || size == null))
                {
                    //logger.Verbose($"{Header.GetStringFieldValue(DatHeader.FileNameKey)}: Incomplete entry for '{rom.GetName()}' will be output as nodump");
                    rom.SetFieldValue<string?>(Models.Metadata.Rom.StatusKey, ItemStatus.Nodump.AsStringValue());
                }

                // If the file has a size but absolutely no hashes, skip and log
                else if (rom.GetStringFieldValue(Models.Metadata.Rom.StatusKey).AsItemStatus() != ItemStatus.Nodump
                    && size != null && size > 0
                    && !rom.HasHashes())
                {
                    //logger.Verbose($"{Header.GetStringFieldValue(DatHeader.FileNameKey)}: Incomplete entry for '{rom.GetName()}' will be output as nodump");
                    rom.SetFieldValue<string?>(Models.Metadata.Rom.StatusKey, ItemStatus.Nodump.AsStringValue());
                }

                item = rom;
            }

            // If only adding statistics, we add just item stats
            if (statsOnly)
            {
                DatStatistics.AddItemStatistics(item);
                return -1;
            }
            else
            {
                return AddItem(item, machineIndex, sourceIndex);
            }
        }

        /// <summary>
        /// Add a machine, returning the insert index
        /// </summary>
        public long AddMachine(Machine machine)
        {
#if NET40_OR_GREATER || NETCOREAPP
            long index = Interlocked.Increment(ref _machineIndex) - 1;
            _machines.TryAdd(index, machine);
            return index;
#else
            // Post-increment already yields the pre-increment value, matching the path above
            long index = _machineIndex++;
            _machines[index] = machine;
            return index;
#endif
        }

        /// <summary>
        /// Add a source, returning the insert index
        /// </summary>
        public long AddSource(Source source)
        {
#if NET40_OR_GREATER || NETCOREAPP
            long index = Interlocked.Increment(ref _sourceIndex) - 1;
            _sources.TryAdd(index, source);
            return index;
#else
            // Post-increment already yields the pre-increment value, matching the path above
            long index = _sourceIndex++;
            _sources[index] = source;
            return index;
#endif
        }

        /// <summary>
        /// Remove all items marked for removal
        /// </summary>
        public void ClearMarked()
        {
            long[] itemIndices = [.. _items.Keys];
            foreach (long itemIndex in itemIndices)
            {
#if NET40_OR_GREATER || NETCOREAPP
                if (!_items.TryGetValue(itemIndex, out var datItem) || datItem == null)
                    continue;
#else
                var datItem = _items[itemIndex];
#endif

                if (datItem.GetBoolFieldValue(DatItem.RemoveKey) != true)
                    continue;

                RemoveItem(itemIndex);
            }
        }

        /// <summary>
        /// Get all items and their indices
        /// </summary>
        public IDictionary<long, DatItem> GetItems() => _items;

        /// <summary>
        /// Get the indices and items associated with a bucket name
        /// </summary>
        public Dictionary<long, DatItem> GetItemsForBucket(string? bucketName, bool filter = false)
        {
            if (bucketName == null)
                return [];

#if NET40_OR_GREATER || NETCOREAPP
            if (!_buckets.TryGetValue(bucketName, out var itemIds))
                return [];
#else
            if (!_buckets.ContainsKey(bucketName))
                return [];

            var itemIds = _buckets[bucketName];
#endif

            var datItems = new Dictionary<long, DatItem>();
            foreach (long itemId in itemIds)
            {
                // Ignore missing IDs
#if NET40_OR_GREATER || NETCOREAPP
                if (!_items.TryGetValue(itemId, out var datItem) || datItem == null)
                    continue;
#else
                if (!_items.ContainsKey(itemId))
                    continue;

                var datItem = _items[itemId];
                if (datItem == null)
                    continue;
#endif

                if (!filter || datItem.GetBoolFieldValue(DatItem.RemoveKey) != true)
                    datItems[itemId] = datItem;
            }

            return datItems;
        }

        /// <summary>
        /// Get a machine based on the index
        /// </summary>
        public Machine? GetMachine(long index)
        {
#if NET40_OR_GREATER || NETCOREAPP
            if (!_machines.TryGetValue(index, out var machine))
                return null;

            return machine;
#else
            if (!_machines.ContainsKey(index))
                return null;

            return _machines[index];
#endif
        }

        /// <summary>
        /// Get a machine based on the name
        /// </summary>
        /// <remarks>This assumes that all machines have unique names</remarks>
        public KeyValuePair<long, Machine?> GetMachine(string? name)
        {
            if (string.IsNullOrEmpty(name))
                return new KeyValuePair<long, Machine?>(-1, null);

            var machine = _machines.FirstOrDefault(m => m.Value.GetName() == name);
            return new KeyValuePair<long, Machine?>(machine.Key, machine.Value);
        }

        /// <summary>
        /// Get the index and machine associated with an item index
        /// </summary>
        public KeyValuePair<long, Machine?> GetMachineForItem(long itemIndex)
        {
#if NET40_OR_GREATER || NETCOREAPP
            if (!_itemToMachineMapping.TryGetValue(itemIndex, out long machineIndex))
                return new KeyValuePair<long, Machine?>(-1, null);

            if (!_machines.TryGetValue(machineIndex, out var machine))
                return new KeyValuePair<long, Machine?>(-1, null);

            return new KeyValuePair<long, Machine?>(machineIndex, machine);
#else
            if (!_itemToMachineMapping.ContainsKey(itemIndex))
                return new KeyValuePair<long, Machine?>(-1, null);

            long machineIndex = _itemToMachineMapping[itemIndex];
            if (!_machines.ContainsKey(machineIndex))
                return new KeyValuePair<long, Machine?>(-1, null);

            var machine = _machines[machineIndex];
            return new KeyValuePair<long, Machine?>(machineIndex, machine);
#endif
        }

        /// <summary>
        /// Get all machines and their indices
        /// </summary>
        public IDictionary<long, Machine> GetMachines() => _machines;

        /// <summary>
        /// Get a source based on the index
        /// </summary>
        public Source? GetSource(long index)
        {
            if (!_sources.ContainsKey(index))
                return null;

            return _sources[index];
        }

        /// <summary>
        /// Get the index and source associated with an item index
        /// </summary>
        public KeyValuePair<long, Source?> GetSourceForItem(long itemIndex)
        {
#if NET40_OR_GREATER || NETCOREAPP
            if (!_itemToSourceMapping.TryGetValue(itemIndex, out long sourceIndex))
                return new KeyValuePair<long, Source?>(-1, null);

            if (!_sources.TryGetValue(sourceIndex, out var source))
                return new KeyValuePair<long, Source?>(-1, null);

            return new KeyValuePair<long, Source?>(sourceIndex, source);
#else
            if (!_itemToSourceMapping.ContainsKey(itemIndex))
                return new KeyValuePair<long, Source?>(-1, null);

            long sourceIndex = _itemToSourceMapping[itemIndex];
            if (!_sources.ContainsKey(sourceIndex))
                return new KeyValuePair<long, Source?>(-1, null);

            var source = _sources[sourceIndex];
            return new KeyValuePair<long, Source?>(sourceIndex, source);
#endif
        }

        /// <summary>
        /// Get all sources and their indices
        /// </summary>
        public IDictionary<long, Source> GetSources() => _sources;

        /// <summary>
        /// Remap an item to a new machine index without validation
        /// </summary>
        /// <param name="itemIndex">Current item index</param>
        /// <param name="machineIndex">New machine index</param>
        public void RemapDatItemToMachine(long itemIndex, long machineIndex)
        {
            lock (_itemToMachineMapping)
            {
                _itemToMachineMapping[itemIndex] = machineIndex;
            }
        }

        /// <summary>
        /// Remove a key from the file dictionary if it exists
        /// </summary>
        /// <param name="key">Key in the dictionary to remove</param>
        public bool RemoveBucket(string key)
        {
#if NET40_OR_GREATER || NETCOREAPP
            bool removed = _buckets.TryRemove(key, out var list);
#else
            if (!_buckets.ContainsKey(key))
                return false;

            bool removed = true;
            var list = _buckets[key];
            _buckets.Remove(key);
#endif
            if (list == null)
                return removed;

            foreach (var index in list)
            {
#if NET40_OR_GREATER || NETCOREAPP
                if (!_items.TryGetValue(index, out var datItem) || datItem == null)
                    continue;
#else
                if (!_items.ContainsKey(index))
                    continue;

                var datItem = _items[index];
#endif

                RemoveItem(index);
            }

            return removed;
        }

        /// <summary>
        /// Remove an item, returning if it could be removed
        /// </summary>
        public bool RemoveItem(long itemIndex)
        {
            // If the key doesn't exist, return
#if NET40_OR_GREATER || NETCOREAPP
            if (!_items.TryRemove(itemIndex, out var datItem))
                return false;
#else
            if (!_items.ContainsKey(itemIndex))
                return false;

            var datItem = _items[itemIndex];
            _items.Remove(itemIndex);
#endif

            // Remove statistics, if possible
            if (datItem != null)
                DatStatistics.RemoveItemStatistics(datItem);

            // Remove the machine mapping
#if NET40_OR_GREATER || NETCOREAPP
            _itemToMachineMapping.TryRemove(itemIndex, out _);
#else
            if (_itemToMachineMapping.ContainsKey(itemIndex))
                _itemToMachineMapping.Remove(itemIndex);
#endif

            // Remove the source mapping
#if NET40_OR_GREATER || NETCOREAPP
            _itemToSourceMapping.TryRemove(itemIndex, out _);
#else
            if (_itemToSourceMapping.ContainsKey(itemIndex))
                _itemToSourceMapping.Remove(itemIndex);
#endif

            return true;
        }

        /// <summary>
        /// Remove a machine, returning if it could be removed
        /// </summary>
        public bool RemoveMachine(long machineIndex)
        {
            if (!_machines.ContainsKey(machineIndex))
                return false;

#if NET40_OR_GREATER || NETCOREAPP
            _machines.TryRemove(machineIndex, out _);
#else
            _machines.Remove(machineIndex);
#endif

            // Materialize the IDs before removing so the mapping isn't modified during enumeration
            var itemIds = _itemToMachineMapping
                .Where(mapping => mapping.Value == machineIndex)
                .Select(mapping => mapping.Key)
                .ToList();

            foreach (long itemId in itemIds)
            {
#if NET40_OR_GREATER || NETCOREAPP
                _itemToMachineMapping.TryRemove(itemId, out _);
#else
                _itemToMachineMapping.Remove(itemId);
#endif
            }

            return true;
        }

        /// <summary>
        /// Remove a machine, returning if it could be removed
        /// </summary>
        public bool RemoveMachine(string machineName)
        {
            if (string.IsNullOrEmpty(machineName))
                return false;

            var machine = _machines.FirstOrDefault(m => m.Value.GetName() == machineName);

            // If no machine matched, don't remove anything
            if (machine.Value == null)
                return false;

            return RemoveMachine(machine.Key);
        }

        /// <summary>
        /// Add an item, returning the insert index
        /// </summary>
        internal long AddItem(DatItem item, long machineIndex, long sourceIndex)
        {
#if NET40_OR_GREATER || NETCOREAPP
            // Add the item with a new index
            long index = Interlocked.Increment(ref _itemIndex) - 1;
            _items.TryAdd(index, item);

            // Add the machine mapping
            _itemToMachineMapping.TryAdd(index, machineIndex);

            // Add the source mapping
            _itemToSourceMapping.TryAdd(index, sourceIndex);
#else
            // Add the item with a new index (post-increment yields the pre-increment value, matching the path above)
            long index = _itemIndex++;
            _items[index] = item;

            // Add the machine mapping
            _itemToMachineMapping[index] = machineIndex;

            // Add the source mapping
            _itemToSourceMapping[index] = sourceIndex;
#endif

            // Add the item statistics
            DatStatistics.AddItemStatistics(item);

            // Add the item to the default bucket
            PerformItemBucketing(index, _bucketedBy, lower: true, norename: true);

            // Return the used index
            return index;
        }

        #endregion

        #region Bucketing

        /// <summary>
        /// Update the bucketing dictionary
        /// </summary>
        /// <param name="bucketBy">ItemKey enum representing how to bucket the individual items</param>
        /// <param name="lower">True if the key should be lowercased (default), false otherwise</param>
        /// <param name="norename">True if games should only be compared on game and file name, false if system and source are counted</param>
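        /// <example>
        /// Illustrative call on an existing instance (here named <c>db</c>), re-bucketing every item by machine name:
        /// <code>
        /// db.BucketBy(ItemKey.Machine, lower: true, norename: true);
        /// </code>
        /// </example>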
        public void BucketBy(ItemKey bucketBy, bool lower = true, bool norename = true)
        {
            // If the sorted type isn't the same, we want to sort the dictionary accordingly
            if (_bucketedBy != bucketBy && bucketBy != ItemKey.NULL)
            {
                _logger.User($"Organizing roms by {bucketBy}");
                PerformBucketing(bucketBy, lower, norename);
            }

            // Sort the dictionary to be consistent
            _logger.User($"Sorting roms by {bucketBy}");
            PerformSorting(norename);
        }

        /// <summary>
        /// Perform deduplication on the current sorted dictionary
        /// </summary>
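        /// <remarks>
        /// Each bucket is sorted, merged, removed, and then re-added item by item, so item
        /// indices are not stable across a call to this method.
        /// </remarks>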
        public void Deduplicate()
        {
#if NET452_OR_GREATER || NETCOREAPP
            Parallel.ForEach(SortedKeys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
            Parallel.ForEach(SortedKeys, key =>
#else
            foreach (var key in SortedKeys)
#endif
            {
                // Get the possibly unsorted list
                List<KeyValuePair<long, DatItem>> sortedList = [.. GetItemsForBucket(key)];

                // Sort and merge the list
                Sort(ref sortedList, false);
                sortedList = Merge(sortedList);

                // Get all existing mappings
                List<ItemMappings> currentMappings = sortedList.ConvertAll(item =>
                {
                    return new ItemMappings(
                        item.Value,
                        GetMachineForItem(item.Key).Key,
                        GetSourceForItem(item.Key).Key
                    );
                });

                // Add the list back to the dictionary
                RemoveBucket(key);
                currentMappings.ForEach(map =>
                    AddItem(map.Item, map.MachineId, map.SourceId));
#if NET40_OR_GREATER || NETCOREAPP
            });
#else
            }
#endif
        }

        /// <summary>
        /// Return the duplicate status of two items
        /// </summary>
        /// <param name="selfItem">Current DatItem</param>
        /// <param name="selfSource">Source associated with this item</param>
        /// <param name="lastItem">DatItem to check against</param>
        /// <param name="lastSource">Source associated with the last item</param>
        /// <returns>The DupeType corresponding to the relationship between the two</returns>
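        /// <remarks>
        /// When the items are equal, the result combines an External/Internal flag (based on
        /// whether the two items come from different sources) with an All/Hash flag (based on
        /// whether the machine and item names also match); otherwise it stays 0x00.
        /// </remarks>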
        public DupeType GetDuplicateStatus(KeyValuePair<long, DatItem>? selfItem, Source? selfSource, KeyValuePair<long, DatItem>? lastItem, Source? lastSource)
        {
            DupeType output = 0x00;

            // If either item is null
            if (selfItem == null || lastItem == null)
                return output;

            // If we don't have a duplicate at all, return none
            if (!selfItem.Value.Value.Equals(lastItem.Value.Value))
                return output;

            // Get the machines for comparison
            var selfMachine = GetMachineForItem(selfItem.Value.Key).Value;
            string? selfMachineName = selfMachine?.GetName();
            var lastMachine = GetMachineForItem(lastItem.Value.Key).Value;
            string? lastMachineName = lastMachine?.GetName();

            // If the duplicate is external already
#if NET20 || NET35
            if ((lastItem.Value.Value.GetFieldValue<DupeType>(DatItem.DupeTypeKey) & DupeType.External) != 0)
#else
            if (lastItem.Value.Value.GetFieldValue<DupeType>(DatItem.DupeTypeKey).HasFlag(DupeType.External))
#endif
                output |= DupeType.External;

            // If the duplicate should be external
            else if (lastSource?.Index != selfSource?.Index)
                output |= DupeType.External;

            // Otherwise, it's considered an internal dupe
            else
                output |= DupeType.Internal;

            // If the item and machine names match
            if (lastMachineName == selfMachineName && lastItem.Value.Value.GetName() == selfItem.Value.Value.GetName())
                output |= DupeType.All;

            // Otherwise, hash match is assumed
            else
                output |= DupeType.Hash;

            return output;
        }

        /// <summary>
        /// List all duplicates found in a DAT based on a DatItem
        /// </summary>
        /// <param name="datItem">Item to try to match</param>
        /// <param name="sorted">True if the DAT is already sorted accordingly, false otherwise (default)</param>
        /// <returns>List of matched DatItem objects</returns>
        /// <remarks>This also sets the remove flag on any duplicates found</remarks>
        /// TODO: Figure out if removal should be a flag or just removed entirely
        internal Dictionary<long, DatItem> GetDuplicates(KeyValuePair<long, DatItem> datItem, bool sorted = false)
        {
            // Check for an empty rom list first
            if (DatStatistics.TotalCount == 0)
                return [];

            // We want to get the proper key for the DatItem, ignoring the index
            _ = SortAndGetKey(datItem, sorted);
            var machine = GetMachineForItem(datItem.Key);
            var source = GetSourceForItem(datItem.Key);
            string key = datItem.Value.GetKey(_bucketedBy, machine.Value, source.Value);

            // If the key doesn't exist, return the empty list
            var items = GetItemsForBucket(key);
            if (items.Count == 0)
                return [];

            // Try to find duplicates
            Dictionary<long, DatItem> output = [];
            foreach (var rom in items)
            {
                // Skip items marked for removal
                if (rom.Value.GetBoolFieldValue(DatItem.RemoveKey) == true)
                    continue;

                // Mark duplicates for future removal
                if (datItem.Value.Equals(rom.Value))
                {
                    rom.Value.SetFieldValue<bool?>(DatItem.RemoveKey, true);
                    output[rom.Key] = rom.Value;
                }
            }

            // Return any matching items
            return output;
        }

        /// <summary>
        /// Check if a DAT contains the given DatItem
        /// </summary>
        /// <param name="datItem">Item to try to match</param>
        /// <param name="sorted">True if the DAT is already sorted accordingly, false otherwise (default)</param>
        /// <returns>True if it contains the rom, false otherwise</returns>
        internal bool HasDuplicates(KeyValuePair<long, DatItem> datItem, bool sorted = false)
        {
            // Check for an empty rom list first
            if (DatStatistics.TotalCount == 0)
                return false;

            // We want to get the proper key for the DatItem, ignoring the index
            _ = SortAndGetKey(datItem, sorted);
            var machine = GetMachineForItem(datItem.Key);
            var source = GetSourceForItem(datItem.Key);
            string key = datItem.Value.GetKey(_bucketedBy, machine.Value, source.Value);

            // If the key doesn't exist
            var roms = GetItemsForBucket(key);
            if (roms == null || roms.Count == 0)
                return false;

            // Try to find duplicates
            return roms.Values.Any(datItem.Value.Equals);
        }

        /// <summary>
        /// Merge an arbitrary set of item pairs based on the supplied information
        /// </summary>
        /// <param name="itemMappings">List of pairs representing the items to be merged</param>
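        /// <returns>Deduplicated list of item pairs, with duplicate information merged into the retained entries</returns>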
        private List<KeyValuePair<long, DatItem>> Merge(List<KeyValuePair<long, DatItem>> itemMappings)
        {
            // Check for null or blank roms first
            if (itemMappings == null || itemMappings.Count == 0)
                return [];

            // Create output list
            List<KeyValuePair<long, DatItem>> output = [];

            // Then deduplicate them by checking to see if data matches previous saved roms
            int nodumpCount = 0;
            foreach (var kvp in itemMappings)
            {
                long itemIndex = kvp.Key;
                DatItem datItem = kvp.Value;

                // If we don't have a Disk, File, Media, or Rom, we skip checking for duplicates
                if (datItem is not Disk && datItem is not DatItems.Formats.File && datItem is not Media && datItem is not Rom)
                    continue;

                // If it's a nodump, add and skip
                if (datItem is Rom rom && rom.GetStringFieldValue(Models.Metadata.Rom.StatusKey).AsItemStatus() == ItemStatus.Nodump)
                {
                    output.Add(new KeyValuePair<long, DatItem>(itemIndex, datItem));
                    nodumpCount++;
                    continue;
                }
                else if (datItem is Disk disk && disk.GetStringFieldValue(Models.Metadata.Disk.StatusKey).AsItemStatus() == ItemStatus.Nodump)
                {
                    output.Add(new KeyValuePair<long, DatItem>(itemIndex, datItem));
                    nodumpCount++;
                    continue;
                }

                // If it's the first non-nodump rom in the list, don't touch it
                if (output.Count == nodumpCount)
                {
                    output.Add(new KeyValuePair<long, DatItem>(itemIndex, datItem));
                    continue;
                }

                // Find the index of the first duplicate, if one exists
                var datItemSource = GetSourceForItem(itemIndex);
                int pos = output.FindIndex(lastItem =>
                {
                    var lastItemSource = GetSourceForItem(lastItem.Key);
                    return GetDuplicateStatus(kvp, datItemSource.Value, lastItem, lastItemSource.Value) != 0x00;
                });
                if (pos < 0)
                {
                    output.Add(new KeyValuePair<long, DatItem>(itemIndex, datItem));
                    continue;
                }

                // Get the duplicate item
                long savedIndex = output[pos].Key;
                DatItem savedItem = output[pos].Value;
                var savedItemSource = GetSourceForItem(savedIndex);
                DupeType dupetype = GetDuplicateStatus(kvp, datItemSource.Value, output[pos], savedItemSource.Value);

                // Disks, Media, and Roms have more information to fill
                if (datItem is Disk diskItem && savedItem is Disk savedDisk)
                    savedDisk.FillMissingInformation(diskItem);
                else if (datItem is DatItems.Formats.File fileItem && savedItem is DatItems.Formats.File savedFile)
                    savedFile.FillMissingInformation(fileItem);
                else if (datItem is Media mediaItem && savedItem is Media savedMedia)
                    savedMedia.FillMissingInformation(mediaItem);
                else if (datItem is Rom romItem && savedItem is Rom savedRom)
                    savedRom.FillMissingInformation(romItem);

                savedItem.SetFieldValue<DupeType>(DatItem.DupeTypeKey, dupetype);

                // Get the sources associated with the items
                var savedSource = _sources[_itemToSourceMapping[savedIndex]];
                var itemSource = _sources[_itemToSourceMapping[itemIndex]];

                // Get the machines associated with the items
                var savedMachine = _machines[_itemToMachineMapping[savedIndex]];
                var itemMachine = _machines[_itemToMachineMapping[itemIndex]];

                // If the current source has a lower ID than the saved, use the saved source
                if (itemSource?.Index < savedSource?.Index)
                {
                    _itemToSourceMapping[itemIndex] = _itemToSourceMapping[savedIndex];
                    _machines[_itemToMachineMapping[savedIndex]] = (itemMachine.Clone() as Machine)!;
                    savedItem.SetName(datItem.GetName());
                }

                // If the saved machine is a child of the current machine, use the current machine instead
                if (savedMachine.GetStringFieldValue(Models.Metadata.Machine.CloneOfKey) == itemMachine.GetName()
                    || savedMachine.GetStringFieldValue(Models.Metadata.Machine.RomOfKey) == itemMachine.GetName())
                {
                    _machines[_itemToMachineMapping[savedIndex]] = (itemMachine.Clone() as Machine)!;
                    savedItem.SetName(datItem.GetName());
                }

                // Replace the original item in the list
                output.RemoveAt(pos);
                output.Insert(pos, new KeyValuePair<long, DatItem>(savedIndex, savedItem));
            }

            return output;
        }

        /// <summary>
        /// Ensure the key exists in the bucketing dictionary
        /// </summary>
        private void EnsureBucketingKey(string key)
        {
            // If the key is missing from the dictionary, add it
#if NET40_OR_GREATER || NETCOREAPP
            _buckets.GetOrAdd(key, []);
#else
            if (!_buckets.ContainsKey(key))
                _buckets[key] = [];
#endif
        }

        /// <summary>
        /// Get the highest-order Field value that represents the statistics
        /// </summary>
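        /// <remarks>Falls back to CRC when no single hash type covers every non-nodump Disk, Media, and Rom.</remarks>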
        private ItemKey GetBestAvailable()
        {
            // Get the required counts
            long diskCount = DatStatistics.GetItemCount(ItemType.Disk);
            long mediaCount = DatStatistics.GetItemCount(ItemType.Media);
            long romCount = DatStatistics.GetItemCount(ItemType.Rom);
            long nodumpCount = DatStatistics.GetStatusCount(ItemStatus.Nodump);

            // If all items are supposed to have a SHA-512, we bucket by that
            if (diskCount + mediaCount + romCount - nodumpCount == DatStatistics.GetHashCount(HashType.SHA512))
                return ItemKey.SHA512;

            // If all items are supposed to have a SHA-384, we bucket by that
            else if (diskCount + mediaCount + romCount - nodumpCount == DatStatistics.GetHashCount(HashType.SHA384))
                return ItemKey.SHA384;

            // If all items are supposed to have a SHA-256, we bucket by that
            else if (diskCount + mediaCount + romCount - nodumpCount == DatStatistics.GetHashCount(HashType.SHA256))
                return ItemKey.SHA256;

            // If all items are supposed to have a SHA-1, we bucket by that
            else if (diskCount + mediaCount + romCount - nodumpCount == DatStatistics.GetHashCount(HashType.SHA1))
                return ItemKey.SHA1;

            // If all items are supposed to have a MD5, we bucket by that
            else if (diskCount + mediaCount + romCount - nodumpCount == DatStatistics.GetHashCount(HashType.MD5))
                return ItemKey.MD5;

            // If all items are supposed to have a MD4, we bucket by that
            else if (diskCount + mediaCount + romCount - nodumpCount == DatStatistics.GetHashCount(HashType.MD4))
                return ItemKey.MD4;

            // If all items are supposed to have a MD2, we bucket by that
            else if (diskCount + mediaCount + romCount - nodumpCount == DatStatistics.GetHashCount(HashType.MD2))
                return ItemKey.MD2;

            // Otherwise, we bucket by CRC
            else
                return ItemKey.CRC;
        }

        /// <summary>
        /// Get the bucketing key for a given item index
        /// </summary>
        /// <param name="itemIndex">Index of the current item</param>
        /// <param name="bucketBy">ItemKey value representing what key to get</param>
        /// <param name="lower">True if the key should be lowercased, false otherwise</param>
        /// <param name="norename">True if games should only be compared on game and file name, false if system and source are counted</param>
        private string GetBucketKey(long itemIndex, ItemKey bucketBy, bool lower, bool norename)
        {
#if NET40_OR_GREATER || NETCOREAPP
            if (!_items.TryGetValue(itemIndex, out var datItem) || datItem == null)
                return string.Empty;
#else
            if (!_items.ContainsKey(itemIndex))
                return string.Empty;

            var datItem = _items[itemIndex];
            if (datItem == null)
                return string.Empty;
#endif

            var source = GetSourceForItem(itemIndex);
            var machine = GetMachineForItem(itemIndex);

            // Treat NULL like machine
            if (bucketBy == ItemKey.NULL)
                bucketBy = ItemKey.Machine;

            // Get the bucket key
            return datItem.GetKey(bucketBy, machine.Value, source.Value, lower, norename);
        }

        /// <summary>
        /// Perform bucketing based on the item key provided
        /// </summary>
        /// <param name="bucketBy">ItemKey enum representing how to bucket the individual items</param>
        /// <param name="lower">True if the key should be lowercased, false otherwise</param>
        /// <param name="norename">True if games should only be compared on game and file name, false if system and source are counted</param>
        private void PerformBucketing(ItemKey bucketBy, bool lower, bool norename)
        {
            // Reset the bucketing values
            _bucketedBy = bucketBy;
            _buckets.Clear();

            // Get the current list of item indices
            long[] itemIndices = [.. _items.Keys];

#if NET452_OR_GREATER || NETCOREAPP
            Parallel.For(0, itemIndices.Length, Core.Globals.ParallelOptions, i =>
#elif NET40_OR_GREATER
            Parallel.For(0, itemIndices.Length, i =>
#else
            for (int i = 0; i < itemIndices.Length; i++)
#endif
            {
                // Bucket by the actual item index, not the loop position
                PerformItemBucketing(itemIndices[i], bucketBy, lower, norename);
#if NET40_OR_GREATER || NETCOREAPP
            });
#else
            }
#endif
        }

        /// <summary>
        /// Bucket a single DatItem
        /// </summary>
        /// <param name="itemIndex">Index of the item to bucket</param>
        /// <param name="bucketBy">ItemKey enum representing how to bucket the individual items</param>
        /// <param name="lower">True if the key should be lowercased, false otherwise</param>
        /// <param name="norename">True if games should only be compared on game and file name, false if system and source are counted</param>
        private void PerformItemBucketing(long itemIndex, ItemKey bucketBy, bool lower, bool norename)
        {
            string bucketKey = GetBucketKey(itemIndex, bucketBy, lower, norename);
            lock (bucketKey)
            {
                EnsureBucketingKey(bucketKey);

#if NET40_OR_GREATER || NETCOREAPP
                if (!_buckets.TryGetValue(bucketKey, out var bucket) || bucket == null)
                    return;

                bucket.Add(itemIndex);
#else
                _buckets[bucketKey].Add(itemIndex);
#endif
            }
        }

        /// <summary>
        /// Sort existing buckets for consistency
        /// </summary>
        private void PerformSorting(bool norename)
        {
            // Get the current list of bucket keys
            string[] bucketKeys = [.. _buckets.Keys];

#if NET452_OR_GREATER || NETCOREAPP
            Parallel.For(0, bucketKeys.Length, Core.Globals.ParallelOptions, i =>
#elif NET40_OR_GREATER
            Parallel.For(0, bucketKeys.Length, i =>
#else
            for (int i = 0; i < bucketKeys.Length; i++)
#endif
            {
#if NET452_OR_GREATER || NETCOREAPP
                _buckets.TryGetValue(bucketKeys[i], out var itemIndices);
#else
                var itemIndices = _buckets[bucketKeys[i]];
#endif
                if (itemIndices == null || itemIndices.Count == 0)
                {
#if NET40_OR_GREATER || NETCOREAPP
                    _buckets.TryRemove(bucketKeys[i], out _);
                    return;
#else
                    _buckets.Remove(bucketKeys[i]);
                    continue;
#endif
                }

                // Use a distinct parameter name so the item index doesn't collide with the loop variable
                var datItems = itemIndices
                    .FindAll(index => _items.ContainsKey(index))
                    .ConvertAll(index => new KeyValuePair<long, DatItem>(index, _items[index]));

                Sort(ref datItems, norename);

                // Assign through the indexer so an existing bucket is replaced with the sorted order
#if NET40_OR_GREATER || NETCOREAPP
                _buckets[bucketKeys[i]] = datItems.ConvertAll(kvp => kvp.Key);
            });
#else
                _buckets[bucketKeys[i]] = datItems.ConvertAll(kvp => kvp.Key);
            }
#endif
        }

        /// <summary>
        /// Sort a list of item pairs by SourceID, Game, and Name (in order)
        /// </summary>
        /// <param name="itemMappings">List of pairs representing the items to be sorted</param>
        /// <param name="norename">True if files are not renamed, false otherwise</param>
        /// <returns>True if it sorted correctly, false otherwise</returns>
        private bool Sort(ref List<KeyValuePair<long, DatItem>> itemMappings, bool norename)
        {
            // Create the comparer external to the delegate
            var nc = new NaturalComparer();

            itemMappings.Sort(delegate (KeyValuePair<long, DatItem> x, KeyValuePair<long, DatItem> y)
            {
                try
                {
                    // Compare on source if renaming
                    if (!norename)
                    {
                        int xSourceIndex = GetSourceForItem(x.Key).Value?.Index ?? 0;
                        int ySourceIndex = GetSourceForItem(y.Key).Value?.Index ?? 0;
                        if (xSourceIndex != ySourceIndex)
                            return xSourceIndex - ySourceIndex;
                    }

                    // Get the machines
                    Machine? xMachine = _machines[_itemToMachineMapping[x.Key]];
                    Machine? yMachine = _machines[_itemToMachineMapping[y.Key]];

                    // If machine names don't match
                    string? xMachineName = xMachine?.GetName();
                    string? yMachineName = yMachine?.GetName();
                    if (xMachineName != yMachineName)
                        return nc.Compare(xMachineName, yMachineName);

                    // If types don't match
                    string? xType = x.Value.GetStringFieldValue(Models.Metadata.DatItem.TypeKey);
                    string? yType = y.Value.GetStringFieldValue(Models.Metadata.DatItem.TypeKey);
                    if (xType != yType)
                        return xType.AsItemType() - yType.AsItemType();

                    // If directory names don't match
                    string? xDirectoryName = Path.GetDirectoryName(TextHelper.RemovePathUnsafeCharacters(x.Value.GetName()));
                    string? yDirectoryName = Path.GetDirectoryName(TextHelper.RemovePathUnsafeCharacters(y.Value.GetName()));
                    if (xDirectoryName != yDirectoryName)
                        return nc.Compare(xDirectoryName, yDirectoryName);

                    // If item names don't match
                    string? xName = Path.GetFileName(TextHelper.RemovePathUnsafeCharacters(x.Value.GetName()));
                    string? yName = Path.GetFileName(TextHelper.RemovePathUnsafeCharacters(y.Value.GetName()));
                    return nc.Compare(xName, yName);
                }
                catch
                {
                    // Absorb the error
                    return 0;
                }
            });

            return true;
        }

        /// <summary>
        /// Sort the input DAT and get the key to be used by the item
        /// </summary>
        /// <param name="datItem">Item to try to match</param>
        /// <param name="sorted">True if the DAT is already sorted accordingly, false otherwise (default)</param>
        /// <returns>Key to try to use</returns>
        private string SortAndGetKey(KeyValuePair<long, DatItem> datItem, bool sorted = false)
        {
            // If we're not already sorted, take care of it
            if (!sorted)
                BucketBy(GetBestAvailable());

            // Now that we have the sorted type, we get the proper key
            return GetBucketKey(datItem.Key, _bucketedBy, lower: true, norename: true);
        }

        #endregion

        #region Statistics

        /// <summary>
        /// Recalculate the statistics for the Dat
        /// </summary>
        public void RecalculateStats()
        {
            // Wipe out any stats already there
            DatStatistics.ResetStatistics();

            // If there are no items
            if (_items == null || _items.Count == 0)
                return;

            // Loop through and add
            foreach (var item in _items.Values)
            {
                if (item == null)
                    continue;

                DatStatistics.AddItemStatistics(item);
            }
        }

        #endregion
    }
}