#if NET40_OR_GREATER || NETCOREAPP
using System.Collections.Concurrent;
#endif
using System.Collections.Generic;
using System.IO;
#if NET40_OR_GREATER || NETCOREAPP
using System.Threading.Tasks;
#endif
using System.Xml.Serialization;
using Newtonsoft.Json;
using SabreTools.Core.Tools;
using SabreTools.DatItems;
using SabreTools.DatItems.Formats;
using SabreTools.Hashing;
using SabreTools.IO.Logging;
using SabreTools.Matching.Compare;
namespace SabreTools.DatFiles
{
/// <summary>
/// Item dictionary with statistics, bucketing, and sorting
/// </summary>
[JsonObject("items"), XmlRoot("items")]
public class ItemDictionary
{
#region Private instance variables
/// <summary>
/// Determine the bucketing key for all items
/// </summary>
private ItemKey _bucketedBy = ItemKey.NULL;
/// <summary>
/// Internal dictionary for the class
/// </summary>
#if NET40_OR_GREATER || NETCOREAPP
private readonly ConcurrentDictionary<string, List<DatItem>?> _items = [];
#else
private readonly Dictionary<string, List<DatItem>?> _items = [];
#endif
/// <summary>
/// Logging object
/// </summary>
private readonly Logger _logger;
#endregion
#region Fields
/// <summary>
/// Get the keys in sorted order from the file dictionary
/// </summary>
/// <returns>List of the keys in sorted order</returns>
[JsonIgnore, XmlIgnore]
public string[] SortedKeys
{
get
{
List<string> keys = [.. _items.Keys];
keys.Sort(new NaturalComparer());
return [.. keys];
}
}
/// <summary>
/// DAT statistics
/// </summary>
[JsonIgnore, XmlIgnore]
public DatStatistics DatStatistics { get; } = new DatStatistics();
#endregion
#region Constructors
/// <summary>
/// Generic constructor
/// </summary>
public ItemDictionary()
{
_logger = new Logger(this);
}
#endregion
#region Accessors
/// <summary>
/// Add a DatItem to the dictionary after checking
/// </summary>
/// <param name="item">Item data to check against</param>
/// <param name="statsOnly">True to only add item statistics while parsing, false otherwise</param>
/// <returns>The key for the item</returns>
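/// <example>
/// A minimal usage sketch (illustrative only; assumes a parameterless Rom constructor and that
/// name/hash fields are populated elsewhere before adding):
/// <code>
/// var dict = new ItemDictionary();
/// var rom = new Rom();
/// rom.SetName("game.bin");
///
/// // Adds the item (cleaning hash data as needed), updates statistics, and returns the bucket key
/// string key = dict.AddItem(rom, statsOnly: false);
/// </code>
/// </example>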
public string AddItem(DatItem item, bool statsOnly)
{
string key;
// If we have a Disk, File, Media, or Rom, clean the hash data
if (item is Disk disk)
{
// If the file has absolutely no hashes, skip and log
if (disk.GetStringFieldValue(Models.Metadata.Disk.StatusKey).AsEnumValue<ItemStatus>() != ItemStatus.Nodump
&& string.IsNullOrEmpty(disk.GetStringFieldValue(Models.Metadata.Disk.MD5Key))
&& string.IsNullOrEmpty(disk.GetStringFieldValue(Models.Metadata.Disk.SHA1Key)))
{
_logger.Verbose($"Incomplete entry for '{disk.GetName()}' will be output as nodump");
disk.SetFieldValue(Models.Metadata.Disk.StatusKey, ItemStatus.Nodump.AsStringValue());
}
item = disk;
}
else if (item is DatItems.Formats.File file)
{
// If the file has absolutely no hashes, skip and log
if (string.IsNullOrEmpty(file.CRC)
&& string.IsNullOrEmpty(file.MD5)
&& string.IsNullOrEmpty(file.SHA1)
&& string.IsNullOrEmpty(file.SHA256))
{
_logger.Verbose($"Incomplete entry for '{file.GetName()}' will be output as nodump");
}
item = file;
}
else if (item is Media media)
{
// If the file has absolutely no hashes, skip and log
if (string.IsNullOrEmpty(media.GetStringFieldValue(Models.Metadata.Media.MD5Key))
&& string.IsNullOrEmpty(media.GetStringFieldValue(Models.Metadata.Media.SHA1Key))
&& string.IsNullOrEmpty(media.GetStringFieldValue(Models.Metadata.Media.SHA256Key))
&& string.IsNullOrEmpty(media.GetStringFieldValue(Models.Metadata.Media.SpamSumKey)))
{
_logger.Verbose($"Incomplete entry for '{media.GetName()}' will be output as nodump");
}
item = media;
}
else if (item is Rom rom)
{
long? size = rom.GetInt64FieldValue(Models.Metadata.Rom.SizeKey);
// If we have the case where there is SHA-1 and nothing else, we don't fill in any other part of the data
if (size == null && !string.IsNullOrEmpty(rom.GetStringFieldValue(Models.Metadata.Rom.SHA1Key)))
{
// No-op, just catch it so it doesn't go further
//logger.Verbose($"{Header.GetStringFieldValue(DatHeader.FileNameKey)}: Entry with only SHA-1 found - '{rom.GetName()}'");
}
// If we have a rom and it's missing size AND the hashes match a 0-byte file, fill in the rest of the info
else if ((size == 0 || size == null)
&& (string.IsNullOrEmpty(rom.GetStringFieldValue(Models.Metadata.Rom.CRCKey)) || rom.HasZeroHash()))
{
rom.SetFieldValue(Models.Metadata.Rom.SizeKey, Constants.SizeZero.ToString());
rom.SetFieldValue(Models.Metadata.Rom.CRCKey, ZeroHash.CRC32Str);
rom.SetFieldValue<string?>(Models.Metadata.Rom.MD2Key, null); // ZeroHash.GetString(HashType.MD2)
rom.SetFieldValue<string?>(Models.Metadata.Rom.MD4Key, null); // ZeroHash.GetString(HashType.MD4)
rom.SetFieldValue(Models.Metadata.Rom.MD5Key, ZeroHash.MD5Str);
rom.SetFieldValue(Models.Metadata.Rom.SHA1Key, ZeroHash.SHA1Str);
rom.SetFieldValue<string?>(Models.Metadata.Rom.SHA256Key, null); // ZeroHash.SHA256Str;
rom.SetFieldValue<string?>(Models.Metadata.Rom.SHA384Key, null); // ZeroHash.SHA384Str;
rom.SetFieldValue<string?>(Models.Metadata.Rom.SHA512Key, null); // ZeroHash.SHA512Str;
rom.SetFieldValue<string?>(Models.Metadata.Rom.SpamSumKey, null); // ZeroHash.SpamSumStr;
}
// If the file has no size and it's not the above case, skip and log
else if (rom.GetStringFieldValue(Models.Metadata.Rom.StatusKey).AsEnumValue<ItemStatus>() != ItemStatus.Nodump && (size == 0 || size == null))
{
//logger.Verbose($"{Header.GetStringFieldValue(DatHeader.FileNameKey)}: Incomplete entry for '{rom.GetName()}' will be output as nodump");
rom.SetFieldValue(Models.Metadata.Rom.StatusKey, ItemStatus.Nodump.AsStringValue());
}
// If the file has a size but absolutely no hashes, skip and log
else if (rom.GetStringFieldValue(Models.Metadata.Rom.StatusKey).AsEnumValue<ItemStatus>() != ItemStatus.Nodump
&& size != null && size > 0
&& !rom.HasHashes())
{
//logger.Verbose($"{Header.GetStringFieldValue(DatHeader.FileNameKey)}: Incomplete entry for '{rom.GetName()}' will be output as nodump");
rom.SetFieldValue(Models.Metadata.Rom.StatusKey, ItemStatus.Nodump.AsStringValue());
}
item = rom;
}
// Get the key and add the file
key = GetBucketKey(item, _bucketedBy, lower: true, norename: true);
// If only adding statistics, we add an empty key for games and then just item stats
if (statsOnly)
{
EnsureBucketingKey(key);
DatStatistics.AddItemStatistics(item);
}
else
{
AddItem(key, item);
}
return key;
}
/// <summary>
/// Remove all items marked for removal
/// </summary>
public void ClearMarked()
{
foreach (string key in SortedKeys)
{
// Get the unfiltered list
List<DatItem> list = GetItemsForBucket(key, filter: false);
foreach (DatItem datItem in list)
{
if (datItem.GetBoolFieldValue(DatItem.RemoveKey) != true)
continue;
RemoveItem(key, datItem);
}
}
}
/// <summary>
/// Get the items associated with a bucket name
/// </summary>
/// <param name="bucketName">Name of the bucket to retrieve items for</param>
/// <param name="filter">Indicates if RemoveKey filtering is performed</param>
/// <returns>List representing the bucket items, empty on missing</returns>
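/// <example>
/// A rough usage sketch (illustrative only; <c>dict</c> is assumed to be a populated ItemDictionary):
/// <code>
/// foreach (string bucket in dict.SortedKeys)
/// {
///     // filter: true skips items already marked for removal
///     foreach (DatItem item in dict.GetItemsForBucket(bucket, filter: true))
///     {
///         // Process each live item in the bucket here
///     }
/// }
/// </code>
/// </example>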
public List<DatItem> GetItemsForBucket(string? bucketName, bool filter = false)
{
if (bucketName == null)
return [];
#if NET40_OR_GREATER || NETCOREAPP
if (!_items.TryGetValue(bucketName, out var items))
return [];
#else
if (!_items.ContainsKey(bucketName))
return [];
var items = _items[bucketName];
#endif
if (items == null)
return [];
var datItems = new List<DatItem>();
foreach (DatItem item in items)
{
if (!filter || item.GetBoolFieldValue(DatItem.RemoveKey) != true)
datItems.Add(item);
}
return datItems;
}
/// <summary>
/// Remove a key from the file dictionary if it exists
/// </summary>
/// <param name="key">Key in the dictionary to remove</param>
public bool RemoveBucket(string key)
{
#if NET40_OR_GREATER || NETCOREAPP
bool removed = _items.TryRemove(key, out var list);
#else
if (!_items.ContainsKey(key))
return false;
bool removed = true;
var list = _items[key];
_items.Remove(key);
#endif
if (list == null)
return removed;
foreach (var item in list)
{
DatStatistics.RemoveItemStatistics(item);
}
return removed;
}
/// <summary>
/// Remove the first instance of a value from the file dictionary if it exists
/// </summary>
/// <param name="key">Key in the dictionary to remove from</param>
/// <param name="value">Value to remove from the dictionary</param>
public bool RemoveItem(string key, DatItem value)
{
// Explicit lock for some weird corner cases
lock (key)
{
// If the key doesn't exist, return
#if NET40_OR_GREATER || NETCOREAPP
if (!_items.TryGetValue(key, out var list) || list == null)
return false;
#else
if (!_items.ContainsKey(key))
return false;
var list = _items[key];
if (list == null)
return false;
#endif
// If the value doesn't exist in the key, return
if (!list.Exists(i => i.Equals(value)))
return false;
// Remove the statistics first
DatStatistics.RemoveItemStatistics(value);
return list.Remove(value);
}
}
/// <summary>
/// Override the internal ItemKey value
/// </summary>
/// <param name="newBucket">New ItemKey value to use for bucketing</param>
public void SetBucketedBy(ItemKey newBucket)
{
_bucketedBy = newBucket;
}
/// <summary>
/// Add a value to the file dictionary
/// </summary>
/// <param name="key">Key in the dictionary to add to</param>
/// <param name="value">Value to add to the dictionary</param>
internal void AddItem(string key, DatItem value)
{
// Explicit lock for some weird corner cases
lock (key)
{
// Ensure the key exists
EnsureBucketingKey(key);
// If item is null, don't add it
if (value == null)
return;
// Now add the value
_items[key]!.Add(value);
// Now update the statistics
DatStatistics.AddItemStatistics(value);
}
}
#endregion
#region Bucketing
/// <summary>
/// Take the arbitrarily bucketed Files Dictionary and convert to one bucketed by a user-defined method
/// </summary>
/// <param name="bucketBy">ItemKey enum representing how to bucket the individual items</param>
/// <param name="lower">True if the key should be lowercased (default), false otherwise</param>
/// <param name="norename">True if games should only be compared on game and file name, false if system and source are counted</param>
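/// <example>
/// A rough usage sketch (illustrative only; <c>dict</c> is assumed to be a populated ItemDictionary):
/// <code>
/// // Re-bucket all items by CRC so that identical files across machines share a key
/// dict.BucketBy(ItemKey.CRC);
///
/// // SortedKeys now enumerates the CRC-based keys in natural-sort order
/// foreach (string crc in dict.SortedKeys) { /* ... */ }
/// </code>
/// </example>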
public void BucketBy(ItemKey bucketBy, bool lower = true, bool norename = true)
{
// If we have a situation where there's no dictionary or no keys at all, we skip
if (_items == null || _items.Count == 0)
return;
// If the sorted type isn't the same, we want to sort the dictionary accordingly
if (_bucketedBy != bucketBy && bucketBy != ItemKey.NULL)
{
_logger.User($"Organizing roms by {bucketBy}");
PerformBucketing(bucketBy, lower, norename);
}
// Sort the dictionary to be consistent
_logger.User($"Sorting roms by {bucketBy}");
PerformSorting();
}
/// <summary>
/// Perform deduplication on the current sorted dictionary
/// </summary>
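/// <example>
/// A rough usage sketch (illustrative only; <c>dict</c> is assumed to be a populated ItemDictionary,
/// bucketed by a hash-based key so that duplicates share a bucket):
/// <code>
/// dict.BucketBy(ItemKey.CRC);
/// dict.Deduplicate();
/// </code>
/// </example>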
public void Deduplicate()
{
#if NET452_OR_GREATER || NETCOREAPP
Parallel.ForEach(SortedKeys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
Parallel.ForEach(SortedKeys, key =>
#else
foreach (var key in SortedKeys)
#endif
{
// Get the possibly unsorted list
List<DatItem> sortedList = GetItemsForBucket(key);
// Sort and merge the list
Sort(ref sortedList, false);
sortedList = Merge(sortedList);
// Add the list back to the dictionary
RemoveBucket(key);
sortedList.ForEach(item => AddItem(key, item));
#if NET40_OR_GREATER || NETCOREAPP
});
#else
}
#endif
}
/// <summary>
/// Merge an arbitrary set of DatItems based on the supplied information
/// </summary>
/// <param name="items">List of DatItem objects representing the items to be merged</param>
/// <returns>A List of DatItem objects representing the merged items</returns>
/// <remarks>TODO: Make this internal like the DB counterpart</remarks>
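/// <example>
/// A rough usage sketch (illustrative only; <c>candidates</c> is a hypothetical list of items
/// from the same bucket that may contain duplicates):
/// <code>
/// List&lt;DatItem&gt; merged = ItemDictionary.Merge(candidates);
/// // Nodump entries pass through unchanged; duplicates are collapsed and missing hashes filled in
/// </code>
/// </example>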
public static List<DatItem> Merge(List<DatItem>? items)
{
// Check for null or blank inputs first
if (items == null || items.Count == 0)
return [];
// Create output list
List<DatItem> output = [];
// Then deduplicate them by checking to see if data matches previous saved roms
int nodumpCount = 0;
foreach (DatItem datItem in items)
{
// If we don't have a Disk, File, Media, or Rom, we skip checking for duplicates
if (datItem is not Disk && datItem is not DatItems.Formats.File && datItem is not Media && datItem is not Rom)
continue;
// If it's a nodump, add and skip
if (datItem is Rom rom && rom.GetStringFieldValue(Models.Metadata.Rom.StatusKey).AsEnumValue<ItemStatus>() == ItemStatus.Nodump)
{
output.Add(datItem);
nodumpCount++;
continue;
}
else if (datItem is Disk disk && disk.GetStringFieldValue(Models.Metadata.Disk.StatusKey).AsEnumValue<ItemStatus>() == ItemStatus.Nodump)
{
output.Add(datItem);
nodumpCount++;
continue;
}
// If it's the first non-nodump item in the list, don't touch it
if (output.Count == nodumpCount)
{
output.Add(datItem);
continue;
}
// Find the index of the first duplicate, if one exists
int pos = output.FindIndex(lastItem => datItem.GetDuplicateStatus(lastItem) != 0x00);
if (pos < 0)
{
output.Add(datItem);
continue;
}
// Get the duplicate item
DatItem savedItem = output[pos];
DupeType dupetype = datItem.GetDuplicateStatus(savedItem);
// Disks, File, Media, and Roms have more information to fill
if (datItem is Disk diskItem && savedItem is Disk savedDisk)
savedDisk.FillMissingInformation(diskItem);
else if (datItem is DatItems.Formats.File fileItem && savedItem is DatItems.Formats.File savedFile)
savedFile.FillMissingInformation(fileItem);
else if (datItem is Media mediaItem && savedItem is Media savedMedia)
savedMedia.FillMissingInformation(mediaItem);
else if (datItem is Rom romItem && savedItem is Rom savedRom)
savedRom.FillMissingInformation(romItem);
// Set the duplicate type on the saved item
savedItem.SetFieldValue(DatItem.DupeTypeKey, dupetype);
// Get the sources associated with the items
var savedSource = savedItem.GetFieldValue<Source?>(DatItem.SourceKey);
var itemSource = datItem.GetFieldValue<Source?>(DatItem.SourceKey);
// Get the machines associated with the items
var savedMachine = savedItem.GetFieldValue<Machine>(DatItem.MachineKey);
var itemMachine = datItem.GetFieldValue<Machine>(DatItem.MachineKey);
// If the current source has a lower ID than the saved, use the saved source
if (itemSource?.Index < savedSource?.Index)
{
datItem.SetFieldValue(DatItem.SourceKey, savedSource.Clone() as Source);
savedItem.CopyMachineInformation(datItem);
savedItem.SetName(datItem.GetName());
}
// If the saved machine is a child of the current machine, use the current machine instead
if (savedMachine?.GetStringFieldValue(Models.Metadata.Machine.CloneOfKey) == itemMachine?.GetStringFieldValue(Models.Metadata.Machine.NameKey)
|| savedMachine?.GetStringFieldValue(Models.Metadata.Machine.RomOfKey) == itemMachine?.GetStringFieldValue(Models.Metadata.Machine.NameKey))
{
savedItem.CopyMachineInformation(datItem);
savedItem.SetName(datItem.GetName());
}
// Replace the original item in the list
output.RemoveAt(pos);
output.Insert(pos, savedItem);
}
// Then return the result
return output;
}
/// <summary>
/// List all duplicates found in a DAT based on a DatItem
/// </summary>
/// <param name="datItem">Item to try to match</param>
/// <param name="sorted">True if the DAT is already sorted accordingly, false otherwise (default)</param>
/// <returns>List of matched DatItem objects</returns>
/// <remarks>
/// This also sets the remove flag on any duplicates found
/// TODO: Figure out if removal should be a flag or just removed entirely
/// </remarks>
internal List<DatItem> GetDuplicates(DatItem datItem, bool sorted = false)
{
// Check for an empty rom list first
if (DatStatistics.TotalCount == 0)
return [];
// We want to get the proper key for the DatItem
string key = SortAndGetKey(datItem, sorted);
// Get the items for the current key, if possible
List<DatItem> items = GetItemsForBucket(key, filter: false);
if (items.Count == 0)
return [];
// Try to find duplicates
List<DatItem> output = [];
foreach (DatItem other in items)
{
// Skip items marked for removal
if (other.GetBoolFieldValue(DatItem.RemoveKey) == true)
continue;
// Mark duplicates for future removal
if (datItem.Equals(other))
{
other.SetFieldValue(DatItem.RemoveKey, true);
output.Add(other);
}
}
// Return any matching items
return output;
}
/// <summary>
/// Check if a DAT contains the given DatItem
/// </summary>
/// <param name="datItem">Item to try to match</param>
/// <param name="sorted">True if the DAT is already sorted accordingly, false otherwise (default)</param>
/// <returns>True if it contains the rom, false otherwise</returns>
internal bool HasDuplicates(DatItem datItem, bool sorted = false)
{
// Check for an empty rom list first
if (DatStatistics.TotalCount == 0)
return false;
// We want to get the proper key for the DatItem
string key = SortAndGetKey(datItem, sorted);
// Try to find duplicates
List<DatItem> roms = GetItemsForBucket(key);
if (roms.Count == 0)
return false;
return roms.FindIndex(datItem.Equals) > -1;
}
/// <summary>
/// Ensure the key exists in the items dictionary
/// </summary>
/// <param name="key">Key to ensure</param>
private void EnsureBucketingKey(string key)
{
// If the key is missing from the dictionary, add it
#if NET40_OR_GREATER || NETCOREAPP
_items.GetOrAdd(key, []);
#else
if (!_items.ContainsKey(key))
_items[key] = [];
#endif
}
/// <summary>
/// Get the highest-order Field value that represents the statistics
/// </summary>
private ItemKey GetBestAvailable()
{
// Get the required counts
long diskCount = DatStatistics.GetItemCount(ItemType.Disk);
long mediaCount = DatStatistics.GetItemCount(ItemType.Media);
long romCount = DatStatistics.GetItemCount(ItemType.Rom);
long nodumpCount = DatStatistics.GetStatusCount(ItemStatus.Nodump);
// If all items are supposed to have a SHA-512, we bucket by that
if (diskCount + mediaCount + romCount - nodumpCount == DatStatistics.GetHashCount(HashType.SHA512))
return ItemKey.SHA512;
// If all items are supposed to have a SHA-384, we bucket by that
else if (diskCount + mediaCount + romCount - nodumpCount == DatStatistics.GetHashCount(HashType.SHA384))
return ItemKey.SHA384;
// If all items are supposed to have a SHA-256, we bucket by that
else if (diskCount + mediaCount + romCount - nodumpCount == DatStatistics.GetHashCount(HashType.SHA256))
return ItemKey.SHA256;
// If all items are supposed to have a SHA-1, we bucket by that
else if (diskCount + mediaCount + romCount - nodumpCount == DatStatistics.GetHashCount(HashType.SHA1))
return ItemKey.SHA1;
// If all items are supposed to have a MD5, we bucket by that
else if (diskCount + mediaCount + romCount - nodumpCount == DatStatistics.GetHashCount(HashType.MD5))
return ItemKey.MD5;
// If all items are supposed to have a MD4, we bucket by that
else if (diskCount + mediaCount + romCount - nodumpCount == DatStatistics.GetHashCount(HashType.MD4))
return ItemKey.MD4;
// If all items are supposed to have a MD2, we bucket by that
else if (diskCount + mediaCount + romCount - nodumpCount == DatStatistics.GetHashCount(HashType.MD2))
return ItemKey.MD2;
// Otherwise, we bucket by CRC
else
return ItemKey.CRC;
}
/// <summary>
/// Get the bucketing key for a given item
/// </summary>
/// <param name="datItem">The current item</param>
/// <param name="bucketBy">ItemKey value representing what key to get</param>
/// <param name="lower">True if the key should be lowercased, false otherwise</param>
/// <param name="norename">True if games should only be compared on game and file name, false if system and source are counted</param>
private static string GetBucketKey(DatItem datItem, ItemKey bucketBy, bool lower, bool norename)
{
if (datItem == null)
return string.Empty;
// Treat NULL like machine
if (bucketBy == ItemKey.NULL)
bucketBy = ItemKey.Machine;
// Get the bucket key
return datItem.GetKey(bucketBy, lower, norename);
}
/// <summary>
/// Perform bucketing based on the item key provided
/// </summary>
/// <param name="bucketBy">ItemKey enum representing how to bucket the individual items</param>
/// <param name="lower">True if the key should be lowercased, false otherwise</param>
/// <param name="norename">True if games should only be compared on game and file name, false if system and source are counted</param>
private void PerformBucketing(ItemKey bucketBy, bool lower, bool norename)
{
// Set the sorted type
_bucketedBy = bucketBy;
// First do the initial sort of all of the roms inplace
List<string> oldkeys = [.. SortedKeys];
#if NET452_OR_GREATER || NETCOREAPP
Parallel.For(0, oldkeys.Count, Core.Globals.ParallelOptions, k =>
#elif NET40_OR_GREATER
Parallel.For(0, oldkeys.Count, k =>
#else
for (int k = 0; k < oldkeys.Count; k++)
#endif
{
string key = oldkeys[k];
if (GetItemsForBucket(key).Count == 0)
RemoveBucket(key);
// Now add each of the roms to their respective keys
for (int i = 0; i < GetItemsForBucket(key).Count; i++)
{
DatItem item = GetItemsForBucket(key)[i];
if (item == null)
continue;
// We want to get the key most appropriate for the given sorting type
string newkey = item.GetKey(bucketBy, lower, norename);
// If the key is different, move the item to the new key
if (newkey != key)
{
AddItem(newkey, item);
bool removed = RemoveItem(key, item);
if (!removed)
break;
i--; // This makes sure the index stays on the correct item since one was removed
}
}
// If the key is now empty, remove it
if (GetItemsForBucket(key).Count == 0)
RemoveBucket(key);
#if NET40_OR_GREATER || NETCOREAPP
});
#else
}
#endif
}
/// <summary>
/// Perform in-place sorting of the dictionary
/// </summary>
private void PerformSorting()
{
#if NET452_OR_GREATER || NETCOREAPP
Parallel.ForEach(SortedKeys, Core.Globals.ParallelOptions, key =>
#elif NET40_OR_GREATER
Parallel.ForEach(SortedKeys, key =>
#else
foreach (var key in SortedKeys)
#endif
{
// Get the possibly unsorted list
List<DatItem> sortedList = GetItemsForBucket(key);
// Sort the list of items to be consistent
Sort(ref sortedList, false);
// Add the list back to the dictionary
RemoveBucket(key);
sortedList.ForEach(item => AddItem(key, item));
#if NET40_OR_GREATER || NETCOREAPP
});
#else
}
#endif
}
/// <summary>
/// Sort a list of DatItem objects by SourceID, Game, and Name (in order)
/// </summary>
/// <param name="items">List of DatItem objects representing the items to be sorted</param>
/// <param name="norename">True if files are not renamed, false otherwise</param>
/// <returns>True if it sorted correctly, false otherwise</returns>
private bool Sort(ref List<DatItem> items, bool norename)
{
items.Sort(delegate (DatItem x, DatItem y)
{
try
{
var nc = new NaturalComparer();
// Get the machines
Machine? xMachine = x.GetFieldValue<Machine>(DatItem.MachineKey);
Machine? yMachine = y.GetFieldValue<Machine>(DatItem.MachineKey);
// If machine names don't match
string? xMachineName = xMachine?.GetStringFieldValue(Models.Metadata.Machine.NameKey);
string? yMachineName = yMachine?.GetStringFieldValue(Models.Metadata.Machine.NameKey);
if (xMachineName != yMachineName)
return nc.Compare(xMachineName, yMachineName);
// If types don't match
string? xType = x.GetStringFieldValue(Models.Metadata.DatItem.TypeKey);
string? yType = y.GetStringFieldValue(Models.Metadata.DatItem.TypeKey);
if (xType != yType)
return xType.AsEnumValue<ItemType>() - yType.AsEnumValue<ItemType>();
// If directory names don't match
string? xDirectoryName = Path.GetDirectoryName(TextHelper.RemovePathUnsafeCharacters(x.GetName() ?? string.Empty));
string? yDirectoryName = Path.GetDirectoryName(TextHelper.RemovePathUnsafeCharacters(y.GetName() ?? string.Empty));
if (xDirectoryName != yDirectoryName)
return nc.Compare(xDirectoryName, yDirectoryName);
// If item names don't match
string? xName = Path.GetFileName(TextHelper.RemovePathUnsafeCharacters(x.GetName() ?? string.Empty));
string? yName = Path.GetFileName(TextHelper.RemovePathUnsafeCharacters(y.GetName() ?? string.Empty));
if (xName != yName)
return nc.Compare(xName, yName);
// Otherwise, compare on machine or source, depending on the flag
int? xSourceIndex = x.GetFieldValue<Source?>(DatItem.SourceKey)?.Index;
int? ySourceIndex = y.GetFieldValue<Source?>(DatItem.SourceKey)?.Index;
return (norename ? nc.Compare(xMachineName, yMachineName) : (xSourceIndex - ySourceIndex) ?? 0);
}
catch
{
// Absorb the error
return 0;
}
});
return true;
}
/// <summary>
/// Sort the input DAT and get the key to be used by the item
/// </summary>
/// <param name="datItem">Item to try to match</param>
/// <param name="sorted">True if the DAT is already sorted accordingly, false otherwise (default)</param>
/// <returns>Key to try to use</returns>
private string SortAndGetKey(DatItem datItem, bool sorted = false)
{
// If we're not already sorted, take care of it
if (!sorted)
BucketBy(GetBestAvailable());
// Now that we have the sorted type, we get the proper key
return GetBucketKey(datItem, _bucketedBy, lower: true, norename: true);
}
#endregion
#region Statistics
/// <summary>
/// Recalculate the statistics for the Dat
/// </summary>
public void RecalculateStats()
{
// Wipe out any stats already there
DatStatistics.ResetStatistics();
// If we have a blank Dat in any way, return
if (_items == null || _items.Count == 0)
return;
// Loop through and add
foreach (string key in _items.Keys)
{
List<DatItem>? datItems = _items[key];
if (datItems == null)
continue;
foreach (DatItem item in datItems)
{
DatStatistics.AddItemStatistics(item);
}
}
}
#endregion
}
}