using System;
using System.Collections.Generic;
using System.IO;
using System.Xml.Serialization;
using Newtonsoft.Json;
using SabreTools.Core.Filter;
using SabreTools.Core.Tools;
using SabreTools.DatItems;
using SabreTools.DatItems.Formats;
using SabreTools.Hashing;
using SabreTools.IO.Logging;
using SabreTools.Matching.Compare;

namespace SabreTools.DatFiles
{
    /// <summary>
    /// Represents a format-agnostic DAT
    /// </summary>
    [JsonObject("datfile"), XmlRoot("datfile")]
    public abstract partial class DatFile
    {
        #region Fields

        /// <summary>
        /// Header values
        /// </summary>
        [JsonProperty("header"), XmlElement("header")]
        public DatHeader Header { get; private set; } = new DatHeader();

        /// <summary>
        /// Modifier values
        /// </summary>
        [JsonProperty("modifiers"), XmlElement("modifiers")]
        public DatModifiers Modifiers { get; private set; } = new DatModifiers();

        /// <summary>
        /// DatItems and related statistics
        /// </summary>
        [JsonProperty("items"), XmlElement("items")]
        public ItemDictionary Items { get; private set; } = new ItemDictionary();

        /// <summary>
        /// DatItems and related statistics
        /// </summary>
        [JsonProperty("items"), XmlElement("items")]
        public ItemDictionaryDB ItemsDB { get; private set; } = new ItemDictionaryDB();

        /// <summary>
        /// DAT statistics
        /// </summary>
        [JsonIgnore, XmlIgnore]
        public DatStatistics DatStatistics => Items.DatStatistics;
        //public DatStatistics DatStatistics => ItemsDB.DatStatistics;

        /// <summary>
        /// List of supported types for writing
        /// </summary>
        public abstract ItemType[] SupportedTypes { get; }

        #endregion

        #region Logging

        /// <summary>
        /// Logging object
        /// </summary>
        [JsonIgnore, XmlIgnore]
        protected Logger _logger;

        #endregion

        #region Constructors

        /// <summary>
        /// Create a new DatFile from an existing one
        /// </summary>
        /// <param name="datFile">DatFile to get the values from</param>
        public DatFile(DatFile? datFile)
        {
            _logger = new Logger(this);
            if (datFile != null)
            {
                Header = (DatHeader)datFile.Header.Clone();
                Modifiers = (DatModifiers)datFile.Modifiers.Clone();
                Items = datFile.Items;
                ItemsDB = datFile.ItemsDB;
            }
        }

        /// <summary>
        /// Fill the header values based on existing Header and path
        /// </summary>
        /// <param name="path">Path used for creating a name, if necessary</param>
        /// <param name="bare">True if the date should be omitted from name and description, false otherwise</param>
        public void FillHeaderFromPath(string path, bool bare)
        {
            // Get the header strings
            string? name = Header.GetStringFieldValue(Models.Metadata.Header.NameKey);
            string? description = Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey);
            string? date = Header.GetStringFieldValue(Models.Metadata.Header.DateKey);

            // If the description is defined but not the name, set the name from the description
            if (string.IsNullOrEmpty(name) && !string.IsNullOrEmpty(description))
            {
                name = description + (bare ? string.Empty : $" ({date})");
            }

            // If the name is defined but not the description, set the description from the name
            else if (!string.IsNullOrEmpty(name) && string.IsNullOrEmpty(description))
            {
                description = name + (bare ? string.Empty : $" ({date})");
            }

            // If neither the name nor the description is defined, set them from the automatic values
            else if (string.IsNullOrEmpty(name) && string.IsNullOrEmpty(description))
            {
                string[] splitpath = path.TrimEnd(Path.DirectorySeparatorChar).Split(Path.DirectorySeparatorChar);
#if NETFRAMEWORK
                name = splitpath[splitpath.Length - 1];
                description = splitpath[splitpath.Length - 1] + (bare ? string.Empty : $" ({date})");
#else
                name = splitpath[^1] + (bare ? string.Empty : $" ({date})");
                description = splitpath[^1] + (bare ? string.Empty : $" ({date})");
#endif
            }

            // Trim both fields
            name = name?.Trim();
            description = description?.Trim();

            // Set the fields back
            Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, name);
            Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, description);
        }
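
        // Illustrative sketch only (not part of the original source): with an empty header, a call
        // like FillHeaderFromPath(@"DATs\Some Collection", bare: false) splits the path on the
        // directory separator and uses the last segment, so the description ends up as roughly
        // "Some Collection (<date>)", where <date> is whatever the header stores under
        // Models.Metadata.Header.DateKey.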

        #endregion

        #region Accessors

        /// <summary>
        /// Remove any keys that have null or empty values
        /// </summary>
        public void ClearEmpty()
        {
            ClearEmptyImpl();
            ClearEmptyImplDB();
        }

        /// <summary>
        /// Set the internal header
        /// </summary>
        /// <param name="datHeader">Replacement header to be used</param>
        public void SetHeader(DatHeader? datHeader)
        {
            if (datHeader != null)
                Header = (DatHeader)datHeader.Clone();
        }

        /// <summary>
        /// Set the internal modifiers
        /// </summary>
        /// <param name="datModifiers">Replacement modifiers to be used</param>
        public void SetModifiers(DatModifiers datModifiers)
        {
            Modifiers = (DatModifiers)datModifiers.Clone();
        }

        /// <summary>
        /// Remove any keys that have null or empty values
        /// </summary>
        private void ClearEmptyImpl()
        {
            foreach (string key in Items.SortedKeys)
            {
                // If the value is empty, remove
                List<DatItem> value = GetItemsForBucket(key);
                if (value.Count == 0)
                    RemoveBucket(key);

                // If there are no non-blank items, remove
                else if (value.FindIndex(i => i != null && i is not Blank) == -1)
                    RemoveBucket(key);
            }
        }

        /// <summary>
        /// Remove any keys that have null or empty values
        /// </summary>
        private void ClearEmptyImplDB()
        {
            foreach (string key in ItemsDB.SortedKeys)
            {
                // If the value is empty, remove
                List<DatItem> value = [.. GetItemsForBucketDB(key).Values];
                if (value.Count == 0)
                    RemoveBucketDB(key);

                // If there are no non-blank items, remove
                else if (value.FindIndex(i => i != null && i is not Blank) == -1)
                    RemoveBucketDB(key);
            }
        }

        #endregion

        #region Item Dictionary Passthrough - Accessors

        /// <summary>
        /// Add a DatItem to the dictionary after checking
        /// </summary>
        /// <param name="item">Item data to check against</param>
        /// <param name="statsOnly">True to only add item statistics while parsing, false otherwise</param>
        /// <returns>The key for the item</returns>
        public string AddItem(DatItem item, bool statsOnly)
        {
            return Items.AddItem(item, statsOnly);
        }

        /// <summary>
        /// Add a DatItem to the dictionary after validation
        /// </summary>
        /// <param name="item">Item data to validate</param>
        /// <param name="machineIndex">Index of the machine related to the item</param>
        /// <param name="sourceIndex">Index of the source related to the item</param>
        /// <param name="statsOnly">True to only add item statistics while parsing, false otherwise</param>
        /// <returns>The index for the added item, -1 on error</returns>
        public long AddItemDB(DatItem item, long machineIndex, long sourceIndex, bool statsOnly)
        {
            return ItemsDB.AddItem(item, machineIndex, sourceIndex, statsOnly);
        }

        /// <summary>
        /// Add a machine, returning the insert index
        /// </summary>
        public long AddMachineDB(Machine machine)
        {
            return ItemsDB.AddMachine(machine);
        }

        /// <summary>
        /// Add a source, returning the insert index
        /// </summary>
        public long AddSourceDB(Source source)
        {
            return ItemsDB.AddSource(source);
        }

        /// <summary>
        /// Remove all items marked for removal
        /// </summary>
        public void ClearMarked()
        {
            Items.ClearMarked();
            ItemsDB.ClearMarked();
        }

        /// <summary>
        /// Get the items associated with a bucket name
        /// </summary>
        public List<DatItem> GetItemsForBucket(string? bucketName, bool filter = false)
            => Items.GetItemsForBucket(bucketName, filter);

        /// <summary>
        /// Get the indices and items associated with a bucket name
        /// </summary>
        public Dictionary<long, DatItem> GetItemsForBucketDB(string? bucketName, bool filter = false)
            => ItemsDB.GetItemsForBucket(bucketName, filter);

        /// <summary>
        /// Get all machines and their indices
        /// </summary>
        public IDictionary<long, Machine> GetMachinesDB()
            => ItemsDB.GetMachines();

        /// <summary>
        /// Get the index and machine associated with an item index
        /// </summary>
        public KeyValuePair<long, Machine?> GetMachineForItemDB(long itemIndex)
            => ItemsDB.GetMachineForItem(itemIndex);

        /// <summary>
        /// Get the index and source associated with an item index
        /// </summary>
        public KeyValuePair<long, Source?> GetSourceForItemDB(long itemIndex)
            => ItemsDB.GetSourceForItem(itemIndex);

        /// <summary>
        /// Remove a key from the file dictionary if it exists
        /// </summary>
        /// <param name="key">Key in the dictionary to remove</param>
        public bool RemoveBucket(string key)
        {
            return Items.RemoveBucket(key);
        }

        /// <summary>
        /// Remove a key from the file dictionary if it exists
        /// </summary>
        /// <param name="key">Key in the dictionary to remove</param>
        public bool RemoveBucketDB(string key)
        {
            return ItemsDB.RemoveBucket(key);
        }

        /// <summary>
        /// Remove the indexed instance of a value from the file dictionary if it exists
        /// </summary>
        /// <param name="key">Key in the dictionary to remove from</param>
        /// <param name="value">Value to remove from the dictionary</param>
        /// <param name="index">Index of the item to be removed</param>
        public bool RemoveItem(string key, DatItem value, int index)
        {
            return Items.RemoveItem(key, value, index);
        }

        /// <summary>
        /// Remove an item, returning if it could be removed
        /// </summary>
        public bool RemoveItemDB(long itemIndex)
        {
            return ItemsDB.RemoveItem(itemIndex);
        }

        /// <summary>
        /// Remove a machine, returning if it could be removed
        /// </summary>
        public bool RemoveMachineDB(long machineIndex)
        {
            return ItemsDB.RemoveMachine(machineIndex);
        }

        /// <summary>
        /// Remove a machine, returning if it could be removed
        /// </summary>
        public bool RemoveMachineDB(string machineName)
        {
            return ItemsDB.RemoveMachine(machineName);
        }

        /// <summary>
        /// Reset the internal item dictionary
        /// </summary>
        public void ResetDictionary()
        {
            Items = new ItemDictionary();
            ItemsDB = new ItemDictionaryDB();
        }

        #endregion

        #region Item Dictionary Passthrough - Bucketing

        /// <summary>
        /// Take the arbitrarily bucketed Files Dictionary and convert to one bucketed by a user-defined method
        /// </summary>
        /// <param name="bucketBy">ItemKey enum representing how to bucket the individual items</param>
        /// <param name="lower">True if the key should be lowercased (default), false otherwise</param>
        /// <param name="norename">True if games should only be compared on game and file name, false if system and source are counted</param>
        public void BucketBy(ItemKey bucketBy, bool lower = true, bool norename = true)
        {
            Items.BucketBy(bucketBy, lower, norename);
            //ItemsDB.BucketBy(bucketBy, lower, norename);
        }
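
        // Usage sketch only (not part of the original source; the ItemKey member name below is an
        // assumption): re-bucketing by hash before duplicate checks might look like
        //     datFile.BucketBy(ItemKey.CRC);
        // after which GetItemsForBucket(<crc value>) returns the items sharing that key.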

        /// <summary>
        /// Perform deduplication based on the deduplication type provided
        /// </summary>
        public void Deduplicate()
        {
            Items.Deduplicate();
            ItemsDB.Deduplicate();
        }

        /// <summary>
        /// List all duplicates found in a DAT based on a DatItem
        /// </summary>
        /// <param name="datItem">Item to try to match</param>
        /// <param name="sorted">True if the DAT is already sorted accordingly, false otherwise (default)</param>
        /// <returns>List of matched DatItem objects</returns>
        public List<DatItem> GetDuplicates(DatItem datItem, bool sorted = false)
            => Items.GetDuplicates(datItem, sorted);

        /// <summary>
        /// List all duplicates found in a DAT based on a DatItem
        /// </summary>
        /// <param name="datItem">Item to try to match</param>
        /// <param name="sorted">True if the DAT is already sorted accordingly, false otherwise (default)</param>
        /// <returns>List of matched DatItem objects</returns>
        public Dictionary<long, DatItem> GetDuplicatesDB(KeyValuePair<long, DatItem> datItem, bool sorted = false)
            => ItemsDB.GetDuplicates(datItem, sorted);

        /// <summary>
        /// Check if a DAT contains the given DatItem
        /// </summary>
        /// <param name="datItem">Item to try to match</param>
        /// <param name="sorted">True if the DAT is already sorted accordingly, false otherwise (default)</param>
        /// <returns>True if it contains the rom, false otherwise</returns>
        public bool HasDuplicates(DatItem datItem, bool sorted = false)
            => Items.HasDuplicates(datItem, sorted);

        /// <summary>
        /// Check if a DAT contains the given DatItem
        /// </summary>
        /// <param name="datItem">Item to try to match</param>
        /// <param name="sorted">True if the DAT is already sorted accordingly, false otherwise (default)</param>
        /// <returns>True if it contains the rom, false otherwise</returns>
        public bool HasDuplicates(KeyValuePair<long, DatItem> datItem, bool sorted = false)
            => ItemsDB.HasDuplicates(datItem, sorted);

        #endregion

        #region Item Dictionary Passthrough - Statistics

        /// <summary>
        /// Recalculate the statistics for the Dat
        /// </summary>
        public void RecalculateStats()
        {
            Items.RecalculateStats();
            ItemsDB.RecalculateStats();
        }

        #endregion

        #region Parsing

        /// <summary>
        /// Parse DatFile and return all found games and roms within
        /// </summary>
        /// <param name="filename">Name of the file to be parsed</param>
        /// <param name="indexId">Index ID for the DAT</param>
        /// <param name="keep">True if full pathnames are to be kept, false otherwise</param>
        /// <param name="statsOnly">True to only add item statistics while parsing, false otherwise</param>
        /// <param name="filterRunner">Optional FilterRunner to filter items on parse</param>
        /// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
        public abstract void ParseFile(string filename,
            int indexId,
            bool keep,
            bool statsOnly = false,
            FilterRunner? filterRunner = null,
            bool throwOnError = false);

        #endregion

        #region Writing

        /// <summary>
        /// Create and open an output file for writing direct from a dictionary
        /// </summary>
        /// <param name="outfile">Name of the file to write to</param>
        /// <param name="ignoreblanks">True if blank roms should be skipped on output, false otherwise (default)</param>
        /// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
        /// <returns>True if the DAT was written correctly, false otherwise</returns>
        public abstract bool WriteToFile(string outfile, bool ignoreblanks = false, bool throwOnError = false);

        /// <summary>
        /// Create and open an output file for writing direct from a dictionary
        /// </summary>
        /// <param name="outfile">Name of the file to write to</param>
        /// <param name="ignoreblanks">True if blank roms should be skipped on output, false otherwise (default)</param>
        /// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
        /// <returns>True if the DAT was written correctly, false otherwise</returns>
        public abstract bool WriteToFileDB(string outfile, bool ignoreblanks = false, bool throwOnError = false);

        /// <summary>
        /// Process an item and correctly set the item name
        /// </summary>
        /// <param name="item">DatItem to update</param>
        /// <param name="machine">Machine associated with the item, if any</param>
        /// <param name="forceRemoveQuotes">True if the Quotes flag should be ignored, false otherwise</param>
        /// <param name="forceRomName">True if the UseRomName should be always on, false otherwise</param>
        /// <remarks>
        /// There are some unique interactions that can occur because of the large number of effective
        /// inputs into this method.
        /// - If both a replacement extension is set and the remove extension flag is enabled,
        ///   the replacement extension will be overridden by the remove extension flag.
        /// - Extension addition, removal, and replacement are not done at all if the output
        ///   depot is specified. Only prefix and postfix logic is applied.
        /// - Both methods of using the item name are overridden if the output depot is specified.
        ///   Instead, the name is always set based on the SHA-1 hash.
        /// </remarks>
        protected internal void ProcessItemName(DatItem item, Machine? machine, bool forceRemoveQuotes, bool forceRomName)
        {
            // Get the relevant processing values
            bool quotes = forceRemoveQuotes ? false : Modifiers.Quotes;
            bool useRomName = forceRomName ? true : Modifiers.UseRomName;

            // Create the full Prefix
            string pre = Modifiers.Prefix + (quotes ? "\"" : string.Empty);
            pre = FormatPrefixPostfix(item, machine, pre);

            // Create the full Postfix
            string post = (quotes ? "\"" : string.Empty) + Modifiers.Postfix;
            post = FormatPrefixPostfix(item, machine, post);

            // Get the name to update
            string? name = (useRomName
                ? item.GetName()
                : machine?.GetName()) ?? string.Empty;

            // If we're in Depot mode, take care of that instead
            if (Modifiers.OutputDepot?.IsActive == true)
            {
                if (item is Disk disk)
                {
                    // We can only write out if there's a SHA-1
                    string? sha1 = disk.GetStringFieldValue(Models.Metadata.Disk.SHA1Key);
                    if (!string.IsNullOrEmpty(sha1))
                    {
                        name = Utilities.GetDepotPath(sha1, Modifiers.OutputDepot.Depth)?.Replace('\\', '/');
                        item.SetName($"{pre}{name}{post}");
                    }
                }
                else if (item is DatItems.Formats.File file)
                {
                    // We can only write out if there's a SHA-1
                    string? sha1 = file.SHA1;
                    if (!string.IsNullOrEmpty(sha1))
                    {
                        name = Utilities.GetDepotPath(sha1, Modifiers.OutputDepot.Depth)?.Replace('\\', '/');
                        item.SetName($"{pre}{name}{post}");
                    }
                }
                else if (item is Media media)
                {
                    // We can only write out if there's a SHA-1
                    string? sha1 = media.GetStringFieldValue(Models.Metadata.Media.SHA1Key);
                    if (!string.IsNullOrEmpty(sha1))
                    {
                        name = Utilities.GetDepotPath(sha1, Modifiers.OutputDepot.Depth)?.Replace('\\', '/');
                        item.SetName($"{pre}{name}{post}");
                    }
                }
                else if (item is Rom rom)
                {
                    // We can only write out if there's a SHA-1
                    string? sha1 = rom.GetStringFieldValue(Models.Metadata.Rom.SHA1Key);
                    if (!string.IsNullOrEmpty(sha1))
                    {
                        name = Utilities.GetDepotPath(sha1, Modifiers.OutputDepot.Depth)?.Replace('\\', '/');
                        item.SetName($"{pre}{name}{post}");
                    }
                }

                return;
            }

            if (!string.IsNullOrEmpty(Modifiers.ReplaceExtension) || Modifiers.RemoveExtension)
            {
                if (Modifiers.RemoveExtension)
                    Modifiers.ReplaceExtension = string.Empty;

                string? dir = Path.GetDirectoryName(name);
                if (dir != null)
                {
                    dir = dir.TrimStart(Path.DirectorySeparatorChar);
                    name = Path.Combine(dir, Path.GetFileNameWithoutExtension(name) + Modifiers.ReplaceExtension);
                }
            }

            if (!string.IsNullOrEmpty(Modifiers.AddExtension))
                name += Modifiers.AddExtension;

            if (useRomName && Modifiers.GameName)
                name = Path.Combine(machine?.GetName() ?? string.Empty, name);

            // Now assign back the formatted name
            name = $"{pre}{name}{post}";
            if (useRomName)
                item.SetName(name);
            else
                machine?.SetName(name);
        }
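
        // Illustrative note only (not part of the original source): in depot mode the name is
        // rebuilt from the SHA-1 via Utilities.GetDepotPath, so for a Rom whose SHA-1 is
        // "da39a3ee..." the written name becomes the forward-slash depot path for that hash
        // (wrapped in any prefix/postfix), rather than the original rom or machine name.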

        /// <summary>
        /// Format a prefix or postfix string
        /// </summary>
        /// <param name="item">DatItem to create a prefix/postfix for</param>
        /// <param name="machine">Machine to get information from</param>
        /// <param name="fix">Prefix or postfix pattern to populate</param>
        /// <returns>Sanitized string representing the postfix or prefix</returns>
        protected internal static string FormatPrefixPostfix(DatItem item, Machine? machine, string fix)
        {
            // Initialize strings
            string? type = item.GetStringFieldValue(Models.Metadata.DatItem.TypeKey);
            string
                game = machine?.GetName() ?? string.Empty,
                manufacturer = machine?.GetStringFieldValue(Models.Metadata.Machine.ManufacturerKey) ?? string.Empty,
                publisher = machine?.GetStringFieldValue(Models.Metadata.Machine.PublisherKey) ?? string.Empty,
                category = machine?.GetStringFieldValue(Models.Metadata.Machine.CategoryKey) ?? string.Empty,
                name = item.GetName() ?? type.AsItemType().AsStringValue() ?? string.Empty,
                crc = string.Empty,
                md2 = string.Empty,
                md4 = string.Empty,
                md5 = string.Empty,
                sha1 = string.Empty,
                sha256 = string.Empty,
                sha384 = string.Empty,
                sha512 = string.Empty,
                size = string.Empty,
                spamsum = string.Empty;

            // Ensure we have the proper values for replacement
            if (item is Disk disk)
            {
                md5 = disk.GetStringFieldValue(Models.Metadata.Disk.MD5Key) ?? string.Empty;
                sha1 = disk.GetStringFieldValue(Models.Metadata.Disk.SHA1Key) ?? string.Empty;
            }
            else if (item is DatItems.Formats.File file)
            {
                name = $"{file.Id}.{file.Extension}";
                size = file.Size.ToString() ?? string.Empty;
                crc = file.CRC ?? string.Empty;
                md5 = file.MD5 ?? string.Empty;
                sha1 = file.SHA1 ?? string.Empty;
                sha256 = file.SHA256 ?? string.Empty;
            }
            else if (item is Media media)
            {
                md5 = media.GetStringFieldValue(Models.Metadata.Media.MD5Key) ?? string.Empty;
                sha1 = media.GetStringFieldValue(Models.Metadata.Media.SHA1Key) ?? string.Empty;
                sha256 = media.GetStringFieldValue(Models.Metadata.Media.SHA256Key) ?? string.Empty;
                spamsum = media.GetStringFieldValue(Models.Metadata.Media.SpamSumKey) ?? string.Empty;
            }
            else if (item is Rom rom)
            {
                crc = rom.GetStringFieldValue(Models.Metadata.Rom.CRCKey) ?? string.Empty;
                md2 = rom.GetStringFieldValue(Models.Metadata.Rom.MD2Key) ?? string.Empty;
                md4 = rom.GetStringFieldValue(Models.Metadata.Rom.MD4Key) ?? string.Empty;
                md5 = rom.GetStringFieldValue(Models.Metadata.Rom.MD5Key) ?? string.Empty;
                sha1 = rom.GetStringFieldValue(Models.Metadata.Rom.SHA1Key) ?? string.Empty;
                sha256 = rom.GetStringFieldValue(Models.Metadata.Rom.SHA256Key) ?? string.Empty;
                sha384 = rom.GetStringFieldValue(Models.Metadata.Rom.SHA384Key) ?? string.Empty;
                sha512 = rom.GetStringFieldValue(Models.Metadata.Rom.SHA512Key) ?? string.Empty;
                size = rom.GetInt64FieldValue(Models.Metadata.Rom.SizeKey).ToString() ?? string.Empty;
                spamsum = rom.GetStringFieldValue(Models.Metadata.Rom.SpamSumKey) ?? string.Empty;
            }

            // Now do bulk replacement where possible
            fix = fix
                .Replace("%game%", game)
                .Replace("%machine%", game)
                .Replace("%name%", name)
                .Replace("%manufacturer%", manufacturer)
                .Replace("%publisher%", publisher)
                .Replace("%category%", category)
                .Replace("%crc%", crc)
                .Replace("%md2%", md2)
                .Replace("%md4%", md4)
                .Replace("%md5%", md5)
                .Replace("%sha1%", sha1)
                .Replace("%sha256%", sha256)
                .Replace("%sha384%", sha384)
                .Replace("%sha512%", sha512)
                .Replace("%size%", size)
                .Replace("%spamsum%", spamsum);

            return fix;
        }
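
        // Usage sketch only (not part of the original source; the field values are made up):
        // with a machine named "examplegame" and a rom "example.bin" whose CRC is "deadbeef",
        // a pattern such as "%game%/%name% [%crc%]" would come back as
        // "examplegame/example.bin [deadbeef]".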

        /// <summary>
        /// Process any DatItems that are "null", usually created from directory population
        /// </summary>
        /// <param name="item">DatItem to check for "null" status</param>
        /// <returns>Cleaned DatItem, if possible</returns>
        protected internal static DatItem ProcessNullifiedItem(DatItem item)
        {
            // If we don't have a Rom, we can ignore it
            if (item is not Rom rom)
                return item;

            // If the item has a size
            if (rom.GetInt64FieldValue(Models.Metadata.Rom.SizeKey) != null)
                return rom;

            // If the item CRC isn't "null"
            if (rom.GetStringFieldValue(Models.Metadata.Rom.CRCKey) != "null")
                return rom;

            // If the Rom has "null" characteristics, ensure all fields are set to their zero-byte equivalents
            rom.SetName(rom.GetName() == "null" ? "-" : rom.GetName());
            rom.SetFieldValue<string?>(Models.Metadata.Rom.SizeKey, Constants.SizeZero.ToString());
            rom.SetFieldValue<string?>(Models.Metadata.Rom.CRCKey,
                rom.GetStringFieldValue(Models.Metadata.Rom.CRCKey) == "null" ? ZeroHash.CRC32Str : null);
            rom.SetFieldValue<string?>(Models.Metadata.Rom.MD2Key,
                rom.GetStringFieldValue(Models.Metadata.Rom.MD2Key) == "null" ? ZeroHash.GetString(HashType.MD2) : null);
            rom.SetFieldValue<string?>(Models.Metadata.Rom.MD4Key,
                rom.GetStringFieldValue(Models.Metadata.Rom.MD4Key) == "null" ? ZeroHash.GetString(HashType.MD4) : null);
            rom.SetFieldValue<string?>(Models.Metadata.Rom.MD5Key,
                rom.GetStringFieldValue(Models.Metadata.Rom.MD5Key) == "null" ? ZeroHash.MD5Str : null);
            rom.SetFieldValue<string?>(Models.Metadata.Rom.SHA1Key,
                rom.GetStringFieldValue(Models.Metadata.Rom.SHA1Key) == "null" ? ZeroHash.SHA1Str : null);
            rom.SetFieldValue<string?>(Models.Metadata.Rom.SHA256Key,
                rom.GetStringFieldValue(Models.Metadata.Rom.SHA256Key) == "null" ? ZeroHash.SHA256Str : null);
            rom.SetFieldValue<string?>(Models.Metadata.Rom.SHA384Key,
                rom.GetStringFieldValue(Models.Metadata.Rom.SHA384Key) == "null" ? ZeroHash.SHA384Str : null);
            rom.SetFieldValue<string?>(Models.Metadata.Rom.SHA512Key,
                rom.GetStringFieldValue(Models.Metadata.Rom.SHA512Key) == "null" ? ZeroHash.SHA512Str : null);
            rom.SetFieldValue<string?>(Models.Metadata.Rom.SpamSumKey,
                rom.GetStringFieldValue(Models.Metadata.Rom.SpamSumKey) == "null" ? ZeroHash.SpamSumStr : null);

            return rom;
        }
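
        // Illustrative note only (not part of the original source): a directory-scan placeholder
        // Rom with name "null", no size, and CRC "null" would come back renamed to "-" with a
        // zero size and the zero-length-input hash constants (ZeroHash.*) filled in, while any
        // hash field that was not literally "null" is cleared.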

        /// <summary>
        /// Return list of required fields missing from a DatItem
        /// </summary>
        /// <returns>List of missing required fields, null or empty if none were found</returns>
        protected internal virtual List<string>? GetMissingRequiredFields(DatItem datItem) => null;

        /// <summary>
        /// Get if a list contains any writable items
        /// </summary>
        /// <param name="datItems">DatItems to check</param>
        /// <returns>True if the list contains at least one writable item, false otherwise</returns>
        /// <remarks>Empty lists are kept by this check</remarks>
        protected internal bool ContainsWritable(List<DatItem> datItems)
        {
            // Empty lists are considered writable
            if (datItems.Count == 0)
                return true;

            foreach (DatItem datItem in datItems)
            {
                ItemType itemType = datItem.GetStringFieldValue(Models.Metadata.DatItem.TypeKey).AsItemType();
                if (Array.Exists(SupportedTypes, t => t == itemType))
                    return true;
            }

            return false;
        }

        /// <summary>
        /// Get unique duplicate suffix on name collision
        /// </summary>
        /// <returns>String representing the suffix</returns>
        protected internal static string GetDuplicateSuffix(DatItem datItem)
        {
            return datItem switch
            {
                Disk diskItem => GetDuplicateSuffix(diskItem),
                DatItems.Formats.File fileItem => GetDuplicateSuffix(fileItem),
                Media mediaItem => GetDuplicateSuffix(mediaItem),
                Rom romItem => GetDuplicateSuffix(romItem),
                _ => "_1",
            };
        }

        /// <summary>
        /// Resolve name duplicates in an arbitrary set of DatItems based on the supplied information
        /// </summary>
        /// <param name="datItems">List of DatItem objects representing the items to be merged</param>
        /// <returns>A List of DatItem objects representing the renamed items</returns>
        protected internal List<DatItem> ResolveNames(List<DatItem> datItems)
        {
            // Ignore empty lists
            if (datItems.Count == 0)
                return [];

            // Create the output list
            List<DatItem> output = [];

            // First we want to make sure the list is in alphabetical order
            Sort(ref datItems, true);

            // Now we want to loop through and check names
            DatItem? lastItem = null;
            string? lastrenamed = null;
            int lastid = 0;
            for (int i = 0; i < datItems.Count; i++)
            {
                DatItem datItem = datItems[i];

                // If we have the first item, we automatically add it
                if (lastItem == null)
                {
                    output.Add(datItem);
                    lastItem = datItem;
                    continue;
                }

                // Get the last item name, if applicable
                string lastItemName = lastItem.GetName()
                    ?? lastItem.GetStringFieldValue(Models.Metadata.DatItem.TypeKey).AsItemType().AsStringValue()
                    ?? string.Empty;

                // Get the current item name, if applicable
                string datItemName = datItem.GetName()
                    ?? datItem.GetStringFieldValue(Models.Metadata.DatItem.TypeKey).AsItemType().AsStringValue()
                    ?? string.Empty;

                // If the current item exactly matches the last item, then we don't add it
#if NET20 || NET35
                if ((Items.GetDuplicateStatus(datItem, lastItem) & DupeType.All) != 0)
#else
                if (Items.GetDuplicateStatus(datItem, lastItem).HasFlag(DupeType.All))
#endif
                {
                    _logger.Verbose($"Exact duplicate found for '{datItemName}'");
                    continue;
                }

                // If the current name matches the previous name, rename the current item
                else if (datItemName == lastItemName)
                {
                    _logger.Verbose($"Name duplicate found for '{datItemName}'");

                    // Get the duplicate suffix
                    datItemName += GetDuplicateSuffix(datItem);
                    lastrenamed ??= datItemName;

                    // If we have a conflict with the last renamed item, do the right thing
                    if (datItemName == lastrenamed)
                    {
                        lastrenamed = datItemName;
                        datItemName += (lastid == 0 ? string.Empty : "_" + lastid);
                        lastid++;
                    }
                    // If we have no conflict, then we want to reset the lastrenamed and id
                    else
                    {
                        lastrenamed = null;
                        lastid = 0;
                    }

                    // Set the item name back to the datItem
                    datItem.SetName(datItemName);

                    output.Add(datItem);
                }

                // Otherwise, we say that we have a valid named file
                else
                {
                    output.Add(datItem);
                    lastItem = datItem;
                    lastrenamed = null;
                    lastid = 0;
                }
            }

            // One last sort to make sure this is ordered
            Sort(ref output, true);

            return output;
        }
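
        // Illustrative note only (not part of the original source): given two different Roms that
        // both resolve to the name "game.bin", the second one is kept but renamed to
        // "game.bin" + GetDuplicateSuffix(...), e.g. "game.bin_<crc>", while an exact duplicate
        // (same content per GetDuplicateStatus) is simply dropped from the output list.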

        /// <summary>
        /// Resolve name duplicates in an arbitrary set of DatItems based on the supplied information
        /// </summary>
        /// <param name="mappings">List of item ID to DatItem mappings representing the items to be merged</param>
        /// <returns>A List of DatItem objects representing the renamed items</returns>
        protected internal List<KeyValuePair<long, DatItem>> ResolveNamesDB(List<KeyValuePair<long, DatItem>> mappings)
        {
            // Ignore empty lists
            if (mappings.Count == 0)
                return [];

            // Create the output list
            List<KeyValuePair<long, DatItem>> output = [];

            // First we want to make sure the list is in alphabetical order
            SortDB(ref mappings, true);

            // Now we want to loop through and check names
            KeyValuePair<long, DatItem>? lastItem = null;
            string? lastrenamed = null;
            int lastid = 0;
            foreach (var datItem in mappings)
            {
                // If we have the first item, we automatically add it
                if (lastItem == null)
                {
                    output.Add(datItem);
                    lastItem = datItem;
                    continue;
                }

                // Get the last item name, if applicable
                string lastItemName = lastItem.Value.Value.GetName()
                    ?? lastItem.Value.Value.GetStringFieldValue(Models.Metadata.DatItem.TypeKey).AsItemType().AsStringValue()
                    ?? string.Empty;

                // Get the current item name, if applicable
                string datItemName = datItem.Value.GetName()
                    ?? datItem.Value.GetStringFieldValue(Models.Metadata.DatItem.TypeKey).AsItemType().AsStringValue()
                    ?? string.Empty;

                // Get sources for both items
                var datItemSource = ItemsDB.GetSourceForItem(datItem.Key);
                var lastItemSource = ItemsDB.GetSourceForItem(lastItem.Value.Key);

                // If the current item exactly matches the last item, then we don't add it
#if NET20 || NET35
                if ((ItemsDB.GetDuplicateStatus(datItem, datItemSource.Value, lastItem, lastItemSource.Value) & DupeType.All) != 0)
#else
                if (ItemsDB.GetDuplicateStatus(datItem, datItemSource.Value, lastItem, lastItemSource.Value).HasFlag(DupeType.All))
#endif
                {
                    _logger.Verbose($"Exact duplicate found for '{datItemName}'");
                    continue;
                }

                // If the current name matches the previous name, rename the current item
                else if (datItemName == lastItemName)
                {
                    _logger.Verbose($"Name duplicate found for '{datItemName}'");

                    // Get the duplicate suffix
                    datItemName += GetDuplicateSuffix(datItem.Value);
                    lastrenamed ??= datItemName;

                    // If we have a conflict with the last renamed item, do the right thing
                    if (datItemName == lastrenamed)
                    {
                        lastrenamed = datItemName;
                        datItemName += (lastid == 0 ? string.Empty : "_" + lastid);
                        lastid++;
                    }
                    // If we have no conflict, then we want to reset the lastrenamed and id
                    else
                    {
                        lastrenamed = null;
                        lastid = 0;
                    }

                    // Set the item name back to the datItem
                    datItem.Value.SetName(datItemName);
                    output.Add(datItem);
                }

                // Otherwise, we say that we have a valid named file
                else
                {
                    output.Add(datItem);
                    lastItem = datItem;
                    lastrenamed = null;
                    lastid = 0;
                }
            }

            // One last sort to make sure this is ordered
            SortDB(ref output, true);

            return output;
        }

        /// <summary>
        /// Get if an item should be ignored on write
        /// </summary>
        /// <param name="datItem">DatItem to check</param>
        /// <param name="ignoreBlanks">True if blank roms should be skipped on output, false otherwise</param>
        /// <returns>True if the item should be skipped on write, false otherwise</returns>
        protected internal bool ShouldIgnore(DatItem? datItem, bool ignoreBlanks)
        {
            // If this is invoked with a null DatItem, we ignore
            if (datItem == null)
            {
                _logger.Verbose("Item was skipped because it was null");
                return true;
            }

            // If the item is supposed to be removed, we ignore
            if (datItem.GetBoolFieldValue(DatItem.RemoveKey) == true)
            {
                string itemString = JsonConvert.SerializeObject(datItem, Formatting.None);
                _logger.Verbose($"Item '{itemString}' was skipped because it was marked for removal");
                return true;
            }

            // If we have the Blank dat item, we ignore
            if (datItem is Blank)
            {
                string itemString = JsonConvert.SerializeObject(datItem, Formatting.None);
                _logger.Verbose($"Item '{itemString}' was skipped because it was of type 'Blank'");
                return true;
            }

            // If we're ignoring blanks and we have a Rom
            if (ignoreBlanks && datItem is Rom rom)
            {
                // If we have a 0-size or blank rom, then we ignore
                long? size = rom.GetInt64FieldValue(Models.Metadata.Rom.SizeKey);
                if (size == 0 || size == null)
                {
                    string itemString = JsonConvert.SerializeObject(datItem, Formatting.None);
                    _logger.Verbose($"Item '{itemString}' was skipped because it had an invalid size");
                    return true;
                }
            }

            // If we have an item type not in the list of supported values
            ItemType itemType = datItem.GetStringFieldValue(Models.Metadata.DatItem.TypeKey).AsItemType();
            if (!Array.Exists(SupportedTypes, t => t == itemType))
            {
                string itemString = JsonConvert.SerializeObject(datItem, Formatting.None);
                _logger.Verbose($"Item '{itemString}' was skipped because it was not supported for output");
                return true;
            }

            // If we have an item with missing required fields
            List<string>? missingFields = GetMissingRequiredFields(datItem);
            if (missingFields != null && missingFields.Count != 0)
            {
                string itemString = JsonConvert.SerializeObject(datItem, Formatting.None);
#if NET20 || NET35
                _logger.Verbose($"Item '{itemString}' was skipped because it was missing required fields: {string.Join(", ", [.. missingFields])}");
#else
                _logger.Verbose($"Item '{itemString}' was skipped because it was missing required fields: {string.Join(", ", missingFields)}");
#endif

                return true;
            }

            return false;
        }

        /// <summary>
        /// Get unique duplicate suffix on name collision
        /// </summary>
        private static string GetDuplicateSuffix(Disk datItem)
        {
            string? md5 = datItem.GetStringFieldValue(Models.Metadata.Disk.MD5Key);
            if (!string.IsNullOrEmpty(md5))
                return $"_{md5}";

            string? sha1 = datItem.GetStringFieldValue(Models.Metadata.Disk.SHA1Key);
            if (!string.IsNullOrEmpty(sha1))
                return $"_{sha1}";

            return "_1";
        }

        /// <summary>
        /// Get unique duplicate suffix on name collision
        /// </summary>
        /// <returns>String representing the suffix</returns>
        private static string GetDuplicateSuffix(DatItems.Formats.File datItem)
        {
            if (!string.IsNullOrEmpty(datItem.CRC))
                return $"_{datItem.CRC}";
            else if (!string.IsNullOrEmpty(datItem.MD5))
                return $"_{datItem.MD5}";
            else if (!string.IsNullOrEmpty(datItem.SHA1))
                return $"_{datItem.SHA1}";
            else if (!string.IsNullOrEmpty(datItem.SHA256))
                return $"_{datItem.SHA256}";
            else
                return "_1";
        }

        /// <summary>
        /// Get unique duplicate suffix on name collision
        /// </summary>
        private static string GetDuplicateSuffix(Media datItem)
        {
            string? md5 = datItem.GetStringFieldValue(Models.Metadata.Media.MD5Key);
            if (!string.IsNullOrEmpty(md5))
                return $"_{md5}";

            string? sha1 = datItem.GetStringFieldValue(Models.Metadata.Media.SHA1Key);
            if (!string.IsNullOrEmpty(sha1))
                return $"_{sha1}";

            string? sha256 = datItem.GetStringFieldValue(Models.Metadata.Media.SHA256Key);
            if (!string.IsNullOrEmpty(sha256))
                return $"_{sha256}";

            string? spamSum = datItem.GetStringFieldValue(Models.Metadata.Media.SpamSumKey);
            if (!string.IsNullOrEmpty(spamSum))
                return $"_{spamSum}";

            return "_1";
        }

        /// <summary>
        /// Get unique duplicate suffix on name collision
        /// </summary>
        private static string GetDuplicateSuffix(Rom datItem)
        {
            string? crc = datItem.GetStringFieldValue(Models.Metadata.Rom.CRCKey);
            if (!string.IsNullOrEmpty(crc))
                return $"_{crc}";

            string? md2 = datItem.GetStringFieldValue(Models.Metadata.Rom.MD2Key);
            if (!string.IsNullOrEmpty(md2))
                return $"_{md2}";

            string? md4 = datItem.GetStringFieldValue(Models.Metadata.Rom.MD4Key);
            if (!string.IsNullOrEmpty(md4))
                return $"_{md4}";

            string? md5 = datItem.GetStringFieldValue(Models.Metadata.Rom.MD5Key);
            if (!string.IsNullOrEmpty(md5))
                return $"_{md5}";

            string? sha1 = datItem.GetStringFieldValue(Models.Metadata.Rom.SHA1Key);
            if (!string.IsNullOrEmpty(sha1))
                return $"_{sha1}";

            string? sha256 = datItem.GetStringFieldValue(Models.Metadata.Rom.SHA256Key);
            if (!string.IsNullOrEmpty(sha256))
                return $"_{sha256}";

            string? sha384 = datItem.GetStringFieldValue(Models.Metadata.Rom.SHA384Key);
            if (!string.IsNullOrEmpty(sha384))
                return $"_{sha384}";

            string? sha512 = datItem.GetStringFieldValue(Models.Metadata.Rom.SHA512Key);
            if (!string.IsNullOrEmpty(sha512))
                return $"_{sha512}";

            string? spamSum = datItem.GetStringFieldValue(Models.Metadata.Rom.SpamSumKey);
            if (!string.IsNullOrEmpty(spamSum))
                return $"_{spamSum}";

            return "_1";
        }

        /// <summary>
        /// Sort a list of DatItem objects by SourceID, Game, and Name (in order)
        /// </summary>
        /// <param name="items">List of DatItem objects representing the items to be sorted</param>
        /// <param name="norename">True if files are not renamed, false otherwise</param>
        /// <returns>True if it sorted correctly, false otherwise</returns>
        private static bool Sort(ref List<DatItem> items, bool norename)
        {
            // Create the comparer external to the delegate
            var nc = new NaturalComparer();

            items.Sort(delegate (DatItem x, DatItem y)
            {
                try
                {
                    // Compare on source if renaming
                    if (!norename)
                    {
                        int xSourceIndex = x.GetFieldValue<Source?>(DatItem.SourceKey)?.Index ?? 0;
                        int ySourceIndex = y.GetFieldValue<Source?>(DatItem.SourceKey)?.Index ?? 0;
                        if (xSourceIndex != ySourceIndex)
                            return xSourceIndex - ySourceIndex;
                    }

                    // If machine names don't match
                    string? xMachineName = x.GetMachine()?.GetName();
                    string? yMachineName = y.GetMachine()?.GetName();
                    if (xMachineName != yMachineName)
                        return nc.Compare(xMachineName, yMachineName);

                    // If types don't match
                    string? xType = x.GetStringFieldValue(Models.Metadata.DatItem.TypeKey);
                    string? yType = y.GetStringFieldValue(Models.Metadata.DatItem.TypeKey);
                    if (xType != yType)
                        return xType.AsItemType() - yType.AsItemType();

                    // If directory names don't match
                    string? xDirectoryName = Path.GetDirectoryName(TextHelper.RemovePathUnsafeCharacters(x.GetName() ?? string.Empty));
                    string? yDirectoryName = Path.GetDirectoryName(TextHelper.RemovePathUnsafeCharacters(y.GetName() ?? string.Empty));
                    if (xDirectoryName != yDirectoryName)
                        return nc.Compare(xDirectoryName, yDirectoryName);

                    // If item names don't match
                    string? xName = Path.GetFileName(TextHelper.RemovePathUnsafeCharacters(x.GetName() ?? string.Empty));
                    string? yName = Path.GetFileName(TextHelper.RemovePathUnsafeCharacters(y.GetName() ?? string.Empty));
                    return nc.Compare(xName, yName);
                }
                catch
                {
                    // Absorb the error
                    return 0;
                }
            });

            return true;
        }

        /// <summary>
        /// Sort a list of DatItem objects by SourceID, Game, and Name (in order)
        /// </summary>
        /// <param name="mappings">List of item ID to DatItem mappings representing the items to be sorted</param>
        /// <param name="norename">True if files are not renamed, false otherwise</param>
        /// <returns>True if it sorted correctly, false otherwise</returns>
        private bool SortDB(ref List<KeyValuePair<long, DatItem>> mappings, bool norename)
        {
            // Create the comparer external to the delegate
            var nc = new NaturalComparer();

            mappings.Sort(delegate (KeyValuePair<long, DatItem> x, KeyValuePair<long, DatItem> y)
            {
                try
                {
                    // Compare on source if renaming
                    if (!norename)
                    {
                        int xSourceIndex = ItemsDB.GetSourceForItem(x.Key).Value?.Index ?? 0;
                        int ySourceIndex = ItemsDB.GetSourceForItem(y.Key).Value?.Index ?? 0;
                        if (xSourceIndex != ySourceIndex)
                            return xSourceIndex - ySourceIndex;
                    }

                    // If machine names don't match
                    string? xMachineName = ItemsDB.GetMachineForItem(x.Key).Value?.GetName();
                    string? yMachineName = ItemsDB.GetMachineForItem(y.Key).Value?.GetName();
                    if (xMachineName != yMachineName)
                        return nc.Compare(xMachineName, yMachineName);

                    // If types don't match
                    string? xType = x.Value.GetStringFieldValue(Models.Metadata.DatItem.TypeKey);
                    string? yType = y.Value.GetStringFieldValue(Models.Metadata.DatItem.TypeKey);
                    if (xType != yType)
                        return xType.AsItemType() - yType.AsItemType();

                    // If directory names don't match
                    string? xDirectoryName = Path.GetDirectoryName(TextHelper.RemovePathUnsafeCharacters(x.Value.GetName() ?? string.Empty));
                    string? yDirectoryName = Path.GetDirectoryName(TextHelper.RemovePathUnsafeCharacters(y.Value.GetName() ?? string.Empty));
                    if (xDirectoryName != yDirectoryName)
                        return nc.Compare(xDirectoryName, yDirectoryName);

                    // If item names don't match
                    string? xName = Path.GetFileName(TextHelper.RemovePathUnsafeCharacters(x.Value.GetName() ?? string.Empty));
                    string? yName = Path.GetFileName(TextHelper.RemovePathUnsafeCharacters(y.Value.GetName() ?? string.Empty));
                    return nc.Compare(xName, yName);
                }
                catch
                {
                    // Absorb the error
                    return 0;
                }
            });

            return true;
        }

        #endregion
    }
}