using System;
using System.Collections.Generic;
using System.IO;
using System.Xml.Serialization;
using Newtonsoft.Json;
using SabreTools.Core.Filter;
using SabreTools.Core.Tools;
using SabreTools.DatItems;
using SabreTools.DatItems.Formats;
using SabreTools.Hashing;
using SabreTools.IO.Logging;
namespace SabreTools.DatFiles
{
/// <summary>
/// Represents a format-agnostic DAT
/// </summary>
[JsonObject("datfile"), XmlRoot("datfile")]
public abstract partial class DatFile
{
#region Fields

/// <summary>
/// Header values
/// </summary>
[JsonProperty("header"), XmlElement("header")]
public DatHeader Header { get; private set; } = new DatHeader();

/// <summary>
/// DatItems and related statistics
/// </summary>
[JsonProperty("items"), XmlElement("items")]
public ItemDictionary Items { get; private set; } = [];

/// <summary>
/// Database-backed DatItems and related statistics
/// </summary>
// NOTE(review): this property shares the serialized name "items" with Items above;
// two members with the same JSON/XML element name will conflict on serialization — confirm intent
[JsonProperty("items"), XmlElement("items")]
public ItemDictionaryDB ItemsDB { get; private set; } = new ItemDictionaryDB();

/// <summary>
/// DAT statistics, currently sourced from the Items dictionary
/// </summary>
[JsonIgnore, XmlIgnore]
public DatStatistics DatStatistics => Items.DatStatistics;
// Alternative DB-backed source for the statistics, currently disabled:
//public DatStatistics DatStatistics => ItemsDB.DatStatistics;

/// <summary>
/// List of supported types for writing; supplied by each concrete format
/// </summary>
public abstract ItemType[] SupportedTypes { get; }

#endregion

#region Logging

/// <summary>
/// Logging object
/// </summary>
[JsonIgnore, XmlIgnore]
protected Logger _logger;

#endregion
#region Constructors
/// <summary>
/// Create a new DatFile, optionally copying values from an existing one
/// </summary>
/// <param name="datFile">DatFile to get the values from, if any</param>
public DatFile(DatFile? datFile)
{
    _logger = new Logger(this);

    // Nothing to copy from a null source
    if (datFile == null)
        return;

    // The header is cloned; the item collections are shared by reference
    Header = (DatHeader)datFile.Header.Clone();
    Items = datFile.Items;
    ItemsDB = datFile.ItemsDB;
}
/// <summary>
/// Fill the header values based on existing Header and path
/// </summary>
/// <param name="path">Path used for creating a name, if necessary</param>
/// <param name="bare">True if the date should be omitted from name and description, false otherwise</param>
public void FillHeaderFromPath(string path, bool bare)
{
    // Get the header strings
    string? name = Header.GetStringFieldValue(Models.Metadata.Header.NameKey);
    string? description = Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey);
    string? date = Header.GetStringFieldValue(Models.Metadata.Header.DateKey);

    // If the description is defined but not the name, set the name from the description
    if (string.IsNullOrEmpty(name) && !string.IsNullOrEmpty(description))
    {
        name = description + (bare ? string.Empty : $" ({date})");
    }

    // If the name is defined but not the description, set the description from the name
    else if (!string.IsNullOrEmpty(name) && string.IsNullOrEmpty(description))
    {
        description = name + (bare ? string.Empty : $" ({date})");
    }

    // If neither the name or description are defined, derive both from the last path segment
    else if (string.IsNullOrEmpty(name) && string.IsNullOrEmpty(description))
    {
        string[] splitpath = path.TrimEnd(Path.DirectorySeparatorChar).Split(Path.DirectorySeparatorChar);
#if NETFRAMEWORK
        // BUGFIX: the .NET Framework branch previously did not append the date
        // suffix to the name, unlike the modern branch below; the two branches
        // now produce identical output
        name = splitpath[splitpath.Length - 1] + (bare ? string.Empty : $" ({date})");
        description = splitpath[splitpath.Length - 1] + (bare ? string.Empty : $" ({date})");
#else
        name = splitpath[^1] + (bare ? string.Empty : $" ({date})");
        description = splitpath[^1] + (bare ? string.Empty : $" ({date})");
#endif
    }

    // Trim both fields
    name = name?.Trim();
    description = description?.Trim();

    // Set the fields back
    Header.SetFieldValue(Models.Metadata.Header.NameKey, name);
    Header.SetFieldValue(Models.Metadata.Header.DescriptionKey, description);
}
#endregion
#region Accessors
/// <summary>
/// Set the internal header
/// </summary>
/// <param name="datHeader">Replacement header to be used</param>
// TODO: Figure out why clone loses data here; the header is assigned
// by reference instead of being cloned as a workaround
public void SetHeader(DatHeader datHeader)
    => Header = datHeader;
#endregion
#region Item Dictionary Passthrough - Accessors
/// <summary>
/// Add a value to the file dictionary
/// </summary>
/// <param name="key">Key in the dictionary to add to</param>
/// <param name="value">Value to add to the dictionary</param>
public void Add(string key, DatItem value)
    => Items.Add(key, value);
/// <summary>
/// Add a range of values to the file dictionary
/// </summary>
/// <param name="key">Key in the dictionary to add to</param>
/// <param name="value">Values to add to the dictionary; may be null</param>
// NOTE(review): restored the generic type argument that appears stripped in
// this view of the source ("List?") — confirm against the original file
public void Add(string key, List<DatItem>? value)
{
    Items.Add(key, value);
}
/// <summary>
/// Add a DatItem to the dictionary after checking
/// </summary>
/// <param name="item">Item data to check against</param>
/// <param name="statsOnly">True to only add item statistics while parsing, false otherwise</param>
/// <returns>The key for the item</returns>
public string AddItem(DatItem item, bool statsOnly)
    => Items.AddItem(item, statsOnly);
/// <summary>
/// Add a DatItem to the dictionary after validation
/// </summary>
/// <param name="item">Item data to validate</param>
/// <param name="machineIndex">Index of the machine related to the item</param>
/// <param name="sourceIndex">Index of the source related to the item</param>
/// <param name="statsOnly">True to only add item statistics while parsing, false otherwise</param>
/// <returns>The index for the added item, -1 on error</returns>
public long AddItemDB(DatItem item, long machineIndex, long sourceIndex, bool statsOnly)
    => ItemsDB.AddItem(item, machineIndex, sourceIndex, statsOnly);
/// <summary>
/// Add a machine, returning the insert index
/// </summary>
/// <param name="machine">Machine to add to the database-backed dictionary</param>
/// <returns>The index the machine was inserted at</returns>
public long AddMachineDB(Machine machine)
    => ItemsDB.AddMachine(machine);
/// <summary>
/// Add a source, returning the insert index
/// </summary>
/// <param name="source">Source to add to the database-backed dictionary</param>
/// <returns>The index the source was inserted at</returns>
public long AddSourceDB(Source source)
    => ItemsDB.AddSource(source);
/// <summary>
/// Remove any keys that have null or empty values
/// </summary>
public void ClearEmpty()
{
    // Keep both item stores in sync
    Items.ClearEmpty();
    ItemsDB.ClearEmpty();
}
/// <summary>
/// Remove all items marked for removal
/// </summary>
public void ClearMarked()
{
    // Keep both item stores in sync
    Items.ClearMarked();
    ItemsDB.ClearMarked();
}
/// <summary>
/// Get the items associated with a bucket name
/// </summary>
/// <param name="bucketName">Name of the bucket to retrieve items for</param>
/// <param name="filter">True to filter the returned items, false otherwise</param>
/// <returns>List of items in the named bucket</returns>
// NOTE(review): restored the generic type argument that appears stripped in
// this view of the source ("List") — confirm against the original file
public List<DatItem> GetItemsForBucket(string? bucketName, bool filter = false)
    => Items.GetItemsForBucket(bucketName, filter);
/// <summary>
/// Get the indices and items associated with a bucket name
/// </summary>
/// <param name="bucketName">Name of the bucket to retrieve items for</param>
/// <param name="filter">True to filter the returned items, false otherwise</param>
/// <returns>Mapping of item index to item for the named bucket</returns>
// NOTE(review): restored the generic type arguments that appear stripped in
// this view of the source ("Dictionary") — confirm against the original file
public Dictionary<long, DatItem> GetItemsForBucketDB(string? bucketName, bool filter = false)
    => ItemsDB.GetItemsForBucket(bucketName, filter);
/// <summary>
/// Get all machines and their indices
/// </summary>
/// <returns>Mapping of machine index to Machine</returns>
// NOTE(review): restored the generic type arguments that appear stripped in
// this view of the source ("IDictionary") — confirm against the original file
public IDictionary<long, Machine> GetMachinesDB()
    => ItemsDB.GetMachines();
/// <summary>
/// Remove a key from the file dictionary if it exists
/// </summary>
/// <param name="key">Key in the dictionary to remove</param>
/// <returns>True if the key was removed, false otherwise</returns>
public bool Remove(string key)
    => Items.Remove(key);
/// <summary>
/// Reset the internal item dictionary
/// </summary>
public void ResetDictionary()
{
    // The standard dictionary is cleared in place; the DB-backed
    // dictionary is replaced wholesale with a fresh instance
    Items.Clear();
    ItemsDB = new ItemDictionaryDB();
}
#endregion
#region Item Dictionary Passthrough - Bucketing
/// <summary>
/// Take the arbitrarily bucketed Files Dictionary and convert to one bucketed by a user-defined method
/// </summary>
/// <param name="bucketBy">ItemKey enum representing how to bucket the individual items</param>
/// <param name="dedupeType">Dedupe type that should be used</param>
/// <param name="lower">True if the key should be lowercased (default), false otherwise</param>
/// <param name="norename">True if games should only be compared on game and file name, false if system and source are counted</param>
public void BucketBy(ItemKey bucketBy, DedupeType dedupeType, bool lower = true, bool norename = true)
{
    // Rebucket both item stores with the same settings
    Items.BucketBy(bucketBy, dedupeType, lower, norename);
    ItemsDB.BucketBy(bucketBy, dedupeType, lower, norename);
}
/// <summary>
/// List all duplicates found in a DAT based on a DatItem
/// </summary>
/// <param name="datItem">Item to try to match</param>
/// <param name="sorted">True if the DAT is already sorted accordingly, false otherwise (default)</param>
/// <returns>List of matched DatItem objects</returns>
// NOTE(review): restored the generic type argument that appears stripped in
// this view of the source ("List") — confirm against the original file
public List<DatItem> GetDuplicates(DatItem datItem, bool sorted = false)
    => Items.GetDuplicates(datItem, sorted);
/// <summary>
/// List all duplicates found in a DAT based on a DatItem
/// </summary>
/// <param name="datItem">Item to try to match</param>
/// <param name="sorted">True if the DAT is already sorted accordingly, false otherwise (default)</param>
/// <returns>Mapping of item index to matched DatItem objects</returns>
// NOTE(review): restored the generic type arguments that appear stripped in
// this view of the source ("Dictionary") — confirm against the original file
public Dictionary<long, DatItem> GetDuplicatesDB(DatItem datItem, bool sorted = false)
    => ItemsDB.GetDuplicates(datItem, sorted);
/// <summary>
/// List all duplicates found in a DAT based on an indexed DatItem
/// </summary>
/// <param name="datItem">Index and item to try to match</param>
/// <param name="sorted">True if the DAT is already sorted accordingly, false otherwise (default)</param>
/// <returns>Mapping of item index to matched DatItem objects</returns>
// NOTE(review): restored the generic type arguments that appear stripped in
// this view of the source ("KeyValuePair"/"Dictionary") — confirm against the original file
public Dictionary<long, DatItem> GetDuplicatesDB(KeyValuePair<long, DatItem> datItem, bool sorted = false)
    => ItemsDB.GetDuplicates(datItem, sorted);
/// <summary>
/// Check if a DAT contains the given DatItem
/// </summary>
/// <param name="datItem">Item to try to match</param>
/// <param name="sorted">True if the DAT is already sorted accordingly, false otherwise (default)</param>
/// <returns>True if it contains the rom, false otherwise</returns>
public bool HasDuplicates(DatItem datItem, bool sorted = false)
{
    return Items.HasDuplicates(datItem, sorted);
}
/// <summary>
/// Check if a DAT contains the given indexed DatItem
/// </summary>
/// <param name="datItem">Index and item to try to match</param>
/// <param name="sorted">True if the DAT is already sorted accordingly, false otherwise (default)</param>
/// <returns>True if it contains the rom, false otherwise</returns>
// NOTE(review): restored the generic type arguments that appear stripped in
// this view of the source ("KeyValuePair") — confirm against the original file
public bool HasDuplicates(KeyValuePair<long, DatItem> datItem, bool sorted = false)
    => ItemsDB.HasDuplicates(datItem, sorted);
#endregion
#region Item Dictionary Passthrough - Filtering
/// <summary>
/// Execute all filters in a filter runner on the items in the dictionary
/// </summary>
/// <param name="filterRunner">Preconfigured filter runner to use</param>
public void ExecuteFilters(FilterRunner filterRunner)
{
    // Apply the same filters to both item stores
    Items.ExecuteFilters(filterRunner);
    ItemsDB.ExecuteFilters(filterRunner);
}
#endregion
#region Parsing

/// <summary>
/// Parse DatFile and return all found games and roms within
/// </summary>
/// <param name="filename">Name of the file to be parsed</param>
/// <param name="indexId">Index ID for the DAT</param>
/// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param>
/// <param name="statsOnly">True to only add item statistics while parsing, false otherwise</param>
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
public abstract void ParseFile(string filename, int indexId, bool keep, bool statsOnly = false, bool throwOnError = false);

#endregion
#region Writing

/// <summary>
/// Create and open an output file for writing direct from a dictionary
/// </summary>
/// <param name="outfile">Name of the file to write to</param>
/// <param name="ignoreblanks">True if blank roms should be skipped on output, false otherwise (default)</param>
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
/// <returns>True if the DAT was written correctly, false otherwise</returns>
public abstract bool WriteToFile(string outfile, bool ignoreblanks = false, bool throwOnError = false);

/// <summary>
/// Create and open an output file for writing direct from a dictionary
/// (DB-backed counterpart of WriteToFile, per naming — implemented by subclasses)
/// </summary>
/// <param name="outfile">Name of the file to write to</param>
/// <param name="ignoreblanks">True if blank roms should be skipped on output, false otherwise (default)</param>
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
/// <returns>True if the DAT was written correctly, false otherwise</returns>
public abstract bool WriteToFileDB(string outfile, bool ignoreblanks = false, bool throwOnError = false);
/// <summary>
/// Process an item and correctly set the item name
/// </summary>
/// <param name="item">DatItem to update</param>
/// <param name="machine">Machine to pull the game name from; may be null</param>
/// <param name="forceRemoveQuotes">True if the Quotes flag should be ignored, false otherwise</param>
/// <param name="forceRomName">True if the UseRomName should be always on, false otherwise</param>
protected internal void ProcessItemName(DatItem item, Machine? machine, bool forceRemoveQuotes, bool forceRomName)
{
    // Get the relevant processing values; the force* flags override the header settings
    bool quotes = forceRemoveQuotes ? false : Header.GetBoolFieldValue(DatHeader.QuotesKey) ?? false;
    bool useRomName = forceRomName ? true : Header.GetBoolFieldValue(DatHeader.UseRomNameKey) ?? false;

    // Create the full Prefix (the opening quote, if any, follows the prefix text)
    string pre = Header.GetStringFieldValue(DatHeader.PrefixKey) + (quotes ? "\"" : string.Empty);
    pre = FormatPrefixPostfix(item, machine, pre);

    // Create the full Postfix (the closing quote, if any, precedes the postfix text)
    string post = (quotes ? "\"" : string.Empty) + Header.GetStringFieldValue(DatHeader.PostfixKey);
    post = FormatPrefixPostfix(item, machine, post);

    // Get the name to update: the item name in rom-name mode, the machine name otherwise
    string? name = (useRomName
        ? item.GetName()
        : machine?.GetStringFieldValue(Models.Metadata.Machine.NameKey)) ?? string.Empty;

    // If we're in Depot mode, take care of that instead: the name becomes a
    // SHA-1-derived depot path and all other renaming options are skipped
    var outputDepot = Header.GetFieldValue(DatHeader.OutputDepotKey);
    if (outputDepot?.IsActive == true)
    {
        if (item is Disk disk)
        {
            // We can only write out if there's a SHA-1
            string? sha1 = disk.GetStringFieldValue(Models.Metadata.Disk.SHA1Key);
            if (!string.IsNullOrEmpty(sha1))
            {
                // Forward slashes keep depot paths consistent across platforms
                name = Utilities.GetDepotPath(sha1, outputDepot.Depth)?.Replace('\\', '/');
                item.SetName($"{pre}{name}{post}");
            }
        }
        else if (item is DatItems.Formats.File file)
        {
            // We can only write out if there's a SHA-1
            string? sha1 = file.SHA1;
            if (!string.IsNullOrEmpty(sha1))
            {
                name = Utilities.GetDepotPath(sha1, outputDepot.Depth)?.Replace('\\', '/');
                item.SetName($"{pre}{name}{post}");
            }
        }
        else if (item is Media media)
        {
            // We can only write out if there's a SHA-1
            string? sha1 = media.GetStringFieldValue(Models.Metadata.Media.SHA1Key);
            if (!string.IsNullOrEmpty(sha1))
            {
                name = Utilities.GetDepotPath(sha1, outputDepot.Depth)?.Replace('\\', '/');
                item.SetName($"{pre}{name}{post}");
            }
        }
        else if (item is Rom rom)
        {
            // We can only write out if there's a SHA-1
            string? sha1 = rom.GetStringFieldValue(Models.Metadata.Rom.SHA1Key);
            if (!string.IsNullOrEmpty(sha1))
            {
                name = Utilities.GetDepotPath(sha1, outputDepot.Depth)?.Replace('\\', '/');
                item.SetName($"{pre}{name}{post}");
            }
        }

        // Items of any other type are left untouched in Depot mode
        return;
    }

    // Replace or remove the file extension, if requested
    string? replaceExtension = Header.GetStringFieldValue(DatHeader.ReplaceExtensionKey);
    bool? removeExtension = Header.GetBoolFieldValue(DatHeader.RemoveExtensionKey);
    if (!string.IsNullOrEmpty(replaceExtension) || removeExtension == true)
    {
        // NOTE(review): this mutates the shared header value rather than a local —
        // confirm this side effect is intended
        if (removeExtension == true)
            Header.SetFieldValue(DatHeader.ReplaceExtensionKey, string.Empty);

        string? dir = Path.GetDirectoryName(name);
        if (dir != null)
        {
            dir = dir.TrimStart(Path.DirectorySeparatorChar);
            name = Path.Combine(dir, Path.GetFileNameWithoutExtension(name) + replaceExtension);
        }
    }

    // Append an extra extension, if requested
    string? addExtension = Header.GetStringFieldValue(DatHeader.AddExtensionKey);
    if (!string.IsNullOrEmpty(addExtension))
        name += addExtension;

    // In rom-name mode, optionally prepend the game name as a directory
    if (useRomName && Header.GetBoolFieldValue(DatHeader.GameNameKey) == true)
        name = Path.Combine(machine?.GetStringFieldValue(Models.Metadata.Machine.NameKey) ?? string.Empty, name);

    // Now assign back the formatted name
    name = $"{pre}{name}{post}";
    if (useRomName)
        item.SetName(name);
    else if (machine != null)
        machine.SetFieldValue(Models.Metadata.Machine.NameKey, name);
}
/// <summary>
/// Format a prefix or postfix string
/// </summary>
/// <param name="item">DatItem to create a prefix/postfix for</param>
/// <param name="machine">Machine to get information from; may be null</param>
/// <param name="fix">Prefix or postfix pattern to populate</param>
/// <returns>Sanitized string representing the postfix or prefix</returns>
protected internal static string FormatPrefixPostfix(DatItem item, Machine? machine, string fix)
{
    // Initialize strings; hashes default to empty so unmatched placeholders
    // are replaced with nothing
    // NOTE(review): generic type arguments appear stripped from this view of
    // the source (e.g. "AsEnumValue()"); confirm against the original file
    string? type = item.GetStringFieldValue(Models.Metadata.DatItem.TypeKey);
    string
        game = machine?.GetStringFieldValue(Models.Metadata.Machine.NameKey) ?? string.Empty,
        manufacturer = machine?.GetStringFieldValue(Models.Metadata.Machine.ManufacturerKey) ?? string.Empty,
        publisher = machine?.GetStringFieldValue(Models.Metadata.Machine.PublisherKey) ?? string.Empty,
        category = machine?.GetStringFieldValue(Models.Metadata.Machine.CategoryKey) ?? string.Empty,
        name = item.GetName() ?? type.AsEnumValue().AsStringValue() ?? string.Empty,
        crc = string.Empty,
        md2 = string.Empty,
        md4 = string.Empty,
        md5 = string.Empty,
        sha1 = string.Empty,
        sha256 = string.Empty,
        sha384 = string.Empty,
        sha512 = string.Empty,
        size = string.Empty,
        spamsum = string.Empty;

    // Ensure we have the proper values for replacement, depending on item type
    if (item is Disk disk)
    {
        // Disks only carry MD5 and SHA-1
        md5 = disk.GetStringFieldValue(Models.Metadata.Disk.MD5Key) ?? string.Empty;
        sha1 = disk.GetStringFieldValue(Models.Metadata.Disk.SHA1Key) ?? string.Empty;
    }
    else if (item is DatItems.Formats.File file)
    {
        // Files derive their display name from the Id and Extension
        name = $"{file.Id}.{file.Extension}";
        size = file.Size.ToString() ?? string.Empty;
        crc = file.CRC ?? string.Empty;
        md5 = file.MD5 ?? string.Empty;
        sha1 = file.SHA1 ?? string.Empty;
        sha256 = file.SHA256 ?? string.Empty;
    }
    else if (item is Media media)
    {
        md5 = media.GetStringFieldValue(Models.Metadata.Media.MD5Key) ?? string.Empty;
        sha1 = media.GetStringFieldValue(Models.Metadata.Media.SHA1Key) ?? string.Empty;
        sha256 = media.GetStringFieldValue(Models.Metadata.Media.SHA256Key) ?? string.Empty;
        spamsum = media.GetStringFieldValue(Models.Metadata.Media.SpamSumKey) ?? string.Empty;
    }
    else if (item is Rom rom)
    {
        // Roms carry the full set of hashes plus the size
        crc = rom.GetStringFieldValue(Models.Metadata.Rom.CRCKey) ?? string.Empty;
        md2 = rom.GetStringFieldValue(Models.Metadata.Rom.MD2Key) ?? string.Empty;
        md4 = rom.GetStringFieldValue(Models.Metadata.Rom.MD4Key) ?? string.Empty;
        md5 = rom.GetStringFieldValue(Models.Metadata.Rom.MD5Key) ?? string.Empty;
        sha1 = rom.GetStringFieldValue(Models.Metadata.Rom.SHA1Key) ?? string.Empty;
        sha256 = rom.GetStringFieldValue(Models.Metadata.Rom.SHA256Key) ?? string.Empty;
        sha384 = rom.GetStringFieldValue(Models.Metadata.Rom.SHA384Key) ?? string.Empty;
        sha512 = rom.GetStringFieldValue(Models.Metadata.Rom.SHA512Key) ?? string.Empty;
        size = rom.GetInt64FieldValue(Models.Metadata.Rom.SizeKey).ToString() ?? string.Empty;
        spamsum = rom.GetStringFieldValue(Models.Metadata.Rom.SpamSumKey) ?? string.Empty;
    }

    // Now do bulk replacement where possible; %game% and %machine% are synonyms
    fix = fix
        .Replace("%game%", game)
        .Replace("%machine%", game)
        .Replace("%name%", name)
        .Replace("%manufacturer%", manufacturer)
        .Replace("%publisher%", publisher)
        .Replace("%category%", category)
        .Replace("%crc%", crc)
        .Replace("%md2%", md2)
        .Replace("%md4%", md4)
        .Replace("%md5%", md5)
        .Replace("%sha1%", sha1)
        .Replace("%sha256%", sha256)
        .Replace("%sha384%", sha384)
        .Replace("%sha512%", sha512)
        .Replace("%size%", size)
        .Replace("%spamsum%", spamsum);

    return fix;
}
/// <summary>
/// Process any DatItems that are "null", usually created from directory population
/// </summary>
/// <param name="item">DatItem to check for "null" status</param>
/// <returns>Cleaned DatItem, if possible</returns>
protected internal static DatItem ProcessNullifiedItem(DatItem item)
{
    // If we don't have a Rom, we can ignore it
    if (item is not Rom rom)
        return item;

    // If the item has a size, it is not a "null" placeholder
    if (rom.GetInt64FieldValue(Models.Metadata.Rom.SizeKey) != null)
        return rom;

    // If the item CRC isn't the literal string "null", it is a real entry
    if (rom.GetStringFieldValue(Models.Metadata.Rom.CRCKey) != "null")
        return rom;

    // If the Rom has "null" characteristics, ensure all fields: the name falls
    // back to "-", the size becomes zero, and each hash that was explicitly
    // "null" is replaced by the zero-byte hash value (other hashes are cleared)
    rom.SetName(rom.GetName() == "null" ? "-" : rom.GetName());
    rom.SetFieldValue(Models.Metadata.Rom.SizeKey, Constants.SizeZero.ToString());
    rom.SetFieldValue(Models.Metadata.Rom.CRCKey,
        rom.GetStringFieldValue(Models.Metadata.Rom.CRCKey) == "null" ? ZeroHash.CRC32Str : null);
    rom.SetFieldValue(Models.Metadata.Rom.MD2Key,
        rom.GetStringFieldValue(Models.Metadata.Rom.MD2Key) == "null" ? ZeroHash.GetString(HashType.MD2) : null);
    rom.SetFieldValue(Models.Metadata.Rom.MD4Key,
        rom.GetStringFieldValue(Models.Metadata.Rom.MD4Key) == "null" ? ZeroHash.GetString(HashType.MD4) : null);
    rom.SetFieldValue(Models.Metadata.Rom.MD5Key,
        rom.GetStringFieldValue(Models.Metadata.Rom.MD5Key) == "null" ? ZeroHash.MD5Str : null);
    rom.SetFieldValue(Models.Metadata.Rom.SHA1Key,
        rom.GetStringFieldValue(Models.Metadata.Rom.SHA1Key) == "null" ? ZeroHash.SHA1Str : null);
    rom.SetFieldValue(Models.Metadata.Rom.SHA256Key,
        rom.GetStringFieldValue(Models.Metadata.Rom.SHA256Key) == "null" ? ZeroHash.SHA256Str : null);
    rom.SetFieldValue(Models.Metadata.Rom.SHA384Key,
        rom.GetStringFieldValue(Models.Metadata.Rom.SHA384Key) == "null" ? ZeroHash.SHA384Str : null);
    rom.SetFieldValue(Models.Metadata.Rom.SHA512Key,
        rom.GetStringFieldValue(Models.Metadata.Rom.SHA512Key) == "null" ? ZeroHash.SHA512Str : null);
    rom.SetFieldValue(Models.Metadata.Rom.SpamSumKey,
        rom.GetStringFieldValue(Models.Metadata.Rom.SpamSumKey) == "null" ? ZeroHash.SpamSumStr : null);

    return rom;
}
/// <summary>
/// Return list of required fields missing from a DatItem
/// </summary>
/// <param name="datItem">DatItem to inspect</param>
/// <returns>List of missing required fields, null or empty if none were found</returns>
// NOTE(review): restored the generic type argument that appears stripped in
// this view of the source ("List?") — confirm against the original file
protected internal virtual List<string>? GetMissingRequiredFields(DatItem datItem) => null;
/// <summary>
/// Get if a list contains any writable items
/// </summary>
/// <param name="datItems">DatItems to check</param>
/// <returns>True if the list contains at least one writable item, false otherwise</returns>
/// <remarks>Empty lists are kept with this</remarks>
// NOTE(review): restored the generic type arguments that appear stripped in this
// view of the source ("List", "AsEnumValue()") — confirm against the original file
protected internal bool ContainsWritable(List<DatItem> datItems)
{
    // Empty lists are considered writable
    if (datItems.Count == 0)
        return true;

    // Writable means at least one item whose type this format supports
    foreach (DatItem datItem in datItems)
    {
        ItemType itemType = datItem.GetStringFieldValue(Models.Metadata.DatItem.TypeKey).AsEnumValue<ItemType>();
        if (Array.Exists(SupportedTypes, t => t == itemType))
            return true;
    }

    return false;
}
/// <summary>
/// Resolve name duplicates in an arbitrary set of DatItems based on the supplied information
/// </summary>
/// <param name="datItems">List of DatItem objects representing the items to be merged</param>
/// <returns>A List of DatItem objects representing the renamed items</returns>
/// <remarks>
/// Exact duplicates are dropped; items whose name collides with the previous
/// item get a type-specific suffix, plus a numeric "_N" disambiguator on
/// repeated collisions.
/// NOTE(review): generic type arguments appear stripped from this view of the
/// source (signature reads "List"); confirm against the original file.
/// </remarks>
protected internal List ResolveNames(List datItems)
{
    // Ignore empty lists
    if (datItems.Count == 0)
        return [];

    // Create the output list
    List output = [];

    // First we want to make sure the list is in alphabetical order
    DatFileTool.Sort(ref datItems, true);

    // Now we want to loop through and check names
    DatItem? lastItem = null;
    string? lastrenamed = null;
    int lastid = 0;
    for (int i = 0; i < datItems.Count; i++)
    {
        DatItem datItem = datItems[i];

        // If we have the first item, we automatically add it
        if (lastItem == null)
        {
            output.Add(datItem);
            lastItem = datItem;
            continue;
        }

        // Get the last item name, if applicable; fall back to the item type name
        string lastItemName = lastItem.GetName()
            ?? lastItem.GetStringFieldValue(Models.Metadata.DatItem.TypeKey).AsEnumValue().AsStringValue()
            ?? string.Empty;

        // Get the current item name, if applicable
        string datItemName = datItem.GetName()
            ?? datItem.GetStringFieldValue(Models.Metadata.DatItem.TypeKey).AsEnumValue().AsStringValue()
            ?? string.Empty;

        // If the current item exactly matches the last item, then we don't add it
#if NET20 || NET35
        if ((datItem.GetDuplicateStatus(lastItem) & DupeType.All) != 0)
#else
        if (datItem.GetDuplicateStatus(lastItem).HasFlag(DupeType.All))
#endif
        {
            _logger.Verbose($"Exact duplicate found for '{datItemName}'");
            continue;
        }

        // If the current name matches the previous name, rename the current item
        else if (datItemName == lastItemName)
        {
            _logger.Verbose($"Name duplicate found for '{datItemName}'");

            // Get the duplicate suffix
            datItemName += datItem.GetDuplicateSuffix();
            lastrenamed ??= datItemName;

            // If we have a conflict with the last renamed item, do the right thing
            if (datItemName == lastrenamed)
            {
                lastrenamed = datItemName;
                datItemName += (lastid == 0 ? string.Empty : "_" + lastid);
                lastid++;
            }
            // If we have no conflict, then we want to reset the lastrenamed and id
            else
            {
                lastrenamed = null;
                lastid = 0;
            }

            // Set the item name back to the datItem
            // NOTE(review): lastItem is not updated in this branch, so later items
            // keep being compared to the last non-renamed entry — confirm intended
            datItem.SetName(datItemName);
            output.Add(datItem);
        }

        // Otherwise, we say that we have a valid named file
        else
        {
            output.Add(datItem);
            lastItem = datItem;
            lastrenamed = null;
            lastid = 0;
        }
    }

    // One last sort to make sure this is ordered
    DatFileTool.Sort(ref output, true);
    return output;
}
/// <summary>
/// Resolve name duplicates in an arbitrary set of DatItems based on the supplied information
/// </summary>
/// <param name="mappings">List of item ID to DatItem mappings representing the items to be merged</param>
/// <returns>A List of mappings representing the renamed items</returns>
/// <remarks>
/// DB-backed counterpart of ResolveNames; same algorithm, operating on
/// index-to-item pairs.
/// NOTE(review): generic type arguments appear stripped from this view of the
/// source (signature reads "List>"); confirm against the original file.
/// </remarks>
protected internal List> ResolveNamesDB(List> mappings)
{
    // Ignore empty lists
    if (mappings.Count == 0)
        return [];

    // Create the output dict
    List> output = [];

    // First we want to make sure the list is in alphabetical order
    DatFileTool.SortDB(ref mappings, true);

    // Now we want to loop through and check names
    DatItem? lastItem = null;
    string? lastrenamed = null;
    int lastid = 0;
    foreach (var datItem in mappings)
    {
        // If we have the first item, we automatically add it
        if (lastItem == null)
        {
            output.Add(datItem);
            lastItem = datItem.Value;
            continue;
        }

        // Get the last item name, if applicable; fall back to the item type name
        string lastItemName = lastItem.GetName()
            ?? lastItem.GetStringFieldValue(Models.Metadata.DatItem.TypeKey).AsEnumValue().AsStringValue()
            ?? string.Empty;

        // Get the current item name, if applicable
        string datItemName = datItem.Value.GetName()
            ?? datItem.Value.GetStringFieldValue(Models.Metadata.DatItem.TypeKey).AsEnumValue().AsStringValue()
            ?? string.Empty;

        // If the current item exactly matches the last item, then we don't add it
#if NET20 || NET35
        if ((datItem.Value.GetDuplicateStatus(lastItem) & DupeType.All) != 0)
#else
        if (datItem.Value.GetDuplicateStatus(lastItem).HasFlag(DupeType.All))
#endif
        {
            _logger.Verbose($"Exact duplicate found for '{datItemName}'");
            continue;
        }

        // If the current name matches the previous name, rename the current item
        else if (datItemName == lastItemName)
        {
            _logger.Verbose($"Name duplicate found for '{datItemName}'");

            // Get the duplicate suffix
            datItemName += datItem.Value.GetDuplicateSuffix();
            lastrenamed ??= datItemName;

            // If we have a conflict with the last renamed item, do the right thing
            if (datItemName == lastrenamed)
            {
                lastrenamed = datItemName;
                datItemName += (lastid == 0 ? string.Empty : "_" + lastid);
                lastid++;
            }
            // If we have no conflict, then we want to reset the lastrenamed and id
            else
            {
                lastrenamed = null;
                lastid = 0;
            }

            // Set the item name back to the datItem
            // NOTE(review): lastItem is not updated in this branch, so later items
            // keep being compared to the last non-renamed entry — confirm intended
            datItem.Value.SetName(datItemName);
            output.Add(datItem);
        }

        // Otherwise, we say that we have a valid named file
        else
        {
            output.Add(datItem);
            lastItem = datItem.Value;
            lastrenamed = null;
            lastid = 0;
        }
    }

    // One last sort to make sure this is ordered
    DatFileTool.SortDB(ref output, true);
    return output;
}
/// <summary>
/// Get if an item should be ignored on write
/// </summary>
/// <param name="datItem">DatItem to check</param>
/// <param name="ignoreBlanks">True if blank roms should be skipped on output, false otherwise</param>
/// <returns>True if the item should be skipped on write, false otherwise</returns>
protected internal bool ShouldIgnore(DatItem? datItem, bool ignoreBlanks)
{
    // If this is invoked with a null DatItem, we ignore
    if (datItem == null)
    {
        _logger.Verbose($"Item was skipped because it was null");
        return true;
    }

    // If the item is supposed to be removed, we ignore
    if (datItem.GetBoolFieldValue(DatItem.RemoveKey) == true)
    {
        string itemString = JsonConvert.SerializeObject(datItem, Formatting.None);
        _logger.Verbose($"Item '{itemString}' was skipped because it was marked for removal");
        return true;
    }

    // If we have the Blank dat item, we ignore
    if (datItem is Blank)
    {
        string itemString = JsonConvert.SerializeObject(datItem, Formatting.None);
        _logger.Verbose($"Item '{itemString}' was skipped because it was of type 'Blank'");
        return true;
    }

    // If we're ignoring blanks and we have a Rom
    if (ignoreBlanks && datItem is Rom rom)
    {
        // If we have a 0-size or blank rom, then we ignore
        long? size = rom.GetInt64FieldValue(Models.Metadata.Rom.SizeKey);
        if (size == 0 || size == null)
        {
            string itemString = JsonConvert.SerializeObject(datItem, Formatting.None);
            _logger.Verbose($"Item '{itemString}' was skipped because it had an invalid size");
            return true;
        }
    }

    // If we have an item type not in the list of supported values
    // NOTE(review): generic type arguments appear stripped here
    // ("AsEnumValue()", "List?"); confirm against the original file
    ItemType itemType = datItem.GetStringFieldValue(Models.Metadata.DatItem.TypeKey).AsEnumValue();
    if (!Array.Exists(SupportedTypes, t => t == itemType))
    {
        string itemString = JsonConvert.SerializeObject(datItem, Formatting.None);
        _logger.Verbose($"Item '{itemString}' was skipped because it was not supported for output");
        return true;
    }

    // If we have an item with missing required fields
    List? missingFields = GetMissingRequiredFields(datItem);
    if (missingFields != null && missingFields.Count != 0)
    {
        string itemString = JsonConvert.SerializeObject(datItem, Formatting.None);
#if NET20 || NET35
        _logger.Verbose($"Item '{itemString}' was skipped because it was missing required fields: {string.Join(", ", [.. missingFields])}");
#else
        _logger.Verbose($"Item '{itemString}' was skipped because it was missing required fields: {string.Join(", ", missingFields)}");
#endif
        return true;
    }

    return false;
}
#endregion
}
}