Split Modification class functionality

The old structure had the potential to cause a lot of issues. Moving the actual functionality for cleaning, filtering, and applying extras into their appropriate classes (Cleaner, ExtraIni, and the new Splitter) means less indirection when calling into the code. Modification as a class was essentially a shell around operations that should each have been a single call.
Matt Nadareski
2021-02-01 11:43:38 -08:00
parent 99aad96dfb
commit 69010dea7f
15 changed files with 689 additions and 666 deletions
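The net effect is that callers now construct or reuse the specific helper and call it directly instead of routing through static Modification methods. A minimal before/after sketch, based on the call sites changed in this diff (the variables are assumed to be already-populated instances):

    // Before: static pass-throughs on Modification
    Modification.ApplyCleaning(datFile, cleaner);
    Modification.ApplyExtras(datFile, extraIni);
    Modification.ApplySplitting(datFile, mergingFlag, false);

    // After: each helper applies its own operation directly
    cleaner.ApplyCleaning(datFile);
    extraIni.ApplyExtras(datFile);
    Filtering.Splitter splitter = new Filtering.Splitter { SplitType = mergingFlag };
    splitter.ApplySplitting(datFile, false);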

View File

@@ -64,7 +64,7 @@ namespace RombaSharp.Features
"DatItem.SHA512", "DatItem.SHA512",
"DatItem.SpamSum", "DatItem.SpamSum",
}); });
Modification.ApplyCleaning(datfile, cleaner); cleaner.ApplyCleaning(datfile);
Writer.Write(datfile, outdat); Writer.Write(datfile, outdat);
} }
} }

View File

@@ -14,7 +14,7 @@ namespace SabreTools.DatTools
/// <summary>
/// Helper methods for updating and converting DatFiles
/// </summary>
-public class DatFileTool
+public static class DatFileTool
{
#region Logging
@@ -25,6 +25,40 @@ namespace SabreTools.DatTools
#endregion
/// <summary>
/// Apply SuperDAT naming logic to a merged DatFile
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
/// <param name="inputs">List of inputs to use for renaming</param>
public static void ApplySuperDAT(DatFile datFile, List<ParentablePath> inputs)
{
List<string> keys = datFile.Items.Keys.ToList();
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = datFile.Items[key].ToList();
List<DatItem> newItems = new List<DatItem>();
foreach (DatItem item in items)
{
DatItem newItem = item;
string filename = inputs[newItem.Source.Index].CurrentPath;
string rootpath = inputs[newItem.Source.Index].ParentPath;
if (!string.IsNullOrWhiteSpace(rootpath))
rootpath += Path.DirectorySeparatorChar.ToString();
filename = filename.Remove(0, rootpath.Length);
newItem.Machine.Name = Path.GetDirectoryName(filename) + Path.DirectorySeparatorChar
+ Path.GetFileNameWithoutExtension(filename) + Path.DirectorySeparatorChar
+ newItem.Machine.Name;
newItems.Add(newItem);
}
datFile.Items.Remove(key);
datFile.Items.AddRange(key, newItems);
});
}
/// <summary>
/// Replace item values from the base set represented by the current DAT
/// </summary>

View File

@@ -1,6 +1,9 @@
+using System;
+using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
+using System.Runtime.CompilerServices;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
@@ -10,11 +13,13 @@ using SabreTools.DatFiles;
using SabreTools.DatItems;
using SabreTools.Logging;
+[assembly: InternalsVisibleTo("SabreTools.Test")]
namespace SabreTools.Filtering
{
/// <summary>
/// Represents the cleaning operations that need to be performed on a set of items, usually a DAT
/// </summary>
public class Cleaner
{
#region Exclusion Fields
@@ -200,11 +205,91 @@ namespace SabreTools.Filtering
#region Cleaning
/// <summary>
/// Apply cleaning methods to the DatFile
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
/// <returns>True if cleaning was successful, false on error</returns>
public bool ApplyCleaning(DatFile datFile, bool throwOnError = false)
{
try
{
// Perform item-level cleaning
CleanDatItems(datFile);
// Bucket and dedupe according to the flag
if (DedupeRoms == DedupeType.Full)
datFile.Items.BucketBy(ItemKey.CRC, DedupeRoms);
else if (DedupeRoms == DedupeType.Game)
datFile.Items.BucketBy(ItemKey.Machine, DedupeRoms);
// Process description to machine name
if (DescriptionAsName == true)
MachineDescriptionToName(datFile);
// If we are removing scene dates, do that now
if (SceneDateStrip == true)
StripSceneDatesFromItems(datFile);
// Run the one game per region logic, if required
if (OneGamePerRegion == true)
SetOneGamePerRegion(datFile);
// Run the one rom per game logic, if required
if (OneRomPerGame == true)
SetOneRomPerGame(datFile);
// If we are removing fields, do that now
RemoveFieldsFromItems(datFile);
// Remove all marked items
datFile.Items.ClearMarked();
// We remove any blanks, if we aren't supposed to have any
if (KeepEmptyGames == false)
datFile.Items.ClearEmpty();
}
catch (Exception ex) when (!throwOnError)
{
logger.Error(ex);
return false;
}
return true;
}
/// <summary>
/// Clean individual items based on the current filter
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
internal void CleanDatItems(DatFile datFile)
{
List<string> keys = datFile.Items.Keys.ToList();
foreach (string key in keys)
{
// For every item in the current key
List<DatItem> items = datFile.Items[key];
foreach (DatItem item in items)
{
// If we have a null item, we can't clean it
if (item == null)
continue;
// Run cleaning per item
CleanDatItem(item);
}
// Assign back for caution
datFile.Items[key] = items;
}
}
/// <summary>
/// Clean a DatItem according to the cleaner
/// </summary>
/// <param name="datItem">DatItem to clean</param>
-public void CleanDatItem(DatItem datItem)
+internal void CleanDatItem(DatItem datItem)
{
// If we're stripping unicode characters, strip machine name and description
if (RemoveUnicode)
@@ -243,7 +328,7 @@ namespace SabreTools.Filtering
/// </summary>
/// <param name="game">Name of the game to be cleaned</param>
/// <returns>The cleaned name</returns>
-private string CleanGameName(string game)
+internal string CleanGameName(string game)
{
if (game == null)
return null;
@@ -258,12 +343,71 @@ namespace SabreTools.Filtering
return game;
}
/// <summary>
/// Use game descriptions as names in the DAT, updating cloneof/romof/sampleof
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
internal void MachineDescriptionToName(DatFile datFile, bool throwOnError = false)
{
try
{
// First we want to get a mapping for all games to description
ConcurrentDictionary<string, string> mapping = new ConcurrentDictionary<string, string>();
Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = datFile.Items[key];
foreach (DatItem item in items)
{
// If the key mapping doesn't exist, add it
mapping.TryAdd(item.Machine.Name, item.Machine.Description.Replace('/', '_').Replace("\"", "''").Replace(":", " -"));
}
});
// Now we loop through every item and update accordingly
Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = datFile.Items[key];
List<DatItem> newItems = new List<DatItem>();
foreach (DatItem item in items)
{
// Update machine name
if (!string.IsNullOrWhiteSpace(item.Machine.Name) && mapping.ContainsKey(item.Machine.Name))
item.Machine.Name = mapping[item.Machine.Name];
// Update cloneof
if (!string.IsNullOrWhiteSpace(item.Machine.CloneOf) && mapping.ContainsKey(item.Machine.CloneOf))
item.Machine.CloneOf = mapping[item.Machine.CloneOf];
// Update romof
if (!string.IsNullOrWhiteSpace(item.Machine.RomOf) && mapping.ContainsKey(item.Machine.RomOf))
item.Machine.RomOf = mapping[item.Machine.RomOf];
// Update sampleof
if (!string.IsNullOrWhiteSpace(item.Machine.SampleOf) && mapping.ContainsKey(item.Machine.SampleOf))
item.Machine.SampleOf = mapping[item.Machine.SampleOf];
// Add the new item to the output list
newItems.Add(item);
}
// Replace the old list of roms with the new one
datFile.Items.Remove(key);
datFile.Items.AddRange(key, newItems);
});
}
catch (Exception ex) when (!throwOnError)
{
logger.Warning(ex.ToString());
}
}
/// <summary>
/// Replace accented characters
/// </summary>
/// <param name="input">String to be parsed</param>
/// <returns>String with characters replaced</returns>
-private string NormalizeChars(string input)
+internal string NormalizeChars(string input)
{
if (input == null)
return null;
@@ -319,7 +463,7 @@ namespace SabreTools.Filtering
/// </summary>
/// <param name="s">Input string to clean</param>
/// <returns>Cleaned string</returns>
-private string RemoveUnicodeCharacters(string s)
+internal string RemoveUnicodeCharacters(string s)
{
if (s == null)
return null;
@@ -332,7 +476,7 @@ namespace SabreTools.Filtering
/// </summary>
/// <param name="input">String to be parsed</param>
/// <returns>String with characters replaced</returns>
-private string RussianToLatin(string input)
+internal string RussianToLatin(string input)
{
if (input == null)
return null;
@@ -367,7 +511,7 @@ namespace SabreTools.Filtering
/// </summary>
/// <param name="input">String to be parsed</param>
/// <returns>String with characters replaced</returns>
-private string SearchPattern(string input)
+internal string SearchPattern(string input)
{
if (input == null)
return null;
@@ -409,10 +553,238 @@ namespace SabreTools.Filtering
return input;
}
/// <summary>
/// Filter a DAT using 1G1R logic given an ordered set of regions
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
/// <remarks>
/// In the most technical sense, the way that the region list is being used does not
/// confine its values to be just regions. Since it's essentially acting like a
/// specialized version of the machine name filter, anything that is usually encapsulated
/// in parentheses would be matched on, including disc numbers, languages, editions,
/// and anything else commonly used. Please note that, unlike other existing 1G1R
/// solutions, this does not have the ability to contain custom mappings of parent
/// to clone sets based on name, nor does it have the ability to match on the
/// Release DatItem type.
/// </remarks>
internal void SetOneGamePerRegion(DatFile datFile)
{
// If we have null region list, make it empty
if (RegionList == null)
RegionList = new List<string>();
// For sake of ease, the first thing we want to do is bucket by game
datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None, norename: true);
// Then we want to get a mapping of all machines to parents
Dictionary<string, List<string>> parents = new Dictionary<string, List<string>>();
foreach (string key in datFile.Items.Keys)
{
DatItem item = datFile.Items[key][0];
// Match on CloneOf first
if (!string.IsNullOrEmpty(item.Machine.CloneOf))
{
if (!parents.ContainsKey(item.Machine.CloneOf.ToLowerInvariant()))
parents.Add(item.Machine.CloneOf.ToLowerInvariant(), new List<string>());
parents[item.Machine.CloneOf.ToLowerInvariant()].Add(item.Machine.Name.ToLowerInvariant());
}
// Then by RomOf
else if (!string.IsNullOrEmpty(item.Machine.RomOf))
{
if (!parents.ContainsKey(item.Machine.RomOf.ToLowerInvariant()))
parents.Add(item.Machine.RomOf.ToLowerInvariant(), new List<string>());
parents[item.Machine.RomOf.ToLowerInvariant()].Add(item.Machine.Name.ToLowerInvariant());
}
// Otherwise, treat it as a parent
else
{
if (!parents.ContainsKey(item.Machine.Name.ToLowerInvariant()))
parents.Add(item.Machine.Name.ToLowerInvariant(), new List<string>());
parents[item.Machine.Name.ToLowerInvariant()].Add(item.Machine.Name.ToLowerInvariant());
}
}
// Once we have the full list of mappings, filter out games to keep
foreach (string key in parents.Keys)
{
// Find the first machine that matches the regions in order, if possible
string machine = default;
foreach (string region in RegionList)
{
machine = parents[key].FirstOrDefault(m => Regex.IsMatch(m, @"\(.*" + region + @".*\)", RegexOptions.IgnoreCase));
if (machine != default)
break;
}
// If we didn't get a match, use the parent
if (machine == default)
machine = key;
// Remove the key from the list
parents[key].Remove(machine);
// Remove the rest of the items from this key
parents[key].ForEach(k => datFile.Items.Remove(k));
}
// Finally, strip out the parent tags
Splitter.RemoveTagsFromChild(datFile);
}
/// <summary>
/// Ensure that all roms are in their own game (or at least try to ensure)
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
internal void SetOneRomPerGame(DatFile datFile)
{
// Because this introduces subfolders, we need to set the SuperDAT type
datFile.Header.Type = "SuperDAT";
// For each rom, we want to update the game to be "<game name>/<rom name>"
Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = datFile.Items[key];
for (int i = 0; i < items.Count; i++)
{
DatItemTool.SetOneRomPerGame(items[i]);
}
});
}
/// <summary>
/// Strip the dates from the beginning of scene-style set names
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
internal void StripSceneDatesFromItems(DatFile datFile)
{
// Output the logging statement
logger.User("Stripping scene-style dates");
// Set the regex pattern to use
string pattern = @"([0-9]{2}\.[0-9]{2}\.[0-9]{2}-)(.*?-.*?)";
// Now process all of the roms
Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = datFile.Items[key];
for (int j = 0; j < items.Count; j++)
{
DatItem item = items[j];
if (Regex.IsMatch(item.Machine.Name, pattern))
item.Machine.Name = Regex.Replace(item.Machine.Name, pattern, "$2");
if (Regex.IsMatch(item.Machine.Description, pattern))
item.Machine.Description = Regex.Replace(item.Machine.Description, pattern, "$2");
items[j] = item;
}
datFile.Items.Remove(key);
datFile.Items.AddRange(key, items);
});
}
#endregion
#region Filtering
/// <summary>
/// Apply a set of Filters on the DatFile
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
/// <param name="perMachine">True if entire machines are considered, false otherwise (default)</param>
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
/// <returns>True if the DatFile was filtered, false on error</returns>
public bool ApplyFilters(DatFile datFile, bool perMachine = false, bool throwOnError = false)
{
// If we have null filters, return false
if (MachineFilter == null || DatItemFilter == null)
return false;
// If we're filtering per machine, bucket by machine first
if (perMachine)
datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None);
try
{
// Loop over every key in the dictionary
List<string> keys = datFile.Items.Keys.ToList();
foreach (string key in keys)
{
// For every item in the current key
bool machinePass = true;
List<DatItem> items = datFile.Items[key];
foreach (DatItem item in items)
{
// If we have a null item, we can't pass it
if (item == null)
continue;
// If the item is already filtered out, we skip
if (item.Remove)
continue;
// If the rom doesn't pass the filter, mark for removal
if (!PassesFilters(item))
{
item.Remove = true;
// If we're in machine mode, set and break
if (perMachine)
{
machinePass = false;
break;
}
}
}
// If we didn't pass and we're in machine mode, set all items as remove
if (perMachine && !machinePass)
{
foreach (DatItem item in items)
{
item.Remove = true;
}
}
// Assign back for caution
datFile.Items[key] = items;
}
}
catch (Exception ex) when (!throwOnError)
{
logger.Error(ex);
return false;
}
return true;
}
/// <summary>
/// Check to see if a DatItem passes the filters
/// </summary>
/// <param name="datItem">DatItem to check</param>
/// <returns>True if the item passed the filter, false otherwise</returns>
internal bool PassesFilters(DatItem datItem)
{
// Null item means it will never pass
if (datItem == null)
return false;
// Filter on Machine fields
if (!MachineFilter.PassesFilters(datItem.Machine))
return false;
// Filter on DatItem fields
return DatItemFilter.PassesFilters(datItem);
}
/// <summary>
/// Split the parts of a filter statement
/// </summary>
@@ -439,25 +811,6 @@ namespace SabreTools.Filtering
return (filterFieldString, filterValue, negate);
}
/// <summary>
/// Check to see if a DatItem passes the filters
/// </summary>
/// <param name="datItem">DatItem to check</param>
/// <returns>True if the item passed the filter, false otherwise</returns>
public bool PassesFilters(DatItem datItem)
{
// Null item means it will never pass
if (datItem == null)
return false;
// Filter on Machine fields
if (!MachineFilter.PassesFilters(datItem.Machine))
return false;
// Filter on DatItem fields
return DatItemFilter.PassesFilters(datItem);
}
#endregion
#region Removal
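Because the cleaning entry point now lives on Cleaner itself, single-purpose passes can be expressed by setting one flag and calling ApplyCleaning, as the updated interactive script later in this commit does. A rough sketch (variable names are illustrative):

    // Use machine descriptions as names
    Cleaner descNameCleaner = new Cleaner { DescriptionAsName = true };
    descNameCleaner.ApplyCleaning(datFile);

    // Strip scene-style dates from set names
    Cleaner stripCleaner = new Cleaner { SceneDateStrip = true };
    stripCleaner.ApplyCleaning(datFile);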

View File

@@ -33,7 +33,7 @@ namespace SabreTools.Filtering
#endregion
-#region Remover Population
+#region Population
/// <inheritdoc/>
public override bool SetRemover(string field)
@@ -55,7 +55,7 @@ namespace SabreTools.Filtering
#endregion
-#region Remover Running
+#region Running
/// <summary>
/// Remove fields with given values

View File

@@ -207,7 +207,7 @@ namespace SabreTools.Filtering
#endregion
-#region Filter Population
+#region Population
/// <summary>
/// Set multiple filters from key
@@ -763,7 +763,7 @@ namespace SabreTools.Filtering
#endregion
-#region Filter Running
+#region Running
/// <summary>
/// Check to see if a DatItem passes the filters

View File

@@ -38,7 +38,7 @@ namespace SabreTools.Filtering
#endregion
-#region Remover Population
+#region Population
/// <inheritdoc/>
public override bool SetRemover(string field)
@@ -68,7 +68,7 @@ namespace SabreTools.Filtering
#endregion
-#region Remover Running
+#region Running
/// <summary>
/// Remove fields with given values

View File

@@ -1,6 +1,10 @@
+using System;
using System.Collections.Generic;
+using SabreTools.Core;
using SabreTools.Core.Tools;
+using SabreTools.DatFiles;
+using SabreTools.DatItems;
using SabreTools.Logging;
namespace SabreTools.Filtering
@@ -37,7 +41,7 @@ namespace SabreTools.Filtering
#endregion
-#region Extras Population
+#region Population
/// <summary>
/// Populate item using field:file inputs
@@ -68,5 +72,96 @@ namespace SabreTools.Filtering
}
#endregion
#region Running
/// <summary>
/// Apply a set of Extra INIs on the DatFile
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
/// <returns>True if the extras were applied, false on error</returns>
public bool ApplyExtras(DatFile datFile, bool throwOnError = false)
{
try
{
// Bucket by game first
datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None);
// Create a new set of mappings based on the items
var machineMap = new Dictionary<string, Dictionary<MachineField, string>>();
var datItemMap = new Dictionary<string, Dictionary<DatItemField, string>>();
// Loop through each of the extras
foreach (ExtraIniItem item in Items)
{
foreach (var mapping in item.Mappings)
{
string key = mapping.Key;
List<string> machineNames = mapping.Value;
// Loop through the machines and add the new mappings
foreach (string machine in machineNames)
{
if (item.MachineField != MachineField.NULL)
{
if (!machineMap.ContainsKey(machine))
machineMap[machine] = new Dictionary<MachineField, string>();
machineMap[machine][item.MachineField] = key;
}
else if (item.DatItemField != DatItemField.NULL)
{
if (!datItemMap.ContainsKey(machine))
datItemMap[machine] = new Dictionary<DatItemField, string>();
datItemMap[machine][item.DatItemField] = key;
}
}
}
}
// Now apply the new set of Machine mappings
foreach (string key in machineMap.Keys)
{
// If the key doesn't exist, continue
if (!datFile.Items.ContainsKey(key))
continue;
List<DatItem> datItems = datFile.Items[key];
var mappings = machineMap[key];
foreach (var datItem in datItems)
{
DatItemTool.SetFields(datItem.Machine, mappings);
}
}
// Now apply the new set of DatItem mappings
foreach (string key in datItemMap.Keys)
{
// If the key doesn't exist, continue
if (!datFile.Items.ContainsKey(key))
continue;
List<DatItem> datItems = datFile.Items[key];
var mappings = datItemMap[key];
foreach (var datItem in datItems)
{
DatItemTool.SetFields(datItem, mappings, null);
}
}
}
catch (Exception ex) when (!throwOnError)
{
logger.Error(ex);
return false;
}
return true;
}
#endregion
}
}
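ApplyExtras likewise becomes an instance method, so a populated ExtraIni is applied directly rather than being passed through Modification. A rough sketch based on the call site updated later in this diff (extraIni and extraIniItem are assumed to be populated as in the script):

    // Add a previously parsed extra INI item, then apply the mappings
    extraIni.Items.Add(extraIniItem);
    extraIni.ApplyExtras(datFile);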

View File

@@ -112,7 +112,7 @@ namespace SabreTools.Filtering
#endregion
-#region Filter Population
+#region Population
/// <summary>
/// Set multiple filters from key
@@ -323,7 +323,7 @@ namespace SabreTools.Filtering
#endregion
-#region Filter Running
+#region Running
/// <summary>
/// Check to see if a Machine passes the filters

View File

@@ -1,25 +1,25 @@
using System;
-using System.Collections.Concurrent;
using System.Collections.Generic;
-using System.IO;
using System.Linq;
-using System.Text.RegularExpressions;
-using System.Threading.Tasks;
using SabreTools.Core;
using SabreTools.DatFiles;
using SabreTools.DatItems;
-using SabreTools.Filtering;
-using SabreTools.IO;
using SabreTools.Logging;
-namespace SabreTools.DatTools
+namespace SabreTools.Filtering
{
-/// <summary>
-/// Helper methods for cleaning and filtering DatFiles
-/// </summary>
-public class Modification
+public class Splitter
{
+#region Fields
+/// <summary>
+/// Splitting mode to apply
+/// </summary>
+public MergingFlag SplitType { get; set; }
+#endregion
#region Logging
/// <summary>
@@ -28,548 +28,72 @@ namespace SabreTools.DatTools
private static readonly Logger logger = new Logger();
#endregion
/// <summary>
/// Apply cleaning methods to the DatFile
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
/// <param name="cleaner">Cleaner to use</param>
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
/// <returns>True if cleaning was successful, false on error</returns>
public static bool ApplyCleaning(DatFile datFile, Cleaner cleaner, bool throwOnError = false)
{
try
{
// Perform item-level cleaning
CleanDatItems(datFile, cleaner);
// Bucket and dedupe according to the flag
if (cleaner?.DedupeRoms == DedupeType.Full)
datFile.Items.BucketBy(ItemKey.CRC, cleaner.DedupeRoms);
else if (cleaner?.DedupeRoms == DedupeType.Game)
datFile.Items.BucketBy(ItemKey.Machine, cleaner.DedupeRoms);
// Process description to machine name
if (cleaner?.DescriptionAsName == true)
MachineDescriptionToName(datFile);
// If we are removing scene dates, do that now
if (cleaner?.SceneDateStrip == true)
StripSceneDatesFromItems(datFile);
// Run the one rom per game logic, if required
if (cleaner?.OneGamePerRegion == true)
OneGamePerRegion(datFile, cleaner.RegionList);
// Run the one rom per game logic, if required
if (cleaner?.OneRomPerGame == true)
OneRomPerGame(datFile);
// If we are removing fields, do that now
cleaner.RemoveFieldsFromItems(datFile);
// Remove all marked items
datFile.Items.ClearMarked();
// We remove any blanks, if we aren't supposed to have any
if (cleaner?.KeepEmptyGames == false)
datFile.Items.ClearEmpty();
}
catch (Exception ex) when (!throwOnError)
{
logger.Error(ex);
return false;
}
return true;
}
/// <summary>
/// Apply a set of Extra INIs on the DatFile
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
/// <param name="extras">ExtrasIni to use</param>
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
/// <returns>True if the extras were applied, false on error</returns>
public static bool ApplyExtras(DatFile datFile, ExtraIni extras, bool throwOnError = false)
{
try
{
// Bucket by game first
datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None);
// Create a new set of mappings based on the items
var machineMap = new Dictionary<string, Dictionary<MachineField, string>>();
var datItemMap = new Dictionary<string, Dictionary<DatItemField, string>>();
// Loop through each of the extras
foreach (ExtraIniItem item in extras.Items)
{
foreach (var mapping in item.Mappings)
{
string key = mapping.Key;
List<string> machineNames = mapping.Value;
// Loop through the machines and add the new mappings
foreach (string machine in machineNames)
{
if (item.MachineField != MachineField.NULL)
{
if (!machineMap.ContainsKey(machine))
machineMap[machine] = new Dictionary<MachineField, string>();
machineMap[machine][item.MachineField] = key;
}
else if (item.DatItemField != DatItemField.NULL)
{
if (!datItemMap.ContainsKey(machine))
datItemMap[machine] = new Dictionary<DatItemField, string>();
datItemMap[machine][item.DatItemField] = key;
}
}
}
}
// Now apply the new set of Machine mappings
foreach (string key in machineMap.Keys)
{
// If the key doesn't exist, continue
if (!datFile.Items.ContainsKey(key))
continue;
List<DatItem> datItems = datFile.Items[key];
var mappings = machineMap[key];
foreach (var datItem in datItems)
{
DatItemTool.SetFields(datItem.Machine, mappings);
}
}
// Now apply the new set of DatItem mappings
foreach (string key in datItemMap.Keys)
{
// If the key doesn't exist, continue
if (!datFile.Items.ContainsKey(key))
continue;
List<DatItem> datItems = datFile.Items[key];
var mappings = datItemMap[key];
foreach (var datItem in datItems)
{
DatItemTool.SetFields(datItem, mappings, null);
}
}
}
catch (Exception ex) when (!throwOnError)
{
logger.Error(ex);
return false;
}
return true;
}
/// <summary>
/// Apply a set of Filters on the DatFile
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
/// <param name="cleaner">Cleaner to use</param>
/// <param name="perMachine">True if entire machines are considered, false otherwise (default)</param>
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
/// <returns>True if the DatFile was filtered, false on error</returns>
public static bool ApplyFilters(DatFile datFile, Cleaner cleaner, bool perMachine = false, bool throwOnError = false)
{
// If we have a null cleaner or filters, return false
if (cleaner == null || cleaner.MachineFilter == null || cleaner.DatItemFilter == null)
return false;
// If we're filtering per machine, bucket by machine first
if (perMachine)
datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None);
try
{
// Loop over every key in the dictionary
List<string> keys = datFile.Items.Keys.ToList();
foreach (string key in keys)
{
// For every item in the current key
bool machinePass = true;
List<DatItem> items = datFile.Items[key];
foreach (DatItem item in items)
{
// If we have a null item, we can't pass it
if (item == null)
continue;
// If the item is already filtered out, we skip
if (item.Remove)
continue;
// If the rom doesn't pass the filter, mark for removal
if (!cleaner.PassesFilters(item))
{
item.Remove = true;
// If we're in machine mode, set and break
if (perMachine)
{
machinePass = false;
break;
}
}
}
// If we didn't pass and we're in machine mode, set all items as remove
if (perMachine && !machinePass)
{
foreach (DatItem item in items)
{
item.Remove = true;
}
}
// Assign back for caution
datFile.Items[key] = items;
}
}
catch (Exception ex) when (!throwOnError)
{
logger.Error(ex);
return false;
}
return true;
}
/// <summary>
/// Apply splitting on the DatFile
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
/// <param name="splitType">Split type to try</param>
/// <param name="useTags">True if DatFile tags override splitting, false otherwise</param>
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
/// <returns>True if the DatFile was split, false on error</returns>
public static bool ApplySplitting(DatFile datFile, MergingFlag splitType, bool useTags, bool throwOnError = false)
{
try
{
// If we are using tags from the DAT, set the proper input for split type unless overridden
if (useTags && splitType == MergingFlag.None)
splitType = datFile.Header.ForceMerging;
// Run internal splitting
switch (splitType)
{
case MergingFlag.None:
// No-op
break;
case MergingFlag.Device:
CreateDeviceNonMergedSets(datFile, DedupeType.None);
break;
case MergingFlag.Full:
CreateFullyNonMergedSets(datFile, DedupeType.None);
break;
case MergingFlag.NonMerged:
CreateNonMergedSets(datFile, DedupeType.None);
break;
case MergingFlag.Merged:
CreateMergedSets(datFile, DedupeType.None);
break;
case MergingFlag.Split:
CreateSplitSets(datFile, DedupeType.None);
break;
}
}
catch (Exception ex) when (!throwOnError)
{
logger.Error(ex);
return false;
}
return true;
}
/// <summary>
/// Apply SuperDAT naming logic to a merged DatFile
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
/// <param name="inputs">List of inputs to use for renaming</param>
public static void ApplySuperDAT(DatFile datFile, List<ParentablePath> inputs)
{
List<string> keys = datFile.Items.Keys.ToList();
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = datFile.Items[key].ToList();
List<DatItem> newItems = new List<DatItem>();
foreach (DatItem item in items)
{
DatItem newItem = item;
string filename = inputs[newItem.Source.Index].CurrentPath;
string rootpath = inputs[newItem.Source.Index].ParentPath;
if (!string.IsNullOrWhiteSpace(rootpath))
rootpath += Path.DirectorySeparatorChar.ToString();
filename = filename.Remove(0, rootpath.Length);
newItem.Machine.Name = Path.GetDirectoryName(filename) + Path.DirectorySeparatorChar
+ Path.GetFileNameWithoutExtension(filename) + Path.DirectorySeparatorChar
+ newItem.Machine.Name;
newItems.Add(newItem);
}
datFile.Items.Remove(key);
datFile.Items.AddRange(key, newItems);
});
}
/// <summary>
/// Use game descriptions as names in the DAT, updating cloneof/romof/sampleof
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
public static void MachineDescriptionToName(DatFile datFile, bool throwOnError = false)
{
try
{
// First we want to get a mapping for all games to description
ConcurrentDictionary<string, string> mapping = new ConcurrentDictionary<string, string>();
Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = datFile.Items[key];
foreach (DatItem item in items)
{
// If the key mapping doesn't exist, add it
mapping.TryAdd(item.Machine.Name, item.Machine.Description.Replace('/', '_').Replace("\"", "''").Replace(":", " -"));
}
});
// Now we loop through every item and update accordingly
Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = datFile.Items[key];
List<DatItem> newItems = new List<DatItem>();
foreach (DatItem item in items)
{
// Update machine name
if (!string.IsNullOrWhiteSpace(item.Machine.Name) && mapping.ContainsKey(item.Machine.Name))
item.Machine.Name = mapping[item.Machine.Name];
// Update cloneof
if (!string.IsNullOrWhiteSpace(item.Machine.CloneOf) && mapping.ContainsKey(item.Machine.CloneOf))
item.Machine.CloneOf = mapping[item.Machine.CloneOf];
// Update romof
if (!string.IsNullOrWhiteSpace(item.Machine.RomOf) && mapping.ContainsKey(item.Machine.RomOf))
item.Machine.RomOf = mapping[item.Machine.RomOf];
// Update sampleof
if (!string.IsNullOrWhiteSpace(item.Machine.SampleOf) && mapping.ContainsKey(item.Machine.SampleOf))
item.Machine.SampleOf = mapping[item.Machine.SampleOf];
// Add the new item to the output list
newItems.Add(item);
}
// Replace the old list of roms with the new one
datFile.Items.Remove(key);
datFile.Items.AddRange(key, newItems);
});
}
catch (Exception ex) when (!throwOnError)
{
logger.Warning(ex.ToString());
}
}
/// <summary>
/// Filter a DAT using 1G1R logic given an ordered set of regions
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
/// <param name="regions">Ordered list of regions to use</param>
/// <remarks>
/// In the most technical sense, the way that the region list is being used does not
/// confine its values to be just regions. Since it's essentially acting like a
/// specialized version of the machine name filter, anything that is usually encapsulated
/// in parenthesis would be matched on, including disc numbers, languages, editions,
/// and anything else commonly used. Please note that, unlike other existing 1G1R
/// solutions, this does not have the ability to contain custom mappings of parent
/// to clone sets based on name, nor does it have the ability to match on the
/// Release DatItem type.
/// </remarks>
public static void OneGamePerRegion(DatFile datFile, List<string> regions)
{
// If we have null region list, make it empty
if (regions == null)
regions = new List<string>();
// For sake of ease, the first thing we want to do is bucket by game
datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None, norename: true);
// Then we want to get a mapping of all machines to parents
Dictionary<string, List<string>> parents = new Dictionary<string, List<string>>();
foreach (string key in datFile.Items.Keys)
{
DatItem item = datFile.Items[key][0];
// Match on CloneOf first
if (!string.IsNullOrEmpty(item.Machine.CloneOf))
{
if (!parents.ContainsKey(item.Machine.CloneOf.ToLowerInvariant()))
parents.Add(item.Machine.CloneOf.ToLowerInvariant(), new List<string>());
parents[item.Machine.CloneOf.ToLowerInvariant()].Add(item.Machine.Name.ToLowerInvariant());
}
// Then by RomOf
else if (!string.IsNullOrEmpty(item.Machine.RomOf))
{
if (!parents.ContainsKey(item.Machine.RomOf.ToLowerInvariant()))
parents.Add(item.Machine.RomOf.ToLowerInvariant(), new List<string>());
parents[item.Machine.RomOf.ToLowerInvariant()].Add(item.Machine.Name.ToLowerInvariant());
}
// Otherwise, treat it as a parent
else
{
if (!parents.ContainsKey(item.Machine.Name.ToLowerInvariant()))
parents.Add(item.Machine.Name.ToLowerInvariant(), new List<string>());
parents[item.Machine.Name.ToLowerInvariant()].Add(item.Machine.Name.ToLowerInvariant());
}
}
// Once we have the full list of mappings, filter out games to keep
foreach (string key in parents.Keys)
{
// Find the first machine that matches the regions in order, if possible
string machine = default;
foreach (string region in regions)
{
machine = parents[key].FirstOrDefault(m => Regex.IsMatch(m, @"\(.*" + region + @".*\)", RegexOptions.IgnoreCase));
if (machine != default)
break;
}
// If we didn't get a match, use the parent
if (machine == default)
machine = key;
// Remove the key from the list
parents[key].Remove(machine);
// Remove the rest of the items from this key
parents[key].ForEach(k => datFile.Items.Remove(k));
}
// Finally, strip out the parent tags
RemoveTagsFromChild(datFile);
}
/// <summary>
/// Ensure that all roms are in their own game (or at least try to ensure)
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
public static void OneRomPerGame(DatFile datFile)
{
// Because this introduces subfolders, we need to set the SuperDAT type
datFile.Header.Type = "SuperDAT";
// For each rom, we want to update the game to be "<game name>/<rom name>"
Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = datFile.Items[key];
for (int i = 0; i < items.Count; i++)
{
DatItemTool.SetOneRomPerGame(items[i]);
}
});
}
/// <summary>
/// Strip the dates from the beginning of scene-style set names
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
public static void StripSceneDatesFromItems(DatFile datFile)
{
// Output the logging statement
logger.User("Stripping scene-style dates");
// Set the regex pattern to use
string pattern = @"([0-9]{2}\.[0-9]{2}\.[0-9]{2}-)(.*?-.*?)";
// Now process all of the roms
Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = datFile.Items[key];
for (int j = 0; j < items.Count; j++)
{
DatItem item = items[j];
if (Regex.IsMatch(item.Machine.Name, pattern))
item.Machine.Name = Regex.Replace(item.Machine.Name, pattern, "$2");
if (Regex.IsMatch(item.Machine.Description, pattern))
item.Machine.Description = Regex.Replace(item.Machine.Description, pattern, "$2");
items[j] = item;
}
datFile.Items.Remove(key);
datFile.Items.AddRange(key, items);
});
}
/// <summary>
/// Clean individual items based on the current filter
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
/// <param name="cleaner">Cleaner to use</param>
private static void CleanDatItems(DatFile datFile, Cleaner cleaner)
{
List<string> keys = datFile.Items.Keys.ToList();
foreach (string key in keys)
{
// For every item in the current key
List<DatItem> items = datFile.Items[key];
foreach (DatItem item in items)
{
// If we have a null item, we can't clean it it
if (item == null)
continue;
// Run cleaning per item
cleaner.CleanDatItem(item);
}
// Assign back for caution
datFile.Items[key] = items;
}
}
// TODO: Should any of these create a new DatFile in the process?
// The reason this comes up is that doing any of the splits or merges
// is an inherently destructive process. Making it output a new DatFile
// might make it easier to deal with multiple internal steps. On the other
// hand, this will increase memory usage significantly and would force the
// existing paths to behave entirely differently
-#region Internal Splitting/Merging
+#region Running
/// <summary>
/// Apply splitting on the DatFile
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
/// <param name="useTags">True if DatFile tags override splitting, false otherwise</param>
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
/// <returns>True if the DatFile was split, false on error</returns>
public bool ApplySplitting(DatFile datFile, bool useTags, bool throwOnError = false)
{
try
{
// If we are using tags from the DAT, set the proper input for split type unless overridden
if (useTags && SplitType == MergingFlag.None)
SplitType = datFile.Header.ForceMerging;
// Run internal splitting
switch (SplitType)
{
case MergingFlag.None:
// No-op
break;
case MergingFlag.Device:
CreateDeviceNonMergedSets(datFile);
break;
case MergingFlag.Full:
CreateFullyNonMergedSets(datFile);
break;
case MergingFlag.NonMerged:
CreateNonMergedSets(datFile);
break;
case MergingFlag.Merged:
CreateMergedSets(datFile);
break;
case MergingFlag.Split:
CreateSplitSets(datFile);
break;
}
}
catch (Exception ex) when (!throwOnError)
{
logger.Error(ex);
return false;
}
return true;
}
/// <summary>
/// Use cdevice_ref tags to get full non-merged sets and remove parenting tags
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
-/// <param name="mergeroms">Dedupe type to be used</param>
-private static void CreateDeviceNonMergedSets(DatFile datFile, DedupeType mergeroms)
+internal static void CreateDeviceNonMergedSets(DatFile datFile)
{
logger.User("Creating device non-merged sets from the DAT");
// For sake of ease, the first thing we want to do is bucket by game
-datFile.Items.BucketBy(ItemKey.Machine, mergeroms, norename: true);
+datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None, norename: true);
// Now we want to loop through all of the games and set the correct information
while (AddRomsFromDevices(datFile, false, false)) ;
@@ -583,13 +107,12 @@ namespace SabreTools.DatTools
/// Use cloneof tags to create non-merged sets and remove the tags plus using the device_ref tags to get full sets
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
-/// <param name="mergeroms">Dedupe type to be used</param>
-private static void CreateFullyNonMergedSets(DatFile datFile, DedupeType mergeroms)
+internal static void CreateFullyNonMergedSets(DatFile datFile)
{
logger.User("Creating fully non-merged sets from the DAT");
// For sake of ease, the first thing we want to do is bucket by game
-datFile.Items.BucketBy(ItemKey.Machine, mergeroms, norename: true);
+datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None, norename: true);
// Now we want to loop through all of the games and set the correct information
while (AddRomsFromDevices(datFile, true, true)) ;
@@ -607,13 +130,12 @@ namespace SabreTools.DatTools
/// Use cloneof tags to create merged sets and remove the tags
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
-/// <param name="mergeroms">Dedupe type to be used</param>
-private static void CreateMergedSets(DatFile datFile, DedupeType mergeroms)
+internal static void CreateMergedSets(DatFile datFile)
{
logger.User("Creating merged sets from the DAT");
// For sake of ease, the first thing we want to do is bucket by game
-datFile.Items.BucketBy(ItemKey.Machine, mergeroms, norename: true);
+datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None, norename: true);
// Now we want to loop through all of the games and set the correct information
AddRomsFromChildren(datFile);
@@ -630,13 +152,12 @@ namespace SabreTools.DatTools
/// Use cloneof tags to create non-merged sets and remove the tags
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
-/// <param name="mergeroms">Dedupe type to be used</param>
-private static void CreateNonMergedSets(DatFile datFile, DedupeType mergeroms)
+internal static void CreateNonMergedSets(DatFile datFile)
{
logger.User("Creating non-merged sets from the DAT");
// For sake of ease, the first thing we want to do is bucket by game
-datFile.Items.BucketBy(ItemKey.Machine, mergeroms, norename: true);
+datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None, norename: true);
// Now we want to loop through all of the games and set the correct information
AddRomsFromParent(datFile);
@@ -653,13 +174,12 @@ namespace SabreTools.DatTools
/// Use cloneof and romof tags to create split sets and remove the tags
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
-/// <param name="mergeroms">Dedupe type to be used</param>
-private static void CreateSplitSets(DatFile datFile, DedupeType mergeroms)
+internal static void CreateSplitSets(DatFile datFile)
{
logger.User("Creating split sets from the DAT");
// For sake of ease, the first thing we want to do is bucket by game
-datFile.Items.BucketBy(ItemKey.Machine, mergeroms, norename: true);
+datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None, norename: true);
// Now we want to loop through all of the games and set the correct information
RemoveRomsFromChild(datFile);
@@ -676,7 +196,7 @@ namespace SabreTools.DatTools
/// Use romof tags to add roms to the children
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
-private static void AddRomsFromBios(DatFile datFile)
+internal static void AddRomsFromBios(DatFile datFile)
{
List<string> games = datFile.Items.Keys.OrderBy(g => g).ToList();
foreach (string game in games)
@@ -717,7 +237,7 @@ namespace SabreTools.DatTools
/// <param name="datFile">Current DatFile object to run operations on</param>
/// <param name="dev">True if only child device sets are touched, false for non-device sets (default)</param>
/// <param name="useSlotOptions">True if slotoptions tags are used as well, false otherwise</param>
-private static bool AddRomsFromDevices(DatFile datFile, bool dev = false, bool useSlotOptions = false)
+internal static bool AddRomsFromDevices(DatFile datFile, bool dev = false, bool useSlotOptions = false)
{
bool foundnew = false;
List<string> machines = datFile.Items.Keys.OrderBy(g => g).ToList();
@@ -845,7 +365,7 @@ namespace SabreTools.DatTools
/// Use cloneof tags to add roms to the children, setting the new romof tag in the process
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
-private static void AddRomsFromParent(DatFile datFile)
+internal static void AddRomsFromParent(DatFile datFile)
{
List<string> games = datFile.Items.Keys.OrderBy(g => g).ToList();
foreach (string game in games)
@@ -896,7 +416,7 @@ namespace SabreTools.DatTools
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
/// <param name="subfolder">True to add DatItems to subfolder of parent (not including Disk), false otherwise</param>
-private static void AddRomsFromChildren(DatFile datFile, bool subfolder = true)
+internal static void AddRomsFromChildren(DatFile datFile, bool subfolder = true)
{
List<string> games = datFile.Items.Keys.OrderBy(g => g).ToList();
foreach (string game in games)
@@ -1008,7 +528,7 @@ namespace SabreTools.DatTools
/// Remove all BIOS and device sets
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
-private static void RemoveBiosAndDeviceSets(DatFile datFile)
+internal static void RemoveBiosAndDeviceSets(DatFile datFile)
{
List<string> games = datFile.Items.Keys.OrderBy(g => g).ToList();
foreach (string game in games)
@@ -1027,7 +547,7 @@ namespace SabreTools.DatTools
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
/// <param name="bios">True if only child Bios sets are touched, false for non-bios sets (default)</param>
-private static void RemoveBiosRomsFromChild(DatFile datFile, bool bios = false)
+internal static void RemoveBiosRomsFromChild(DatFile datFile, bool bios = false)
{
// Loop through the romof tags
List<string> games = datFile.Items.Keys.OrderBy(g => g).ToList();
@@ -1071,7 +591,7 @@ namespace SabreTools.DatTools
/// Use cloneof tags to remove roms from the children
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
-private static void RemoveRomsFromChild(DatFile datFile)
+internal static void RemoveRomsFromChild(DatFile datFile)
{
List<string> games = datFile.Items.Keys.OrderBy(g => g).ToList();
foreach (string game in games)
@@ -1118,7 +638,7 @@ namespace SabreTools.DatTools
/// Remove all romof and cloneof tags from all games
/// </summary>
/// <param name="datFile">Current DatFile object to run operations on</param>
-private static void RemoveTagsFromChild(DatFile datFile)
+internal static void RemoveTagsFromChild(DatFile datFile)
{
List<string> games = datFile.Items.Keys.OrderBy(g => g).ToList();
foreach (string game in games)
@@ -1135,4 +655,4 @@ namespace SabreTools.DatTools
#endregion
}
}
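Splitting follows the same pattern: a Splitter is configured with a SplitType and then applied, which is how the interactive script later in this commit now invokes it. A rough sketch (mergingFlag is assumed to hold the desired MergingFlag value):

    Filtering.Splitter splitter = new Filtering.Splitter { SplitType = mergingFlag };
    splitter.ApplySplitting(datFile, false);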

View File

@@ -1735,6 +1735,11 @@ Some special strings that can be used:
/// </summary>
protected string OutputDir { get; set; }
+/// <summary>
+/// Pre-configured Splitter
+/// </summary>
+protected Filtering.Splitter Splitter { get; set; }
#endregion
#region Add Feature Groups
@@ -1801,6 +1806,7 @@ Some special strings that can be used:
Extras = GetExtras(features);
Header = GetDatHeader(features);
OutputDir = GetString(features, OutputDirStringValue);
+Splitter = GetSplitter(features);
// Set threading flag, if necessary
if (features.ContainsKey(ThreadsInt32Value))
@@ -1895,26 +1901,6 @@ Some special strings that can be used:
return splittingMode;
}
/// <summary>
/// Get SplitType from feature list
/// </summary>
protected MergingFlag GetSplitType(Dictionary<string, Feature> features)
{
MergingFlag splitType = MergingFlag.None;
if (GetBoolean(features, DatDeviceNonMergedValue))
splitType = MergingFlag.Device;
else if (GetBoolean(features, DatFullNonMergedValue))
splitType = MergingFlag.Full;
else if (GetBoolean(features, DatMergedValue))
splitType = MergingFlag.Merged;
else if (GetBoolean(features, DatNonMergedValue))
splitType = MergingFlag.NonMerged;
else if (GetBoolean(features, DatSplitValue))
splitType = MergingFlag.Split;
return splitType;
}
/// <summary>
/// Get StatReportFormat from feature list
/// </summary>
@@ -2135,6 +2121,38 @@ Some special strings that can be used:
return extraIni;
}
/// <summary>
/// Get Splitter from feature list
/// </summary>
private Filtering.Splitter GetSplitter(Dictionary<string, Feature> features)
{
Filtering.Splitter splitter = new Filtering.Splitter
{
SplitType = GetSplitType(features),
};
return splitter;
}
/// <summary>
/// Get SplitType from feature list
/// </summary>
private MergingFlag GetSplitType(Dictionary<string, Feature> features)
{
MergingFlag splitType = MergingFlag.None;
if (GetBoolean(features, DatDeviceNonMergedValue))
splitType = MergingFlag.Device;
else if (GetBoolean(features, DatFullNonMergedValue))
splitType = MergingFlag.Full;
else if (GetBoolean(features, DatMergedValue))
splitType = MergingFlag.Merged;
else if (GetBoolean(features, DatNonMergedValue))
splitType = MergingFlag.NonMerged;
else if (GetBoolean(features, DatSplitValue))
splitType = MergingFlag.Split;
return splitType;
}
#endregion
#region Protected Helpers

View File

@@ -168,7 +168,7 @@ Reset the internal state: reset();";
"DatItem.SpamSum", "DatItem.SpamSum",
"DatItem.Date", "DatItem.Date",
}); });
Modification.ApplyCleaning(datFile, dfdCleaner); dfdCleaner.ApplyCleaning(datFile);
break; break;
@@ -213,18 +213,18 @@ Reset the internal state: reset();";
} }
// Create cleaner to run filters from // Create cleaner to run filters from
Cleaner cleaner = new Cleaner Cleaner filterCleaner = new Cleaner
{ {
MachineFilter = new MachineFilter(), MachineFilter = new MachineFilter(),
DatItemFilter = new DatItemFilter(), DatItemFilter = new DatItemFilter(),
}; };
// Set the possible filters // Set the possible filters
cleaner.MachineFilter.SetFilter(filterMachineField, filterValue, filterRemove.Value); filterCleaner.MachineFilter.SetFilter(filterMachineField, filterValue, filterRemove.Value);
cleaner.DatItemFilter.SetFilter(filterDatItemField, filterValue, filterRemove.Value); filterCleaner.DatItemFilter.SetFilter(filterDatItemField, filterValue, filterRemove.Value);
// Apply the filters blindly // Apply the filters blindly
Modification.ApplyFilters(datFile, cleaner, filterPerMachine.Value); filterCleaner.ApplyFilters(datFile, filterPerMachine.Value);
// Cleanup after the filter // Cleanup after the filter
// TODO: We might not want to remove immediately // TODO: We might not want to remove immediately
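
For reference, the filtering shape used above reduced to its essentials; variable and argument names are placeholders, and both ApplyFilters call shapes that appear in this commit are shown:

    Cleaner cleaner = new Cleaner
    {
        MachineFilter = new MachineFilter(),
        DatItemFilter = new DatItemFilter(),
    };
    cleaner.MachineFilter.SetFilter(machineField, filterValue, remove);
    cleaner.DatItemFilter.SetFilter(datItemField, filterValue, remove);

    cleaner.ApplyFilters(datFile, perMachine);  // with an explicit per-machine flag
    cleaner.ApplyFilters(datFile);              // as called from the feature classes
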
@@ -269,7 +269,7 @@ Reset the internal state: reset();";
extraIni.Items.Add(extraIniItem); extraIni.Items.Add(extraIniItem);
// Apply the extra INI blindly // Apply the extra INI blindly
Modification.ApplyExtras(datFile, extraIni); extraIni.ApplyExtras(datFile);
break; break;
@@ -293,7 +293,8 @@ Reset the internal state: reset();";
} }
// Apply the merging flag // Apply the merging flag
Modification.ApplySplitting(datFile, mergingFlag, false); Filtering.Splitter splitter = new Filtering.Splitter { SplitType = mergingFlag };
splitter.ApplySplitting(datFile, false);
break; break;
@@ -307,7 +308,8 @@ Reset the internal state: reset();";
} }
// Apply the logic // Apply the logic
Modification.MachineDescriptionToName(datFile); Cleaner descNameCleaner = new Cleaner { DescriptionAsName = true };
descNameCleaner.ApplyCleaning(datFile);
break; break;
@@ -321,7 +323,8 @@ Reset the internal state: reset();";
} }
// Run the 1G1R functionality // Run the 1G1R functionality
Modification.OneGamePerRegion(datFile, command.Arguments); Cleaner ogorCleaner = new Cleaner { OneGamePerRegion = true, RegionList = command.Arguments };
ogorCleaner.ApplyCleaning(datFile);
break; break;
@@ -335,7 +338,8 @@ Reset the internal state: reset();";
} }
// Apply the logic // Apply the logic
Modification.OneRomPerGame(datFile); Cleaner orpgCleaner = new Cleaner { OneRomPerGame = true };
orpgCleaner.ApplyCleaning(datFile);
break; break;
@@ -365,7 +369,8 @@ Reset the internal state: reset();";
} }
// Apply the logic // Apply the logic
Modification.StripSceneDatesFromItems(datFile); Cleaner stripCleaner = new Cleaner { SceneDateStrip = true };
stripCleaner.ApplyCleaning(datFile);
break; break;
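
Taken together, the commands above now follow a single pattern: build a small, single-purpose object and ask it to apply itself to the DatFile. A consolidated, illustrative sketch (the region list, merging flag, and extraIni variable are examples, and RegionList is assumed to take a plain list of region names, since the 1G1R command passes its arguments straight through):

    // Former Modification helpers, now expressed as Cleaner flags:
    new Cleaner { DescriptionAsName = true }.ApplyCleaning(datFile);
    new Cleaner { OneGamePerRegion = true, RegionList = new List<string> { "USA", "Europe" } }.ApplyCleaning(datFile);
    new Cleaner { OneRomPerGame = true }.ApplyCleaning(datFile);
    new Cleaner { SceneDateStrip = true }.ApplyCleaning(datFile);

    // Merging/splitting is applied through a Filtering.Splitter instance:
    new Filtering.Splitter { SplitType = MergingFlag.Merged }.ApplySplitting(datFile, false);

    // Extras INIs apply themselves:
    extraIni.ApplyExtras(datFile);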

View File

@@ -2,7 +2,6 @@
using System.Collections.Generic; using System.Collections.Generic;
using System.IO; using System.IO;
using SabreTools.Core;
using SabreTools.DatFiles; using SabreTools.DatFiles;
using SabreTools.DatTools; using SabreTools.DatTools;
using SabreTools.FileTypes; using SabreTools.FileTypes;
@@ -62,10 +61,10 @@ namespace SabreTools.Features
bool noAutomaticDate = GetBoolean(features, NoAutomaticDateValue); bool noAutomaticDate = GetBoolean(features, NoAutomaticDateValue);
var includeInScan = GetIncludeInScan(features); var includeInScan = GetIncludeInScan(features);
var skipFileType = GetSkipFileType(features); var skipFileType = GetSkipFileType(features);
var splitType = GetSplitType(features);
// Apply the specialized field removals to the cleaner // Apply the specialized field removals to the cleaner
Cleaner.PopulateExclusionsFromList(new List<string> { "DatItem.Date" }); if (!addFileDates)
Cleaner.PopulateExclusionsFromList(new List<string> { "DatItem.Date" });
// Create a new DATFromDir object and process the inputs // Create a new DATFromDir object and process the inputs
DatFile basedat = DatFile.Create(Header); DatFile basedat = DatFile.Create(Header);
@@ -95,10 +94,10 @@ namespace SabreTools.Features
if (success) if (success)
{ {
// Perform additional processing steps // Perform additional processing steps
Modification.ApplyExtras(datdata, Extras); Extras.ApplyExtras(datdata);
Modification.ApplySplitting(datdata, splitType, false); Splitter.ApplySplitting(datdata, false);
Modification.ApplyFilters(datdata, Cleaner); Cleaner.ApplyFilters(datdata);
Modification.ApplyCleaning(datdata, Cleaner); Cleaner.ApplyCleaning(datdata);
// Write out the file // Write out the file
Writer.Write(datdata, OutputDir); Writer.Write(datdata, OutputDir);
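
The DATFromDir changes above reduce per-DAT post-processing to four instance calls on the pre-configured Extras, Splitter, and Cleaner members. A self-contained sketch of that flow as a feature-class helper, assuming those base-class members and Header/OutputDir are already populated; the method name is illustrative and the ParseInto call shape is mirrored from the Verify feature below:

    private void ProcessSingleDat(string datPath)
    {
        // Parse the input into a fresh DatFile built from the shared header
        DatFile datFile = DatFile.Create(Header);
        Parser.ParseInto(datFile, datPath, int.MaxValue, keep: true);

        // The steps that used to be Modification.* calls, now on their owning types
        Extras.ApplyExtras(datFile);              // ExtraIni
        Splitter.ApplySplitting(datFile, false);  // Filtering.Splitter
        Cleaner.ApplyFilters(datFile);
        Cleaner.ApplyCleaning(datFile);

        // Write the result out
        Writer.Write(datFile, OutputDir);
    }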

View File

@@ -65,7 +65,7 @@ namespace SabreTools.Features
// Extension splitting // Extension splitting
if (splittingMode.HasFlag(SplittingMode.Extension)) if (splittingMode.HasFlag(SplittingMode.Extension))
{ {
(DatFile extADat, DatFile extBDat) = Splitter.SplitByExtension(internalDat, GetList(features, ExtAListValue), GetList(features, ExtBListValue)); (DatFile extADat, DatFile extBDat) = DatTools.Splitter.SplitByExtension(internalDat, GetList(features, ExtAListValue), GetList(features, ExtBListValue));
InternalStopwatch watch = new InternalStopwatch("Outputting extension-split DATs"); InternalStopwatch watch = new InternalStopwatch("Outputting extension-split DATs");
@@ -79,7 +79,7 @@ namespace SabreTools.Features
// Hash splitting // Hash splitting
if (splittingMode.HasFlag(SplittingMode.Hash)) if (splittingMode.HasFlag(SplittingMode.Hash))
{ {
Dictionary<DatItemField, DatFile> typeDats = Splitter.SplitByHash(internalDat); Dictionary<DatItemField, DatFile> typeDats = DatTools.Splitter.SplitByHash(internalDat);
InternalStopwatch watch = new InternalStopwatch("Outputting hash-split DATs"); InternalStopwatch watch = new InternalStopwatch("Outputting hash-split DATs");
@@ -96,7 +96,7 @@ namespace SabreTools.Features
if (splittingMode.HasFlag(SplittingMode.Level)) if (splittingMode.HasFlag(SplittingMode.Level))
{ {
logger.Warning("This feature is not implemented: level-split"); logger.Warning("This feature is not implemented: level-split");
Splitter.SplitByLevel( DatTools.Splitter.SplitByLevel(
internalDat, internalDat,
OutputDir, OutputDir,
GetBoolean(features, ShortValue), GetBoolean(features, ShortValue),
@@ -106,7 +106,7 @@ namespace SabreTools.Features
// Size splitting // Size splitting
if (splittingMode.HasFlag(SplittingMode.Size)) if (splittingMode.HasFlag(SplittingMode.Size))
{ {
(DatFile lessThan, DatFile greaterThan) = Splitter.SplitBySize(internalDat, GetInt64(features, RadixInt64Value)); (DatFile lessThan, DatFile greaterThan) = DatTools.Splitter.SplitBySize(internalDat, GetInt64(features, RadixInt64Value));
InternalStopwatch watch = new InternalStopwatch("Outputting size-split DATs"); InternalStopwatch watch = new InternalStopwatch("Outputting size-split DATs");
@@ -120,7 +120,7 @@ namespace SabreTools.Features
// Type splitting // Type splitting
if (splittingMode.HasFlag(SplittingMode.Type)) if (splittingMode.HasFlag(SplittingMode.Type))
{ {
Dictionary<ItemType, DatFile> typeDats = Splitter.SplitByType(internalDat); Dictionary<ItemType, DatFile> typeDats = DatTools.Splitter.SplitByType(internalDat);
InternalStopwatch watch = new InternalStopwatch("Outputting ItemType DATs"); InternalStopwatch watch = new InternalStopwatch("Outputting ItemType DATs");

View File

@@ -156,10 +156,10 @@ namespace SabreTools.Features
|| datFile.Header.DatFormat.HasFlag(DatFormat.SSV)); || datFile.Header.DatFormat.HasFlag(DatFormat.SSV));
// Perform additional processing steps // Perform additional processing steps
Modification.ApplyExtras(datFile, Extras); Extras.ApplyExtras(datFile);
Modification.ApplySplitting(datFile, GetSplitType(features), false); Splitter.ApplySplitting(datFile, false);
Modification.ApplyFilters(datFile, Cleaner); Cleaner.ApplyFilters(datFile);
Modification.ApplyCleaning(datFile, Cleaner); Cleaner.ApplyCleaning(datFile);
// Get the correct output path // Get the correct output path
string realOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue)); string realOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
@@ -194,10 +194,10 @@ namespace SabreTools.Features
datHeaders = DatFileTool.PopulateUserData(userInputDat, inputPaths); datHeaders = DatFileTool.PopulateUserData(userInputDat, inputPaths);
// Perform additional processing steps // Perform additional processing steps
Modification.ApplyExtras(userInputDat, Extras); Extras.ApplyExtras(userInputDat);
Modification.ApplySplitting(userInputDat, GetSplitType(features), false); Splitter.ApplySplitting(userInputDat, false);
Modification.ApplyFilters(userInputDat, Cleaner); Cleaner.ApplyFilters(userInputDat);
Modification.ApplyCleaning(userInputDat, Cleaner); Cleaner.ApplyCleaning(userInputDat);
// Output only DatItems that are duplicated across inputs // Output only DatItems that are duplicated across inputs
if (updateMode.HasFlag(UpdateMode.DiffDupesOnly)) if (updateMode.HasFlag(UpdateMode.DiffDupesOnly))
@@ -286,10 +286,10 @@ namespace SabreTools.Features
Parser.ParseInto(repDat, inputPath, indexId: 1, keep: true); Parser.ParseInto(repDat, inputPath, indexId: 1, keep: true);
// Perform additional processing steps // Perform additional processing steps
Modification.ApplyExtras(repDat, Extras); Extras.ApplyExtras(repDat);
Modification.ApplySplitting(repDat, GetSplitType(features), false); Splitter.ApplySplitting(repDat, false);
Modification.ApplyFilters(repDat, Cleaner); Cleaner.ApplyFilters(repDat);
Modification.ApplyCleaning(repDat, Cleaner); Cleaner.ApplyCleaning(repDat);
// Now replace the fields from the base DatFile // Now replace the fields from the base DatFile
DatFileTool.DiffAgainst(userInputDat, repDat, GetBoolean(Features, ByGameValue)); DatFileTool.DiffAgainst(userInputDat, repDat, GetBoolean(Features, ByGameValue));
@@ -311,10 +311,10 @@ namespace SabreTools.Features
Parser.ParseInto(repDat, inputPath, indexId: 1, keep: true); Parser.ParseInto(repDat, inputPath, indexId: 1, keep: true);
// Perform additional processing steps // Perform additional processing steps
Modification.ApplyExtras(repDat, Extras); Extras.ApplyExtras(repDat);
Modification.ApplySplitting(repDat, GetSplitType(features), false); Splitter.ApplySplitting(repDat, false);
Modification.ApplyFilters(repDat, Cleaner); Cleaner.ApplyFilters(repDat);
Modification.ApplyCleaning(repDat, Cleaner); Cleaner.ApplyCleaning(repDat);
// Now replace the fields from the base DatFile // Now replace the fields from the base DatFile
DatFileTool.BaseReplace( DatFileTool.BaseReplace(
@@ -336,7 +336,7 @@ namespace SabreTools.Features
{ {
// If we're in SuperDAT mode, prefix all games with their respective DATs // If we're in SuperDAT mode, prefix all games with their respective DATs
if (string.Equals(userInputDat.Header.Type, "SuperDAT", StringComparison.OrdinalIgnoreCase)) if (string.Equals(userInputDat.Header.Type, "SuperDAT", StringComparison.OrdinalIgnoreCase))
Modification.ApplySuperDAT(userInputDat, inputPaths); DatFileTool.ApplySuperDAT(userInputDat, inputPaths);
Writer.Write(userInputDat, OutputDir); Writer.Write(userInputDat, OutputDir);
} }

View File

@@ -50,7 +50,6 @@ namespace SabreTools.Features
TreatAsFile asFiles = GetTreatAsFiles(features); TreatAsFile asFiles = GetTreatAsFiles(features);
bool hashOnly = GetBoolean(features, HashOnlyValue); bool hashOnly = GetBoolean(features, HashOnlyValue);
bool quickScan = GetBoolean(features, QuickValue); bool quickScan = GetBoolean(features, QuickValue);
var splitType = GetSplitType(features);
// If we are in individual mode, process each DAT on their own // If we are in individual mode, process each DAT on their own
if (GetBoolean(features, IndividualValue)) if (GetBoolean(features, IndividualValue))
@@ -62,10 +61,10 @@ namespace SabreTools.Features
Parser.ParseInto(datdata, datfile, int.MaxValue, keep: true); Parser.ParseInto(datdata, datfile, int.MaxValue, keep: true);
// Perform additional processing steps // Perform additional processing steps
Modification.ApplyExtras(datdata, Extras); Extras.ApplyExtras(datdata);
Modification.ApplySplitting(datdata, splitType, true); Splitter.ApplySplitting(datdata, true);
Modification.ApplyFilters(datdata, Cleaner); Cleaner.ApplyFilters(datdata);
Modification.ApplyCleaning(datdata, Cleaner); Cleaner.ApplyCleaning(datdata);
// Set depot information // Set depot information
datdata.Header.InputDepot = Header.InputDepot.Clone() as DepotInformation; datdata.Header.InputDepot = Header.InputDepot.Clone() as DepotInformation;
@@ -109,10 +108,10 @@ namespace SabreTools.Features
} }
// Perform additional processing steps // Perform additional processing steps
Modification.ApplyExtras(datdata, Extras); Extras.ApplyExtras(datdata);
Modification.ApplySplitting(datdata, splitType, true); Splitter.ApplySplitting(datdata, true);
Modification.ApplyFilters(datdata, Cleaner); Cleaner.ApplyFilters(datdata);
Modification.ApplyCleaning(datdata, Cleaner); Cleaner.ApplyCleaning(datdata);
// Set depot information // Set depot information
datdata.Header.InputDepot = Header.InputDepot.Clone() as DepotInformation; datdata.Header.InputDepot = Header.InputDepot.Clone() as DepotInformation;