diff --git a/RombaSharp/Features/Dir2Dat.cs b/RombaSharp/Features/Dir2Dat.cs
index a3469500..7836a4b1 100644
--- a/RombaSharp/Features/Dir2Dat.cs
+++ b/RombaSharp/Features/Dir2Dat.cs
@@ -64,7 +64,7 @@ namespace RombaSharp.Features
"DatItem.SHA512",
"DatItem.SpamSum",
});
- Modification.ApplyCleaning(datfile, cleaner);
+ cleaner.ApplyCleaning(datfile);
Writer.Write(datfile, outdat);
}
}
diff --git a/SabreTools.DatTools/DatFileTool.cs b/SabreTools.DatTools/DatFileTool.cs
index 4b40dccb..5fc48dfa 100644
--- a/SabreTools.DatTools/DatFileTool.cs
+++ b/SabreTools.DatTools/DatFileTool.cs
@@ -14,7 +14,7 @@ namespace SabreTools.DatTools
///
/// Helper methods for updating and converting DatFiles
///
- public class DatFileTool
+ public static class DatFileTool
{
#region Logging
@@ -25,6 +25,40 @@ namespace SabreTools.DatTools
#endregion
+ /// <summary>
+ /// Apply SuperDAT naming logic to a merged DatFile
+ /// </summary>
+ /// <param name="datFile">Current DatFile object to run operations on</param>
+ /// <param name="inputs">List of inputs to use for renaming</param>
+ public static void ApplySuperDAT(DatFile datFile, List<ParentablePath> inputs)
+ {
+ List<string> keys = datFile.Items.Keys.ToList();
+ Parallel.ForEach(keys, Globals.ParallelOptions, key =>
+ {
+ List<DatItem> items = datFile.Items[key].ToList();
+ List<DatItem> newItems = new List<DatItem>();
+ foreach (DatItem item in items)
+ {
+ DatItem newItem = item;
+ string filename = inputs[newItem.Source.Index].CurrentPath;
+ string rootpath = inputs[newItem.Source.Index].ParentPath;
+
+ if (!string.IsNullOrWhiteSpace(rootpath))
+ rootpath += Path.DirectorySeparatorChar.ToString();
+
+ filename = filename.Remove(0, rootpath.Length);
+ newItem.Machine.Name = Path.GetDirectoryName(filename) + Path.DirectorySeparatorChar
+ + Path.GetFileNameWithoutExtension(filename) + Path.DirectorySeparatorChar
+ + newItem.Machine.Name;
+
+ newItems.Add(newItem);
+ }
+
+ datFile.Items.Remove(key);
+ datFile.Items.AddRange(key, newItems);
+ });
+ }
+
///
/// Replace item values from the base set represented by the current DAT
///
diff --git a/SabreTools.Filtering/Cleaner.cs b/SabreTools.Filtering/Cleaner.cs
index 7d5ba136..b1635780 100644
--- a/SabreTools.Filtering/Cleaner.cs
+++ b/SabreTools.Filtering/Cleaner.cs
@@ -1,6 +1,9 @@
-using System.Collections.Generic;
+using System;
+using System.Collections.Concurrent;
+using System.Collections.Generic;
using System.IO;
using System.Linq;
+using System.Runtime.CompilerServices;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
@@ -10,11 +13,13 @@ using SabreTools.DatFiles;
using SabreTools.DatItems;
using SabreTools.Logging;
+[assembly: InternalsVisibleTo("SabreTools.Test")]
namespace SabreTools.Filtering
{
///
/// Represents the cleaning operations that need to be performed on a set of items, usually a DAT
///
+
public class Cleaner
{
#region Exclusion Fields
@@ -200,11 +205,91 @@ namespace SabreTools.Filtering
#region Cleaning
+ /// <summary>
+ /// Apply cleaning methods to the DatFile
+ /// </summary>
+ /// <param name="datFile">Current DatFile object to run operations on</param>
+ /// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
+ /// <returns>True if cleaning was successful, false on error</returns>
+ public bool ApplyCleaning(DatFile datFile, bool throwOnError = false)
+ {
+ try
+ {
+ // Perform item-level cleaning
+ CleanDatItems(datFile);
+
+ // Bucket and dedupe according to the flag
+ if (DedupeRoms == DedupeType.Full)
+ datFile.Items.BucketBy(ItemKey.CRC, DedupeRoms);
+ else if (DedupeRoms == DedupeType.Game)
+ datFile.Items.BucketBy(ItemKey.Machine, DedupeRoms);
+
+ // Process description to machine name
+ if (DescriptionAsName == true)
+ MachineDescriptionToName(datFile);
+
+ // If we are removing scene dates, do that now
+ if (SceneDateStrip == true)
+ StripSceneDatesFromItems(datFile);
+
+ // Run the one game per region logic, if required
+ if (OneGamePerRegion == true)
+ SetOneGamePerRegion(datFile);
+
+ // Run the one rom per game logic, if required
+ if (OneRomPerGame == true)
+ SetOneRomPerGame(datFile);
+
+ // If we are removing fields, do that now
+ RemoveFieldsFromItems(datFile);
+
+ // Remove all marked items
+ datFile.Items.ClearMarked();
+
+ // We remove any blanks, if we aren't supposed to have any
+ if (KeepEmptyGames == false)
+ datFile.Items.ClearEmpty();
+ }
+ catch (Exception ex) when (!throwOnError)
+ {
+ logger.Error(ex);
+ return false;
+ }
+
+ return true;
+ }
+
+ ///
+ /// Clean individual items based on the current filter
+ ///
+ /// Current DatFile object to run operations on
+ internal void CleanDatItems(DatFile datFile)
+ {
+ List<string> keys = datFile.Items.Keys.ToList();
+ foreach (string key in keys)
+ {
+ // For every item in the current key
+ List<DatItem> items = datFile.Items[key];
+ foreach (DatItem item in items)
+ {
+ // If we have a null item, we can't clean it
+ if (item == null)
+ continue;
+
+ // Run cleaning per item
+ CleanDatItem(item);
+ }
+
+ // Assign back for caution
+ datFile.Items[key] = items;
+ }
+ }
+
///
/// Clean a DatItem according to the cleaner
///
/// DatItem to clean
- public void CleanDatItem(DatItem datItem)
+ internal void CleanDatItem(DatItem datItem)
{
// If we're stripping unicode characters, strip machine name and description
if (RemoveUnicode)
@@ -243,7 +328,7 @@ namespace SabreTools.Filtering
///
/// Name of the game to be cleaned
/// The cleaned name
- private string CleanGameName(string game)
+ internal string CleanGameName(string game)
{
if (game == null)
return null;
@@ -258,12 +343,71 @@ namespace SabreTools.Filtering
return game;
}
+ ///
+ /// Use game descriptions as names in the DAT, updating cloneof/romof/sampleof
+ ///
+ /// Current DatFile object to run operations on
+ /// True if the error that is thrown should be thrown back to the caller, false otherwise
+ internal void MachineDescriptionToName(DatFile datFile, bool throwOnError = false)
+ {
+ try
+ {
+ // First we want to get a mapping for all games to description
+ ConcurrentDictionary<string, string> mapping = new ConcurrentDictionary<string, string>();
+ Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
+ {
+ List<DatItem> items = datFile.Items[key];
+ foreach (DatItem item in items)
+ {
+ // If the key mapping doesn't exist, add it
+ mapping.TryAdd(item.Machine.Name, item.Machine.Description.Replace('/', '_').Replace("\"", "''").Replace(":", " -"));
+ }
+ });
+
+ // Now we loop through every item and update accordingly
+ Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
+ {
+ List<DatItem> items = datFile.Items[key];
+ List<DatItem> newItems = new List<DatItem>();
+ foreach (DatItem item in items)
+ {
+ // Update machine name
+ if (!string.IsNullOrWhiteSpace(item.Machine.Name) && mapping.ContainsKey(item.Machine.Name))
+ item.Machine.Name = mapping[item.Machine.Name];
+
+ // Update cloneof
+ if (!string.IsNullOrWhiteSpace(item.Machine.CloneOf) && mapping.ContainsKey(item.Machine.CloneOf))
+ item.Machine.CloneOf = mapping[item.Machine.CloneOf];
+
+ // Update romof
+ if (!string.IsNullOrWhiteSpace(item.Machine.RomOf) && mapping.ContainsKey(item.Machine.RomOf))
+ item.Machine.RomOf = mapping[item.Machine.RomOf];
+
+ // Update sampleof
+ if (!string.IsNullOrWhiteSpace(item.Machine.SampleOf) && mapping.ContainsKey(item.Machine.SampleOf))
+ item.Machine.SampleOf = mapping[item.Machine.SampleOf];
+
+ // Add the new item to the output list
+ newItems.Add(item);
+ }
+
+ // Replace the old list of roms with the new one
+ datFile.Items.Remove(key);
+ datFile.Items.AddRange(key, newItems);
+ });
+ }
+ catch (Exception ex) when (!throwOnError)
+ {
+ logger.Warning(ex.ToString());
+ }
+ }
+
///
/// Replace accented characters
///
/// String to be parsed
/// String with characters replaced
- private string NormalizeChars(string input)
+ internal string NormalizeChars(string input)
{
if (input == null)
return null;
@@ -319,7 +463,7 @@ namespace SabreTools.Filtering
///
/// Input string to clean
/// Cleaned string
- private string RemoveUnicodeCharacters(string s)
+ internal string RemoveUnicodeCharacters(string s)
{
if (s == null)
return null;
@@ -332,7 +476,7 @@ namespace SabreTools.Filtering
///
/// String to be parsed
/// String with characters replaced
- private string RussianToLatin(string input)
+ internal string RussianToLatin(string input)
{
if (input == null)
return null;
@@ -367,7 +511,7 @@ namespace SabreTools.Filtering
///
/// String to be parsed
/// String with characters replaced
- private string SearchPattern(string input)
+ internal string SearchPattern(string input)
{
if (input == null)
return null;
@@ -409,10 +553,238 @@ namespace SabreTools.Filtering
return input;
}
+ ///
+ /// Filter a DAT using 1G1R logic given an ordered set of regions
+ ///
+ /// Current DatFile object to run operations on
+ ///
+ /// In the most technical sense, the way that the region list is being used does not
+ /// confine its values to be just regions. Since it's essentially acting like a
+ /// specialized version of the machine name filter, anything that is usually encapsulated
+ /// in parenthesis would be matched on, including disc numbers, languages, editions,
+ /// and anything else commonly used. Please note that, unlike other existing 1G1R
+ /// solutions, this does not have the ability to contain custom mappings of parent
+ /// to clone sets based on name, nor does it have the ability to match on the
+ /// Release DatItem type.
+ ///
+ internal void SetOneGamePerRegion(DatFile datFile)
+ {
+ // If we have null region list, make it empty
+ if (RegionList == null)
+ RegionList = new List<string>();
+
+ // For sake of ease, the first thing we want to do is bucket by game
+ datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None, norename: true);
+
+ // Then we want to get a mapping of all machines to parents
+ Dictionary<string, List<string>> parents = new Dictionary<string, List<string>>();
+ foreach (string key in datFile.Items.Keys)
+ {
+ DatItem item = datFile.Items[key][0];
+
+ // Match on CloneOf first
+ if (!string.IsNullOrEmpty(item.Machine.CloneOf))
+ {
+ if (!parents.ContainsKey(item.Machine.CloneOf.ToLowerInvariant()))
+ parents.Add(item.Machine.CloneOf.ToLowerInvariant(), new List<string>());
+
+ parents[item.Machine.CloneOf.ToLowerInvariant()].Add(item.Machine.Name.ToLowerInvariant());
+ }
+
+ // Then by RomOf
+ else if (!string.IsNullOrEmpty(item.Machine.RomOf))
+ {
+ if (!parents.ContainsKey(item.Machine.RomOf.ToLowerInvariant()))
+ parents.Add(item.Machine.RomOf.ToLowerInvariant(), new List<string>());
+
+ parents[item.Machine.RomOf.ToLowerInvariant()].Add(item.Machine.Name.ToLowerInvariant());
+ }
+
+ // Otherwise, treat it as a parent
+ else
+ {
+ if (!parents.ContainsKey(item.Machine.Name.ToLowerInvariant()))
+ parents.Add(item.Machine.Name.ToLowerInvariant(), new List<string>());
+
+ parents[item.Machine.Name.ToLowerInvariant()].Add(item.Machine.Name.ToLowerInvariant());
+ }
+ }
+
+ // Once we have the full list of mappings, filter out games to keep
+ foreach (string key in parents.Keys)
+ {
+ // Find the first machine that matches the regions in order, if possible
+ string machine = default;
+ foreach (string region in RegionList)
+ {
+ machine = parents[key].FirstOrDefault(m => Regex.IsMatch(m, @"\(.*" + region + @".*\)", RegexOptions.IgnoreCase));
+ if (machine != default)
+ break;
+ }
+
+ // If we didn't get a match, use the parent
+ if (machine == default)
+ machine = key;
+
+ // Remove the key from the list
+ parents[key].Remove(machine);
+
+ // Remove the rest of the items from this key
+ parents[key].ForEach(k => datFile.Items.Remove(k));
+ }
+
+ // Finally, strip out the parent tags
+ Splitter.RemoveTagsFromChild(datFile);
+ }
+
+ ///
+ /// Ensure that all roms are in their own game (or at least try to ensure)
+ ///
+ /// Current DatFile object to run operations on
+ internal void SetOneRomPerGame(DatFile datFile)
+ {
+ // Because this introduces subfolders, we need to set the SuperDAT type
+ datFile.Header.Type = "SuperDAT";
+
+ // For each rom, we want to update the game to be "/"
+ Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
+ {
+ List<DatItem> items = datFile.Items[key];
+ for (int i = 0; i < items.Count; i++)
+ {
+ DatItemTool.SetOneRomPerGame(items[i]);
+ }
+ });
+ }
+
+ ///
+ /// Strip the dates from the beginning of scene-style set names
+ ///
+ /// Current DatFile object to run operations on
+ internal void StripSceneDatesFromItems(DatFile datFile)
+ {
+ // Output the logging statement
+ logger.User("Stripping scene-style dates");
+
+ // Set the regex pattern to use
+ string pattern = @"([0-9]{2}\.[0-9]{2}\.[0-9]{2}-)(.*?-.*?)";
+
+ // Now process all of the roms
+ Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
+ {
+ List<DatItem> items = datFile.Items[key];
+ for (int j = 0; j < items.Count; j++)
+ {
+ DatItem item = items[j];
+ if (Regex.IsMatch(item.Machine.Name, pattern))
+ item.Machine.Name = Regex.Replace(item.Machine.Name, pattern, "$2");
+
+ if (Regex.IsMatch(item.Machine.Description, pattern))
+ item.Machine.Description = Regex.Replace(item.Machine.Description, pattern, "$2");
+
+ items[j] = item;
+ }
+
+ datFile.Items.Remove(key);
+ datFile.Items.AddRange(key, items);
+ });
+ }
+
#endregion
#region Filtering
+ /// <summary>
+ /// Apply a set of Filters on the DatFile
+ /// </summary>
+ /// <param name="datFile">Current DatFile object to run operations on</param>
+ /// <param name="perMachine">True if entire machines are considered, false otherwise (default)</param>
+ /// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
+ /// <returns>True if the DatFile was filtered, false on error</returns>
+ public bool ApplyFilters(DatFile datFile, bool perMachine = false, bool throwOnError = false)
+ {
+ // If we have null filters, return false
+ if (MachineFilter == null || DatItemFilter == null)
+ return false;
+
+ // If we're filtering per machine, bucket by machine first
+ if (perMachine)
+ datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None);
+
+ try
+ {
+ // Loop over every key in the dictionary
+ List<string> keys = datFile.Items.Keys.ToList();
+ foreach (string key in keys)
+ {
+ // For every item in the current key
+ bool machinePass = true;
+ List<DatItem> items = datFile.Items[key];
+ foreach (DatItem item in items)
+ {
+ // If we have a null item, we can't pass it
+ if (item == null)
+ continue;
+
+ // If the item is already filtered out, we skip
+ if (item.Remove)
+ continue;
+
+ // If the rom doesn't pass the filter, mark for removal
+ if (!PassesFilters(item))
+ {
+ item.Remove = true;
+
+ // If we're in machine mode, set and break
+ if (perMachine)
+ {
+ machinePass = false;
+ break;
+ }
+ }
+ }
+
+ // If we didn't pass and we're in machine mode, set all items as remove
+ if (perMachine && !machinePass)
+ {
+ foreach (DatItem item in items)
+ {
+ item.Remove = true;
+ }
+ }
+
+ // Assign back for caution
+ datFile.Items[key] = items;
+ }
+ }
+ catch (Exception ex) when (!throwOnError)
+ {
+ logger.Error(ex);
+ return false;
+ }
+
+ return true;
+ }
+
+ ///
+ /// Check to see if a DatItem passes the filters
+ ///
+ /// DatItem to check
+ /// True if the item passed the filter, false otherwise
+ internal bool PassesFilters(DatItem datItem)
+ {
+ // Null item means it will never pass
+ if (datItem == null)
+ return false;
+
+ // Filter on Machine fields
+ if (!MachineFilter.PassesFilters(datItem.Machine))
+ return false;
+
+ // Filter on DatItem fields
+ return DatItemFilter.PassesFilters(datItem);
+ }
+
///
/// Split the parts of a filter statement
///
@@ -439,25 +811,6 @@ namespace SabreTools.Filtering
return (filterFieldString, filterValue, negate);
}
- ///
- /// Check to see if a DatItem passes the filters
- ///
- /// DatItem to check
- /// True if the item passed the filter, false otherwise
- public bool PassesFilters(DatItem datItem)
- {
- // Null item means it will never pass
- if (datItem == null)
- return false;
-
- // Filter on Machine fields
- if (!MachineFilter.PassesFilters(datItem.Machine))
- return false;
-
- // Filter on DatItem fields
- return DatItemFilter.PassesFilters(datItem);
- }
-
#endregion
#region Removal
diff --git a/SabreTools.Filtering/DatHeaderRemover.cs b/SabreTools.Filtering/DatHeaderRemover.cs
index 12bbb60a..18bf1a02 100644
--- a/SabreTools.Filtering/DatHeaderRemover.cs
+++ b/SabreTools.Filtering/DatHeaderRemover.cs
@@ -33,7 +33,7 @@ namespace SabreTools.Filtering
#endregion
- #region Remover Population
+ #region Population
///
public override bool SetRemover(string field)
@@ -55,7 +55,7 @@ namespace SabreTools.Filtering
#endregion
- #region Remover Running
+ #region Running
///
/// Remove fields with given values
diff --git a/SabreTools.Filtering/DatItemFilter.cs b/SabreTools.Filtering/DatItemFilter.cs
index 47023f49..db2793e6 100644
--- a/SabreTools.Filtering/DatItemFilter.cs
+++ b/SabreTools.Filtering/DatItemFilter.cs
@@ -207,7 +207,7 @@ namespace SabreTools.Filtering
#endregion
- #region Filter Population
+ #region Population
///
/// Set multiple filters from key
@@ -763,7 +763,7 @@ namespace SabreTools.Filtering
#endregion
- #region Filter Running
+ #region Running
///
/// Check to see if a DatItem passes the filters
diff --git a/SabreTools.Filtering/DatItemRemover.cs b/SabreTools.Filtering/DatItemRemover.cs
index e688f69b..c6341093 100644
--- a/SabreTools.Filtering/DatItemRemover.cs
+++ b/SabreTools.Filtering/DatItemRemover.cs
@@ -38,7 +38,7 @@ namespace SabreTools.Filtering
#endregion
- #region Remover Population
+ #region Population
///
public override bool SetRemover(string field)
@@ -68,7 +68,7 @@ namespace SabreTools.Filtering
#endregion
- #region Remover Running
+ #region Running
///
/// Remove fields with given values
diff --git a/SabreTools.Filtering/ExtraIni.cs b/SabreTools.Filtering/ExtraIni.cs
index cb9653b8..575a8482 100644
--- a/SabreTools.Filtering/ExtraIni.cs
+++ b/SabreTools.Filtering/ExtraIni.cs
@@ -1,6 +1,10 @@
-using System.Collections.Generic;
+using System;
+using System.Collections.Generic;
+using SabreTools.Core;
using SabreTools.Core.Tools;
+using SabreTools.DatFiles;
+using SabreTools.DatItems;
using SabreTools.Logging;
namespace SabreTools.Filtering
@@ -37,7 +41,7 @@ namespace SabreTools.Filtering
#endregion
- #region Extras Population
+ #region Population
///
/// Populate item using field:file inputs
@@ -68,5 +72,96 @@ namespace SabreTools.Filtering
}
#endregion
+
+ #region Running
+
+ /// <summary>
+ /// Apply a set of Extra INIs on the DatFile
+ /// </summary>
+ /// <param name="datFile">Current DatFile object to run operations on</param>
+ /// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
+ /// <returns>True if the extras were applied, false on error</returns>
+ public bool ApplyExtras(DatFile datFile, bool throwOnError = false)
+ {
+ try
+ {
+ // Bucket by game first
+ datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None);
+
+ // Create a new set of mappings based on the items
+ var machineMap = new Dictionary<string, Dictionary<MachineField, string>>();
+ var datItemMap = new Dictionary<string, Dictionary<DatItemField, string>>();
+
+ // Loop through each of the extras
+ foreach (ExtraIniItem item in Items)
+ {
+ foreach (var mapping in item.Mappings)
+ {
+ string key = mapping.Key;
+ List<string> machineNames = mapping.Value;
+
+ // Loop through the machines and add the new mappings
+ foreach (string machine in machineNames)
+ {
+ if (item.MachineField != MachineField.NULL)
+ {
+ if (!machineMap.ContainsKey(machine))
+ machineMap[machine] = new Dictionary<MachineField, string>();
+
+ machineMap[machine][item.MachineField] = key;
+ }
+ else if (item.DatItemField != DatItemField.NULL)
+ {
+ if (!datItemMap.ContainsKey(machine))
+ datItemMap[machine] = new Dictionary<DatItemField, string>();
+
+ datItemMap[machine][item.DatItemField] = key;
+ }
+ }
+ }
+ }
+
+ // Now apply the new set of Machine mappings
+ foreach (string key in machineMap.Keys)
+ {
+ // If the key doesn't exist, continue
+ if (!datFile.Items.ContainsKey(key))
+ continue;
+
+ List<DatItem> datItems = datFile.Items[key];
+ var mappings = machineMap[key];
+
+ foreach (var datItem in datItems)
+ {
+ DatItemTool.SetFields(datItem.Machine, mappings);
+ }
+ }
+
+ // Now apply the new set of DatItem mappings
+ foreach (string key in datItemMap.Keys)
+ {
+ // If the key doesn't exist, continue
+ if (!datFile.Items.ContainsKey(key))
+ continue;
+
+ List<DatItem> datItems = datFile.Items[key];
+ var mappings = datItemMap[key];
+
+ foreach (var datItem in datItems)
+ {
+ DatItemTool.SetFields(datItem, mappings, null);
+ }
+ }
+ }
+ catch (Exception ex) when (!throwOnError)
+ {
+ logger.Error(ex);
+ return false;
+ }
+
+ return true;
+ }
+
+ #endregion
}
}
diff --git a/SabreTools.Filtering/MachineFilter.cs b/SabreTools.Filtering/MachineFilter.cs
index 99f452fc..c71f27f5 100644
--- a/SabreTools.Filtering/MachineFilter.cs
+++ b/SabreTools.Filtering/MachineFilter.cs
@@ -112,7 +112,7 @@ namespace SabreTools.Filtering
#endregion
- #region Filter Population
+ #region Population
///
/// Set multiple filters from key
@@ -323,7 +323,7 @@ namespace SabreTools.Filtering
#endregion
- #region Filter Running
+ #region Running
///
/// Check to see if a Machine passes the filters
diff --git a/SabreTools.DatTools/Modification.cs b/SabreTools.Filtering/Splitter.cs
similarity index 54%
rename from SabreTools.DatTools/Modification.cs
rename to SabreTools.Filtering/Splitter.cs
index 5c2c2082..c5cecbf6 100644
--- a/SabreTools.DatTools/Modification.cs
+++ b/SabreTools.Filtering/Splitter.cs
@@ -1,25 +1,25 @@
-using System;
-using System.Collections.Concurrent;
+using System;
using System.Collections.Generic;
-using System.IO;
using System.Linq;
-using System.Text.RegularExpressions;
-using System.Threading.Tasks;
using SabreTools.Core;
using SabreTools.DatFiles;
using SabreTools.DatItems;
-using SabreTools.Filtering;
-using SabreTools.IO;
using SabreTools.Logging;
-namespace SabreTools.DatTools
+namespace SabreTools.Filtering
{
- ///
- /// Helper methods for cleaning and filtering DatFiles
- ///
- public class Modification
+ public class Splitter
{
+ #region Fields
+
+ ///
+ /// Splitting mode to apply
+ ///
+ public MergingFlag SplitType { get; set; }
+
+ #endregion
+
#region Logging
///
@@ -28,548 +28,72 @@ namespace SabreTools.DatTools
private static readonly Logger logger = new Logger();
#endregion
-
- ///
- /// Apply cleaning methods to the DatFile
- ///
- /// Current DatFile object to run operations on
- /// Cleaner to use
- /// True if the error that is thrown should be thrown back to the caller, false otherwise
- /// True if cleaning was successful, false on error
- public static bool ApplyCleaning(DatFile datFile, Cleaner cleaner, bool throwOnError = false)
- {
- try
- {
- // Perform item-level cleaning
- CleanDatItems(datFile, cleaner);
-
- // Bucket and dedupe according to the flag
- if (cleaner?.DedupeRoms == DedupeType.Full)
- datFile.Items.BucketBy(ItemKey.CRC, cleaner.DedupeRoms);
- else if (cleaner?.DedupeRoms == DedupeType.Game)
- datFile.Items.BucketBy(ItemKey.Machine, cleaner.DedupeRoms);
-
- // Process description to machine name
- if (cleaner?.DescriptionAsName == true)
- MachineDescriptionToName(datFile);
-
- // If we are removing scene dates, do that now
- if (cleaner?.SceneDateStrip == true)
- StripSceneDatesFromItems(datFile);
-
- // Run the one rom per game logic, if required
- if (cleaner?.OneGamePerRegion == true)
- OneGamePerRegion(datFile, cleaner.RegionList);
-
- // Run the one rom per game logic, if required
- if (cleaner?.OneRomPerGame == true)
- OneRomPerGame(datFile);
-
- // If we are removing fields, do that now
- cleaner.RemoveFieldsFromItems(datFile);
-
- // Remove all marked items
- datFile.Items.ClearMarked();
-
- // We remove any blanks, if we aren't supposed to have any
- if (cleaner?.KeepEmptyGames == false)
- datFile.Items.ClearEmpty();
- }
- catch (Exception ex) when (!throwOnError)
- {
- logger.Error(ex);
- return false;
- }
-
- return true;
- }
-
- ///
- /// Apply a set of Extra INIs on the DatFile
- ///
- /// Current DatFile object to run operations on
- /// ExtrasIni to use
- /// True if the error that is thrown should be thrown back to the caller, false otherwise
- /// True if the extras were applied, false on error
- public static bool ApplyExtras(DatFile datFile, ExtraIni extras, bool throwOnError = false)
- {
- try
- {
- // Bucket by game first
- datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None);
-
- // Create a new set of mappings based on the items
- var machineMap = new Dictionary>();
- var datItemMap = new Dictionary>();
-
- // Loop through each of the extras
- foreach (ExtraIniItem item in extras.Items)
- {
- foreach (var mapping in item.Mappings)
- {
- string key = mapping.Key;
- List machineNames = mapping.Value;
-
- // Loop through the machines and add the new mappings
- foreach (string machine in machineNames)
- {
- if (item.MachineField != MachineField.NULL)
- {
- if (!machineMap.ContainsKey(machine))
- machineMap[machine] = new Dictionary();
-
- machineMap[machine][item.MachineField] = key;
- }
- else if (item.DatItemField != DatItemField.NULL)
- {
- if (!datItemMap.ContainsKey(machine))
- datItemMap[machine] = new Dictionary();
-
- datItemMap[machine][item.DatItemField] = key;
- }
- }
- }
- }
-
- // Now apply the new set of Machine mappings
- foreach (string key in machineMap.Keys)
- {
- // If the key doesn't exist, continue
- if (!datFile.Items.ContainsKey(key))
- continue;
-
- List datItems = datFile.Items[key];
- var mappings = machineMap[key];
-
- foreach (var datItem in datItems)
- {
- DatItemTool.SetFields(datItem.Machine, mappings);
- }
- }
-
- // Now apply the new set of DatItem mappings
- foreach (string key in datItemMap.Keys)
- {
- // If the key doesn't exist, continue
- if (!datFile.Items.ContainsKey(key))
- continue;
-
- List datItems = datFile.Items[key];
- var mappings = datItemMap[key];
-
- foreach (var datItem in datItems)
- {
- DatItemTool.SetFields(datItem, mappings, null);
- }
- }
- }
- catch (Exception ex) when (!throwOnError)
- {
- logger.Error(ex);
- return false;
- }
-
- return true;
- }
-
- ///
- /// Apply a set of Filters on the DatFile
- ///
- /// Current DatFile object to run operations on
- /// Cleaner to use
- /// True if entire machines are considered, false otherwise (default)
- /// True if the error that is thrown should be thrown back to the caller, false otherwise
- /// True if the DatFile was filtered, false on error
- public static bool ApplyFilters(DatFile datFile, Cleaner cleaner, bool perMachine = false, bool throwOnError = false)
- {
- // If we have a null cleaner or filters, return false
- if (cleaner == null || cleaner.MachineFilter == null || cleaner.DatItemFilter == null)
- return false;
-
- // If we're filtering per machine, bucket by machine first
- if (perMachine)
- datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None);
-
- try
- {
- // Loop over every key in the dictionary
- List keys = datFile.Items.Keys.ToList();
- foreach (string key in keys)
- {
- // For every item in the current key
- bool machinePass = true;
- List items = datFile.Items[key];
- foreach (DatItem item in items)
- {
- // If we have a null item, we can't pass it
- if (item == null)
- continue;
-
- // If the item is already filtered out, we skip
- if (item.Remove)
- continue;
-
- // If the rom doesn't pass the filter, mark for removal
- if (!cleaner.PassesFilters(item))
- {
- item.Remove = true;
-
- // If we're in machine mode, set and break
- if (perMachine)
- {
- machinePass = false;
- break;
- }
- }
- }
-
- // If we didn't pass and we're in machine mode, set all items as remove
- if (perMachine && !machinePass)
- {
- foreach (DatItem item in items)
- {
- item.Remove = true;
- }
- }
-
- // Assign back for caution
- datFile.Items[key] = items;
- }
- }
- catch (Exception ex) when (!throwOnError)
- {
- logger.Error(ex);
- return false;
- }
-
- return true;
- }
-
- ///
- /// Apply splitting on the DatFile
- ///
- /// Current DatFile object to run operations on
- /// Split type to try
- /// True if DatFile tags override splitting, false otherwise
- /// True if the error that is thrown should be thrown back to the caller, false otherwise
- /// True if the DatFile was split, false on error
- public static bool ApplySplitting(DatFile datFile, MergingFlag splitType, bool useTags, bool throwOnError = false)
- {
- try
- {
- // If we are using tags from the DAT, set the proper input for split type unless overridden
- if (useTags && splitType == MergingFlag.None)
- splitType = datFile.Header.ForceMerging;
-
- // Run internal splitting
- switch (splitType)
- {
- case MergingFlag.None:
- // No-op
- break;
- case MergingFlag.Device:
- CreateDeviceNonMergedSets(datFile, DedupeType.None);
- break;
- case MergingFlag.Full:
- CreateFullyNonMergedSets(datFile, DedupeType.None);
- break;
- case MergingFlag.NonMerged:
- CreateNonMergedSets(datFile, DedupeType.None);
- break;
- case MergingFlag.Merged:
- CreateMergedSets(datFile, DedupeType.None);
- break;
- case MergingFlag.Split:
- CreateSplitSets(datFile, DedupeType.None);
- break;
- }
- }
- catch (Exception ex) when (!throwOnError)
- {
- logger.Error(ex);
- return false;
- }
-
- return true;
- }
-
- ///
- /// Apply SuperDAT naming logic to a merged DatFile
- ///
- /// Current DatFile object to run operations on
- /// List of inputs to use for renaming
- public static void ApplySuperDAT(DatFile datFile, List inputs)
- {
- List keys = datFile.Items.Keys.ToList();
- Parallel.ForEach(keys, Globals.ParallelOptions, key =>
- {
- List items = datFile.Items[key].ToList();
- List newItems = new List();
- foreach (DatItem item in items)
- {
- DatItem newItem = item;
- string filename = inputs[newItem.Source.Index].CurrentPath;
- string rootpath = inputs[newItem.Source.Index].ParentPath;
-
- if (!string.IsNullOrWhiteSpace(rootpath))
- rootpath += Path.DirectorySeparatorChar.ToString();
-
- filename = filename.Remove(0, rootpath.Length);
- newItem.Machine.Name = Path.GetDirectoryName(filename) + Path.DirectorySeparatorChar
- + Path.GetFileNameWithoutExtension(filename) + Path.DirectorySeparatorChar
- + newItem.Machine.Name;
-
- newItems.Add(newItem);
- }
-
- datFile.Items.Remove(key);
- datFile.Items.AddRange(key, newItems);
- });
- }
-
- ///
- /// Use game descriptions as names in the DAT, updating cloneof/romof/sampleof
- ///
- /// Current DatFile object to run operations on
- /// True if the error that is thrown should be thrown back to the caller, false otherwise
- public static void MachineDescriptionToName(DatFile datFile, bool throwOnError = false)
- {
- try
- {
- // First we want to get a mapping for all games to description
- ConcurrentDictionary mapping = new ConcurrentDictionary();
- Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
- {
- List items = datFile.Items[key];
- foreach (DatItem item in items)
- {
- // If the key mapping doesn't exist, add it
- mapping.TryAdd(item.Machine.Name, item.Machine.Description.Replace('/', '_').Replace("\"", "''").Replace(":", " -"));
- }
- });
-
- // Now we loop through every item and update accordingly
- Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
- {
- List items = datFile.Items[key];
- List newItems = new List();
- foreach (DatItem item in items)
- {
- // Update machine name
- if (!string.IsNullOrWhiteSpace(item.Machine.Name) && mapping.ContainsKey(item.Machine.Name))
- item.Machine.Name = mapping[item.Machine.Name];
-
- // Update cloneof
- if (!string.IsNullOrWhiteSpace(item.Machine.CloneOf) && mapping.ContainsKey(item.Machine.CloneOf))
- item.Machine.CloneOf = mapping[item.Machine.CloneOf];
-
- // Update romof
- if (!string.IsNullOrWhiteSpace(item.Machine.RomOf) && mapping.ContainsKey(item.Machine.RomOf))
- item.Machine.RomOf = mapping[item.Machine.RomOf];
-
- // Update sampleof
- if (!string.IsNullOrWhiteSpace(item.Machine.SampleOf) && mapping.ContainsKey(item.Machine.SampleOf))
- item.Machine.SampleOf = mapping[item.Machine.SampleOf];
-
- // Add the new item to the output list
- newItems.Add(item);
- }
-
- // Replace the old list of roms with the new one
- datFile.Items.Remove(key);
- datFile.Items.AddRange(key, newItems);
- });
- }
- catch (Exception ex) when (!throwOnError)
- {
- logger.Warning(ex.ToString());
- }
- }
-
- ///
- /// Filter a DAT using 1G1R logic given an ordered set of regions
- ///
- /// Current DatFile object to run operations on
- /// Ordered list of regions to use
- ///
- /// In the most technical sense, the way that the region list is being used does not
- /// confine its values to be just regions. Since it's essentially acting like a
- /// specialized version of the machine name filter, anything that is usually encapsulated
- /// in parenthesis would be matched on, including disc numbers, languages, editions,
- /// and anything else commonly used. Please note that, unlike other existing 1G1R
- /// solutions, this does not have the ability to contain custom mappings of parent
- /// to clone sets based on name, nor does it have the ability to match on the
- /// Release DatItem type.
- ///
- public static void OneGamePerRegion(DatFile datFile, List regions)
- {
- // If we have null region list, make it empty
- if (regions == null)
- regions = new List();
-
- // For sake of ease, the first thing we want to do is bucket by game
- datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None, norename: true);
-
- // Then we want to get a mapping of all machines to parents
- Dictionary> parents = new Dictionary>();
- foreach (string key in datFile.Items.Keys)
- {
- DatItem item = datFile.Items[key][0];
-
- // Match on CloneOf first
- if (!string.IsNullOrEmpty(item.Machine.CloneOf))
- {
- if (!parents.ContainsKey(item.Machine.CloneOf.ToLowerInvariant()))
- parents.Add(item.Machine.CloneOf.ToLowerInvariant(), new List());
-
- parents[item.Machine.CloneOf.ToLowerInvariant()].Add(item.Machine.Name.ToLowerInvariant());
- }
-
- // Then by RomOf
- else if (!string.IsNullOrEmpty(item.Machine.RomOf))
- {
- if (!parents.ContainsKey(item.Machine.RomOf.ToLowerInvariant()))
- parents.Add(item.Machine.RomOf.ToLowerInvariant(), new List());
-
- parents[item.Machine.RomOf.ToLowerInvariant()].Add(item.Machine.Name.ToLowerInvariant());
- }
-
- // Otherwise, treat it as a parent
- else
- {
- if (!parents.ContainsKey(item.Machine.Name.ToLowerInvariant()))
- parents.Add(item.Machine.Name.ToLowerInvariant(), new List());
-
- parents[item.Machine.Name.ToLowerInvariant()].Add(item.Machine.Name.ToLowerInvariant());
- }
- }
-
- // Once we have the full list of mappings, filter out games to keep
- foreach (string key in parents.Keys)
- {
- // Find the first machine that matches the regions in order, if possible
- string machine = default;
- foreach (string region in regions)
- {
- machine = parents[key].FirstOrDefault(m => Regex.IsMatch(m, @"\(.*" + region + @".*\)", RegexOptions.IgnoreCase));
- if (machine != default)
- break;
- }
-
- // If we didn't get a match, use the parent
- if (machine == default)
- machine = key;
-
- // Remove the key from the list
- parents[key].Remove(machine);
-
- // Remove the rest of the items from this key
- parents[key].ForEach(k => datFile.Items.Remove(k));
- }
-
- // Finally, strip out the parent tags
- RemoveTagsFromChild(datFile);
- }
-
- ///
- /// Ensure that all roms are in their own game (or at least try to ensure)
- ///
- /// Current DatFile object to run operations on
- public static void OneRomPerGame(DatFile datFile)
- {
- // Because this introduces subfolders, we need to set the SuperDAT type
- datFile.Header.Type = "SuperDAT";
-
- // For each rom, we want to update the game to be "/"
- Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
- {
- List items = datFile.Items[key];
- for (int i = 0; i < items.Count; i++)
- {
- DatItemTool.SetOneRomPerGame(items[i]);
- }
- });
- }
-
- ///
- /// Strip the dates from the beginning of scene-style set names
- ///
- /// Current DatFile object to run operations on
- public static void StripSceneDatesFromItems(DatFile datFile)
- {
- // Output the logging statement
- logger.User("Stripping scene-style dates");
-
- // Set the regex pattern to use
- string pattern = @"([0-9]{2}\.[0-9]{2}\.[0-9]{2}-)(.*?-.*?)";
-
- // Now process all of the roms
- Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
- {
- List items = datFile.Items[key];
- for (int j = 0; j < items.Count; j++)
- {
- DatItem item = items[j];
- if (Regex.IsMatch(item.Machine.Name, pattern))
- item.Machine.Name = Regex.Replace(item.Machine.Name, pattern, "$2");
-
- if (Regex.IsMatch(item.Machine.Description, pattern))
- item.Machine.Description = Regex.Replace(item.Machine.Description, pattern, "$2");
-
- items[j] = item;
- }
-
- datFile.Items.Remove(key);
- datFile.Items.AddRange(key, items);
- });
- }
- ///
- /// Clean individual items based on the current filter
- ///
- /// Current DatFile object to run operations on
- /// Cleaner to use
- private static void CleanDatItems(DatFile datFile, Cleaner cleaner)
- {
- List keys = datFile.Items.Keys.ToList();
- foreach (string key in keys)
- {
- // For every item in the current key
- List items = datFile.Items[key];
- foreach (DatItem item in items)
- {
- // If we have a null item, we can't clean it it
- if (item == null)
- continue;
-
- // Run cleaning per item
- cleaner.CleanDatItem(item);
- }
-
- // Assign back for caution
- datFile.Items[key] = items;
- }
- }
-
// TODO: Should any of these create a new DatFile in the process?
// The reason this comes up is that doing any of the splits or merges
// is an inherently destructive process. Making it output a new DatFile
// might make it easier to deal with multiple internal steps. On the other
// hand, this will increase memory usage significantly and would force the
// existing paths to behave entirely differently
- #region Internal Splitting/Merging
+ #region Running
+
+ ///
+ /// Apply splitting on the DatFile
+ ///
+ /// Current DatFile object to run operations on
+ /// True if DatFile tags override splitting, false otherwise
+ /// True if the error that is thrown should be thrown back to the caller, false otherwise
+ /// True if the DatFile was split, false on error
+ public bool ApplySplitting(DatFile datFile, bool useTags, bool throwOnError = false)
+ {
+ try
+ {
+ // If we are using tags from the DAT, set the proper input for split type unless overridden
+ if (useTags && SplitType == MergingFlag.None)
+ SplitType = datFile.Header.ForceMerging;
+
+ // Run internal splitting
+ switch (SplitType)
+ {
+ case MergingFlag.None:
+ // No-op
+ break;
+ case MergingFlag.Device:
+ CreateDeviceNonMergedSets(datFile);
+ break;
+ case MergingFlag.Full:
+ CreateFullyNonMergedSets(datFile);
+ break;
+ case MergingFlag.NonMerged:
+ CreateNonMergedSets(datFile);
+ break;
+ case MergingFlag.Merged:
+ CreateMergedSets(datFile);
+ break;
+ case MergingFlag.Split:
+ CreateSplitSets(datFile);
+ break;
+ }
+ }
+ catch (Exception ex) when (!throwOnError)
+ {
+ logger.Error(ex);
+ return false;
+ }
+
+ return true;
+ }
///
/// Use cdevice_ref tags to get full non-merged sets and remove parenting tags
///
/// Current DatFile object to run operations on
- /// Dedupe type to be used
- private static void CreateDeviceNonMergedSets(DatFile datFile, DedupeType mergeroms)
+ internal static void CreateDeviceNonMergedSets(DatFile datFile)
{
logger.User("Creating device non-merged sets from the DAT");
// For sake of ease, the first thing we want to do is bucket by game
- datFile.Items.BucketBy(ItemKey.Machine, mergeroms, norename: true);
+ datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None, norename: true);
// Now we want to loop through all of the games and set the correct information
while (AddRomsFromDevices(datFile, false, false)) ;
@@ -583,13 +107,12 @@ namespace SabreTools.DatTools
/// Use cloneof tags to create non-merged sets and remove the tags plus using the device_ref tags to get full sets
///
/// Current DatFile object to run operations on
- /// Dedupe type to be used
- private static void CreateFullyNonMergedSets(DatFile datFile, DedupeType mergeroms)
+ internal static void CreateFullyNonMergedSets(DatFile datFile)
{
logger.User("Creating fully non-merged sets from the DAT");
// For sake of ease, the first thing we want to do is bucket by game
- datFile.Items.BucketBy(ItemKey.Machine, mergeroms, norename: true);
+ datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None, norename: true);
// Now we want to loop through all of the games and set the correct information
while (AddRomsFromDevices(datFile, true, true)) ;
@@ -607,13 +130,12 @@ namespace SabreTools.DatTools
/// Use cloneof tags to create merged sets and remove the tags
///
/// Current DatFile object to run operations on
- /// Dedupe type to be used
- private static void CreateMergedSets(DatFile datFile, DedupeType mergeroms)
+ internal static void CreateMergedSets(DatFile datFile)
{
logger.User("Creating merged sets from the DAT");
// For sake of ease, the first thing we want to do is bucket by game
- datFile.Items.BucketBy(ItemKey.Machine, mergeroms, norename: true);
+ datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None, norename: true);
// Now we want to loop through all of the games and set the correct information
AddRomsFromChildren(datFile);
@@ -630,13 +152,12 @@ namespace SabreTools.DatTools
/// Use cloneof tags to create non-merged sets and remove the tags
///
/// Current DatFile object to run operations on
- /// Dedupe type to be used
- private static void CreateNonMergedSets(DatFile datFile, DedupeType mergeroms)
+ internal static void CreateNonMergedSets(DatFile datFile)
{
logger.User("Creating non-merged sets from the DAT");
// For sake of ease, the first thing we want to do is bucket by game
- datFile.Items.BucketBy(ItemKey.Machine, mergeroms, norename: true);
+ datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None, norename: true);
// Now we want to loop through all of the games and set the correct information
AddRomsFromParent(datFile);
@@ -653,13 +174,12 @@ namespace SabreTools.DatTools
/// Use cloneof and romof tags to create split sets and remove the tags
///
/// Current DatFile object to run operations on
- /// Dedupe type to be used
- private static void CreateSplitSets(DatFile datFile, DedupeType mergeroms)
+ internal static void CreateSplitSets(DatFile datFile)
{
logger.User("Creating split sets from the DAT");
// For sake of ease, the first thing we want to do is bucket by game
- datFile.Items.BucketBy(ItemKey.Machine, mergeroms, norename: true);
+ datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None, norename: true);
// Now we want to loop through all of the games and set the correct information
RemoveRomsFromChild(datFile);
@@ -676,7 +196,7 @@ namespace SabreTools.DatTools
/// Use romof tags to add roms to the children
///
/// Current DatFile object to run operations on
- private static void AddRomsFromBios(DatFile datFile)
+ internal static void AddRomsFromBios(DatFile datFile)
{
List games = datFile.Items.Keys.OrderBy(g => g).ToList();
foreach (string game in games)
@@ -717,7 +237,7 @@ namespace SabreTools.DatTools
/// Current DatFile object to run operations on
/// True if only child device sets are touched, false for non-device sets (default)
/// True if slotoptions tags are used as well, false otherwise
- private static bool AddRomsFromDevices(DatFile datFile, bool dev = false, bool useSlotOptions = false)
+ internal static bool AddRomsFromDevices(DatFile datFile, bool dev = false, bool useSlotOptions = false)
{
bool foundnew = false;
List machines = datFile.Items.Keys.OrderBy(g => g).ToList();
@@ -845,7 +365,7 @@ namespace SabreTools.DatTools
/// Use cloneof tags to add roms to the children, setting the new romof tag in the process
///
/// Current DatFile object to run operations on
- private static void AddRomsFromParent(DatFile datFile)
+ internal static void AddRomsFromParent(DatFile datFile)
{
List games = datFile.Items.Keys.OrderBy(g => g).ToList();
foreach (string game in games)
@@ -896,7 +416,7 @@ namespace SabreTools.DatTools
///
/// Current DatFile object to run operations on
/// True to add DatItems to subfolder of parent (not including Disk), false otherwise
- private static void AddRomsFromChildren(DatFile datFile, bool subfolder = true)
+ internal static void AddRomsFromChildren(DatFile datFile, bool subfolder = true)
{
List games = datFile.Items.Keys.OrderBy(g => g).ToList();
foreach (string game in games)
@@ -1008,7 +528,7 @@ namespace SabreTools.DatTools
/// Remove all BIOS and device sets
///
/// Current DatFile object to run operations on
- private static void RemoveBiosAndDeviceSets(DatFile datFile)
+ internal static void RemoveBiosAndDeviceSets(DatFile datFile)
{
List games = datFile.Items.Keys.OrderBy(g => g).ToList();
foreach (string game in games)
@@ -1027,7 +547,7 @@ namespace SabreTools.DatTools
///
/// Current DatFile object to run operations on
/// True if only child Bios sets are touched, false for non-bios sets (default)
- private static void RemoveBiosRomsFromChild(DatFile datFile, bool bios = false)
+ internal static void RemoveBiosRomsFromChild(DatFile datFile, bool bios = false)
{
// Loop through the romof tags
List games = datFile.Items.Keys.OrderBy(g => g).ToList();
@@ -1071,7 +591,7 @@ namespace SabreTools.DatTools
/// Use cloneof tags to remove roms from the children
///
/// Current DatFile object to run operations on
- private static void RemoveRomsFromChild(DatFile datFile)
+ internal static void RemoveRomsFromChild(DatFile datFile)
{
List games = datFile.Items.Keys.OrderBy(g => g).ToList();
foreach (string game in games)
@@ -1118,7 +638,7 @@ namespace SabreTools.DatTools
/// Remove all romof and cloneof tags from all games
///
/// Current DatFile object to run operations on
- private static void RemoveTagsFromChild(DatFile datFile)
+ internal static void RemoveTagsFromChild(DatFile datFile)
{
List games = datFile.Items.Keys.OrderBy(g => g).ToList();
foreach (string game in games)
@@ -1135,4 +655,4 @@ namespace SabreTools.DatTools
#endregion
}
-}
\ No newline at end of file
+}
diff --git a/SabreTools/Features/BaseFeature.cs b/SabreTools/Features/BaseFeature.cs
index 5f682b20..f06b8a3b 100644
--- a/SabreTools/Features/BaseFeature.cs
+++ b/SabreTools/Features/BaseFeature.cs
@@ -1735,6 +1735,11 @@ Some special strings that can be used:
///
protected string OutputDir { get; set; }
+ ///
+ /// Pre-configured Splitter
+ ///
+ protected Filtering.Splitter Splitter { get; set; }
+
#endregion
#region Add Feature Groups
@@ -1801,6 +1806,7 @@ Some special strings that can be used:
Extras = GetExtras(features);
Header = GetDatHeader(features);
OutputDir = GetString(features, OutputDirStringValue);
+ Splitter = GetSplitter(features);
// Set threading flag, if necessary
if (features.ContainsKey(ThreadsInt32Value))
@@ -1895,26 +1901,6 @@ Some special strings that can be used:
return splittingMode;
}
- ///
- /// Get SplitType from feature list
- ///
- protected MergingFlag GetSplitType(Dictionary features)
- {
- MergingFlag splitType = MergingFlag.None;
- if (GetBoolean(features, DatDeviceNonMergedValue))
- splitType = MergingFlag.Device;
- else if (GetBoolean(features, DatFullNonMergedValue))
- splitType = MergingFlag.Full;
- else if (GetBoolean(features, DatMergedValue))
- splitType = MergingFlag.Merged;
- else if (GetBoolean(features, DatNonMergedValue))
- splitType = MergingFlag.NonMerged;
- else if (GetBoolean(features, DatSplitValue))
- splitType = MergingFlag.Split;
-
- return splitType;
- }
-
///
/// Get StatReportFormat from feature list
///
@@ -2135,6 +2121,38 @@ Some special strings that can be used:
return extraIni;
}
+ ///
+ /// Get Splitter from feature list
+ ///
+ private Filtering.Splitter GetSplitter(Dictionary features)
+ {
+ Filtering.Splitter splitter = new Filtering.Splitter
+ {
+ SplitType = GetSplitType(features),
+ };
+ return splitter;
+ }
+
+ ///
+ /// Get SplitType from feature list
+ ///
+ private MergingFlag GetSplitType(Dictionary features)
+ {
+ MergingFlag splitType = MergingFlag.None;
+ if (GetBoolean(features, DatDeviceNonMergedValue))
+ splitType = MergingFlag.Device;
+ else if (GetBoolean(features, DatFullNonMergedValue))
+ splitType = MergingFlag.Full;
+ else if (GetBoolean(features, DatMergedValue))
+ splitType = MergingFlag.Merged;
+ else if (GetBoolean(features, DatNonMergedValue))
+ splitType = MergingFlag.NonMerged;
+ else if (GetBoolean(features, DatSplitValue))
+ splitType = MergingFlag.Split;
+
+ return splitType;
+ }
+
#endregion
#region Protected Helpers
diff --git a/SabreTools/Features/Batch.cs b/SabreTools/Features/Batch.cs
index ebc14cbd..4be2eed3 100644
--- a/SabreTools/Features/Batch.cs
+++ b/SabreTools/Features/Batch.cs
@@ -168,7 +168,7 @@ Reset the internal state: reset();";
"DatItem.SpamSum",
"DatItem.Date",
});
- Modification.ApplyCleaning(datFile, dfdCleaner);
+ dfdCleaner.ApplyCleaning(datFile);
break;
@@ -213,18 +213,18 @@ Reset the internal state: reset();";
}
// Create cleaner to run filters from
- Cleaner cleaner = new Cleaner
+ Cleaner filterCleaner = new Cleaner
{
MachineFilter = new MachineFilter(),
DatItemFilter = new DatItemFilter(),
};
// Set the possible filters
- cleaner.MachineFilter.SetFilter(filterMachineField, filterValue, filterRemove.Value);
- cleaner.DatItemFilter.SetFilter(filterDatItemField, filterValue, filterRemove.Value);
+ filterCleaner.MachineFilter.SetFilter(filterMachineField, filterValue, filterRemove.Value);
+ filterCleaner.DatItemFilter.SetFilter(filterDatItemField, filterValue, filterRemove.Value);
// Apply the filters blindly
- Modification.ApplyFilters(datFile, cleaner, filterPerMachine.Value);
+ filterCleaner.ApplyFilters(datFile, filterPerMachine.Value);
// Cleanup after the filter
// TODO: We might not want to remove immediately
@@ -269,7 +269,7 @@ Reset the internal state: reset();";
extraIni.Items.Add(extraIniItem);
// Apply the extra INI blindly
- Modification.ApplyExtras(datFile, extraIni);
+ extraIni.ApplyExtras(datFile);
break;
@@ -293,7 +293,8 @@ Reset the internal state: reset();";
}
// Apply the merging flag
- Modification.ApplySplitting(datFile, mergingFlag, false);
+ Filtering.Splitter splitter = new Filtering.Splitter { SplitType = mergingFlag };
+ splitter.ApplySplitting(datFile, false);
break;
@@ -307,7 +308,8 @@ Reset the internal state: reset();";
}
// Apply the logic
- Modification.MachineDescriptionToName(datFile);
+ Cleaner descNameCleaner = new Cleaner { DescriptionAsName = true };
+ descNameCleaner.ApplyCleaning(datFile);
break;
@@ -321,7 +323,8 @@ Reset the internal state: reset();";
}
// Run the 1G1R functionality
- Modification.OneGamePerRegion(datFile, command.Arguments);
+ Cleaner ogorCleaner = new Cleaner { OneGamePerRegion = true, RegionList = command.Arguments };
+ ogorCleaner.ApplyCleaning(datFile);
break;
@@ -335,7 +338,8 @@ Reset the internal state: reset();";
}
// Apply the logic
- Modification.OneRomPerGame(datFile);
+ Cleaner orpgCleaner = new Cleaner { OneRomPerGame = true };
+ orpgCleaner.ApplyCleaning(datFile);
break;
@@ -365,7 +369,8 @@ Reset the internal state: reset();";
}
// Apply the logic
- Modification.StripSceneDatesFromItems(datFile);
+ Cleaner stripCleaner = new Cleaner { SceneDateStrip = true };
+ stripCleaner.ApplyCleaning(datFile);
break;
diff --git a/SabreTools/Features/DatFromDir.cs b/SabreTools/Features/DatFromDir.cs
index ab6b7a6b..538788e6 100644
--- a/SabreTools/Features/DatFromDir.cs
+++ b/SabreTools/Features/DatFromDir.cs
@@ -2,7 +2,6 @@
using System.Collections.Generic;
using System.IO;
-using SabreTools.Core;
using SabreTools.DatFiles;
using SabreTools.DatTools;
using SabreTools.FileTypes;
@@ -62,10 +61,10 @@ namespace SabreTools.Features
bool noAutomaticDate = GetBoolean(features, NoAutomaticDateValue);
var includeInScan = GetIncludeInScan(features);
var skipFileType = GetSkipFileType(features);
- var splitType = GetSplitType(features);
// Apply the specialized field removals to the cleaner
- Cleaner.PopulateExclusionsFromList(new List { "DatItem.Date" });
+ if (!addFileDates)
+ Cleaner.PopulateExclusionsFromList(new List { "DatItem.Date" });
// Create a new DATFromDir object and process the inputs
DatFile basedat = DatFile.Create(Header);
@@ -95,10 +94,10 @@ namespace SabreTools.Features
if (success)
{
// Perform additional processing steps
- Modification.ApplyExtras(datdata, Extras);
- Modification.ApplySplitting(datdata, splitType, false);
- Modification.ApplyFilters(datdata, Cleaner);
- Modification.ApplyCleaning(datdata, Cleaner);
+ Extras.ApplyExtras(datdata);
+ Splitter.ApplySplitting(datdata, false);
+ Cleaner.ApplyFilters(datdata);
+ Cleaner.ApplyCleaning(datdata);
// Write out the file
Writer.Write(datdata, OutputDir);
diff --git a/SabreTools/Features/Split.cs b/SabreTools/Features/Split.cs
index 1d67fc29..6be4aefe 100644
--- a/SabreTools/Features/Split.cs
+++ b/SabreTools/Features/Split.cs
@@ -65,7 +65,7 @@ namespace SabreTools.Features
// Extension splitting
if (splittingMode.HasFlag(SplittingMode.Extension))
{
- (DatFile extADat, DatFile extBDat) = Splitter.SplitByExtension(internalDat, GetList(features, ExtAListValue), GetList(features, ExtBListValue));
+ (DatFile extADat, DatFile extBDat) = DatTools.Splitter.SplitByExtension(internalDat, GetList(features, ExtAListValue), GetList(features, ExtBListValue));
InternalStopwatch watch = new InternalStopwatch("Outputting extension-split DATs");
@@ -79,7 +79,7 @@ namespace SabreTools.Features
// Hash splitting
if (splittingMode.HasFlag(SplittingMode.Hash))
{
- Dictionary typeDats = Splitter.SplitByHash(internalDat);
+ Dictionary typeDats = DatTools.Splitter.SplitByHash(internalDat);
InternalStopwatch watch = new InternalStopwatch("Outputting hash-split DATs");
@@ -96,7 +96,7 @@ namespace SabreTools.Features
if (splittingMode.HasFlag(SplittingMode.Level))
{
logger.Warning("This feature is not implemented: level-split");
- Splitter.SplitByLevel(
+ DatTools.Splitter.SplitByLevel(
internalDat,
OutputDir,
GetBoolean(features, ShortValue),
@@ -106,7 +106,7 @@ namespace SabreTools.Features
// Size splitting
if (splittingMode.HasFlag(SplittingMode.Size))
{
- (DatFile lessThan, DatFile greaterThan) = Splitter.SplitBySize(internalDat, GetInt64(features, RadixInt64Value));
+ (DatFile lessThan, DatFile greaterThan) = DatTools.Splitter.SplitBySize(internalDat, GetInt64(features, RadixInt64Value));
InternalStopwatch watch = new InternalStopwatch("Outputting size-split DATs");
@@ -120,7 +120,7 @@ namespace SabreTools.Features
// Type splitting
if (splittingMode.HasFlag(SplittingMode.Type))
{
- Dictionary typeDats = Splitter.SplitByType(internalDat);
+ Dictionary typeDats = DatTools.Splitter.SplitByType(internalDat);
InternalStopwatch watch = new InternalStopwatch("Outputting ItemType DATs");
diff --git a/SabreTools/Features/Update.cs b/SabreTools/Features/Update.cs
index bcd201e5..88df3ae7 100644
--- a/SabreTools/Features/Update.cs
+++ b/SabreTools/Features/Update.cs
@@ -156,10 +156,10 @@ namespace SabreTools.Features
|| datFile.Header.DatFormat.HasFlag(DatFormat.SSV));
// Perform additional processing steps
- Modification.ApplyExtras(datFile, Extras);
- Modification.ApplySplitting(datFile, GetSplitType(features), false);
- Modification.ApplyFilters(datFile, Cleaner);
- Modification.ApplyCleaning(datFile, Cleaner);
+ Extras.ApplyExtras(datFile);
+ Splitter.ApplySplitting(datFile, false);
+ Cleaner.ApplyFilters(datFile);
+ Cleaner.ApplyCleaning(datFile);
// Get the correct output path
string realOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
@@ -194,10 +194,10 @@ namespace SabreTools.Features
datHeaders = DatFileTool.PopulateUserData(userInputDat, inputPaths);
// Perform additional processing steps
- Modification.ApplyExtras(userInputDat, Extras);
- Modification.ApplySplitting(userInputDat, GetSplitType(features), false);
- Modification.ApplyFilters(userInputDat, Cleaner);
- Modification.ApplyCleaning(userInputDat, Cleaner);
+ Extras.ApplyExtras(userInputDat);
+ Splitter.ApplySplitting(userInputDat, false);
+ Cleaner.ApplyFilters(userInputDat);
+ Cleaner.ApplyCleaning(userInputDat);
// Output only DatItems that are duplicated across inputs
if (updateMode.HasFlag(UpdateMode.DiffDupesOnly))
@@ -286,10 +286,10 @@ namespace SabreTools.Features
Parser.ParseInto(repDat, inputPath, indexId: 1, keep: true);
// Perform additional processing steps
- Modification.ApplyExtras(repDat, Extras);
- Modification.ApplySplitting(repDat, GetSplitType(features), false);
- Modification.ApplyFilters(repDat, Cleaner);
- Modification.ApplyCleaning(repDat, Cleaner);
+ Extras.ApplyExtras(repDat);
+ Splitter.ApplySplitting(repDat, false);
+ Cleaner.ApplyFilters(repDat);
+ Cleaner.ApplyCleaning(repDat);
// Now replace the fields from the base DatFile
DatFileTool.DiffAgainst(userInputDat, repDat, GetBoolean(Features, ByGameValue));
@@ -311,10 +311,10 @@ namespace SabreTools.Features
Parser.ParseInto(repDat, inputPath, indexId: 1, keep: true);
// Perform additional processing steps
- Modification.ApplyExtras(repDat, Extras);
- Modification.ApplySplitting(repDat, GetSplitType(features), false);
- Modification.ApplyFilters(repDat, Cleaner);
- Modification.ApplyCleaning(repDat, Cleaner);
+ Extras.ApplyExtras(repDat);
+ Splitter.ApplySplitting(repDat, false);
+ Cleaner.ApplyFilters(repDat);
+ Cleaner.ApplyCleaning(repDat);
// Now replace the fields from the base DatFile
DatFileTool.BaseReplace(
@@ -336,7 +336,7 @@ namespace SabreTools.Features
{
// If we're in SuperDAT mode, prefix all games with their respective DATs
if (string.Equals(userInputDat.Header.Type, "SuperDAT", StringComparison.OrdinalIgnoreCase))
- Modification.ApplySuperDAT(userInputDat, inputPaths);
+ DatFileTool.ApplySuperDAT(userInputDat, inputPaths);
Writer.Write(userInputDat, OutputDir);
}
diff --git a/SabreTools/Features/Verify.cs b/SabreTools/Features/Verify.cs
index ea2ba55c..67390be2 100644
--- a/SabreTools/Features/Verify.cs
+++ b/SabreTools/Features/Verify.cs
@@ -50,7 +50,6 @@ namespace SabreTools.Features
TreatAsFile asFiles = GetTreatAsFiles(features);
bool hashOnly = GetBoolean(features, HashOnlyValue);
bool quickScan = GetBoolean(features, QuickValue);
- var splitType = GetSplitType(features);
// If we are in individual mode, process each DAT on their own
if (GetBoolean(features, IndividualValue))
@@ -62,10 +61,10 @@ namespace SabreTools.Features
Parser.ParseInto(datdata, datfile, int.MaxValue, keep: true);
// Perform additional processing steps
- Modification.ApplyExtras(datdata, Extras);
- Modification.ApplySplitting(datdata, splitType, true);
- Modification.ApplyFilters(datdata, Cleaner);
- Modification.ApplyCleaning(datdata, Cleaner);
+ Extras.ApplyExtras(datdata);
+ Splitter.ApplySplitting(datdata, true);
+ Cleaner.ApplyFilters(datdata);
+ Cleaner.ApplyCleaning(datdata);
// Set depot information
datdata.Header.InputDepot = Header.InputDepot.Clone() as DepotInformation;
@@ -109,10 +108,10 @@ namespace SabreTools.Features
}
// Perform additional processing steps
- Modification.ApplyExtras(datdata, Extras);
- Modification.ApplySplitting(datdata, splitType, true);
- Modification.ApplyFilters(datdata, Cleaner);
- Modification.ApplyCleaning(datdata, Cleaner);
+ Extras.ApplyExtras(datdata);
+ Splitter.ApplySplitting(datdata, true);
+ Cleaner.ApplyFilters(datdata);
+ Cleaner.ApplyCleaning(datdata);
// Set depot information
datdata.Header.InputDepot = Header.InputDepot.Clone() as DepotInformation;