diff --git a/SabreTools.DatFiles.Test/DatFileTests.FromMetadata.cs b/SabreTools.DatFiles.Test/DatFileTests.FromMetadata.cs
index 02a909e2..3f9f85db 100644
--- a/SabreTools.DatFiles.Test/DatFileTests.FromMetadata.cs
+++ b/SabreTools.DatFiles.Test/DatFileTests.FromMetadata.cs
@@ -16,7 +16,7 @@ namespace SabreTools.DatFiles.Test
Models.Metadata.MetadataFile? item = null;
DatFile datFile = new Formats.Logiqx(null, useGame: false);
- datFile.ConvertFromMetadata(item, "filename", indexId: 0, keep: true, statsOnly: false);
+ datFile.ConvertFromMetadata(item, "filename", indexId: 0, keep: true, statsOnly: false, filterRunner: null);
Assert.Equal(0, datFile.Items.DatStatistics.TotalCount);
Assert.Equal(0, datFile.ItemsDB.DatStatistics.TotalCount);
@@ -28,7 +28,7 @@ namespace SabreTools.DatFiles.Test
Models.Metadata.MetadataFile? item = new Models.Metadata.MetadataFile();
DatFile datFile = new Formats.Logiqx(null, useGame: false);
- datFile.ConvertFromMetadata(item, "filename", indexId: 0, keep: true, statsOnly: false);
+ datFile.ConvertFromMetadata(item, "filename", indexId: 0, keep: true, statsOnly: false, filterRunner: null);
Assert.Equal(0, datFile.Items.DatStatistics.TotalCount);
Assert.Equal(0, datFile.ItemsDB.DatStatistics.TotalCount);
@@ -46,7 +46,7 @@ namespace SabreTools.DatFiles.Test
};
DatFile datFile = new Formats.Logiqx(null, useGame: false);
- datFile.ConvertFromMetadata(item, "filename", indexId: 0, keep: true, statsOnly: false);
+ datFile.ConvertFromMetadata(item, "filename", indexId: 0, keep: true, statsOnly: false, filterRunner: null);
ValidateHeader(datFile.Header);
}
@@ -64,7 +64,7 @@ namespace SabreTools.DatFiles.Test
};
DatFile datFile = new Formats.Logiqx(null, useGame: false);
- datFile.ConvertFromMetadata(item, "filename", indexId: 0, keep: true, statsOnly: false);
+ datFile.ConvertFromMetadata(item, "filename", indexId: 0, keep: true, statsOnly: false, filterRunner: null);
// TODO: Reenable when ItemsDB is used again
// DatItems.Machine actualMachine = Assert.Single(datFile.ItemsDB.GetMachines()).Value;
diff --git a/SabreTools.DatFiles.Test/FormatsTests.cs b/SabreTools.DatFiles.Test/FormatsTests.cs
index 5a774cb2..1d1dec6d 100644
--- a/SabreTools.DatFiles.Test/FormatsTests.cs
+++ b/SabreTools.DatFiles.Test/FormatsTests.cs
@@ -1,5 +1,6 @@
using System;
using System.Linq;
+using SabreTools.Core.Filter;
using SabreTools.DatFiles.Formats;
using SabreTools.DatItems;
using SabreTools.DatItems.Formats;
diff --git a/SabreTools.DatFiles/DatFile.FromMetadata.cs b/SabreTools.DatFiles/DatFile.FromMetadata.cs
index 611e005b..9cec6329 100644
--- a/SabreTools.DatFiles/DatFile.FromMetadata.cs
+++ b/SabreTools.DatFiles/DatFile.FromMetadata.cs
@@ -1,4 +1,8 @@
+#if NET40_OR_GREATER || NETCOREAPP
+using System.Threading.Tasks;
+#endif
using SabreTools.Core;
+using SabreTools.Core.Filter;
using SabreTools.Core.Tools;
using SabreTools.DatItems;
using SabreTools.DatItems.Formats;
@@ -17,7 +21,13 @@ namespace SabreTools.DatFiles
/// Index ID for the DAT
/// True if full pathnames are to be kept, false otherwise
/// True to only add item statistics while parsing, false otherwise
- internal void ConvertFromMetadata(Models.Metadata.MetadataFile? item, string filename, int indexId, bool keep, bool statsOnly)
+ /// <param name="filterRunner">Optional FilterRunner to filter items on parse</param>
+ internal void ConvertFromMetadata(Models.Metadata.MetadataFile? item,
+ string filename,
+ int indexId,
+ bool keep,
+ bool statsOnly,
+ FilterRunner? filterRunner)
{
// If the metadata file is invalid, we can't do anything
if (item == null || item.Count == 0)
@@ -35,7 +45,7 @@ namespace SabreTools.DatFiles
// Get the machines from the metadata
var machines = item.ReadItemArray(Models.Metadata.MetadataFile.MachineKey);
if (machines != null)
- ConvertMachines(machines, source, sourceIndex: 0, statsOnly);
+ ConvertMachines(machines, source, sourceIndex: 0, statsOnly, filterRunner);
}
///
@@ -175,17 +185,32 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ConvertMachines(Models.Metadata.Machine[]? items, Source source, long sourceIndex, bool statsOnly)
+ /// <param name="filterRunner">Optional FilterRunner to filter items on parse</param>
+ private void ConvertMachines(Models.Metadata.Machine[]? items,
+ Source source,
+ long sourceIndex,
+ bool statsOnly,
+ FilterRunner? filterRunner)
{
// If the array is invalid, we can't do anything
if (items == null || items.Length == 0)
return;
// Loop through the machines and add
+#if NET452_OR_GREATER || NETCOREAPP
+ Parallel.ForEach(items, Core.Globals.ParallelOptions, machine =>
+#elif NET40_OR_GREATER
+ Parallel.ForEach(items, machine =>
+#else
foreach (var machine in items)
+#endif
{
- ConvertMachine(machine, source, sourceIndex, statsOnly);
+ ConvertMachine(machine, source, sourceIndex, statsOnly, filterRunner);
+#if NET40_OR_GREATER || NETCOREAPP
+ });
+#else
}
+#endif
}
///
@@ -195,12 +220,21 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ConvertMachine(Models.Metadata.Machine? item, Source source, long sourceIndex, bool statsOnly)
+ /// <param name="filterRunner">Optional FilterRunner to filter items on parse</param>
+ private void ConvertMachine(Models.Metadata.Machine? item,
+ Source source,
+ long sourceIndex,
+ bool statsOnly,
+ FilterRunner? filterRunner)
{
// If the machine is invalid, we can't do anything
if (item == null || item.Count == 0)
return;
+ // If the machine doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(item))
+ return;
+
// Create an internal machine and add to the dictionary
var machine = new Machine(item);
// long machineIndex = AddMachineDB(machine);
@@ -209,138 +243,138 @@ namespace SabreTools.DatFiles
if (item.ContainsKey(Models.Metadata.Machine.AdjusterKey))
{
var items = item.ReadItemArray(Models.Metadata.Machine.AdjusterKey);
- ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly);
+ ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, filterRunner);
}
if (item.ContainsKey(Models.Metadata.Machine.ArchiveKey))
{
var items = item.ReadItemArray(Models.Metadata.Machine.ArchiveKey);
- ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly);
+ ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, filterRunner);
}
if (item.ContainsKey(Models.Metadata.Machine.BiosSetKey))
{
var items = item.ReadItemArray(Models.Metadata.Machine.BiosSetKey);
- ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly);
+ ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, filterRunner);
}
if (item.ContainsKey(Models.Metadata.Machine.ChipKey))
{
var items = item.ReadItemArray(Models.Metadata.Machine.ChipKey);
- ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly);
+ ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, filterRunner);
}
if (item.ContainsKey(Models.Metadata.Machine.ConfigurationKey))
{
var items = item.ReadItemArray(Models.Metadata.Machine.ConfigurationKey);
- ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly);
+ ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, filterRunner);
}
if (item.ContainsKey(Models.Metadata.Machine.DeviceKey))
{
var items = item.ReadItemArray(Models.Metadata.Machine.DeviceKey);
- ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly);
+ ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, filterRunner);
}
if (item.ContainsKey(Models.Metadata.Machine.DeviceRefKey))
{
var items = item.ReadItemArray(Models.Metadata.Machine.DeviceRefKey);
- ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly);
+ ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, filterRunner);
}
if (item.ContainsKey(Models.Metadata.Machine.DipSwitchKey))
{
var items = item.ReadItemArray(Models.Metadata.Machine.DipSwitchKey);
- ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly);
+ ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, filterRunner);
}
if (item.ContainsKey(Models.Metadata.Machine.DiskKey))
{
var items = item.ReadItemArray(Models.Metadata.Machine.DiskKey);
- ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly);
+ ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, filterRunner);
}
if (item.ContainsKey(Models.Metadata.Machine.DisplayKey))
{
var items = item.ReadItemArray(Models.Metadata.Machine.DisplayKey);
- ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly);
+ ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, filterRunner);
}
if (item.ContainsKey(Models.Metadata.Machine.DriverKey))
{
var items = item.ReadItemArray(Models.Metadata.Machine.DriverKey);
- ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly);
+ ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, filterRunner);
}
if (item.ContainsKey(Models.Metadata.Machine.DumpKey))
{
var items = item.ReadItemArray(Models.Metadata.Machine.DumpKey);
string? machineName = machine.GetName();
- ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, machineName);
+ ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, machineName, filterRunner);
}
if (item.ContainsKey(Models.Metadata.Machine.FeatureKey))
{
var items = item.ReadItemArray(Models.Metadata.Machine.FeatureKey);
- ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly);
+ ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, filterRunner);
}
if (item.ContainsKey(Models.Metadata.Machine.InfoKey))
{
var items = item.ReadItemArray(Models.Metadata.Machine.InfoKey);
- ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly);
+ ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, filterRunner);
}
if (item.ContainsKey(Models.Metadata.Machine.InputKey))
{
var items = item.ReadItemArray(Models.Metadata.Machine.InputKey);
- ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly);
+ ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, filterRunner);
}
if (item.ContainsKey(Models.Metadata.Machine.MediaKey))
{
var items = item.ReadItemArray(Models.Metadata.Machine.MediaKey);
- ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly);
+ ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, filterRunner);
}
if (item.ContainsKey(Models.Metadata.Machine.PartKey))
{
var items = item.ReadItemArray(Models.Metadata.Machine.PartKey);
- ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly);
+ ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, filterRunner);
}
if (item.ContainsKey(Models.Metadata.Machine.PortKey))
{
var items = item.ReadItemArray(Models.Metadata.Machine.PortKey);
- ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly);
+ ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, filterRunner);
}
if (item.ContainsKey(Models.Metadata.Machine.RamOptionKey))
{
var items = item.ReadItemArray(Models.Metadata.Machine.RamOptionKey);
- ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly);
+ ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, filterRunner);
}
if (item.ContainsKey(Models.Metadata.Machine.ReleaseKey))
{
var items = item.ReadItemArray(Models.Metadata.Machine.ReleaseKey);
- ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly);
+ ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, filterRunner);
}
if (item.ContainsKey(Models.Metadata.Machine.RomKey))
{
var items = item.ReadItemArray(Models.Metadata.Machine.RomKey);
- ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly);
+ ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, filterRunner);
}
if (item.ContainsKey(Models.Metadata.Machine.SampleKey))
{
var items = item.ReadItemArray(Models.Metadata.Machine.SampleKey);
- ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly);
+ ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, filterRunner);
}
if (item.ContainsKey(Models.Metadata.Machine.SharedFeatKey))
{
var items = item.ReadItemArray(Models.Metadata.Machine.SharedFeatKey);
- ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly);
+ ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, filterRunner);
}
if (item.ContainsKey(Models.Metadata.Machine.SlotKey))
{
var items = item.ReadItemArray(Models.Metadata.Machine.SlotKey);
- ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly);
+ ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, filterRunner);
}
if (item.ContainsKey(Models.Metadata.Machine.SoftwareListKey))
{
var items = item.ReadItemArray(Models.Metadata.Machine.SoftwareListKey);
- ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly);
+ ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, filterRunner);
}
if (item.ContainsKey(Models.Metadata.Machine.SoundKey))
{
var items = item.ReadItemArray(Models.Metadata.Machine.SoundKey);
- ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly);
+ ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, filterRunner);
}
if (item.ContainsKey(Models.Metadata.Machine.VideoKey))
{
var items = item.ReadItemArray(Models.Metadata.Machine.VideoKey);
- ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly);
+ ProcessItems(items, machine, machineIndex: 0, source, sourceIndex, statsOnly, filterRunner);
}
}
@@ -353,7 +387,8 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ProcessItems(Models.Metadata.Adjuster[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly)
+ /// <param name="filterRunner">Optional FilterRunner to filter items on parse</param>
+ private void ProcessItems(Models.Metadata.Adjuster[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, FilterRunner? filterRunner)
{
// If the array is null or empty, return without processing
if (items == null || items.Length == 0)
@@ -362,12 +397,16 @@ namespace SabreTools.DatFiles
// Loop through the items and add
foreach (var item in items)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(item))
+ continue;
+
var datItem = new Adjuster(item);
datItem.SetFieldValue(DatItem.SourceKey, source);
datItem.CopyMachineInformation(machine);
AddItem(datItem, statsOnly);
- //AddItemDB(datItem, machineIndex, sourceIndex, statsOnly);
+ //AddItemDB(datItem, machineIndex, sourceIndex, statsOnly, filterRunner);
}
}
@@ -380,7 +419,8 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ProcessItems(Models.Metadata.Archive[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly)
+ /// <param name="filterRunner">Optional FilterRunner to filter items on parse</param>
+ private void ProcessItems(Models.Metadata.Archive[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, FilterRunner? filterRunner)
{
// If the array is null or empty, return without processing
if (items == null || items.Length == 0)
@@ -389,6 +429,10 @@ namespace SabreTools.DatFiles
// Loop through the items and add
foreach (var item in items)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(item))
+ continue;
+
var datItem = new Archive(item);
datItem.SetFieldValue(DatItem.SourceKey, source);
datItem.CopyMachineInformation(machine);
@@ -407,7 +451,8 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ProcessItems(Models.Metadata.BiosSet[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly)
+ /// <param name="filterRunner">Optional FilterRunner to filter items on parse</param>
+ private void ProcessItems(Models.Metadata.BiosSet[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, FilterRunner? filterRunner)
{
// If the array is null or empty, return without processing
if (items == null || items.Length == 0)
@@ -416,6 +461,10 @@ namespace SabreTools.DatFiles
// Loop through the items and add
foreach (var item in items)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(item))
+ continue;
+
var datItem = new BiosSet(item);
datItem.SetFieldValue(DatItem.SourceKey, source);
datItem.CopyMachineInformation(machine);
@@ -434,7 +483,8 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ProcessItems(Models.Metadata.Chip[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly)
+ /// <param name="filterRunner">Optional FilterRunner to filter items on parse</param>
+ private void ProcessItems(Models.Metadata.Chip[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, FilterRunner? filterRunner)
{
// If the array is null or empty, return without processing
if (items == null || items.Length == 0)
@@ -443,6 +493,10 @@ namespace SabreTools.DatFiles
// Loop through the items and add
foreach (var item in items)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(item))
+ continue;
+
var datItem = new Chip(item);
datItem.SetFieldValue(DatItem.SourceKey, source);
datItem.CopyMachineInformation(machine);
@@ -461,7 +515,8 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ProcessItems(Models.Metadata.Configuration[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly)
+ /// <param name="filterRunner">Optional FilterRunner to filter items on parse</param>
+ private void ProcessItems(Models.Metadata.Configuration[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, FilterRunner? filterRunner)
{
// If the array is null or empty, return without processing
if (items == null || items.Length == 0)
@@ -470,6 +525,10 @@ namespace SabreTools.DatFiles
// Loop through the items and add
foreach (var item in items)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(item))
+ continue;
+
var datItem = new Configuration(item);
datItem.SetFieldValue(DatItem.SourceKey, source);
datItem.CopyMachineInformation(machine);
@@ -488,7 +547,8 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ProcessItems(Models.Metadata.Device[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly)
+ /// <param name="filterRunner">Optional FilterRunner to filter items on parse</param>
+ private void ProcessItems(Models.Metadata.Device[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, FilterRunner? filterRunner)
{
// If the array is null or empty, return without processing
if (items == null || items.Length == 0)
@@ -497,6 +557,10 @@ namespace SabreTools.DatFiles
// Loop through the items and add
foreach (var item in items)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(item))
+ continue;
+
var datItem = new Device(item);
datItem.SetFieldValue(DatItem.SourceKey, source);
datItem.CopyMachineInformation(machine);
@@ -515,7 +579,8 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ProcessItems(Models.Metadata.DeviceRef[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly)
+ /// <param name="filterRunner">Optional FilterRunner to filter items on parse</param>
+ private void ProcessItems(Models.Metadata.DeviceRef[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, FilterRunner? filterRunner)
{
// If the array is null or empty, return without processing
if (items == null || items.Length == 0)
@@ -524,6 +589,10 @@ namespace SabreTools.DatFiles
// Loop through the items and add
foreach (var item in items)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(item))
+ continue;
+
var datItem = new DeviceRef(item);
datItem.SetFieldValue(DatItem.SourceKey, source);
datItem.CopyMachineInformation(machine);
@@ -542,7 +611,8 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ProcessItems(Models.Metadata.DipSwitch[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly)
+ /// <param name="filterRunner">Optional FilterRunner to filter items on parse</param>
+ private void ProcessItems(Models.Metadata.DipSwitch[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, FilterRunner? filterRunner)
{
// If the array is null or empty, return without processing
if (items == null || items.Length == 0)
@@ -551,6 +621,10 @@ namespace SabreTools.DatFiles
// Loop through the items and add
foreach (var item in items)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(item))
+ continue;
+
var datItem = new DipSwitch(item);
datItem.SetFieldValue(DatItem.SourceKey, source);
datItem.CopyMachineInformation(machine);
@@ -569,7 +643,8 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ProcessItems(Models.Metadata.Disk[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly)
+ /// <param name="filterRunner">Optional FilterRunner to filter items on parse</param>
+ private void ProcessItems(Models.Metadata.Disk[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, FilterRunner? filterRunner)
{
// If the array is null or empty, return without processing
if (items == null || items.Length == 0)
@@ -578,6 +653,10 @@ namespace SabreTools.DatFiles
// Loop through the items and add
foreach (var item in items)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(item))
+ continue;
+
var datItem = new Disk(item);
datItem.SetFieldValue(DatItem.SourceKey, source);
datItem.CopyMachineInformation(machine);
@@ -596,7 +675,8 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ProcessItems(Models.Metadata.Display[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly)
+ /// <param name="filterRunner">Optional FilterRunner to filter items on parse</param>
+ private void ProcessItems(Models.Metadata.Display[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, FilterRunner? filterRunner)
{
// If the array is null or empty, return without processing
if (items == null || items.Length == 0)
@@ -605,6 +685,10 @@ namespace SabreTools.DatFiles
// Loop through the items and add
foreach (var item in items)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(item))
+ continue;
+
var datItem = new Display(item);
datItem.SetFieldValue(DatItem.SourceKey, source);
datItem.CopyMachineInformation(machine);
@@ -623,7 +707,8 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ProcessItems(Models.Metadata.Driver[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly)
+ /// <param name="filterRunner">Optional FilterRunner to filter items on parse</param>
+ private void ProcessItems(Models.Metadata.Driver[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, FilterRunner? filterRunner)
{
// If the array is null or empty, return without processing
if (items == null || items.Length == 0)
@@ -632,6 +717,10 @@ namespace SabreTools.DatFiles
// Loop through the items and add
foreach (var item in items)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(item))
+ continue;
+
var datItem = new Driver(item);
datItem.SetFieldValue(DatItem.SourceKey, source);
datItem.CopyMachineInformation(machine);
@@ -651,8 +740,9 @@ namespace SabreTools.DatFiles
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
/// Machine name to use when constructing item names
+ /// <param name="filterRunner">Optional FilterRunner to filter items on parse</param>
/// TODO: Convert this into a constructor in Rom
- private void ProcessItems(Models.Metadata.Dump[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, string? machineName)
+ private void ProcessItems(Models.Metadata.Dump[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, string? machineName, FilterRunner? filterRunner)
{
// If the array is null or empty, return without processing
if (items == null || items.Length == 0)
@@ -685,6 +775,10 @@ namespace SabreTools.DatFiles
continue;
}
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(rom!))
+ continue;
+
string name = $"{machineName}_{index++}{(!string.IsNullOrEmpty(rom!.ReadString(Models.Metadata.Rom.RemarkKey)) ? $" {rom.ReadString(Models.Metadata.Rom.RemarkKey)}" : string.Empty)}";
var datItem = new Rom();
@@ -743,7 +837,8 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ProcessItems(Models.Metadata.Feature[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly)
+ /// <param name="filterRunner">Optional FilterRunner to filter items on parse</param>
+ private void ProcessItems(Models.Metadata.Feature[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, FilterRunner? filterRunner)
{
// If the array is null or empty, return without processing
if (items == null || items.Length == 0)
@@ -752,6 +847,10 @@ namespace SabreTools.DatFiles
// Loop through the items and add
foreach (var item in items)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(item))
+ continue;
+
var datItem = new Feature(item);
datItem.SetFieldValue(DatItem.SourceKey, source);
datItem.CopyMachineInformation(machine);
@@ -770,7 +869,8 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ProcessItems(Models.Metadata.Info[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly)
+ /// <param name="filterRunner">Optional FilterRunner to filter items on parse</param>
+ private void ProcessItems(Models.Metadata.Info[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, FilterRunner? filterRunner)
{
// If the array is null or empty, return without processing
if (items == null || items.Length == 0)
@@ -779,6 +879,10 @@ namespace SabreTools.DatFiles
// Loop through the items and add
foreach (var item in items)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(item))
+ continue;
+
var datItem = new Info(item);
datItem.SetFieldValue(DatItem.SourceKey, source);
datItem.CopyMachineInformation(machine);
@@ -797,7 +901,8 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ProcessItems(Models.Metadata.Input[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly)
+ /// <param name="filterRunner">Optional FilterRunner to filter items on parse</param>
+ private void ProcessItems(Models.Metadata.Input[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, FilterRunner? filterRunner)
{
// If the array is null or empty, return without processing
if (items == null || items.Length == 0)
@@ -806,6 +911,10 @@ namespace SabreTools.DatFiles
// Loop through the items and add
foreach (var item in items)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(item))
+ continue;
+
var datItem = new Input(item);
datItem.SetFieldValue(DatItem.SourceKey, source);
datItem.CopyMachineInformation(machine);
@@ -824,7 +933,8 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ProcessItems(Models.Metadata.Media[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly)
+ /// <param name="filterRunner">Optional FilterRunner to filter items on parse</param>
+ private void ProcessItems(Models.Metadata.Media[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, FilterRunner? filterRunner)
{
// If the array is null or empty, return without processing
if (items == null || items.Length == 0)
@@ -833,6 +943,10 @@ namespace SabreTools.DatFiles
// Loop through the items and add
foreach (var item in items)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(item))
+ continue;
+
var datItem = new Media(item);
datItem.SetFieldValue(DatItem.SourceKey, source);
datItem.CopyMachineInformation(machine);
@@ -851,7 +965,8 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ProcessItems(Models.Metadata.Part[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly)
+ /// <param name="filterRunner">Optional FilterRunner to filter items on parse</param>
+ private void ProcessItems(Models.Metadata.Part[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, FilterRunner? filterRunner)
{
// If the array is null or empty, return without processing
if (items == null || items.Length == 0)
@@ -875,6 +990,10 @@ namespace SabreTools.DatFiles
foreach (var rom in roms)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(rom))
+ continue;
+
var romItem = new Rom(rom);
romItem.SetFieldValue(Rom.DataAreaKey, dataAreaItem);
romItem.SetFieldValue(Rom.PartKey, partItem);
@@ -899,6 +1018,10 @@ namespace SabreTools.DatFiles
foreach (var disk in disks)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(disk))
+ continue;
+
var diskItem = new Disk(disk);
diskItem.SetFieldValue(Disk.DiskAreaKey, diskAreaitem);
diskItem.SetFieldValue(Disk.PartKey, partItem);
@@ -916,6 +1039,10 @@ namespace SabreTools.DatFiles
{
foreach (var dipSwitch in dipSwitches)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(dipSwitch))
+ continue;
+
var dipSwitchItem = new DipSwitch(dipSwitch);
dipSwitchItem.SetFieldValue(DipSwitch.PartKey, partItem);
dipSwitchItem.SetFieldValue(DatItem.SourceKey, source);
@@ -931,6 +1058,10 @@ namespace SabreTools.DatFiles
{
foreach (var partFeature in partFeatures)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(partFeature))
+ continue;
+
var partFeatureItem = new PartFeature(partFeature);
partFeatureItem.SetFieldValue(DipSwitch.PartKey, partItem);
partFeatureItem.SetFieldValue(DatItem.SourceKey, source);
@@ -952,7 +1083,8 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ProcessItems(Models.Metadata.Port[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly)
+ /// Optional FilterRunner to filter items on parse
+ private void ProcessItems(Models.Metadata.Port[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, FilterRunner? filterRunner)
{
// If the array is null or empty, return without processing
if (items == null || items.Length == 0)
@@ -961,6 +1093,10 @@ namespace SabreTools.DatFiles
// Loop through the items and add
foreach (var item in items)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(item))
+ continue;
+
var datItem = new Port(item);
datItem.SetFieldValue(DatItem.SourceKey, source);
datItem.CopyMachineInformation(machine);
@@ -979,7 +1115,8 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ProcessItems(Models.Metadata.RamOption[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly)
+ /// Optional FilterRunner to filter items on parse
+ private void ProcessItems(Models.Metadata.RamOption[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, FilterRunner? filterRunner)
{
// If the array is null or empty, return without processing
if (items == null || items.Length == 0)
@@ -988,6 +1125,10 @@ namespace SabreTools.DatFiles
// Loop through the items and add
foreach (var item in items)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(item))
+ continue;
+
var datItem = new RamOption(item);
datItem.SetFieldValue(DatItem.SourceKey, source);
datItem.CopyMachineInformation(machine);
@@ -1006,7 +1147,8 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ProcessItems(Models.Metadata.Release[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly)
+ /// Optional FilterRunner to filter items on parse
+ private void ProcessItems(Models.Metadata.Release[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, FilterRunner? filterRunner)
{
// If the array is null or empty, return without processing
if (items == null || items.Length == 0)
@@ -1015,6 +1157,10 @@ namespace SabreTools.DatFiles
// Loop through the items and add
foreach (var item in items)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(item))
+ continue;
+
var datItem = new Release(item);
datItem.SetFieldValue(DatItem.SourceKey, source);
datItem.CopyMachineInformation(machine);
@@ -1033,7 +1179,8 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ProcessItems(Models.Metadata.Rom[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly)
+ /// Optional FilterRunner to filter items on parse
+ private void ProcessItems(Models.Metadata.Rom[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, FilterRunner? filterRunner)
{
// If the array is null or empty, return without processing
if (items == null || items.Length == 0)
@@ -1042,6 +1189,10 @@ namespace SabreTools.DatFiles
// Loop through the items and add
foreach (var item in items)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(item))
+ continue;
+
var datItem = new Rom(item);
datItem.SetFieldValue(DatItem.SourceKey, source);
datItem.CopyMachineInformation(machine);
@@ -1060,7 +1211,8 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ProcessItems(Models.Metadata.Sample[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly)
+ /// Optional FilterRunner to filter items on parse
+ private void ProcessItems(Models.Metadata.Sample[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, FilterRunner? filterRunner)
{
// If the array is null or empty, return without processing
if (items == null || items.Length == 0)
@@ -1069,6 +1221,10 @@ namespace SabreTools.DatFiles
// Loop through the items and add
foreach (var item in items)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(item))
+ continue;
+
var datItem = new Sample(item);
datItem.SetFieldValue(DatItem.SourceKey, source);
datItem.CopyMachineInformation(machine);
@@ -1087,7 +1243,8 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ProcessItems(Models.Metadata.SharedFeat[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly)
+ /// Optional FilterRunner to filter items on parse
+ private void ProcessItems(Models.Metadata.SharedFeat[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, FilterRunner? filterRunner)
{
// If the array is null or empty, return without processing
if (items == null || items.Length == 0)
@@ -1096,6 +1253,10 @@ namespace SabreTools.DatFiles
// Loop through the items and add
foreach (var item in items)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(item))
+ continue;
+
var datItem = new SharedFeat(item);
datItem.SetFieldValue(DatItem.SourceKey, source);
datItem.CopyMachineInformation(machine);
@@ -1114,7 +1275,8 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ProcessItems(Models.Metadata.Slot[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly)
+ /// Optional FilterRunner to filter items on parse
+ private void ProcessItems(Models.Metadata.Slot[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, FilterRunner? filterRunner)
{
// If the array is null or empty, return without processing
if (items == null || items.Length == 0)
@@ -1123,6 +1285,10 @@ namespace SabreTools.DatFiles
// Loop through the items and add
foreach (var item in items)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(item))
+ continue;
+
var datItem = new Slot(item);
datItem.SetFieldValue(DatItem.SourceKey, source);
datItem.CopyMachineInformation(machine);
@@ -1141,7 +1307,8 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ProcessItems(Models.Metadata.SoftwareList[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly)
+ /// Optional FilterRunner to filter items on parse
+ private void ProcessItems(Models.Metadata.SoftwareList[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, FilterRunner? filterRunner)
{
// If the array is null or empty, return without processing
if (items == null || items.Length == 0)
@@ -1150,10 +1317,14 @@ namespace SabreTools.DatFiles
// Loop through the items and add
foreach (var item in items)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(item))
+ continue;
+
var datItem = new SoftwareList(item);
datItem.SetFieldValue(DatItem.SourceKey, source);
datItem.CopyMachineInformation(machine);
AddItem(datItem, statsOnly);
// AddItemDB(datItem, machineIndex, sourceIndex, statsOnly);
}
@@ -1168,7 +1343,8 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ProcessItems(Models.Metadata.Sound[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly)
+ /// Optional FilterRunner to filter items on parse
+ private void ProcessItems(Models.Metadata.Sound[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, FilterRunner? filterRunner)
{
// If the array is null or empty, return without processing
if (items == null || items.Length == 0)
@@ -1177,6 +1353,10 @@ namespace SabreTools.DatFiles
// Loop through the items and add
foreach (var item in items)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(item))
+ continue;
+
var datItem = new Sound(item);
datItem.SetFieldValue(DatItem.SourceKey, source);
datItem.CopyMachineInformation(machine);
@@ -1195,7 +1375,8 @@ namespace SabreTools.DatFiles
/// Source to use with the converted items
/// Index of the Source to use with the converted items
/// True to only add item statistics while parsing, false otherwise
- private void ProcessItems(Models.Metadata.Video[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly)
+ /// Optional FilterRunner to filter items on parse
+ private void ProcessItems(Models.Metadata.Video[]? items, Machine machine, long machineIndex, Source source, long sourceIndex, bool statsOnly, FilterRunner? filterRunner)
{
// If the array is null or empty, return without processing
if (items == null || items.Length == 0)
@@ -1204,6 +1385,10 @@ namespace SabreTools.DatFiles
// Loop through the items and add
foreach (var item in items)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !filterRunner.Run(item))
+ continue;
+
var datItem = new Display(item);
datItem.SetFieldValue(DatItem.SourceKey, source);
datItem.CopyMachineInformation(machine);
diff --git a/SabreTools.DatFiles/DatFile.cs b/SabreTools.DatFiles/DatFile.cs
index 88599c8a..3b17deaf 100644
--- a/SabreTools.DatFiles/DatFile.cs
+++ b/SabreTools.DatFiles/DatFile.cs
@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO;
using System.Xml.Serialization;
using Newtonsoft.Json;
+using SabreTools.Core.Filter;
using SabreTools.Core.Tools;
using SabreTools.DatItems;
using SabreTools.DatItems.Formats;
@@ -427,10 +428,16 @@ namespace SabreTools.DatFiles
///
/// Name of the file to be parsed
/// Index ID for the DAT
- /// True if full pathnames are to be kept, false otherwise (default)
+ /// True if full pathnames are to be kept, false otherwise
/// True to only add item statistics while parsing, false otherwise
+ /// Optional FilterRunner to filter items on parse
/// True if the error that is thrown should be thrown back to the caller, false otherwise
- public abstract void ParseFile(string filename, int indexId, bool keep, bool statsOnly = false, bool throwOnError = false);
+ public abstract void ParseFile(string filename,
+ int indexId,
+ bool keep,
+ bool statsOnly = false,
+ FilterRunner? filterRunner = null,
+ bool throwOnError = false);
#endregion
diff --git a/SabreTools.DatFiles/Formats/ClrMamePro.cs b/SabreTools.DatFiles/Formats/ClrMamePro.cs
index 54c88782..567127be 100644
--- a/SabreTools.DatFiles/Formats/ClrMamePro.cs
+++ b/SabreTools.DatFiles/Formats/ClrMamePro.cs
@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
+using SabreTools.Core.Filter;
using SabreTools.Core.Tools;
using SabreTools.DatItems;
using SabreTools.DatItems.Formats;
@@ -43,7 +44,12 @@ namespace SabreTools.DatFiles.Formats
}
///
- public override void ParseFile(string filename, int indexId, bool keep, bool statsOnly = false, bool throwOnError = false)
+ public override void ParseFile(string filename,
+ int indexId,
+ bool keep,
+ bool statsOnly = false,
+ FilterRunner? filterRunner = null,
+ bool throwOnError = false)
{
try
{
@@ -52,7 +58,7 @@ namespace SabreTools.DatFiles.Formats
var metadata = new Serialization.CrossModel.ClrMamePro().Serialize(metadataFile);
// Convert to the internal format
- ConvertFromMetadata(metadata, filename, indexId, keep, statsOnly);
+ ConvertFromMetadata(metadata, filename, indexId, keep, statsOnly, filterRunner);
}
catch (Exception ex) when (!throwOnError)
{
diff --git a/SabreTools.DatFiles/Formats/Hashfile.cs b/SabreTools.DatFiles/Formats/Hashfile.cs
index 75a2900f..2dca6541 100644
--- a/SabreTools.DatFiles/Formats/Hashfile.cs
+++ b/SabreTools.DatFiles/Formats/Hashfile.cs
@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
+using SabreTools.Core.Filter;
using SabreTools.DatItems;
using SabreTools.DatItems.Formats;
using SabreTools.Hashing;
@@ -27,7 +28,12 @@ namespace SabreTools.DatFiles.Formats
}
///
- public override void ParseFile(string filename, int indexId, bool keep, bool statsOnly = false, bool throwOnError = false)
+ public override void ParseFile(string filename,
+ int indexId,
+ bool keep,
+ bool statsOnly = false,
+ FilterRunner? filterRunner = null,
+ bool throwOnError = false)
{
try
{
@@ -36,7 +42,7 @@ namespace SabreTools.DatFiles.Formats
var metadata = new Serialization.CrossModel.Hashfile().Serialize(hashfile);
// Convert to the internal format
- ConvertFromMetadata(metadata, filename, indexId, keep, statsOnly);
+ ConvertFromMetadata(metadata, filename, indexId, keep, statsOnly, filterRunner);
}
catch (Exception ex) when (!throwOnError)
{
diff --git a/SabreTools.DatFiles/Formats/Listxml.cs b/SabreTools.DatFiles/Formats/Listxml.cs
index ea574e8a..8f8a3518 100644
--- a/SabreTools.DatFiles/Formats/Listxml.cs
+++ b/SabreTools.DatFiles/Formats/Listxml.cs
@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
+using SabreTools.Core.Filter;
using SabreTools.Core.Tools;
using SabreTools.DatItems;
using SabreTools.DatItems.Formats;
@@ -220,7 +221,12 @@ namespace SabreTools.DatFiles.Formats
}
///
- public override void ParseFile(string filename, int indexId, bool keep, bool statsOnly = false, bool throwOnError = false)
+ public override void ParseFile(string filename,
+ int indexId,
+ bool keep,
+ bool statsOnly = false,
+ FilterRunner? filterRunner = null,
+ bool throwOnError = false)
{
try
{
@@ -238,7 +244,7 @@ namespace SabreTools.DatFiles.Formats
}
// Convert to the internal format
- ConvertFromMetadata(metadata, filename, indexId, keep, statsOnly);
+ ConvertFromMetadata(metadata, filename, indexId, keep, statsOnly, filterRunner);
}
catch (Exception ex) when (!throwOnError)
{
diff --git a/SabreTools.DatFiles/Formats/Missfile.cs b/SabreTools.DatFiles/Formats/Missfile.cs
index abb5304c..c9b776c1 100644
--- a/SabreTools.DatFiles/Formats/Missfile.cs
+++ b/SabreTools.DatFiles/Formats/Missfile.cs
@@ -2,6 +2,7 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
+using SabreTools.Core.Filter;
using SabreTools.DatItems;
namespace SabreTools.DatFiles.Formats
@@ -26,7 +27,12 @@ namespace SabreTools.DatFiles.Formats
///
/// There is no consistent way to parse a missfile
- public override void ParseFile(string filename, int indexId, bool keep, bool statsOnly = false, bool throwOnError = false)
+ public override void ParseFile(string filename,
+ int indexId,
+ bool keep,
+ bool statsOnly = false,
+ FilterRunner? filterRunner = null,
+ bool throwOnError = false)
{
throw new NotImplementedException();
}
diff --git a/SabreTools.DatFiles/Formats/SabreJSON.cs b/SabreTools.DatFiles/Formats/SabreJSON.cs
index 98ce52a2..99684289 100644
--- a/SabreTools.DatFiles/Formats/SabreJSON.cs
+++ b/SabreTools.DatFiles/Formats/SabreJSON.cs
@@ -6,6 +6,7 @@ using System.Text;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using Newtonsoft.Json.Serialization;
+using SabreTools.Core.Filter;
using SabreTools.Core.Tools;
using SabreTools.DatItems;
using SabreTools.DatItems.Formats;
@@ -32,7 +33,12 @@ namespace SabreTools.DatFiles.Formats
}
///
- public override void ParseFile(string filename, int indexId, bool keep, bool statsOnly = false, bool throwOnError = false)
+ public override void ParseFile(string filename,
+ int indexId,
+ bool keep,
+ bool statsOnly = false,
+ FilterRunner? filterRunner = null,
+ bool throwOnError = false)
{
// Prepare all internal variables
var fs = System.IO.File.Open(filename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
@@ -68,7 +74,7 @@ namespace SabreTools.DatFiles.Formats
// Machine array
case "machines":
- ReadMachines(jtr, statsOnly, source, sourceIndex: 0);
+ ReadMachines(jtr, statsOnly, source, sourceIndex: 0, filterRunner);
jtr.Read();
break;
@@ -110,7 +116,8 @@ namespace SabreTools.DatFiles.Formats
/// True to only add item statistics while parsing, false otherwise
/// Source representing the DAT
/// Index of the Source representing the DAT
- private void ReadMachines(JsonTextReader jtr, bool statsOnly, Source source, long sourceIndex)
+ /// Optional FilterRunner to filter items on parse
+ private void ReadMachines(JsonTextReader jtr, bool statsOnly, Source source, long sourceIndex, FilterRunner? filterRunner)
{
// If the reader is invalid, skip
if (jtr == null)
@@ -124,7 +131,7 @@ namespace SabreTools.DatFiles.Formats
// Loop through each machine object and process
foreach (JObject machineObj in machineArray)
{
- ReadMachine(machineObj, statsOnly, source, sourceIndex);
+ ReadMachine(machineObj, statsOnly, source, sourceIndex, filterRunner);
}
}
@@ -135,7 +142,8 @@ namespace SabreTools.DatFiles.Formats
/// True to only add item statistics while parsing, false otherwise
/// Source representing the DAT
/// Index of the Source representing the DAT
- private void ReadMachine(JObject machineObj, bool statsOnly, Source source, long sourceIndex)
+ /// Optional FilterRunner to filter items on parse
+ private void ReadMachine(JObject machineObj, bool statsOnly, Source source, long sourceIndex, FilterRunner? filterRunner)
{
// If object is invalid, skip it
if (machineObj == null)
@@ -148,6 +156,10 @@ namespace SabreTools.DatFiles.Formats
if (machineObj.ContainsKey("machine"))
machine = machineObj["machine"]?.ToObject();
+ // If the machine doesn't pass the filter
+ if (machine != null && filterRunner != null && !machine.PassesFilter(filterRunner))
+ return;
+
// Add the machine to the dictionary
// long machineIndex = -1;
// if (machine != null)
@@ -155,7 +167,15 @@ namespace SabreTools.DatFiles.Formats
// Read items, if possible
if (machineObj.ContainsKey("items"))
- ReadItems(machineObj["items"] as JArray, statsOnly, source, sourceIndex, machine, machineIndex: 0);
+ {
+ ReadItems(machineObj["items"] as JArray,
+ statsOnly,
+ source,
+ sourceIndex,
+ machine,
+ machineIndex: 0,
+ filterRunner);
+ }
}
///
@@ -167,6 +187,7 @@ namespace SabreTools.DatFiles.Formats
/// Index of the Source representing the DAT
/// Machine information to add to the parsed items
/// Index of the Machine to add to the parsed items
+ /// Optional FilterRunner to filter items on parse
private void ReadItems(
JArray? itemsArr,
bool statsOnly,
@@ -177,7 +198,8 @@ namespace SabreTools.DatFiles.Formats
// Miscellaneous
Machine? machine,
- long machineIndex)
+ long machineIndex,
+ FilterRunner? filterRunner)
{
// If the array is invalid, skip
if (itemsArr == null)
@@ -186,7 +208,7 @@ namespace SabreTools.DatFiles.Formats
// Loop through each datitem object and process
foreach (JObject itemObj in itemsArr)
{
- ReadItem(itemObj, statsOnly, source, sourceIndex, machine, machineIndex);
+ ReadItem(itemObj, statsOnly, source, sourceIndex, machine, machineIndex, filterRunner);
}
}
@@ -199,6 +221,7 @@ namespace SabreTools.DatFiles.Formats
/// Index of the Source representing the DAT
/// Machine information to add to the parsed items
/// Index of the Machine to add to the parsed items
+ /// Optional FilterRunner to filter items on parse
private void ReadItem(
JObject itemObj,
bool statsOnly,
@@ -209,7 +232,8 @@ namespace SabreTools.DatFiles.Formats
// Miscellaneous
Machine? machine,
- long machineIndex)
+ long machineIndex,
+ FilterRunner? filterRunner)
{
// If we have an empty item, skip it
if (itemObj == null)
@@ -359,6 +383,10 @@ namespace SabreTools.DatFiles.Formats
// If we got a valid datitem, copy machine info and add
if (datItem != null)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !datItem.PassesFilter(filterRunner))
+ return;
+
datItem.CopyMachineInformation(machine);
datItem.SetFieldValue(DatItem.SourceKey, source);
AddItem(datItem, statsOnly);
diff --git a/SabreTools.DatFiles/Formats/SabreXML.cs b/SabreTools.DatFiles/Formats/SabreXML.cs
index c32dcdb3..2acf9df1 100644
--- a/SabreTools.DatFiles/Formats/SabreXML.cs
+++ b/SabreTools.DatFiles/Formats/SabreXML.cs
@@ -5,6 +5,7 @@ using System.Text;
using System.Xml;
using System.Xml.Schema;
using System.Xml.Serialization;
+using SabreTools.Core.Filter;
using SabreTools.DatItems;
namespace SabreTools.DatFiles.Formats
@@ -29,7 +30,12 @@ namespace SabreTools.DatFiles.Formats
}
///
- public override void ParseFile(string filename, int indexId, bool keep, bool statsOnly = false, bool throwOnError = false)
+ public override void ParseFile(string filename,
+ int indexId,
+ bool keep,
+ bool statsOnly = false,
+ FilterRunner? filterRunner = null,
+ bool throwOnError = false)
{
// Prepare all internal variables
XmlReader? xtr = XmlReader.Create(filename, new XmlReaderSettings
@@ -73,7 +79,7 @@ namespace SabreTools.DatFiles.Formats
break;
case "directory":
- ReadDirectory(xtr.ReadSubtree(), statsOnly, source, sourceIndex);
+ ReadDirectory(xtr.ReadSubtree(), statsOnly, source, sourceIndex, filterRunner);
// Skip the directory node now that we've processed it
xtr.Read();
@@ -104,7 +110,12 @@ namespace SabreTools.DatFiles.Formats
/// True to only add item statistics while parsing, false otherwise
/// Source representing the DAT
/// Index of the Source representing the DAT
- private void ReadDirectory(XmlReader xtr, bool statsOnly, Source source, long sourceIndex)
+ /// Optional FilterRunner to filter items on parse
+ private void ReadDirectory(XmlReader xtr,
+ bool statsOnly,
+ Source source,
+ long sourceIndex,
+ FilterRunner? filterRunner)
{
// If the reader is invalid, skip
if (xtr == null)
@@ -130,6 +141,11 @@ namespace SabreTools.DatFiles.Formats
case "machine":
XmlSerializer xs = new(typeof(Machine));
machine = xs?.Deserialize(xtr.ReadSubtree()) as Machine;
+
+ // If the machine doesn't pass the filter
+ if (machine != null && filterRunner != null && !machine.PassesFilter(filterRunner))
+ machine = null;
+
if (machine != null)
machineIndex = AddMachineDB(machine);
@@ -137,7 +153,13 @@ namespace SabreTools.DatFiles.Formats
break;
case "files":
- ReadFiles(xtr.ReadSubtree(), machine, machineIndex, statsOnly, source, sourceIndex);
+ ReadFiles(xtr.ReadSubtree(),
+ machine,
+ machineIndex,
+ statsOnly,
+ source,
+ sourceIndex,
+ filterRunner);
// Skip the directory node now that we've processed it
xtr.Read();
@@ -158,7 +180,14 @@ namespace SabreTools.DatFiles.Formats
/// True to only add item statistics while parsing, false otherwise
/// Source representing the DAT
/// Index of the Source representing the DAT
- private void ReadFiles(XmlReader xtr, Machine? machine, long machineIndex, bool statsOnly, Source source, long sourceIndex)
+ /// Optional FilterRunner to filter items on parse
+ private void ReadFiles(XmlReader xtr,
+ Machine? machine,
+ long machineIndex,
+ bool statsOnly,
+ Source source,
+ long sourceIndex,
+ FilterRunner? filterRunner)
{
// If the reader is invalid, skip
if (xtr == null)
@@ -181,6 +210,13 @@ namespace SabreTools.DatFiles.Formats
XmlSerializer xs = new(typeof(DatItem));
if (xs.Deserialize(xtr.ReadSubtree()) is DatItem item)
{
+ // If the item doesn't pass the filter
+ if (filterRunner != null && !item.PassesFilter(filterRunner))
+ {
+ xtr.Skip();
+ break;
+ }
+
item.CopyMachineInformation(machine);
item.SetFieldValue(DatItem.SourceKey, source);
AddItem(item, statsOnly);
diff --git a/SabreTools.DatFiles/Formats/SeparatedValue.cs b/SabreTools.DatFiles/Formats/SeparatedValue.cs
index aa2c58c9..ab148cc5 100644
--- a/SabreTools.DatFiles/Formats/SeparatedValue.cs
+++ b/SabreTools.DatFiles/Formats/SeparatedValue.cs
@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
+using SabreTools.Core.Filter;
using SabreTools.DatItems;
using SabreTools.DatItems.Formats;
@@ -36,7 +37,12 @@ namespace SabreTools.DatFiles.Formats
}
///
- public override void ParseFile(string filename, int indexId, bool keep, bool statsOnly = false, bool throwOnError = false)
+ public override void ParseFile(string filename,
+ int indexId,
+ bool keep,
+ bool statsOnly = false,
+ FilterRunner? filterRunner = null,
+ bool throwOnError = false)
{
try
{
@@ -45,7 +51,7 @@ namespace SabreTools.DatFiles.Formats
var metadata = new Serialization.CrossModel.SeparatedValue().Serialize(metadataFile);
// Convert to the internal format
- ConvertFromMetadata(metadata, filename, indexId, keep, statsOnly);
+ ConvertFromMetadata(metadata, filename, indexId, keep, statsOnly, filterRunner);
}
catch (Exception ex) when (!throwOnError)
{
diff --git a/SabreTools.DatFiles/Formats/SerializableDatFile.cs b/SabreTools.DatFiles/Formats/SerializableDatFile.cs
index 228af7dc..63e7f58e 100644
--- a/SabreTools.DatFiles/Formats/SerializableDatFile.cs
+++ b/SabreTools.DatFiles/Formats/SerializableDatFile.cs
@@ -1,4 +1,5 @@
using System;
+using SabreTools.Core.Filter;
using SabreTools.Models.Metadata;
using SabreTools.Serialization.Interfaces;
@@ -20,7 +21,12 @@ namespace SabreTools.DatFiles.Formats
protected SerializableDatFile(DatFile? datFile) : base(datFile) { }
///
- public override void ParseFile(string filename, int indexId, bool keep, bool statsOnly = false, bool throwOnError = false)
+ public override void ParseFile(string filename,
+ int indexId,
+ bool keep,
+ bool statsOnly = false,
+ FilterRunner? filterRunner = null,
+ bool throwOnError = false)
{
try
{
@@ -29,7 +35,7 @@ namespace SabreTools.DatFiles.Formats
var internalFormat = Activator.CreateInstance().Serialize(specificFormat);
// Convert to the internal format
- ConvertFromMetadata(internalFormat, filename, indexId, keep, statsOnly);
+ ConvertFromMetadata(internalFormat, filename, indexId, keep, statsOnly, filterRunner);
}
catch (Exception ex) when (!throwOnError)
{
diff --git a/SabreTools.DatTools.Test/ParserTests.cs b/SabreTools.DatTools.Test/ParserTests.cs
index 9a8bd55d..fed30f7a 100644
--- a/SabreTools.DatTools.Test/ParserTests.cs
+++ b/SabreTools.DatTools.Test/ParserTests.cs
@@ -1,5 +1,6 @@
using System;
using System.IO;
+using SabreTools.Core.Filter;
using SabreTools.DatFiles;
using SabreTools.Reports;
using Xunit;
@@ -227,7 +228,7 @@ namespace SabreTools.DatTools.Test
if (filename != null)
filename = Path.Combine(Environment.CurrentDirectory, "TestData", filename);
- var datFile = Parser.ParseStatistics(filename, throwOnError: true);
- var datFile = Parser.ParseStatistics(filename, throwOnError: true);
+ var datFile = Parser.ParseStatistics(filename, new FilterRunner(Array.Empty<string>()), throwOnError: true);
Assert.Equal(datFormat, datFile.Header.GetFieldValue(DatHeader.DatFormatKey));
Assert.Equal(totalCount, datFile.Items.DatStatistics.TotalCount);
//Assert.Equal(totalCount, datFile.ItemsDB.DatStatistics.TotalCount);
diff --git a/SabreTools.DatTools/Parser.cs b/SabreTools.DatTools/Parser.cs
index 7ff79c46..a1b697c2 100644
--- a/SabreTools.DatTools/Parser.cs
+++ b/SabreTools.DatTools/Parser.cs
@@ -5,6 +5,7 @@ using System.Text.RegularExpressions;
#if NET40_OR_GREATER || NETCOREAPP
using System.Threading.Tasks;
#endif
+using SabreTools.Core.Filter;
using SabreTools.Core.Tools;
using SabreTools.DatFiles;
using SabreTools.DatFiles.Formats;
@@ -105,6 +106,7 @@ namespace SabreTools.DatTools
/// True if full pathnames are to be kept, false otherwise (default)
/// True if original extension should be kept, false otherwise (default)
/// True to only add item statistics while parsing, false otherwise
+ /// Optional FilterRunner to filter items on parse
/// True if the error that is thrown should be thrown back to the caller, false otherwise
public static void ParseInto(
DatFile datFile,
@@ -113,6 +115,7 @@ namespace SabreTools.DatTools
bool keep = false,
bool keepext = false,
bool statsOnly = false,
+ FilterRunner? filterRunner = null,
bool throwOnError = false)
{
// Check the file extension first as a safeguard
@@ -140,7 +143,12 @@ namespace SabreTools.DatTools
try
{
DatFile parsingDatFile = CreateDatFile(datFormat, datFile);
- parsingDatFile.ParseFile(filename, indexId, keep, statsOnly: statsOnly, throwOnError: throwOnError);
+ parsingDatFile.ParseFile(filename,
+ indexId,
+ keep,
+ statsOnly: statsOnly,
+ filterRunner: filterRunner,
+ throwOnError: throwOnError);
}
catch (Exception ex) when (!throwOnError)
{
@@ -155,12 +163,13 @@ namespace SabreTools.DatTools
///
/// Name of the file to be parsed
/// True if the error that is thrown should be thrown back to the caller, false otherwise
+ /// Optional FilterRunner to filter items on parse
///
/// Code must remove the existing format in order to ensure the format is derived
/// from the input file instead. This should be addressed later by either always
/// deriving the format, or by setting a flag for this to be done automatically.
//
- public static DatFile ParseStatistics(string? filename, bool throwOnError = false)
+ public static DatFile ParseStatistics(string? filename, FilterRunner? filterRunner = null, bool throwOnError = false)
{
// Null filenames are invalid
if (filename == null)
@@ -172,7 +181,7 @@ namespace SabreTools.DatTools
DatFile datFile = CreateDatFile();
datFile.Header.RemoveField(DatHeader.DatFormatKey);
- ParseInto(datFile, filename, statsOnly: true, throwOnError: throwOnError);
+ ParseInto(datFile, filename, statsOnly: true, filterRunner: filterRunner, throwOnError: throwOnError);
return datFile;
}
@@ -181,11 +190,12 @@ namespace SabreTools.DatTools
///
/// Current DatFile object to use for updating
/// Paths to DATs to parse
+ /// Optional FilterRunner to filter items on parse
/// List of DatHeader objects representing headers
- public static List<DatHeader> PopulateUserData(DatFile datFile, List<string> inputs)
+ public static List<DatHeader> PopulateUserData(DatFile datFile, List<string> inputs, FilterRunner? filterRunner = null)
{
List<ParentablePath> paths = inputs.ConvertAll(i => new ParentablePath(i));
- return PopulateUserData(datFile, paths);
+ return PopulateUserData(datFile, paths, filterRunner);
}
///
@@ -193,8 +203,9 @@ namespace SabreTools.DatTools
///
/// Current DatFile object to use for updating
/// Paths to DATs to parse
+ /// <param name="filterRunner">Optional FilterRunner to filter items on parse</param>
/// List of DatHeader objects representing headers
- public static List<DatHeader> PopulateUserData(DatFile datFile, List<ParentablePath> inputs)
+ public static List<DatHeader> PopulateUserData(DatFile datFile, List<ParentablePath> inputs, FilterRunner? filterRunner = null)
{
DatFile[] datFiles = new DatFile[inputs.Count];
InternalStopwatch watch = new("Processing individual DATs");
@@ -211,7 +222,7 @@ namespace SabreTools.DatTools
var input = inputs[i];
_staticLogger.User($"Adding DAT: {input.CurrentPath}");
datFiles[i] = CreateDatFile(datFile.Header.CloneFormat(), datFile.Modifiers);
- ParseInto(datFiles[i], input.CurrentPath, i, keep: true);
+ ParseInto(datFiles[i], input.CurrentPath, indexId: i, keep: true, filterRunner: filterRunner);
#if NET40_OR_GREATER || NETCOREAPP
});
#else
diff --git a/SabreTools.DatTools/Statistics.cs b/SabreTools.DatTools/Statistics.cs
index 2b726ce8..d01152de 100644
--- a/SabreTools.DatTools/Statistics.cs
+++ b/SabreTools.DatTools/Statistics.cs
@@ -6,6 +6,7 @@ using System.Net;
#if NET40_OR_GREATER || NETCOREAPP
using System.Threading.Tasks;
#endif
+using SabreTools.Core.Filter;
using SabreTools.DatFiles;
using SabreTools.IO;
using SabreTools.IO.Extensions;
@@ -75,8 +76,8 @@ namespace SabreTools.DatTools
InternalStopwatch watch = new($"Collecting statistics for '{file.CurrentPath}'");
- List machines = [];
- DatFile datdata = Parser.ParseStatistics(file.CurrentPath, throwOnError: throwOnError);
+ FilterRunner filterRunner = new FilterRunner(new string[0]);
+ DatFile datdata = Parser.ParseStatistics(file.CurrentPath, filterRunner, throwOnError);
// Add single DAT stats (if asked)
if (single)
diff --git a/SabreTools/Features/Batch.cs b/SabreTools/Features/Batch.cs
index b0c16e7d..46f38109 100644
--- a/SabreTools/Features/Batch.cs
+++ b/SabreTools/Features/Batch.cs
@@ -526,7 +526,9 @@ Reset the internal state: reset();";
// Assume there could be multiple
foreach (ParentablePath datFilePath in datFilePaths)
{
- Parser.ParseInto(batchState.DatFile, datFilePath.CurrentPath, batchState.Index++);
+ Parser.ParseInto(batchState.DatFile,
+ datFilePath.CurrentPath,
+ indexId: batchState.Index++);
}
}
}
diff --git a/SabreTools/Features/Sort.cs b/SabreTools/Features/Sort.cs
index de9c5896..167c22fa 100644
--- a/SabreTools/Features/Sort.cs
+++ b/SabreTools/Features/Sort.cs
@@ -94,7 +94,11 @@ namespace SabreTools.Features
foreach (ParentablePath datfile in datfilePaths)
{
DatFile datdata = Parser.CreateDatFile();
- Parser.ParseInto(datdata, datfile.CurrentPath, int.MaxValue, keep: true);
+ Parser.ParseInto(datdata,
+ datfile.CurrentPath,
+ indexId: int.MaxValue,
+ keep: true,
+ filterRunner: FilterRunner);
// Skip if nothing was parsed
if (datdata.DatStatistics.TotalCount == 0) // datdata.ItemsDB.SortedKeys.Length == 0
@@ -136,7 +140,11 @@ namespace SabreTools.Features
DatFile datdata = Parser.CreateDatFile();
foreach (ParentablePath datfile in datfilePaths)
{
- Parser.ParseInto(datdata, datfile.CurrentPath, int.MaxValue, keep: true);
+ Parser.ParseInto(datdata,
+ datfile.CurrentPath,
+ indexId: int.MaxValue,
+ keep: true,
+ filterRunner: FilterRunner);
}
// Set depot information
diff --git a/SabreTools/Features/Split.cs b/SabreTools/Features/Split.cs
index 9d9be53d..0d4eefff 100644
--- a/SabreTools/Features/Split.cs
+++ b/SabreTools/Features/Split.cs
@@ -66,7 +66,7 @@ namespace SabreTools.Features
{
// Create and fill the new DAT
DatFile internalDat = Parser.CreateDatFile(Header!, Modifiers!);
- Parser.ParseInto(internalDat, file.CurrentPath);
+ Parser.ParseInto(internalDat, file.CurrentPath, filterRunner: FilterRunner);
// Get the output directory
OutputDir = OutputDir.Ensure();
diff --git a/SabreTools/Features/Update.cs b/SabreTools/Features/Update.cs
index 0be6acc7..e2f4307c 100644
--- a/SabreTools/Features/Update.cs
+++ b/SabreTools/Features/Update.cs
@@ -300,7 +300,11 @@ namespace SabreTools.Features
{
// Parse the path to a new DatFile
DatFile repDat = Parser.CreateDatFile(Header!, Modifiers);
- Parser.ParseInto(repDat, inputPath.CurrentPath, indexId: 1, keep: true);
+ Parser.ParseInto(repDat,
+ inputPath.CurrentPath,
+ indexId: 1,
+ keep: true,
+ filterRunner: FilterRunner);
// Perform additional processing steps
AdditionalProcessing(repDat);
@@ -336,7 +340,11 @@ namespace SabreTools.Features
{
// Parse the path to a new DatFile
DatFile repDat = Parser.CreateDatFile(Header!, Modifiers);
- Parser.ParseInto(repDat, inputPath.CurrentPath, indexId: 1, keep: true);
+ Parser.ParseInto(repDat,
+ inputPath.CurrentPath,
+ indexId: 1,
+ keep: true,
+ filterRunner: FilterRunner);
// Perform additional processing steps
AdditionalProcessing(repDat);
@@ -474,7 +482,11 @@ namespace SabreTools.Features
// Clear format and parse
datFile.Header.RemoveField(DatHeader.DatFormatKey);
- Parser.ParseInto(datFile, inputPath.CurrentPath, keep: true, keepext: isSeparatedFile);
+ Parser.ParseInto(datFile,
+ inputPath.CurrentPath,
+ keep: true,
+ keepext: isSeparatedFile,
+ filterRunner: FilterRunner);
datFile.Header.SetFieldValue(DatHeader.DatFormatKey, currentFormat);
// Perform additional processing steps
@@ -500,16 +512,16 @@ namespace SabreTools.Features
/// Set of base paths
/// DatFile to parse into
/// List of DatHeader values representing the parsed files
- private static List<DatHeader> GetDatHeaders(UpdateMode updateMode, List<ParentablePath> inputPaths, List<ParentablePath> basePaths, DatFile userInputDat)
+ private List<DatHeader> GetDatHeaders(UpdateMode updateMode, List<ParentablePath> inputPaths, List<ParentablePath> basePaths, DatFile userInputDat)
{
#if NET20 || NET35
if ((updateMode & UpdateMode.DiffAgainst) != 0 || (updateMode & UpdateMode.BaseReplace) != 0)
#else
if (updateMode.HasFlag(UpdateMode.DiffAgainst) || updateMode.HasFlag(UpdateMode.BaseReplace))
#endif
- return Parser.PopulateUserData(userInputDat, basePaths);
+ return Parser.PopulateUserData(userInputDat, basePaths, FilterRunner!);
else
- return Parser.PopulateUserData(userInputDat, inputPaths);
+ return Parser.PopulateUserData(userInputDat, inputPaths, FilterRunner!);
}
///
diff --git a/SabreTools/Features/Verify.cs b/SabreTools/Features/Verify.cs
index 4cc36aad..7db4da1d 100644
--- a/SabreTools/Features/Verify.cs
+++ b/SabreTools/Features/Verify.cs
@@ -63,7 +63,11 @@ namespace SabreTools.Features
{
// Parse in from the file
DatFile datdata = Parser.CreateDatFile();
- Parser.ParseInto(datdata, datfile.CurrentPath, int.MaxValue, keep: true);
+ Parser.ParseInto(datdata,
+ datfile.CurrentPath,
+ indexId: int.MaxValue,
+ keep: true,
+ filterRunner: FilterRunner!);
// Perform additional processing steps
Extras!.ApplyExtras(datdata);
@@ -114,7 +118,11 @@ namespace SabreTools.Features
DatFile datdata = Parser.CreateDatFile();
foreach (ParentablePath datfile in datfilePaths)
{
- Parser.ParseInto(datdata, datfile.CurrentPath, int.MaxValue, keep: true);
+ Parser.ParseInto(datdata,
+ datfile.CurrentPath,
+ int.MaxValue,
+ keep: true,
+ filterRunner: FilterRunner);
}
// Perform additional processing steps