diff --git a/RombaSharp/Features/Diffdat.cs b/RombaSharp/Features/Diffdat.cs index 74dd5db7..25dd7731 100644 --- a/RombaSharp/Features/Diffdat.cs +++ b/RombaSharp/Features/Diffdat.cs @@ -67,8 +67,8 @@ in -old DAT file. Ignores those entries in -old that are not in -new."; List basedats = new List { olddat }; // Now run the diff on the inputs - datfile.DetermineUpdateType(dats, basedats, outdat, UpdateMode.DiffAgainst, false /* inplace */, false /* skip */, - new Filter(), new List(), false /* onlySame */, false /* byGame */); + datfile.PopulateUserData(basedats, new Filter()); + datfile.DiffAgainst(dats, outdat, false, new Filter(), false); } } } diff --git a/RombaSharp/Features/DisplayHelp.cs b/RombaSharp/Features/DisplayHelp.cs index 444bd75d..197a6f5c 100644 --- a/RombaSharp/Features/DisplayHelp.cs +++ b/RombaSharp/Features/DisplayHelp.cs @@ -1,6 +1,5 @@ using System.Collections.Generic; -using SabreTools.Library.Data; using SabreTools.Library.Help; namespace RombaSharp.Features diff --git a/RombaSharp/Features/DisplayHelpDetailed.cs b/RombaSharp/Features/DisplayHelpDetailed.cs index 487356f5..22fc6761 100644 --- a/RombaSharp/Features/DisplayHelpDetailed.cs +++ b/RombaSharp/Features/DisplayHelpDetailed.cs @@ -1,6 +1,5 @@ using System.Collections.Generic; -using SabreTools.Library.Data; using SabreTools.Library.Help; namespace RombaSharp.Features diff --git a/RombaSharp/Features/EDiffdat.cs b/RombaSharp/Features/EDiffdat.cs index f2f4e535..a222b425 100644 --- a/RombaSharp/Features/EDiffdat.cs +++ b/RombaSharp/Features/EDiffdat.cs @@ -60,8 +60,8 @@ namespace RombaSharp.Features List basedats = new List { olddat }; // Now run the diff on the inputs - datfile.DetermineUpdateType(dats, basedats, outdat, UpdateMode.DiffAgainst, false /* inplace */, false /* skip */, - new Filter(), new List(), false /* onlySame */, false /* byGame */); + datfile.PopulateUserData(basedats, new Filter()); + datfile.DiffAgainst(dats, outdat, false, new Filter(), false); } } } diff --git a/RombaSharp/Features/Export.cs b/RombaSharp/Features/Export.cs index 000275e0..28a13139 100644 --- a/RombaSharp/Features/Export.cs +++ b/RombaSharp/Features/Export.cs @@ -1,7 +1,6 @@ using System.Collections.Generic; using System.IO; -using SabreTools.Library.Data; using SabreTools.Library.Help; using SabreTools.Library.Tools; using Microsoft.Data.Sqlite; diff --git a/RombaSharp/Features/Script.cs b/RombaSharp/Features/Script.cs index e388a23f..85f48f9e 100644 --- a/RombaSharp/Features/Script.cs +++ b/RombaSharp/Features/Script.cs @@ -1,6 +1,5 @@ using System.Collections.Generic; -using SabreTools.Library.Data; using SabreTools.Library.Help; namespace RombaSharp.Features diff --git a/SabreTools.Library/DatFiles/DatFile.cs b/SabreTools.Library/DatFiles/DatFile.cs index fffd738b..699ff473 100644 --- a/SabreTools.Library/DatFiles/DatFile.cs +++ b/SabreTools.Library/DatFiles/DatFile.cs @@ -191,158 +191,6 @@ namespace SabreTools.Library.DatFiles #endregion - // TODO: Move to features? 
- #region Determination Helpers - - /// - /// Split a set of input DATs based on the given information - /// - /// List of inputs to be used - /// Output directory for the split files - /// True if files should be written to the source folders, false otherwise - /// Type of split to perform, if any - /// First extension to split on (Extension Split only) - /// Second extension to split on (Extension Split only) - /// True if short filenames should be used, false otherwise (Level Split only) - /// True if original filenames should be used as the base for output filename, false otherwise (Level Split only) - /// Long value representing the split point (Size Split only) - public void DetermineSplitType( - List inputs, - string outDir, - bool inplace, - SplittingMode splittingMode, - List exta, - List extb, - bool shortname, - bool basedat, - long radix) - { - // If we somehow have the "none" split type, return - if (splittingMode == SplittingMode.None) - return; - - // Get only files from the inputs - List files = DirectoryExtensions.GetFilesOnly(inputs, appendparent: true); - - // Loop over the input files - foreach (ParentablePath file in files) - { - // Create and fill the new DAT - DatFile internalDat = Create(Header); - Parse(file); - - // Get the output directory - outDir = file.GetOutputPath(outDir, inplace); - - // Split and write the DAT - if (splittingMode.HasFlag(SplittingMode.Extension)) - internalDat.SplitByExtension(outDir, exta, extb); - - if (splittingMode.HasFlag(SplittingMode.Hash)) - internalDat.SplitByHash(outDir); - - if (splittingMode.HasFlag(SplittingMode.Level)) - internalDat.SplitByLevel(outDir, shortname, basedat); - - if (splittingMode.HasFlag(SplittingMode.Size)) - internalDat.SplitBySize(outDir, radix); - - if (splittingMode.HasFlag(SplittingMode.Type)) - internalDat.SplitByType(outDir); - } - } - - /// - /// Determine if input files should be merged, diffed, or processed invidually - /// - /// Names of the input files and/or folders - /// Names of base files and/or folders - /// Optional param for output directory - /// Non-zero flag for diffing mode, zero otherwise - /// True if the output files should overwrite their inputs, false otherwise - /// True if the first cascaded diff file should be skipped on output, false otherwise - /// Filter object to be passed to the DatItem level - /// List of Fields representing what should be updated [only for base replacement] - /// True if descriptions should only be replaced if the game name is the same, false otherwise [only for base replacement] - /// True if diffing is by Game, false if diffing is by hash [only for against] - public void DetermineUpdateType( - List inputPaths, - List basePaths, - string outDir, - UpdateMode updateMode, - bool inplace, - bool skip, - Filter filter, - List updateFields, - bool onlySame, - bool byGame) - { - // Ensure we only have files in the inputs - List inputFileNames = DirectoryExtensions.GetFilesOnly(inputPaths, appendparent: true); - List baseFileNames = DirectoryExtensions.GetFilesOnly(basePaths); - - // If we're in standard update mode, run through all of the inputs - if (updateMode == UpdateMode.None) - { - Update(inputFileNames, outDir, inplace, filter); - return; - } - - // Reverse inputs if we're in a required mode - if (updateMode.HasFlag(UpdateMode.DiffReverseCascade)) - inputFileNames.Reverse(); - if (updateMode.HasFlag(UpdateMode.ReverseBaseReplace)) - baseFileNames.Reverse(); - - // If we're in merging mode - if (updateMode.HasFlag(UpdateMode.Merge)) - { - // Populate the 
combined data and get the headers - PopulateUserData(inputFileNames, filter); - MergeNoDiff(inputFileNames, outDir); - } - - // If we have one of the standard diffing modes - else if (updateMode.HasFlag(UpdateMode.DiffDupesOnly) - || updateMode.HasFlag(UpdateMode.DiffNoDupesOnly) - || updateMode.HasFlag(UpdateMode.DiffIndividualsOnly)) - { - // Populate the combined data - PopulateUserData(inputFileNames, filter); - DiffNoCascade(inputFileNames, outDir, updateMode); - } - - // If we have one of the cascaded diffing modes - else if (updateMode.HasFlag(UpdateMode.DiffCascade) - || updateMode.HasFlag(UpdateMode.DiffReverseCascade)) - { - // Populate the combined data and get the headers - List datHeaders = PopulateUserData(inputFileNames, filter); - DiffCascade(inputFileNames, datHeaders, outDir, inplace, skip); - } - - // If we have a diff against mode - else if (updateMode.HasFlag(UpdateMode.DiffAgainst)) - { - // Populate the combined data - PopulateUserData(baseFileNames, filter); - DiffAgainst(inputFileNames, outDir, inplace, filter, byGame); - } - - // If we have one of the base replacement modes - else if (updateMode.HasFlag(UpdateMode.BaseReplace) - || updateMode.HasFlag(UpdateMode.ReverseBaseReplace)) - { - // Populate the combined data - PopulateUserData(baseFileNames, filter); - BaseReplace(inputFileNames, outDir, inplace, filter, updateFields, onlySame); - } - - return; - } - - #endregion - #region Converting and Updating /// @@ -366,87 +214,6 @@ namespace SabreTools.Library.DatFiles BaseReplace(paths, outDir, inplace, filter, updateFields, onlySame); } - /// - /// Output diffs against a base set represented by the current DAT - /// - /// Names of the input files - /// Optional param for output directory - /// True if the output files should overwrite their inputs, false otherwise - /// Filter object to be passed to the DatItem level - /// True to diff using games, false to use hashes - public void DiffAgainst(List inputs, string outDir, bool inplace, Filter filter, bool useGames) - { - List paths = inputs.Select(i => new ParentablePath(i)).ToList(); - DiffAgainst(paths, outDir, inplace, filter, useGames); - } - - /// - /// Output cascading diffs - /// - /// List of inputs to write out from - /// Dat headers used optionally - /// Output directory to write the DATs to - /// True if cascaded diffs are outputted in-place, false otherwise - /// True if the first cascaded diff file should be skipped on output, false otherwise - public void DiffCascade( - List inputs, - List datHeaders, - string outDir, - bool inplace, - bool skip) - { - List paths = inputs.Select(i => new ParentablePath(i)).ToList(); - DiffCascade(paths, datHeaders, outDir, inplace, skip); - } - - /// - /// Output non-cascading diffs - /// - /// List of inputs to write out from - /// Output directory to write the DATs to - /// Non-zero flag for diffing mode, zero otherwise - public void DiffNoCascade(List inputs, string outDir, UpdateMode diff) - { - List paths = inputs.Select(i => new ParentablePath(i)).ToList(); - DiffNoCascade(paths, outDir, diff); - } - - /// - /// Output user defined merge - /// - /// List of inputs to write out from - /// Output directory to write the DATs to - public void MergeNoDiff(List inputs, string outDir) - { - List paths = inputs.Select(i => new ParentablePath(i)).ToList(); - MergeNoDiff(paths, outDir); - } - - /// - /// Populate the user DatData object from the input files - /// - /// Paths to DATs to parse - /// Filter object to be passed to the DatItem level - /// List of DatData 
objects representing headers - public List PopulateUserData(List inputs, Filter filter) - { - List paths = inputs.Select(i => new ParentablePath(i)).ToList(); - return PopulateUserData(paths, filter); - } - - /// - /// Convert, update, and filter a DAT file or set of files - /// - /// Names of the input files and/or folders - /// Optional param for output directory - /// True if the output files should overwrite their inputs, false otherwise - /// Filter object to be passed to the DatItem level - public void Update(List inputs, string outDir, bool inplace, Filter filter) - { - List paths = inputs.Select(i => new ParentablePath(i)).ToList(); - Update(paths, outDir, inplace, filter); - } - /// /// Replace item values from the base set represented by the current DAT /// @@ -456,7 +223,7 @@ namespace SabreTools.Library.DatFiles /// Filter object to be passed to the DatItem level /// List of Fields representing what should be updated [only for base replacement] /// True if descriptions should only be replaced if the game name is the same, false otherwise - internal void BaseReplace( + public void BaseReplace( List inputs, string outDir, bool inplace, @@ -691,7 +458,7 @@ namespace SabreTools.Library.DatFiles if (updateFields.Contains(Field.Inverted)) { - if (newDatItem.ItemType == ItemType.Rom) + if (newDatItem.ItemType == ItemType.Rom) rom.Inverted = romDupe.Inverted; } @@ -952,7 +719,21 @@ namespace SabreTools.Library.DatFiles /// True if the output files should overwrite their inputs, false otherwise /// Filter object to be passed to the DatItem level /// True to diff using games, false to use hashes - internal void DiffAgainst(List inputs, string outDir, bool inplace, Filter filter, bool useGames) + public void DiffAgainst(List inputs, string outDir, bool inplace, Filter filter, bool useGames) + { + List paths = inputs.Select(i => new ParentablePath(i)).ToList(); + DiffAgainst(paths, outDir, inplace, filter, useGames); + } + + /// + /// Output diffs against a base set represented by the current DAT + /// + /// Names of the input files + /// Optional param for output directory + /// True if the output files should overwrite their inputs, false otherwise + /// Filter object to be passed to the DatItem level + /// True to diff using games, false to use hashes + public void DiffAgainst(List inputs, string outDir, bool inplace, Filter filter, bool useGames) { // For comparison's sake, we want to use a base ordering if (useGames) @@ -1044,7 +825,26 @@ namespace SabreTools.Library.DatFiles /// Output directory to write the DATs to /// True if cascaded diffs are outputted in-place, false otherwise /// True if the first cascaded diff file should be skipped on output, false otherwise - internal void DiffCascade( + public void DiffCascade( + List inputs, + List datHeaders, + string outDir, + bool inplace, + bool skip) + { + List paths = inputs.Select(i => new ParentablePath(i)).ToList(); + DiffCascade(paths, datHeaders, outDir, inplace, skip); + } + + /// + /// Output cascading diffs + /// + /// List of inputs to write out from + /// Dat headers used optionally + /// Output directory to write the DATs to + /// True if cascaded diffs are outputted in-place, false otherwise + /// True if the first cascaded diff file should be skipped on output, false otherwise + public void DiffCascade( List inputs, List datHeaders, string outDir, @@ -1127,19 +927,24 @@ namespace SabreTools.Library.DatFiles } /// - /// Output non-cascading diffs + /// Output duplicate item diff /// /// List of inputs to write out from 
/// Output directory to write the DATs to - /// Non-zero flag for diffing mode, zero otherwise - internal void DiffNoCascade(List inputs, string outDir, UpdateMode diff) + public void DiffDuplicates(List inputs, string outDir) { - InternalStopwatch watch = new InternalStopwatch("Initializing all output DATs"); + List paths = inputs.Select(i => new ParentablePath(i)).ToList(); + DiffDuplicates(paths, outDir); + } - // Default vars for use - string post = string.Empty; - DatFile outerDiffData = Create(); - DatFile dupeData = Create(); + /// + /// Output duplicate item diff + /// + /// List of inputs to write out from + /// Output directory to write the DATs to + public void DiffDuplicates(List inputs, string outDir) + { + InternalStopwatch watch = new InternalStopwatch("Initializing duplicate DAT"); // Fill in any information not in the base DAT if (string.IsNullOrWhiteSpace(Header.FileName)) @@ -1151,54 +956,17 @@ namespace SabreTools.Library.DatFiles if (string.IsNullOrWhiteSpace(Header.Description)) Header.Description = "All DATs"; - // Don't have External dupes - if (diff.HasFlag(UpdateMode.DiffNoDupesOnly)) - { - post = " (No Duplicates)"; - outerDiffData = Create(Header); - outerDiffData.Header.FileName += post; - outerDiffData.Header.Name += post; - outerDiffData.Header.Description += post; - outerDiffData.Items = new ItemDictionary(); - } - - // Have External dupes - if (diff.HasFlag(UpdateMode.DiffDupesOnly)) - { - post = " (Duplicates)"; - dupeData = Create(Header); - dupeData.Header.FileName += post; - dupeData.Header.Name += post; - dupeData.Header.Description += post; - dupeData.Items = new ItemDictionary(); - } - - // Create a list of DatData objects representing individual output files - List outDats = new List(); - - // Loop through each of the inputs and get or create a new DatData object - if (diff.HasFlag(UpdateMode.DiffIndividualsOnly)) - { - DatFile[] outDatsArray = new DatFile[inputs.Count]; - - Parallel.For(0, inputs.Count, Globals.ParallelOptions, j => - { - string innerpost = $" ({j} - {inputs[j].GetNormalizedFileName(true)} Only)"; - DatFile diffData = Create(Header); - diffData.Header.FileName += innerpost; - diffData.Header.Name += innerpost; - diffData.Header.Description += innerpost; - diffData.Items = new ItemDictionary(); - outDatsArray[j] = diffData; - }); - - outDats = outDatsArray.ToList(); - } + string post = " (Duplicates)"; + DatFile dupeData = Create(Header); + dupeData.Header.FileName += post; + dupeData.Header.Name += post; + dupeData.Header.Description += post; + dupeData.Items = new ItemDictionary(); watch.Stop(); // Now, loop through the dictionary and populate the correct DATs - watch.Start("Populating all output DATs"); + watch.Start("Populating duplicate DAT"); Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key => { @@ -1211,36 +979,12 @@ namespace SabreTools.Library.DatFiles // Loop through and add the items correctly foreach (DatItem item in items) { - // No duplicates - if (diff.HasFlag(UpdateMode.DiffNoDupesOnly) || diff.HasFlag(UpdateMode.DiffIndividualsOnly)) + if (item.DupeType.HasFlag(DupeType.External)) { - if (item.DupeType.HasFlag(DupeType.Internal) || item.DupeType == 0x00) - { - // Individual DATs that are output - if (diff.HasFlag(UpdateMode.DiffIndividualsOnly)) - outDats[item.IndexId].Items.Add(key, item); + DatItem newrom = item.Clone() as DatItem; + newrom.MachineName += $" ({Path.GetFileNameWithoutExtension(inputs[item.IndexId].CurrentPath)})"; - // Merged no-duplicates DAT - if 
(diff.HasFlag(UpdateMode.DiffNoDupesOnly)) - { - DatItem newrom = item.Clone() as DatItem; - newrom.MachineName += $" ({Path.GetFileNameWithoutExtension(inputs[item.IndexId].CurrentPath)})"; - - outerDiffData.Items.Add(key, newrom); - } - } - } - - // Duplicates only - if (diff.HasFlag(UpdateMode.DiffNoDupesOnly)) - { - if (item.DupeType.HasFlag(DupeType.External)) - { - DatItem newrom = item.Clone() as DatItem; - newrom.MachineName += $" ({Path.GetFileNameWithoutExtension(inputs[item.IndexId].CurrentPath)})"; - - dupeData.Items.Add(key, newrom); - } + dupeData.Items.Add(key, newrom); } } }); @@ -1248,28 +992,162 @@ namespace SabreTools.Library.DatFiles watch.Stop(); // Finally, loop through and output each of the DATs - watch.Start("Outputting all created DATs"); + watch.Start("Outputting duplicate DAT"); + dupeData.Write(outDir, overwrite: false); + watch.Stop(); + } - // Output the difflist (a-b)+(b-a) diff - if (diff.HasFlag(UpdateMode.DiffNoDupesOnly)) - outerDiffData.Write(outDir, overwrite: false); + /// + /// Output non-cascading diffs + /// + /// List of inputs to write out from + /// Output directory to write the DATs to + public void DiffIndividuals(List inputs, string outDir) + { + List paths = inputs.Select(i => new ParentablePath(i)).ToList(); + DiffIndividuals(paths, outDir); + } - // Output the (ab) diff - if (diff.HasFlag(UpdateMode.DiffDupesOnly)) - dupeData.Write(outDir, overwrite: false); + /// + /// Output non-cascading diffs + /// + /// List of inputs to write out from + /// Output directory to write the DATs to + public void DiffIndividuals(List inputs, string outDir) + { + InternalStopwatch watch = new InternalStopwatch("Initializing all individual DATs"); - // Output the individual (a-b) DATs - if (diff.HasFlag(UpdateMode.DiffIndividualsOnly)) + // Fill in any information not in the base DAT + if (string.IsNullOrWhiteSpace(Header.FileName)) + Header.FileName = "All DATs"; + + if (string.IsNullOrWhiteSpace(Header.Name)) + Header.Name = "All DATs"; + + if (string.IsNullOrWhiteSpace(Header.Description)) + Header.Description = "All DATs"; + + // Loop through each of the inputs and get or create a new DatData object + DatFile[] outDatsArray = new DatFile[inputs.Count]; + + Parallel.For(0, inputs.Count, Globals.ParallelOptions, j => { - Parallel.For(0, inputs.Count, Globals.ParallelOptions, j => + string innerpost = $" ({j} - {inputs[j].GetNormalizedFileName(true)} Only)"; + DatFile diffData = Create(Header); + diffData.Header.FileName += innerpost; + diffData.Header.Name += innerpost; + diffData.Header.Description += innerpost; + diffData.Items = new ItemDictionary(); + outDatsArray[j] = diffData; + }); + + // Create a list of DatData objects representing individual output files + List outDats = outDatsArray.ToList(); + + watch.Stop(); + + // Now, loop through the dictionary and populate the correct DATs + watch.Start("Populating all individual DATs"); + + Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key => + { + List items = DatItem.Merge(Items[key]); + + // If the rom list is empty or null, just skip it + if (items == null || items.Count == 0) + return; + + // Loop through and add the items correctly + foreach (DatItem item in items) { - string path = inputs[j].GetOutputPath(outDir, false /* inplace */); + if (item.DupeType.HasFlag(DupeType.Internal) || item.DupeType == 0x00) + outDats[item.IndexId].Items.Add(key, item); + } + }); - // Try to output the file - outDats[j].Write(path, overwrite: false); - }); - } + watch.Stop(); + // Finally, loop through and 
output each of the DATs + watch.Start("Outputting all individual DATs"); + + Parallel.For(0, inputs.Count, Globals.ParallelOptions, j => + { + string path = inputs[j].GetOutputPath(outDir, false /* inplace */); + + // Try to output the file + outDats[j].Write(path, overwrite: false); + }); + + watch.Stop(); + } + + /// + /// Output non-duplicate item diff + /// + /// List of inputs to write out from + /// Output directory to write the DATs to + public void DiffNoDuplicates(List inputs, string outDir) + { + List paths = inputs.Select(i => new ParentablePath(i)).ToList(); + DiffNoDuplicates(paths, outDir); + } + + /// + /// Output non-duplicate item diff + /// + /// List of inputs to write out from + /// Output directory to write the DATs to + public void DiffNoDuplicates(List inputs, string outDir) + { + InternalStopwatch watch = new InternalStopwatch("Initializing no duplicate DAT"); + + // Fill in any information not in the base DAT + if (string.IsNullOrWhiteSpace(Header.FileName)) + Header.FileName = "All DATs"; + + if (string.IsNullOrWhiteSpace(Header.Name)) + Header.Name = "All DATs"; + + if (string.IsNullOrWhiteSpace(Header.Description)) + Header.Description = "All DATs"; + + string post = " (No Duplicates)"; + DatFile outerDiffData = Create(Header); + outerDiffData.Header.FileName += post; + outerDiffData.Header.Name += post; + outerDiffData.Header.Description += post; + outerDiffData.Items = new ItemDictionary(); + + watch.Stop(); + + // Now, loop through the dictionary and populate the correct DATs + watch.Start("Populating no duplicate DAT"); + + Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key => + { + List items = DatItem.Merge(Items[key]); + + // If the rom list is empty or null, just skip it + if (items == null || items.Count == 0) + return; + + // Loop through and add the items correctly + foreach (DatItem item in items) + { + if (item.DupeType.HasFlag(DupeType.Internal) || item.DupeType == 0x00) + { + DatItem newrom = item.Clone() as DatItem; + newrom.MachineName += $" ({Path.GetFileNameWithoutExtension(inputs[item.IndexId].CurrentPath)})"; + outerDiffData.Items.Add(key, newrom); + } + } + }); + + watch.Stop(); + + // Finally, loop through and output each of the DATs + watch.Start("Outputting no duplicate DAT"); + outerDiffData.Write(outDir, overwrite: false); watch.Stop(); } @@ -1278,7 +1156,18 @@ namespace SabreTools.Library.DatFiles /// /// List of inputs to write out from /// Output directory to write the DATs to - internal void MergeNoDiff(List inputs, string outDir) + public void MergeNoDiff(List inputs, string outDir) + { + List paths = inputs.Select(i => new ParentablePath(i)).ToList(); + MergeNoDiff(paths, outDir); + } + + /// + /// Output user defined merge + /// + /// List of inputs to write out from + /// Output directory to write the DATs to + public void MergeNoDiff(List inputs, string outDir) { // If we're in SuperDAT mode, prefix all games with their respective DATs if (Header.Type == "SuperDAT") @@ -1317,7 +1206,19 @@ namespace SabreTools.Library.DatFiles /// Paths to DATs to parse /// Filter object to be passed to the DatItem level /// List of DatData objects representing headers - internal List PopulateUserData(List inputs, Filter filter) + public List PopulateUserData(List inputs, Filter filter) + { + List paths = inputs.Select(i => new ParentablePath(i)).ToList(); + return PopulateUserData(paths, filter); + } + + /// + /// Populate the user DatData object from the input files + /// + /// Paths to DATs to parse + /// Filter object to be passed to 
the DatItem level + /// List of DatData objects representing headers + public List PopulateUserData(List inputs, Filter filter) { DatFile[] datFiles = new DatFile[inputs.Count]; InternalStopwatch watch = new InternalStopwatch("Processing individual DATs"); @@ -1354,7 +1255,20 @@ namespace SabreTools.Library.DatFiles /// Optional param for output directory /// True if the output files should overwrite their inputs, false otherwise /// Filter object to be passed to the DatItem level - internal void Update(List inputs, string outDir, bool inplace, Filter filter) + public void Update(List inputs, string outDir, bool inplace, Filter filter) + { + List paths = inputs.Select(i => new ParentablePath(i)).ToList(); + Update(paths, outDir, inplace, filter); + } + + /// + /// Convert, update, and filter a DAT file or set of files + /// + /// Names of the input files and/or folders + /// Optional param for output directory + /// True if the output files should overwrite their inputs, false otherwise + /// Filter object to be passed to the DatItem level + public void Update(List inputs, string outDir, bool inplace, Filter filter) { // Iterate over the files foreach (ParentablePath file in inputs) diff --git a/SabreTools.Library/DatFiles/DatHeader.cs b/SabreTools.Library/DatFiles/DatHeader.cs index afb547a0..3da7c524 100644 --- a/SabreTools.Library/DatFiles/DatHeader.cs +++ b/SabreTools.Library/DatFiles/DatHeader.cs @@ -3,6 +3,7 @@ using System.Collections.Generic; using System.IO; using SabreTools.Library.Data; +using SabreTools.Library.DatItems; using SabreTools.Library.Tools; using Newtonsoft.Json; diff --git a/SabreTools.Library/DatFiles/Enums.cs b/SabreTools.Library/DatFiles/Enums.cs new file mode 100644 index 00000000..5853af3e --- /dev/null +++ b/SabreTools.Library/DatFiles/Enums.cs @@ -0,0 +1,98 @@ +namespace SabreTools.Library.DatFiles +{ + /// + /// Determines how the current dictionary is bucketed by + /// + public enum BucketedBy + { + Default = 0, + Size, + CRC, + MD5, +#if NET_FRAMEWORK + RIPEMD160, +#endif + SHA1, + SHA256, + SHA384, + SHA512, + Game, + } + + /// + /// Determines the DAT deduplication type + /// + public enum DedupeType + { + None = 0, + Full, + + // Force only deduping with certain types + Game, + CRC, + MD5, +#if NET_FRAMEWORK + RIPEMD160, +#endif + SHA1, + SHA256, + SHA384, + SHA512, + } + + /// + /// Determines forcemerging tag for DAT output + /// + public enum ForceMerging + { + None = 0, + Split, + Merged, + NonMerged, + Full, + } + + /// + /// Determines forcenodump tag for DAT output + /// + public enum ForceNodump + { + None = 0, + Obsolete, + Required, + Ignore, + } + + /// + /// Determines forcepacking tag for DAT output + /// + public enum ForcePacking + { + None = 0, + Zip, + Unzip, + } + + /// + /// Determines which files should be skipped in DFD + /// + public enum SkipFileType + { + None = 0, + Archive, + File, + } + + /// + /// Determines how a DAT will be split internally + /// + public enum SplitType + { + None = 0, + NonMerged, + Merged, + FullNonMerged, + Split, + DeviceNonMerged + } +} diff --git a/SabreTools.Library/DatItems/DatItem.cs b/SabreTools.Library/DatItems/DatItem.cs index 820af4cf..cf1736dc 100644 --- a/SabreTools.Library/DatItems/DatItem.cs +++ b/SabreTools.Library/DatItems/DatItem.cs @@ -5,6 +5,7 @@ using System.Linq; using System.Net; using SabreTools.Library.Data; +using SabreTools.Library.DatFiles; using SabreTools.Library.FileTypes; using SabreTools.Library.Tools; using NaturalSort; diff --git a/SabreTools.Library/DatItems/Enums.cs 
b/SabreTools.Library/DatItems/Enums.cs new file mode 100644 index 00000000..c302ec4e --- /dev/null +++ b/SabreTools.Library/DatItems/Enums.cs @@ -0,0 +1,86 @@ +namespace SabreTools.Library.DatItems +{ + /// + /// List of valid field types within a DatItem/Machine + /// + public enum Field : int + { + NULL = 0, + + // Generic DatItem + ItemType, + Name, + PartName, + PartInterface, + Features, + AreaName, + AreaSize, + + // Machine + MachineName, + Comment, + Description, + Year, + Manufacturer, + Publisher, + Category, + RomOf, + CloneOf, + SampleOf, + Supported, + SourceFile, + Runnable, + Board, + RebuildTo, + Devices, + SlotOptions, + Infos, + MachineType, + + // BiosSet + Default, + BiosDescription, + + // Disk + MD5, +#if NET_FRAMEWORK + RIPEMD160, +#endif + SHA1, + SHA256, + SHA384, + SHA512, + Merge, + Region, + Index, + Writable, + Optional, + Status, + + // Release + Language, + Date, + + // Rom + Bios, + Size, + CRC, + Offset, + Inverted, + } + + /// + /// Determine what type of file an item is + /// + public enum ItemType + { + Rom = 0, + Disk = 1, + Sample = 2, + Release = 3, + BiosSet = 4, + Archive = 5, + + Blank = 99, // This is not a real type, only used internally + } +} diff --git a/SabreTools.Library/Data/Enums.cs b/SabreTools.Library/Data/Enums.cs index fb3b8e80..f6b0627f 100644 --- a/SabreTools.Library/Data/Enums.cs +++ b/SabreTools.Library/Data/Enums.cs @@ -129,210 +129,6 @@ #endregion - #region DatFile related - - /// - /// Determines how the current dictionary is bucketed by - /// - public enum BucketedBy - { - Default = 0, - Size, - CRC, - MD5, -#if NET_FRAMEWORK - RIPEMD160, -#endif - SHA1, - SHA256, - SHA384, - SHA512, - Game, - } - - /// - /// Determines the DAT deduplication type - /// - public enum DedupeType - { - None = 0, - Full, - - // Force only deduping with certain types - Game, - CRC, - MD5, -#if NET_FRAMEWORK - RIPEMD160, -#endif - SHA1, - SHA256, - SHA384, - SHA512, - } - - /// - /// Determines forcemerging tag for DAT output - /// - public enum ForceMerging - { - None = 0, - Split, - Merged, - NonMerged, - Full, - } - - /// - /// Determines forcenodump tag for DAT output - /// - public enum ForceNodump - { - None = 0, - Obsolete, - Required, - Ignore, - } - - /// - /// Determines forcepacking tag for DAT output - /// - public enum ForcePacking - { - None = 0, - Zip, - Unzip, - } - - /// - /// Determines which files should be skipped in DFD - /// - public enum SkipFileType - { - None = 0, - Archive, - File, - } - - /// - /// Determines how a DAT will be split internally - /// - public enum SplitType - { - None = 0, - NonMerged, - Merged, - FullNonMerged, - Split, - DeviceNonMerged - } - - #endregion - - #region DatItem related - - /// - /// List of valid field types within a DatItem/Machine - /// - public enum Field : int - { - NULL = 0, - - // Generic DatItem - ItemType, - Name, - PartName, - PartInterface, - Features, - AreaName, - AreaSize, - - // Machine - MachineName, - Comment, - Description, - Year, - Manufacturer, - Publisher, - Category, - RomOf, - CloneOf, - SampleOf, - Supported, - SourceFile, - Runnable, - Board, - RebuildTo, - Devices, - SlotOptions, - Infos, - MachineType, - - // BiosSet - Default, - BiosDescription, - - // Disk - MD5, -#if NET_FRAMEWORK - RIPEMD160, -#endif - SHA1, - SHA256, - SHA384, - SHA512, - Merge, - Region, - Index, - Writable, - Optional, - Status, - - // Release - Language, - Date, - - // Rom - Bios, - Size, - CRC, - Offset, - Inverted, - } - - /// - /// Determine what type of file an item is - /// - public 
enum ItemType - { - Rom = 0, - Disk = 1, - Sample = 2, - Release = 3, - BiosSet = 4, - Archive = 5, - - Blank = 99, // This is not a real type, only used internally - } - - #endregion - - #region Help related - - /// - /// Determines the feature type to check for - /// - public enum FeatureType - { - Flag = 0, - String, - Int32, - Int64, - List, - } - - #endregion - #region Logging related /// @@ -376,42 +172,4 @@ } #endregion - - #region Skippers and Mappers - - /// - /// Determines the header skip operation - /// - public enum HeaderSkipOperation - { - None = 0, - Bitswap, - Byteswap, - Wordswap, - WordByteswap, - } - - /// - /// Determines the type of test to be done - /// - public enum HeaderSkipTest - { - Data = 0, - Or, - Xor, - And, - File, - } - - /// - /// Determines the operator to be used in a file test - /// - public enum HeaderSkipTestFileOperator - { - Equal = 0, - Less, - Greater, - } - - #endregion } diff --git a/SabreTools.Library/Data/Flags.cs b/SabreTools.Library/Data/Flags.cs index 3684ad5b..39353adf 100644 --- a/SabreTools.Library/Data/Flags.cs +++ b/SabreTools.Library/Data/Flags.cs @@ -223,50 +223,6 @@ namespace SabreTools.Library.Data All = Int32.MaxValue, } - /// - /// Determines how the DAT will be split on output - /// - [Flags] - public enum SplittingMode - { - None = 0x00, - - Extension = 1 << 0, - Hash = 1 << 2, - Level = 1 << 3, - Type = 1 << 4, - Size = 1 << 5, - } - - /// - /// Determines special update modes - /// - [Flags] - public enum UpdateMode - { - None = 0x00, - - // Standard diffs - DiffDupesOnly = 1 << 0, - DiffNoDupesOnly = 1 << 1, - DiffIndividualsOnly = 1 << 2, - - // Cascaded diffs - DiffCascade = 1 << 3, - DiffReverseCascade = 1 << 4, - - // Base diffs - DiffAgainst = 1 << 5, - - // Special update modes - Merge = 1 << 6, - BaseReplace = 1 << 7, - ReverseBaseReplace = 1 << 8, - - // Combinations - AllDiffs = DiffDupesOnly | DiffNoDupesOnly | DiffIndividualsOnly, - } - #endregion #region DatItem related diff --git a/SabreTools.Library/Help/Enums.cs b/SabreTools.Library/Help/Enums.cs new file mode 100644 index 00000000..73332f29 --- /dev/null +++ b/SabreTools.Library/Help/Enums.cs @@ -0,0 +1,14 @@ +namespace SabreTools.Library.Help +{ + /// + /// Determines the feature type to check for + /// + public enum FeatureType + { + Flag = 0, + String, + Int32, + Int64, + List, + } +} diff --git a/SabreTools.Library/Skippers/Enums.cs b/SabreTools.Library/Skippers/Enums.cs new file mode 100644 index 00000000..d3eed9cc --- /dev/null +++ b/SabreTools.Library/Skippers/Enums.cs @@ -0,0 +1,36 @@ +namespace SabreTools.Library.Skippers +{ + /// + /// Determines the header skip operation + /// + public enum HeaderSkipOperation + { + None = 0, + Bitswap, + Byteswap, + Wordswap, + WordByteswap, + } + + /// + /// Determines the type of test to be done + /// + public enum HeaderSkipTest + { + Data = 0, + Or, + Xor, + And, + File, + } + + /// + /// Determines the operator to be used in a file test + /// + public enum HeaderSkipTestFileOperator + { + Equal = 0, + Less, + Greater, + } +} diff --git a/SabreTools.Library/Skippers/SkipperFile.cs b/SabreTools.Library/Skippers/SkipperFile.cs index c1d47f10..cbac1120 100644 --- a/SabreTools.Library/Skippers/SkipperFile.cs +++ b/SabreTools.Library/Skippers/SkipperFile.cs @@ -4,7 +4,6 @@ using System.Globalization; using System.IO; using System.Xml; -using SabreTools.Library.Data; using SabreTools.Library.Tools; namespace SabreTools.Library.Skippers diff --git a/SabreTools.Library/Skippers/SkipperTest.cs 
b/SabreTools.Library/Skippers/SkipperTest.cs index 237d3d58..f823546b 100644 --- a/SabreTools.Library/Skippers/SkipperTest.cs +++ b/SabreTools.Library/Skippers/SkipperTest.cs @@ -1,8 +1,6 @@ using System; using System.IO; -using SabreTools.Library.Data; - namespace SabreTools.Library.Skippers { /// diff --git a/SabreTools.Library/Tools/Converters.cs b/SabreTools.Library/Tools/Converters.cs index 02db62bc..598f0704 100644 --- a/SabreTools.Library/Tools/Converters.cs +++ b/SabreTools.Library/Tools/Converters.cs @@ -1,4 +1,6 @@ using SabreTools.Library.Data; +using SabreTools.Library.DatFiles; +using SabreTools.Library.DatItems; namespace SabreTools.Library.Tools { diff --git a/SabreTools.Library/Tools/Logger.cs b/SabreTools.Library/Tools/Logger.cs index 3e7d461b..690d0ebb 100644 --- a/SabreTools.Library/Tools/Logger.cs +++ b/SabreTools.Library/Tools/Logger.cs @@ -3,7 +3,6 @@ using System.IO; using System.Text; using SabreTools.Library.Data; -using SabreTools.Library.Tools; namespace SabreTools.Library.Tools { diff --git a/SabreTools/Features/BaseFeature.cs b/SabreTools/Features/BaseFeature.cs index 0c5cbcd0..282d3fec 100644 --- a/SabreTools/Features/BaseFeature.cs +++ b/SabreTools/Features/BaseFeature.cs @@ -1,7 +1,9 @@ -using System.Collections.Generic; +using System; +using System.Collections.Generic; using SabreTools.Library.Data; using SabreTools.Library.DatFiles; +using SabreTools.Library.DatItems; using SabreTools.Library.Filtering; using SabreTools.Library.Help; using SabreTools.Library.Tools; @@ -10,6 +12,54 @@ namespace SabreTools.Features { internal class BaseFeature : TopLevel { + #region Enums + + /// + /// Determines how the DAT will be split on output + /// + [Flags] + public enum SplittingMode + { + None = 0x00, + + Extension = 1 << 0, + Hash = 1 << 2, + Level = 1 << 3, + Type = 1 << 4, + Size = 1 << 5, + } + + /// + /// Determines special update modes + /// + [Flags] + public enum UpdateMode + { + None = 0x00, + + // Standard diffs + DiffDupesOnly = 1 << 0, + DiffNoDupesOnly = 1 << 1, + DiffIndividualsOnly = 1 << 2, + + // Cascaded diffs + DiffCascade = 1 << 3, + DiffReverseCascade = 1 << 4, + + // Base diffs + DiffAgainst = 1 << 5, + + // Special update modes + Merge = 1 << 6, + BaseReplace = 1 << 7, + ReverseBaseReplace = 1 << 8, + + // Combinations + AllDiffs = DiffDupesOnly | DiffNoDupesOnly | DiffIndividualsOnly, + } + + #endregion + #region Features #region Flag features diff --git a/SabreTools/Features/DatFromDir.cs b/SabreTools/Features/DatFromDir.cs index 58d78076..11f5e917 100644 --- a/SabreTools/Features/DatFromDir.cs +++ b/SabreTools/Features/DatFromDir.cs @@ -2,7 +2,6 @@ using System.Collections.Generic; using System.IO; -using SabreTools.Library.Data; using SabreTools.Library.DatFiles; using SabreTools.Library.Help; diff --git a/SabreTools/Features/DisplayHelp.cs b/SabreTools/Features/DisplayHelp.cs index 45e45704..cfb8e63f 100644 --- a/SabreTools/Features/DisplayHelp.cs +++ b/SabreTools/Features/DisplayHelp.cs @@ -1,6 +1,5 @@ using System.Collections.Generic; -using SabreTools.Library.Data; using SabreTools.Library.Help; namespace SabreTools.Features diff --git a/SabreTools/Features/DisplayHelpDetailed.cs b/SabreTools/Features/DisplayHelpDetailed.cs index c0b950b8..c32f4b97 100644 --- a/SabreTools/Features/DisplayHelpDetailed.cs +++ b/SabreTools/Features/DisplayHelpDetailed.cs @@ -1,6 +1,5 @@ using System.Collections.Generic; -using SabreTools.Library.Data; using SabreTools.Library.Help; namespace SabreTools.Features diff --git a/SabreTools/Features/Extract.cs 
b/SabreTools/Features/Extract.cs index 062c4294..0381ccde 100644 --- a/SabreTools/Features/Extract.cs +++ b/SabreTools/Features/Extract.cs @@ -1,6 +1,5 @@ using System.Collections.Generic; -using SabreTools.Library.Data; using SabreTools.Library.Help; using SabreTools.Library.Skippers; using SabreTools.Library.Tools; diff --git a/SabreTools/Features/Restore.cs b/SabreTools/Features/Restore.cs index 7ad85f1d..e3969c45 100644 --- a/SabreTools/Features/Restore.cs +++ b/SabreTools/Features/Restore.cs @@ -1,6 +1,5 @@ using System.Collections.Generic; -using SabreTools.Library.Data; using SabreTools.Library.Help; using SabreTools.Library.Skippers; using SabreTools.Library.Tools; diff --git a/SabreTools/Features/Script.cs b/SabreTools/Features/Script.cs index 6ed5eb83..af1c9728 100644 --- a/SabreTools/Features/Script.cs +++ b/SabreTools/Features/Script.cs @@ -1,6 +1,5 @@ using System.Collections.Generic; -using SabreTools.Library.Data; using SabreTools.Library.Help; namespace SabreTools.Features diff --git a/SabreTools/Features/Split.cs b/SabreTools/Features/Split.cs index 131880c0..4388bd3b 100644 --- a/SabreTools/Features/Split.cs +++ b/SabreTools/Features/Split.cs @@ -1,8 +1,8 @@ using System.Collections.Generic; -using SabreTools.Library.Data; using SabreTools.Library.DatFiles; using SabreTools.Library.Help; +using SabreTools.Library.Tools; namespace SabreTools.Features { @@ -38,18 +38,55 @@ namespace SabreTools.Features public override void ProcessFeatures(Dictionary features) { base.ProcessFeatures(features); + SplittingMode splittingMode = GetSplittingMode(features); - DatFile datfile = DatFile.Create(Header.DatFormat); - datfile.DetermineSplitType( - Inputs, - OutputDir, - GetBoolean(features, InplaceValue), - GetSplittingMode(features), - GetList(features, ExtAListValue), - GetList(features, ExtBListValue), - GetBoolean(features, ShortValue), - GetBoolean(features, BaseValue), - GetInt64(features, RadixInt64Value)); + // If we somehow have the "none" split type, return + if (splittingMode == SplittingMode.None) + return; + + // Get only files from the inputs + List files = DirectoryExtensions.GetFilesOnly(Inputs, appendparent: true); + + // Loop over the input files + foreach (ParentablePath file in files) + { + // Create and fill the new DAT + DatFile internalDat = DatFile.Create(Header); + internalDat.Parse(file); + + // Get the output directory + OutputDir = file.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue)); + + // Extension splitting + if (splittingMode.HasFlag(SplittingMode.Extension)) + { + internalDat.SplitByExtension( + OutputDir, + GetList(features, ExtAListValue), + GetList(features, ExtBListValue)); + } + + // Hash splitting + if (splittingMode.HasFlag(SplittingMode.Hash)) + internalDat.SplitByHash(OutputDir); + + // Level splitting + if (splittingMode.HasFlag(SplittingMode.Level)) + { + internalDat.SplitByLevel( + OutputDir, + GetBoolean(features, ShortValue), + GetBoolean(features, BaseValue)); + } + + // Size splitting + if (splittingMode.HasFlag(SplittingMode.Size)) + internalDat.SplitBySize(OutputDir, GetInt64(features, RadixInt64Value)); + + // Type splitting + if (splittingMode.HasFlag(SplittingMode.Type)) + internalDat.SplitByType(OutputDir); + } } } } diff --git a/SabreTools/Features/Stats.cs b/SabreTools/Features/Stats.cs index 5d953aad..793e5104 100644 --- a/SabreTools/Features/Stats.cs +++ b/SabreTools/Features/Stats.cs @@ -1,7 +1,6 @@ using System.Collections.Generic; using System.IO; -using SabreTools.Library.Data; using 
SabreTools.Library.DatFiles; using SabreTools.Library.Help; diff --git a/SabreTools/Features/Update.cs b/SabreTools/Features/Update.cs index 1589539f..45745615 100644 --- a/SabreTools/Features/Update.cs +++ b/SabreTools/Features/Update.cs @@ -1,9 +1,10 @@ using System; using System.Collections.Generic; -using SabreTools.Library.Data; using SabreTools.Library.DatFiles; +using SabreTools.Library.DatItems; using SabreTools.Library.Help; +using SabreTools.Library.Tools; namespace SabreTools.Features { @@ -142,20 +143,93 @@ namespace SabreTools.Features if (updateFields == null || updateFields.Count == 0) updateFields = new List() { Field.Name }; - // Populate the DatData object + // Ensure we only have files in the inputs + List inputFileNames = DirectoryExtensions.GetFilesOnly(Inputs, appendparent: true); + List baseFileNames = DirectoryExtensions.GetFilesOnly(GetList(features, BaseDatListValue)); + + // If we're in standard update mode, run through all of the inputs + if (updateMode == UpdateMode.None) + { + DatFile datFile = DatFile.Create(Header); + datFile.Update( + inputFileNames, + OutputDir, + GetBoolean(features, InplaceValue), + Filter); + return; + } + + // Reverse inputs if we're in a required mode + if (updateMode.HasFlag(UpdateMode.DiffReverseCascade)) + { + updateMode |= UpdateMode.DiffCascade; + inputFileNames.Reverse(); + } + if (updateMode.HasFlag(UpdateMode.ReverseBaseReplace)) + { + updateMode |= UpdateMode.BaseReplace; + baseFileNames.Reverse(); + } + + // Create a DAT to capture inputs DatFile userInputDat = DatFile.Create(Header); - userInputDat.DetermineUpdateType( - Inputs, - GetList(features, BaseDatListValue), - OutputDir, - updateMode, - GetBoolean(features, InplaceValue), - GetBoolean(features, SkipFirstOutputValue), - Filter, - updateFields, - GetBoolean(features, OnlySameValue), - GetBoolean(Features, ByGameValue)); + // Populate using the correct set + List datHeaders; + if (updateMode.HasFlag(UpdateMode.DiffAgainst) || updateMode.HasFlag(UpdateMode.BaseReplace)) + datHeaders = userInputDat.PopulateUserData(baseFileNames, Filter); + else + datHeaders = userInputDat.PopulateUserData(inputFileNames, Filter); + + // Merge all input files and write + if (updateMode.HasFlag(UpdateMode.Merge)) + userInputDat.MergeNoDiff(inputFileNames, OutputDir); + + // Output only DatItems that are duplicated across inputs + if (updateMode.HasFlag(UpdateMode.DiffDupesOnly)) + userInputDat.DiffDuplicates(inputFileNames, OutputDir); + + // Output only DatItems that are not duplicated across inputs + if (updateMode.HasFlag(UpdateMode.DiffNoDupesOnly)) + userInputDat.DiffNoDuplicates(inputFileNames, OutputDir); + + // Output only DatItems that are unique to each input + if (updateMode.HasFlag(UpdateMode.DiffIndividualsOnly)) + userInputDat.DiffIndividuals(inputFileNames, OutputDir); + + // Output cascaded diffs + if (updateMode.HasFlag(UpdateMode.DiffCascade)) + { + userInputDat.DiffCascade( + inputFileNames, + datHeaders, + OutputDir, + GetBoolean(features, InplaceValue), + GetBoolean(features, SkipFirstOutputValue)); + } + + // Output differences against a base DAT + if (updateMode.HasFlag(UpdateMode.DiffAgainst)) + { + userInputDat.DiffAgainst( + inputFileNames, + OutputDir, + GetBoolean(features, InplaceValue), + Filter, + GetBoolean(Features, ByGameValue)); + } + + // Output DATs after replacing fields from a base DAT + if (updateMode.HasFlag(UpdateMode.BaseReplace)) + { + userInputDat.BaseReplace( + inputFileNames, + OutputDir, + GetBoolean(features, InplaceValue), + Filter, + 
updateFields,
+                    GetBoolean(features, OnlySameValue));
+            }
         }
     }
 }
diff --git a/SabreTools/Features/Verify.cs b/SabreTools/Features/Verify.cs
index ca144eb4..79c3f3a3 100644
--- a/SabreTools/Features/Verify.cs
+++ b/SabreTools/Features/Verify.cs
@@ -1,6 +1,5 @@
 using System.Collections.Generic;
 
-using SabreTools.Library.Data;
 using SabreTools.Library.DatFiles;
 using SabreTools.Library.Filtering;
 using SabreTools.Library.Help;
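
Note on the refactored call pattern: the monolithic DetermineUpdateType/DetermineSplitType helpers are removed, and callers now invoke the individual public operations directly (PopulateUserData, DiffAgainst, DiffCascade, DiffDuplicates, DiffNoDuplicates, DiffIndividuals, MergeNoDiff, BaseReplace, Update), with mode selection handled in the feature classes. The snippet below is a minimal sketch only, not part of the patch: it mirrors the new RombaSharp Diffdat/EDiffdat call sites for a base-set diff. The file paths are placeholders, the List element types are assumed to be string (generic parameters are not visible in this rendering of the patch), and the parameterless DatFile.Create() overload is assumed to be reachable from calling code, as it is used elsewhere inside DatFile.

```csharp
// Minimal sketch (not part of the patch): diff a set of new DATs against a base
// DAT using the decomposed API instead of the removed DetermineUpdateType.
using System.Collections.Generic;

using SabreTools.Library.DatFiles;
using SabreTools.Library.Filtering;

internal static class DiffAgainstExample
{
    public static void Run()
    {
        // Placeholder inputs; List<string> element types are an assumption here.
        List<string> newDats = new List<string> { "new1.dat", "new2.dat" };
        List<string> baseDats = new List<string> { "old.dat" };
        string outDir = "out";

        // The current DAT holds the base set; the new string-based overloads wrap
        // each path in a ParentablePath internally before dispatching.
        DatFile datFile = DatFile.Create(); // assumed accessible; features use Create(Header)/Create(DatFormat)
        datFile.PopulateUserData(baseDats, new Filter());

        // Write out everything in newDats that is not already in the base set
        // (hash comparison; pass useGames: true to compare whole games instead).
        datFile.DiffAgainst(newDats, outDir, inplace: false, new Filter(), useGames: false);
    }
}
```

The same pattern applies to the other decomposed operations; for example, the SabreTools Update feature above now branches on UpdateMode itself and calls DiffDuplicates, DiffNoDuplicates, DiffIndividuals, DiffCascade, or BaseReplace as appropriate.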