diff --git a/SabreTools.Library/DatFiles/DatFile.cs b/SabreTools.Library/DatFiles/DatFile.cs
index 86f59d6d..df4bc1e2 100644
--- a/SabreTools.Library/DatFiles/DatFile.cs
+++ b/SabreTools.Library/DatFiles/DatFile.cs
@@ -190,7 +190,65 @@ namespace SabreTools.Library.DatFiles
#endregion
- #region Converting and Updating
+ #region Determination Helpers
+
+ /// <summary>
+ /// Split a set of input DATs based on the given information
+ /// </summary>
+ /// <param name="inputs">List of inputs to be used</param>
+ /// <param name="outDir">Output directory for the split files</param>
+ /// <param name="inplace">True if files should be written to the source folders, false otherwise</param>
+ /// <param name="splittingMode">Type of split to perform, if any</param>
+ /// <param name="exta">First extension to split on (Extension Split only)</param>
+ /// <param name="extb">Second extension to split on (Extension Split only)</param>
+ /// <param name="shortname">True if short filenames should be used, false otherwise (Level Split only)</param>
+ /// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise (Level Split only)</param>
+ /// <param name="radix">Long value representing the split point (Size Split only)</param>
+ public void DetermineSplitType(
+ List<string> inputs,
+ string outDir,
+ bool inplace,
+ SplittingMode splittingMode,
+ List<string> exta,
+ List<string> extb,
+ bool shortname,
+ bool basedat,
+ long radix)
+ {
+ // If we somehow have the "none" split type, return
+ if (splittingMode == SplittingMode.None)
+ return;
+
+ // Get only files from the inputs
+ List<ParentablePath> files = DirectoryExtensions.GetFilesOnly(inputs, appendparent: true);
+
+ // Loop over the input files
+ foreach (ParentablePath file in files)
+ {
+ // Create and fill the new DAT
+ DatFile internalDat = Create(Header);
+ internalDat.Parse(file);
+
+ // Get the output directory
+ outDir = file.GetOutputPath(outDir, inplace);
+
+ // Split and write the DAT
+ if (splittingMode.HasFlag(SplittingMode.Extension))
+ internalDat.SplitByExtension(outDir, exta, extb);
+
+ if (splittingMode.HasFlag(SplittingMode.Hash))
+ internalDat.SplitByHash(outDir);
+
+ if (splittingMode.HasFlag(SplittingMode.Level))
+ internalDat.SplitByLevel(outDir, shortname, basedat);
+
+ if (splittingMode.HasFlag(SplittingMode.Size))
+ internalDat.SplitBySize(outDir, radix);
+
+ if (splittingMode.HasFlag(SplittingMode.Type))
+ internalDat.SplitByType(outDir);
+ }
+ }
/// <summary>
/// Determine if input files should be merged, diffed, or processed individually
@@ -279,53 +337,121 @@ namespace SabreTools.Library.DatFiles
return;
}
+ #endregion
+
+ #region Converting and Updating
+
+ /// <summary>
+ /// Replace item values from the base set represented by the current DAT
+ /// </summary>
+ /// <param name="inputs">Names of the input files</param>
+ /// <param name="outDir">Optional param for output directory</param>
+ /// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
+ /// <param name="filter">Filter object to be passed to the DatItem level</param>
+ /// <param name="updateFields">List of Fields representing what should be updated [only for base replacement]</param>
+ /// <param name="onlySame">True if descriptions should only be replaced if the game name is the same, false otherwise</param>
+ public void BaseReplace(
+ List<string> inputs,
+ string outDir,
+ bool inplace,
+ Filter filter,
+ List<Field> updateFields,
+ bool onlySame)
+ {
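+ // Wrap the raw string paths as ParentablePath objects, then delegate to the path-based overload below (the same pattern is used by the other public wrappers in this region)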
+ List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
+ BaseReplace(paths, outDir, inplace, filter, updateFields, onlySame);
+ }
+
+ /// <summary>
+ /// Output diffs against a base set represented by the current DAT
+ /// </summary>
+ /// <param name="inputs">Names of the input files</param>
+ /// <param name="outDir">Optional param for output directory</param>
+ /// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
+ public void DiffAgainst(List<string> inputs, string outDir, bool inplace)
+ {
+ List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
+ DiffAgainst(paths, outDir, inplace);
+ }
+
+ /// <summary>
+ /// Output cascading diffs
+ /// </summary>
+ /// <param name="inputs">List of inputs to write out from</param>
+ /// <param name="datHeaders">Dat headers used optionally</param>
+ /// <param name="outDir">Output directory to write the DATs to</param>
+ /// <param name="inplace">True if cascaded diffs are outputted in-place, false otherwise</param>
+ /// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
+ public void DiffCascade(
+ List<string> inputs,
+ List<DatHeader> datHeaders,
+ string outDir,
+ bool inplace,
+ bool skip)
+ {
+ List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
+ DiffCascade(paths, datHeaders, outDir, inplace, skip);
+ }
+
+ /// <summary>
+ /// Output non-cascading diffs
+ /// </summary>
+ /// <param name="inputs">List of inputs to write out from</param>
+ /// <param name="outDir">Output directory to write the DATs to</param>
+ /// <param name="diff">Non-zero flag for diffing mode, zero otherwise</param>
+ public void DiffNoCascade(List<string> inputs, string outDir, UpdateMode diff)
+ {
+ List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
+ DiffNoCascade(paths, outDir, diff);
+ }
+
+ /// <summary>
+ /// Output user defined merge
+ /// </summary>
+ /// <param name="inputs">List of inputs to write out from</param>
+ /// <param name="outDir">Output directory to write the DATs to</param>
+ public void MergeNoDiff(List<string> inputs, string outDir)
+ {
+ List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
+ MergeNoDiff(paths, outDir);
+ }
+
/// <summary>
/// Populate the user DatData object from the input files
/// </summary>
/// <param name="inputs">Paths to DATs to parse</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
/// <returns>List of DatData objects representing headers</returns>
- private List<DatHeader> PopulateUserData(List<ParentablePath> inputs, Filter filter)
+ public List<DatHeader> PopulateUserData(List<string> inputs, Filter filter)
{
- DatFile[] datFiles = new DatFile[inputs.Count];
- InternalStopwatch watch = new InternalStopwatch("Processing individual DATs");
+ List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
+ return PopulateUserData(paths, filter);
+ }
- // Parse all of the DATs into their own DatFiles in the array
- Parallel.For(0, inputs.Count, Globals.ParallelOptions, i =>
- {
- var input = inputs[i];
- Globals.Logger.User($"Adding DAT: {input.CurrentPath}");
- datFiles[i] = Create(Header.CloneFiltering());
- datFiles[i].Parse(input, i, keep: true);
- });
-
- watch.Stop();
-
- watch.Start("Populating internal DAT");
- for (int i = 0; i < inputs.Count; i++)
- {
- AddFromExisting(datFiles[i], true);
- }
-
- // Now that we have a merged DAT, filter it
- filter.FilterDatFile(this, false /* useTags */);
-
- watch.Stop();
-
- return datFiles.Select(d => d.Header).ToList();
+ /// <summary>
+ /// Convert, update, and filter a DAT file or set of files
+ /// </summary>
+ /// <param name="inputs">Names of the input files and/or folders</param>
+ /// <param name="outDir">Optional param for output directory</param>
+ /// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
+ /// <param name="filter">Filter object to be passed to the DatItem level</param>
+ public void Update(List<string> inputs, string outDir, bool inplace, Filter filter)
+ {
+ List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
+ Update(paths, outDir, inplace, filter);
}
/// <summary>
/// Replace item values from the base set represented by the current DAT
/// </summary>
- /// <param name="inputFileNames">Names of the input files</param>
+ /// <param name="inputs">Names of the input files</param>
/// <param name="outDir">Optional param for output directory</param>
/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
/// <param name="updateFields">List of Fields representing what should be updated [only for base replacement]</param>
/// <param name="onlySame">True if descriptions should only be replaced if the game name is the same, false otherwise</param>
private void BaseReplace(
- List<ParentablePath> inputFileNames,
+ List<ParentablePath> inputs,
string outDir,
bool inplace,
Filter filter,
@@ -390,7 +516,7 @@ namespace SabreTools.Library.DatFiles
};
// We want to try to replace each item in each input DAT from the base
- foreach (ParentablePath path in inputFileNames)
+ foreach (ParentablePath path in inputs)
{
Globals.Logger.User($"Replacing items in '{path.CurrentPath}' from the base DAT");
@@ -808,16 +934,16 @@ namespace SabreTools.Library.DatFiles
/// <summary>
/// Output diffs against a base set represented by the current DAT
/// </summary>
- /// <param name="inputFileNames">Names of the input files</param>
+ /// <param name="inputs">Names of the input files</param>
/// <param name="outDir">Optional param for output directory</param>
/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
- private void DiffAgainst(List<ParentablePath> inputFileNames, string outDir, bool inplace)
+ private void DiffAgainst(List<ParentablePath> inputs, string outDir, bool inplace)
{
// For comparison's sake, we want to use CRC as the base ordering
Items.BucketBy(BucketedBy.CRC, DedupeType.Full);
// Now we want to compare each input DAT against the base
- foreach (ParentablePath path in inputFileNames)
+ foreach (ParentablePath path in inputs)
{
Globals.Logger.User($"Comparing '{path.CurrentPath}' to base DAT");
@@ -863,7 +989,12 @@ namespace SabreTools.Library.DatFiles
/// <param name="outDir">Output directory to write the DATs to</param>
/// <param name="inplace">True if cascaded diffs are outputted in-place, false otherwise</param>
/// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
- private void DiffCascade(List<ParentablePath> inputs, List<DatHeader> datHeaders, string outDir, bool inplace, bool skip)
+ private void DiffCascade(
+ List<ParentablePath> inputs,
+ List<DatHeader> datHeaders,
+ string outDir,
+ bool inplace,
+ bool skip)
{
// Create a list of DatData objects representing output files
List<DatFile> outDats = new List<DatFile>();
@@ -1125,17 +1256,53 @@ namespace SabreTools.Library.DatFiles
Write(outDir, overwrite: false);
}
+ /// <summary>
+ /// Populate the user DatData object from the input files
+ /// </summary>
+ /// <param name="inputs">Paths to DATs to parse</param>
+ /// <param name="filter">Filter object to be passed to the DatItem level</param>
+ /// <returns>List of DatData objects representing headers</returns>
+ private List<DatHeader> PopulateUserData(List<ParentablePath> inputs, Filter filter)
+ {
+ DatFile[] datFiles = new DatFile[inputs.Count];
+ InternalStopwatch watch = new InternalStopwatch("Processing individual DATs");
+
+ // Parse all of the DATs into their own DatFiles in the array
+ Parallel.For(0, inputs.Count, Globals.ParallelOptions, i =>
+ {
+ var input = inputs[i];
+ Globals.Logger.User($"Adding DAT: {input.CurrentPath}");
+ datFiles[i] = Create(Header.CloneFiltering());
+ datFiles[i].Parse(input, i, keep: true);
+ });
+
+ watch.Stop();
+
+ watch.Start("Populating internal DAT");
+ for (int i = 0; i < inputs.Count; i++)
+ {
+ AddFromExisting(datFiles[i], true);
+ }
+
+ // Now that we have a merged DAT, filter it
+ filter.FilterDatFile(this, false /* useTags */);
+
+ watch.Stop();
+
+ return datFiles.Select(d => d.Header).ToList();
+ }
+
/// <summary>
/// Convert, update, and filter a DAT file or set of files
/// </summary>
- /// <param name="inputFileNames">Names of the input files and/or folders</param>
+ /// <param name="inputs">Names of the input files and/or folders</param>
/// <param name="outDir">Optional param for output directory</param>
/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
- private void Update(List<ParentablePath> inputFileNames, string outDir, bool inplace, Filter filter)
+ private void Update(List<ParentablePath> inputs, string outDir, bool inplace, Filter filter)
{
// Iterate over the files
- foreach (ParentablePath file in inputFileNames)
+ foreach (ParentablePath file in inputs)
{
DatFile innerDatdata = Create(Header);
Globals.Logger.User($"Processing '{Path.GetFileName(file.CurrentPath)}'");
@@ -2525,64 +2692,6 @@ namespace SabreTools.Library.DatFiles
// TODO: Implement Level split
#region Splitting
- /// <summary>
- /// Split a set of input DATs based on the given information
- /// </summary>
- /// <param name="inputs">List of inputs to be used</param>
- /// <param name="outDir">Output directory for the split files</param>
- /// <param name="inplace">True if files should be written to the source folders, false otherwise</param>
- /// <param name="splittingMode">Type of split to perform, if any</param>
- /// <param name="exta">First extension to split on (Extension Split only)</param>
- /// <param name="extb">Second extension to split on (Extension Split only)</param>
- /// <param name="shortname">True if short filenames should be used, false otherwise (Level Split only)</param>
- /// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise (Level Split only)</param>
- /// <param name="radix">Long value representing the split point (Size Split only)</param>
- public void DetermineSplitType(
- List<string> inputs,
- string outDir,
- bool inplace,
- SplittingMode splittingMode,
- List<string> exta,
- List<string> extb,
- bool shortname,
- bool basedat,
- long radix)
- {
- // If we somehow have the "none" split type, return
- if (splittingMode == SplittingMode.None)
- return;
-
- // Get only files from the inputs
- List<ParentablePath> files = DirectoryExtensions.GetFilesOnly(inputs, appendparent: true);
-
- // Loop over the input files
- foreach (ParentablePath file in files)
- {
- // Create and fill the new DAT
- DatFile internalDat = Create(Header);
- internalDat.Parse(file);
-
- // Get the output directory
- outDir = file.GetOutputPath(outDir, inplace);
-
- // Split and write the DAT
- if (splittingMode.HasFlag(SplittingMode.Extension))
- internalDat.SplitByExtension(outDir, exta, extb);
-
- if (splittingMode.HasFlag(SplittingMode.Hash))
- internalDat.SplitByHash(outDir);
-
- if (splittingMode.HasFlag(SplittingMode.Level))
- internalDat.SplitByLevel(outDir, shortname, basedat);
-
- if (splittingMode.HasFlag(SplittingMode.Size))
- internalDat.SplitBySize(outDir, radix);
-
- if (splittingMode.HasFlag(SplittingMode.Type))
- internalDat.SplitByType(outDir);
- }
- }
-
/// <summary>
/// Split a DAT by input extensions
/// </summary>