mirror of https://github.com/claunia/SabreTools.git (synced 2025-12-16 19:14:27 +00:00)
Diff, determination, enum cleanup
@@ -67,8 +67,8 @@ in -old DAT file. Ignores those entries in -old that are not in -new.";
     List<string> basedats = new List<string> { olddat };

     // Now run the diff on the inputs
-    datfile.DetermineUpdateType(dats, basedats, outdat, UpdateMode.DiffAgainst, false /* inplace */, false /* skip */,
-        new Filter(), new List<Field>(), false /* onlySame */, false /* byGame */);
+    datfile.PopulateUserData(basedats, new Filter());
+    datfile.DiffAgainst(dats, outdat, false, new Filter(), false);
 }
 }
 }
@@ -1,6 +1,5 @@
 using System.Collections.Generic;

-using SabreTools.Library.Data;
 using SabreTools.Library.Help;

 namespace RombaSharp.Features
@@ -1,6 +1,5 @@
 using System.Collections.Generic;

-using SabreTools.Library.Data;
 using SabreTools.Library.Help;

 namespace RombaSharp.Features
@@ -60,8 +60,8 @@ namespace RombaSharp.Features
     List<string> basedats = new List<string> { olddat };

     // Now run the diff on the inputs
-    datfile.DetermineUpdateType(dats, basedats, outdat, UpdateMode.DiffAgainst, false /* inplace */, false /* skip */,
-        new Filter(), new List<Field>(), false /* onlySame */, false /* byGame */);
+    datfile.PopulateUserData(basedats, new Filter());
+    datfile.DiffAgainst(dats, outdat, false, new Filter(), false);
 }
 }
 }
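Both RombaSharp hunks above make the same call-site change: instead of routing through the removed DetermineUpdateType dispatcher, the feature loads the base DATs with PopulateUserData and then calls DiffAgainst directly. A minimal sketch of that call site in isolation, assuming the datfile/dats/olddat/outdat values are already set up as in the hunks (only the wrapper class and method name below are invented for illustration):

using System.Collections.Generic;
using SabreTools.Library.DatFiles;
using SabreTools.Library.Filtering;

public static class DiffAgainstSketch
{
    // 'dats' are the new DAT paths, 'olddat' is the baseline DAT, 'outdat' is the output directory.
    public static void Run(DatFile datfile, List<string> dats, string olddat, string outdat)
    {
        List<string> basedats = new List<string> { olddat };

        // Load the baseline set, then diff the inputs against it (hash-based, not by game).
        datfile.PopulateUserData(basedats, new Filter());
        datfile.DiffAgainst(dats, outdat, false, new Filter(), false);
    }
}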
@@ -1,7 +1,6 @@
 using System.Collections.Generic;
 using System.IO;

-using SabreTools.Library.Data;
 using SabreTools.Library.Help;
 using SabreTools.Library.Tools;
 using Microsoft.Data.Sqlite;
@@ -1,6 +1,5 @@
 using System.Collections.Generic;

-using SabreTools.Library.Data;
 using SabreTools.Library.Help;

 namespace RombaSharp.Features
@@ -191,158 +191,6 @@ namespace SabreTools.Library.DatFiles

 #endregion

-// TODO: Move to features?
-#region Determination Helpers
-
-/// <summary>
-/// Split a set of input DATs based on the given information
-/// </summary>
-/// <param name="inputs">List of inputs to be used</param>
-/// <param name="outDir">Output directory for the split files</param>
-/// <param name="inplace">True if files should be written to the source folders, false otherwise</param>
-/// <param name="splittingMode">Type of split to perform, if any</param>
-/// <param name="exta">First extension to split on (Extension Split only)</param>
-/// <param name="extb">Second extension to split on (Extension Split only)</param>
-/// <param name="shortname">True if short filenames should be used, false otherwise (Level Split only)</param>
-/// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise (Level Split only)</param>
-/// <param name="radix">Long value representing the split point (Size Split only)</param>
-public void DetermineSplitType(
-    List<string> inputs,
-    string outDir,
-    bool inplace,
-    SplittingMode splittingMode,
-    List<string> exta,
-    List<string> extb,
-    bool shortname,
-    bool basedat,
-    long radix)
-{
-    // If we somehow have the "none" split type, return
-    if (splittingMode == SplittingMode.None)
-        return;
-
-    // Get only files from the inputs
-    List<ParentablePath> files = DirectoryExtensions.GetFilesOnly(inputs, appendparent: true);
-
-    // Loop over the input files
-    foreach (ParentablePath file in files)
-    {
-        // Create and fill the new DAT
-        DatFile internalDat = Create(Header);
-        Parse(file);
-
-        // Get the output directory
-        outDir = file.GetOutputPath(outDir, inplace);
-
-        // Split and write the DAT
-        if (splittingMode.HasFlag(SplittingMode.Extension))
-            internalDat.SplitByExtension(outDir, exta, extb);
-
-        if (splittingMode.HasFlag(SplittingMode.Hash))
-            internalDat.SplitByHash(outDir);
-
-        if (splittingMode.HasFlag(SplittingMode.Level))
-            internalDat.SplitByLevel(outDir, shortname, basedat);
-
-        if (splittingMode.HasFlag(SplittingMode.Size))
-            internalDat.SplitBySize(outDir, radix);
-
-        if (splittingMode.HasFlag(SplittingMode.Type))
-            internalDat.SplitByType(outDir);
-    }
-}
-
-/// <summary>
-/// Determine if input files should be merged, diffed, or processed invidually
-/// </summary>
-/// <param name="inputPaths">Names of the input files and/or folders</param>
-/// <param name="basePaths">Names of base files and/or folders</param>
-/// <param name="outDir">Optional param for output directory</param>
-/// <param name="updateMode">Non-zero flag for diffing mode, zero otherwise</param>
-/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
-/// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
-/// <param name="filter">Filter object to be passed to the DatItem level</param>
-/// <param name="updateFields">List of Fields representing what should be updated [only for base replacement]</param>
-/// <param name="onlySame">True if descriptions should only be replaced if the game name is the same, false otherwise [only for base replacement]</param>
-/// <param name="byGame">True if diffing is by Game, false if diffing is by hash [only for against]</param>
-public void DetermineUpdateType(
-    List<string> inputPaths,
-    List<string> basePaths,
-    string outDir,
-    UpdateMode updateMode,
-    bool inplace,
-    bool skip,
-    Filter filter,
-    List<Field> updateFields,
-    bool onlySame,
-    bool byGame)
-{
-    // Ensure we only have files in the inputs
-    List<ParentablePath> inputFileNames = DirectoryExtensions.GetFilesOnly(inputPaths, appendparent: true);
-    List<ParentablePath> baseFileNames = DirectoryExtensions.GetFilesOnly(basePaths);
-
-    // If we're in standard update mode, run through all of the inputs
-    if (updateMode == UpdateMode.None)
-    {
-        Update(inputFileNames, outDir, inplace, filter);
-        return;
-    }
-
-    // Reverse inputs if we're in a required mode
-    if (updateMode.HasFlag(UpdateMode.DiffReverseCascade))
-        inputFileNames.Reverse();
-    if (updateMode.HasFlag(UpdateMode.ReverseBaseReplace))
-        baseFileNames.Reverse();
-
-    // If we're in merging mode
-    if (updateMode.HasFlag(UpdateMode.Merge))
-    {
-        // Populate the combined data and get the headers
-        PopulateUserData(inputFileNames, filter);
-        MergeNoDiff(inputFileNames, outDir);
-    }
-
-    // If we have one of the standard diffing modes
-    else if (updateMode.HasFlag(UpdateMode.DiffDupesOnly)
-        || updateMode.HasFlag(UpdateMode.DiffNoDupesOnly)
-        || updateMode.HasFlag(UpdateMode.DiffIndividualsOnly))
-    {
-        // Populate the combined data
-        PopulateUserData(inputFileNames, filter);
-        DiffNoCascade(inputFileNames, outDir, updateMode);
-    }
-
-    // If we have one of the cascaded diffing modes
-    else if (updateMode.HasFlag(UpdateMode.DiffCascade)
-        || updateMode.HasFlag(UpdateMode.DiffReverseCascade))
-    {
-        // Populate the combined data and get the headers
-        List<DatHeader> datHeaders = PopulateUserData(inputFileNames, filter);
-        DiffCascade(inputFileNames, datHeaders, outDir, inplace, skip);
-    }
-
-    // If we have a diff against mode
-    else if (updateMode.HasFlag(UpdateMode.DiffAgainst))
-    {
-        // Populate the combined data
-        PopulateUserData(baseFileNames, filter);
-        DiffAgainst(inputFileNames, outDir, inplace, filter, byGame);
-    }
-
-    // If we have one of the base replacement modes
-    else if (updateMode.HasFlag(UpdateMode.BaseReplace)
-        || updateMode.HasFlag(UpdateMode.ReverseBaseReplace))
-    {
-        // Populate the combined data
-        PopulateUserData(baseFileNames, filter);
-        BaseReplace(inputFileNames, outDir, inplace, filter, updateFields, onlySame);
-    }
-
-    return;
-}
-
-#endregion
-
 #region Converting and Updating

 /// <summary>
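The removed DetermineUpdateType dispatcher keyed its behaviour off a [Flags] UpdateMode value and HasFlag checks; callers now invoke the individual operations (PopulateUserData, DiffAgainst, DiffCascade, and so on) directly. As a rough, self-contained sketch of the dispatch pattern being retired here, the enum values mirror the diff, while the Run method and its messages are made up for illustration:

using System;

[Flags]
public enum UpdateMode
{
    None = 0x00,
    DiffDupesOnly = 1 << 0,
    DiffNoDupesOnly = 1 << 1,
    DiffIndividualsOnly = 1 << 2,
    DiffCascade = 1 << 3,
    DiffReverseCascade = 1 << 4,
    DiffAgainst = 1 << 5,
    Merge = 1 << 6,
    BaseReplace = 1 << 7,
    ReverseBaseReplace = 1 << 8,
    AllDiffs = DiffDupesOnly | DiffNoDupesOnly | DiffIndividualsOnly,
}

public static class UpdateDispatcher
{
    // Hypothetical dispatcher: route a combined flag value to individual steps,
    // the same way the removed DetermineUpdateType did with HasFlag checks.
    public static void Run(UpdateMode mode)
    {
        if (mode == UpdateMode.None)
        {
            Console.WriteLine("plain update");
            return;
        }

        if (mode.HasFlag(UpdateMode.Merge))
            Console.WriteLine("merge inputs");
        else if ((mode & UpdateMode.AllDiffs) != 0)
            Console.WriteLine("standard diff");
        else if (mode.HasFlag(UpdateMode.DiffCascade) || mode.HasFlag(UpdateMode.DiffReverseCascade))
            Console.WriteLine("cascaded diff");
        else if (mode.HasFlag(UpdateMode.DiffAgainst))
            Console.WriteLine("diff against a base set");
    }
}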
@@ -366,87 +214,6 @@ namespace SabreTools.Library.DatFiles
     BaseReplace(paths, outDir, inplace, filter, updateFields, onlySame);
 }

-/// <summary>
-/// Output diffs against a base set represented by the current DAT
-/// </summary>
-/// <param name="inputs">Names of the input files</param>
-/// <param name="outDir">Optional param for output directory</param>
-/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
-/// <param name="filter">Filter object to be passed to the DatItem level</param>
-/// <param name="useGames">True to diff using games, false to use hashes</param>
-public void DiffAgainst(List<string> inputs, string outDir, bool inplace, Filter filter, bool useGames)
-{
-    List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
-    DiffAgainst(paths, outDir, inplace, filter, useGames);
-}
-
-/// <summary>
-/// Output cascading diffs
-/// </summary>
-/// <param name="inputs">List of inputs to write out from</param>
-/// <param name="datHeaders">Dat headers used optionally</param>
-/// <param name="outDir">Output directory to write the DATs to</param>
-/// <param name="inplace">True if cascaded diffs are outputted in-place, false otherwise</param>
-/// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
-public void DiffCascade(
-    List<string> inputs,
-    List<DatHeader> datHeaders,
-    string outDir,
-    bool inplace,
-    bool skip)
-{
-    List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
-    DiffCascade(paths, datHeaders, outDir, inplace, skip);
-}
-
-/// <summary>
-/// Output non-cascading diffs
-/// </summary>
-/// <param name="inputs">List of inputs to write out from</param>
-/// <param name="outDir">Output directory to write the DATs to</param>
-/// <param name="diff">Non-zero flag for diffing mode, zero otherwise</param>
-public void DiffNoCascade(List<string> inputs, string outDir, UpdateMode diff)
-{
-    List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
-    DiffNoCascade(paths, outDir, diff);
-}
-
-/// <summary>
-/// Output user defined merge
-/// </summary>
-/// <param name="inputs">List of inputs to write out from</param>
-/// <param name="outDir">Output directory to write the DATs to</param>
-public void MergeNoDiff(List<string> inputs, string outDir)
-{
-    List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
-    MergeNoDiff(paths, outDir);
-}
-
-/// <summary>
-/// Populate the user DatData object from the input files
-/// </summary>
-/// <param name="inputs">Paths to DATs to parse</param>
-/// <param name="filter">Filter object to be passed to the DatItem level</param>
-/// <returns>List of DatData objects representing headers</returns>
-public List<DatHeader> PopulateUserData(List<string> inputs, Filter filter)
-{
-    List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
-    return PopulateUserData(paths, filter);
-}
-
-/// <summary>
-/// Convert, update, and filter a DAT file or set of files
-/// </summary>
-/// <param name="inputs">Names of the input files and/or folders</param>
-/// <param name="outDir">Optional param for output directory</param>
-/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
-/// <param name="filter">Filter object to be passed to the DatItem level</param>
-public void Update(List<string> inputs, string outDir, bool inplace, Filter filter)
-{
-    List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
-    Update(paths, outDir, inplace, filter);
-}
-
 /// <summary>
 /// Replace item values from the base set represented by the current DAT
 /// </summary>
@@ -456,7 +223,7 @@ namespace SabreTools.Library.DatFiles
 /// <param name="filter">Filter object to be passed to the DatItem level</param>
 /// <param name="updateFields">List of Fields representing what should be updated [only for base replacement]</param>
 /// <param name="onlySame">True if descriptions should only be replaced if the game name is the same, false otherwise</param>
-internal void BaseReplace(
+public void BaseReplace(
     List<ParentablePath> inputs,
     string outDir,
     bool inplace,
@@ -952,7 +719,21 @@ namespace SabreTools.Library.DatFiles
 /// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
 /// <param name="filter">Filter object to be passed to the DatItem level</param>
 /// <param name="useGames">True to diff using games, false to use hashes</param>
-internal void DiffAgainst(List<ParentablePath> inputs, string outDir, bool inplace, Filter filter, bool useGames)
+public void DiffAgainst(List<string> inputs, string outDir, bool inplace, Filter filter, bool useGames)
+{
+    List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
+    DiffAgainst(paths, outDir, inplace, filter, useGames);
+}
+
+/// <summary>
+/// Output diffs against a base set represented by the current DAT
+/// </summary>
+/// <param name="inputs">Names of the input files</param>
+/// <param name="outDir">Optional param for output directory</param>
+/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
+/// <param name="filter">Filter object to be passed to the DatItem level</param>
+/// <param name="useGames">True to diff using games, false to use hashes</param>
+public void DiffAgainst(List<ParentablePath> inputs, string outDir, bool inplace, Filter filter, bool useGames)
 {
     // For comparison's sake, we want to use a base ordering
     if (useGames)
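Several of these hunks repeat one mechanical pattern: the ParentablePath-based implementation becomes public, and a convenience overload taking plain strings projects its inputs and forwards. A self-contained sketch of that overload-forwarding pattern with simplified stand-in types (PathWrapper and Differ are illustrative names, not SabreTools types):

using System.Collections.Generic;
using System.Linq;

// Stand-in for ParentablePath: wraps a raw path string (illustrative only).
public class PathWrapper
{
    public string CurrentPath { get; }
    public PathWrapper(string path) => CurrentPath = path;
}

public class Differ
{
    // Convenience overload: accept plain strings and forward to the wrapper-based version,
    // mirroring the List<string> -> List<ParentablePath> projection used throughout the diff.
    public void DiffAgainst(List<string> inputs, string outDir)
    {
        List<PathWrapper> paths = inputs.Select(i => new PathWrapper(i)).ToList();
        DiffAgainst(paths, outDir);
    }

    // The "real" implementation works on the richer path type.
    public void DiffAgainst(List<PathWrapper> inputs, string outDir)
    {
        foreach (PathWrapper p in inputs)
            System.Console.WriteLine($"diffing {p.CurrentPath} into {outDir}");
    }
}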
@@ -1044,7 +825,26 @@ namespace SabreTools.Library.DatFiles
 /// <param name="outDir">Output directory to write the DATs to</param>
 /// <param name="inplace">True if cascaded diffs are outputted in-place, false otherwise</param>
 /// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
-internal void DiffCascade(
+public void DiffCascade(
+    List<string> inputs,
+    List<DatHeader> datHeaders,
+    string outDir,
+    bool inplace,
+    bool skip)
+{
+    List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
+    DiffCascade(paths, datHeaders, outDir, inplace, skip);
+}
+
+/// <summary>
+/// Output cascading diffs
+/// </summary>
+/// <param name="inputs">List of inputs to write out from</param>
+/// <param name="datHeaders">Dat headers used optionally</param>
+/// <param name="outDir">Output directory to write the DATs to</param>
+/// <param name="inplace">True if cascaded diffs are outputted in-place, false otherwise</param>
+/// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
+public void DiffCascade(
     List<ParentablePath> inputs,
     List<DatHeader> datHeaders,
     string outDir,
@@ -1127,19 +927,24 @@ namespace SabreTools.Library.DatFiles
 }

 /// <summary>
-/// Output non-cascading diffs
+/// Output duplicate item diff
 /// </summary>
 /// <param name="inputs">List of inputs to write out from</param>
 /// <param name="outDir">Output directory to write the DATs to</param>
-/// <param name="diff">Non-zero flag for diffing mode, zero otherwise</param>
-internal void DiffNoCascade(List<ParentablePath> inputs, string outDir, UpdateMode diff)
+public void DiffDuplicates(List<string> inputs, string outDir)
 {
-    InternalStopwatch watch = new InternalStopwatch("Initializing all output DATs");
+    List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
+    DiffDuplicates(paths, outDir);
+}

-    // Default vars for use
-    string post = string.Empty;
-    DatFile outerDiffData = Create();
-    DatFile dupeData = Create();
+/// <summary>
+/// Output duplicate item diff
+/// </summary>
+/// <param name="inputs">List of inputs to write out from</param>
+/// <param name="outDir">Output directory to write the DATs to</param>
+public void DiffDuplicates(List<ParentablePath> inputs, string outDir)
+{
+    InternalStopwatch watch = new InternalStopwatch("Initializing duplicate DAT");

     // Fill in any information not in the base DAT
     if (string.IsNullOrWhiteSpace(Header.FileName))
@@ -1151,34 +956,78 @@ namespace SabreTools.Library.DatFiles
     if (string.IsNullOrWhiteSpace(Header.Description))
         Header.Description = "All DATs";

-    // Don't have External dupes
-    if (diff.HasFlag(UpdateMode.DiffNoDupesOnly))
-    {
-        post = " (No Duplicates)";
-        outerDiffData = Create(Header);
-        outerDiffData.Header.FileName += post;
-        outerDiffData.Header.Name += post;
-        outerDiffData.Header.Description += post;
-        outerDiffData.Items = new ItemDictionary();
-    }
-
-    // Have External dupes
-    if (diff.HasFlag(UpdateMode.DiffDupesOnly))
-    {
-        post = " (Duplicates)";
-        dupeData = Create(Header);
+    string post = " (Duplicates)";
+    DatFile dupeData = Create(Header);
     dupeData.Header.FileName += post;
     dupeData.Header.Name += post;
     dupeData.Header.Description += post;
     dupeData.Items = new ItemDictionary();

+    watch.Stop();
+
+    // Now, loop through the dictionary and populate the correct DATs
+    watch.Start("Populating duplicate DAT");
+
+    Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
+    {
+        List<DatItem> items = DatItem.Merge(Items[key]);
+
+        // If the rom list is empty or null, just skip it
+        if (items == null || items.Count == 0)
+            return;
+
+        // Loop through and add the items correctly
+        foreach (DatItem item in items)
+        {
+            if (item.DupeType.HasFlag(DupeType.External))
+            {
+                DatItem newrom = item.Clone() as DatItem;
+                newrom.MachineName += $" ({Path.GetFileNameWithoutExtension(inputs[item.IndexId].CurrentPath)})";
+
+                dupeData.Items.Add(key, newrom);
+            }
+        }
+    });
+
+    watch.Stop();
+
+    // Finally, loop through and output each of the DATs
+    watch.Start("Outputting duplicate DAT");
+    dupeData.Write(outDir, overwrite: false);
+    watch.Stop();
 }

-    // Create a list of DatData objects representing individual output files
-    List<DatFile> outDats = new List<DatFile>();
+/// <summary>
+/// Output non-cascading diffs
+/// </summary>
+/// <param name="inputs">List of inputs to write out from</param>
+/// <param name="outDir">Output directory to write the DATs to</param>
+public void DiffIndividuals(List<string> inputs, string outDir)
+{
+    List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
+    DiffIndividuals(paths, outDir);
+}
+
+/// <summary>
+/// Output non-cascading diffs
+/// </summary>
+/// <param name="inputs">List of inputs to write out from</param>
+/// <param name="outDir">Output directory to write the DATs to</param>
+public void DiffIndividuals(List<ParentablePath> inputs, string outDir)
+{
+    InternalStopwatch watch = new InternalStopwatch("Initializing all individual DATs");
+
+    // Fill in any information not in the base DAT
+    if (string.IsNullOrWhiteSpace(Header.FileName))
+        Header.FileName = "All DATs";
+
+    if (string.IsNullOrWhiteSpace(Header.Name))
+        Header.Name = "All DATs";
+
+    if (string.IsNullOrWhiteSpace(Header.Description))
+        Header.Description = "All DATs";
+
     // Loop through each of the inputs and get or create a new DatData object
-    if (diff.HasFlag(UpdateMode.DiffIndividualsOnly))
-    {
     DatFile[] outDatsArray = new DatFile[inputs.Count];

     Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
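The new DiffDuplicates/DiffIndividuals/DiffNoDuplicates bodies all share one shape: walk the bucketed item dictionary in parallel, merge each bucket, and route items by their duplicate status. A stripped-down, self-contained sketch of that shape, using simplified stand-in types (Item, DupeKind, and BucketDiff are not SabreTools types):

using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

[Flags]
public enum DupeKind { None = 0, Internal = 1 << 0, External = 1 << 1 }

public class Item
{
    public string Name = string.Empty;
    public DupeKind Dupe = DupeKind.None;
}

public static class BucketDiff
{
    // Walk every bucket in parallel and collect the items flagged as external duplicates,
    // mirroring the Parallel.ForEach(Items.Keys, ...) loops added in this commit.
    public static List<Item> ExternalDuplicates(Dictionary<string, List<Item>> buckets)
    {
        var dupes = new ConcurrentBag<Item>();

        Parallel.ForEach(buckets.Keys, key =>
        {
            List<Item> items = buckets[key];
            if (items == null || items.Count == 0)
                return; // nothing to do for this bucket

            foreach (Item item in items.Where(i => i.Dupe.HasFlag(DupeKind.External)))
                dupes.Add(item);
        });

        return dupes.ToList();
    }
}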
@@ -1192,13 +1041,13 @@ namespace SabreTools.Library.DatFiles
         outDatsArray[j] = diffData;
     });

-    outDats = outDatsArray.ToList();
-    }
+    // Create a list of DatData objects representing individual output files
+    List<DatFile> outDats = outDatsArray.ToList();

     watch.Stop();

     // Now, loop through the dictionary and populate the correct DATs
-    watch.Start("Populating all output DATs");
+    watch.Start("Populating all individual DATs");

     Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
     {
@@ -1210,57 +1059,17 @@ namespace SabreTools.Library.DatFiles

         // Loop through and add the items correctly
         foreach (DatItem item in items)
-        {
-            // No duplicates
-            if (diff.HasFlag(UpdateMode.DiffNoDupesOnly) || diff.HasFlag(UpdateMode.DiffIndividualsOnly))
         {
             if (item.DupeType.HasFlag(DupeType.Internal) || item.DupeType == 0x00)
-            {
-                // Individual DATs that are output
-                if (diff.HasFlag(UpdateMode.DiffIndividualsOnly))
                 outDats[item.IndexId].Items.Add(key, item);
-
-                // Merged no-duplicates DAT
-                if (diff.HasFlag(UpdateMode.DiffNoDupesOnly))
-                {
-                    DatItem newrom = item.Clone() as DatItem;
-                    newrom.MachineName += $" ({Path.GetFileNameWithoutExtension(inputs[item.IndexId].CurrentPath)})";
-
-                    outerDiffData.Items.Add(key, newrom);
-                }
-            }
-        }
-
-        // Duplicates only
-        if (diff.HasFlag(UpdateMode.DiffNoDupesOnly))
-        {
-            if (item.DupeType.HasFlag(DupeType.External))
-            {
-                DatItem newrom = item.Clone() as DatItem;
-                newrom.MachineName += $" ({Path.GetFileNameWithoutExtension(inputs[item.IndexId].CurrentPath)})";
-
-                dupeData.Items.Add(key, newrom);
-            }
-        }
         }
     });

     watch.Stop();

     // Finally, loop through and output each of the DATs
-    watch.Start("Outputting all created DATs");
+    watch.Start("Outputting all individual DATs");

-    // Output the difflist (a-b)+(b-a) diff
-    if (diff.HasFlag(UpdateMode.DiffNoDupesOnly))
-        outerDiffData.Write(outDir, overwrite: false);
-
-    // Output the (ab) diff
-    if (diff.HasFlag(UpdateMode.DiffDupesOnly))
-        dupeData.Write(outDir, overwrite: false);
-
-    // Output the individual (a-b) DATs
-    if (diff.HasFlag(UpdateMode.DiffIndividualsOnly))
-    {
     Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
     {
         string path = inputs[j].GetOutputPath(outDir, false /* inplace */);
@@ -1268,8 +1077,77 @@ namespace SabreTools.Library.DatFiles
         // Try to output the file
         outDats[j].Write(path, overwrite: false);
     });

+    watch.Stop();
 }
+
+/// <summary>
+/// Output non-duplicate item diff
+/// </summary>
+/// <param name="inputs">List of inputs to write out from</param>
+/// <param name="outDir">Output directory to write the DATs to</param>
+public void DiffNoDuplicates(List<string> inputs, string outDir)
+{
+    List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
+    DiffNoDuplicates(paths, outDir);
+}
+
+/// <summary>
+/// Output non-duplicate item diff
+/// </summary>
+/// <param name="inputs">List of inputs to write out from</param>
+/// <param name="outDir">Output directory to write the DATs to</param>
+public void DiffNoDuplicates(List<ParentablePath> inputs, string outDir)
+{
+    InternalStopwatch watch = new InternalStopwatch("Initializing no duplicate DAT");
+
+    // Fill in any information not in the base DAT
+    if (string.IsNullOrWhiteSpace(Header.FileName))
+        Header.FileName = "All DATs";
+
+    if (string.IsNullOrWhiteSpace(Header.Name))
+        Header.Name = "All DATs";
+
+    if (string.IsNullOrWhiteSpace(Header.Description))
+        Header.Description = "All DATs";
+
+    string post = " (No Duplicates)";
+    DatFile outerDiffData = Create(Header);
+    outerDiffData.Header.FileName += post;
+    outerDiffData.Header.Name += post;
+    outerDiffData.Header.Description += post;
+    outerDiffData.Items = new ItemDictionary();
+
+    watch.Stop();
+
+    // Now, loop through the dictionary and populate the correct DATs
+    watch.Start("Populating no duplicate DAT");
+
+    Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
+    {
+        List<DatItem> items = DatItem.Merge(Items[key]);
+
+        // If the rom list is empty or null, just skip it
+        if (items == null || items.Count == 0)
+            return;
+
+        // Loop through and add the items correctly
+        foreach (DatItem item in items)
+        {
+            if (item.DupeType.HasFlag(DupeType.Internal) || item.DupeType == 0x00)
+            {
+                DatItem newrom = item.Clone() as DatItem;
+                newrom.MachineName += $" ({Path.GetFileNameWithoutExtension(inputs[item.IndexId].CurrentPath)})";
+                outerDiffData.Items.Add(key, newrom);
+            }
+        }
+    });
+
+    watch.Stop();
+
+    // Finally, loop through and output each of the DATs
+    watch.Start("Outputting no duplicate DAT");
+    outerDiffData.Write(outDir, overwrite: false);
     watch.Stop();
 }

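Both new diff bodies mark where an item came from by cloning it and appending the source DAT's file name to its machine name. A minimal, self-contained sketch of that clone-and-tag pattern with stand-in types (Entry and ProvenanceTag are illustrative, not library code):

using System.IO;

public class Entry
{
    public string MachineName = string.Empty;
    public int IndexId;

    // Shallow copy helper standing in for DatItem.Clone() in this sketch.
    public Entry Clone() => (Entry)MemberwiseClone();
}

public static class ProvenanceTag
{
    // Copy an entry and append the source DAT's file name to its machine name,
    // the same renaming trick used by DiffDuplicates/DiffNoDuplicates above.
    public static Entry TagWithSource(Entry item, string[] inputPaths)
    {
        Entry tagged = item.Clone();
        tagged.MachineName += $" ({Path.GetFileNameWithoutExtension(inputPaths[item.IndexId])})";
        return tagged;
    }
}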
@@ -1278,7 +1156,18 @@ namespace SabreTools.Library.DatFiles
 /// </summary>
 /// <param name="inputs">List of inputs to write out from</param>
 /// <param name="outDir">Output directory to write the DATs to</param>
-internal void MergeNoDiff(List<ParentablePath> inputs, string outDir)
+public void MergeNoDiff(List<string> inputs, string outDir)
+{
+    List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
+    MergeNoDiff(paths, outDir);
+}
+
+/// <summary>
+/// Output user defined merge
+/// </summary>
+/// <param name="inputs">List of inputs to write out from</param>
+/// <param name="outDir">Output directory to write the DATs to</param>
+public void MergeNoDiff(List<ParentablePath> inputs, string outDir)
 {
     // If we're in SuperDAT mode, prefix all games with their respective DATs
     if (Header.Type == "SuperDAT")
@@ -1317,7 +1206,19 @@ namespace SabreTools.Library.DatFiles
 /// <param name="inputs">Paths to DATs to parse</param>
 /// <param name="filter">Filter object to be passed to the DatItem level</param>
 /// <returns>List of DatData objects representing headers</returns>
-internal List<DatHeader> PopulateUserData(List<ParentablePath> inputs, Filter filter)
+public List<DatHeader> PopulateUserData(List<string> inputs, Filter filter)
+{
+    List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
+    return PopulateUserData(paths, filter);
+}
+
+/// <summary>
+/// Populate the user DatData object from the input files
+/// </summary>
+/// <param name="inputs">Paths to DATs to parse</param>
+/// <param name="filter">Filter object to be passed to the DatItem level</param>
+/// <returns>List of DatData objects representing headers</returns>
+public List<DatHeader> PopulateUserData(List<ParentablePath> inputs, Filter filter)
 {
     DatFile[] datFiles = new DatFile[inputs.Count];
     InternalStopwatch watch = new InternalStopwatch("Processing individual DATs");
@@ -1354,7 +1255,20 @@ namespace SabreTools.Library.DatFiles
 /// <param name="outDir">Optional param for output directory</param>
 /// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
 /// <param name="filter">Filter object to be passed to the DatItem level</param>
-internal void Update(List<ParentablePath> inputs, string outDir, bool inplace, Filter filter)
+public void Update(List<string> inputs, string outDir, bool inplace, Filter filter)
+{
+    List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
+    Update(paths, outDir, inplace, filter);
+}
+
+/// <summary>
+/// Convert, update, and filter a DAT file or set of files
+/// </summary>
+/// <param name="inputs">Names of the input files and/or folders</param>
+/// <param name="outDir">Optional param for output directory</param>
+/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
+/// <param name="filter">Filter object to be passed to the DatItem level</param>
+public void Update(List<ParentablePath> inputs, string outDir, bool inplace, Filter filter)
 {
     // Iterate over the files
     foreach (ParentablePath file in inputs)
@@ -3,6 +3,7 @@ using System.Collections.Generic;
 using System.IO;

 using SabreTools.Library.Data;
+using SabreTools.Library.DatItems;
 using SabreTools.Library.Tools;
 using Newtonsoft.Json;

SabreTools.Library/DatFiles/Enums.cs (new file, 98 lines)
@@ -0,0 +1,98 @@
+namespace SabreTools.Library.DatFiles
+{
+    /// <summary>
+    /// Determines how the current dictionary is bucketed by
+    /// </summary>
+    public enum BucketedBy
+    {
+        Default = 0,
+        Size,
+        CRC,
+        MD5,
+#if NET_FRAMEWORK
+        RIPEMD160,
+#endif
+        SHA1,
+        SHA256,
+        SHA384,
+        SHA512,
+        Game,
+    }
+
+    /// <summary>
+    /// Determines the DAT deduplication type
+    /// </summary>
+    public enum DedupeType
+    {
+        None = 0,
+        Full,
+
+        // Force only deduping with certain types
+        Game,
+        CRC,
+        MD5,
+#if NET_FRAMEWORK
+        RIPEMD160,
+#endif
+        SHA1,
+        SHA256,
+        SHA384,
+        SHA512,
+    }
+
+    /// <summary>
+    /// Determines forcemerging tag for DAT output
+    /// </summary>
+    public enum ForceMerging
+    {
+        None = 0,
+        Split,
+        Merged,
+        NonMerged,
+        Full,
+    }
+
+    /// <summary>
+    /// Determines forcenodump tag for DAT output
+    /// </summary>
+    public enum ForceNodump
+    {
+        None = 0,
+        Obsolete,
+        Required,
+        Ignore,
+    }
+
+    /// <summary>
+    /// Determines forcepacking tag for DAT output
+    /// </summary>
+    public enum ForcePacking
+    {
+        None = 0,
+        Zip,
+        Unzip,
+    }
+
+    /// <summary>
+    /// Determines which files should be skipped in DFD
+    /// </summary>
+    public enum SkipFileType
+    {
+        None = 0,
+        Archive,
+        File,
+    }
+
+    /// <summary>
+    /// Determines how a DAT will be split internally
+    /// </summary>
+    public enum SplitType
+    {
+        None = 0,
+        NonMerged,
+        Merged,
+        FullNonMerged,
+        Split,
+        DeviceNonMerged
+    }
+}
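Several of the relocated enums guard a RIPEMD160 member behind #if NET_FRAMEWORK, so the member only exists on full-framework builds. A self-contained illustration of how such a conditionally compiled member is typically consumed; the symbol name comes from the diff, while the enum and switch below are illustrative only:

namespace EnumsExample
{
    public enum HashKind
    {
        CRC,
        MD5,
#if NET_FRAMEWORK
        // Only present on builds that define NET_FRAMEWORK, matching the pattern in the diff.
        RIPEMD160,
#endif
        SHA1,
    }

    public static class HashNames
    {
        public static string Describe(HashKind kind)
        {
            switch (kind)
            {
#if NET_FRAMEWORK
                case HashKind.RIPEMD160:
                    return "RIPEMD-160 (full framework only)";
#endif
                case HashKind.CRC: return "CRC-32";
                case HashKind.MD5: return "MD5";
                default: return kind.ToString();
            }
        }
    }
}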
@@ -5,6 +5,7 @@ using System.Linq;
 using System.Net;

 using SabreTools.Library.Data;
+using SabreTools.Library.DatFiles;
 using SabreTools.Library.FileTypes;
 using SabreTools.Library.Tools;
 using NaturalSort;

SabreTools.Library/DatItems/Enums.cs (new file, 86 lines)
@@ -0,0 +1,86 @@
+namespace SabreTools.Library.DatItems
+{
+    /// <summary>
+    /// List of valid field types within a DatItem/Machine
+    /// </summary>
+    public enum Field : int
+    {
+        NULL = 0,
+
+        // Generic DatItem
+        ItemType,
+        Name,
+        PartName,
+        PartInterface,
+        Features,
+        AreaName,
+        AreaSize,
+
+        // Machine
+        MachineName,
+        Comment,
+        Description,
+        Year,
+        Manufacturer,
+        Publisher,
+        Category,
+        RomOf,
+        CloneOf,
+        SampleOf,
+        Supported,
+        SourceFile,
+        Runnable,
+        Board,
+        RebuildTo,
+        Devices,
+        SlotOptions,
+        Infos,
+        MachineType,
+
+        // BiosSet
+        Default,
+        BiosDescription,
+
+        // Disk
+        MD5,
+#if NET_FRAMEWORK
+        RIPEMD160,
+#endif
+        SHA1,
+        SHA256,
+        SHA384,
+        SHA512,
+        Merge,
+        Region,
+        Index,
+        Writable,
+        Optional,
+        Status,
+
+        // Release
+        Language,
+        Date,
+
+        // Rom
+        Bios,
+        Size,
+        CRC,
+        Offset,
+        Inverted,
+    }
+
+    /// <summary>
+    /// Determine what type of file an item is
+    /// </summary>
+    public enum ItemType
+    {
+        Rom = 0,
+        Disk = 1,
+        Sample = 2,
+        Release = 3,
+        BiosSet = 4,
+        Archive = 5,
+
+        Blank = 99, // This is not a real type, only used internally
+    }
+}
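Callers that accept field names from the command line need to map strings onto the new Field enum. A small sketch of one way to do that, assuming it is compiled against the library at this commit; the FieldParsing helper itself is illustrative and not part of SabreTools:

using System;
using SabreTools.Library.DatItems; // namespace introduced by the new Enums.cs above

public static class FieldParsing
{
    // Map a user-supplied column name onto the Field enum, falling back to Field.NULL
    // when the name does not match any member.
    public static Field ParseField(string name)
    {
        if (Enum.TryParse(name, ignoreCase: true, out Field parsed))
            return parsed;
        return Field.NULL;
    }
}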
@@ -129,210 +129,6 @@

 #endregion

-#region DatFile related
-
-/// <summary>
-/// Determines how the current dictionary is bucketed by
-/// </summary>
-public enum BucketedBy
-{
-    Default = 0,
-    Size,
-    CRC,
-    MD5,
-#if NET_FRAMEWORK
-    RIPEMD160,
-#endif
-    SHA1,
-    SHA256,
-    SHA384,
-    SHA512,
-    Game,
-}
-
-/// <summary>
-/// Determines the DAT deduplication type
-/// </summary>
-public enum DedupeType
-{
-    None = 0,
-    Full,
-
-    // Force only deduping with certain types
-    Game,
-    CRC,
-    MD5,
-#if NET_FRAMEWORK
-    RIPEMD160,
-#endif
-    SHA1,
-    SHA256,
-    SHA384,
-    SHA512,
-}
-
-/// <summary>
-/// Determines forcemerging tag for DAT output
-/// </summary>
-public enum ForceMerging
-{
-    None = 0,
-    Split,
-    Merged,
-    NonMerged,
-    Full,
-}
-
-/// <summary>
-/// Determines forcenodump tag for DAT output
-/// </summary>
-public enum ForceNodump
-{
-    None = 0,
-    Obsolete,
-    Required,
-    Ignore,
-}
-
-/// <summary>
-/// Determines forcepacking tag for DAT output
-/// </summary>
-public enum ForcePacking
-{
-    None = 0,
-    Zip,
-    Unzip,
-}
-
-/// <summary>
-/// Determines which files should be skipped in DFD
-/// </summary>
-public enum SkipFileType
-{
-    None = 0,
-    Archive,
-    File,
-}
-
-/// <summary>
-/// Determines how a DAT will be split internally
-/// </summary>
-public enum SplitType
-{
-    None = 0,
-    NonMerged,
-    Merged,
-    FullNonMerged,
-    Split,
-    DeviceNonMerged
-}
-
-#endregion
-
-#region DatItem related
-
-/// <summary>
-/// List of valid field types within a DatItem/Machine
-/// </summary>
-public enum Field : int
-{
-    NULL = 0,
-
-    // Generic DatItem
-    ItemType,
-    Name,
-    PartName,
-    PartInterface,
-    Features,
-    AreaName,
-    AreaSize,
-
-    // Machine
-    MachineName,
-    Comment,
-    Description,
-    Year,
-    Manufacturer,
-    Publisher,
-    Category,
-    RomOf,
-    CloneOf,
-    SampleOf,
-    Supported,
-    SourceFile,
-    Runnable,
-    Board,
-    RebuildTo,
-    Devices,
-    SlotOptions,
-    Infos,
-    MachineType,
-
-    // BiosSet
-    Default,
-    BiosDescription,
-
-    // Disk
-    MD5,
-#if NET_FRAMEWORK
-    RIPEMD160,
-#endif
-    SHA1,
-    SHA256,
-    SHA384,
-    SHA512,
-    Merge,
-    Region,
-    Index,
-    Writable,
-    Optional,
-    Status,
-
-    // Release
-    Language,
-    Date,
-
-    // Rom
-    Bios,
-    Size,
-    CRC,
-    Offset,
-    Inverted,
-}
-
-/// <summary>
-/// Determine what type of file an item is
-/// </summary>
-public enum ItemType
-{
-    Rom = 0,
-    Disk = 1,
-    Sample = 2,
-    Release = 3,
-    BiosSet = 4,
-    Archive = 5,
-
-    Blank = 99, // This is not a real type, only used internally
-}
-
-#endregion
-
-#region Help related
-
-/// <summary>
-/// Determines the feature type to check for
-/// </summary>
-public enum FeatureType
-{
-    Flag = 0,
-    String,
-    Int32,
-    Int64,
-    List,
-}
-
-#endregion
-
 #region Logging related

 /// <summary>
@@ -376,42 +172,4 @@
 }

 #endregion

-#region Skippers and Mappers
-
-/// <summary>
-/// Determines the header skip operation
-/// </summary>
-public enum HeaderSkipOperation
-{
-    None = 0,
-    Bitswap,
-    Byteswap,
-    Wordswap,
-    WordByteswap,
-}
-
-/// <summary>
-/// Determines the type of test to be done
-/// </summary>
-public enum HeaderSkipTest
-{
-    Data = 0,
-    Or,
-    Xor,
-    And,
-    File,
-}
-
-/// <summary>
-/// Determines the operator to be used in a file test
-/// </summary>
-public enum HeaderSkipTestFileOperator
-{
-    Equal = 0,
-    Less,
-    Greater,
-}
-
-#endregion
 }
@@ -223,50 +223,6 @@ namespace SabreTools.Library.Data
 All = Int32.MaxValue,
 }

-/// <summary>
-/// Determines how the DAT will be split on output
-/// </summary>
-[Flags]
-public enum SplittingMode
-{
-    None = 0x00,
-
-    Extension = 1 << 0,
-    Hash = 1 << 2,
-    Level = 1 << 3,
-    Type = 1 << 4,
-    Size = 1 << 5,
-}
-
-/// <summary>
-/// Determines special update modes
-/// </summary>
-[Flags]
-public enum UpdateMode
-{
-    None = 0x00,
-
-    // Standard diffs
-    DiffDupesOnly = 1 << 0,
-    DiffNoDupesOnly = 1 << 1,
-    DiffIndividualsOnly = 1 << 2,
-
-    // Cascaded diffs
-    DiffCascade = 1 << 3,
-    DiffReverseCascade = 1 << 4,
-
-    // Base diffs
-    DiffAgainst = 1 << 5,
-
-    // Special update modes
-    Merge = 1 << 6,
-    BaseReplace = 1 << 7,
-    ReverseBaseReplace = 1 << 8,
-
-    // Combinations
-    AllDiffs = DiffDupesOnly | DiffNoDupesOnly | DiffIndividualsOnly,
-}
-
 #endregion

 #region DatItem related
SabreTools.Library/Help/Enums.cs (new file, 14 lines)
@@ -0,0 +1,14 @@
+namespace SabreTools.Library.Help
+{
+    /// <summary>
+    /// Determines the feature type to check for
+    /// </summary>
+    public enum FeatureType
+    {
+        Flag = 0,
+        String,
+        Int32,
+        Int64,
+        List,
+    }
+}
SabreTools.Library/Skippers/Enums.cs (new file, 36 lines)
@@ -0,0 +1,36 @@
+namespace SabreTools.Library.Skippers
+{
+    /// <summary>
+    /// Determines the header skip operation
+    /// </summary>
+    public enum HeaderSkipOperation
+    {
+        None = 0,
+        Bitswap,
+        Byteswap,
+        Wordswap,
+        WordByteswap,
+    }
+
+    /// <summary>
+    /// Determines the type of test to be done
+    /// </summary>
+    public enum HeaderSkipTest
+    {
+        Data = 0,
+        Or,
+        Xor,
+        And,
+        File,
+    }
+
+    /// <summary>
+    /// Determines the operator to be used in a file test
+    /// </summary>
+    public enum HeaderSkipTestFileOperator
+    {
+        Equal = 0,
+        Less,
+        Greater,
+    }
+}
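The relocated skipper enums only name the header-skip transforms; how each operation is applied lives elsewhere in the library. Purely as an illustration of how such an enum might drive a transform, with the byte-swapping semantics being an assumption drawn from the member names rather than SabreTools code:

using SabreTools.Library.Skippers; // namespace introduced by the new Enums.cs above

public static class HeaderSkipSketch
{
    // Hypothetical helper: apply a named operation to a copy of a header buffer.
    // "Byteswap" is interpreted here as swapping adjacent byte pairs, which is only a guess at the intent.
    public static byte[] Apply(HeaderSkipOperation op, byte[] header)
    {
        byte[] result = (byte[])header.Clone();
        switch (op)
        {
            case HeaderSkipOperation.Byteswap:
                for (int i = 0; i + 1 < result.Length; i += 2)
                    (result[i], result[i + 1]) = (result[i + 1], result[i]);
                break;
            case HeaderSkipOperation.None:
            default:
                break; // other operations omitted in this sketch
        }
        return result;
    }
}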
@@ -4,7 +4,6 @@ using System.Globalization;
 using System.IO;
 using System.Xml;

-using SabreTools.Library.Data;
 using SabreTools.Library.Tools;

 namespace SabreTools.Library.Skippers
@@ -1,8 +1,6 @@
 using System;
 using System.IO;

-using SabreTools.Library.Data;
-
 namespace SabreTools.Library.Skippers
 {
     /// <summary>
@@ -1,4 +1,6 @@
 using SabreTools.Library.Data;
+using SabreTools.Library.DatFiles;
+using SabreTools.Library.DatItems;

 namespace SabreTools.Library.Tools
 {
@@ -3,7 +3,6 @@ using System.IO;
 using System.Text;

 using SabreTools.Library.Data;
-using SabreTools.Library.Tools;

 namespace SabreTools.Library.Tools
 {
@@ -1,7 +1,9 @@
-using System.Collections.Generic;
+using System;
+using System.Collections.Generic;

 using SabreTools.Library.Data;
 using SabreTools.Library.DatFiles;
+using SabreTools.Library.DatItems;
 using SabreTools.Library.Filtering;
 using SabreTools.Library.Help;
 using SabreTools.Library.Tools;
@@ -10,6 +12,54 @@ namespace SabreTools.Features
 {
     internal class BaseFeature : TopLevel
     {
+        #region Enums
+
+        /// <summary>
+        /// Determines how the DAT will be split on output
+        /// </summary>
+        [Flags]
+        public enum SplittingMode
+        {
+            None = 0x00,
+
+            Extension = 1 << 0,
+            Hash = 1 << 2,
+            Level = 1 << 3,
+            Type = 1 << 4,
+            Size = 1 << 5,
+        }
+
+        /// <summary>
+        /// Determines special update modes
+        /// </summary>
+        [Flags]
+        public enum UpdateMode
+        {
+            None = 0x00,
+
+            // Standard diffs
+            DiffDupesOnly = 1 << 0,
+            DiffNoDupesOnly = 1 << 1,
+            DiffIndividualsOnly = 1 << 2,
+
+            // Cascaded diffs
+            DiffCascade = 1 << 3,
+            DiffReverseCascade = 1 << 4,
+
+            // Base diffs
+            DiffAgainst = 1 << 5,
+
+            // Special update modes
+            Merge = 1 << 6,
+            BaseReplace = 1 << 7,
+            ReverseBaseReplace = 1 << 8,
+
+            // Combinations
+            AllDiffs = DiffDupesOnly | DiffNoDupesOnly | DiffIndividualsOnly,
+        }
+
+        #endregion
+
 #region Features

 #region Flag features
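These [Flags] enums use shifted bit values so several modes can be combined in one value and tested independently with HasFlag. A small self-contained check; the enum values mirror the diff, while the Main method and printed strings are illustrative only:

using System;

[Flags]
public enum SplittingMode
{
    None = 0x00,
    Extension = 1 << 0,
    Hash = 1 << 2,
    Level = 1 << 3,
    Type = 1 << 4,
    Size = 1 << 5,
}

public static class Program
{
    public static void Main()
    {
        // Combine two split modes in one value; each HasFlag test fires independently,
        // which is why the Split feature further down can run several splitters in one pass.
        SplittingMode mode = SplittingMode.Extension | SplittingMode.Size;

        if (mode.HasFlag(SplittingMode.Extension))
            Console.WriteLine("split by extension");
        if (mode.HasFlag(SplittingMode.Size))
            Console.WriteLine("split by size");
        if (mode.HasFlag(SplittingMode.Hash))
            Console.WriteLine("split by hash (not requested)");
    }
}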
@@ -2,7 +2,6 @@
 using System.Collections.Generic;
 using System.IO;

-using SabreTools.Library.Data;
 using SabreTools.Library.DatFiles;
 using SabreTools.Library.Help;

@@ -1,6 +1,5 @@
 using System.Collections.Generic;

-using SabreTools.Library.Data;
 using SabreTools.Library.Help;

 namespace SabreTools.Features
@@ -1,6 +1,5 @@
 using System.Collections.Generic;

-using SabreTools.Library.Data;
 using SabreTools.Library.Help;

 namespace SabreTools.Features
@@ -1,6 +1,5 @@
 using System.Collections.Generic;

-using SabreTools.Library.Data;
 using SabreTools.Library.Help;
 using SabreTools.Library.Skippers;
 using SabreTools.Library.Tools;
@@ -1,6 +1,5 @@
 using System.Collections.Generic;

-using SabreTools.Library.Data;
 using SabreTools.Library.Help;
 using SabreTools.Library.Skippers;
 using SabreTools.Library.Tools;
@@ -1,6 +1,5 @@
 using System.Collections.Generic;

-using SabreTools.Library.Data;
 using SabreTools.Library.Help;

 namespace SabreTools.Features
@@ -1,8 +1,8 @@
 using System.Collections.Generic;

-using SabreTools.Library.Data;
 using SabreTools.Library.DatFiles;
 using SabreTools.Library.Help;
+using SabreTools.Library.Tools;

 namespace SabreTools.Features
 {
@@ -38,18 +38,55 @@ namespace SabreTools.Features
|
|||||||
public override void ProcessFeatures(Dictionary<string, Feature> features)
|
public override void ProcessFeatures(Dictionary<string, Feature> features)
|
||||||
{
|
{
|
||||||
base.ProcessFeatures(features);
|
base.ProcessFeatures(features);
|
||||||
|
SplittingMode splittingMode = GetSplittingMode(features);
|
||||||
|
|
||||||
DatFile datfile = DatFile.Create(Header.DatFormat);
|
// If we somehow have the "none" split type, return
|
||||||
datfile.DetermineSplitType(
|
if (splittingMode == SplittingMode.None)
|
||||||
Inputs,
|
return;
|
||||||
|
|
||||||
|
// Get only files from the inputs
|
||||||
|
List<ParentablePath> files = DirectoryExtensions.GetFilesOnly(Inputs, appendparent: true);
|
||||||
|
|
||||||
|
// Loop over the input files
|
||||||
|
foreach (ParentablePath file in files)
|
||||||
|
{
|
||||||
|
// Create and fill the new DAT
|
||||||
|
DatFile internalDat = DatFile.Create(Header);
|
||||||
|
internalDat.Parse(file);
|
||||||
|
|
||||||
|
// Get the output directory
|
||||||
|
OutputDir = file.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
|
||||||
|
|
||||||
|
// Extension splitting
|
||||||
|
if (splittingMode.HasFlag(SplittingMode.Extension))
|
||||||
|
{
|
||||||
|
internalDat.SplitByExtension(
|
||||||
OutputDir,
|
OutputDir,
|
||||||
GetBoolean(features, InplaceValue),
|
|
||||||
GetSplittingMode(features),
|
|
||||||
GetList(features, ExtAListValue),
|
GetList(features, ExtAListValue),
|
||||||
GetList(features, ExtBListValue),
|
GetList(features, ExtBListValue));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Hash splitting
|
||||||
|
if (splittingMode.HasFlag(SplittingMode.Hash))
|
||||||
|
internalDat.SplitByHash(OutputDir);
|
||||||
|
|
||||||
|
// Level splitting
|
||||||
|
if (splittingMode.HasFlag(SplittingMode.Level))
|
||||||
|
{
|
||||||
|
internalDat.SplitByLevel(
|
||||||
|
OutputDir,
|
||||||
GetBoolean(features, ShortValue),
|
GetBoolean(features, ShortValue),
|
||||||
GetBoolean(features, BaseValue),
|
GetBoolean(features, BaseValue));
|
||||||
GetInt64(features, RadixInt64Value));
|
}
|
||||||
|
|
||||||
|
// Size splitting
|
||||||
|
if (splittingMode.HasFlag(SplittingMode.Size))
|
||||||
|
internalDat.SplitBySize(OutputDir, GetInt64(features, RadixInt64Value));
|
||||||
|
|
||||||
|
// Type splitting
|
||||||
|
if (splittingMode.HasFlag(SplittingMode.Type))
|
||||||
|
internalDat.SplitByType(OutputDir);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
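
The rewritten ProcessFeatures above replaces the single DetermineSplitType call with an explicit per-file loop: each input is parsed into its own DatFile, and every requested SplittingMode flag triggers the matching SplitBy* call. A reduced sketch of that flow outside the feature class, with the namespaces and exact signatures assumed from the hunk rather than verified against the library:

using System.Collections.Generic;

using SabreTools.Library.DatFiles;
using SabreTools.Library.Tools;

// Editor's sketch (not part of the commit): hash + type splitting for a list of inputs.
public static class SplitSketch
{
    public static void Run(DatHeader header, List<string> inputs, string outDir)
    {
        // Keep only real files, as the feature does
        List<ParentablePath> files = DirectoryExtensions.GetFilesOnly(inputs, appendparent: true);

        foreach (ParentablePath file in files)
        {
            // One DatFile per input, seeded with the shared header
            DatFile internalDat = DatFile.Create(header);
            internalDat.Parse(file);

            // Run only the splits that were requested; here: hash, then type
            internalDat.SplitByHash(outDir);
            internalDat.SplitByType(outDir);
        }
    }
}

Note that the feature re-resolves OutputDir through GetOutputPath for every input, presumably so the inplace flag can write results alongside each source DAT.
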
@@ -1,7 +1,6 @@
 using System.Collections.Generic;
 using System.IO;

-using SabreTools.Library.Data;
 using SabreTools.Library.DatFiles;
 using SabreTools.Library.Help;

@@ -1,9 +1,10 @@
 using System;
 using System.Collections.Generic;

-using SabreTools.Library.Data;
 using SabreTools.Library.DatFiles;
+using SabreTools.Library.DatItems;
 using SabreTools.Library.Help;
+using SabreTools.Library.Tools;

 namespace SabreTools.Features
 {
@@ -142,20 +143,93 @@ namespace SabreTools.Features
             if (updateFields == null || updateFields.Count == 0)
                 updateFields = new List<Field>() { Field.Name };

-            // Populate the DatData object
-            DatFile userInputDat = DatFile.Create(Header);
-            userInputDat.DetermineUpdateType(
-                Inputs,
-                GetList(features, BaseDatListValue),
-                OutputDir,
-                updateMode,
-                GetBoolean(features, InplaceValue),
-                GetBoolean(features, SkipFirstOutputValue),
-                Filter,
-                updateFields,
-                GetBoolean(features, OnlySameValue),
-                GetBoolean(Features, ByGameValue));
+            // Ensure we only have files in the inputs
+            List<ParentablePath> inputFileNames = DirectoryExtensions.GetFilesOnly(Inputs, appendparent: true);
+            List<ParentablePath> baseFileNames = DirectoryExtensions.GetFilesOnly(GetList(features, BaseDatListValue));
+
+            // If we're in standard update mode, run through all of the inputs
+            if (updateMode == UpdateMode.None)
+            {
+                DatFile datFile = DatFile.Create(Header);
+                datFile.Update(
+                    inputFileNames,
+                    OutputDir,
+                    GetBoolean(features, InplaceValue),
+                    Filter);
+                return;
+            }
+
+            // Reverse inputs if we're in a required mode
+            if (updateMode.HasFlag(UpdateMode.DiffReverseCascade))
+            {
+                updateMode |= UpdateMode.DiffCascade;
+                inputFileNames.Reverse();
+            }
+            if (updateMode.HasFlag(UpdateMode.ReverseBaseReplace))
+            {
+                updateMode |= UpdateMode.BaseReplace;
+                baseFileNames.Reverse();
+            }
+
+            // Create a DAT to capture inputs
+            DatFile userInputDat = DatFile.Create(Header);
+
+            // Populate using the correct set
+            List<DatHeader> datHeaders;
+            if (updateMode.HasFlag(UpdateMode.DiffAgainst) || updateMode.HasFlag(UpdateMode.BaseReplace))
+                datHeaders = userInputDat.PopulateUserData(baseFileNames, Filter);
+            else
+                datHeaders = userInputDat.PopulateUserData(inputFileNames, Filter);
+
+            // Merge all input files and write
+            if (updateMode.HasFlag(UpdateMode.Merge))
+                userInputDat.MergeNoDiff(inputFileNames, OutputDir);
+
+            // Output only DatItems that are duplicated across inputs
+            if (updateMode.HasFlag(UpdateMode.DiffDupesOnly))
+                userInputDat.DiffDuplicates(inputFileNames, OutputDir);
+
+            // Output only DatItems that are not duplicated across inputs
+            if (updateMode.HasFlag(UpdateMode.DiffNoDupesOnly))
+                userInputDat.DiffNoDuplicates(inputFileNames, OutputDir);
+
+            // Output only DatItems that are unique to each input
+            if (updateMode.HasFlag(UpdateMode.DiffIndividualsOnly))
+                userInputDat.DiffIndividuals(inputFileNames, OutputDir);
+
+            // Output cascaded diffs
+            if (updateMode.HasFlag(UpdateMode.DiffCascade))
+            {
+                userInputDat.DiffCascade(
+                    inputFileNames,
+                    datHeaders,
+                    OutputDir,
+                    GetBoolean(features, InplaceValue),
+                    GetBoolean(features, SkipFirstOutputValue));
+            }
+
+            // Output differences against a base DAT
+            if (updateMode.HasFlag(UpdateMode.DiffAgainst))
+            {
+                userInputDat.DiffAgainst(
+                    inputFileNames,
+                    OutputDir,
+                    GetBoolean(features, InplaceValue),
+                    Filter,
+                    GetBoolean(Features, ByGameValue));
+            }
+
+            // Output DATs after replacing fields from a base DAT
+            if (updateMode.HasFlag(UpdateMode.BaseReplace))
+            {
+                userInputDat.BaseReplace(
+                    inputFileNames,
+                    OutputDir,
+                    GetBoolean(features, InplaceValue),
+                    Filter,
+                    updateFields,
+                    GetBoolean(features, OnlySameValue));
+            }
         }
     }
 }
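
The monolithic DetermineUpdateType call above is replaced by explicit dispatch on UpdateMode: base-oriented modes (DiffAgainst, BaseReplace) populate the working DAT from the base list, every other mode populates it from the inputs, and each flag then maps to one dedicated method. A reduced sketch of the DiffAgainst path only, with types and signatures taken from the hunk and namespaces assumed rather than verified:

using System.Collections.Generic;

using SabreTools.Library.DatFiles;
using SabreTools.Library.Filtering;
using SabreTools.Library.Tools;

// Editor's sketch (not part of the commit): diff a set of inputs against base DATs.
public static class DiffAgainstSketch
{
    public static void Run(DatHeader header, List<string> inputs, List<string> baseDats,
        string outDir, Filter filter)
    {
        List<ParentablePath> inputFileNames = DirectoryExtensions.GetFilesOnly(inputs, appendparent: true);
        List<ParentablePath> baseFileNames = DirectoryExtensions.GetFilesOnly(baseDats);

        // Base-oriented modes seed the working DAT from the base list...
        DatFile userInputDat = DatFile.Create(header);
        userInputDat.PopulateUserData(baseFileNames, filter);

        // ...then diff each input against it (inplace: false, byGame: false)
        userInputDat.DiffAgainst(inputFileNames, outDir, false, filter, false);
    }
}

DiffReverseCascade and ReverseBaseReplace never reach this dispatch directly: the feature folds them into DiffCascade and BaseReplace after reversing the relevant file list, so the downstream methods only handle the forward variants.
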
@@ -1,6 +1,5 @@
 using System.Collections.Generic;

-using SabreTools.Library.Data;
 using SabreTools.Library.DatFiles;
 using SabreTools.Library.Filtering;
 using SabreTools.Library.Help;