Simplify Cascade

Mirror of: https://github.com/claunia/SabreTools.git
@@ -295,6 +295,31 @@ namespace SabreTools.Library.DatFiles
             }
         }
 
+        /// <summary>
+        /// Fill a DatFile with all items with a particular source index ID
+        /// </summary>
+        /// <param name="indexDat">DatFile to add found items to</param>
+        /// <param name="index">Source index ID to retrieve items for</param>
+        /// <returns>DatFile containing all items with the source index ID</returns>
+        public void FillWithSourceIndex(DatFile indexDat, int index)
+        {
+            // Loop through and add the items for this index to the output
+            Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
+            {
+                List<DatItem> items = DatItem.Merge(Items[key]);
+
+                // If the rom list is empty or null, just skip it
+                if (items == null || items.Count == 0)
+                    return;
+
+                foreach (DatItem item in items)
+                {
+                    if (item.Source.Index == index)
+                        indexDat.Items.Add(key, item);
+                }
+            });
+        }
+
         /// <summary>
         /// Output diffs against a base set represented by the current DAT
         /// </summary>
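Note: the new helper walks Items.Keys with Parallel.ForEach, so it relies on the target dictionary's Add being safe for concurrent writers (ItemDictionary is assumed here to synchronize internally). A self-contained sketch of the same partition-by-source-index pattern using standard collections — the key/item shapes below are hypothetical stand-ins, not SabreTools types:

```csharp
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Threading.Tasks;

class PartitionSketch
{
    static void Main()
    {
        // Stand-in for ItemDictionary: CRC key -> items tagged with a source index
        var items = new Dictionary<string, List<(int Index, string Name)>>
        {
            ["crc1"] = new() { (0, "a.rom"), (1, "b.rom") },
            ["crc2"] = new() { (1, "c.rom") },
        };

        // Collect only the items that came from source index 1
        int index = 1;
        var indexDat = new ConcurrentDictionary<string, ConcurrentBag<string>>();

        Parallel.ForEach(items.Keys, key =>
        {
            foreach (var item in items[key])
            {
                if (item.Index == index)
                    indexDat.GetOrAdd(key, _ => new ConcurrentBag<string>()).Add(item.Name);
            }
        });

        foreach (var kv in indexDat)
            Console.WriteLine($"{kv.Key}: {string.Join(", ", kv.Value)}");
    }
}
```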
@@ -369,110 +394,33 @@ namespace SabreTools.Library.DatFiles
         /// <summary>
         /// Output cascading diffs
         /// </summary>
-        /// <param name="inputs">List of inputs to write out from</param>
         /// <param name="datHeaders">Dat headers used optionally</param>
-        /// <param name="outDir">Output directory to write the DATs to</param>
-        /// <param name="inplace">True if cascaded diffs are outputted in-place, false otherwise</param>
-        /// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
-        public void DiffCascade(
-            List<string> inputs,
-            List<DatHeader> datHeaders,
-            string outDir,
-            bool inplace,
-            bool skip)
-        {
-            List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
-            DiffCascade(paths, datHeaders, outDir, inplace, skip);
-        }
-
-        /// <summary>
-        /// Output cascading diffs
-        /// </summary>
-        /// <param name="inputs">List of inputs to write out from</param>
-        /// <param name="datHeaders">Dat headers used optionally</param>
-        /// <param name="outDir">Output directory to write the DATs to</param>
-        /// <param name="inplace">True if cascaded diffs are outputted in-place, false otherwise</param>
-        /// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
-        public void DiffCascade(
-            List<ParentablePath> inputs,
-            List<DatHeader> datHeaders,
-            string outDir,
-            bool inplace,
-            bool skip)
+        /// <returns>List of DatFiles representing the individually indexed items</returns>
+        public List<DatFile> DiffCascade(List<DatHeader> datHeaders)
         {
             // Create a list of DatData objects representing output files
             List<DatFile> outDats = new List<DatFile>();
 
+            // Ensure the current DatFile is sorted optimally
+            Items.BucketBy(Field.DatItem_CRC, DedupeType.None);
+
             // Loop through each of the inputs and get or create a new DatData object
-            InternalStopwatch watch = new InternalStopwatch("Initializing all output DATs");
+            InternalStopwatch watch = new InternalStopwatch("Initializing and filling all output DATs");
 
-            DatFile[] outDatsArray = new DatFile[inputs.Count];
-            Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
+            // Create the DatFiles from the set of headers
+            DatFile[] outDatsArray = new DatFile[datHeaders.Count];
+            Parallel.For(0, datHeaders.Count, Globals.ParallelOptions, j =>
             {
-                string innerpost = $" ({j} - {inputs[j].GetNormalizedFileName(true)} Only)";
-                DatFile diffData;
-
-                // If we're in inplace mode or the output directory is set, take the appropriate DatData object already stored
-                if (inplace || outDir != Environment.CurrentDirectory)
-                {
-                    diffData = Create(datHeaders[j]);
-                }
-                else
-                {
-                    diffData = Create(Header);
-                    diffData.Header.FileName += innerpost;
-                    diffData.Header.Name += innerpost;
-                    diffData.Header.Description += innerpost;
-                }
-
+                DatFile diffData = Create(datHeaders[j]);
                 diffData.Items = new ItemDictionary();
+                FillWithSourceIndex(diffData, j);
                 outDatsArray[j] = diffData;
             });
 
             outDats = outDatsArray.ToList();
             watch.Stop();
 
-            // Then, ensure that the internal dat can be bucketed in the best possible way
-            Items.BucketBy(Field.DatItem_CRC, DedupeType.None);
-
-            // Now, loop through the dictionary and populate the correct DATs
-            watch.Start("Populating all output DATs");
-
-            Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
-            {
-                List<DatItem> items = DatItem.Merge(Items[key]);
-
-                // If the rom list is empty or null, just skip it
-                if (items == null || items.Count == 0)
-                    return;
-
-                foreach (DatItem item in items)
-                {
-                    // There's odd cases where there are items with System ID < 0. Skip them for now
-                    if (item.Source.Index < 0)
-                    {
-                        Globals.Logger.Warning($"Item found with a <0 SystemID: {item.Name}");
-                        continue;
-                    }
-
-                    outDats[item.Source.Index].Items.Add(key, item);
-                }
-            });
-
-            watch.Stop();
-
-            // Finally, loop through and output each of the DATs
-            watch.Start("Outputting all created DATs");
-
-            Parallel.For((skip ? 1 : 0), inputs.Count, Globals.ParallelOptions, j =>
-            {
-                string path = inputs[j].GetOutputPath(outDir, inplace);
-
-                // Try to output the file
-                outDats[j].Write(path, overwrite: inplace);
-            });
-
-            watch.Stop();
+            return outDats;
         }
 
         /// <summary>
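Note: with FillWithSourceIndex in place, the cascade collapses to "create one empty DAT per header, then fill it by source index"; writing the results moves out to the caller. A sequential sketch of the new flow, written as a hypothetical method inside the same class (every member call appears in this commit; the Parallel.For and InternalStopwatch timing are omitted for brevity):

```csharp
// Hypothetical sequential rewrite of DiffCascade, for illustration only
public List<DatFile> DiffCascadeSketch(List<DatHeader> datHeaders)
{
    // Bucket the current items once, up front (as the real method now does)
    Items.BucketBy(Field.DatItem_CRC, DedupeType.None);

    List<DatFile> outDats = new List<DatFile>();
    for (int j = 0; j < datHeaders.Count; j++)
    {
        // One output DAT per input header, starting from an empty dictionary
        DatFile diffData = Create(datHeaders[j]);
        diffData.Items = new ItemDictionary();

        // Pull in every item whose Source.Index matches this input
        FillWithSourceIndex(diffData, j);
        outDats.Add(diffData);
    }

    return outDats;
}
```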
@@ -1,12 +1,13 @@
 using System;
 using System.Collections.Generic;
 using System.IO;
+using System.Threading.Tasks;
 using SabreTools.Library.Data;
 using SabreTools.Library.DatFiles;
 using SabreTools.Library.DatItems;
 using SabreTools.Library.Help;
 using SabreTools.Library.IO;
+using SabreTools.Library.Tools;
 
 namespace SabreTools.Features
 {
@@ -155,7 +156,7 @@ namespace SabreTools.Features
             if (updateMode == UpdateMode.None)
             {
                 // Loop through each input and update
-                foreach (ParentablePath inputPath in inputPaths)
+                Parallel.ForEach(inputPaths, Globals.ParallelOptions, inputPath =>
                 {
                     // Create a new base DatFile
                     DatFile datFile = DatFile.Create(Header);
@@ -172,7 +173,7 @@ namespace SabreTools.Features
 
                     // Try to output the file, overwriting only if it's not in the current directory
                     datFile.Write(realOutDir, overwrite: GetBoolean(features, InplaceValue));
-                }
+                });
 
                 return;
             }
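Note: this hunk is the first of several in this file that mechanically convert a sequential foreach into Parallel.ForEach: the loop variable becomes a lambda parameter and the closing } becomes });. A self-contained illustration of the conversion (the path list and Process body are hypothetical):

```csharp
using System;
using System.Collections.Generic;
using System.Threading.Tasks;

class ParallelConversion
{
    // Hypothetical per-input work, standing in for parse/filter/write
    static void Process(string inputPath) =>
        Console.WriteLine($"processed {inputPath}");

    static void Main()
    {
        var inputPaths = new List<string> { "a.dat", "b.dat", "c.dat" };

        // Before: one input at a time
        foreach (string inputPath in inputPaths)
        {
            Process(inputPath);
        }

        // After: iterations may run concurrently; safe only because each
        // iteration touches its own input and shares no mutable state
        Parallel.ForEach(inputPaths, inputPath =>
        {
            Process(inputPath);
        });
    }
}
```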
@@ -218,19 +219,44 @@ namespace SabreTools.Features
             // Output cascaded diffs
             if (updateMode.HasFlag(UpdateMode.DiffCascade))
             {
-                userInputDat.DiffCascade(
-                    inputPaths,
-                    datHeaders,
-                    OutputDir,
-                    GetBoolean(features, InplaceValue),
-                    GetBoolean(features, SkipFirstOutputValue));
+                // Preprocess the DatHeaders
+                Parallel.For(0, datHeaders.Count, Globals.ParallelOptions, j =>
+                {
+                    // If we're outputting to the runtime folder, rename
+                    if (!GetBoolean(features, InplaceValue) && OutputDir == Environment.CurrentDirectory)
+                    {
+                        string innerpost = $" ({j} - {inputPaths[j].GetNormalizedFileName(true)} Only)";
+
+                        datHeaders[j] = userInputDat.Header;
+                        datHeaders[j].FileName += innerpost;
+                        datHeaders[j].Name += innerpost;
+                        datHeaders[j].Description += innerpost;
+                    }
+                });
+
+                // Get all of the output DatFiles
+                List<DatFile> datFiles = userInputDat.DiffCascade(datHeaders);
+
+                // Loop through and output the new DatFiles
+                InternalStopwatch watch = new InternalStopwatch("Outputting all created DATs");
+
+                int startIndex = GetBoolean(features, SkipFirstOutputValue) ? 1 : 0;
+                Parallel.For(startIndex, inputPaths.Count, Globals.ParallelOptions, j =>
+                {
+                    string path = inputPaths[j].GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
+
+                    // Try to output the file
+                    datFiles[j].Write(path, overwrite: GetBoolean(features, InplaceValue));
+                });
+
+                watch.Stop();
             }
 
             // Output differences against a base DAT
             if (updateMode.HasFlag(UpdateMode.DiffAgainst))
             {
                 // Loop through each input and diff against the base
-                foreach (ParentablePath inputPath in inputPaths)
+                Parallel.ForEach(inputPaths, Globals.ParallelOptions, inputPath =>
                 {
                     // Parse, extras, and filter the path to a new DatFile
                     DatFile repDat = DatFile.Create(userInputDat.Header.CloneFiltering());
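Note: in the hunk above, the old skip parameter survives as a computed lower bound — when SkipFirstOutputValue is set, index 0 (the base set itself) is never scheduled for output at all. A small stand-alone demonstration of Parallel.For honoring a nonzero start index (the output names are hypothetical):

```csharp
using System;
using System.Threading.Tasks;

class SkipFirstDemo
{
    static void Main()
    {
        string[] outputs = { "base.dat (skipped)", "diff1.dat", "diff2.dat" };
        bool skipFirst = true;

        // Index 0 is excluded from the range entirely when skipFirst is set
        int startIndex = skipFirst ? 1 : 0;
        Parallel.For(startIndex, outputs.Length, j =>
        {
            Console.WriteLine($"writing {outputs[j]}");
        });
    }
}
```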
@@ -244,14 +270,14 @@ namespace SabreTools.Features
                     // Finally output the diffed DatFile
                     string interOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
                     repDat.Write(interOutDir, overwrite: GetBoolean(features, InplaceValue));
-                }
+                });
             }
 
             // Output DATs after replacing fields from a base DatFile
             if (updateMode.HasFlag(UpdateMode.BaseReplace))
             {
                 // Loop through each input and apply the base DatFile
-                foreach (ParentablePath inputPath in inputPaths)
+                Parallel.ForEach(inputPaths, Globals.ParallelOptions, inputPath =>
                 {
                     // Parse, extras, and filter the path to a new DatFile
                     DatFile repDat = DatFile.Create(userInputDat.Header.CloneFiltering());
@@ -265,7 +291,7 @@ namespace SabreTools.Features
                     // Finally output the replaced DatFile
                     string interOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
                     repDat.Write(interOutDir, overwrite: GetBoolean(features, InplaceValue));
-                }
+                });
             }
 
             // Merge all input files and write