Move splitting to new class

This commit is contained in:
Matt Nadareski
2020-12-10 11:38:30 -08:00
parent 56c70b89db
commit c41f2cbed2
3 changed files with 79 additions and 71 deletions

View File

@@ -386,31 +386,6 @@ namespace SabreTools.DatFiles
return outerDiffData; return outerDiffData;
} }
/// <summary>
/// Fill a DatFile with all items with a particular ItemType
/// </summary>
/// <param name="indexDat">DatFile to add found items to</param>
/// <param name="itemType">ItemType to retrieve items for</param>
/// <returns>DatFile containing all items with the ItemType</returns>
public void FillWithItemType(DatFile indexDat, ItemType itemType)
{
// Loop through and add the items for this index to the output
Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = DatItem.Merge(Items[key]);
// If the rom list is empty or null, just skip it
if (items == null || items.Count == 0)
return;
foreach (DatItem item in items)
{
if (item.ItemType == itemType)
indexDat.Items.Add(key, item);
}
});
}
/// <summary> /// <summary>
/// Fill a DatFile with all items with a particular source index ID /// Fill a DatFile with all items with a particular source index ID
/// </summary> /// </summary>

View File

@@ -13,19 +13,21 @@ using NaturalSort;
// This file represents all methods related to splitting a DatFile into multiple // This file represents all methods related to splitting a DatFile into multiple
namespace SabreTools.DatFiles namespace SabreTools.DatFiles
{ {
// TODO: Re-evaluate if these should be made static instead of instanced
// TODO: Implement Level split // TODO: Implement Level split
public abstract partial class DatFile public partial class DatTool
{ {
/// <summary> /// <summary>
/// Split a DAT by input extensions /// Split a DAT by input extensions
/// </summary> /// </summary>
/// <param name="datFile">Current DatFile object to split</param>
/// <param name="extA">List of extensions to split on (first DAT)</param> /// <param name="extA">List of extensions to split on (first DAT)</param>
/// <param name="extB">List of extensions to split on (second DAT)</param> /// <param name="extB">List of extensions to split on (second DAT)</param>
/// <returns>Extension Set A and Extension Set B DatFiles</returns> /// <returns>Extension Set A and Extension Set B DatFiles</returns>
public (DatFile extADat, DatFile extBDat) SplitByExtension(List<string> extA, List<string> extB) public (DatFile extADat, DatFile extBDat) SplitByExtension(DatFile datFile, List<string> extA, List<string> extB)
{ {
// If roms is empty, return false // If roms is empty, return false
if (Items.TotalCount == 0) if (datFile.Items.TotalCount == 0)
return (null, null); return (null, null);
// Make sure all of the extensions don't have a dot at the beginning // Make sure all of the extensions don't have a dot at the beginning
@@ -36,20 +38,20 @@ namespace SabreTools.DatFiles
string newExtBString = string.Join(",", newExtB); string newExtBString = string.Join(",", newExtB);
// Set all of the appropriate outputs for each of the subsets // Set all of the appropriate outputs for each of the subsets
DatFile extADat = Create(Header.CloneStandard()); DatFile extADat = DatFile.Create(datFile.Header.CloneStandard());
extADat.Header.FileName += $" ({newExtAString})"; extADat.Header.FileName += $" ({newExtAString})";
extADat.Header.Name += $" ({newExtAString})"; extADat.Header.Name += $" ({newExtAString})";
extADat.Header.Description += $" ({newExtAString})"; extADat.Header.Description += $" ({newExtAString})";
DatFile extBDat = Create(Header.CloneStandard()); DatFile extBDat = DatFile.Create(datFile.Header.CloneStandard());
extBDat.Header.FileName += $" ({newExtBString})"; extBDat.Header.FileName += $" ({newExtBString})";
extBDat.Header.Name += $" ({newExtBString})"; extBDat.Header.Name += $" ({newExtBString})";
extBDat.Header.Description += $" ({newExtBString})"; extBDat.Header.Description += $" ({newExtBString})";
// Now separate the roms accordingly // Now separate the roms accordingly
Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key => Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
{ {
List<DatItem> items = Items[key]; List<DatItem> items = datFile.Items[key];
foreach (DatItem item in items) foreach (DatItem item in items)
{ {
if (newExtA.Contains(PathExtensions.GetNormalizedExtension(item.GetName() ?? string.Empty))) if (newExtA.Contains(PathExtensions.GetNormalizedExtension(item.GetName() ?? string.Empty)))
@@ -75,8 +77,9 @@ namespace SabreTools.DatFiles
/// <summary> /// <summary>
/// Split a DAT by best available hashes /// Split a DAT by best available hashes
/// </summary> /// </summary>
/// <param name="datFile">Current DatFile object to split</param>
/// <returns>Dictionary of Field to DatFile mappings</returns> /// <returns>Dictionary of Field to DatFile mappings</returns>
public Dictionary<Field, DatFile> SplitByHash() public Dictionary<Field, DatFile> SplitByHash(DatFile datFile)
{ {
// Create each of the respective output DATs // Create each of the respective output DATs
logger.User("Creating and populating new DATs"); logger.User("Creating and populating new DATs");
@@ -85,57 +88,57 @@ namespace SabreTools.DatFiles
Dictionary<Field, DatFile> fieldDats = new Dictionary<Field, DatFile>(); Dictionary<Field, DatFile> fieldDats = new Dictionary<Field, DatFile>();
// TODO: Can this be made into a loop? // TODO: Can this be made into a loop?
fieldDats[Field.DatItem_Status] = Create(Header.CloneStandard()); fieldDats[Field.DatItem_Status] = DatFile.Create(datFile.Header.CloneStandard());
fieldDats[Field.DatItem_Status].Header.FileName += " (Nodump)"; fieldDats[Field.DatItem_Status].Header.FileName += " (Nodump)";
fieldDats[Field.DatItem_Status].Header.Name += " (Nodump)"; fieldDats[Field.DatItem_Status].Header.Name += " (Nodump)";
fieldDats[Field.DatItem_Status].Header.Description += " (Nodump)"; fieldDats[Field.DatItem_Status].Header.Description += " (Nodump)";
fieldDats[Field.DatItem_SHA512] = Create(Header.CloneStandard()); fieldDats[Field.DatItem_SHA512] = DatFile.Create(datFile.Header.CloneStandard());
fieldDats[Field.DatItem_SHA512].Header.FileName += " (SHA-512)"; fieldDats[Field.DatItem_SHA512].Header.FileName += " (SHA-512)";
fieldDats[Field.DatItem_SHA512].Header.Name += " (SHA-512)"; fieldDats[Field.DatItem_SHA512].Header.Name += " (SHA-512)";
fieldDats[Field.DatItem_SHA512].Header.Description += " (SHA-512)"; fieldDats[Field.DatItem_SHA512].Header.Description += " (SHA-512)";
fieldDats[Field.DatItem_SHA384] = Create(Header.CloneStandard()); fieldDats[Field.DatItem_SHA384] = DatFile.Create(datFile.Header.CloneStandard());
fieldDats[Field.DatItem_SHA384].Header.FileName += " (SHA-384)"; fieldDats[Field.DatItem_SHA384].Header.FileName += " (SHA-384)";
fieldDats[Field.DatItem_SHA384].Header.Name += " (SHA-384)"; fieldDats[Field.DatItem_SHA384].Header.Name += " (SHA-384)";
fieldDats[Field.DatItem_SHA384].Header.Description += " (SHA-384)"; fieldDats[Field.DatItem_SHA384].Header.Description += " (SHA-384)";
fieldDats[Field.DatItem_SHA256] = Create(Header.CloneStandard()); fieldDats[Field.DatItem_SHA256] = DatFile.Create(datFile.Header.CloneStandard());
fieldDats[Field.DatItem_SHA256].Header.FileName += " (SHA-256)"; fieldDats[Field.DatItem_SHA256].Header.FileName += " (SHA-256)";
fieldDats[Field.DatItem_SHA256].Header.Name += " (SHA-256)"; fieldDats[Field.DatItem_SHA256].Header.Name += " (SHA-256)";
fieldDats[Field.DatItem_SHA256].Header.Description += " (SHA-256)"; fieldDats[Field.DatItem_SHA256].Header.Description += " (SHA-256)";
fieldDats[Field.DatItem_SHA1] = Create(Header.CloneStandard()); fieldDats[Field.DatItem_SHA1] = DatFile.Create(datFile.Header.CloneStandard());
fieldDats[Field.DatItem_SHA1].Header.FileName += " (SHA-1)"; fieldDats[Field.DatItem_SHA1].Header.FileName += " (SHA-1)";
fieldDats[Field.DatItem_SHA1].Header.Name += " (SHA-1)"; fieldDats[Field.DatItem_SHA1].Header.Name += " (SHA-1)";
fieldDats[Field.DatItem_SHA1].Header.Description += " (SHA-1)"; fieldDats[Field.DatItem_SHA1].Header.Description += " (SHA-1)";
#if NET_FRAMEWORK #if NET_FRAMEWORK
fieldDats[Field.DatItem_RIPEMD160] = Create(Header.CloneStandard()); fieldDats[Field.DatItem_RIPEMD160] = DatFile.Create(datFile.Header.CloneStandard());
fieldDats[Field.DatItem_RIPEMD160].Header.FileName += " (RIPEMD160)"; fieldDats[Field.DatItem_RIPEMD160].Header.FileName += " (RIPEMD160)";
fieldDats[Field.DatItem_RIPEMD160].Header.Name += " (RIPEMD160)"; fieldDats[Field.DatItem_RIPEMD160].Header.Name += " (RIPEMD160)";
fieldDats[Field.DatItem_RIPEMD160].Header.Description += " (RIPEMD160)"; fieldDats[Field.DatItem_RIPEMD160].Header.Description += " (RIPEMD160)";
#endif #endif
fieldDats[Field.DatItem_MD5] = Create(Header.CloneStandard()); fieldDats[Field.DatItem_MD5] = DatFile.Create(datFile.Header.CloneStandard());
fieldDats[Field.DatItem_MD5].Header.FileName += " (MD5)"; fieldDats[Field.DatItem_MD5].Header.FileName += " (MD5)";
fieldDats[Field.DatItem_MD5].Header.Name += " (MD5)"; fieldDats[Field.DatItem_MD5].Header.Name += " (MD5)";
fieldDats[Field.DatItem_MD5].Header.Description += " (MD5)"; fieldDats[Field.DatItem_MD5].Header.Description += " (MD5)";
fieldDats[Field.DatItem_CRC] = Create(Header.CloneStandard()); fieldDats[Field.DatItem_CRC] = DatFile.Create(datFile.Header.CloneStandard());
fieldDats[Field.DatItem_CRC].Header.FileName += " (CRC)"; fieldDats[Field.DatItem_CRC].Header.FileName += " (CRC)";
fieldDats[Field.DatItem_CRC].Header.Name += " (CRC)"; fieldDats[Field.DatItem_CRC].Header.Name += " (CRC)";
fieldDats[Field.DatItem_CRC].Header.Description += " (CRC)"; fieldDats[Field.DatItem_CRC].Header.Description += " (CRC)";
fieldDats[Field.NULL] = Create(Header.CloneStandard()); fieldDats[Field.NULL] = DatFile.Create(datFile.Header.CloneStandard());
fieldDats[Field.NULL].Header.FileName += " (Other)"; fieldDats[Field.NULL].Header.FileName += " (Other)";
fieldDats[Field.NULL].Header.Name += " (Other)"; fieldDats[Field.NULL].Header.Name += " (Other)";
fieldDats[Field.NULL].Header.Description += " (Other)"; fieldDats[Field.NULL].Header.Description += " (Other)";
// Now populate each of the DAT objects in turn // Now populate each of the DAT objects in turn
Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key => Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
{ {
List<DatItem> items = Items[key]; List<DatItem> items = datFile.Items[key];
foreach (DatItem item in items) foreach (DatItem item in items)
{ {
// If the file is not a Disk, Media, or Rom, continue // If the file is not a Disk, Media, or Rom, continue
@@ -211,21 +214,22 @@ namespace SabreTools.DatFiles
/// <summary> /// <summary>
/// Split a SuperDAT by lowest available directory level /// Split a SuperDAT by lowest available directory level
/// </summary> /// </summary>
/// <param name="datFile">Current DatFile object to split</param>
/// <param name="outDir">Name of the directory to write the DATs out to</param> /// <param name="outDir">Name of the directory to write the DATs out to</param>
/// <param name="shortname">True if short names should be used, false otherwise</param> /// <param name="shortname">True if short names should be used, false otherwise</param>
/// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise</param> /// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise</param>
/// <returns>True if split succeeded, false otherwise</returns> /// <returns>True if split succeeded, false otherwise</returns>
public bool SplitByLevel(string outDir, bool shortname, bool basedat) public bool SplitByLevel(DatFile datFile, string outDir, bool shortname, bool basedat)
{ {
// First, bucket by games so that we can do the right thing // First, bucket by games so that we can do the right thing
Items.BucketBy(Field.Machine_Name, DedupeType.None, lower: false, norename: true); datFile.Items.BucketBy(Field.Machine_Name, DedupeType.None, lower: false, norename: true);
// Create a temporary DAT to add things to // Create a temporary DAT to add things to
DatFile tempDat = Create(Header); DatFile tempDat = DatFile.Create(datFile.Header);
tempDat.Header.Name = null; tempDat.Header.Name = null;
// Sort the input keys // Sort the input keys
List<string> keys = Items.Keys.ToList(); List<string> keys = datFile.Items.Keys.ToList();
keys.Sort(SplitByLevelSort); keys.Sort(SplitByLevelSort);
// Then, we loop over the games // Then, we loop over the games
@@ -235,12 +239,12 @@ namespace SabreTools.DatFiles
if (tempDat.Header.Name != null && tempDat.Header.Name != Path.GetDirectoryName(key)) if (tempDat.Header.Name != null && tempDat.Header.Name != Path.GetDirectoryName(key))
{ {
// Reset the DAT for the next items // Reset the DAT for the next items
tempDat = Create(Header); tempDat = DatFile.Create(datFile.Header);
tempDat.Header.Name = null; tempDat.Header.Name = null;
} }
// Clean the input list and set all games to be pathless // Clean the input list and set all games to be pathless
List<DatItem> items = Items[key]; List<DatItem> items = datFile.Items[key];
items.ForEach(item => item.Machine.Name = Path.GetFileName(item.Machine.Name)); items.ForEach(item => item.Machine.Name = Path.GetFileName(item.Machine.Name));
items.ForEach(item => item.Machine.Description = Path.GetFileName(item.Machine.Description)); items.ForEach(item => item.Machine.Description = Path.GetFileName(item.Machine.Description));
@@ -275,11 +279,12 @@ namespace SabreTools.DatFiles
/// <summary> /// <summary>
/// Helper function for SplitByLevel to clean and write out a DAT /// Helper function for SplitByLevel to clean and write out a DAT
/// </summary> /// </summary>
/// <param name="datFile">Current DatFile object to split</param>
/// <param name="newDatFile">DAT to clean and write out</param> /// <param name="newDatFile">DAT to clean and write out</param>
/// <param name="outDir">Directory to write out to</param> /// <param name="outDir">Directory to write out to</param>
/// <param name="shortname">True if short naming scheme should be used, false otherwise</param> /// <param name="shortname">True if short naming scheme should be used, false otherwise</param>
/// <param name="restore">True if original filenames should be used as the base for output filename, false otherwise</param> /// <param name="restore">True if original filenames should be used as the base for output filename, false otherwise</param>
private void SplitByLevelHelper(DatFile newDatFile, string outDir, bool shortname, bool restore) private void SplitByLevelHelper(DatFile datFile, DatFile newDatFile, string outDir, bool shortname, bool restore)
{ {
// Get the name from the DAT to use separately // Get the name from the DAT to use separately
string name = newDatFile.Header.Name; string name = newDatFile.Header.Name;
@@ -287,46 +292,46 @@ namespace SabreTools.DatFiles
// Now set the new output values // Now set the new output values
newDatFile.Header.FileName = WebUtility.HtmlDecode(string.IsNullOrWhiteSpace(name) newDatFile.Header.FileName = WebUtility.HtmlDecode(string.IsNullOrWhiteSpace(name)
? Header.FileName ? datFile.Header.FileName
: (shortname : (shortname
? Path.GetFileName(name) ? Path.GetFileName(name)
: expName : expName
) )
); );
newDatFile.Header.FileName = (restore ? $"{Header.FileName} ({newDatFile.Header.FileName})" : newDatFile.Header.FileName); newDatFile.Header.FileName = restore ? $"{datFile.Header.FileName} ({newDatFile.Header.FileName})" : newDatFile.Header.FileName;
newDatFile.Header.Name = $"{Header.Name} ({expName})"; newDatFile.Header.Name = $"{datFile.Header.Name} ({expName})";
newDatFile.Header.Description = (string.IsNullOrWhiteSpace(Header.Description) ? newDatFile.Header.Name : $"{Header.Description} ({expName})"); newDatFile.Header.Description = string.IsNullOrWhiteSpace(datFile.Header.Description) ? newDatFile.Header.Name : $"{datFile.Header.Description} ({expName})";
newDatFile.Header.Type = null; newDatFile.Header.Type = null;
// Write out the temporary DAT to the proper directory // Write out the temporary DAT to the proper directory
DatTool dt = new DatTool(); Write(newDatFile, outDir);
dt.Write(newDatFile, outDir);
} }
/// <summary> /// <summary>
/// Split a DAT by size of Rom /// Split a DAT by size of Rom
/// </summary> /// </summary>
/// <param name="datFile">Current DatFile object to split</param>
/// <param name="radix">Long value representing the split point</param> /// <param name="radix">Long value representing the split point</param>
/// <returns>Less Than and Greater Than DatFiles</returns> /// <returns>Less Than and Greater Than DatFiles</returns>
public (DatFile lessThan, DatFile greaterThan) SplitBySize(long radix) public (DatFile lessThan, DatFile greaterThan) SplitBySize(DatFile datFile, long radix)
{ {
// Create each of the respective output DATs // Create each of the respective output DATs
logger.User("Creating and populating new DATs"); logger.User("Creating and populating new DATs");
DatFile lessThan = Create(Header.CloneStandard()); DatFile lessThan = DatFile.Create(datFile.Header.CloneStandard());
lessThan.Header.FileName += $" (less than {radix})"; lessThan.Header.FileName += $" (less than {radix})";
lessThan.Header.Name += $" (less than {radix})"; lessThan.Header.Name += $" (less than {radix})";
lessThan.Header.Description += $" (less than {radix})"; lessThan.Header.Description += $" (less than {radix})";
DatFile greaterThan = Create(Header.CloneStandard()); DatFile greaterThan = DatFile.Create(datFile.Header.CloneStandard());
greaterThan.Header.FileName += $" (equal-greater than {radix})"; greaterThan.Header.FileName += $" (equal-greater than {radix})";
greaterThan.Header.Name += $" (equal-greater than {radix})"; greaterThan.Header.Name += $" (equal-greater than {radix})";
greaterThan.Header.Description += $" (equal-greater than {radix})"; greaterThan.Header.Description += $" (equal-greater than {radix})";
// Now populate each of the DAT objects in turn // Now populate each of the DAT objects in turn
Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key => Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
{ {
List<DatItem> items = Items[key]; List<DatItem> items = datFile.Items[key];
foreach (DatItem item in items) foreach (DatItem item in items)
{ {
// If the file is not a Rom, it automatically goes in the "lesser" dat // If the file is not a Rom, it automatically goes in the "lesser" dat
@@ -354,8 +359,9 @@ namespace SabreTools.DatFiles
/// <summary> /// <summary>
/// Split a DAT by type of DatItem /// Split a DAT by type of DatItem
/// </summary> /// </summary>
/// <param name="datFile">Current DatFile object to split</param>
/// <returns>Dictionary of ItemType to DatFile mappings</returns> /// <returns>Dictionary of ItemType to DatFile mappings</returns>
public Dictionary<ItemType, DatFile> SplitByType() public Dictionary<ItemType, DatFile> SplitByType(DatFile datFile)
{ {
// Create each of the respective output DATs // Create each of the respective output DATs
logger.User("Creating and populating new DATs"); logger.User("Creating and populating new DATs");
@@ -375,7 +381,7 @@ namespace SabreTools.DatFiles
// Setup all of the DatFiles // Setup all of the DatFiles
foreach (ItemType itemType in outputTypes) foreach (ItemType itemType in outputTypes)
{ {
typeDats[itemType] = Create(Header.CloneStandard()); typeDats[itemType] = DatFile.Create(datFile.Header.CloneStandard());
typeDats[itemType].Header.FileName += $" ({itemType})"; typeDats[itemType].Header.FileName += $" ({itemType})";
typeDats[itemType].Header.Name += $" ({itemType})"; typeDats[itemType].Header.Name += $" ({itemType})";
typeDats[itemType].Header.Description += $" ({itemType})"; typeDats[itemType].Header.Description += $" ({itemType})";
@@ -384,10 +390,36 @@ namespace SabreTools.DatFiles
// Now populate each of the DAT objects in turn // Now populate each of the DAT objects in turn
Parallel.ForEach(outputTypes, Globals.ParallelOptions, itemType => Parallel.ForEach(outputTypes, Globals.ParallelOptions, itemType =>
{ {
FillWithItemType(typeDats[itemType], itemType); FillWithItemType(datFile, typeDats[itemType], itemType);
}); });
return typeDats; return typeDats;
} }
/// <summary>
/// Fill a DatFile with all items with a particular ItemType
/// </summary>
/// <param name="datFile">Current DatFile object to split</param>
/// <param name="indexDat">DatFile to add found items to</param>
/// <param name="itemType">ItemType to retrieve items for</param>
/// <returns>DatFile containing all items with the ItemType</returns>
private void FillWithItemType(DatFile datFile, DatFile indexDat, ItemType itemType)
{
// Loop through and add the items for this index to the output
Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = DatItem.Merge(datFile.Items[key]);
// If the rom list is empty or null, just skip it
if (items == null || items.Count == 0)
return;
foreach (DatItem item in items)
{
if (item.ItemType == itemType)
indexDat.Items.Add(key, item);
}
});
}
} }
} }

View File

@@ -50,7 +50,7 @@ namespace SabreTools.Features
// Get only files from the inputs // Get only files from the inputs
List<ParentablePath> files = DirectoryExtensions.GetFilesOnly(Inputs, appendparent: true); List<ParentablePath> files = DirectoryExtensions.GetFilesOnly(Inputs, appendparent: true);
// Get the DatTool for parsing // Get the DatTool for operations
DatTool dt = new DatTool(); DatTool dt = new DatTool();
// Loop over the input files // Loop over the input files
@@ -66,7 +66,7 @@ namespace SabreTools.Features
// Extension splitting // Extension splitting
if (splittingMode.HasFlag(SplittingMode.Extension)) if (splittingMode.HasFlag(SplittingMode.Extension))
{ {
(DatFile extADat, DatFile extBDat) = internalDat.SplitByExtension(GetList(features, ExtAListValue), GetList(features, ExtBListValue)); (DatFile extADat, DatFile extBDat) = dt.SplitByExtension(internalDat, GetList(features, ExtAListValue), GetList(features, ExtBListValue));
InternalStopwatch watch = new InternalStopwatch("Outputting extension-split DATs"); InternalStopwatch watch = new InternalStopwatch("Outputting extension-split DATs");
@@ -80,7 +80,7 @@ namespace SabreTools.Features
// Hash splitting // Hash splitting
if (splittingMode.HasFlag(SplittingMode.Hash)) if (splittingMode.HasFlag(SplittingMode.Hash))
{ {
Dictionary<Field, DatFile> typeDats = internalDat.SplitByHash(); Dictionary<Field, DatFile> typeDats = dt.SplitByHash(internalDat);
InternalStopwatch watch = new InternalStopwatch("Outputting hash-split DATs"); InternalStopwatch watch = new InternalStopwatch("Outputting hash-split DATs");
@@ -97,7 +97,8 @@ namespace SabreTools.Features
if (splittingMode.HasFlag(SplittingMode.Level)) if (splittingMode.HasFlag(SplittingMode.Level))
{ {
logger.Warning("This feature is not implemented: level-split"); logger.Warning("This feature is not implemented: level-split");
internalDat.SplitByLevel( dt.SplitByLevel(
internalDat,
OutputDir, OutputDir,
GetBoolean(features, ShortValue), GetBoolean(features, ShortValue),
GetBoolean(features, BaseValue)); GetBoolean(features, BaseValue));
@@ -106,7 +107,7 @@ namespace SabreTools.Features
// Size splitting // Size splitting
if (splittingMode.HasFlag(SplittingMode.Size)) if (splittingMode.HasFlag(SplittingMode.Size))
{ {
(DatFile lessThan, DatFile greaterThan) = internalDat.SplitBySize(GetInt64(features, RadixInt64Value)); (DatFile lessThan, DatFile greaterThan) = dt.SplitBySize(internalDat, GetInt64(features, RadixInt64Value));
InternalStopwatch watch = new InternalStopwatch("Outputting size-split DATs"); InternalStopwatch watch = new InternalStopwatch("Outputting size-split DATs");
@@ -120,7 +121,7 @@ namespace SabreTools.Features
// Type splitting // Type splitting
if (splittingMode.HasFlag(SplittingMode.Type)) if (splittingMode.HasFlag(SplittingMode.Type))
{ {
Dictionary<ItemType, DatFile> typeDats = internalDat.SplitByType(); Dictionary<ItemType, DatFile> typeDats = dt.SplitByType(internalDat);
InternalStopwatch watch = new InternalStopwatch("Outputting ItemType DATs"); InternalStopwatch watch = new InternalStopwatch("Outputting ItemType DATs");