[DatTools] Filtering updates
Instead of filtering a DAT as a post-process, filter while the DATs are being parsed into the internal structure. This uses less memory up front and should make things quicker. Testing has not shown any issues, but if this needs to be reverted, it will be. Other small changes are also included. Some DFD issues might be lingering from this.
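The key idea is to move the filter check into the parse loop so rejected entries are never added to the internal structure, rather than building the full DAT and pruning it afterwards. The following is a minimal C# sketch of that pattern, not the actual SabreTools code; the Filter type, ItemPasses, and AddParsedItem names are hypothetical stand-ins introduced only for illustration.

using System.Collections.Generic;

// Minimal sketch only: Filter and its members are hypothetical, not the real
// SabreTools types. The point is that the filter runs per item during parsing,
// so rejected entries never consume memory in the internal structure.
public class Filter
{
    public string GameNameContains { get; set; }
    public long? MinSize { get; set; }

    // Decide whether a parsed entry should be kept at all
    public bool ItemPasses(string gameName, long size)
    {
        if (!string.IsNullOrEmpty(GameNameContains) && !gameName.Contains(GameNameContains))
            return false;
        if (MinSize.HasValue && size < MinSize.Value)
            return false;
        return true;
    }
}

public static class ParseSketch
{
    // Filter while parsing: items that fail the check are skipped up front
    // instead of being added now and removed in a later post-processing pass.
    public static void AddParsedItem(
        Dictionary<string, List<long>> dat, string gameName, long size, Filter filter)
    {
        if (filter != null && !filter.ItemPasses(gameName, size))
            return;

        if (!dat.TryGetValue(gameName, out List<long> sizes))
        {
            sizes = new List<long>();
            dat[gameName] = sizes;
        }
        sizes.Add(size);
    }
}

Done this way, the peak working set is bounded by the items that pass the filter rather than by the whole input DAT, which is where the memory savings described above would come from.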
@@ -129,7 +129,7 @@ namespace SabreTools
 			basepath = (basepath.EndsWith(Path.DirectorySeparatorChar.ToString()) ? basepath : basepath + Path.DirectorySeparatorChar);
 
 			// Get the file data to be split
-			OutputFormat outputFormat = DatTools.GetOutputFormat(filename);
+			OutputFormat outputFormat = DatTools.GetOutputFormat(filename, _logger);
 			Dat datdata = new Dat();
 			datdata = DatTools.Parse(filename, 0, 0, datdata, _logger, true);
 
@@ -337,7 +337,7 @@ namespace SabreTools
 			datdata = DatTools.Parse(filename, 0, 0, datdata, _logger);
 
 			// Set all of the appropriate outputs for each of the subsets
-			OutputFormat outputFormat = DatTools.GetOutputFormat(filename);
+			OutputFormat outputFormat = DatTools.GetOutputFormat(filename, _logger);
 			Dat datdataA = new Dat
 			{
 				FileName = datdata.FileName + " (" + string.Join(",", _extA) + ")",