Mirror of https://github.com/claunia/SabreTools.git (synced 2025-12-16 19:14:27 +00:00)
Move updating to new class
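The pattern throughout this commit: updating and diffing logic moves off the abstract DatFile class and onto static methods of a DatTool class that take the DatFile as an explicit first argument. A minimal before/after sketch of the calling convention, using only names that appear in the hunks below:

    // Before: diffing was an instance method on the DatFile itself
    datfile.DiffAgainst(intDat, false);

    // After: the same operation is a static DatTool method; the DatFile is passed in
    DatTool.DiffAgainst(datfile, intDat, false);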
@@ -63,7 +63,7 @@ in -old DAT file. Ignores those entries in -old that are not in -new.";
 
 // Diff against the new datfile
 DatFile intDat = DatTool.CreateAndParse(newdat);
-datfile.DiffAgainst(intDat, false);
+DatTool.DiffAgainst(datfile, intDat, false);
 DatTool.Write(intDat, outdat);
 }
 }
@@ -55,7 +55,7 @@ namespace RombaSharp.Features
 
 // Diff against the new datfile
 DatFile intDat = DatTool.CreateAndParse(newdat);
-datfile.DiffAgainst(intDat, false);
+DatTool.DiffAgainst(datfile, intDat, false);
 DatTool.Write(intDat, outdat);
 }
 }
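Taken together, the updated RombaSharp call sequence in the two hunks above reads roughly as follows (a sketch using only the calls shown there; newdat and outdat are the path variables already in scope in those features):

    // Parse the comparison DAT, diff it against the in-memory base DAT, and write the result
    DatFile intDat = DatTool.CreateAndParse(newdat);
    DatTool.DiffAgainst(datfile, intDat, false);
    DatTool.Write(intDat, outdat);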
@@ -11,15 +11,16 @@ using SabreTools.Logging;
 // This file represents all methods related to converting and updating DatFiles
 namespace SabreTools.DatFiles
 {
-public abstract partial class DatFile
+public partial class DatTool
 {
 /// <summary>
 /// Replace item values from the base set represented by the current DAT
 /// </summary>
+/// <param name="datFile">Current DatFile object to use for updating</param>
 /// <param name="intDat">DatFile to replace the values in</param>
 /// <param name="updateFields">List of Fields representing what should be updated</param>
 /// <param name="onlySame">True if descriptions should only be replaced if the game name is the same, false otherwise</param>
-public void BaseReplace(DatFile intDat, List<Field> updateFields, bool onlySame)
+public static void BaseReplace(DatFile datFile, DatFile intDat, List<Field> updateFields, bool onlySame)
 {
 logger.User($"Replacing items in '{intDat.Header.FileName}' from the base DAT");
 
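BaseReplace keeps its existing parameters but gains the source DatFile up front; the Update feature at the end of this diff calls it like this (copied from that hunk, so repDat, updateFields, and the feature helpers are assumed to be in scope there):

    // Replace the selected fields in repDat using userInputDat as the base set
    DatTool.BaseReplace(userInputDat, repDat, updateFields, GetBoolean(features, OnlySameValue));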
@@ -27,7 +28,7 @@ namespace SabreTools.DatFiles
 if (updateFields.Intersect(DatItem.DatItemFields).Any())
 {
 // For comparison's sake, we want to use CRC as the base bucketing
-Items.BucketBy(Field.DatItem_CRC, DedupeType.Full);
+datFile.Items.BucketBy(Field.DatItem_CRC, DedupeType.Full);
 intDat.Items.BucketBy(Field.DatItem_CRC, DedupeType.None);
 
 // Then we do a hashwise comparison against the base DAT
@@ -37,7 +38,7 @@ namespace SabreTools.DatFiles
 List<DatItem> newDatItems = new List<DatItem>();
 foreach (DatItem datItem in datItems)
 {
-List<DatItem> dupes = Items.GetDuplicates(datItem, sorted: true);
+List<DatItem> dupes = datFile.Items.GetDuplicates(datItem, sorted: true);
 DatItem newDatItem = datItem.Clone() as DatItem;
 
 // Replace fields from the first duplicate, if we have one
@@ -57,7 +58,7 @@ namespace SabreTools.DatFiles
 if (updateFields.Intersect(DatItem.MachineFields).Any())
 {
 // For comparison's sake, we want to use Machine Name as the base bucketing
-Items.BucketBy(Field.Machine_Name, DedupeType.Full);
+datFile.Items.BucketBy(Field.Machine_Name, DedupeType.Full);
 intDat.Items.BucketBy(Field.Machine_Name, DedupeType.None);
 
 // Then we do a namewise comparison against the base DAT
@@ -68,8 +69,8 @@ namespace SabreTools.DatFiles
 foreach (DatItem datItem in datItems)
 {
 DatItem newDatItem = datItem.Clone() as DatItem;
-if (Items.ContainsKey(key) && Items[key].Count() > 0)
-newDatItem.Machine.ReplaceFields(Items[key][0].Machine, updateFields, onlySame);
+if (datFile.Items.ContainsKey(key) && datFile.Items[key].Count() > 0)
+newDatItem.Machine.ReplaceFields(datFile.Items[key][0].Machine, updateFields, onlySame);
 
 newDatItems.Add(newDatItem);
 }
@@ -84,15 +85,16 @@ namespace SabreTools.DatFiles
 /// <summary>
 /// Output diffs against a base set represented by the current DAT
 /// </summary>
+/// <param name="datFile">Current DatFile object to use for updating</param>
 /// <param name="intDat">DatFile to replace the values in</param>
 /// <param name="useGames">True to diff using games, false to use hashes</param>
-public void DiffAgainst(DatFile intDat, bool useGames)
+public static void DiffAgainst(DatFile datFile, DatFile intDat, bool useGames)
 {
 // For comparison's sake, we want to use a base ordering
 if (useGames)
-Items.BucketBy(Field.Machine_Name, DedupeType.None);
+datFile.Items.BucketBy(Field.Machine_Name, DedupeType.None);
 else
-Items.BucketBy(Field.DatItem_CRC, DedupeType.None);
+datFile.Items.BucketBy(Field.DatItem_CRC, DedupeType.None);
 
 logger.User($"Comparing '{intDat.Header.FileName}' to base DAT");
 
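DiffAgainst keeps its useGames switch: true buckets both DATs by machine name and compares game by game, false buckets by CRC and compares hash by hash. A hypothetical call for a game-wise diff (baseDat and targetDat are placeholder names, not taken from the diff):

    // Keep only the entries of targetDat that differ from baseDat, compared per game
    DatTool.DiffAgainst(baseDat, targetDat, useGames: true);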
@@ -110,11 +112,11 @@ namespace SabreTools.DatFiles
 if (useGames)
 {
 // If the base DAT doesn't contain the key, keep it
-if (!Items.ContainsKey(key))
+if (!datFile.Items.ContainsKey(key))
 return;
 
 // If the number of items is different, then keep it
-if (Items[key].Count != intDat.Items[key].Count)
+if (datFile.Items[key].Count != intDat.Items[key].Count)
 return;
 
 // Otherwise, compare by name and hash the remaining files
@@ -122,7 +124,7 @@ namespace SabreTools.DatFiles
 foreach (DatItem item in intDat.Items[key])
 {
 // TODO: Make this granular to name as well
-if (!Items[key].Contains(item))
+if (!datFile.Items[key].Contains(item))
 {
 exactMatch = false;
 break;
@@ -141,7 +143,7 @@ namespace SabreTools.DatFiles
 List<DatItem> keepDatItems = new List<DatItem>();
 foreach (DatItem datItem in datItems)
 {
-if (!Items.HasDuplicates(datItem, true))
+if (!datFile.Items.HasDuplicates(datItem, true))
 keepDatItems.Add(datItem);
 }
 
@@ -155,15 +157,16 @@ namespace SabreTools.DatFiles
 /// <summary>
 /// Output cascading diffs
 /// </summary>
+/// <param name="datFile">Current DatFile object to use for updating</param>
 /// <param name="datHeaders">Dat headers used optionally</param>
 /// <returns>List of DatFiles representing the individually indexed items</returns>
-public List<DatFile> DiffCascade(List<DatHeader> datHeaders)
+public static List<DatFile> DiffCascade(DatFile datFile, List<DatHeader> datHeaders)
 {
 // Create a list of DatData objects representing output files
 List<DatFile> outDats = new List<DatFile>();
 
 // Ensure the current DatFile is sorted optimally
-Items.BucketBy(Field.DatItem_CRC, DedupeType.None);
+datFile.Items.BucketBy(Field.DatItem_CRC, DedupeType.None);
 
 // Loop through each of the inputs and get or create a new DatData object
 InternalStopwatch watch = new InternalStopwatch("Initializing and filling all output DATs");
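In the Update feature further down, DiffCascade is driven by PopulateUserData, which parses every input into the base DatFile and returns their headers; roughly (calls taken from those hunks):

    // Merge all inputs into userInputDat while collecting their headers
    List<DatHeader> datHeaders = DatTool.PopulateUserData(userInputDat, inputPaths);

    // Then emit one DatFile per input, filled by source index
    List<DatFile> datFiles = DatTool.DiffCascade(userInputDat, datHeaders);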
@@ -172,9 +175,9 @@ namespace SabreTools.DatFiles
 DatFile[] outDatsArray = new DatFile[datHeaders.Count];
 Parallel.For(0, datHeaders.Count, Globals.ParallelOptions, j =>
 {
-DatFile diffData = Create(datHeaders[j]);
+DatFile diffData = DatFile.Create(datHeaders[j]);
 diffData.Items = new ItemDictionary();
-FillWithSourceIndex(diffData, j);
+FillWithSourceIndex(datFile, diffData, j);
 outDatsArray[j] = diffData;
 });
 
@@ -187,33 +190,35 @@ namespace SabreTools.DatFiles
 /// <summary>
 /// Output duplicate item diff
 /// </summary>
+/// <param name="datFile">Current DatFile object to use for updating</param>
 /// <param name="inputs">List of inputs to write out from</param>
-public DatFile DiffDuplicates(List<string> inputs)
+public static DatFile DiffDuplicates(DatFile datFile, List<string> inputs)
 {
 List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
-return DiffDuplicates(paths);
+return DiffDuplicates(datFile, paths);
 }
 
 /// <summary>
 /// Output duplicate item diff
 /// </summary>
+/// <param name="datFile">Current DatFile object to use for updating</param>
 /// <param name="inputs">List of inputs to write out from</param>
-public DatFile DiffDuplicates(List<ParentablePath> inputs)
+public static DatFile DiffDuplicates(DatFile datFile, List<ParentablePath> inputs)
 {
 InternalStopwatch watch = new InternalStopwatch("Initializing duplicate DAT");
 
 // Fill in any information not in the base DAT
-if (string.IsNullOrWhiteSpace(Header.FileName))
-Header.FileName = "All DATs";
+if (string.IsNullOrWhiteSpace(datFile.Header.FileName))
+datFile.Header.FileName = "All DATs";
 
-if (string.IsNullOrWhiteSpace(Header.Name))
-Header.Name = "All DATs";
+if (string.IsNullOrWhiteSpace(datFile.Header.Name))
+datFile.Header.Name = "datFile.All DATs";
 
-if (string.IsNullOrWhiteSpace(Header.Description))
-Header.Description = "All DATs";
+if (string.IsNullOrWhiteSpace(datFile.Header.Description))
+datFile.Header.Description = "datFile.All DATs";
 
 string post = " (Duplicates)";
-DatFile dupeData = Create(Header);
+DatFile dupeData = DatFile.Create(datFile.Header);
 dupeData.Header.FileName += post;
 dupeData.Header.Name += post;
 dupeData.Header.Description += post;
@@ -224,9 +229,9 @@ namespace SabreTools.DatFiles
 // Now, loop through the dictionary and populate the correct DATs
 watch.Start("Populating duplicate DAT");
 
-Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
+Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
 {
-List<DatItem> items = DatItem.Merge(Items[key]);
+List<DatItem> items = DatItem.Merge(datFile.Items[key]);
 
 // If the rom list is empty or null, just skip it
 if (items == null || items.Count == 0)
@@ -253,30 +258,32 @@ namespace SabreTools.DatFiles
 /// <summary>
 /// Output non-cascading diffs
 /// </summary>
+/// <param name="datFile">Current DatFile object to use for updating</param>
 /// <param name="inputs">List of inputs to write out from</param>
-public List<DatFile> DiffIndividuals(List<string> inputs)
+public static List<DatFile> DiffIndividuals(DatFile datFile, List<string> inputs)
 {
 List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
-return DiffIndividuals(paths);
+return DiffIndividuals(datFile, paths);
 }
 
 /// <summary>
 /// Output non-cascading diffs
 /// </summary>
+/// <param name="datFile">Current DatFile object to use for updating</param>
 /// <param name="inputs">List of inputs to write out from</param>
-public List<DatFile> DiffIndividuals(List<ParentablePath> inputs)
+public static List<DatFile> DiffIndividuals(DatFile datFile, List<ParentablePath> inputs)
 {
 InternalStopwatch watch = new InternalStopwatch("Initializing all individual DATs");
 
 // Fill in any information not in the base DAT
-if (string.IsNullOrWhiteSpace(Header.FileName))
-Header.FileName = "All DATs";
+if (string.IsNullOrWhiteSpace(datFile.Header.FileName))
+datFile.Header.FileName = "All DATs";
 
-if (string.IsNullOrWhiteSpace(Header.Name))
-Header.Name = "All DATs";
+if (string.IsNullOrWhiteSpace(datFile.Header.Name))
+datFile.Header.Name = "All DATs";
 
-if (string.IsNullOrWhiteSpace(Header.Description))
-Header.Description = "All DATs";
+if (string.IsNullOrWhiteSpace(datFile.Header.Description))
+datFile.Header.Description = "All DATs";
 
 // Loop through each of the inputs and get or create a new DatData object
 DatFile[] outDatsArray = new DatFile[inputs.Count];
@@ -284,7 +291,7 @@ namespace SabreTools.DatFiles
 Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
 {
 string innerpost = $" ({j} - {inputs[j].GetNormalizedFileName(true)} Only)";
-DatFile diffData = Create(Header);
+DatFile diffData = DatFile.Create(datFile.Header);
 diffData.Header.FileName += innerpost;
 diffData.Header.Name += innerpost;
 diffData.Header.Description += innerpost;
@@ -300,9 +307,9 @@ namespace SabreTools.DatFiles
 // Now, loop through the dictionary and populate the correct DATs
 watch.Start("Populating all individual DATs");
 
-Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
+Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
 {
-List<DatItem> items = DatItem.Merge(Items[key]);
+List<DatItem> items = DatItem.Merge(datFile.Items[key]);
 
 // If the rom list is empty or null, just skip it
 if (items == null || items.Count == 0)
@@ -324,33 +331,35 @@ namespace SabreTools.DatFiles
 /// <summary>
 /// Output non-duplicate item diff
 /// </summary>
+/// <param name="datFile">Current DatFile object to use for updating</param>
 /// <param name="inputs">List of inputs to write out from</param>
-public DatFile DiffNoDuplicates(List<string> inputs)
+public static DatFile DiffNoDuplicates(DatFile datFile, List<string> inputs)
 {
 List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
-return DiffNoDuplicates(paths);
+return DiffNoDuplicates(datFile, paths);
 }
 
 /// <summary>
 /// Output non-duplicate item diff
 /// </summary>
+/// <param name="datFile">Current DatFile object to use for updating</param>
 /// <param name="inputs">List of inputs to write out from</param>
-public DatFile DiffNoDuplicates(List<ParentablePath> inputs)
+public static DatFile DiffNoDuplicates(DatFile datFile, List<ParentablePath> inputs)
 {
 InternalStopwatch watch = new InternalStopwatch("Initializing no duplicate DAT");
 
 // Fill in any information not in the base DAT
-if (string.IsNullOrWhiteSpace(Header.FileName))
-Header.FileName = "All DATs";
+if (string.IsNullOrWhiteSpace(datFile.Header.FileName))
+datFile.Header.FileName = "All DATs";
 
-if (string.IsNullOrWhiteSpace(Header.Name))
-Header.Name = "All DATs";
+if (string.IsNullOrWhiteSpace(datFile.Header.Name))
+datFile.Header.Name = "All DATs";
 
-if (string.IsNullOrWhiteSpace(Header.Description))
-Header.Description = "All DATs";
+if (string.IsNullOrWhiteSpace(datFile.Header.Description))
+datFile.Header.Description = "All DATs";
 
 string post = " (No Duplicates)";
-DatFile outerDiffData = Create(Header);
+DatFile outerDiffData = DatFile.Create(datFile.Header);
 outerDiffData.Header.FileName += post;
 outerDiffData.Header.Name += post;
 outerDiffData.Header.Description += post;
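The three non-cascading diff outputs are wired up in the Update feature below; the new call shapes, copied from those hunks:

    // Only items duplicated across inputs
    DatFile dupeData = DatTool.DiffDuplicates(userInputDat, inputPaths);

    // Only items that are not duplicated across inputs
    DatFile outerDiffData = DatTool.DiffNoDuplicates(userInputDat, inputPaths);

    // One output DatFile per input (non-cascading diff)
    List<DatFile> datFiles = DatTool.DiffIndividuals(userInputDat, inputPaths);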
@@ -361,9 +370,9 @@ namespace SabreTools.DatFiles
 // Now, loop through the dictionary and populate the correct DATs
 watch.Start("Populating no duplicate DAT");
 
-Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
+Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
 {
-List<DatItem> items = DatItem.Merge(Items[key]);
+List<DatItem> items = DatItem.Merge(datFile.Items[key]);
 
 // If the rom list is empty or null, just skip it
 if (items == null || items.Count == 0)
@@ -389,15 +398,16 @@ namespace SabreTools.DatFiles
 /// <summary>
 /// Fill a DatFile with all items with a particular source index ID
 /// </summary>
+/// <param name="datFile">Current DatFile object to use for updating</param>
 /// <param name="indexDat">DatFile to add found items to</param>
 /// <param name="index">Source index ID to retrieve items for</param>
 /// <returns>DatFile containing all items with the source index ID/returns>
-public void FillWithSourceIndex(DatFile indexDat, int index)
+public static void FillWithSourceIndex(DatFile datFile, DatFile indexDat, int index)
 {
 // Loop through and add the items for this index to the output
-Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
+Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
 {
-List<DatItem> items = DatItem.Merge(Items[key]);
+List<DatItem> items = DatItem.Merge(datFile.Items[key]);
 
 // If the rom list is empty or null, just skip it
 if (items == null || items.Count == 0)
@@ -414,20 +424,22 @@ namespace SabreTools.DatFiles
 /// <summary>
 /// Populate from multiple paths while returning the invividual headers
 /// </summary>
+/// <param name="datFile">Current DatFile object to use for updating</param>
 /// <param name="inputs">Paths to DATs to parse</param>
 /// <returns>List of DatHeader objects representing headers</returns>
-public List<DatHeader> PopulateUserData(List<string> inputs)
+public static List<DatHeader> PopulateUserData(DatFile datFile, List<string> inputs)
 {
 List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
-return PopulateUserData(paths);
+return PopulateUserData(datFile, paths);
 }
 
 /// <summary>
 /// Populate from multiple paths while returning the invividual headers
 /// </summary>
+/// <param name="datFile">Current DatFile object to use for updating</param>
 /// <param name="inputs">Paths to DATs to parse</param>
 /// <returns>List of DatHeader objects representing headers</returns>
-public List<DatHeader> PopulateUserData(List<ParentablePath> inputs)
+public static List<DatHeader> PopulateUserData(DatFile datFile, List<ParentablePath> inputs)
 {
 DatFile[] datFiles = new DatFile[inputs.Count];
 InternalStopwatch watch = new InternalStopwatch("Processing individual DATs");
@@ -437,8 +449,8 @@ namespace SabreTools.DatFiles
 {
 var input = inputs[i];
 logger.User($"Adding DAT: {input.CurrentPath}");
-datFiles[i] = Create(Header.CloneFiltering());
-DatTool.ParseInto(datFiles[i], input, i, keep: true);
+datFiles[i] = DatFile.Create(datFile.Header.CloneFiltering());
+ParseInto(datFiles[i], input, i, keep: true);
 });
 
 watch.Stop();
@@ -446,7 +458,7 @@ namespace SabreTools.DatFiles
 watch.Start("Populating internal DAT");
 for (int i = 0; i < inputs.Count; i++)
 {
-AddFromExisting(datFiles[i], true);
+datFile.AddFromExisting(datFiles[i], true);
 }
 
 watch.Stop();
@@ -200,9 +200,9 @@ namespace SabreTools.Features
 // Populate using the correct set
 List<DatHeader> datHeaders;
 if (updateMode.HasFlag(UpdateMode.DiffAgainst) || updateMode.HasFlag(UpdateMode.BaseReplace))
-datHeaders = userInputDat.PopulateUserData(basePaths);
+datHeaders = DatTool.PopulateUserData(userInputDat, basePaths);
 else
-datHeaders = userInputDat.PopulateUserData(inputPaths);
+datHeaders = DatTool.PopulateUserData(userInputDat, inputPaths);
 
 // Perform additional processing steps
 userInputDat.ApplyExtras(Extras);
@@ -213,7 +213,7 @@ namespace SabreTools.Features
 // Output only DatItems that are duplicated across inputs
 if (updateMode.HasFlag(UpdateMode.DiffDupesOnly))
 {
-DatFile dupeData = userInputDat.DiffDuplicates(inputPaths);
+DatFile dupeData = DatTool.DiffDuplicates(userInputDat, inputPaths);
 
 InternalStopwatch watch = new InternalStopwatch("Outputting duplicate DAT");
 DatTool.Write(dupeData, OutputDir, overwrite: false);
@@ -223,7 +223,7 @@ namespace SabreTools.Features
 // Output only DatItems that are not duplicated across inputs
 if (updateMode.HasFlag(UpdateMode.DiffNoDupesOnly))
 {
-DatFile outerDiffData = userInputDat.DiffNoDuplicates(inputPaths);
+DatFile outerDiffData = DatTool.DiffNoDuplicates(userInputDat, inputPaths);
 
 InternalStopwatch watch = new InternalStopwatch("Outputting no duplicate DAT");
 DatTool.Write(outerDiffData, OutputDir, overwrite: false);
@@ -234,7 +234,7 @@ namespace SabreTools.Features
 if (updateMode.HasFlag(UpdateMode.DiffIndividualsOnly))
 {
 // Get all of the output DatFiles
-List<DatFile> datFiles = userInputDat.DiffIndividuals(inputPaths);
+List<DatFile> datFiles = DatTool.DiffIndividuals(userInputDat, inputPaths);
 
 // Loop through and output the new DatFiles
 InternalStopwatch watch = new InternalStopwatch("Outputting all individual DATs");
@@ -269,7 +269,7 @@ namespace SabreTools.Features
 });
 
 // Get all of the output DatFiles
-List<DatFile> datFiles = userInputDat.DiffCascade(datHeaders);
+List<DatFile> datFiles = DatTool.DiffCascade(userInputDat, datHeaders);
 
 // Loop through and output the new DatFiles
 InternalStopwatch watch = new InternalStopwatch("Outputting all created DATs");
@@ -303,7 +303,7 @@ namespace SabreTools.Features
 repDat.ApplyCleaning(Cleaner);
 
 // Now replace the fields from the base DatFile
-userInputDat.DiffAgainst(repDat, GetBoolean(Features, ByGameValue));
+DatTool.DiffAgainst(userInputDat, repDat, GetBoolean(Features, ByGameValue));
 
 // Finally output the diffed DatFile
 string interOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
@@ -328,7 +328,7 @@ namespace SabreTools.Features
 repDat.ApplyCleaning(Cleaner);
 
 // Now replace the fields from the base DatFile
-userInputDat.BaseReplace(repDat, updateFields, GetBoolean(features, OnlySameValue));
+DatTool.BaseReplace(userInputDat, repDat, updateFields, GetBoolean(features, OnlySameValue));
 
 // Finally output the replaced DatFile
 string interOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));