Mirror of https://github.com/claunia/SabreTools.git (synced 2025-12-16 19:14:27 +00:00)
[DatFile] Add "against" diffing
This is the same diffing model that Romba uses to output its diffs. It is currently untested. It takes a set of base DATs as input, diffs each of the individual input DATs against that combined base, and then outputs the results accordingly.
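For orientation, the feature would be driven from the command line roughly as follows. This is an illustrative sketch, not part of the commit: the executable name and the placeholder for the existing update/diff options are assumptions; only -ag/--against and -bd=/--base-dat= are introduced here (see the help text and argument-parsing hunks below).

    SabreTools.exe [existing update/diff options] -ag -bd="C:\dats\base" input1.dat input2.dat

Each input DAT would then be written back out containing only the items that have no match in the combined base set.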
@@ -236,6 +236,9 @@ namespace SabreTools.Library.Data
         // Cascaded diffs
         Cascade = Individuals << 1,
         ReverseCascade = Cascade << 1,
+
+        // Base diffs
+        Against = ReverseCascade << 1,
     }

     /// <summary>
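The new value follows the existing power-of-two pattern, so DiffMode keeps behaving as a bit-flag set: the new mode can be combined with and tested independently of the other diff modes. A minimal sketch (illustrative only; it assumes the DiffMode enum from the hunk above and mirrors the expressions used in the later hunks):

    // Set by the "-ag" / "--against" command-line flag (see the parser hunk below).
    DiffMode diffMode = 0;
    diffMode |= DiffMode.Against;

    // Checked in DetermineUpdateType to route into DiffAgainst instead of the
    // merge/cascade paths.
    bool againstMode = (diffMode & DiffMode.Against) != 0;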
@@ -250,8 +250,9 @@ namespace SabreTools.Library.Dats
     /// Check if a DAT contains the given rom
     /// </summary>
     /// <param name="datdata">Dat to match against</param>
+    /// <param name="sorted">True if the DAT is already sorted accordingly, false otherwise (default)</param>
     /// <returns>True if it contains the rom, false otherwise</returns>
-    public bool HasDuplicates(DatFile datdata)
+    public bool HasDuplicates(DatFile datdata, bool sorted = false)
     {
         // Check for an empty rom list first
         if (datdata.Count == 0)
@@ -260,7 +261,7 @@ namespace SabreTools.Library.Dats
         }

         // We want to get the proper key for the DatItem
-        string key = SortAndGetKey(datdata);
+        string key = SortAndGetKey(datdata, sorted);

         // If the key doesn't exist, return the empty list
         if (!datdata.ContainsKey(key))
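The optional sorted parameter exists so that a caller which has already bucketed the DAT can skip the re-bucketing pass; as the SortAndGetKey hunks below show, the bucketing work is only done when sorted is false, and the key is then picked from datdata.SortedBy. A hypothetical caller sketch (baseDat and item are illustrative names, not taken from the commit):

    // The base DAT was already bucketed by CRC, so the duplicate check can go
    // straight to key lookup instead of re-bucketing the whole DAT.
    baseDat.BucketBy(SortedBy.CRC, false /* mergeroms */);
    bool alreadyInBase = item.HasDuplicates(baseDat, sorted: true);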
@@ -336,44 +337,30 @@ namespace SabreTools.Library.Dats
     /// Sort the input DAT and get the key to be used by the item
     /// </summary>
     /// <param name="datdata">Dat to match against</param>
+    /// <param name="sorted">True if the DAT is already sorted accordingly, false otherwise (default)</param>
     /// <returns>Key to try to use</returns>
-    private string SortAndGetKey(DatFile datdata)
+    private string SortAndGetKey(DatFile datdata, bool sorted = false)
     {
         string key = null;

+        // If we're not already sorted, take care of it
+        if (!sorted)
+        {
             // If all items are supposed to have a SHA-512, we sort by that
             if (datdata.RomCount + datdata.DiskCount - datdata.NodumpCount == datdata.SHA512Count
                 && ((_itemType == ItemType.Rom && !String.IsNullOrEmpty(((Rom)this).SHA512))
                     || (_itemType == ItemType.Disk && !String.IsNullOrEmpty(((Disk)this).SHA512))))
             {
-                if (_itemType == ItemType.Rom)
-                {
-                    key = ((Rom)this).SHA512;
                 datdata.BucketBy(SortedBy.SHA512, false /* mergeroms */);
             }
-                else
-                {
-                    key = ((Disk)this).SHA512;
-                    datdata.BucketBy(SortedBy.SHA512, false /* mergeroms */);
-                }
-            }

             // If all items are supposed to have a SHA-384, we sort by that
             else if (datdata.RomCount + datdata.DiskCount - datdata.NodumpCount == datdata.SHA384Count
                 && ((_itemType == ItemType.Rom && !String.IsNullOrEmpty(((Rom)this).SHA384))
                     || (_itemType == ItemType.Disk && !String.IsNullOrEmpty(((Disk)this).SHA384))))
             {
-                if (_itemType == ItemType.Rom)
-                {
-                    key = ((Rom)this).SHA384;
                 datdata.BucketBy(SortedBy.SHA384, false /* mergeroms */);
             }
-                else
-                {
-                    key = ((Disk)this).SHA384;
-                    datdata.BucketBy(SortedBy.SHA384, false /* mergeroms */);
-                }
-            }

             // If all items are supposed to have a SHA-256, we sort by that
             else if (datdata.RomCount + datdata.DiskCount - datdata.NodumpCount == datdata.SHA256Count
@@ -425,9 +412,83 @@ namespace SabreTools.Library.Dats
                 datdata.BucketBy(SortedBy.MD5, false /* mergeroms */);
             }
         }
+        }

-        // If we've gotten here and we have a Disk, sort by MD5
+        // Now that we have the sorted type, we get the proper key
+        switch (datdata.SortedBy)
+        {
+            case SortedBy.SHA512:
+                if (_itemType == ItemType.Rom)
+                {
+                    key = ((Rom)this).SHA512;
+                }
                 else if (_itemType == ItemType.Disk)
+                {
+                    key = ((Disk)this).SHA512;
+                }
+                break;
+            case SortedBy.SHA384:
+                if (_itemType == ItemType.Rom)
+                {
+                    key = ((Rom)this).SHA384;
+                }
+                else if (_itemType == ItemType.Disk)
+                {
+                    key = ((Disk)this).SHA384;
+                }
+                break;
+            case SortedBy.SHA256:
+                if (_itemType == ItemType.Rom)
+                {
+                    key = ((Rom)this).SHA256;
+                }
+                else if (_itemType == ItemType.Disk)
+                {
+                    key = ((Disk)this).SHA256;
+                }
+                break;
+            case SortedBy.SHA1:
+                if (_itemType == ItemType.Rom)
+                {
+                    key = ((Rom)this).SHA1;
+                }
+                else if (_itemType == ItemType.Disk)
+                {
+                    key = ((Disk)this).SHA1;
+                }
+                break;
+            case SortedBy.MD5:
+                if (_itemType == ItemType.Rom)
+                {
+                    key = ((Rom)this).MD5;
+                }
+                else if (_itemType == ItemType.Disk)
+                {
+                    key = ((Disk)this).MD5;
+                }
+                break;
+            case SortedBy.CRC:
+                if (_itemType == ItemType.Rom)
+                {
+                    key = ((Rom)this).CRC;
+                }
+                break;
+            case SortedBy.Game:
+                key = this.Machine.Name;
+                break;
+            case SortedBy.Size:
+                if (_itemType == ItemType.Rom)
+                {
+                    key = ((Rom)this).Size.ToString();
+                }
+                break;
+        }
+
+        // If we got here and the key is still null...
+        if (key == null)
+        {
+            // If we've gotten here and we have a Disk, sort by MD5
+            if (_itemType == ItemType.Disk)
             {
                 key = ((Disk)this).MD5;
                 datdata.BucketBy(SortedBy.MD5, false /* mergeroms */);
@@ -446,6 +507,7 @@ namespace SabreTools.Library.Dats
                 key = "-1";
                 datdata.BucketBy(SortedBy.Size, false /* mergeroms */);
             }
+            }

         return key;
     }
@@ -25,6 +25,7 @@ namespace SabreTools.Library.Dats
     /// Determine if input files should be merged, diffed, or processed invidually
     /// </summary>
     /// <param name="inputPaths">Names of the input files and/or folders</param>
+    /// <param name="basePaths">Names of base files and/or folders</param>
     /// <param name="outDir">Optional param for output directory</param>
     /// <param name="merge">True if input files should be merged into a single file, false otherwise</param>
     /// <param name="diff">Non-zero flag for diffing mode, zero otherwise</param>
@@ -39,11 +40,11 @@ namespace SabreTools.Library.Dats
     /// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
     /// <param name="single">True if all games should be replaced by '!', false otherwise</param>
     /// <param name="root">String representing root directory to compare against for length calculation</param>
-    public void DetermineUpdateType(List<string> inputPaths, string outDir, bool merge, DiffMode diff, bool inplace, bool skip,
+    public void DetermineUpdateType(List<string> inputPaths, List<string> basePaths, string outDir, bool merge, DiffMode diff, bool inplace, bool skip,
         bool bare, bool clean, bool remUnicode, bool descAsName, Filter filter, SplitType splitType, bool trim, bool single, string root)
     {
         // If we're in merging or diffing mode, use the full list of inputs
-        if (merge || diff != 0)
+        if (merge || (diff != 0 && (diff & DiffMode.Against) == 0))
         {
             // Make sure there are no folders in inputs
             List<string> newInputFileNames = FileTools.GetOnlyFilesFromInputs(inputPaths, appendparent: true);
@@ -74,6 +75,11 @@ namespace SabreTools.Library.Dats
                 MergeNoDiff(outDir, newInputFileNames, datHeaders);
             }
         }
+        // If we're in "diff against" mode, we treat the inputs differently
+        else if ((diff & DiffMode.Against) != 0)
+        {
+            DiffAgainst(inputPaths, basePaths, outDir, inplace, clean, remUnicode, descAsName, filter, splitType, trim, single, root);
+        }
         // Otherwise, loop through all of the inputs individually
         else
         {
@@ -85,8 +91,15 @@ namespace SabreTools.Library.Dats
     /// <summary>
     /// Populate the user DatData object from the input files
     /// </summary>
+    /// <param name="inputs">Paths to DATs to parse</param>
+    /// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
+    /// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
+    /// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
+    /// <param name="descAsName">True to allow SL DATs to have game names used instead of descriptions, false otherwise (default)</param>
     /// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
+    /// <param name="outDir">Optional param for output directory</param>
     /// <param name="filter">Filter object to be passed to the DatItem level</param>
+    /// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
     /// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
     /// <param name="single">True if all games should be replaced by '!', false otherwise</param>
     /// <param name="root">String representing root directory to compare against for length calculation</param>
@@ -109,7 +122,7 @@ namespace SabreTools.Library.Dats
                 MergeRoms = MergeRoms,
             };

-            datHeaders[i].Parse(input.Split('¬')[0], i, 0, splitType, true, clean, descAsName);
+            datHeaders[i].Parse(input.Split('¬')[0], i, 0, splitType, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName);
         });

         Globals.Logger.User("Processing complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
@@ -143,6 +156,201 @@ namespace SabreTools.Library.Dats
         return datHeaders.ToList();
     }

+    /// <summary>
+    /// Output diffs against a base set
+    /// </summary>
+    /// <param name="inputPaths">Names of the input files and/or folders</param>
+    /// <param name="basePaths">Names of base files and/or folders</param>
+    /// <param name="outDir">Optional param for output directory</param>
+    /// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
+    /// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
+    /// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
+    /// <param name="descAsName">True to allow SL DATs to have game names used instead of descriptions, false otherwise (default)</param>
+    /// <param name="filter">Filter object to be passed to the DatItem level</param>
+    /// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
+    /// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
+    /// <param name="single">True if all games should be replaced by '!', false otherwise</param>
+    /// <param name="root">String representing root directory to compare against for length calculation</param>
+    public void DiffAgainst(List<string> inputPaths, List<string> basePaths, string outDir, bool inplace, bool clean, bool remUnicode,
+        bool descAsName, Filter filter, SplitType splitType, bool trim, bool single, string root)
+    {
+        // First we want to parse all of the base DATs into the input
+        List<string> baseFileNames = FileTools.GetOnlyFilesFromInputs(basePaths);
+        Parallel.ForEach(baseFileNames,
+            Globals.ParallelOptions,
+            path =>
+            {
+                Parse(path, 0, 0, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName);
+            });
+
+        // For comparison's sake, we want to use CRC as the base ordering
+        BucketBy(SortedBy.CRC, true);
+
+        // Now we want to compare each input DAT against the base
+        List<string> inputFileNames = FileTools.GetOnlyFilesFromInputs(inputPaths, appendparent: true);
+        Parallel.ForEach(inputFileNames,
+            Globals.ParallelOptions,
+            path =>
+            {
+                // First we parse in the DAT internally
+                DatFile intDat = new DatFile();
+                intDat.Parse(path.Split('¬')[0], 1, 1, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName);
+
+                // For comparison's sake, we want to use CRC as the base ordering
+                BucketBy(SortedBy.CRC, true);
+
+                // Then we do a hashwise comparison against the base DAT
+                List<string> keys = intDat.Keys.ToList();
+                Parallel.ForEach(keys,
+                    Globals.ParallelOptions,
+                    key =>
+                    {
+                        List<DatItem> datItems = intDat[key];
+                        List<DatItem> keepDatItems = new List<DatItem>();
+                        Parallel.ForEach(datItems,
+                            Globals.ParallelOptions,
+                            datItem =>
+                            {
+                                if (!datItem.HasDuplicates(this))
+                                {
+                                    lock (keepDatItems)
+                                    {
+                                        keepDatItems.Add(datItem);
+                                    }
+                                }
+                            });
+
+                        // Now add the new list to the key
+                        intDat.Remove(key);
+                        intDat.AddRange(key, keepDatItems);
+                    });
+
+                // Determine the output path for the DAT
+                string interOutDir = outDir;
+                if (inplace)
+                {
+                    interOutDir = Path.GetDirectoryName(path);
+                }
+                else if (!String.IsNullOrEmpty(interOutDir))
+                {
+                    interOutDir = Path.Combine(interOutDir, path.Split('¬')[1]);
+                }
+                else
+                {
+                    interOutDir = Path.Combine(Environment.CurrentDirectory, path.Split('¬')[1]);
+                }
+
+                // Once we're done, we check to see if there's anything to write out
+                if (intDat.Count > 0)
+                {
+                    intDat.WriteToFile(interOutDir);
+                }
+            });
+    }
+
+    /// <summary>
+    /// Output cascading diffs
+    /// </summary>
+    /// <param name="outDir">Output directory to write the DATs to</param>
+    /// <param name="inplace">True if cascaded diffs are outputted in-place, false otherwise</param>
+    /// <param name="inputs">List of inputs to write out from</param>
+    /// <param name="datHeaders">Dat headers used optionally</param>
+    /// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
+    public void DiffCascade(string outDir, bool inplace, List<string> inputs, List<DatFile> datHeaders, bool skip)
+    {
+        string post = "";
+
+        // Create a list of DatData objects representing output files
+        List<DatFile> outDats = new List<DatFile>();
+
+        // Loop through each of the inputs and get or create a new DatData object
+        DateTime start = DateTime.Now;
+        Globals.Logger.User("Initializing all output DATs");
+
+        DatFile[] outDatsArray = new DatFile[inputs.Count];
+
+        Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
+        {
+            string innerpost = " (" + Path.GetFileNameWithoutExtension(inputs[j].Split('¬')[0]) + " Only)";
+            DatFile diffData;
+
+            // If we're in inplace mode, take the appropriate DatData object already stored
+            if (inplace || !String.IsNullOrEmpty(outDir))
+            {
+                diffData = datHeaders[j];
+            }
+            else
+            {
+                diffData = new DatFile(this);
+                diffData.FileName += post;
+                diffData.Name += post;
+                diffData.Description += post;
+            }
+            diffData.Reset();
+
+            outDatsArray[j] = diffData;
+        });
+
+        outDats = outDatsArray.ToList();
+        Globals.Logger.User("Initializing complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
+
+        // Now, loop through the dictionary and populate the correct DATs
+        start = DateTime.Now;
+        Globals.Logger.User("Populating all output DATs");
+        List<string> keys = Keys.ToList();
+
+        Parallel.ForEach(keys, Globals.ParallelOptions, key =>
+        {
+            List<DatItem> items = DatItem.Merge(this[key]);
+
+            // If the rom list is empty or null, just skip it
+            if (items == null || items.Count == 0)
+            {
+                return;
+            }
+
+            Parallel.ForEach(items, Globals.ParallelOptions, item =>
+            {
+                // There's odd cases where there are items with System ID < 0. Skip them for now
+                if (item.SystemID < 0)
+                {
+                    Globals.Logger.Warning("Item found with a <0 SystemID: " + item.Name);
+                    return;
+                }
+
+                outDats[item.SystemID].Add(key, item);
+            });
+        });
+
+        Globals.Logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
+
+        // Finally, loop through and output each of the DATs
+        start = DateTime.Now;
+        Globals.Logger.User("Outputting all created DATs");
+
+        Parallel.For((skip ? 1 : 0), inputs.Count, j =>
+        {
+            // If we have an output directory set, replace the path
+            string path = "";
+            if (inplace)
+            {
+                path = Path.GetDirectoryName(inputs[j].Split('¬')[0]);
+            }
+            else if (!String.IsNullOrEmpty(outDir))
+            {
+                string[] split = inputs[j].Split('¬');
+                path = outDir + (split[0] == split[1]
+                    ? Path.GetFileName(split[0])
+                    : (Path.GetDirectoryName(split[0]).Remove(0, split[1].Length))); ;
+            }
+
+            // Try to output the file
+            outDats[j].WriteToFile(path);
+        });
+
+        Globals.Logger.User("Outputting complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
+    }
+
     /// <summary>
     /// Output non-cascading diffs
     /// </summary>
@@ -308,109 +516,6 @@ namespace SabreTools.Library.Dats
         Globals.Logger.User("Outputting complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
     }

-    /// <summary>
-    /// Output cascading diffs
-    /// </summary>
-    /// <param name="outDir">Output directory to write the DATs to</param>
-    /// <param name="inplace">True if cascaded diffs are outputted in-place, false otherwise</param>
-    /// <param name="inputs">List of inputs to write out from</param>
-    /// <param name="datHeaders">Dat headers used optionally</param>
-    /// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
-    public void DiffCascade(string outDir, bool inplace, List<string> inputs, List<DatFile> datHeaders, bool skip)
-    {
-        string post = "";
-
-        // Create a list of DatData objects representing output files
-        List<DatFile> outDats = new List<DatFile>();
-
-        // Loop through each of the inputs and get or create a new DatData object
-        DateTime start = DateTime.Now;
-        Globals.Logger.User("Initializing all output DATs");
-
-        DatFile[] outDatsArray = new DatFile[inputs.Count];
-
-        Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
-        {
-            string innerpost = " (" + Path.GetFileNameWithoutExtension(inputs[j].Split('¬')[0]) + " Only)";
-            DatFile diffData;
-
-            // If we're in inplace mode, take the appropriate DatData object already stored
-            if (inplace || !String.IsNullOrEmpty(outDir))
-            {
-                diffData = datHeaders[j];
-            }
-            else
-            {
-                diffData = new DatFile(this);
-                diffData.FileName += post;
-                diffData.Name += post;
-                diffData.Description += post;
-            }
-            diffData.Reset();
-
-            outDatsArray[j] = diffData;
-        });
-
-        outDats = outDatsArray.ToList();
-        Globals.Logger.User("Initializing complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
-
-        // Now, loop through the dictionary and populate the correct DATs
-        start = DateTime.Now;
-        Globals.Logger.User("Populating all output DATs");
-        List<string> keys = Keys.ToList();
-
-        Parallel.ForEach(keys, Globals.ParallelOptions, key =>
-        {
-            List<DatItem> items = DatItem.Merge(this[key]);
-
-            // If the rom list is empty or null, just skip it
-            if (items == null || items.Count == 0)
-            {
-                return;
-            }
-
-            Parallel.ForEach(items, Globals.ParallelOptions, item =>
-            {
-                // There's odd cases where there are items with System ID < 0. Skip them for now
-                if (item.SystemID < 0)
-                {
-                    Globals.Logger.Warning("Item found with a <0 SystemID: " + item.Name);
-                    return;
-                }
-
-                outDats[item.SystemID].Add(key, item);
-            });
-        });
-
-        Globals.Logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
-
-        // Finally, loop through and output each of the DATs
-        start = DateTime.Now;
-        Globals.Logger.User("Outputting all created DATs");
-
-        Parallel.For((skip ? 1 : 0), inputs.Count, j =>
-        {
-            // If we have an output directory set, replace the path
-            string path = "";
-            if (inplace)
-            {
-                path = Path.GetDirectoryName(inputs[j].Split('¬')[0]);
-            }
-            else if (!String.IsNullOrEmpty(outDir))
-            {
-                string[] split = inputs[j].Split('¬');
-                path = outDir + (split[0] == split[1]
-                    ? Path.GetFileName(split[0])
-                    : (Path.GetDirectoryName(split[0]).Remove(0, split[1].Length))); ;
-            }
-
-            // Try to output the file
-            outDats[j].WriteToFile(path);
-        });
-
-        Globals.Logger.User("Outputting complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
-    }
-
     /// <summary>
     /// Output user defined merge
     /// </summary>
@@ -1119,6 +1119,18 @@ Options:
                         All files that have no duplicates outside of the original
                         DATs are included

+  -ag, --against        Diff all inputs against a set of base DATs
+                        This flag will enable a special type of diffing in which a set
+                        of base DATs are used as a comparison point for each of the
+                        input DATs. This allows users to get a slightly different
+                        output to cascaded diffing, which may be more useful in
+                        some cases. This is heavily influenced by the diffing model
+                        used by Romba
+
+  -bd=, --base-dat=     Add a base DAT for diffing
+                        Add a DAT or folder of DATs to the base set to be used in
+                        comparison against all inputs
+
   -b, --bare            Don't include date in file name
                         Normally, the DAT will be created with the date in the file name
                         in brackets. This flag removes that instead of the default.
@@ -710,7 +710,7 @@ namespace SabreTools
             "All -diX, --diff-XX flags can be used with each other",

             "",
-            "Filter parameters game name, rom name, CRC, MD5, SHA-1 can",
+            "Filter parameters game name, rom name, all hashes can",
             "be matched using full C#-style regex.",

             "",
@@ -1130,6 +1130,16 @@ namespace SabreTools
             "Create diffdats from inputs (all outputs)",
             FeatureType.Flag,
             null));
+        update["diff"].AddFeature("against", new Feature(
+            new List<string>() { "-ag", "--against" },
+            "Diff all inputs against a set of base DATs",
+            FeatureType.Flag,
+            null));
+        update["diff"]["against"].AddFeature("base-dat", new Feature(
+            new List<string>() { "-bd", "--base-dat" },
+            "Add a base DAT for diffing",
+            FeatureType.List,
+            null));
         update["diff"].AddFeature("bare", new Feature(
             new List<string>() { "-b", "--bare" },
             "Don't include the date in automatic name",
@@ -445,7 +445,8 @@ namespace SabreTools
         /// <summary>
         /// Wrap converting and updating DAT file from any format to any format
         /// </summary>
-        /// <param name="inputs">List of input filenames</param>
+        /// <param name="inputPaths">List of input filenames</param>
+        /// <param name="basePaths">List of base filenames</param>
         /// /* Normal DAT header info */
        /// <param name="filename">New filename</param>
        /// <param name="name">New name</param>
@@ -498,7 +499,10 @@ namespace SabreTools
         /// <param name="descAsName">True if descriptions should be used as names, false otherwise (default)</param>
         /// <param name="dedup">True to dedupe the roms in the DAT, false otherwise (default)</param>
         /// <param name="stripHash">StripHash that represents the hash(es) that you want to remove from the output</param>
-        private static void InitUpdate(List<string> inputs,
+        private static void InitUpdate(
+            List<string> inputPaths,
+            List<string> basePaths,
+
             /* Normal DAT header info */
             string filename,
             string name,
@@ -722,7 +726,7 @@ namespace SabreTools
                 Romba = romba,
             };

-            userInputDat.DetermineUpdateType(inputs, outDir, merge, diffMode, inplace, skip, bare, clean,
+            userInputDat.DetermineUpdateType(inputPaths, basePaths, outDir, merge, diffMode, inplace, skip, bare, clean,
                 remUnicode, descAsName, filter, splitType, trim, single, root);
         }

@@ -159,6 +159,7 @@ namespace SabreTools
                 url = null,
                 version = null;
             Filter filter = new Filter();
+            List<string> basePaths = new List<string>();
             List<string> datfiles = new List<string>();
             List<string> exta = new List<string>();
             List<string> extb = new List<string>();
@@ -287,6 +288,10 @@ namespace SabreTools
                     case "--add-date":
                         addFileDates = true;
                         break;
+                    case "-ag":
+                    case "--against":
+                        diffMode |= DiffMode.Against;
+                        break;
                     case "-b":
                     case "--bare":
                         removeDateFromAutomaticName = true;
@@ -632,6 +637,10 @@ namespace SabreTools
                     case "--author":
                         author = args[++i];
                         break;
+                    case "-bd":
+                    case "--base-dat":
+                        basePaths.Add(args[++i]);
+                        break;
                     case "-ca":
                     case "--category=":
                         category = args[++i];
@@ -916,6 +925,10 @@ namespace SabreTools
                     case "--author":
                         author = split[1];
                         break;
+                    case "-bd":
+                    case "--base-dat":
+                        basePaths.Add(split[1]);
+                        break;
                     case "-ca":
                     case "--category=":
                         category = split[1];
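Judging by the two parser hunks above, the new base-DAT option is accepted in both forms the rest of the option set already supports: space-separated (the args[++i] branch) and key=value (the split[1] branch). Illustrative only:

    -bd "C:\dats\base.dat"
    --base-dat="C:\dats\base.dat"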
@@ -1280,7 +1293,7 @@ namespace SabreTools
             // Convert, update, merge, diff, and filter a DAT or folder of DATs
             else if (update)
             {
-                InitUpdate(inputs, filename, name, description, rootdir, category, version, date, author, email, homepage, url, comment, header,
+                InitUpdate(inputs, basePaths, filename, name, description, rootdir, category, version, date, author, email, homepage, url, comment, header,
                     superdat, forcemerge, forcend, forcepack, excludeOf, datFormat, usegame, prefix, postfix, quotes, repext, addext, remext,
                     datPrefix, romba, merge, diffMode, inplace, skip, removeDateFromAutomaticName, filter, oneGameOneRegion, regions,
                     splitType, trim, single, root, outDir, cleanGameNames, removeUnicode, descAsName, dedup, stripHash);