[DatFile] Consolidation
@@ -1,639 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

using SabreTools.Library.Data;
using SabreTools.Library.Items;
using SabreTools.Library.Tools;

#if MONO
using System.IO;
#else
using Alphaleonis.Win32.Filesystem;

using SearchOption = System.IO.SearchOption;
#endif
using NaturalSort;

namespace SabreTools.Library.DatFiles
|
||||
{
|
||||
/// <summary>
|
||||
/// Represents a format-agnostic DAT
|
||||
/// </summary>
|
||||
public partial class DatFile
|
||||
{
|
||||
#region Converting and Updating
|
||||
|
||||
/// <summary>
|
||||
/// Determine if input files should be merged, diffed, or processed individually
|
||||
/// </summary>
|
||||
/// <param name="inputPaths">Names of the input files and/or folders</param>
|
||||
/// <param name="basePaths">Names of base files and/or folders</param>
|
||||
/// <param name="outDir">Optional param for output directory</param>
|
||||
/// <param name="merge">True if input files should be merged into a single file, false otherwise</param>
|
||||
/// <param name="diff">Non-zero flag for diffing mode, zero otherwise</param>
|
||||
/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
|
||||
/// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
|
||||
/// <param name="bare">True if the date should not be appended to the default name, false otherwise [OBSOLETE]</param>
|
||||
/// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
|
||||
/// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
|
||||
/// <param name="descAsName">True to allow SL DATs to have game names used instead of descriptions, false otherwise (default)</param>
|
||||
/// <param name="filter">Filter object to be passed to the DatItem level</param>
|
||||
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
|
||||
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
|
||||
/// <param name="single">True if all games should be replaced by '!', false otherwise</param>
|
||||
/// <param name="root">String representing root directory to compare against for length calculation</param>
|
||||
public void DetermineUpdateType(List<string> inputPaths, List<string> basePaths, string outDir, bool merge, DiffMode diff, bool inplace, bool skip,
|
||||
bool bare, bool clean, bool remUnicode, bool descAsName, Filter filter, SplitType splitType, bool trim, bool single, string root)
|
||||
{
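// Hypothetical usage sketch (illustration only, not from the original source): merge two DAT
// files into one output directory, assuming Filter has a parameterless constructor and that 0
// is an acceptable "none" value for the DiffMode and SplitType flag enums:
//
//     DatFile datFile = new DatFile();
//     datFile.DetermineUpdateType(
//         new List<string> { "first.dat", "second.dat" }, new List<string>(), "out",
//         merge: true, diff: 0, inplace: false, skip: false, bare: false, clean: false,
//         remUnicode: false, descAsName: false, filter: new Filter(), splitType: 0,
//         trim: false, single: false, root: "");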
|
||||
// If we're in merging or diffing mode, use the full list of inputs
|
||||
if (merge || (diff != 0 && (diff & DiffMode.Against) == 0))
|
||||
{
|
||||
// Make sure there are no folders in inputs
|
||||
List<string> newInputFileNames = FileTools.GetOnlyFilesFromInputs(inputPaths, appendparent: true);
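// Note (inferred from later usage): with appendparent set, each returned entry is encoded as
// "<full file path>¬<original input path>", so the '¬'-delimited halves can be split apart
// again when computing output paths.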
|
||||
|
||||
// Reverse if we have to
|
||||
if ((diff & DiffMode.ReverseCascade) != 0)
|
||||
{
|
||||
newInputFileNames.Reverse();
|
||||
}
|
||||
|
||||
// Create a dictionary of all ROMs from the input DATs
|
||||
List<DatFile> datHeaders = PopulateUserData(newInputFileNames, inplace, clean,
|
||||
remUnicode, descAsName, outDir, filter, splitType, trim, single, root);
|
||||
|
||||
// Modify the Dictionary if necessary and output the results
|
||||
if (diff != 0 && diff < DiffMode.Cascade)
|
||||
{
|
||||
DiffNoCascade(diff, outDir, newInputFileNames);
|
||||
}
|
||||
// If we're in cascade and diff, output only cascaded diffs
|
||||
else if (diff != 0 && diff >= DiffMode.Cascade)
|
||||
{
|
||||
DiffCascade(outDir, inplace, newInputFileNames, datHeaders, skip);
|
||||
}
|
||||
// Output all entries with user-defined merge
|
||||
else
|
||||
{
|
||||
MergeNoDiff(outDir, newInputFileNames, datHeaders);
|
||||
}
|
||||
}
|
||||
// If we're in "diff against" mode, we treat the inputs differently
|
||||
else if ((diff & DiffMode.Against) != 0)
|
||||
{
|
||||
DiffAgainst(inputPaths, basePaths, outDir, inplace, clean, remUnicode, descAsName, filter, splitType, trim, single, root);
|
||||
}
|
||||
// Otherwise, loop through all of the inputs individually
|
||||
else
|
||||
{
|
||||
Update(inputPaths, outDir, inplace, clean, remUnicode, descAsName, filter, splitType, trim, single, root);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Populate the user DatData object from the input files
|
||||
/// </summary>
|
||||
/// <param name="inputs">Paths to DATs to parse</param>
|
||||
/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
|
||||
/// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
|
||||
/// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
|
||||
/// <param name="descAsName">True to allow SL DATs to have game names used instead of descriptions, false otherwise (default)</param>
|
||||
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
|
||||
/// <param name="outDir">Optional param for output directory</param>
|
||||
/// <param name="filter">Filter object to be passed to the DatItem level</param>
|
||||
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
|
||||
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
|
||||
/// <param name="single">True if all games should be replaced by '!', false otherwise</param>
|
||||
/// <param name="root">String representing root directory to compare against for length calculation</param>
|
||||
/// <returns>List of DatData objects representing headers</returns>
|
||||
private List<DatFile> PopulateUserData(List<string> inputs, bool inplace, bool clean, bool remUnicode, bool descAsName,
|
||||
string outDir, Filter filter, SplitType splitType, bool trim, bool single, string root)
|
||||
{
|
||||
DatFile[] datHeaders = new DatFile[inputs.Count];
|
||||
InternalStopwatch watch = new InternalStopwatch("Processing individual DATs");
|
||||
|
||||
// Parse all of the DATs into their own DatFiles in the array
|
||||
Parallel.For(0, inputs.Count, Globals.ParallelOptions, i =>
|
||||
{
|
||||
string input = inputs[i];
|
||||
Globals.Logger.User("Adding DAT: {0}", input.Split('¬')[0]);
|
||||
datHeaders[i] = new DatFile
|
||||
{
|
||||
DatFormat = (DatFormat != 0 ? DatFormat : 0),
|
||||
DedupeRoms = DedupeRoms,
|
||||
};
|
||||
|
||||
datHeaders[i].Parse(input.Split('¬')[0], i, 0, splitType, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName);
|
||||
});
|
||||
|
||||
watch.Stop();
|
||||
|
||||
watch.Start("Populating internal DAT");
|
||||
Parallel.For(0, inputs.Count, Globals.ParallelOptions, i =>
|
||||
{
|
||||
// Get the list of keys from the DAT
|
||||
List<string> keys = datHeaders[i].Keys.ToList();
|
||||
foreach (string key in keys)
|
||||
{
|
||||
// Add everything from the key to the internal DAT
|
||||
AddRange(key, datHeaders[i][key]);
|
||||
|
||||
// Now remove the key from the source DAT
|
||||
datHeaders[i].Remove(key);
|
||||
}
|
||||
|
||||
// Now remove the file dictionary from the source DAT to save memory
|
||||
datHeaders[i].DeleteDictionary();
|
||||
});
|
||||
|
||||
// Now that we have a merged DAT, filter it
|
||||
Filter(filter, single, trim, root);
|
||||
|
||||
watch.Stop();
|
||||
|
||||
return datHeaders.ToList();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Output diffs against a base set
|
||||
/// </summary>
|
||||
/// <param name="inputPaths">Names of the input files and/or folders</param>
|
||||
/// <param name="basePaths">Names of base files and/or folders</param>
|
||||
/// <param name="outDir">Optional param for output directory</param>
|
||||
/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
|
||||
/// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
|
||||
/// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
|
||||
/// <param name="descAsName">True to allow SL DATs to have game names used instead of descriptions, false otherwise (default)</param>
|
||||
/// <param name="filter">Filter object to be passed to the DatItem level</param>
|
||||
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
|
||||
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
|
||||
/// <param name="single">True if all games should be replaced by '!', false otherwise</param>
|
||||
/// <param name="root">String representing root directory to compare against for length calculation</param>
|
||||
public void DiffAgainst(List<string> inputPaths, List<string> basePaths, string outDir, bool inplace, bool clean, bool remUnicode,
|
||||
bool descAsName, Filter filter, SplitType splitType, bool trim, bool single, string root)
|
||||
{
|
||||
// First we want to parse all of the base DATs into the input
|
||||
InternalStopwatch watch = new InternalStopwatch("Populating base DAT for comparison...");
|
||||
|
||||
List<string> baseFileNames = FileTools.GetOnlyFilesFromInputs(basePaths);
|
||||
Parallel.ForEach(baseFileNames, Globals.ParallelOptions, path =>
|
||||
{
|
||||
Parse(path, 0, 0, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName);
|
||||
});
|
||||
|
||||
watch.Stop();
|
||||
|
||||
// For comparison's sake, we want to use CRC as the base ordering
|
||||
BucketBy(SortedBy.CRC, DedupeType.Full);
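// Note: with CRC bucketing, the duplicate checks below compare items by hash rather than
// by machine or rom name.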
|
||||
|
||||
// Now we want to compare each input DAT against the base
|
||||
List<string> inputFileNames = FileTools.GetOnlyFilesFromInputs(inputPaths, appendparent: true);
|
||||
foreach (string path in inputFileNames)
|
||||
{
|
||||
// Get the two halves of the path
|
||||
string[] splitpath = path.Split('¬');
|
||||
|
||||
Globals.Logger.User("Comparing '{0}'' to base DAT", splitpath[0]);
|
||||
|
||||
// First we parse in the DAT internally
|
||||
DatFile intDat = new DatFile();
|
||||
intDat.Parse(splitpath[0], 1, 1, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName);
|
||||
|
||||
// For comparison's sake, we want to use CRC as the base ordering
|
||||
intDat.BucketBy(SortedBy.CRC, DedupeType.Full);
|
||||
|
||||
// Then we do a hashwise comparison against the base DAT
|
||||
List<string> keys = intDat.Keys.ToList();
|
||||
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
|
||||
{
|
||||
List<DatItem> datItems = intDat[key];
|
||||
List<DatItem> keepDatItems = new List<DatItem>();
|
||||
foreach (DatItem datItem in datItems)
|
||||
{
|
||||
if (!datItem.HasDuplicates(this, true))
|
||||
{
|
||||
keepDatItems.Add(datItem);
|
||||
}
|
||||
}
|
||||
|
||||
// Now add the new list to the key
|
||||
intDat.Remove(key);
|
||||
intDat.AddRange(key, keepDatItems);
|
||||
});
|
||||
|
||||
// Determine the output path for the DAT
|
||||
string interOutDir = outDir;
|
||||
if (inplace)
|
||||
{
|
||||
interOutDir = Path.GetDirectoryName(path);
|
||||
}
|
||||
else if (!String.IsNullOrEmpty(interOutDir))
|
||||
{
|
||||
interOutDir = Path.GetDirectoryName(Path.Combine(interOutDir, splitpath[0].Remove(0, splitpath[1].Length + 1)));
|
||||
}
|
||||
else
|
||||
{
|
||||
interOutDir = Path.GetDirectoryName(Path.Combine(Environment.CurrentDirectory, splitpath[0].Remove(0, splitpath[1].Length + 1)));
|
||||
}
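// Note: splitpath[1] is the original input path, so stripping its length from splitpath[0]
// keeps the file's position relative to that input when rebuilding the path under outDir.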
|
||||
|
||||
// Once we're done, try writing out
|
||||
intDat.WriteToFile(interOutDir);
|
||||
|
||||
// Due to possible memory requirements, we force a garbage collection
|
||||
GC.Collect();
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Output cascading diffs
|
||||
/// </summary>
|
||||
/// <param name="outDir">Output directory to write the DATs to</param>
|
||||
/// <param name="inplace">True if cascaded diffs are outputted in-place, false otherwise</param>
|
||||
/// <param name="inputs">List of inputs to write out from</param>
|
||||
/// <param name="datHeaders">Dat headers used optionally</param>
|
||||
/// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
|
||||
public void DiffCascade(string outDir, bool inplace, List<string> inputs, List<DatFile> datHeaders, bool skip)
|
||||
{
|
||||
string post = "";
|
||||
|
||||
// Create a list of DatData objects representing output files
|
||||
List<DatFile> outDats = new List<DatFile>();
|
||||
|
||||
// Loop through each of the inputs and get or create a new DatData object
|
||||
InternalStopwatch watch = new InternalStopwatch("Initializing all output DATs");
|
||||
|
||||
DatFile[] outDatsArray = new DatFile[inputs.Count];
|
||||
|
||||
Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
|
||||
{
|
||||
string innerpost = " (" + Path.GetFileNameWithoutExtension(inputs[j].Split('¬')[0]) + " Only)";
|
||||
DatFile diffData;
|
||||
|
||||
// If we're in inplace mode, take the appropriate DatData object already stored
|
||||
if (inplace || outDir != Environment.CurrentDirectory)
|
||||
{
|
||||
diffData = datHeaders[j];
|
||||
}
|
||||
else
|
||||
{
|
||||
diffData = new DatFile(this);
|
||||
diffData.FileName += post;
|
||||
diffData.Name += post;
|
||||
diffData.Description += post;
|
||||
}
|
||||
diffData.ResetDictionary();
|
||||
|
||||
outDatsArray[j] = diffData;
|
||||
});
|
||||
|
||||
outDats = outDatsArray.ToList();
|
||||
watch.Stop();
|
||||
|
||||
// Now, loop through the dictionary and populate the correct DATs
|
||||
watch.Start("Populating all output DATs");
|
||||
List<string> keys = Keys.ToList();
|
||||
|
||||
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
|
||||
{
|
||||
List<DatItem> items = DatItem.Merge(this[key]);
|
||||
|
||||
// If the rom list is empty or null, just skip it
|
||||
if (items == null || items.Count == 0)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
foreach (DatItem item in items)
|
||||
{
|
||||
// There are odd cases where items have a SystemID < 0. Skip them for now
|
||||
if (item.SystemID < 0)
|
||||
{
|
||||
Globals.Logger.Warning("Item found with a <0 SystemID: {0}", item.Name);
|
||||
continue;
|
||||
}
|
||||
|
||||
outDats[item.SystemID].Add(key, item);
|
||||
}
|
||||
});
|
||||
|
||||
watch.Stop();
|
||||
|
||||
// Finally, loop through and output each of the DATs
|
||||
watch.Start("Outputting all created DATs");
|
||||
|
||||
Parallel.For((skip ? 1 : 0), inputs.Count, Globals.ParallelOptions, j =>
|
||||
{
|
||||
// If we have an output directory set, replace the path
|
||||
string path = "";
|
||||
if (inplace)
|
||||
{
|
||||
path = Path.GetDirectoryName(inputs[j].Split('¬')[0]);
|
||||
}
|
||||
else if (outDir != Environment.CurrentDirectory)
|
||||
{
|
||||
string[] split = inputs[j].Split('¬');
|
||||
path = outDir + (split[0] == split[1]
|
||||
? Path.GetFileName(split[0])
|
||||
: (Path.GetDirectoryName(split[0]).Remove(0, split[1].Length)));
|
||||
}
|
||||
|
||||
// Try to output the file
|
||||
outDats[j].WriteToFile(path);
|
||||
});
|
||||
|
||||
watch.Stop();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Output non-cascading diffs
|
||||
/// </summary>
|
||||
/// <param name="diff">Non-zero flag for diffing mode, zero otherwise</param>
|
||||
/// <param name="outDir">Output directory to write the DATs to</param>
|
||||
/// <param name="inputs">List of inputs to write out from</param>
|
||||
public void DiffNoCascade(DiffMode diff, string outDir, List<string> inputs)
|
||||
{
|
||||
InternalStopwatch watch = new InternalStopwatch("Initializing all output DATs");
|
||||
|
||||
// Default vars for use
|
||||
string post = "";
|
||||
DatFile outerDiffData = new DatFile();
|
||||
DatFile dupeData = new DatFile();
|
||||
|
||||
// Fill in any information not in the base DAT
|
||||
if (String.IsNullOrEmpty(FileName))
|
||||
{
|
||||
FileName = "All DATs";
|
||||
}
|
||||
if (String.IsNullOrEmpty(Name))
|
||||
{
|
||||
Name = "All DATs";
|
||||
}
|
||||
if (String.IsNullOrEmpty(Description))
|
||||
{
|
||||
Description = "All DATs";
|
||||
}
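// From the handling below: DiffMode.NoDupes collects items found in only one input
// ((A-B)+(B-A)), DiffMode.Dupes collects items found in more than one input, and
// DiffMode.Individuals writes one "<input> Only" DAT per input.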
|
||||
|
||||
// Don't have External dupes
|
||||
if ((diff & DiffMode.NoDupes) != 0)
|
||||
{
|
||||
post = " (No Duplicates)";
|
||||
outerDiffData = new DatFile(this);
|
||||
outerDiffData.FileName += post;
|
||||
outerDiffData.Name += post;
|
||||
outerDiffData.Description += post;
|
||||
outerDiffData.ResetDictionary();
|
||||
}
|
||||
|
||||
// Have External dupes
|
||||
if ((diff & DiffMode.Dupes) != 0)
|
||||
{
|
||||
post = " (Duplicates)";
|
||||
dupeData = new DatFile(this);
|
||||
dupeData.FileName += post;
|
||||
dupeData.Name += post;
|
||||
dupeData.Description += post;
|
||||
dupeData.ResetDictionary();
|
||||
}
|
||||
|
||||
// Create a list of DatData objects representing individual output files
|
||||
List<DatFile> outDats = new List<DatFile>();
|
||||
|
||||
// Loop through each of the inputs and get or create a new DatData object
|
||||
if ((diff & DiffMode.Individuals) != 0)
|
||||
{
|
||||
DatFile[] outDatsArray = new DatFile[inputs.Count];
|
||||
|
||||
Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
|
||||
{
|
||||
string innerpost = " (" + Path.GetFileNameWithoutExtension(inputs[j].Split('¬')[0]) + " Only)";
|
||||
DatFile diffData = new DatFile(this);
|
||||
diffData.FileName += innerpost;
|
||||
diffData.Name += innerpost;
|
||||
diffData.Description += innerpost;
|
||||
diffData.ResetDictionary();
|
||||
outDatsArray[j] = diffData;
|
||||
});
|
||||
|
||||
outDats = outDatsArray.ToList();
|
||||
}
|
||||
|
||||
watch.Stop();
|
||||
|
||||
// Now, loop through the dictionary and populate the correct DATs
|
||||
watch.Start("Populating all output DATs");
|
||||
|
||||
List<string> keys = Keys.ToList();
|
||||
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
|
||||
{
|
||||
List<DatItem> items = DatItem.Merge(this[key]);
|
||||
|
||||
// If the rom list is empty or null, just skip it
|
||||
if (items == null || items.Count == 0)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// Loop through and add the items correctly
|
||||
foreach(DatItem item in items)
|
||||
{
|
||||
// No duplicates
|
||||
if ((diff & DiffMode.NoDupes) != 0 || (diff & DiffMode.Individuals) != 0)
|
||||
{
|
||||
if ((item.Dupe & DupeType.Internal) != 0)
|
||||
{
|
||||
// Individual DATs that are output
|
||||
if ((diff & DiffMode.Individuals) != 0)
|
||||
{
|
||||
outDats[item.SystemID].Add(key, item);
|
||||
}
|
||||
|
||||
// Merged no-duplicates DAT
|
||||
if ((diff & DiffMode.NoDupes) != 0)
|
||||
{
|
||||
DatItem newrom = item.Clone() as DatItem;
|
||||
newrom.MachineName += " (" + Path.GetFileNameWithoutExtension(inputs[newrom.SystemID].Split('¬')[0]) + ")";
|
||||
|
||||
outerDiffData.Add(key, newrom);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Duplicates only
|
||||
if ((diff & DiffMode.Dupes) != 0)
|
||||
{
|
||||
if ((item.Dupe & DupeType.External) != 0)
|
||||
{
|
||||
DatItem newrom = item.Clone() as DatItem;
|
||||
newrom.MachineName += " (" + Path.GetFileNameWithoutExtension(inputs[newrom.SystemID].Split('¬')[0]) + ")";
|
||||
|
||||
dupeData.Add(key, newrom);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
watch.Stop();
|
||||
|
||||
// Finally, loop through and output each of the DATs
|
||||
watch.Start("Outputting all created DATs");
|
||||
|
||||
// Output the difflist (a-b)+(b-a) diff
|
||||
if ((diff & DiffMode.NoDupes) != 0)
|
||||
{
|
||||
outerDiffData.WriteToFile(outDir);
|
||||
}
|
||||
|
||||
// Output the duplicates (a∩b) diff
|
||||
if ((diff & DiffMode.Dupes) != 0)
|
||||
{
|
||||
dupeData.WriteToFile(outDir);
|
||||
}
|
||||
|
||||
// Output the individual (a-b) DATs
|
||||
if ((diff & DiffMode.Individuals) != 0)
|
||||
{
|
||||
Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
|
||||
{
|
||||
// If we have an output directory set, replace the path
|
||||
string[] split = inputs[j].Split('¬');
|
||||
string path = Path.Combine(outDir,
|
||||
(split[0] == split[1]
|
||||
? Path.GetFileName(split[0])
|
||||
: (Path.GetDirectoryName(split[0]).Remove(0, split[1].Length))));
|
||||
|
||||
// Try to output the file
|
||||
outDats[j].WriteToFile(path);
|
||||
});
|
||||
}
|
||||
|
||||
watch.Stop();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Output user defined merge
|
||||
/// </summary>
|
||||
/// <param name="outDir">Output directory to write the DATs to</param>
|
||||
/// <param name="inputs">List of inputs to write out from</param>
|
||||
/// <param name="datHeaders">Dat headers used optionally</param>
|
||||
public void MergeNoDiff(string outDir, List<string> inputs, List<DatFile> datHeaders)
|
||||
{
|
||||
// If we're in SuperDAT mode, prefix all games with their respective DATs
|
||||
if (Type == "SuperDAT")
|
||||
{
|
||||
List<string> keys = Keys.ToList();
|
||||
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
|
||||
{
|
||||
List<DatItem> items = this[key].ToList();
|
||||
List<DatItem> newItems = new List<DatItem>();
|
||||
foreach (DatItem item in items)
|
||||
{
|
||||
DatItem newItem = item;
|
||||
string filename = inputs[newItem.SystemID].Split('¬')[0];
|
||||
string rootpath = inputs[newItem.SystemID].Split('¬')[1];
|
||||
|
||||
rootpath += (rootpath == "" ? "" : Path.DirectorySeparatorChar.ToString());
|
||||
filename = filename.Remove(0, rootpath.Length);
|
||||
newItem.MachineName = Path.GetDirectoryName(filename) + Path.DirectorySeparatorChar
|
||||
+ Path.GetFileNameWithoutExtension(filename) + Path.DirectorySeparatorChar
|
||||
+ newItem.MachineName;
|
||||
|
||||
newItems.Add(newItem);
|
||||
}
|
||||
|
||||
Remove(key);
|
||||
AddRange(key, newItems);
|
||||
});
|
||||
}
|
||||
|
||||
// Try to output the file
|
||||
WriteToFile(outDir);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Convert, update, and filter a DAT file or set of files
|
||||
/// </summary>
|
||||
/// <param name="inputFileNames">Names of the input files and/or folders</param>
|
||||
/// <param name="outDir">Optional param for output directory</param>
|
||||
/// <param name="merge">True if input files should be merged into a single file, false otherwise</param>
|
||||
/// <param name="diff">Non-zero flag for diffing mode, zero otherwise</param>
|
||||
/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
|
||||
/// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
|
||||
/// <param name="bare">True if the date should not be appended to the default name, false otherwise [OBSOLETE]</param>
|
||||
/// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
|
||||
/// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
|
||||
/// <param name="descAsName">True to allow SL DATs to have game names used instead of descriptions, false otherwise (default)</param>
|
||||
/// <param name="filter">Filter object to be passed to the DatItem level</param>
|
||||
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
|
||||
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
|
||||
/// <param name="single">True if all games should be replaced by '!', false otherwise</param>
|
||||
/// <param name="root">String representing root directory to compare against for length calculation</param>
|
||||
public void Update(List<string> inputFileNames, string outDir, bool inplace, bool clean, bool remUnicode, bool descAsName,
|
||||
Filter filter, SplitType splitType, bool trim, bool single, string root)
|
||||
{
|
||||
for (int i = 0; i < inputFileNames.Count; i++)
|
||||
{
|
||||
// Get the input file name
|
||||
string inputFileName = inputFileNames[i];
|
||||
|
||||
// Clean the input string
|
||||
if (inputFileName != "")
|
||||
{
|
||||
inputFileName = Path.GetFullPath(inputFileName);
|
||||
}
|
||||
|
||||
if (File.Exists(inputFileName))
|
||||
{
|
||||
// If inplace is set, override the output dir
|
||||
string realOutDir = outDir;
|
||||
if (inplace)
|
||||
{
|
||||
realOutDir = Path.GetDirectoryName(inputFileName);
|
||||
}
|
||||
|
||||
DatFile innerDatdata = new DatFile(this);
|
||||
Globals.Logger.User("Processing '{0}'", Path.GetFileName(inputFileName));
|
||||
innerDatdata.Parse(inputFileName, 0, 0, splitType, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName,
|
||||
keepext: ((innerDatdata.DatFormat & DatFormat.TSV) != 0 || (innerDatdata.DatFormat & DatFormat.CSV) != 0));
|
||||
innerDatdata.Filter(filter, trim, single, root);
|
||||
|
||||
// Try to output the file
|
||||
innerDatdata.WriteToFile((realOutDir == Environment.CurrentDirectory ? Path.GetDirectoryName(inputFileName) : realOutDir), overwrite: (realOutDir != Environment.CurrentDirectory));
|
||||
}
|
||||
else if (Directory.Exists(inputFileName))
|
||||
{
|
||||
inputFileName = Path.GetFullPath(inputFileName) + Path.DirectorySeparatorChar;
|
||||
|
||||
// If inplace is set, override the output dir
|
||||
string realOutDir = outDir;
|
||||
if (inplace)
|
||||
{
|
||||
realOutDir = Path.GetDirectoryName(inputFileName);
|
||||
}
|
||||
|
||||
List<string> subFiles = Directory.EnumerateFiles(inputFileName, "*", SearchOption.AllDirectories).ToList();
|
||||
Parallel.ForEach(subFiles, Globals.ParallelOptions, file =>
|
||||
{
|
||||
Globals.Logger.User("Processing '{0}'", Path.GetFullPath(file).Remove(0, inputFileName.Length));
|
||||
DatFile innerDatdata = new DatFile(this);
|
||||
innerDatdata.Parse(file, 0, 0, splitType, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName,
|
||||
keepext: ((innerDatdata.DatFormat & DatFormat.TSV) != 0 || (innerDatdata.DatFormat & DatFormat.CSV) != 0));
|
||||
innerDatdata.Filter(filter, trim, single, root);
|
||||
|
||||
// Try to output the file
|
||||
innerDatdata.WriteToFile((realOutDir == Environment.CurrentDirectory ? Path.GetDirectoryName(file) : realOutDir + Path.GetDirectoryName(file).Remove(0, inputFileName.Length - 1)),
|
||||
overwrite: (realOutDir != Environment.CurrentDirectory));
|
||||
});
|
||||
}
|
||||
else
|
||||
{
|
||||
Globals.Logger.Error("I'm sorry but '{0}' doesn't exist!", inputFileName);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
@@ -1,426 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

using SabreTools.Library.Data;
using SabreTools.Library.Items;
using SabreTools.Library.Tools;

#if MONO
using System.IO;
#else
using Alphaleonis.Win32.Filesystem;

using IOException = System.IO.IOException;
using SearchOption = System.IO.SearchOption;
#endif
using SharpCompress.Common;

namespace SabreTools.Library.DatFiles
|
||||
{
|
||||
/// <summary>
|
||||
/// Represents a format-agnostic DAT
|
||||
/// </summary>
|
||||
public partial class DatFile
|
||||
{
|
||||
#region Populate DAT from Directory [MODULAR DONE, FOR NOW]
|
||||
|
||||
/// <summary>
|
||||
/// Create a new Dat from a directory
|
||||
/// </summary>
|
||||
/// <param name="basePath">Base folder to be used in creating the DAT</param>
|
||||
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
|
||||
/// <param name="bare">True if the date should be omitted from the DAT, false otherwise</param>
|
||||
/// <param name="archivesAsFiles">True if archives should be treated as files, false otherwise</param>
|
||||
/// <param name="enableGzip">True if GZIP archives should be treated as files, false otherwise</param>
|
||||
/// <param name="skipFileType">Type of files that should be skipped</param>
|
||||
/// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
|
||||
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
|
||||
/// <param name="tempDir">Name of the directory to create a temp folder in (blank is current directory)</param>
|
||||
/// <param name="outDir">Output directory to </param>
|
||||
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
|
||||
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
|
||||
public bool PopulateFromDir(string basePath, Hash omitFromScan, bool bare, bool archivesAsFiles, bool enableGzip,
|
||||
SkipFileType skipFileType, bool addBlanks, bool addDate, string tempDir, bool copyFiles, string headerToCheckAgainst)
|
||||
{
|
||||
// If the description is defined but not the name, set the name from the description
|
||||
if (String.IsNullOrEmpty(Name) && !String.IsNullOrEmpty(Description))
|
||||
{
|
||||
Name = Description;
|
||||
}
|
||||
|
||||
// If the name is defined but not the description, set the description from the name
|
||||
else if (!String.IsNullOrEmpty(Name) && String.IsNullOrEmpty(Description))
|
||||
{
|
||||
Description = Name + (bare ? "" : " (" + Date + ")");
|
||||
}
|
||||
|
||||
// If neither the name nor the description is defined, set them from the automatic values
|
||||
else if (String.IsNullOrEmpty(Name) && String.IsNullOrEmpty(Description))
|
||||
{
|
||||
Name = basePath.Split(Path.DirectorySeparatorChar).Last();
|
||||
Description = Name + (bare ? "" : " (" + Date + ")");
|
||||
}
|
||||
|
||||
// Process the input
|
||||
if (Directory.Exists(basePath))
|
||||
{
|
||||
Globals.Logger.Verbose("Folder found: {0}", basePath);
|
||||
|
||||
// Process the files in the main folder
|
||||
List<string> files = Directory.EnumerateFiles(basePath, "*", SearchOption.TopDirectoryOnly).ToList();
|
||||
Parallel.ForEach(files, Globals.ParallelOptions, item =>
|
||||
{
|
||||
PopulateFromDirCheckFile(item, basePath, omitFromScan, bare, archivesAsFiles, enableGzip, skipFileType,
|
||||
addBlanks, addDate, tempDir, copyFiles, headerToCheckAgainst);
|
||||
});
|
||||
|
||||
// Find all top-level subfolders
|
||||
files = Directory.EnumerateDirectories(basePath, "*", SearchOption.TopDirectoryOnly).ToList();
|
||||
foreach (string item in files)
|
||||
{
|
||||
List<string> subfiles = Directory.EnumerateFiles(item, "*", SearchOption.AllDirectories).ToList();
|
||||
Parallel.ForEach(subfiles, Globals.ParallelOptions, subitem =>
|
||||
{
|
||||
PopulateFromDirCheckFile(subitem, basePath, omitFromScan, bare, archivesAsFiles, enableGzip, skipFileType,
|
||||
addBlanks, addDate, tempDir, copyFiles, headerToCheckAgainst);
|
||||
});
|
||||
}
|
||||
|
||||
// Now find all folders that are empty, if we are supposed to
|
||||
if (!Romba && addBlanks)
|
||||
{
|
||||
List<string> empties = FileTools.GetEmptyDirectories(basePath).ToList();
|
||||
Parallel.ForEach(empties, Globals.ParallelOptions, dir =>
|
||||
{
|
||||
// Get the full path for the directory
|
||||
string fulldir = Path.GetFullPath(dir);
|
||||
|
||||
// Set the temporary variables
|
||||
string gamename = "";
|
||||
string romname = "";
|
||||
|
||||
// If we have a SuperDAT, we want anything that's not the base path as the game, and the file as the rom
|
||||
if (Type == "SuperDAT")
|
||||
{
|
||||
gamename = fulldir.Remove(0, basePath.Length + 1);
|
||||
romname = "_";
|
||||
}
|
||||
|
||||
// Otherwise, we want just the top level folder as the game, and the file as everything else
|
||||
else
|
||||
{
|
||||
gamename = fulldir.Remove(0, basePath.Length + 1).Split(Path.DirectorySeparatorChar)[0];
|
||||
romname = Path.Combine(fulldir.Remove(0, basePath.Length + 1 + gamename.Length), "_");
|
||||
}
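// Illustrative example (assumed layout): for an empty folder "<basePath>/GameA/sub",
// SuperDAT mode yields gamename "GameA/sub" with romname "_", while the default mode
// yields gamename "GameA" with a romname ending in "sub/_" (before the sanitizing below).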
|
||||
|
||||
// Sanitize the names
|
||||
if (gamename.StartsWith(Path.DirectorySeparatorChar.ToString()))
|
||||
{
|
||||
gamename = gamename.Substring(1);
|
||||
}
|
||||
if (gamename.EndsWith(Path.DirectorySeparatorChar.ToString()))
|
||||
{
|
||||
gamename = gamename.Substring(0, gamename.Length - 1);
|
||||
}
|
||||
if (romname.StartsWith(Path.DirectorySeparatorChar.ToString()))
|
||||
{
|
||||
romname = romname.Substring(1);
|
||||
}
|
||||
if (romname.EndsWith(Path.DirectorySeparatorChar.ToString()))
|
||||
{
|
||||
romname = romname.Substring(0, romname.Length - 1);
|
||||
}
|
||||
|
||||
Globals.Logger.Verbose("Adding blank empty folder: {0}", gamename);
|
||||
this["null"].Add(new Rom(romname, gamename, omitFromScan));
|
||||
});
|
||||
}
|
||||
}
|
||||
else if (File.Exists(basePath))
|
||||
{
|
||||
PopulateFromDirCheckFile(basePath, Path.GetDirectoryName(Path.GetDirectoryName(basePath)), omitFromScan, bare, archivesAsFiles, enableGzip,
|
||||
skipFileType, addBlanks, addDate, tempDir, copyFiles, headerToCheckAgainst);
|
||||
}
|
||||
|
||||
// Now that we're done, delete the temp folder (if it's not the default)
|
||||
Globals.Logger.User("Cleaning temp folder");
|
||||
if (tempDir != Path.GetTempPath())
|
||||
{
|
||||
FileTools.TryDeleteDirectory(tempDir);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Check a given file for hashes, based on current settings
|
||||
/// </summary>
|
||||
/// <param name="item">Filename of the item to be checked</param>
|
||||
/// <param name="basePath">Base folder to be used in creating the DAT</param>
|
||||
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
|
||||
/// <param name="bare">True if the date should be omitted from the DAT, false otherwise</param>
|
||||
/// <param name="archivesAsFiles">True if archives should be treated as files, false otherwise</param>
|
||||
/// <param name="enableGzip">True if GZIP archives should be treated as files, false otherwise</param>
|
||||
/// <param name="skipFileType">Type of files that should be skipped</param>
|
||||
/// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
|
||||
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
|
||||
/// <param name="tempDir">Name of the directory to create a temp folder in (blank is current directory)</param>
|
||||
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
|
||||
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
|
||||
private void PopulateFromDirCheckFile(string item, string basePath, Hash omitFromScan, bool bare, bool archivesAsFiles,
|
||||
bool enableGzip, SkipFileType skipFileType, bool addBlanks, bool addDate, string tempDir, bool copyFiles, string headerToCheckAgainst)
|
||||
{
|
||||
// Define the temporary directory
|
||||
string tempSubDir = Path.GetFullPath(Path.Combine(tempDir, Path.GetRandomFileName())) + Path.DirectorySeparatorChar;
|
||||
|
||||
// Special case for if we are in Romba mode (all names are supposed to be SHA-1 hashes)
|
||||
if (Romba)
|
||||
{
|
||||
Rom rom = ArchiveTools.GetTorrentGZFileInfo(item);
|
||||
|
||||
// If the rom is valid, write it out
|
||||
if (rom != null && rom.Name != null)
|
||||
{
|
||||
// Add the list if it doesn't exist already
|
||||
Add(rom.Size + "-" + rom.CRC, rom);
|
||||
Globals.Logger.User("File added: {0}", Path.GetFileNameWithoutExtension(item) + Environment.NewLine);
|
||||
}
|
||||
else
|
||||
{
|
||||
Globals.Logger.User("File not added: {0}", Path.GetFileNameWithoutExtension(item) + Environment.NewLine);
|
||||
return;
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// If we're copying files, copy it first and get the new filename
|
||||
string newItem = item;
|
||||
string newBasePath = basePath;
|
||||
if (copyFiles)
|
||||
{
|
||||
newBasePath = Path.Combine(tempDir, Path.GetRandomFileName());
|
||||
newItem = Path.GetFullPath(Path.Combine(newBasePath, Path.GetFullPath(item).Remove(0, basePath.Length + 1)));
|
||||
Directory.CreateDirectory(Path.GetDirectoryName(newItem));
|
||||
File.Copy(item, newItem, true);
|
||||
}
|
||||
|
||||
// Create a list for all found items
|
||||
List<Rom> extracted = null;
|
||||
|
||||
// If this is a GZip archive but GZip archives are not supposed to be treated as files, force it to be scanned as an archive
|
||||
if (archivesAsFiles && !enableGzip && newItem.EndsWith(".gz"))
|
||||
{
|
||||
archivesAsFiles = false;
|
||||
}
|
||||
|
||||
// If we don't have archives as files, try to scan the file as an archive
|
||||
if (!archivesAsFiles)
|
||||
{
|
||||
// If all deep hash skip flags are set, do a quickscan
|
||||
if (omitFromScan == Hash.SecureHashes)
|
||||
{
|
||||
extracted = ArchiveTools.GetArchiveFileInfo(newItem, date: addDate);
|
||||
}
|
||||
// Otherwise, get the list with whatever hashes are wanted
|
||||
else
|
||||
{
|
||||
extracted = ArchiveTools.GetExtendedArchiveFileInfo(newItem, omitFromScan: omitFromScan, date: addDate);
|
||||
}
|
||||
}
|
||||
|
||||
// If the file should be skipped based on type, do so now
|
||||
if ((extracted != null && skipFileType == SkipFileType.Archive)
|
||||
|| (extracted == null && skipFileType == SkipFileType.File))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// If the extracted list is null, just scan the item itself
|
||||
if (extracted == null || archivesAsFiles)
|
||||
{
|
||||
PopulateFromDirProcessFile(newItem, "", newBasePath, omitFromScan, addDate, headerToCheckAgainst);
|
||||
}
|
||||
// Otherwise, add all of the found items
|
||||
else
|
||||
{
|
||||
// First take care of the found items
|
||||
Parallel.ForEach(extracted, Globals.ParallelOptions, rom =>
|
||||
{
|
||||
PopulateFromDirProcessFileHelper(newItem,
|
||||
rom,
|
||||
basePath,
|
||||
(Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item));
|
||||
});
|
||||
|
||||
// Then, if we're looking for blanks, get all of the blank folders and add them
|
||||
if (addBlanks)
|
||||
{
|
||||
List<string> empties = ArchiveTools.GetEmptyFoldersInArchive(newItem);
|
||||
Parallel.ForEach(empties, Globals.ParallelOptions, empty =>
|
||||
{
|
||||
Rom emptyRom = new Rom(Path.Combine(empty, "_"), newItem, omitFromScan);
|
||||
PopulateFromDirProcessFileHelper(newItem,
|
||||
emptyRom,
|
||||
basePath,
|
||||
(Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item));
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// If we copied the file to a temp location, delete that copy directory now
|
||||
if (copyFiles && item != newItem)
|
||||
{
|
||||
FileTools.TryDeleteDirectory(newBasePath);
|
||||
}
|
||||
|
||||
// Delete the sub temp directory
|
||||
FileTools.TryDeleteDirectory(tempSubDir);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Process a single file as a file
|
||||
/// </summary>
|
||||
/// <param name="item">File to be added</param>
|
||||
/// <param name="parent">Parent game to be used</param>
|
||||
/// <param name="basePath">Path the represents the parent directory</param>
|
||||
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
|
||||
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
|
||||
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
|
||||
private void PopulateFromDirProcessFile(string item, string parent, string basePath, Hash omitFromScan,
|
||||
bool addDate, string headerToCheckAgainst)
|
||||
{
|
||||
Globals.Logger.Verbose("'{0}' treated like a file", Path.GetFileName(item));
|
||||
Rom rom = FileTools.GetFileInfo(item, omitFromScan: omitFromScan, date: addDate, header: headerToCheckAgainst);
|
||||
|
||||
PopulateFromDirProcessFileHelper(item, rom, basePath, parent);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Process a single file as a file (with found Rom data)
|
||||
/// </summary>
|
||||
/// <param name="item">File to be added</param>
|
||||
/// <param name="item">Rom data to be used to write to file</param>
|
||||
/// <param name="basepath">Path the represents the parent directory</param>
|
||||
/// <param name="parent">Parent game to be used</param>
|
||||
private void PopulateFromDirProcessFileHelper(string item, DatItem datItem, string basepath, string parent)
|
||||
{
|
||||
// If the datItem isn't a Rom or Disk, return
|
||||
if (datItem.Type != ItemType.Rom && datItem.Type != ItemType.Disk)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
string key = "";
|
||||
if (datItem.Type == ItemType.Rom)
|
||||
{
|
||||
key = ((Rom)datItem).Size + "-" + ((Rom)datItem).CRC;
|
||||
}
|
||||
else
|
||||
{
|
||||
key = ((Disk)datItem).MD5;
|
||||
}
|
||||
|
||||
// Add the list if it doesn't exist already
|
||||
Add(key);
|
||||
|
||||
try
|
||||
{
|
||||
// If the basepath ends with a directory separator, remove it
|
||||
if (!basepath.EndsWith(Path.DirectorySeparatorChar.ToString()))
|
||||
{
|
||||
basepath += Path.DirectorySeparatorChar.ToString();
|
||||
}
|
||||
|
||||
// Make sure we have the full item path
|
||||
item = Path.GetFullPath(item);
|
||||
|
||||
// Get the data to be added as game and item names
|
||||
string gamename = "";
|
||||
string romname = "";
|
||||
|
||||
// If the parent is blank, then we have a non-archive file
|
||||
if (parent == "")
|
||||
{
|
||||
// If we have a SuperDAT, we want anything that's not the base path as the game, and the file as the rom
|
||||
if (Type == "SuperDAT")
|
||||
{
|
||||
gamename = Path.GetDirectoryName(item.Remove(0, basepath.Length));
|
||||
romname = Path.GetFileName(item);
|
||||
}
|
||||
|
||||
// Otherwise, we want just the top level folder as the game, and the file as everything else
|
||||
else
|
||||
{
|
||||
gamename = item.Remove(0, basepath.Length).Split(Path.DirectorySeparatorChar)[0];
|
||||
romname = item.Remove(0, (Path.Combine(basepath, gamename).Length));
|
||||
}
|
||||
}
|
||||
|
||||
// Otherwise, we assume that we have an archive
|
||||
else
|
||||
{
|
||||
// If we have a SuperDAT, we want the archive name as the game, and the file as everything else (?)
|
||||
if (Type == "SuperDAT")
|
||||
{
|
||||
gamename = parent;
|
||||
romname = datItem.Name;
|
||||
}
|
||||
|
||||
// Otherwise, we want the archive name as the game, and the file as everything else
|
||||
else
|
||||
{
|
||||
gamename = parent;
|
||||
romname = datItem.Name;
|
||||
}
|
||||
}
|
||||
|
||||
// Sanitize the names
|
||||
if (romname == null)
|
||||
{
|
||||
romname = "";
|
||||
}
|
||||
if (gamename.StartsWith(Path.DirectorySeparatorChar.ToString()))
|
||||
{
|
||||
gamename = gamename.Substring(1);
|
||||
}
|
||||
if (gamename.EndsWith(Path.DirectorySeparatorChar.ToString()))
|
||||
{
|
||||
gamename = gamename.Substring(0, gamename.Length - 1);
|
||||
}
|
||||
if (romname.StartsWith(Path.DirectorySeparatorChar.ToString()))
|
||||
{
|
||||
romname = romname.Substring(1);
|
||||
}
|
||||
if (romname.EndsWith(Path.DirectorySeparatorChar.ToString()))
|
||||
{
|
||||
romname = romname.Substring(0, romname.Length - 1);
|
||||
}
|
||||
if (!String.IsNullOrEmpty(gamename) && String.IsNullOrEmpty(romname))
|
||||
{
|
||||
romname = gamename;
|
||||
gamename = "Default";
|
||||
}
|
||||
|
||||
// Update rom information
|
||||
datItem.Name = romname;
|
||||
datItem.MachineName = gamename;
|
||||
datItem.MachineDescription = gamename;
|
||||
|
||||
// Add the file information to the DAT
|
||||
Add(key, datItem);
|
||||
|
||||
Globals.Logger.User("File added: {0}", romname + Environment.NewLine);
|
||||
}
|
||||
catch (IOException ex)
|
||||
{
|
||||
Globals.Logger.Error(ex.ToString());
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
@@ -1,839 +0,0 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using System.Web;

using SabreTools.Library.Data;
using SabreTools.Library.Items;

#if MONO
using System.IO;
#else
using Alphaleonis.Win32.Filesystem;
#endif

namespace SabreTools.Library.DatFiles
|
||||
{
|
||||
/// <summary>
|
||||
/// Represents a format-agnostic DAT
|
||||
/// </summary>
|
||||
public partial class DatFile
|
||||
{
|
||||
#region Instance Methods
|
||||
|
||||
#region Bucketing
|
||||
|
||||
/// <summary>
|
||||
/// Take the arbitrarily sorted Files Dictionary and convert to one sorted by a user-defined method
|
||||
/// </summary>
|
||||
/// <param name="bucketBy">SortedBy enum representing how to sort the individual items</param>
|
||||
/// <param name="deduperoms">Dedupe type that should be used</param>
|
||||
/// <param name="lower">True if the key should be lowercased (default), false otherwise</param>
|
||||
/// <param name="norename">True if games should only be compared on game and file name, false if system and source are counted</param>
|
||||
public void BucketBy(SortedBy bucketBy, DedupeType deduperoms, bool lower = true, bool norename = true)
|
||||
{
|
||||
// If we have a situation where there's no dictionary or no keys at all, we skip
|
||||
if (_items == null || _items.Count == 0)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
Globals.Logger.User("Organizing roms by {0}" + (deduperoms != DedupeType.None ? " and merging" : ""), bucketBy);
|
||||
|
||||
// If the sorted type isn't the same, we want to sort the dictionary accordingly
|
||||
if (_sortedBy != bucketBy)
|
||||
{
|
||||
// Set the sorted type
|
||||
_sortedBy = bucketBy;
|
||||
|
||||
// First do the initial sort of all of the roms inplace
|
||||
List<string> oldkeys = Keys.ToList();
|
||||
Parallel.ForEach(oldkeys, Globals.ParallelOptions, key =>
|
||||
{
|
||||
// Get the unsorted current list
|
||||
List<DatItem> roms = this[key];
|
||||
|
||||
// Now add each of the roms to their respective games
|
||||
foreach (DatItem rom in roms)
|
||||
{
|
||||
// We want to get the key most appropriate for the given sorting type
|
||||
string newkey = GetKey(rom, bucketBy, lower, norename);
|
||||
|
||||
// Add the DatItem to the dictionary
|
||||
Add(newkey, rom);
|
||||
}
|
||||
|
||||
// Finally, remove the entire original key
|
||||
Remove(key);
|
||||
});
|
||||
}
|
||||
|
||||
// Now go through and sort all of the individual lists
|
||||
List<string> keys = Keys.ToList();
|
||||
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
|
||||
{
|
||||
// Get the possibly unsorted list
|
||||
List<DatItem> sortedlist = this[key];
|
||||
|
||||
// Sort the list of items to be consistent
|
||||
DatItem.Sort(ref sortedlist, false);
|
||||
|
||||
// If we're merging the roms, do so
|
||||
if (deduperoms == DedupeType.Full || (deduperoms == DedupeType.Game && bucketBy == SortedBy.Game))
|
||||
{
|
||||
sortedlist = DatItem.Merge(sortedlist);
|
||||
}
|
||||
|
||||
// Add the list back to the dictionary
|
||||
Remove(key);
|
||||
AddRange(key, sortedlist);
|
||||
});
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Get the dictionary key that should be used for a given item and sorting type
|
||||
/// </summary>
|
||||
/// <param name="item">DatItem to get the key for</param>
|
||||
/// <param name="sortedBy">SortedBy enum representing what key to get</param>
|
||||
/// <param name="lower">True if the key should be lowercased (default), false otherwise</param>
|
||||
/// <param name="norename">True if games should only be compared on game and file name, false if system and source are counted</param>
|
||||
/// <returns>String representing the key to be used for the DatItem</returns>
|
||||
private string GetKey(DatItem item, SortedBy sortedBy, bool lower = true, bool norename = true)
|
||||
{
|
||||
// Set the output key as the default blank string
|
||||
string key = "";
|
||||
|
||||
// Now determine what the key should be based on the sortedBy value
|
||||
switch (sortedBy)
|
||||
{
|
||||
case SortedBy.CRC:
|
||||
key = (item.Type == ItemType.Rom ? ((Rom)item).CRC : Constants.CRCZero);
|
||||
break;
|
||||
case SortedBy.Game:
|
||||
key = (norename ? ""
|
||||
: item.SystemID.ToString().PadLeft(10, '0')
|
||||
+ "-"
|
||||
+ item.SourceID.ToString().PadLeft(10, '0') + "-")
|
||||
+ (String.IsNullOrEmpty(item.MachineName)
|
||||
? "Default"
|
||||
: item.MachineName);
|
||||
if (lower)
|
||||
{
|
||||
key = key.ToLowerInvariant();
|
||||
}
|
||||
if (key == null)
|
||||
{
|
||||
key = "null";
|
||||
}
|
||||
|
||||
key = HttpUtility.HtmlEncode(key);
|
||||
break;
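// Example (illustrative): with norename == false this produces something like
// "0000000001-0000000002-machine name"; with norename it is just the machine name
// ("Default" when empty), lowercased when lower is set and then HTML-encoded.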
|
||||
case SortedBy.MD5:
|
||||
key = (item.Type == ItemType.Rom
|
||||
? ((Rom)item).MD5
|
||||
: (item.Type == ItemType.Disk
|
||||
? ((Disk)item).MD5
|
||||
: Constants.MD5Zero));
|
||||
break;
|
||||
case SortedBy.SHA1:
|
||||
key = (item.Type == ItemType.Rom
|
||||
? ((Rom)item).SHA1
|
||||
: (item.Type == ItemType.Disk
|
||||
? ((Disk)item).SHA1
|
||||
: Constants.SHA1Zero));
|
||||
break;
|
||||
case SortedBy.SHA256:
|
||||
key = (item.Type == ItemType.Rom
|
||||
? ((Rom)item).SHA256
|
||||
: (item.Type == ItemType.Disk
|
||||
? ((Disk)item).SHA256
|
||||
: Constants.SHA256Zero));
|
||||
break;
|
||||
case SortedBy.SHA384:
|
||||
key = (item.Type == ItemType.Rom
|
||||
? ((Rom)item).SHA384
|
||||
: (item.Type == ItemType.Disk
|
||||
? ((Disk)item).SHA384
|
||||
: Constants.SHA384Zero));
|
||||
break;
|
||||
case SortedBy.SHA512:
|
||||
key = (item.Type == ItemType.Rom
|
||||
? ((Rom)item).SHA512
|
||||
: (item.Type == ItemType.Disk
|
||||
? ((Disk)item).SHA512
|
||||
: Constants.SHA512Zero));
|
||||
break;
|
||||
}
|
||||
|
||||
// Double and triple check the key for corner cases
|
||||
if (key == null)
|
||||
{
|
||||
key = "";
|
||||
}
|
||||
|
||||
return key;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Filtering
|
||||
|
||||
/// <summary>
|
||||
/// Filter a DAT based on input parameters and modify the items
|
||||
/// </summary>
|
||||
/// <param name="filter">Filter object for passing to the DatItem level</param>
|
||||
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
|
||||
/// <param name="single">True if all games should be replaced by '!', false otherwise</param>
|
||||
/// <param name="root">String representing root directory to compare against for length calculation</param>
|
||||
public void Filter(Filter filter, bool single, bool trim, string root)
|
||||
{
|
||||
try
|
||||
{
|
||||
// Loop over every key in the dictionary
|
||||
List<string> keys = Keys.ToList();
|
||||
foreach (string key in keys)
|
||||
{
|
||||
// For every item in the current key
|
||||
List<DatItem> items = this[key];
|
||||
List<DatItem> newitems = new List<DatItem>();
|
||||
foreach (DatItem item in items)
|
||||
{
|
||||
// If the rom passes the filter, include it
|
||||
if (filter.ItemPasses(item))
|
||||
{
|
||||
// If we are in single game mode, rename all games
|
||||
if (single)
|
||||
{
|
||||
item.MachineName = "!";
|
||||
}
|
||||
|
||||
// If we are in NTFS trim mode, trim the game name
|
||||
if (trim)
|
||||
{
|
||||
// Windows max name length is 260
|
||||
int usableLength = 260 - item.MachineName.Length - root.Length;
|
||||
if (item.Name.Length > usableLength)
|
||||
{
|
||||
string ext = Path.GetExtension(item.Name);
|
||||
item.Name = item.Name.Substring(0, usableLength - ext.Length);
|
||||
item.Name += ext;
|
||||
}
|
||||
}
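// Worked example: with a 200-character machine name and a 30-character root, usableLength is
// 260 - 200 - 30 = 30, so any item name longer than 30 characters is shortened while keeping
// its extension.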
|
||||
|
||||
// Lock the list and add the item back
|
||||
lock (newitems)
|
||||
{
|
||||
newitems.Add(item);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Remove(key);
|
||||
AddRange(key, newitems);
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Globals.Logger.Error(ex.ToString());
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Use game descriptions as names in the DAT, updating cloneof/romof/sampleof
|
||||
/// </summary>
|
||||
public void MachineDescriptionToName()
|
||||
{
|
||||
try
|
||||
{
|
||||
// First we want to get a mapping for all games to description
|
||||
ConcurrentDictionary<string, string> mapping = new ConcurrentDictionary<string, string>();
|
||||
List<string> keys = Keys.ToList();
|
||||
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
|
||||
{
|
||||
List<DatItem> items = this[key];
|
||||
foreach (DatItem item in items)
|
||||
{
|
||||
// If the key mapping doesn't exist, add it
|
||||
if (!mapping.ContainsKey(item.MachineName))
|
||||
{
|
||||
mapping.TryAdd(item.MachineName, item.MachineDescription.Replace('/', '_').Replace("\"", "''"));
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Now we loop through every item and update accordingly
|
||||
keys = Keys.ToList();
|
||||
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
|
||||
{
|
||||
List<DatItem> items = this[key];
|
||||
List<DatItem> newItems = new List<DatItem>();
|
||||
foreach (DatItem item in items)
|
||||
{
|
||||
// Update machine name
|
||||
if (!String.IsNullOrEmpty(item.MachineName) && mapping.ContainsKey(item.MachineName))
|
||||
{
|
||||
item.MachineName = mapping[item.MachineName];
|
||||
}
|
||||
|
||||
// Update cloneof
|
||||
if (!String.IsNullOrEmpty(item.CloneOf) && mapping.ContainsKey(item.CloneOf))
|
||||
{
|
||||
item.CloneOf = mapping[item.CloneOf];
|
||||
}
|
||||
|
||||
// Update romof
|
||||
if (!String.IsNullOrEmpty(item.RomOf) && mapping.ContainsKey(item.RomOf))
|
||||
{
|
||||
item.RomOf = mapping[item.RomOf];
|
||||
}
|
||||
|
||||
// Update sampleof
|
||||
if (!String.IsNullOrEmpty(item.SampleOf) && mapping.ContainsKey(item.SampleOf))
|
||||
{
|
||||
item.SampleOf = mapping[item.SampleOf];
|
||||
}
|
||||
|
||||
// Add the new item to the output list
|
||||
newItems.Add(item);
|
||||
}
|
||||
|
||||
// Replace the old list of roms with the new one
|
||||
Remove(key);
|
||||
AddRange(key, newItems);
|
||||
});
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Globals.Logger.Warning(ex.ToString());
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Strip the given hash types from the DAT
|
||||
/// </summary>
|
||||
public void StripHashesFromItems()
|
||||
{
|
||||
// Output the logging statement
|
||||
Globals.Logger.User("Stripping requested hashes");
|
||||
|
||||
// Now process all of the roms
|
||||
List<string> keys = Keys.ToList();
|
||||
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
|
||||
{
|
||||
List<DatItem> items = this[key];
|
||||
for (int j = 0; j < items.Count; j++)
|
||||
{
|
||||
DatItem item = items[j];
|
||||
if (item.Type == ItemType.Rom)
|
||||
{
|
||||
Rom rom = (Rom)item;
|
||||
if ((StripHash & Hash.MD5) != 0)
|
||||
{
|
||||
rom.MD5 = null;
|
||||
}
|
||||
if ((StripHash & Hash.SHA1) != 0)
|
||||
{
|
||||
rom.SHA1 = null;
|
||||
}
|
||||
if ((StripHash & Hash.SHA256) != 0)
|
||||
{
|
||||
rom.SHA256 = null;
|
||||
}
|
||||
if ((StripHash & Hash.SHA384) != 0)
|
||||
{
|
||||
rom.SHA384 = null;
|
||||
}
|
||||
if ((StripHash & Hash.SHA512) != 0)
|
||||
{
|
||||
rom.SHA512 = null;
|
||||
}
|
||||
|
||||
items[j] = rom;
|
||||
}
|
||||
else if (item.Type == ItemType.Disk)
|
||||
{
|
||||
Disk disk = (Disk)item;
|
||||
if ((StripHash & Hash.MD5) != 0)
|
||||
{
|
||||
disk.MD5 = null;
|
||||
}
|
||||
if ((StripHash & Hash.SHA1) != 0)
|
||||
{
|
||||
disk.SHA1 = null;
|
||||
}
|
||||
if ((StripHash & Hash.SHA256) != 0)
|
||||
{
|
||||
disk.SHA256 = null;
|
||||
}
|
||||
if ((StripHash & Hash.SHA384) != 0)
|
||||
{
|
||||
disk.SHA384 = null;
|
||||
}
|
||||
if ((StripHash & Hash.SHA512) != 0)
|
||||
{
|
||||
disk.SHA512 = null;
|
||||
}
|
||||
|
||||
items[j] = disk;
|
||||
}
|
||||
}
|
||||
|
||||
Remove(key);
|
||||
AddRange(key, items);
|
||||
});
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Merging/Splitting Methods
|
||||
|
||||
/// <summary>
|
||||
/// Use device_ref tags to get full non-merged sets and remove parenting tags
|
||||
/// </summary>
|
||||
/// <param name="mergeroms">Dedupe type to be used</param>
|
||||
public void CreateDeviceNonMergedSets(DedupeType mergeroms)
|
||||
{
|
||||
Globals.Logger.User("Creating device non-merged sets from the DAT");
|
||||
|
||||
// For sake of ease, the first thing we want to do is sort by game
|
||||
BucketBy(SortedBy.Game, mergeroms, norename: true);
|
||||
_sortedBy = SortedBy.Default;
|
||||
|
||||
// Now we want to loop through all of the games and set the correct information
|
||||
AddRomsFromDevices();
|
||||
|
||||
// Then, remove the romof and cloneof tags so it's not picked up by the manager
|
||||
RemoveTagsFromChild();
|
||||
|
||||
// Finally, remove all sets that are labeled as bios or device
|
||||
//RemoveBiosAndDeviceSets(logger);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Use cloneof tags to create non-merged sets and remove the tags plus using the device_ref tags to get full sets
|
||||
/// </summary>
|
||||
/// <param name="mergeroms">Dedupe type to be used</param>
|
||||
public void CreateFullyNonMergedSets(DedupeType mergeroms)
|
||||
{
|
||||
Globals.Logger.User("Creating fully non-merged sets from the DAT");
|
||||
|
||||
// For sake of ease, the first thing we want to do is sort by game
|
||||
BucketBy(SortedBy.Game, mergeroms, norename: true);
|
||||
_sortedBy = SortedBy.Default;
|
||||
|
||||
// Now we want to loop through all of the games and set the correct information
|
||||
AddRomsFromDevices();
|
||||
AddRomsFromParent();
|
||||
|
||||
// Now that we have looped through the cloneof tags, we loop through the romof tags
|
||||
AddRomsFromBios();
|
||||
|
||||
// Then, remove the romof and cloneof tags so it's not picked up by the manager
|
||||
RemoveTagsFromChild();
|
||||
|
||||
// Finally, remove all sets that are labeled as bios or device
|
||||
//RemoveBiosAndDeviceSets(logger);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Use cloneof tags to create merged sets and remove the tags
|
||||
/// </summary>
|
||||
/// <param name="mergeroms">Dedupe type to be used</param>
|
||||
public void CreateMergedSets(DedupeType mergeroms)
|
||||
{
|
||||
Globals.Logger.User("Creating merged sets from the DAT");
|
||||
|
||||
// For sake of ease, the first thing we want to do is sort by game
|
||||
BucketBy(SortedBy.Game, mergeroms, norename: true);
|
||||
_sortedBy = SortedBy.Default;
|
||||
|
||||
// Now we want to loop through all of the games and set the correct information
|
||||
AddRomsFromChildren();
|
||||
|
||||
// Now that we have looped through the cloneof tags, we loop through the romof tags
|
||||
RemoveBiosRomsFromChild();
|
||||
|
||||
// Finally, remove the romof and cloneof tags so it's not picked up by the manager
|
||||
RemoveTagsFromChild();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Use cloneof tags to create non-merged sets and remove the tags
|
||||
/// </summary>
|
||||
/// <param name="mergeroms">Dedupe type to be used</param>
|
||||
public void CreateNonMergedSets(DedupeType mergeroms)
|
||||
{
|
||||
Globals.Logger.User("Creating non-merged sets from the DAT");
|
||||
|
||||
// For sake of ease, the first thing we want to do is sort by game
|
||||
BucketBy(SortedBy.Game, mergeroms, norename: true);
|
||||
_sortedBy = SortedBy.Default;
|
||||
|
||||
// Now we want to loop through all of the games and set the correct information
|
||||
AddRomsFromParent();
|
||||
|
||||
// Now that we have looped through the cloneof tags, we loop through the romof tags
|
||||
RemoveBiosRomsFromChild();
|
||||
|
||||
// Finally, remove the romof and cloneof tags so it's not picked up by the manager
|
||||
RemoveTagsFromChild();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Use cloneof and romof tags to create split sets and remove the tags
|
||||
/// </summary>
|
||||
/// <param name="mergeroms">Dedupe type to be used</param>
|
||||
public void CreateSplitSets(DedupeType mergeroms)
|
||||
{
|
||||
Globals.Logger.User("Creating split sets from the DAT");
|
||||
|
||||
// For sake of ease, the first thing we want to do is sort by game
|
||||
BucketBy(SortedBy.Game, mergeroms, norename: true);
|
||||
_sortedBy = SortedBy.Default;
|
||||
|
||||
// Now we want to loop through all of the games and set the correct information
|
||||
RemoveRomsFromChild();
|
||||
|
||||
// Now that we have looped through the cloneof tags, we loop through the romof tags
|
||||
RemoveBiosRomsFromChild();
|
||||
|
||||
// Finally, remove the romof and cloneof tags so it's not picked up by the manager
|
||||
RemoveTagsFromChild();
|
||||
}
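// Illustrative dispatch sketch, not part of the original file: a caller that has already
// resolved the requested SplitType could route to the four methods above roughly as shown.
// The SplitType member names used here are assumptions based on the modes handled above.
//
//     switch (splitType)
//     {
//         case SplitType.NonMerged:      CreateNonMergedSets(DedupeType.None);      break;
//         case SplitType.Merged:         CreateMergedSets(DedupeType.None);         break;
//         case SplitType.FullNonMerged:  CreateFullyNonMergedSets(DedupeType.None); break;
//         case SplitType.Split:          CreateSplitSets(DedupeType.None);          break;
//     }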
|
||||
|
||||
#endregion
|
||||
|
||||
#region Merging/Splitting Helper Methods
|
||||
|
||||
/// <summary>
|
||||
/// Use romof tags to add roms to the children
|
||||
/// </summary>
|
||||
private void AddRomsFromBios()
|
||||
{
|
||||
List<string> games = Keys.ToList();
|
||||
foreach (string game in games)
|
||||
{
|
||||
// If the game has no items in it, we want to continue
|
||||
if (this[game].Count == 0)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// Determine if the game has a parent or not
|
||||
string parent = null;
|
||||
if (!String.IsNullOrEmpty(this[game][0].RomOf))
|
||||
{
|
||||
parent = this[game][0].RomOf;
|
||||
}
|
||||
|
||||
// If the parent doesn't exist, we want to continue
|
||||
if (String.IsNullOrEmpty(parent))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// If the parent doesn't have any items, we want to continue
|
||||
if (this[parent].Count == 0)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// If the parent exists and has items, we copy the items from the parent to the current game
|
||||
DatItem copyFrom = this[game][0];
|
||||
List<DatItem> parentItems = this[parent];
|
||||
foreach (DatItem item in parentItems)
|
||||
{
|
||||
DatItem datItem = (DatItem)item.Clone();
|
||||
datItem.CopyMachineInformation(copyFrom);
|
||||
if (!this[game].Any(i => i.Name == datItem.Name) && !this[game].Contains(datItem))
|
||||
{
|
||||
Add(game, datItem);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Use device_ref tags to add roms to the children
|
||||
/// </summary>
|
||||
private void AddRomsFromDevices()
|
||||
{
|
||||
List<string> games = Keys.ToList();
|
||||
foreach (string game in games)
|
||||
{
|
||||
// If the game has no items or no devices, we continue
if (this[game].Count == 0 || this[game][0].Devices == null || this[game][0].Devices.Count == 0)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// Determine if the game has any devices or not
|
||||
List<string> devices = this[game][0].Devices;
|
||||
foreach (string device in devices)
|
||||
{
|
||||
// If the device doesn't exist then we continue
|
||||
if (this[device].Count == 0)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// Otherwise, copy the items from the device to the current game
|
||||
DatItem copyFrom = this[game][0];
|
||||
List<DatItem> devItems = this[device];
|
||||
foreach (DatItem item in devItems)
|
||||
{
|
||||
DatItem datItem = (DatItem)item.Clone();
|
||||
datItem.CopyMachineInformation(copyFrom);
|
||||
if (!this[game].Any(i => i.Name == datItem.Name) && !this[game].Contains(datItem))
|
||||
{
|
||||
Add(game, datItem);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Use cloneof tags to add roms to the children, setting the new romof tag in the process
|
||||
/// </summary>
|
||||
private void AddRomsFromParent()
|
||||
{
|
||||
List<string> games = Keys.ToList();
|
||||
foreach (string game in games)
|
||||
{
|
||||
// If the game has no items in it, we want to continue
|
||||
if (this[game].Count == 0)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// Determine if the game has a parent or not
|
||||
string parent = null;
|
||||
if (!String.IsNullOrEmpty(this[game][0].CloneOf))
|
||||
{
|
||||
parent = this[game][0].CloneOf;
|
||||
}
|
||||
|
||||
// If the parent doesn't exist, we want to continue
|
||||
if (String.IsNullOrEmpty(parent))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// If the parent doesn't have any items, we want to continue
|
||||
if (this[parent].Count == 0)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// If the parent exists and has items, we copy the items from the parent to the current game
|
||||
DatItem copyFrom = this[game][0];
|
||||
List<DatItem> parentItems = this[parent];
|
||||
foreach (DatItem item in parentItems)
|
||||
{
|
||||
DatItem datItem = (DatItem)item.Clone();
|
||||
datItem.CopyMachineInformation(copyFrom);
|
||||
if (!this[game].Any(i => i.Name == datItem.Name) && !this[game].Contains(datItem))
|
||||
{
|
||||
Add(game, datItem);
|
||||
}
|
||||
}
|
||||
|
||||
// Now we want to get the parent romof tag and put it in each of the items
|
||||
List<DatItem> items = this[game];
|
||||
string romof = this[parent][0].RomOf;
|
||||
foreach (DatItem item in items)
|
||||
{
|
||||
item.RomOf = romof;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Use cloneof tags to add roms to the parents, removing the child sets in the process
|
||||
/// </summary>
|
||||
private void AddRomsFromChildren()
|
||||
{
|
||||
List<string> games = Keys.ToList();
|
||||
foreach (string game in games)
|
||||
{
|
||||
// Determine if the game has a parent or not
|
||||
string parent = null;
|
||||
if (this[game].Count > 0 && !String.IsNullOrEmpty(this[game][0].CloneOf))
|
||||
{
|
||||
parent = this[game][0].CloneOf;
|
||||
}
|
||||
|
||||
// If there is no parent, then we continue
|
||||
if (String.IsNullOrEmpty(parent))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// Otherwise, move the items from the current game to a subfolder of the parent game
|
||||
DatItem copyFrom = this[parent].Count == 0 ? new Rom { MachineName = parent, MachineDescription = parent } : this[parent][0];
|
||||
List<DatItem> items = this[game];
|
||||
foreach (DatItem item in items)
|
||||
{
|
||||
// If the disk doesn't have a valid merge tag OR the merged file doesn't exist in the parent, then add it
|
||||
if (item.Type == ItemType.Disk && (item.MergeTag == null || !this[parent].Select(i => i.Name).Contains(item.MergeTag)))
|
||||
{
|
||||
item.CopyMachineInformation(copyFrom);
|
||||
Add(parent, item);
|
||||
}
|
||||
|
||||
// Otherwise, if the parent doesn't already contain the non-disk, add it
|
||||
else if (item.Type != ItemType.Disk && !this[parent].Contains(item))
|
||||
{
|
||||
// Rename the child so it sits in a subfolder named after its own machine
item.Name = item.MachineName + "\\" + item.Name;
|
||||
|
||||
// Update the machine to be the new parent
|
||||
item.CopyMachineInformation(copyFrom);
|
||||
|
||||
// Add the rom to the parent set
|
||||
Add(parent, item);
|
||||
}
|
||||
}
|
||||
|
||||
// Then, remove the old game so it's not picked up by the writer
|
||||
Remove(game);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Remove all BIOS and device sets
|
||||
/// </summary>
|
||||
private void RemoveBiosAndDeviceSets()
|
||||
{
|
||||
List<string> games = Keys.ToList();
|
||||
foreach (string game in games)
|
||||
{
|
||||
if (this[game].Count > 0
|
||||
&& (this[game][0].MachineType == MachineType.Bios
|
||||
|| this[game][0].MachineType == MachineType.Device))
|
||||
{
|
||||
Remove(game);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Use romof tags to remove roms from the children
|
||||
/// </summary>
|
||||
private void RemoveBiosRomsFromChild()
|
||||
{
|
||||
// Loop through the romof tags
|
||||
List<string> games = Keys.ToList();
|
||||
foreach (string game in games)
|
||||
{
|
||||
// If the game has no items in it, we want to continue
|
||||
if (this[game].Count == 0)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// Determine if the game has a parent or not
|
||||
string parent = null;
|
||||
if (!String.IsNullOrEmpty(this[game][0].RomOf))
|
||||
{
|
||||
parent = this[game][0].RomOf;
|
||||
}
|
||||
|
||||
// If the parent doesn't exist, we want to continue
|
||||
if (String.IsNullOrEmpty(parent))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// If the parent doesn't have any items, we want to continue
|
||||
if (this[parent].Count == 0)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// If the parent exists and has items, we remove the items that are in the parent from the current game
|
||||
List<DatItem> parentItems = this[parent];
|
||||
foreach (DatItem item in parentItems)
|
||||
{
|
||||
DatItem datItem = (DatItem)item.Clone();
|
||||
Remove(game, datItem);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Use cloneof tags to remove roms from the children
|
||||
/// </summary>
|
||||
private void RemoveRomsFromChild()
|
||||
{
|
||||
List<string> games = Keys.ToList();
|
||||
foreach (string game in games)
|
||||
{
|
||||
// If the game has no items in it, we want to continue
|
||||
if (this[game].Count == 0)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// Determine if the game has a parent or not
|
||||
string parent = null;
|
||||
if (!String.IsNullOrEmpty(this[game][0].CloneOf))
|
||||
{
|
||||
parent = this[game][0].CloneOf;
|
||||
}
|
||||
|
||||
// If the parent doesn't exist, we want to continue
|
||||
if (String.IsNullOrEmpty(parent))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// If the parent doesn't have any items, we want to continue
|
||||
if (this[parent].Count == 0)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// If the parent exists and has items, we copy the items from the parent to the current game
|
||||
List<DatItem> parentItems = this[parent];
|
||||
foreach (DatItem item in parentItems)
|
||||
{
|
||||
DatItem datItem = (DatItem)item.Clone();
|
||||
Remove(game, datItem);
|
||||
}
|
||||
|
||||
// Now we want to get the parent romof tag and put it in each of the items
|
||||
List<DatItem> items = this[game];
|
||||
string romof = this[parent][0].RomOf;
|
||||
foreach (DatItem item in items)
|
||||
{
|
||||
item.RomOf = romof;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Remove all romof and cloneof tags from all games
|
||||
/// </summary>
|
||||
private void RemoveTagsFromChild()
|
||||
{
|
||||
List<string> games = Keys.ToList();
|
||||
foreach (string game in games)
|
||||
{
|
||||
List<DatItem> items = this[game];
|
||||
foreach (DatItem item in items)
|
||||
{
|
||||
item.CloneOf = null;
|
||||
item.RomOf = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#endregion // Instance Methods
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
@@ -1,636 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using System.Web;
|
||||
|
||||
using SabreTools.Library.Data;
|
||||
using SabreTools.Library.Items;
|
||||
using SabreTools.Library.Tools;
|
||||
|
||||
#if MONO
|
||||
using System.IO;
|
||||
#else
|
||||
using Alphaleonis.Win32.Filesystem;
|
||||
#endif
|
||||
using NaturalSort;
|
||||
|
||||
namespace SabreTools.Library.DatFiles
|
||||
{
|
||||
/// <summary>
|
||||
/// Represents a format-agnostic DAT
|
||||
/// </summary>
|
||||
public partial class DatFile
|
||||
{
|
||||
#region Splitting
|
||||
|
||||
/// <summary>
|
||||
/// Split a DAT by input extensions
|
||||
/// </summary>
|
||||
/// <param name="outDir">Name of the directory to write the DATs out to</param>
|
||||
/// <param name="basepath">Parent path for replacement</param>
|
||||
/// <param name="extA">List of extensions to split on (first DAT)</param>
|
||||
/// <param name="extB">List of extensions to split on (second DAT)</param>
|
||||
/// <returns>True if split succeeded, false otherwise</returns>
|
||||
public bool SplitByExt(string outDir, string basepath, List<string> extA, List<string> extB)
|
||||
{
|
||||
// Make sure all of the extensions have a dot at the beginning
|
||||
List<string> newExtA = new List<string>();
|
||||
foreach (string s in extA)
|
||||
{
|
||||
newExtA.Add((s.StartsWith(".") ? s : "." + s).ToUpperInvariant());
|
||||
}
|
||||
string newExtAString = string.Join(",", newExtA);
|
||||
|
||||
List<string> newExtB = new List<string>();
|
||||
foreach (string s in extB)
|
||||
{
|
||||
newExtB.Add((s.StartsWith(".") ? s : "." + s).ToUpperInvariant());
|
||||
}
|
||||
string newExtBString = string.Join(",", newExtB);
|
||||
|
||||
// Set all of the appropriate outputs for each of the subsets
|
||||
DatFile datdataA = new DatFile
|
||||
{
|
||||
FileName = this.FileName + " (" + newExtAString + ")",
|
||||
Name = this.Name + " (" + newExtAString + ")",
|
||||
Description = this.Description + " (" + newExtAString + ")",
|
||||
Category = this.Category,
|
||||
Version = this.Version,
|
||||
Date = this.Date,
|
||||
Author = this.Author,
|
||||
Email = this.Email,
|
||||
Homepage = this.Homepage,
|
||||
Url = this.Url,
|
||||
Comment = this.Comment,
|
||||
DatFormat = this.DatFormat,
|
||||
};
|
||||
DatFile datdataB = new DatFile
|
||||
{
|
||||
FileName = this.FileName + " (" + newExtBString + ")",
|
||||
Name = this.Name + " (" + newExtBString + ")",
|
||||
Description = this.Description + " (" + newExtBString + ")",
|
||||
Category = this.Category,
|
||||
Version = this.Version,
|
||||
Date = this.Date,
|
||||
Author = this.Author,
|
||||
Email = this.Email,
|
||||
Homepage = this.Homepage,
|
||||
Url = this.Url,
|
||||
Comment = this.Comment,
|
||||
DatFormat = this.DatFormat,
|
||||
};
|
||||
|
||||
// If roms is empty, return false
|
||||
if (Count == 0)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// Now separate the roms accordingly
|
||||
List<string> keys = Keys.ToList();
|
||||
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
|
||||
{
|
||||
List<DatItem> items = this[key];
|
||||
foreach (DatItem item in items)
|
||||
{
|
||||
if (newExtA.Contains(Path.GetExtension(item.Name.ToUpperInvariant())))
|
||||
{
|
||||
datdataA.Add(key, item);
|
||||
}
|
||||
else if (newExtB.Contains(Path.GetExtension(item.Name.ToUpperInvariant())))
|
||||
{
|
||||
datdataB.Add(key, item);
|
||||
}
|
||||
else
|
||||
{
|
||||
datdataA.Add(key, item);
|
||||
datdataB.Add(key, item);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Get the output directory
|
||||
if (outDir != "")
|
||||
{
|
||||
outDir = outDir + Path.GetDirectoryName(this.FileName).Remove(0, basepath.Length - 1);
|
||||
}
|
||||
else
|
||||
{
|
||||
outDir = Path.GetDirectoryName(this.FileName);
|
||||
}
|
||||
|
||||
// Then write out both files
|
||||
bool success = datdataA.WriteToFile(outDir);
|
||||
success &= datdataB.WriteToFile(outDir);
|
||||
|
||||
return success;
|
||||
}
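// Illustrative usage sketch, not part of the original file: splitting a parsed DAT into a
// disc-image DAT and a ROM DAT by extension. Paths and extension lists are hypothetical;
// extensions may be passed with or without the leading dot since both forms are normalized above.
//
//     DatFile datFile = new DatFile();
//     datFile.Parse(@"C:\dats\example.dat", 0, 0);
//     datFile.SplitByExt(@"C:\dats\out", @"C:\dats",
//         new List<string> { "chd", "iso" },
//         new List<string> { "rom", "bin" });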
|
||||
|
||||
/// <summary>
|
||||
/// Split a DAT by best available hashes
|
||||
/// </summary>
|
||||
/// <param name="outDir">Name of the directory to write the DATs out to</param>
|
||||
/// <param name="basepath">Parent path for replacement</param>
|
||||
/// <returns>True if split succeeded, false otherwise</returns>
|
||||
public bool SplitByHash(string outDir, string basepath)
|
||||
{
|
||||
// Sanitize the basepath to be more predictable
|
||||
basepath = (basepath.EndsWith(Path.DirectorySeparatorChar.ToString()) ? basepath : basepath + Path.DirectorySeparatorChar);
|
||||
|
||||
// Create each of the respective output DATs
|
||||
Globals.Logger.User("Creating and populating new DATs");
|
||||
DatFile nodump = new DatFile
|
||||
{
|
||||
FileName = this.FileName + " (Nodump)",
|
||||
Name = this.Name + " (Nodump)",
|
||||
Description = this.Description + " (Nodump)",
|
||||
Category = this.Category,
|
||||
Version = this.Version,
|
||||
Date = this.Date,
|
||||
Author = this.Author,
|
||||
Email = this.Email,
|
||||
Homepage = this.Homepage,
|
||||
Url = this.Url,
|
||||
Comment = this.Comment,
|
||||
Header = this.Header,
|
||||
Type = this.Type,
|
||||
ForceMerging = this.ForceMerging,
|
||||
ForceNodump = this.ForceNodump,
|
||||
ForcePacking = this.ForcePacking,
|
||||
DatFormat = this.DatFormat,
|
||||
DedupeRoms = this.DedupeRoms,
|
||||
};
|
||||
DatFile sha512 = new DatFile
|
||||
{
|
||||
FileName = this.FileName + " (SHA-512)",
|
||||
Name = this.Name + " (SHA-512)",
|
||||
Description = this.Description + " (SHA-512)",
|
||||
Category = this.Category,
|
||||
Version = this.Version,
|
||||
Date = this.Date,
|
||||
Author = this.Author,
|
||||
Email = this.Email,
|
||||
Homepage = this.Homepage,
|
||||
Url = this.Url,
|
||||
Comment = this.Comment,
|
||||
Header = this.Header,
|
||||
Type = this.Type,
|
||||
ForceMerging = this.ForceMerging,
|
||||
ForceNodump = this.ForceNodump,
|
||||
ForcePacking = this.ForcePacking,
|
||||
DatFormat = this.DatFormat,
|
||||
DedupeRoms = this.DedupeRoms,
|
||||
};
|
||||
DatFile sha384 = new DatFile
|
||||
{
|
||||
FileName = this.FileName + " (SHA-384)",
|
||||
Name = this.Name + " (SHA-384)",
|
||||
Description = this.Description + " (SHA-384)",
|
||||
Category = this.Category,
|
||||
Version = this.Version,
|
||||
Date = this.Date,
|
||||
Author = this.Author,
|
||||
Email = this.Email,
|
||||
Homepage = this.Homepage,
|
||||
Url = this.Url,
|
||||
Comment = this.Comment,
|
||||
Header = this.Header,
|
||||
Type = this.Type,
|
||||
ForceMerging = this.ForceMerging,
|
||||
ForceNodump = this.ForceNodump,
|
||||
ForcePacking = this.ForcePacking,
|
||||
DatFormat = this.DatFormat,
|
||||
DedupeRoms = this.DedupeRoms,
|
||||
};
|
||||
DatFile sha256 = new DatFile
|
||||
{
|
||||
FileName = this.FileName + " (SHA-256)",
|
||||
Name = this.Name + " (SHA-256)",
|
||||
Description = this.Description + " (SHA-256)",
|
||||
Category = this.Category,
|
||||
Version = this.Version,
|
||||
Date = this.Date,
|
||||
Author = this.Author,
|
||||
Email = this.Email,
|
||||
Homepage = this.Homepage,
|
||||
Url = this.Url,
|
||||
Comment = this.Comment,
|
||||
Header = this.Header,
|
||||
Type = this.Type,
|
||||
ForceMerging = this.ForceMerging,
|
||||
ForceNodump = this.ForceNodump,
|
||||
ForcePacking = this.ForcePacking,
|
||||
DatFormat = this.DatFormat,
|
||||
DedupeRoms = this.DedupeRoms,
|
||||
};
|
||||
DatFile sha1 = new DatFile
|
||||
{
|
||||
FileName = this.FileName + " (SHA-1)",
|
||||
Name = this.Name + " (SHA-1)",
|
||||
Description = this.Description + " (SHA-1)",
|
||||
Category = this.Category,
|
||||
Version = this.Version,
|
||||
Date = this.Date,
|
||||
Author = this.Author,
|
||||
Email = this.Email,
|
||||
Homepage = this.Homepage,
|
||||
Url = this.Url,
|
||||
Comment = this.Comment,
|
||||
Header = this.Header,
|
||||
Type = this.Type,
|
||||
ForceMerging = this.ForceMerging,
|
||||
ForceNodump = this.ForceNodump,
|
||||
ForcePacking = this.ForcePacking,
|
||||
DatFormat = this.DatFormat,
|
||||
DedupeRoms = this.DedupeRoms,
|
||||
};
|
||||
DatFile md5 = new DatFile
|
||||
{
|
||||
FileName = this.FileName + " (MD5)",
|
||||
Name = this.Name + " (MD5)",
|
||||
Description = this.Description + " (MD5)",
|
||||
Category = this.Category,
|
||||
Version = this.Version,
|
||||
Date = this.Date,
|
||||
Author = this.Author,
|
||||
Email = this.Email,
|
||||
Homepage = this.Homepage,
|
||||
Url = this.Url,
|
||||
Comment = this.Comment,
|
||||
Header = this.Header,
|
||||
Type = this.Type,
|
||||
ForceMerging = this.ForceMerging,
|
||||
ForceNodump = this.ForceNodump,
|
||||
ForcePacking = this.ForcePacking,
|
||||
DatFormat = this.DatFormat,
|
||||
DedupeRoms = this.DedupeRoms,
|
||||
};
|
||||
DatFile crc = new DatFile
|
||||
{
|
||||
FileName = this.FileName + " (CRC)",
|
||||
Name = this.Name + " (CRC)",
|
||||
Description = this.Description + " (CRC)",
|
||||
Category = this.Category,
|
||||
Version = this.Version,
|
||||
Date = this.Date,
|
||||
Author = this.Author,
|
||||
Email = this.Email,
|
||||
Homepage = this.Homepage,
|
||||
Url = this.Url,
|
||||
Comment = this.Comment,
|
||||
Header = this.Header,
|
||||
Type = this.Type,
|
||||
ForceMerging = this.ForceMerging,
|
||||
ForceNodump = this.ForceNodump,
|
||||
ForcePacking = this.ForcePacking,
|
||||
DatFormat = this.DatFormat,
|
||||
DedupeRoms = this.DedupeRoms,
|
||||
};
|
||||
DatFile other = new DatFile
|
||||
{
|
||||
FileName = this.FileName + " (Other)",
|
||||
Name = this.Name + " (Other)",
|
||||
Description = this.Description + " (Other)",
|
||||
Category = this.Category,
|
||||
Version = this.Version,
|
||||
Date = this.Date,
|
||||
Author = this.Author,
|
||||
Email = this.Email,
|
||||
Homepage = this.Homepage,
|
||||
Url = this.Url,
|
||||
Comment = this.Comment,
|
||||
Header = this.Header,
|
||||
Type = this.Type,
|
||||
ForceMerging = this.ForceMerging,
|
||||
ForceNodump = this.ForceNodump,
|
||||
ForcePacking = this.ForcePacking,
|
||||
DatFormat = this.DatFormat,
|
||||
DedupeRoms = this.DedupeRoms,
|
||||
};
|
||||
|
||||
// Now populate each of the DAT objects in turn
|
||||
List<string> keys = Keys.ToList();
|
||||
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
|
||||
{
|
||||
List<DatItem> items = this[key];
|
||||
foreach (DatItem item in items)
|
||||
{
|
||||
// If the file is not a Rom or Disk, continue
|
||||
if (item.Type != ItemType.Disk && item.Type != ItemType.Rom)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// If the file is a nodump
|
||||
if ((item.Type == ItemType.Rom && ((Rom)item).ItemStatus == ItemStatus.Nodump)
|
||||
|| (item.Type == ItemType.Disk && ((Disk)item).ItemStatus == ItemStatus.Nodump))
|
||||
{
|
||||
nodump.Add(key, item);
|
||||
}
|
||||
// If the file has a SHA-512
|
||||
else if ((item.Type == ItemType.Rom && !String.IsNullOrEmpty(((Rom)item).SHA512))
|
||||
|| (item.Type == ItemType.Disk && !String.IsNullOrEmpty(((Disk)item).SHA512)))
|
||||
{
|
||||
sha512.Add(key, item);
|
||||
}
|
||||
// If the file has a SHA-384
|
||||
else if ((item.Type == ItemType.Rom && !String.IsNullOrEmpty(((Rom)item).SHA384))
|
||||
|| (item.Type == ItemType.Disk && !String.IsNullOrEmpty(((Disk)item).SHA384)))
|
||||
{
|
||||
sha384.Add(key, item);
|
||||
}
|
||||
// If the file has a SHA-256
|
||||
else if ((item.Type == ItemType.Rom && !String.IsNullOrEmpty(((Rom)item).SHA256))
|
||||
|| (item.Type == ItemType.Disk && !String.IsNullOrEmpty(((Disk)item).SHA256)))
|
||||
{
|
||||
sha256.Add(key, item);
|
||||
}
|
||||
// If the file has a SHA-1
|
||||
else if ((item.Type == ItemType.Rom && !String.IsNullOrEmpty(((Rom)item).SHA1))
|
||||
|| (item.Type == ItemType.Disk && !String.IsNullOrEmpty(((Disk)item).SHA1)))
|
||||
{
|
||||
sha1.Add(key, item);
|
||||
}
|
||||
// If the file has no SHA-1 but has an MD5
|
||||
else if ((item.Type == ItemType.Rom && !String.IsNullOrEmpty(((Rom)item).MD5))
|
||||
|| (item.Type == ItemType.Disk && !String.IsNullOrEmpty(((Disk)item).MD5)))
|
||||
{
|
||||
md5.Add(key, item);
|
||||
}
|
||||
// If the file has no MD5 but a CRC
else if (item.Type == ItemType.Rom && !String.IsNullOrEmpty(((Rom)item).CRC))
|
||||
{
|
||||
crc.Add(key, item);
|
||||
}
|
||||
else
|
||||
{
|
||||
other.Add(key, item);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Get the output directory
|
||||
if (outDir != "")
|
||||
{
|
||||
outDir = outDir + Path.GetDirectoryName(this.FileName).Remove(0, basepath.Length - 1);
|
||||
}
|
||||
else
|
||||
{
|
||||
outDir = Path.GetDirectoryName(this.FileName);
|
||||
}
|
||||
|
||||
// Now, output all of the files to the output directory
|
||||
Globals.Logger.User("DAT information created, outputting new files");
|
||||
bool success = true;
|
||||
success &= nodump.WriteToFile(outDir);
|
||||
success &= sha512.WriteToFile(outDir);
|
||||
success &= sha384.WriteToFile(outDir);
|
||||
success &= sha256.WriteToFile(outDir);
|
||||
success &= sha1.WriteToFile(outDir);
|
||||
success &= md5.WriteToFile(outDir);
|
||||
success &= crc.WriteToFile(outDir);
|
||||
|
||||
return success;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Split a SuperDAT by lowest available directory level
|
||||
/// </summary>
|
||||
/// <param name="outDir">Name of the directory to write the DATs out to</param>
|
||||
/// <param name="basepath">Parent path for replacement</param>
|
||||
/// <param name="shortname">True if short names should be used, false otherwise</param>
|
||||
/// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise</param>
|
||||
/// <returns>True if split succeeded, false otherwise</returns>
|
||||
public bool SplitByLevel(string outDir, string basepath, bool shortname, bool basedat)
|
||||
{
|
||||
// Sanitize the basepath to be more predictable
|
||||
basepath = (basepath.EndsWith(Path.DirectorySeparatorChar.ToString()) ? basepath : basepath + Path.DirectorySeparatorChar);
|
||||
|
||||
// First, organize by games so that we can do the right thing
|
||||
BucketBy(SortedBy.Game, DedupeType.None, lower: false, norename: true);
|
||||
|
||||
// Create a temporary DAT to add things to
|
||||
DatFile tempDat = new DatFile(this)
|
||||
{
|
||||
Name = null,
|
||||
};
|
||||
|
||||
// Sort the input keys
|
||||
List<string> keys = Keys.ToList();
|
||||
keys.Sort(SplitByLevelSort);
|
||||
|
||||
// Then, we loop over the games in sorted order, grouping by parent directory
foreach (string key in keys)
|
||||
{
|
||||
// Here, the key is the name of the game to be used for comparison
|
||||
if (tempDat.Name != null && tempDat.Name != Style.GetDirectoryName(key))
|
||||
{
|
||||
// Process and output the DAT
|
||||
SplitByLevelHelper(tempDat, outDir, shortname, basedat);
|
||||
|
||||
// Reset the DAT for the next items
|
||||
tempDat = new DatFile(this)
|
||||
{
|
||||
Name = null,
|
||||
};
|
||||
}
|
||||
|
||||
// Clean the input list and set all games to be pathless
|
||||
List<DatItem> items = this[key];
|
||||
items.ForEach(item => item.MachineName = Style.GetFileName(item.MachineName));
|
||||
items.ForEach(item => item.MachineDescription = Style.GetFileName(item.MachineDescription));
|
||||
|
||||
// Now add the game to the output DAT
|
||||
tempDat.AddRange(key, items);
|
||||
|
||||
// Then set the DAT name to be the parent directory name
|
||||
tempDat.Name = Style.GetDirectoryName(key);
|
||||
}
|
||||
|
||||
// Then we write the last DAT out since it would be skipped otherwise
|
||||
SplitByLevelHelper(tempDat, outDir, shortname, basedat);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Helper function for SplitByLevel to sort the input game names
|
||||
/// </summary>
|
||||
/// <param name="a">First string to compare</param>
|
||||
/// <param name="b">Second string to compare</param>
|
||||
/// <returns>-1 for a coming before b, 0 for a == b, 1 for a coming after b</returns>
|
||||
private int SplitByLevelSort(string a, string b)
|
||||
{
|
||||
NaturalComparer nc = new NaturalComparer();
|
||||
int adeep = a.Count(c => c == '/' || c == '\\');
|
||||
int bdeep = b.Count(c => c == '/' || c == '\\');
|
||||
|
||||
if (adeep == bdeep)
|
||||
{
|
||||
return nc.Compare(a, b);
|
||||
}
|
||||
return adeep - bdeep;
|
||||
}
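// Worked example, not part of the original file: depth is compared before name, so a
// shallower key always sorts first. Given the keys { "z", "a\\b", "a\\c" }, the order is
// "z" (depth 0), then "a\\b" and "a\\c" (depth 1), with the tie broken by NaturalComparer.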
|
||||
|
||||
/// <summary>
|
||||
/// Helper function for SplitByLevel to clean and write out a DAT
|
||||
/// </summary>
|
||||
/// <param name="datFile">DAT to clean and write out</param>
|
||||
/// <param name="outDir">Directory to write out to</param>
|
||||
/// <param name="shortname">True if short naming scheme should be used, false otherwise</param>
|
||||
/// <param name="restore">True if original filenames should be used as the base for output filename, false otherwise</param>
|
||||
private void SplitByLevelHelper(DatFile datFile, string outDir, bool shortname, bool restore)
|
||||
{
|
||||
// Get the name from the DAT to use separately
|
||||
string name = datFile.Name;
|
||||
string expName = name.Replace("/", " - ").Replace("\\", " - ");
|
||||
|
||||
// Get the path that the file will be written out to
|
||||
string path = HttpUtility.HtmlDecode(String.IsNullOrEmpty(name)
|
||||
? outDir
|
||||
: Path.Combine(outDir, name));
|
||||
|
||||
// Now set the new output values
|
||||
datFile.FileName = HttpUtility.HtmlDecode(String.IsNullOrEmpty(name)
|
||||
? FileName
|
||||
: (shortname
|
||||
? Style.GetFileName(name)
|
||||
: expName
|
||||
)
|
||||
);
|
||||
datFile.FileName = (restore ? FileName + " (" + datFile.FileName + ")" : datFile.FileName);
|
||||
datFile.Name = Name + " (" + expName + ")";
|
||||
datFile.Description = (String.IsNullOrEmpty(Description) ? datFile.Name : Description + " (" + expName + ")");
|
||||
datFile.Type = null;
|
||||
|
||||
// Write out the temporary DAT to the proper directory
|
||||
datFile.WriteToFile(path);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Split a DAT by type of Rom
|
||||
/// </summary>
|
||||
/// <param name="outDir">Name of the directory to write the DATs out to</param>
|
||||
/// <param name="basepath">Parent path for replacement</param>
|
||||
/// <returns>True if split succeeded, false otherwise</returns>
|
||||
public bool SplitByType(string outDir, string basepath)
|
||||
{
|
||||
// Sanitize the basepath to be more predictable
|
||||
basepath = (basepath.EndsWith(Path.DirectorySeparatorChar.ToString()) ? basepath : basepath + Path.DirectorySeparatorChar);
|
||||
|
||||
// Create each of the respective output DATs
|
||||
Globals.Logger.User("Creating and populating new DATs");
|
||||
DatFile romdat = new DatFile
|
||||
{
|
||||
FileName = this.FileName + " (ROM)",
|
||||
Name = this.Name + " (ROM)",
|
||||
Description = this.Description + " (ROM)",
|
||||
Category = this.Category,
|
||||
Version = this.Version,
|
||||
Date = this.Date,
|
||||
Author = this.Author,
|
||||
Email = this.Email,
|
||||
Homepage = this.Homepage,
|
||||
Url = this.Url,
|
||||
Comment = this.Comment,
|
||||
Header = this.Header,
|
||||
Type = this.Type,
|
||||
ForceMerging = this.ForceMerging,
|
||||
ForceNodump = this.ForceNodump,
|
||||
ForcePacking = this.ForcePacking,
|
||||
DatFormat = this.DatFormat,
|
||||
DedupeRoms = this.DedupeRoms,
|
||||
};
|
||||
DatFile diskdat = new DatFile
|
||||
{
|
||||
FileName = this.FileName + " (Disk)",
|
||||
Name = this.Name + " (Disk)",
|
||||
Description = this.Description + " (Disk)",
|
||||
Category = this.Category,
|
||||
Version = this.Version,
|
||||
Date = this.Date,
|
||||
Author = this.Author,
|
||||
Email = this.Email,
|
||||
Homepage = this.Homepage,
|
||||
Url = this.Url,
|
||||
Comment = this.Comment,
|
||||
Header = this.Header,
|
||||
Type = this.Type,
|
||||
ForceMerging = this.ForceMerging,
|
||||
ForceNodump = this.ForceNodump,
|
||||
ForcePacking = this.ForcePacking,
|
||||
DatFormat = this.DatFormat,
|
||||
DedupeRoms = this.DedupeRoms,
|
||||
};
|
||||
DatFile sampledat = new DatFile
|
||||
{
|
||||
FileName = this.FileName + " (Sample)",
|
||||
Name = this.Name + " (Sample)",
|
||||
Description = this.Description + " (Sample)",
|
||||
Category = this.Category,
|
||||
Version = this.Version,
|
||||
Date = this.Date,
|
||||
Author = this.Author,
|
||||
Email = this.Email,
|
||||
Homepage = this.Homepage,
|
||||
Url = this.Url,
|
||||
Comment = this.Comment,
|
||||
Header = this.Header,
|
||||
Type = this.Type,
|
||||
ForceMerging = this.ForceMerging,
|
||||
ForceNodump = this.ForceNodump,
|
||||
ForcePacking = this.ForcePacking,
|
||||
DatFormat = this.DatFormat,
|
||||
DedupeRoms = this.DedupeRoms,
|
||||
};
|
||||
|
||||
// Now populate each of the DAT objects in turn
|
||||
List<string> keys = Keys.ToList();
|
||||
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
|
||||
{
|
||||
List<DatItem> items = this[key];
|
||||
foreach(DatItem item in items)
|
||||
{
|
||||
// If the file is a Rom
|
||||
if (item.Type == ItemType.Rom)
|
||||
{
|
||||
romdat.Add(key, item);
|
||||
}
|
||||
// If the file is a Disk
|
||||
else if (item.Type == ItemType.Disk)
|
||||
{
|
||||
diskdat.Add(key, item);
|
||||
}
|
||||
// If the file is a Sample
|
||||
else if (item.Type == ItemType.Sample)
|
||||
{
|
||||
sampledat.Add(key, item);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Get the output directory
|
||||
if (outDir != "")
|
||||
{
|
||||
outDir = outDir + Path.GetDirectoryName(this.FileName).Remove(0, basepath.Length - 1);
|
||||
}
|
||||
else
|
||||
{
|
||||
outDir = Path.GetDirectoryName(this.FileName);
|
||||
}
|
||||
|
||||
// Now, output all of the files to the output directory
|
||||
Globals.Logger.User("DAT information created, outputting new files");
|
||||
bool success = true;
|
||||
success &= romdat.WriteToFile(outDir);
|
||||
success &= diskdat.WriteToFile(outDir);
|
||||
success &= sampledat.WriteToFile(outDir);
|
||||
|
||||
return success;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
@@ -1,642 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using System.Web;
|
||||
|
||||
using SabreTools.Library.Data;
|
||||
using SabreTools.Library.Items;
|
||||
using SabreTools.Library.Tools;
|
||||
|
||||
#if MONO
|
||||
using System.IO;
|
||||
#else
|
||||
using Alphaleonis.Win32.Filesystem;
|
||||
|
||||
using SearchOption = System.IO.SearchOption;
|
||||
using StreamWriter = System.IO.StreamWriter;
|
||||
#endif
|
||||
|
||||
namespace SabreTools.Library.DatFiles
|
||||
{
|
||||
/// <summary>
|
||||
/// Represents a format-agnostic DAT
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// TODO: Make output standard width (HTML, without making the entire thing a table)
|
||||
/// TODO: Multithreading? Either StringBuilder or locking
|
||||
/// </remarks>
|
||||
public partial class DatFile
|
||||
{
|
||||
#region Instance Methods
|
||||
|
||||
#region Statistics
|
||||
|
||||
/// <summary>
|
||||
/// Recalculate the statistics for the Dat
|
||||
/// </summary>
|
||||
public void RecalculateStats()
|
||||
{
|
||||
// Wipe out any stats already there
|
||||
_datStats.Reset();
|
||||
|
||||
// If we have a blank Dat in any way, return
|
||||
if (this == null || Count == 0)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// Loop through and add
|
||||
List<string> keys = Keys.ToList();
|
||||
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
|
||||
{
|
||||
List<DatItem> items = this[key];
|
||||
foreach(DatItem item in items)
|
||||
{
|
||||
_datStats.AddItem(item);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Output the stats for the Dat in a human-readable format
|
||||
/// </summary>
|
||||
/// <param name="outputs">Dictionary representing the outputs</param>
|
||||
/// <param name="statDatFormat">Set the statistics output format to use</param>
|
||||
/// <param name="recalculate">True if numbers should be recalculated for the DAT, false otherwise (default)</param>
|
||||
/// <param name="game">Number of games to use, -1 means recalculate games (default)</param>
|
||||
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise (default)</param>
|
||||
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise (default)</param>
|
||||
public void OutputStats(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat,
|
||||
bool recalculate = false, long game = -1, bool baddumpCol = false, bool nodumpCol = false)
|
||||
{
|
||||
// If we're supposed to recalculate the statistics, do so
|
||||
if (recalculate)
|
||||
{
|
||||
RecalculateStats();
|
||||
}
|
||||
|
||||
BucketBy(SortedBy.Game, DedupeType.None, norename: true);
|
||||
if (TotalSize < 0)
|
||||
{
|
||||
TotalSize = Int64.MaxValue + TotalSize;
|
||||
}
|
||||
|
||||
// Log the results to screen
|
||||
string results = @"For '" + FileName + @"':
|
||||
--------------------------------------------------
|
||||
Uncompressed size: " + Style.GetBytesReadable(TotalSize) + @"
|
||||
Games found: " + (game == -1 ? Keys.Count() : game) + @"
|
||||
Roms found: " + RomCount + @"
|
||||
Disks found: " + DiskCount + @"
|
||||
Roms with CRC: " + CRCCount + @"
|
||||
Roms with MD5: " + MD5Count + @"
|
||||
Roms with SHA-1: " + SHA1Count + @"
|
||||
Roms with SHA-256: " + SHA256Count + @"
|
||||
Roms with SHA-384: " + SHA384Count + @"
|
||||
Roms with SHA-512: " + SHA512Count + "\n";
|
||||
|
||||
if (baddumpCol)
|
||||
{
|
||||
results += " Roms with BadDump status: " + BaddumpCount + "\n";
|
||||
}
|
||||
if (nodumpCol)
|
||||
{
|
||||
results += " Roms with Nodump status: " + NodumpCount + "\n";
|
||||
}
|
||||
|
||||
// For spacing between DATs
|
||||
results += "\n\n";
|
||||
|
||||
Globals.Logger.User(results);
|
||||
|
||||
// Now write it out to file as well
|
||||
string line = "";
|
||||
if (outputs.ContainsKey(StatDatFormat.None))
|
||||
{
|
||||
line = @"'" + FileName + @"':
|
||||
--------------------------------------------------
|
||||
Uncompressed size: " + Style.GetBytesReadable(TotalSize) + @"
|
||||
Games found: " + (game == -1 ? Keys.Count() : game) + @"
|
||||
Roms found: " + RomCount + @"
|
||||
Disks found: " + DiskCount + @"
|
||||
    Roms with CRC: " + CRCCount + @"
    Roms with MD5: " + MD5Count + @"
    Roms with SHA-1: " + SHA1Count + @"
|
||||
Roms with SHA-256: " + SHA256Count + @"
|
||||
Roms with SHA-384: " + SHA384Count + @"
|
||||
Roms with SHA-512: " + SHA512Count + "\n";
|
||||
|
||||
if (baddumpCol)
|
||||
{
|
||||
line += " Roms with BadDump status: " + BaddumpCount + "\n";
|
||||
}
|
||||
if (nodumpCol)
|
||||
{
|
||||
line += " Roms with Nodump status: " + NodumpCount + "\n";
|
||||
}
|
||||
|
||||
// For spacing between DATs
|
||||
line += "\n\n";
|
||||
|
||||
outputs[StatDatFormat.None].Write(line);
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.CSV))
|
||||
{
|
||||
line = "\"" + FileName + "\","
|
||||
+ "\"" + TotalSize + "\","
|
||||
+ "\"" + (game == -1 ? Keys.Count() : game) + "\","
|
||||
+ "\"" + RomCount + "\","
|
||||
+ "\"" + DiskCount + "\","
|
||||
+ "\"" + CRCCount + "\","
|
||||
+ "\"" + MD5Count + "\","
|
||||
+ "\"" + SHA1Count + "\","
|
||||
+ "\"" + SHA256Count + "\","
|
||||
+ "\"" + SHA384Count + "\","
|
||||
+ "\"" + SHA512Count + "\"";
|
||||
|
||||
if (baddumpCol)
|
||||
{
|
||||
line += ",\"" + BaddumpCount + "\"";
|
||||
}
|
||||
if (nodumpCol)
|
||||
{
|
||||
line += ",\"" + NodumpCount + "\"";
|
||||
}
|
||||
|
||||
line += "\n";
|
||||
outputs[StatDatFormat.CSV].Write(line);
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.HTML))
|
||||
{
|
||||
line = "\t\t\t<tr" + (FileName.StartsWith("DIR: ")
|
||||
? " class=\"dir\"><td>" + HttpUtility.HtmlEncode(FileName.Remove(0, 5))
|
||||
: "><td>" + HttpUtility.HtmlEncode(FileName)) + "</td>"
|
||||
+ "<td align=\"right\">" + Style.GetBytesReadable(TotalSize) + "</td>"
|
||||
+ "<td align=\"right\">" + (game == -1 ? Keys.Count() : game) + "</td>"
|
||||
+ "<td align=\"right\">" + RomCount + "</td>"
|
||||
+ "<td align=\"right\">" + DiskCount + "</td>"
|
||||
+ "<td align=\"right\">" + CRCCount + "</td>"
|
||||
+ "<td align=\"right\">" + MD5Count + "</td>"
|
||||
+ "<td align=\"right\">" + SHA1Count + "</td>"
|
||||
+ "<td align=\"right\">" + SHA256Count + "</td>";
|
||||
|
||||
if (baddumpCol)
|
||||
{
|
||||
line += "<td align=\"right\">" + BaddumpCount + "</td>";
|
||||
}
|
||||
if (nodumpCol)
|
||||
{
|
||||
line += "<td align=\"right\">" + NodumpCount + "</td>";
|
||||
}
|
||||
|
||||
line += "</tr>\n";
|
||||
outputs[StatDatFormat.HTML].Write(line);
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.TSV))
|
||||
{
|
||||
line = "\"" + FileName + "\"\t"
|
||||
+ "\"" + TotalSize + "\"\t"
|
||||
+ "\"" + (game == -1 ? Keys.Count() : game) + "\"\t"
|
||||
+ "\"" + RomCount + "\"\t"
|
||||
+ "\"" + DiskCount + "\"\t"
|
||||
+ "\"" + CRCCount + "\"\t"
|
||||
+ "\"" + MD5Count + "\"\t"
|
||||
+ "\"" + SHA1Count + "\"\t"
|
||||
+ "\"" + SHA256Count + "\"\t"
|
||||
+ "\"" + SHA384Count + "\"\t"
|
||||
+ "\"" + SHA512Count + "\"";
|
||||
|
||||
if (baddumpCol)
|
||||
{
|
||||
line += "\t\"" + BaddumpCount + "\"";
|
||||
}
|
||||
if (nodumpCol)
|
||||
{
|
||||
line += "\t\"" + NodumpCount + "\"";
|
||||
}
|
||||
|
||||
line += "\n";
|
||||
outputs[StatDatFormat.TSV].Write(line);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#endregion // Instance Methods
|
||||
|
||||
#region Static Methods
|
||||
|
||||
#region Statistics
|
||||
|
||||
/// <summary>
|
||||
/// Output the stats for a list of input dats as files in a human-readable format
|
||||
/// </summary>
|
||||
/// <param name="inputs">List of input files and folders</param>
|
||||
/// <param name="reportName">Name of the output file</param>
|
||||
/// <param name="single">True if single DAT stats are output, false otherwise</param>
|
||||
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
|
||||
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
|
||||
/// <param name="statDatFormat" > Set the statistics output format to use</param>
|
||||
public static void OutputStats(List<string> inputs, string reportName, string outDir, bool single,
|
||||
bool baddumpCol, bool nodumpCol, StatDatFormat statDatFormat)
|
||||
{
|
||||
// If there's no output format, set the default
|
||||
if (statDatFormat == 0x0)
|
||||
{
|
||||
statDatFormat = StatDatFormat.None;
|
||||
}
|
||||
|
||||
// Get the proper output file name
|
||||
if (String.IsNullOrEmpty(reportName))
|
||||
{
|
||||
reportName = "report";
|
||||
}
|
||||
outDir = Path.GetFullPath(outDir);
|
||||
|
||||
// Get the dictionary of desired outputs
|
||||
Dictionary<StatDatFormat, StreamWriter> outputs = OutputStatsGetOutputWriters(statDatFormat, reportName, outDir);
|
||||
|
||||
// Make sure we have all files
|
||||
List<Tuple<string, string>> newinputs = new List<Tuple<string, string>>(); // item, basepath
|
||||
Parallel.ForEach(inputs, Globals.ParallelOptions, input =>
|
||||
{
|
||||
if (File.Exists(input))
|
||||
{
|
||||
lock (newinputs)
|
||||
{
|
||||
newinputs.Add(Tuple.Create(Path.GetFullPath(input), Path.GetDirectoryName(Path.GetFullPath(input))));
|
||||
}
|
||||
}
|
||||
if (Directory.Exists(input))
|
||||
{
|
||||
foreach (string file in Directory.GetFiles(input, "*", SearchOption.AllDirectories))
|
||||
{
|
||||
lock (newinputs)
|
||||
{
|
||||
newinputs.Add(Tuple.Create(Path.GetFullPath(file), Path.GetFullPath(input)));
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
newinputs = newinputs
|
||||
.OrderBy(i => Path.GetDirectoryName(i.Item1))
|
||||
.ThenBy(i => Path.GetFileName(i.Item1))
|
||||
.ToList();
|
||||
|
||||
// Write the header, if any
|
||||
OutputStatsWriteHeader(outputs, statDatFormat, baddumpCol, nodumpCol);
|
||||
|
||||
// Init all total variables
|
||||
DatStats totalStats = new DatStats();
|
||||
long totalGame = 0;
|
||||
|
||||
// Init directory-level variables
|
||||
string lastdir = null;
|
||||
string basepath = null;
|
||||
DatStats dirStats = new DatStats();
|
||||
long dirGame = 0;
|
||||
|
||||
// Now process each of the input files
|
||||
foreach (Tuple<string, string> filename in newinputs)
|
||||
{
|
||||
// Get the directory for the current file
|
||||
string thisdir = Path.GetDirectoryName(filename.Item1);
|
||||
basepath = Path.GetDirectoryName(filename.Item2);
|
||||
|
||||
// If we don't have the first file and the directory has changed, show the previous directory stats and reset
|
||||
if (lastdir != null && thisdir != lastdir)
|
||||
{
|
||||
// Output separator if needed
|
||||
OutputStatsWriteMidSeparator(outputs, statDatFormat, baddumpCol, nodumpCol);
|
||||
|
||||
DatFile lastdirdat = new DatFile
|
||||
{
|
||||
FileName = "DIR: " + HttpUtility.HtmlEncode(lastdir.Remove(0, basepath.Length + (basepath.Length == 0 ? 0 : 1))),
|
||||
_datStats = dirStats,
|
||||
};
|
||||
|
||||
lastdirdat.OutputStats(outputs, statDatFormat,
|
||||
game: dirGame, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
|
||||
|
||||
// Write the mid-footer, if any
|
||||
OutputStatsWriteMidFooter(outputs, statDatFormat, baddumpCol, nodumpCol);
|
||||
|
||||
// Write the header, if any
|
||||
OutputStatsWriteMidHeader(outputs, statDatFormat, baddumpCol, nodumpCol);
|
||||
|
||||
// Reset the directory stats
|
||||
dirStats.Reset();
|
||||
dirGame = 0;
|
||||
}
|
||||
|
||||
Globals.Logger.Verbose("Beginning stat collection for '{0}'", false, filename.Item1);
|
||||
List<string> games = new List<string>();
|
||||
DatFile datdata = new DatFile();
|
||||
datdata.Parse(filename.Item1, 0, 0);
|
||||
datdata.BucketBy(SortedBy.Game, DedupeType.None, norename: true);
|
||||
|
||||
// Output single DAT stats (if asked)
|
||||
Globals.Logger.User("Adding stats for file '{0}'\n", false, filename.Item1);
|
||||
if (single)
|
||||
{
|
||||
datdata.OutputStats(outputs, statDatFormat,
|
||||
baddumpCol: baddumpCol, nodumpCol: nodumpCol);
|
||||
}
|
||||
|
||||
// Add single DAT stats to dir
|
||||
dirStats.AddStats(datdata._datStats);
|
||||
dirGame += datdata.Keys.Count();
|
||||
|
||||
// Add single DAT stats to totals
|
||||
totalStats.AddStats(datdata._datStats);
|
||||
totalGame += datdata.Keys.Count();
|
||||
|
||||
// Make sure to assign the new directory
|
||||
lastdir = thisdir;
|
||||
}
|
||||
|
||||
// Output the directory stats one last time
|
||||
OutputStatsWriteMidSeparator(outputs, statDatFormat, baddumpCol, nodumpCol);
|
||||
|
||||
if (single)
|
||||
{
|
||||
DatFile dirdat = new DatFile
|
||||
{
|
||||
FileName = "DIR: " + HttpUtility.HtmlEncode(lastdir.Remove(0, basepath.Length + (basepath.Length == 0 ? 0 : 1))),
|
||||
_datStats = dirStats,
|
||||
};
|
||||
|
||||
dirdat.OutputStats(outputs, statDatFormat,
|
||||
game: dirGame, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
|
||||
}
|
||||
|
||||
// Write the mid-footer, if any
|
||||
OutputStatsWriteMidFooter(outputs, statDatFormat, baddumpCol, nodumpCol);
|
||||
|
||||
// Write the header, if any
|
||||
OutputStatsWriteMidHeader(outputs, statDatFormat, baddumpCol, nodumpCol);
|
||||
|
||||
// Reset the directory stats
|
||||
dirStats.Reset();
|
||||
dirGame = 0;
|
||||
|
||||
// Output total DAT stats
|
||||
DatFile totaldata = new DatFile
|
||||
{
|
||||
FileName = "DIR: All DATs",
|
||||
_datStats = totalStats,
|
||||
};
|
||||
|
||||
totaldata.OutputStats(outputs, statDatFormat,
|
||||
game: totalGame, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
|
||||
|
||||
// Output footer if needed
|
||||
OutputStatsWriteFooter(outputs, statDatFormat);
|
||||
|
||||
// Flush and dispose of the stream writers
|
||||
foreach (StatDatFormat format in outputs.Keys)
|
||||
{
|
||||
outputs[format].Flush();
|
||||
outputs[format].Dispose();
|
||||
}
|
||||
|
||||
Globals.Logger.User(@"
|
||||
Please check the log folder if the stats scrolled offscreen", false);
|
||||
}
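// Illustrative usage sketch, not part of the original file: producing both a plain-text and
// an HTML report for every DAT under a folder. The input and output paths are hypothetical.
//
//     DatFile.OutputStats(
//         new List<string> { @"C:\dats" },                 // files and/or folders to scan
//         "report",                                        // base report name
//         @"C:\reports",                                   // output directory
//         single: true,                                    // also emit per-DAT rows
//         baddumpCol: true,
//         nodumpCol: false,
//         statDatFormat: StatDatFormat.None | StatDatFormat.HTML);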
|
||||
|
||||
/// <summary>
|
||||
/// Get the proper set of StreamWriters for the requested stat output formats
|
||||
/// </summary>
|
||||
/// <param name="statDatFormat">StatDatFormat to get the extension for</param>
|
||||
/// <param name="reportName">Name of the input file to use</param>
|
||||
/// <param name="outDir">Output path to use</param>
|
||||
/// <returns>Dictionary of file types to StreamWriters</returns>
|
||||
private static Dictionary<StatDatFormat, StreamWriter> OutputStatsGetOutputWriters(StatDatFormat statDatFormat, string reportName, string outDir)
|
||||
{
|
||||
Dictionary<StatDatFormat, StreamWriter> output = new Dictionary<StatDatFormat, StreamWriter>();
|
||||
|
||||
// First try to create the output directory if we need to
|
||||
if (!Directory.Exists(outDir))
|
||||
{
|
||||
Directory.CreateDirectory(outDir);
|
||||
}
|
||||
|
||||
// For each output format, get the appropriate stream writer
|
||||
if ((statDatFormat & StatDatFormat.None) != 0)
|
||||
{
|
||||
reportName = Style.GetFileNameWithoutExtension(reportName) + ".txt";
|
||||
reportName = Path.Combine(outDir, reportName);
|
||||
|
||||
// Create the StreamWriter for this file
|
||||
output.Add(StatDatFormat.None, new StreamWriter(FileTools.TryCreate(reportName)));
|
||||
}
|
||||
if ((statDatFormat & StatDatFormat.CSV) != 0)
|
||||
{
|
||||
reportName = Style.GetFileNameWithoutExtension(reportName) + ".csv";
|
||||
reportName = Path.Combine(outDir, reportName);
|
||||
|
||||
// Create the StreamWriter for this file
|
||||
output.Add(StatDatFormat.CSV, new StreamWriter(FileTools.TryCreate(reportName)));
|
||||
}
|
||||
if ((statDatFormat & StatDatFormat.HTML) != 0)
|
||||
{
|
||||
reportName = Style.GetFileNameWithoutExtension(reportName) + ".html";
|
||||
reportName = Path.Combine(outDir, reportName);
|
||||
|
||||
// Create the StreamWriter for this file
|
||||
output.Add(StatDatFormat.HTML, new StreamWriter(FileTools.TryCreate(reportName)));
|
||||
}
|
||||
if ((statDatFormat & StatDatFormat.TSV) != 0)
|
||||
{
|
||||
reportName = Style.GetFileNameWithoutExtension(reportName) + ".csv";
|
||||
reportName = Path.Combine(outDir, reportName);
|
||||
|
||||
// Create the StreamWriter for this file
|
||||
output.Add(StatDatFormat.TSV, new StreamWriter(FileTools.TryCreate(reportName)));
|
||||
}
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Write out the header to the stream, if any exists
|
||||
/// </summary>
|
||||
/// <param name="outputs">Dictionary representing the outputs</param>
|
||||
/// <param name="statDatFormat">StatDatFormat representing output format</param>
|
||||
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
|
||||
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
|
||||
private static void OutputStatsWriteHeader(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat, bool baddumpCol, bool nodumpCol)
|
||||
{
|
||||
if (outputs.ContainsKey(StatDatFormat.None))
|
||||
{
|
||||
// Nothing
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.CSV))
|
||||
{
|
||||
outputs[StatDatFormat.CSV].Write("\"File Name\",\"Total Size\",\"Games\",\"Roms\",\"Disks\",\"# with CRC\",\"# with MD5\",\"# with SHA-1\",\"# with SHA-256\""
|
||||
+ (baddumpCol ? ",\"BadDumps\"" : "") + (nodumpCol ? ",\"Nodumps\"" : "") + "\n");
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.HTML))
|
||||
{
|
||||
outputs[StatDatFormat.HTML].Write(@"<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>DAT Statistics Report</title>
|
||||
<style>
|
||||
body {
|
||||
background-color: lightgray;
|
||||
}
|
||||
.dir {
|
||||
color: #0088FF;
|
||||
}
|
||||
.right {
|
||||
text-align: right;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<h2>DAT Statistics Report (" + DateTime.Now.ToShortDateString() + @")</h2>
|
||||
<table border=""1"" cellpadding=""5"" cellspacing=""0"">
|
||||
");
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.TSV))
|
||||
{
|
||||
outputs[StatDatFormat.TSV].Write("\"File Name\"\t\"Total Size\"\t\"Games\"\t\"Roms\"\t\"Disks\"\t\"# with CRC\"\t\"# with MD5\"\t\"# with SHA-1\"\t\"# with SHA-256\""
|
||||
+ (baddumpCol ? "\t\"BadDumps\"" : "") + (nodumpCol ? "\t\"Nodumps\"" : "") + "\n");
|
||||
}
|
||||
|
||||
// Now write the mid header for those who need it
|
||||
OutputStatsWriteMidHeader(outputs, statDatFormat, baddumpCol, nodumpCol);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Write out the mid-header to the stream, if any exists
|
||||
/// </summary>
|
||||
/// <param name="outputs">Dictionary representing the outputs</param>
|
||||
/// <param name="statDatFormat">StatDatFormat representing output format</param>
|
||||
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
|
||||
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
|
||||
private static void OutputStatsWriteMidHeader(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat, bool baddumpCol, bool nodumpCol)
|
||||
{
|
||||
if (outputs.ContainsKey(StatDatFormat.None))
|
||||
{
|
||||
// Nothing
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.CSV))
|
||||
{
|
||||
// Nothing
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.HTML))
|
||||
{
|
||||
outputs[StatDatFormat.HTML].Write(@" <tr bgcolor=""gray""><th>File Name</th><th align=""right"">Total Size</th><th align=""right"">Games</th><th align=""right"">Roms</th>"
|
||||
+ @"<th align=""right"">Disks</th><th align=""right""># with CRC</th><th align=""right""># with MD5</th><th align=""right""># with SHA-1</th><th align=""right""># with SHA-256</th>"
|
||||
+ (baddumpCol ? "<th class=\".right\">Baddumps</th>" : "") + (nodumpCol ? "<th class=\".right\">Nodumps</th>" : "") + "</tr>\n");
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.TSV))
|
||||
{
|
||||
// Nothing
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Write out the separator to the stream, if any exists
|
||||
/// </summary>
|
||||
/// <param name="outputs">Dictionary representing the outputs</param>
|
||||
/// <param name="statDatFormat">StatDatFormat representing output format</param>
|
||||
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
|
||||
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
|
||||
private static void OutputStatsWriteMidSeparator(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat, bool baddumpCol, bool nodumpCol)
|
||||
{
|
||||
if (outputs.ContainsKey(StatDatFormat.None))
|
||||
{
|
||||
// Nothing
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.CSV))
|
||||
{
|
||||
// Nothing
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.HTML))
|
||||
{
|
||||
outputs[StatDatFormat.HTML].Write("<tr><td colspan=\""
|
||||
+ (baddumpCol && nodumpCol
|
||||
? "12"
|
||||
: (baddumpCol ^ nodumpCol
|
||||
? "11"
|
||||
: "10")
|
||||
)
|
||||
+ "\"></td></tr>\n");
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.TSV))
|
||||
{
|
||||
// Nothing
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Write out the footer-separator to the stream, if any exists
|
||||
/// </summary>
|
||||
/// <param name="outputs">Dictionary representing the outputs</param>
|
||||
/// <param name="statDatFormat">StatDatFormat representing output format</param>
|
||||
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
|
||||
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
|
||||
private static void OutputStatsWriteMidFooter(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat, bool baddumpCol, bool nodumpCol)
|
||||
{
|
||||
if (outputs.ContainsKey(StatDatFormat.None))
|
||||
{
|
||||
outputs[StatDatFormat.None].Write("\n");
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.CSV))
|
||||
{
|
||||
outputs[StatDatFormat.CSV].Write("\n");
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.HTML))
|
||||
{
|
||||
outputs[StatDatFormat.HTML].Write("<tr border=\"0\"><td colspan=\""
|
||||
+ (baddumpCol && nodumpCol
|
||||
? "12"
|
||||
: (baddumpCol ^ nodumpCol
|
||||
? "11"
|
||||
: "10")
|
||||
)
|
||||
+ "\"></td></tr>\n");
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.TSV))
|
||||
{
|
||||
outputs[StatDatFormat.TSV].Write("\n");
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Write out the footer to the stream, if any exists
|
||||
/// </summary>
|
||||
/// <param name="sw">StreamWriter representing the output</param>
|
||||
/// <param name="statDatFormat">StatDatFormat representing output format</param>
|
||||
private static void OutputStatsWriteFooter(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat)
|
||||
{
|
||||
if (outputs.ContainsKey(StatDatFormat.None))
|
||||
{
|
||||
// Nothing
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.CSV))
|
||||
{
|
||||
// Nothing
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.HTML))
|
||||
{
|
||||
outputs[StatDatFormat.HTML].Write(@" </table>
|
||||
</body>
|
||||
</html>
|
||||
");
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.TSV))
|
||||
{
|
||||
// Nothing
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#endregion // Static Methods
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
@@ -119,14 +119,8 @@
|
||||
<Compile Include="DatFiles\ClrMamePro.cs" />
|
||||
<Compile Include="DatFiles\DatHeader.cs" />
|
||||
<Compile Include="DatFiles\DatStats.cs" />
|
||||
<Compile Include="DatFiles\DatFile.Manipulate.cs" />
|
||||
<Compile Include="DatFiles\DatFile.ConvertUpdate.cs" />
|
||||
<Compile Include="DatFiles\DatFile.DFD.cs" />
|
||||
<Compile Include="DatFiles\DosCenter.cs" />
|
||||
<Compile Include="DatFiles\Filter.cs" />
|
||||
<Compile Include="DatFiles\DatFile.Rebuild.cs" />
|
||||
<Compile Include="DatFiles\DatFile.Splitters.cs" />
|
||||
<Compile Include="DatFiles\DatFile.Statistics.cs" />
|
||||
<Compile Include="DatFiles\Hashfile.cs" />
|
||||
<Compile Include="DatFiles\Listroms.cs" />
|
||||
<Compile Include="DatFiles\Logiqx.cs" />
|
||||
|
||||