[SabreTools.Library] Update folder name

Matt Nadareski
2017-05-04 02:38:56 -07:00
parent 6b5a23247c
commit f72b06a561
111 changed files with 98 additions and 3 deletions

View File

@@ -0,0 +1,539 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using SabreTools.Helper.Data;
using SabreTools.Helper.Tools;
#if MONO
using System.IO;
#else
using Alphaleonis.Win32.Filesystem;
using SearchOption = System.IO.SearchOption;
#endif
using NaturalSort;
namespace SabreTools.Helper.Dats
{
public partial class DatFile
{
#region Converting and Updating
/// <summary>
/// Determine if input files should be merged, diffed, or processed individually
/// </summary>
/// <param name="inputPaths">Names of the input files and/or folders</param>
/// <param name="outDir">Optional param for output directory</param>
/// <param name="merge">True if input files should be merged into a single file, false otherwise</param>
/// <param name="diff">Non-zero flag for diffing mode, zero otherwise</param>
/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
/// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
/// <param name="bare">True if the date should not be appended to the default name, false otherwise [OBSOLETE]</param>
/// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
/// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
/// <param name="descAsName">True to allow SL DATs to have game names used instead of descriptions, false otherwise (default)</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
/// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param>
public void DetermineUpdateType(List<string> inputPaths, string outDir, bool merge, DiffMode diff, bool inplace, bool skip,
bool bare, bool clean, bool remUnicode, bool descAsName, Filter filter, SplitType splitType, bool trim, bool single, string root)
{
// If we're in merging or diffing mode, use the full list of inputs
if (merge || diff != 0)
{
// Make sure there are no folders in inputs
List<string> newInputFileNames = FileTools.GetOnlyFilesFromInputs(inputPaths, appendparent: true);
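// Note: with appendparent set, each entry encodes both paths as "<file path>¬<parent path>";
// the Split('¬') calls below rely on this layout to recover the file and its root separately.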
// If we're in inverse cascade, reverse the list
if ((diff & DiffMode.ReverseCascade) != 0)
{
newInputFileNames.Reverse();
}
// Create a dictionary of all ROMs from the input DATs
List<DatFile> datHeaders = PopulateUserData(newInputFileNames, inplace, clean,
remUnicode, descAsName, outDir, filter, splitType, trim, single, root);
// Modify the Dictionary if necessary and output the results
if (diff != 0 && diff < DiffMode.Cascade)
{
DiffNoCascade(diff, outDir, newInputFileNames);
}
// If we're in cascade and diff, output only cascaded diffs
else if (diff != 0 && diff >= DiffMode.Cascade)
{
DiffCascade(outDir, inplace, newInputFileNames, datHeaders, skip);
}
// Output all entries with user-defined merge
else
{
MergeNoDiff(outDir, newInputFileNames, datHeaders);
}
}
// Otherwise, loop through all of the inputs individually
else
{
Update(inputPaths, outDir, inplace, clean, remUnicode, descAsName, filter, splitType, trim, single, root);
}
return;
}
/// <summary>
/// Populate the user DatData object from the input files
/// </summary>
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
/// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param>
/// <returns>List of DatData objects representing headers</returns>
private List<DatFile> PopulateUserData(List<string> inputs, bool inplace, bool clean, bool remUnicode, bool descAsName,
string outDir, Filter filter, SplitType splitType, bool trim, bool single, string root)
{
DatFile[] datHeaders = new DatFile[inputs.Count];
DateTime start = DateTime.Now;
Globals.Logger.User("Processing individual DATs");
// Parse all of the DATs into their own DatFiles in the array
Parallel.For(0, inputs.Count, Globals.ParallelOptions, i =>
{
string input = inputs[i];
Globals.Logger.User("Adding DAT: " + input.Split('¬')[0]);
datHeaders[i] = new DatFile
{
DatFormat = (DatFormat != 0 ? DatFormat : 0),
MergeRoms = MergeRoms,
};
datHeaders[i].Parse(input.Split('¬')[0], i, 0, splitType, true, clean, descAsName);
});
Globals.Logger.User("Processing complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
Globals.Logger.User("Populating internal DAT");
Parallel.For(0, inputs.Count, Globals.ParallelOptions, i =>
{
// Get the list of keys from the DAT
List<string> keys = datHeaders[i].Keys.ToList();
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
{
// Add everything from the key to the internal DAT
AddRange(key, datHeaders[i][key]);
// Now remove the key from the source DAT
lock (datHeaders)
{
datHeaders[i].Remove(key);
}
});
// Now remove the file dictionary from the source DAT to save memory
datHeaders[i].Delete();
});
// Now that we have a merged DAT, filter it
Filter(filter, single, trim, root);
Globals.Logger.User("Processing and populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
return datHeaders.ToList();
}
/// <summary>
/// Output non-cascading diffs
/// </summary>
/// <param name="diff">Non-zero flag for diffing mode, zero otherwise</param>
/// <param name="outDir">Output directory to write the DATs to</param>
/// <param name="inputs">List of inputs to write out from</param>
public void DiffNoCascade(DiffMode diff, string outDir, List<string> inputs)
{
DateTime start = DateTime.Now;
Globals.Logger.User("Initializing all output DATs");
// Default vars for use
string post = "";
DatFile outerDiffData = new DatFile();
DatFile dupeData = new DatFile();
// Fill in any information not in the base DAT
if (String.IsNullOrEmpty(_fileName))
{
_fileName = "All DATs";
}
if (String.IsNullOrEmpty(_name))
{
_name = "All DATs";
}
if (String.IsNullOrEmpty(_description))
{
_description = "All DATs";
}
// Don't have External dupes
if ((diff & DiffMode.NoDupes) != 0)
{
post = " (No Duplicates)";
outerDiffData = new DatFile(this);
outerDiffData.FileName += post;
outerDiffData.Name += post;
outerDiffData.Description += post;
outerDiffData.Reset();
}
// Have External dupes
if ((diff & DiffMode.Dupes) != 0)
{
post = " (Duplicates)";
dupeData = new DatFile(this);
dupeData.FileName += post;
dupeData.Name += post;
dupeData.Description += post;
dupeData.Reset();
}
// Create a list of DatData objects representing individual output files
List<DatFile> outDats = new List<DatFile>();
// Loop through each of the inputs and get or create a new DatData object
if ((diff & DiffMode.Individuals) != 0)
{
DatFile[] outDatsArray = new DatFile[inputs.Count];
Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
{
string innerpost = " (" + Path.GetFileNameWithoutExtension(inputs[j].Split('¬')[0]) + " Only)";
DatFile diffData = new DatFile(this);
diffData.FileName += innerpost;
diffData.Name += innerpost;
diffData.Description += innerpost;
diffData.Reset();
outDatsArray[j] = diffData;
});
outDats = outDatsArray.ToList();
}
Globals.Logger.User("Initializing complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
// Now, loop through the dictionary and populate the correct DATs
start = DateTime.Now;
Globals.Logger.User("Populating all output DATs");
List<string> keys = Keys.ToList();
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = DatItem.Merge(this[key]);
// If the rom list is empty or null, just skip it
if (items == null || items.Count == 0)
{
return;
}
// Loop through and add the items correctly
Parallel.ForEach(items, Globals.ParallelOptions, item =>
{
// No duplicates
if ((diff & DiffMode.NoDupes) != 0 || (diff & DiffMode.Individuals) != 0)
{
if ((item.Dupe & DupeType.Internal) != 0)
{
// Individual DATs that are output
if ((diff & DiffMode.Individuals) != 0)
{
outDats[item.SystemID].Add(key, item);
}
// Merged no-duplicates DAT
if ((diff & DiffMode.NoDupes) != 0)
{
DatItem newrom = item;
newrom.Machine.Name += " (" + Path.GetFileNameWithoutExtension(inputs[newrom.SystemID].Split('¬')[0]) + ")";
outerDiffData.Add(key, newrom);
}
}
}
// Duplicates only
if ((diff & DiffMode.Dupes) != 0)
{
if ((item.Dupe & DupeType.External) != 0)
{
DatItem newrom = item;
newrom.Machine.Name += " (" + Path.GetFileNameWithoutExtension(inputs[newrom.SystemID].Split('¬')[0]) + ")";
dupeData.Add(key, newrom);
}
}
});
});
Globals.Logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
// Finally, loop through and output each of the DATs
start = DateTime.Now;
Globals.Logger.User("Outputting all created DATs");
// Output the difflist (a-b)+(b-a) diff
if ((diff & DiffMode.NoDupes) != 0)
{
outerDiffData.WriteToFile(outDir);
}
// Output the (ab) diff
if ((diff & DiffMode.Dupes) != 0)
{
dupeData.WriteToFile(outDir);
}
// Output the individual (a-b) DATs
if ((diff & DiffMode.Individuals) != 0)
{
Parallel.For(0, inputs.Count, j =>
{
// If we have an output directory set, replace the path
string[] split = inputs[j].Split('¬');
string path = outDir + (split[0] == split[1]
? Path.GetFileName(split[0])
: (Path.GetDirectoryName(split[0]).Remove(0, split[1].Length)));
// Try to output the file
outDats[j].WriteToFile(path);
});
}
Globals.Logger.User("Outputting complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
}
/// <summary>
/// Output cascading diffs
/// </summary>
/// <param name="outDir">Output directory to write the DATs to</param>
/// <param name="inplace">True if cascaded diffs are outputted in-place, false otherwise</param>
/// <param name="inputs">List of inputs to write out from</param>
/// <param name="datHeaders">Dat headers used optionally</param>
/// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
public void DiffCascade(string outDir, bool inplace, List<string> inputs, List<DatFile> datHeaders, bool skip)
{
string post = "";
// Create a list of DatData objects representing output files
List<DatFile> outDats = new List<DatFile>();
// Loop through each of the inputs and get or create a new DatData object
DateTime start = DateTime.Now;
Globals.Logger.User("Initializing all output DATs");
DatFile[] outDatsArray = new DatFile[inputs.Count];
Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
{
string innerpost = " (" + Path.GetFileNameWithoutExtension(inputs[j].Split('¬')[0]) + " Only)";
DatFile diffData;
// If we're in inplace mode, take the appropriate DatData object already stored
if (inplace || !String.IsNullOrEmpty(outDir))
{
diffData = datHeaders[j];
}
else
{
diffData = new DatFile(this);
diffData.FileName += post;
diffData.Name += post;
diffData.Description += post;
}
diffData.Reset();
outDatsArray[j] = diffData;
});
outDats = outDatsArray.ToList();
Globals.Logger.User("Initializing complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
// Now, loop through the dictionary and populate the correct DATs
start = DateTime.Now;
Globals.Logger.User("Populating all output DATs");
List<string> keys = Keys.ToList();
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = DatItem.Merge(this[key]);
// If the rom list is empty or null, just skip it
if (items == null || items.Count == 0)
{
return;
}
Parallel.ForEach(items, Globals.ParallelOptions, item =>
{
// There's odd cases where there are items with System ID < 0. Skip them for now
if (item.SystemID < 0)
{
Globals.Logger.Warning("Item found with a <0 SystemID: " + item.Name);
return;
}
outDats[item.SystemID].Add(key, item);
});
});
Globals.Logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
// Finally, loop through and output each of the DATs
start = DateTime.Now;
Globals.Logger.User("Outputting all created DATs");
Parallel.For((skip ? 1 : 0), inputs.Count, j =>
{
// If we have an output directory set, replace the path
string path = "";
if (inplace)
{
path = Path.GetDirectoryName(inputs[j].Split('¬')[0]);
}
else if (!String.IsNullOrEmpty(outDir))
{
string[] split = inputs[j].Split('¬');
path = outDir + (split[0] == split[1]
? Path.GetFileName(split[0])
: (Path.GetDirectoryName(split[0]).Remove(0, split[1].Length)));
}
// Try to output the file
outDats[j].WriteToFile(path);
});
Globals.Logger.User("Outputting complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
}
/// <summary>
/// Output user defined merge
/// </summary>
/// <param name="outDir">Output directory to write the DATs to</param>
/// <param name="inputs">List of inputs to write out from</param>
/// <param name="datHeaders">Dat headers used optionally</param>
public void MergeNoDiff(string outDir, List<string> inputs, List<DatFile> datHeaders)
{
// If we're in SuperDAT mode, prefix all games with their respective DATs
if (Type == "SuperDAT")
{
List<string> keys = Keys.ToList();
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = this[key].ToList();
List<DatItem> newItems = new List<DatItem>();
Parallel.ForEach(items, Globals.ParallelOptions, item =>
{
DatItem newItem = item;
string filename = inputs[newItem.SystemID].Split('¬')[0];
string rootpath = inputs[newItem.SystemID].Split('¬')[1];
rootpath += (rootpath == "" ? "" : Path.DirectorySeparatorChar.ToString());
filename = filename.Remove(0, rootpath.Length);
newItem.Machine.Name = Path.GetDirectoryName(filename) + Path.DirectorySeparatorChar
+ Path.GetFileNameWithoutExtension(filename) + Path.DirectorySeparatorChar
+ newItem.Machine.Name;
lock (newItems)
{
newItems.Add(newItem);
}
});
Remove(key);
AddRange(key, newItems);
});
}
// Try to output the file
WriteToFile(outDir);
}
/// <summary>
/// Convert, update, and filter a DAT file or set of files using a base
/// </summary>
/// <param name="inputFileNames">Names of the input files and/or folders</param>
/// <param name="outDir">Optional param for output directory</param>
/// <param name="merge">True if input files should be merged into a single file, false otherwise</param>
/// <param name="diff">Non-zero flag for diffing mode, zero otherwise</param>
/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
/// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
/// <param name="bare">True if the date should not be appended to the default name, false otherwise [OBSOLETE]</param>
/// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
/// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
/// <param name="descAsName">True to allow SL DATs to have game names used instead of descriptions, false otherwise (default)</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
/// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param>
public void Update(List<string> inputFileNames, string outDir, bool inplace, bool clean, bool remUnicode, bool descAsName,
Filter filter, SplitType splitType, bool trim, bool single, string root)
{
Parallel.ForEach(inputFileNames, Globals.ParallelOptions, inputFileName =>
{
// Clean the input string
if (inputFileName != "")
{
inputFileName = Path.GetFullPath(inputFileName);
}
if (File.Exists(inputFileName))
{
// If inplace is set, override the output dir
string realOutDir = outDir;
if (inplace)
{
realOutDir = Path.GetDirectoryName(inputFileName);
}
DatFile innerDatdata = new DatFile(this);
Globals.Logger.User("Processing \"" + Path.GetFileName(inputFileName) + "\"");
innerDatdata.Parse(inputFileName, 0, 0, splitType, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName,
keepext: ((innerDatdata.DatFormat & DatFormat.TSV) != 0 || (innerDatdata.DatFormat & DatFormat.CSV) != 0));
innerDatdata.Filter(filter, trim, single, root);
// Try to output the file
innerDatdata.WriteToFile((realOutDir == "" ? Path.GetDirectoryName(inputFileName) : realOutDir), overwrite: (realOutDir != ""));
}
else if (Directory.Exists(inputFileName))
{
inputFileName = Path.GetFullPath(inputFileName) + Path.DirectorySeparatorChar;
// If inplace is set, override the output dir
string realOutDir = outDir;
if (inplace)
{
realOutDir = Path.GetDirectoryName(inputFileName);
}
List<string> subFiles = Directory.EnumerateFiles(inputFileName, "*", SearchOption.AllDirectories).ToList();
Parallel.ForEach(subFiles, Globals.ParallelOptions, file =>
{
Globals.Logger.User("Processing \"" + Path.GetFullPath(file).Remove(0, inputFileName.Length) + "\"");
DatFile innerDatdata = new DatFile(this);
innerDatdata.Parse(file, 0, 0, splitType, true, clean, descAsName,
keepext: ((innerDatdata.DatFormat & DatFormat.TSV) != 0 || (innerDatdata.DatFormat & DatFormat.CSV) != 0));
innerDatdata.Filter(filter, trim, single, root);
// Try to output the file
innerDatdata.WriteToFile((realOutDir == "" ? Path.GetDirectoryName(file) : realOutDir + Path.GetDirectoryName(file).Remove(0, inputFileName.Length - 1)),
overwrite: (realOutDir != ""));
});
}
else
{
Globals.Logger.Error("I'm sorry but " + inputFileName + " doesn't exist!");
return;
}
});
}
#endregion
}
}
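
A minimal caller sketch for the conversion entry point above. The DatFile and Filter constructors, paths, and option values here are assumptions for illustration only and are not part of this commit:

// Merge two input DATs into a single output DAT, with diffing, cleaning, and trimming disabled
DatFile datFile = new DatFile();
List<string> inputPaths = new List<string> { @"C:\dats\first.dat", @"C:\dats\second.dat" };
datFile.DetermineUpdateType(inputPaths, @"C:\out", merge: true, diff: 0, inplace: false, skip: false,
    bare: false, clean: false, remUnicode: false, descAsName: false, filter: new Filter(),
    splitType: (SplitType)0, trim: false, single: false, root: null);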

View File

@@ -0,0 +1,433 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using SabreTools.Helper.Data;
using SabreTools.Helper.Tools;
#if MONO
using System.IO;
#else
using Alphaleonis.Win32.Filesystem;
using IOException = System.IO.IOException;
using SearchOption = System.IO.SearchOption;
#endif
using SharpCompress.Common;
namespace SabreTools.Helper.Dats
{
public partial class DatFile
{
#region Populate DAT from Directory [MODULAR DONE, FOR NOW]
/// <summary>
/// Create a new Dat from a directory
/// </summary>
/// <param name="basePath">Base folder to be used in creating the DAT</param>
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
/// <param name="bare">True if the date should be omitted from the DAT, false otherwise</param>
/// <param name="archivesAsFiles">True if archives should be treated as files, false otherwise</param>
/// <param name="enableGzip">True if GZIP archives should be treated as files, false otherwise</param>
/// <param name="skipFileType">Type of files that should be skipped</param>
/// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
/// <param name="tempDir">Name of the directory to create a temp folder in (blank is current directory)</param>
/// <param name="outDir">Output directory to </param>
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
public bool PopulateFromDir(string basePath, Hash omitFromScan, bool bare, bool archivesAsFiles, bool enableGzip,
SkipFileType skipFileType, bool addBlanks, bool addDate, string tempDir, bool copyFiles, string headerToCheckAgainst)
{
// If the description is defined but not the name, set the name from the description
if (String.IsNullOrEmpty(Name) && !String.IsNullOrEmpty(Description))
{
Name = Description;
}
// If the name is defined but not the description, set the description from the name
else if (!String.IsNullOrEmpty(Name) && String.IsNullOrEmpty(Description))
{
Description = Name + (bare ? "" : " (" + Date + ")");
}
// If neither the name or description are defined, set them from the automatic values
else if (String.IsNullOrEmpty(Name) && String.IsNullOrEmpty(Description))
{
Name = basePath.Split(Path.DirectorySeparatorChar).Last();
Description = Name + (bare ? "" : " (" + Date + ")");
}
// Process the input
if (Directory.Exists(basePath))
{
Globals.Logger.Verbose("Folder found: " + basePath);
// Process the files in the main folder
List<string> files = Directory.EnumerateFiles(basePath, "*", SearchOption.TopDirectoryOnly).ToList();
Parallel.ForEach(files, Globals.ParallelOptions, item =>
{
PopulateFromDirCheckFile(item, basePath, omitFromScan, bare, archivesAsFiles, enableGzip, skipFileType,
addBlanks, addDate, tempDir, copyFiles, headerToCheckAgainst);
});
// Find all top-level subfolders
files = Directory.EnumerateDirectories(basePath, "*", SearchOption.TopDirectoryOnly).ToList();
Parallel.ForEach(files, Globals.ParallelOptions, item =>
{
List<string> subfiles = Directory.EnumerateFiles(item, "*", SearchOption.AllDirectories).ToList();
Parallel.ForEach(subfiles, Globals.ParallelOptions, subitem =>
{
PopulateFromDirCheckFile(subitem, basePath, omitFromScan, bare, archivesAsFiles, enableGzip, skipFileType,
addBlanks, addDate, tempDir, copyFiles, headerToCheckAgainst);
});
});
// Now find all folders that are empty, if we are supposed to
if (!Romba && addBlanks)
{
List<string> empties = FileTools.GetEmptyDirectories(basePath).ToList();
Parallel.ForEach(empties, Globals.ParallelOptions, dir =>
{
// Get the full path for the directory
string fulldir = Path.GetFullPath(dir);
// Set the temporary variables
string gamename = "";
string romname = "";
// If we have a SuperDAT, we want anything that's not the base path as the game, and the file as the rom
if (Type == "SuperDAT")
{
gamename = fulldir.Remove(0, basePath.Length + 1);
romname = "_";
}
// Otherwise, we want just the top level folder as the game, and the file as everything else
else
{
gamename = fulldir.Remove(0, basePath.Length + 1).Split(Path.DirectorySeparatorChar)[0];
romname = Path.Combine(fulldir.Remove(0, basePath.Length + 1 + gamename.Length), "_");
}
// Sanitize the names
if (gamename.StartsWith(Path.DirectorySeparatorChar.ToString()))
{
gamename = gamename.Substring(1);
}
if (gamename.EndsWith(Path.DirectorySeparatorChar.ToString()))
{
gamename = gamename.Substring(0, gamename.Length - 1);
}
if (romname.StartsWith(Path.DirectorySeparatorChar.ToString()))
{
romname = romname.Substring(1);
}
if (romname.EndsWith(Path.DirectorySeparatorChar.ToString()))
{
romname = romname.Substring(0, romname.Length - 1);
}
Globals.Logger.Verbose("Adding blank empty folder: " + gamename);
this["null"].Add(new Rom(romname, gamename, omitFromScan));
});
}
}
else if (File.Exists(basePath))
{
PopulateFromDirCheckFile(basePath, Path.GetDirectoryName(Path.GetDirectoryName(basePath)), omitFromScan, bare, archivesAsFiles, enableGzip,
skipFileType, addBlanks, addDate, tempDir, copyFiles, headerToCheckAgainst);
}
// Now that we're done, delete the temp folder (if it's not the default)
Globals.Logger.User("Cleaning temp folder");
if (tempDir != Path.GetTempPath())
{
FileTools.TryDeleteDirectory(tempDir);
}
return true;
}
/// <summary>
/// Check a given file for hashes, based on current settings
/// </summary>
/// <param name="item">Filename of the item to be checked</param>
/// <param name="basePath">Base folder to be used in creating the DAT</param>
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
/// <param name="bare">True if the date should be omitted from the DAT, false otherwise</param>
/// <param name="archivesAsFiles">True if archives should be treated as files, false otherwise</param>
/// <param name="enableGzip">True if GZIP archives should be treated as files, false otherwise</param>
/// <param name="skipFileType">Type of files that should be skipped</param>
/// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
/// <param name="tempDir">Name of the directory to create a temp folder in (blank is current directory)</param>
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
private void PopulateFromDirCheckFile(string item, string basePath, Hash omitFromScan, bool bare, bool archivesAsFiles,
bool enableGzip, SkipFileType skipFileType, bool addBlanks, bool addDate, string tempDir, bool copyFiles, string headerToCheckAgainst)
{
// Define the temporary directory
string tempSubDir = Path.GetFullPath(Path.Combine(tempDir, Path.GetRandomFileName())) + Path.DirectorySeparatorChar;
// Special case for if we are in Romba mode (all names are supposed to be SHA-1 hashes)
if (Romba)
{
Rom rom = ArchiveTools.GetTorrentGZFileInfo(item);
// If the rom is valid, write it out
if (rom != null && rom.Name != null)
{
// Add the list if it doesn't exist already
Add(rom.Size + "-" + rom.CRC, rom);
Globals.Logger.User("File added: " + Path.GetFileNameWithoutExtension(item) + Environment.NewLine);
}
else
{
Globals.Logger.User("File not added: " + Path.GetFileNameWithoutExtension(item) + Environment.NewLine);
return;
}
return;
}
// If we're copying files, copy it first and get the new filename
string newItem = item;
string newBasePath = basePath;
if (copyFiles)
{
newBasePath = Path.Combine(tempDir, Path.GetRandomFileName());
newItem = Path.GetFullPath(Path.Combine(newBasePath, Path.GetFullPath(item).Remove(0, basePath.Length + 1)));
Directory.CreateDirectory(Path.GetDirectoryName(newItem));
File.Copy(item, newItem, true);
}
// Create a list for all found items
List<Rom> extracted = null;
// If this is a GZip archive and GZip files are not being treated as files, temporarily clear archivesAsFiles so the item is scanned as an archive
if (archivesAsFiles && !enableGzip && newItem.EndsWith(".gz"))
{
archivesAsFiles = false;
}
// If we don't have archives as files, try to scan the file as an archive
if (!archivesAsFiles)
{
// If all deep hash skip flags are set, do a quickscan
if (omitFromScan == Hash.SecureHashes)
{
extracted = ArchiveTools.GetArchiveFileInfo(newItem, date: addDate);
}
// Otherwise, get the list with whatever hashes are wanted
else
{
extracted = ArchiveTools.GetExtendedArchiveFileInfo(newItem, omitFromScan: omitFromScan, date: addDate);
}
}
// If the file should be skipped based on type, do so now
if ((extracted != null && skipFileType == SkipFileType.Archive)
|| (extracted == null && skipFileType == SkipFileType.File))
{
return;
}
// If the extracted list is null, just scan the item itself
if (extracted == null || archivesAsFiles)
{
PopulateFromDirProcessFile(newItem, "", newBasePath, omitFromScan, addDate, headerToCheckAgainst);
}
// Otherwise, add all of the found items
else
{
// First take care of the found items
Parallel.ForEach(extracted, Globals.ParallelOptions, rom =>
{
PopulateFromDirProcessFileHelper(newItem,
rom,
basePath,
(Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item));
});
// Then, if we're looking for blanks, get all of the blank folders and add them
if (addBlanks)
{
List<string> empties = ArchiveTools.GetEmptyFoldersInArchive(newItem);
Parallel.ForEach(empties, Globals.ParallelOptions, empty =>
{
Rom emptyRom = new Rom(Path.Combine(empty, "_"), newItem, omitFromScan);
PopulateFromDirProcessFileHelper(newItem,
emptyRom,
basePath,
(Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item));
});
}
}
// Cue to delete the file if it's a copy
if (copyFiles && item != newItem)
{
FileTools.TryDeleteDirectory(newBasePath);
}
// Delete the sub temp directory
FileTools.TryDeleteDirectory(tempSubDir);
}
/// <summary>
/// Process a single file as a file
/// </summary>
/// <param name="item">File to be added</param>
/// <param name="parent">Parent game to be used</param>
/// <param name="basePath">Path the represents the parent directory</param>
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
private void PopulateFromDirProcessFile(string item, string parent, string basePath, Hash omitFromScan,
bool addDate, string headerToCheckAgainst)
{
Globals.Logger.Verbose(Path.GetFileName(item) + " treated like a file");
Rom rom = FileTools.GetFileInfo(item, omitFromScan: omitFromScan, date: addDate, header: headerToCheckAgainst);
PopulateFromDirProcessFileHelper(item, rom, basePath, parent);
}
/// <summary>
/// Process a single file as a file (with found Rom data)
/// </summary>
/// <param name="item">File to be added</param>
/// <param name="item">Rom data to be used to write to file</param>
/// <param name="basepath">Path the represents the parent directory</param>
/// <param name="parent">Parent game to be used</param>
private void PopulateFromDirProcessFileHelper(string item, DatItem datItem, string basepath, string parent)
{
// If the datItem isn't a Rom or Disk, return
if (datItem.Type != ItemType.Rom && datItem.Type != ItemType.Disk)
{
return;
}
string key = "";
if (datItem.Type == ItemType.Rom)
{
key = ((Rom)datItem).Size + "-" + ((Rom)datItem).CRC;
}
else
{
key = ((Disk)datItem).MD5;
}
// Add the list if it doesn't exist already
Add(key);
try
{
// If the basepath ends with a directory separator, remove it
if (!basepath.EndsWith(Path.DirectorySeparatorChar.ToString()))
{
basepath += Path.DirectorySeparatorChar.ToString();
}
// Make sure we have the full item path
item = Path.GetFullPath(item);
// Get the data to be added as game and item names
string gamename = "";
string romname = "";
// If the parent is blank, then we have a non-archive file
if (parent == "")
{
// If we have a SuperDAT, we want anything that's not the base path as the game, and the file as the rom
if (Type == "SuperDAT")
{
gamename = Path.GetDirectoryName(item.Remove(0, basepath.Length));
romname = Path.GetFileName(item);
}
// Otherwise, we want just the top level folder as the game, and the file as everything else
else
{
gamename = item.Remove(0, basepath.Length).Split(Path.DirectorySeparatorChar)[0];
romname = item.Remove(0, (Path.Combine(basepath, gamename).Length));
}
}
// Otherwise, we assume that we have an archive
else
{
// If we have a SuperDAT, we want the archive name as the game, and the file as everything else (?)
if (Type == "SuperDAT")
{
gamename = parent;
romname = datItem.Name;
}
// Otherwise, we want the archive name as the game, and the file as everything else
else
{
gamename = parent;
romname = datItem.Name;
}
}
// Sanitize the names
if (romname == null)
{
romname = "";
}
if (gamename.StartsWith(Path.DirectorySeparatorChar.ToString()))
{
gamename = gamename.Substring(1);
}
if (gamename.EndsWith(Path.DirectorySeparatorChar.ToString()))
{
gamename = gamename.Substring(0, gamename.Length - 1);
}
if (romname.StartsWith(Path.DirectorySeparatorChar.ToString()))
{
romname = romname.Substring(1);
}
if (romname.EndsWith(Path.DirectorySeparatorChar.ToString()))
{
romname = romname.Substring(0, romname.Length - 1);
}
if (!String.IsNullOrEmpty(gamename) && String.IsNullOrEmpty(romname))
{
romname = gamename;
gamename = "Default";
}
// Update rom information
datItem.Name = romname;
if (datItem.Machine == null)
{
datItem.Machine = new Machine
{
Name = gamename,
Description = gamename,
};
}
else
{
datItem.Machine.Name = gamename;
datItem.Machine.Description = gamename;
}
// Add the file information to the DAT
Add(key, datItem);
Globals.Logger.User("File added: " + romname + Environment.NewLine);
}
catch (IOException ex)
{
Globals.Logger.Error(ex.ToString());
return;
}
}
#endregion
}
}
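
For reference, the dictionary key scheme used when adding scanned items above (see PopulateFromDirProcessFileHelper) can be written as a small helper. GetBucketKey is a hypothetical name introduced here for illustration and does not exist in this commit:

// Roms are keyed on "<size>-<crc>"; Disks are keyed on their MD5
private static string GetBucketKey(DatItem datItem)
{
    if (datItem.Type == ItemType.Rom)
    {
        Rom rom = (Rom)datItem;
        return rom.Size + "-" + rom.CRC;
    }

    return ((Disk)datItem).MD5;
}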

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,638 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using System.Web;
using SabreTools.Helper.Data;
using SabreTools.Helper.Tools;
#if MONO
using System.IO;
#else
using Alphaleonis.Win32.Filesystem;
#endif
using NaturalSort;
namespace SabreTools.Helper.Dats
{
public partial class DatFile
{
#region Splitting
/// <summary>
/// Split a DAT by input extensions
/// </summary>
/// <param name="outDir">Name of the directory to write the DATs out to</param>
/// <param name="basepath">Parent path for replacement</param>
/// <param name="extA">List of extensions to split on (first DAT)</param>
/// <param name="extB">List of extensions to split on (second DAT)</param>
/// <returns>True if split succeeded, false otherwise</returns>
public bool SplitByExt(string outDir, string basepath, List<string> extA, List<string> extB)
{
// Make sure all of the extensions have a dot at the beginning
List<string> newExtA = new List<string>();
Parallel.ForEach(extA, Globals.ParallelOptions, s =>
{
lock (newExtA)
{
newExtA.Add((s.StartsWith(".") ? s : "." + s).ToUpperInvariant());
}
});
string newExtAString = string.Join(",", newExtA);
List<string> newExtB = new List<string>();
Parallel.ForEach(extB, Globals.ParallelOptions, s =>
{
lock (newExtB)
{
newExtB.Add((s.StartsWith(".") ? s : "." + s).ToUpperInvariant());
}
});
string newExtBString = string.Join(",", newExtB);
// Set all of the appropriate outputs for each of the subsets
DatFile datdataA = new DatFile
{
FileName = this.FileName + " (" + newExtAString + ")",
Name = this.Name + " (" + newExtAString + ")",
Description = this.Description + " (" + newExtAString + ")",
Category = this.Category,
Version = this.Version,
Date = this.Date,
Author = this.Author,
Email = this.Email,
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
DatFormat = this.DatFormat,
};
DatFile datdataB = new DatFile
{
FileName = this.FileName + " (" + newExtBString + ")",
Name = this.Name + " (" + newExtBString + ")",
Description = this.Description + " (" + newExtBString + ")",
Category = this.Category,
Version = this.Version,
Date = this.Date,
Author = this.Author,
Email = this.Email,
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
DatFormat = this.DatFormat,
};
// If roms is empty, return false
if (Count == 0)
{
return false;
}
// Now separate the roms accordingly
List<string> keys = Keys.ToList();
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = this[key];
Parallel.ForEach(items, Globals.ParallelOptions, item =>
{
if (newExtA.Contains(Path.GetExtension(item.Name.ToUpperInvariant())))
{
datdataA.Add(key, item);
}
else if (newExtB.Contains(Path.GetExtension(item.Name.ToUpperInvariant())))
{
datdataB.Add(key, item);
}
else
{
datdataA.Add(key, item);
datdataB.Add(key, item);
}
});
});
// Get the output directory
if (outDir != "")
{
outDir = outDir + Path.GetDirectoryName(this.FileName).Remove(0, basepath.Length - 1);
}
else
{
outDir = Path.GetDirectoryName(this.FileName);
}
// Then write out both files
bool success = datdataA.WriteToFile(outDir);
success &= datdataB.WriteToFile(outDir);
return success;
}
/// <summary>
/// Split a DAT by best available hashes
/// </summary>
/// <param name="outDir">Name of the directory to write the DATs out to</param>
/// <param name="basepath">Parent path for replacement</param>
/// <returns>True if split succeeded, false otherwise</returns>
public bool SplitByHash(string outDir, string basepath)
{
// Sanitize the basepath to be more predictable
basepath = (basepath.EndsWith(Path.DirectorySeparatorChar.ToString()) ? basepath : basepath + Path.DirectorySeparatorChar);
// Create each of the respective output DATs
Globals.Logger.User("Creating and populating new DATs");
DatFile nodump = new DatFile
{
FileName = this.FileName + " (Nodump)",
Name = this.Name + " (Nodump)",
Description = this.Description + " (Nodump)",
Category = this.Category,
Version = this.Version,
Date = this.Date,
Author = this.Author,
Email = this.Email,
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
Header = this.Header,
Type = this.Type,
ForceMerging = this.ForceMerging,
ForceNodump = this.ForceNodump,
ForcePacking = this.ForcePacking,
DatFormat = this.DatFormat,
MergeRoms = this.MergeRoms,
};
DatFile sha512 = new DatFile
{
FileName = this.FileName + " (SHA-512)",
Name = this.Name + " (SHA-512)",
Description = this.Description + " (SHA-512)",
Category = this.Category,
Version = this.Version,
Date = this.Date,
Author = this.Author,
Email = this.Email,
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
Header = this.Header,
Type = this.Type,
ForceMerging = this.ForceMerging,
ForceNodump = this.ForceNodump,
ForcePacking = this.ForcePacking,
DatFormat = this.DatFormat,
MergeRoms = this.MergeRoms,
};
DatFile sha384 = new DatFile
{
FileName = this.FileName + " (SHA-384)",
Name = this.Name + " (SHA-384)",
Description = this.Description + " (SHA-384)",
Category = this.Category,
Version = this.Version,
Date = this.Date,
Author = this.Author,
Email = this.Email,
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
Header = this.Header,
Type = this.Type,
ForceMerging = this.ForceMerging,
ForceNodump = this.ForceNodump,
ForcePacking = this.ForcePacking,
DatFormat = this.DatFormat,
MergeRoms = this.MergeRoms,
};
DatFile sha256 = new DatFile
{
FileName = this.FileName + " (SHA-256)",
Name = this.Name + " (SHA-256)",
Description = this.Description + " (SHA-256)",
Category = this.Category,
Version = this.Version,
Date = this.Date,
Author = this.Author,
Email = this.Email,
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
Header = this.Header,
Type = this.Type,
ForceMerging = this.ForceMerging,
ForceNodump = this.ForceNodump,
ForcePacking = this.ForcePacking,
DatFormat = this.DatFormat,
MergeRoms = this.MergeRoms,
};
DatFile sha1 = new DatFile
{
FileName = this.FileName + " (SHA-1)",
Name = this.Name + " (SHA-1)",
Description = this.Description + " (SHA-1)",
Category = this.Category,
Version = this.Version,
Date = this.Date,
Author = this.Author,
Email = this.Email,
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
Header = this.Header,
Type = this.Type,
ForceMerging = this.ForceMerging,
ForceNodump = this.ForceNodump,
ForcePacking = this.ForcePacking,
DatFormat = this.DatFormat,
MergeRoms = this.MergeRoms,
};
DatFile md5 = new DatFile
{
FileName = this.FileName + " (MD5)",
Name = this.Name + " (MD5)",
Description = this.Description + " (MD5)",
Category = this.Category,
Version = this.Version,
Date = this.Date,
Author = this.Author,
Email = this.Email,
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
Header = this.Header,
Type = this.Type,
ForceMerging = this.ForceMerging,
ForceNodump = this.ForceNodump,
ForcePacking = this.ForcePacking,
DatFormat = this.DatFormat,
MergeRoms = this.MergeRoms,
};
DatFile crc = new DatFile
{
FileName = this.FileName + " (CRC)",
Name = this.Name + " (CRC)",
Description = this.Description + " (CRC)",
Category = this.Category,
Version = this.Version,
Date = this.Date,
Author = this.Author,
Email = this.Email,
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
Header = this.Header,
Type = this.Type,
ForceMerging = this.ForceMerging,
ForceNodump = this.ForceNodump,
ForcePacking = this.ForcePacking,
DatFormat = this.DatFormat,
MergeRoms = this.MergeRoms,
};
DatFile other = new DatFile
{
FileName = this.FileName + " (Other)",
Name = this.Name + " (Other)",
Description = this.Description + " (Other)",
Category = this.Category,
Version = this.Version,
Date = this.Date,
Author = this.Author,
Email = this.Email,
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
Header = this.Header,
Type = this.Type,
ForceMerging = this.ForceMerging,
ForceNodump = this.ForceNodump,
ForcePacking = this.ForcePacking,
DatFormat = this.DatFormat,
MergeRoms = this.MergeRoms,
};
// Now populate each of the DAT objects in turn
List<string> keys = Keys.ToList();
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = this[key];
Parallel.ForEach(items, Globals.ParallelOptions, item =>
{
// If the file is not a Rom or Disk, continue
if (item.Type != ItemType.Disk && item.Type != ItemType.Rom)
{
return;
}
// If the file is a nodump
if ((item.Type == ItemType.Rom && ((Rom)item).ItemStatus == ItemStatus.Nodump)
|| (item.Type == ItemType.Disk && ((Disk)item).ItemStatus == ItemStatus.Nodump))
{
nodump.Add(key, item);
}
// If the file has a SHA-512
else if ((item.Type == ItemType.Rom && !String.IsNullOrEmpty(((Rom)item).SHA512))
|| (item.Type == ItemType.Disk && !String.IsNullOrEmpty(((Disk)item).SHA512)))
{
sha512.Add(key, item);
}
// If the file has a SHA-384
else if ((item.Type == ItemType.Rom && !String.IsNullOrEmpty(((Rom)item).SHA384))
|| (item.Type == ItemType.Disk && !String.IsNullOrEmpty(((Disk)item).SHA384)))
{
sha384.Add(key, item);
}
// If the file has a SHA-256
else if ((item.Type == ItemType.Rom && !String.IsNullOrEmpty(((Rom)item).SHA256))
|| (item.Type == ItemType.Disk && !String.IsNullOrEmpty(((Disk)item).SHA256)))
{
sha256.Add(key, item);
}
// If the file has a SHA-1
else if ((item.Type == ItemType.Rom && !String.IsNullOrEmpty(((Rom)item).SHA1))
|| (item.Type == ItemType.Disk && !String.IsNullOrEmpty(((Disk)item).SHA1)))
{
sha1.Add(key, item);
}
// If the file has no SHA-1 but has an MD5
else if ((item.Type == ItemType.Rom && !String.IsNullOrEmpty(((Rom)item).MD5))
|| (item.Type == ItemType.Disk && !String.IsNullOrEmpty(((Disk)item).MD5)))
{
md5.Add(key, item);
}
// If the file has no MD5 but has a CRC (only Roms carry a CRC)
else if (item.Type == ItemType.Rom && !String.IsNullOrEmpty(((Rom)item).CRC))
{
crc.Add(key, item);
}
else
{
other.Add(key, item);
}
});
});
// Get the output directory
if (outDir != "")
{
outDir = outDir + Path.GetDirectoryName(this.FileName).Remove(0, basepath.Length - 1);
}
else
{
outDir = Path.GetDirectoryName(this.FileName);
}
// Now, output all of the files to the output directory
Globals.Logger.User("DAT information created, outputting new files");
bool success = true;
success &= nodump.WriteToFile(outDir);
success &= sha512.WriteToFile(outDir);
success &= sha384.WriteToFile(outDir);
success &= sha256.WriteToFile(outDir);
success &= sha1.WriteToFile(outDir);
success &= md5.WriteToFile(outDir);
success &= crc.WriteToFile(outDir);
return success;
}
/// <summary>
/// Split a SuperDAT by lowest available directory level
/// </summary>
/// <param name="outDir">Name of the directory to write the DATs out to</param>
/// <param name="basepath">Parent path for replacement</param>
/// <param name="shortname">True if short names should be used, false otherwise</param>
/// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise</param>
/// <returns>True if split succeeded, false otherwise</returns>
public bool SplitByLevel(string outDir, string basepath, bool shortname, bool basedat)
{
// Sanitize the basepath to be more predictable
basepath = (basepath.EndsWith(Path.DirectorySeparatorChar.ToString()) ? basepath : basepath + Path.DirectorySeparatorChar);
// First, organize by games so that we can do the right thing
BucketBy(SortedBy.Game, false /* mergeroms */, lower: false, norename: true);
// Create a temporary DAT to add things to
DatFile tempDat = new DatFile(this)
{
Name = null,
};
// Sort the input keys
List<string> keys = Keys.ToList();
keys.Sort(SplitByLevelSort);
// Then, we loop over the games
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
{
// Here, the key is the name of the game to be used for comparison
if (tempDat.Name != null && tempDat.Name != Style.GetDirectoryName(key))
{
// Process and output the DAT
SplitByLevelHelper(tempDat, outDir, shortname, basedat);
// Reset the DAT for the next items
tempDat = new DatFile(this)
{
Name = null,
};
}
// Clean the input list and set all games to be pathless
List<DatItem> items = this[key];
items.ForEach(item => item.Machine.Name = Style.GetFileName(item.Machine.Name));
items.ForEach(item => item.Machine.Description = Style.GetFileName(item.Machine.Description));
// Now add the game to the output DAT
tempDat.AddRange(key, items);
// Then set the DAT name to be the parent directory name
tempDat.Name = Style.GetDirectoryName(key);
});
// Then we write the last DAT out since it would be skipped otherwise
SplitByLevelHelper(tempDat, outDir, shortname, basedat);
return true;
}
/// <summary>
/// Helper function for SplitByLevel to sort the input game names
/// </summary>
/// <param name="a">First string to compare</param>
/// <param name="b">Second string to compare</param>
/// <returns>-1 for a coming before b, 0 for a == b, 1 for a coming after b</returns>
private int SplitByLevelSort(string a, string b)
{
NaturalComparer nc = new NaturalComparer();
int adeep = a.Count(c => c == '/' || c == '\\');
int bdeep = b.Count(c => c == '/' || c == '\\');
if (adeep == bdeep)
{
return nc.Compare(a, b);
}
return adeep - bdeep;
}
/// <summary>
/// Helper function for SplitByLevel to clean and write out a DAT
/// </summary>
/// <param name="datFile">DAT to clean and write out</param>
/// <param name="outDir">Directory to write out to</param>
/// <param name="shortname">True if short naming scheme should be used, false otherwise</param>
/// <param name="restore">True if original filenames should be used as the base for output filename, false otherwise</param>
private void SplitByLevelHelper(DatFile datFile, string outDir, bool shortname, bool restore)
{
// Get the name from the DAT to use separately
string name = datFile.Name;
string expName = name.Replace("/", " - ").Replace("\\", " - ");
// Get the path that the file will be written out to
string path = HttpUtility.HtmlDecode(String.IsNullOrEmpty(name)
? outDir
: Path.Combine(outDir, name));
// Now set the new output values
datFile.FileName = HttpUtility.HtmlDecode(String.IsNullOrEmpty(name)
? FileName
: (shortname
? Style.GetFileName(name)
: expName
)
);
datFile.FileName = (restore ? FileName + " (" + datFile.FileName + ")" : datFile.FileName);
datFile.Name = Name + " (" + expName + ")";
datFile.Description = (String.IsNullOrEmpty(Description) ? datFile.Name : Description + " (" + expName + ")");
datFile.Type = null;
// Write out the temporary DAT to the proper directory
datFile.WriteToFile(path);
}
/// <summary>
/// Split a DAT by type of Rom
/// </summary>
/// <param name="outDir">Name of the directory to write the DATs out to</param>
/// <param name="basepath">Parent path for replacement</param>
/// <returns>True if split succeeded, false otherwise</returns>
public bool SplitByType(string outDir, string basepath)
{
// Sanitize the basepath to be more predictable
basepath = (basepath.EndsWith(Path.DirectorySeparatorChar.ToString()) ? basepath : basepath + Path.DirectorySeparatorChar);
// Create each of the respective output DATs
Globals.Logger.User("Creating and populating new DATs");
DatFile romdat = new DatFile
{
FileName = this.FileName + " (ROM)",
Name = this.Name + " (ROM)",
Description = this.Description + " (ROM)",
Category = this.Category,
Version = this.Version,
Date = this.Date,
Author = this.Author,
Email = this.Email,
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
Header = this.Header,
Type = this.Type,
ForceMerging = this.ForceMerging,
ForceNodump = this.ForceNodump,
ForcePacking = this.ForcePacking,
DatFormat = this.DatFormat,
MergeRoms = this.MergeRoms,
};
DatFile diskdat = new DatFile
{
FileName = this.FileName + " (Disk)",
Name = this.Name + " (Disk)",
Description = this.Description + " (Disk)",
Category = this.Category,
Version = this.Version,
Date = this.Date,
Author = this.Author,
Email = this.Email,
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
Header = this.Header,
Type = this.Type,
ForceMerging = this.ForceMerging,
ForceNodump = this.ForceNodump,
ForcePacking = this.ForcePacking,
DatFormat = this.DatFormat,
MergeRoms = this.MergeRoms,
};
DatFile sampledat = new DatFile
{
FileName = this.FileName + " (Sample)",
Name = this.Name + " (Sample)",
Description = this.Description + " (Sample)",
Category = this.Category,
Version = this.Version,
Date = this.Date,
Author = this.Author,
Email = this.Email,
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
Header = this.Header,
Type = this.Type,
ForceMerging = this.ForceMerging,
ForceNodump = this.ForceNodump,
ForcePacking = this.ForcePacking,
DatFormat = this.DatFormat,
MergeRoms = this.MergeRoms,
};
// Now populate each of the DAT objects in turn
List<string> keys = Keys.ToList();
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = this[key];
Parallel.ForEach(items, Globals.ParallelOptions, item =>
{
// If the file is a Rom
if (item.Type == ItemType.Rom)
{
romdat.Add(key, item);
}
// If the file is a Disk
else if (item.Type == ItemType.Disk)
{
diskdat.Add(key, item);
}
// If the file is a Sample
else if (item.Type == ItemType.Sample)
{
sampledat.Add(key, item);
}
});
});
// Get the output directory
if (outDir != "")
{
outDir = outDir + Path.GetDirectoryName(this.FileName).Remove(0, basepath.Length - 1);
}
else
{
outDir = Path.GetDirectoryName(this.FileName);
}
// Now, output all of the files to the output directory
Globals.Logger.User("DAT information created, outputting new files");
bool success = true;
success &= romdat.WriteToFile(outDir);
success &= diskdat.WriteToFile(outDir);
success &= sampledat.WriteToFile(outDir);
return success;
}
#endregion
}
}
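
A usage sketch for SplitByExt above, assuming datFile has already been parsed from a DAT stored under C:\dats\; the paths and extension lists are illustrative only:

// Split the loaded DAT into a BIN/CUE DAT and an ISO DAT; items matching neither list are added to both
List<string> extA = new List<string> { "bin", "cue" };
List<string> extB = new List<string> { "iso" };
bool success = datFile.SplitByExt(@"C:\out", @"C:\dats\", extA, extB);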

View File

@@ -0,0 +1,752 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using System.Web;
using SabreTools.Helper.Data;
using SabreTools.Helper.Tools;
#if MONO
using System.IO;
#else
using Alphaleonis.Win32.Filesystem;
using SearchOption = System.IO.SearchOption;
using StreamWriter = System.IO.StreamWriter;
#endif
namespace SabreTools.Helper.Dats
{
/*
* TODO: Make output standard width (HTML, without making the entire thing a table)
* TODO: Multithreading? Either StringBuilder or locking
*/
public partial class DatFile
{
#region Instance Methods
#region Statistics
/// <summary>
/// Recalculate the statistics for the Dat
/// </summary>
public void RecalculateStats()
{
// Wipe out any stats already there
RomCount = 0;
DiskCount = 0;
TotalSize = 0;
CRCCount = 0;
MD5Count = 0;
SHA1Count = 0;
SHA256Count = 0;
BaddumpCount = 0;
NodumpCount = 0;
// If we have a blank Dat in any way, return
if (this == null || Count == 0)
{
return;
}
// Loop through and add
List<string> keys = Keys.ToList();
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = this[key];
Parallel.ForEach(items, Globals.ParallelOptions, item =>
{
switch (item.Type)
{
case ItemType.Archive:
break;
case ItemType.BiosSet:
break;
case ItemType.Disk:
Disk disk = (Disk)item;
DiskCount += 1;
MD5Count += (String.IsNullOrEmpty(disk.MD5) ? 0 : 1);
SHA1Count += (String.IsNullOrEmpty(disk.SHA1) ? 0 : 1);
SHA256Count += (String.IsNullOrEmpty(disk.SHA256) ? 0 : 1);
SHA384Count += (String.IsNullOrEmpty(disk.SHA384) ? 0 : 1);
SHA512Count += (String.IsNullOrEmpty(disk.SHA512) ? 0 : 1);
BaddumpCount += (disk.ItemStatus == ItemStatus.BadDump ? 1 : 0);
NodumpCount += (disk.ItemStatus == ItemStatus.Nodump ? 1 : 0);
break;
case ItemType.Release:
break;
case ItemType.Rom:
Rom rom = (Rom)item;
RomCount += 1;
TotalSize += (rom.ItemStatus == ItemStatus.Nodump ? 0 : rom.Size);
CRCCount += (String.IsNullOrEmpty(rom.CRC) ? 0 : 1);
MD5Count += (String.IsNullOrEmpty(rom.MD5) ? 0 : 1);
SHA1Count += (String.IsNullOrEmpty(rom.SHA1) ? 0 : 1);
SHA256Count += (String.IsNullOrEmpty(rom.SHA256) ? 0 : 1);
SHA384Count += (String.IsNullOrEmpty(rom.SHA384) ? 0 : 1);
SHA512Count += (String.IsNullOrEmpty(rom.SHA512) ? 0 : 1);
BaddumpCount += (rom.ItemStatus == ItemStatus.BadDump ? 1 : 0);
NodumpCount += (rom.ItemStatus == ItemStatus.Nodump ? 1 : 0);
break;
case ItemType.Sample:
break;
}
});
});
}
/// <summary>
/// Output the stats for the Dat in a human-readable format
/// </summary>
/// <param name="outputs">Dictionary representing the outputs</param>
/// <param name="statDatFormat">Set the statistics output format to use</param>
/// <param name="recalculate">True if numbers should be recalculated for the DAT, false otherwise (default)</param>
/// <param name="game">Number of games to use, -1 means recalculate games (default)</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise (default)</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise (default)</param>
public void OutputStats(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat,
bool recalculate = false, long game = -1, bool baddumpCol = false, bool nodumpCol = false)
{
// If we're supposed to recalculate the statistics, do so
if (recalculate)
{
RecalculateStats();
}
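// Re-bucket by game name so that Keys holds one entry per game for the counts below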
BucketBy(SortedBy.Game, false /* mergeroms */, norename: true);
if (TotalSize < 0)
{
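// A negative total suggests the signed 64-bit accumulator wrapped around; this shifts it back into positive range as a rough correction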
TotalSize = Int64.MaxValue + TotalSize;
}
// Log the results to screen
string results = @"For '" + FileName + @"':
--------------------------------------------------
Uncompressed size: " + Style.GetBytesReadable(TotalSize) + @"
Games found: " + (game == -1 ? Keys.Count() : game) + @"
Roms found: " + RomCount + @"
Disks found: " + DiskCount + @"
Roms with CRC: " + CRCCount + @"
Roms with MD5: " + MD5Count + @"
Roms with SHA-1: " + SHA1Count + @"
Roms with SHA-256: " + SHA256Count + @"
Roms with SHA-384: " + SHA384Count + @"
Roms with SHA-512: " + SHA512Count + "\n";
if (baddumpCol)
{
results += " Roms with BadDump status: " + BaddumpCount + "\n";
}
if (nodumpCol)
{
results += " Roms with Nodump status: " + NodumpCount + "\n";
}
// For spacing between DATs
results += "\n\n";
Globals.Logger.User(results);
// Now write it out to file as well
string line = "";
if (outputs.ContainsKey(StatDatFormat.None))
{
line = @"'" + FileName + @"':
--------------------------------------------------
Uncompressed size: " + Style.GetBytesReadable(TotalSize) + @"
Games found: " + (game == -1 ? Keys.Count() : game) + @"
Roms found: " + RomCount + @"
Disks found: " + DiskCount + @"
Roms with CRC: " + CRCCount + @"
Roms with MD5: " + MD5Count + @"
Roms with SHA-1: " + SHA1Count + @"
Roms with SHA-256: " + SHA256Count + @"
Roms with SHA-384: " + SHA384Count + @"
Roms with SHA-512: " + SHA512Count + "\n";
if (baddumpCol)
{
line += " Roms with BadDump status: " + BaddumpCount + "\n";
}
if (nodumpCol)
{
line += " Roms with Nodump status: " + NodumpCount + "\n";
}
// For spacing between DATs
line += "\n\n";
outputs[StatDatFormat.None].Write(line);
}
if (outputs.ContainsKey(StatDatFormat.CSV))
{
line = "\"" + FileName + "\","
+ "\"" + TotalSize + "\","
+ "\"" + (game == -1 ? Keys.Count() : game) + "\","
+ "\"" + RomCount + "\","
+ "\"" + DiskCount + "\","
+ "\"" + CRCCount + "\","
+ "\"" + MD5Count + "\","
+ "\"" + SHA1Count + "\","
+ "\"" + SHA256Count + "\","
+ "\"" + SHA384Count + "\","
+ "\"" + SHA512Count + "\"";
if (baddumpCol)
{
line += ",\"" + BaddumpCount + "\"";
}
if (nodumpCol)
{
line += ",\"" + NodumpCount + "\"";
}
line += "\n";
outputs[StatDatFormat.CSV].Write(line);
}
if (outputs.ContainsKey(StatDatFormat.HTML))
{
line = "\t\t\t<tr" + (FileName.StartsWith("DIR: ")
? " class=\"dir\"><td>" + HttpUtility.HtmlEncode(FileName.Remove(0, 5))
: "><td>" + HttpUtility.HtmlEncode(FileName)) + "</td>"
+ "<td align=\"right\">" + Style.GetBytesReadable(TotalSize) + "</td>"
+ "<td align=\"right\">" + (game == -1 ? Keys.Count() : game) + "</td>"
+ "<td align=\"right\">" + RomCount + "</td>"
+ "<td align=\"right\">" + DiskCount + "</td>"
+ "<td align=\"right\">" + CRCCount + "</td>"
+ "<td align=\"right\">" + MD5Count + "</td>"
+ "<td align=\"right\">" + SHA1Count + "</td>"
+ "<td align=\"right\">" + SHA256Count + "</td>";
if (baddumpCol)
{
line += "<td align=\"right\">" + BaddumpCount + "</td>";
}
if (nodumpCol)
{
line += "<td align=\"right\">" + NodumpCount + "</td>";
}
line += "</tr>\n";
outputs[StatDatFormat.HTML].Write(line);
}
if (outputs.ContainsKey(StatDatFormat.TSV))
{
line = "\"" + FileName + "\"\t"
+ "\"" + TotalSize + "\"\t"
+ "\"" + (game == -1 ? Keys.Count() : game) + "\"\t"
+ "\"" + RomCount + "\"\t"
+ "\"" + DiskCount + "\"\t"
+ "\"" + CRCCount + "\"\t"
+ "\"" + MD5Count + "\"\t"
+ "\"" + SHA1Count + "\"\t"
+ "\"" + SHA256Count + "\"\t"
+ "\"" + SHA384Count + "\"\t"
+ "\"" + SHA512Count + "\"";
if (baddumpCol)
{
line += "\t\"" + BaddumpCount + "\"";
}
if (nodumpCol)
{
line += "\t\"" + NodumpCount + "\"";
}
line += "\n";
outputs[StatDatFormat.TSV].Write(line);
}
}
#endregion
#endregion // Instance Methods
#region Static Methods
#region Statistics
/// <summary>
/// Output the stats for a list of input dats as files in a human-readable format
/// </summary>
/// <param name="inputs">List of input files and folders</param>
/// <param name="reportName">Name of the output file</param>
/// <param name="single">True if single DAT stats are output, false otherwise</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
/// <param name="statDatFormat" > Set the statistics output format to use</param>
public static void OutputStats(List<string> inputs, string reportName, string outDir, bool single,
bool baddumpCol, bool nodumpCol, StatDatFormat statDatFormat)
{
// If there's no output format, set the default
if (statDatFormat == 0x0)
{
statDatFormat = StatDatFormat.None;
}
			// Normalize the output directory and default the report name
if (String.IsNullOrEmpty(outDir))
{
outDir = Environment.CurrentDirectory;
}
if (String.IsNullOrEmpty(reportName))
{
reportName = "report";
}
outDir = Path.GetFullPath(outDir);
// Get the dictionary of desired outputs
Dictionary<StatDatFormat, StreamWriter> outputs = OutputStatsGetOutputWriters(statDatFormat, reportName, outDir);
// Make sure we have all files
List<Tuple<string, string>> newinputs = new List<Tuple<string, string>>(); // item, basepath
Parallel.ForEach(inputs, Globals.ParallelOptions, input =>
{
if (File.Exists(input))
{
lock (newinputs)
{
newinputs.Add(Tuple.Create(Path.GetFullPath(input), Path.GetDirectoryName(Path.GetFullPath(input))));
}
}
if (Directory.Exists(input))
{
foreach (string file in Directory.GetFiles(input, "*", SearchOption.AllDirectories))
{
lock (newinputs)
{
newinputs.Add(Tuple.Create(Path.GetFullPath(file), Path.GetFullPath(input)));
}
}
}
});
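			// Order by directory, then file name, so per-directory stats are accumulated over consecutive entries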
newinputs = newinputs
.OrderBy(i => Path.GetDirectoryName(i.Item1))
.ThenBy(i => Path.GetFileName(i.Item1))
.ToList();
// Write the header, if any
OutputStatsWriteHeader(outputs, statDatFormat, baddumpCol, nodumpCol);
// Init all total variables
long totalSize = 0;
long totalGame = 0;
long totalRom = 0;
long totalDisk = 0;
long totalCRC = 0;
long totalMD5 = 0;
long totalSHA1 = 0;
long totalSHA256 = 0;
long totalBaddump = 0;
long totalNodump = 0;
// Init directory-level variables
string lastdir = null;
string basepath = null;
long dirSize = 0;
long dirGame = 0;
long dirRom = 0;
long dirDisk = 0;
long dirCRC = 0;
long dirMD5 = 0;
long dirSHA1 = 0;
long dirSHA256 = 0;
long dirBaddump = 0;
long dirNodump = 0;
// Now process each of the input files
foreach (Tuple<string, string> filename in newinputs)
{
// Get the directory for the current file
string thisdir = Path.GetDirectoryName(filename.Item1);
basepath = Path.GetDirectoryName(filename.Item2);
// If we don't have the first file and the directory has changed, show the previous directory stats and reset
if (lastdir != null && thisdir != lastdir)
{
// Output separator if needed
OutputStatsWriteMidSeparator(outputs, statDatFormat, baddumpCol, nodumpCol);
DatFile lastdirdat = new DatFile
{
FileName = "DIR: " + HttpUtility.HtmlEncode(lastdir.Remove(0, basepath.Length + (basepath.Length == 0 ? 0 : 1))),
TotalSize = dirSize,
RomCount = dirRom,
DiskCount = dirDisk,
CRCCount = dirCRC,
MD5Count = dirMD5,
SHA1Count = dirSHA1,
SHA256Count = dirSHA256,
BaddumpCount = dirBaddump,
NodumpCount = dirNodump,
};
lastdirdat.OutputStats(outputs, statDatFormat,
game: dirGame, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
// Write the mid-footer, if any
OutputStatsWriteMidFooter(outputs, statDatFormat, baddumpCol, nodumpCol);
// Write the header, if any
OutputStatsWriteMidHeader(outputs, statDatFormat, baddumpCol, nodumpCol);
// Reset the directory stats
dirSize = 0;
dirGame = 0;
dirRom = 0;
dirDisk = 0;
dirCRC = 0;
dirMD5 = 0;
dirSHA1 = 0;
dirSHA256 = 0;
dirBaddump = 0;
dirNodump = 0;
}
Globals.Logger.Verbose("Beginning stat collection for '" + filename.Item1 + "'", false);
List<string> games = new List<string>();
DatFile datdata = new DatFile();
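				// Parse the DAT into memory; the two zero arguments are presumably the system and source IDs used elsewhere in this codebase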
datdata.Parse(filename.Item1, 0, 0);
datdata.BucketBy(SortedBy.Game, false /* mergeroms */, norename: true);
// Output single DAT stats (if asked)
Globals.Logger.User("Adding stats for file '" + filename.Item1 + "'\n", false);
if (single)
{
datdata.OutputStats(outputs, statDatFormat,
baddumpCol: baddumpCol, nodumpCol: nodumpCol);
}
// Add single DAT stats to dir
dirSize += datdata.TotalSize;
dirGame += datdata.Keys.Count();
dirRom += datdata.RomCount;
dirDisk += datdata.DiskCount;
dirCRC += datdata.CRCCount;
dirMD5 += datdata.MD5Count;
dirSHA1 += datdata.SHA1Count;
dirSHA256 += datdata.SHA256Count;
dirBaddump += datdata.BaddumpCount;
dirNodump += datdata.NodumpCount;
// Add single DAT stats to totals
totalSize += datdata.TotalSize;
totalGame += datdata.Keys.Count();
totalRom += datdata.RomCount;
totalDisk += datdata.DiskCount;
totalCRC += datdata.CRCCount;
totalMD5 += datdata.MD5Count;
totalSHA1 += datdata.SHA1Count;
totalSHA256 += datdata.SHA256Count;
totalBaddump += datdata.BaddumpCount;
totalNodump += datdata.NodumpCount;
// Make sure to assign the new directory
lastdir = thisdir;
}
// Output the directory stats one last time
OutputStatsWriteMidSeparator(outputs, statDatFormat, baddumpCol, nodumpCol);
if (single)
{
DatFile dirdat = new DatFile
{
FileName = "DIR: " + HttpUtility.HtmlEncode(lastdir.Remove(0, basepath.Length + (basepath.Length == 0 ? 0 : 1))),
TotalSize = dirSize,
RomCount = dirRom,
DiskCount = dirDisk,
CRCCount = dirCRC,
MD5Count = dirMD5,
SHA1Count = dirSHA1,
SHA256Count = dirSHA256,
BaddumpCount = dirBaddump,
NodumpCount = dirNodump,
};
dirdat.OutputStats(outputs, statDatFormat,
game: dirGame, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
}
// Write the mid-footer, if any
OutputStatsWriteMidFooter(outputs, statDatFormat, baddumpCol, nodumpCol);
// Write the header, if any
OutputStatsWriteMidHeader(outputs, statDatFormat, baddumpCol, nodumpCol);
// Reset the directory stats
dirSize = 0;
dirGame = 0;
dirRom = 0;
dirDisk = 0;
dirCRC = 0;
dirMD5 = 0;
dirSHA1 = 0;
dirSHA256 = 0;
			dirBaddump = 0;
			dirNodump = 0;
// Output total DAT stats
DatFile totaldata = new DatFile
{
FileName = "DIR: All DATs",
TotalSize = totalSize,
RomCount = totalRom,
DiskCount = totalDisk,
CRCCount = totalCRC,
MD5Count = totalMD5,
SHA1Count = totalSHA1,
SHA256Count = totalSHA256,
BaddumpCount = totalBaddump,
NodumpCount = totalNodump,
};
totaldata.OutputStats(outputs, statDatFormat,
game: totalGame, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
// Output footer if needed
OutputStatsWriteFooter(outputs, statDatFormat);
// Flush and dispose of the stream writers
foreach (StatDatFormat format in outputs.Keys)
{
outputs[format].Flush();
outputs[format].Dispose();
}
Globals.Logger.User(@"
Please check the log folder if the stats scrolled offscreen", false);
}
/// <summary>
		/// Get the StreamWriters for each requested stat output format
		/// </summary>
		/// <param name="statDatFormat">StatDatFormat flags to get writers for</param>
/// <param name="reportName">Name of the input file to use</param>
/// <param name="outDir">Output path to use</param>
/// <returns>Dictionary of file types to StreamWriters</returns>
private static Dictionary<StatDatFormat, StreamWriter> OutputStatsGetOutputWriters(StatDatFormat statDatFormat, string reportName, string outDir)
{
Dictionary<StatDatFormat, StreamWriter> output = new Dictionary<StatDatFormat, StreamWriter>();
// First try to create the output directory if we need to
if (!Directory.Exists(outDir))
{
Directory.CreateDirectory(outDir);
}
// For each output format, get the appropriate stream writer
if ((statDatFormat & StatDatFormat.None) != 0)
{
reportName = Style.GetFileNameWithoutExtension(reportName) + ".txt";
reportName = Path.Combine(outDir, reportName);
// Create the StreamWriter for this file
output.Add(StatDatFormat.None, new StreamWriter(FileTools.TryCreate(reportName)));
}
if ((statDatFormat & StatDatFormat.CSV) != 0)
{
reportName = Style.GetFileNameWithoutExtension(reportName) + ".csv";
reportName = Path.Combine(outDir, reportName);
// Create the StreamWriter for this file
output.Add(StatDatFormat.CSV, new StreamWriter(FileTools.TryCreate(reportName)));
}
if ((statDatFormat & StatDatFormat.HTML) != 0)
{
reportName = Style.GetFileNameWithoutExtension(reportName) + ".html";
reportName = Path.Combine(outDir, reportName);
// Create the StreamWriter for this file
output.Add(StatDatFormat.HTML, new StreamWriter(FileTools.TryCreate(reportName)));
}
if ((statDatFormat & StatDatFormat.TSV) != 0)
{
				reportName = Style.GetFileNameWithoutExtension(reportName) + ".tsv";
reportName = Path.Combine(outDir, reportName);
// Create the StreamWriter for this file
output.Add(StatDatFormat.TSV, new StreamWriter(FileTools.TryCreate(reportName)));
}
return output;
}
/// <summary>
/// Write out the header to the stream, if any exists
/// </summary>
/// <param name="outputs">Dictionary representing the outputs</param>
/// <param name="statDatFormat">StatDatFormat representing output format</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
private static void OutputStatsWriteHeader(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat, bool baddumpCol, bool nodumpCol)
{
if (outputs.ContainsKey(StatDatFormat.None))
{
// Nothing
}
if (outputs.ContainsKey(StatDatFormat.CSV))
{
				outputs[StatDatFormat.CSV].Write("\"File Name\",\"Total Size\",\"Games\",\"Roms\",\"Disks\",\"# with CRC\",\"# with MD5\",\"# with SHA-1\",\"# with SHA-256\",\"# with SHA-384\",\"# with SHA-512\""
					+ (baddumpCol ? ",\"BadDumps\"" : "") + (nodumpCol ? ",\"Nodumps\"" : "") + "\n");
}
if (outputs.ContainsKey(StatDatFormat.HTML))
{
outputs[StatDatFormat.HTML].Write(@"<!DOCTYPE html>
<html>
<head>
<title>DAT Statistics Report</title>
<style>
body {
background-color: lightgray;
}
.dir {
color: #0088FF;
}
.right {
		text-align: right;
}
</style>
</head>
<body>
<h2>DAT Statistics Report (" + DateTime.Now.ToShortDateString() + @")</h2>
<table border=""1"" cellpadding=""5"" cellspacing=""0"">
");
}
if (outputs.ContainsKey(StatDatFormat.TSV))
{
				outputs[StatDatFormat.TSV].Write("\"File Name\"\t\"Total Size\"\t\"Games\"\t\"Roms\"\t\"Disks\"\t\"# with CRC\"\t\"# with MD5\"\t\"# with SHA-1\"\t\"# with SHA-256\"\t\"# with SHA-384\"\t\"# with SHA-512\""
					+ (baddumpCol ? "\t\"BadDumps\"" : "") + (nodumpCol ? "\t\"Nodumps\"" : "") + "\n");
}
// Now write the mid header for those who need it
OutputStatsWriteMidHeader(outputs, statDatFormat, baddumpCol, nodumpCol);
}
/// <summary>
/// Write out the mid-header to the stream, if any exists
/// </summary>
/// <param name="outputs">Dictionary representing the outputs</param>
/// <param name="statDatFormat">StatDatFormat representing output format</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
private static void OutputStatsWriteMidHeader(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat, bool baddumpCol, bool nodumpCol)
{
if (outputs.ContainsKey(StatDatFormat.None))
{
// Nothing
}
if (outputs.ContainsKey(StatDatFormat.CSV))
{
// Nothing
}
if (outputs.ContainsKey(StatDatFormat.HTML))
{
outputs[StatDatFormat.HTML].Write(@" <tr bgcolor=""gray""><th>File Name</th><th align=""right"">Total Size</th><th align=""right"">Games</th><th align=""right"">Roms</th>"
+ @"<th align=""right"">Disks</th><th align=""right"">&#35; with CRC</th><th align=""right"">&#35; with MD5</th><th align=""right"">&#35; with SHA-1</th><th align=""right"">&#35; with SHA-256</th>"
+ (baddumpCol ? "<th class=\".right\">Baddumps</th>" : "") + (nodumpCol ? "<th class=\".right\">Nodumps</th>" : "") + "</tr>\n");
}
if (outputs.ContainsKey(StatDatFormat.TSV))
{
// Nothing
}
}
/// <summary>
/// Write out the separator to the stream, if any exists
/// </summary>
/// <param name="outputs">Dictionary representing the outputs</param>
/// <param name="statDatFormat">StatDatFormat representing output format</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
private static void OutputStatsWriteMidSeparator(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat, bool baddumpCol, bool nodumpCol)
{
if (outputs.ContainsKey(StatDatFormat.None))
{
// Nothing
}
if (outputs.ContainsKey(StatDatFormat.CSV))
{
// Nothing
}
if (outputs.ContainsKey(StatDatFormat.HTML))
{
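				// Emit an empty spacer row, widening the colspan by one for each optional baddump/nodump column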
outputs[StatDatFormat.HTML].Write("<tr><td colspan=\""
+ (baddumpCol && nodumpCol
? "12"
: (baddumpCol ^ nodumpCol
? "11"
: "10")
)
+ "\"></td></tr>\n");
}
if (outputs.ContainsKey(StatDatFormat.TSV))
{
// Nothing
}
}
/// <summary>
/// Write out the footer-separator to the stream, if any exists
/// </summary>
/// <param name="outputs">Dictionary representing the outputs</param>
/// <param name="statDatFormat">StatDatFormat representing output format</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
private static void OutputStatsWriteMidFooter(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat, bool baddumpCol, bool nodumpCol)
{
if (outputs.ContainsKey(StatDatFormat.None))
{
outputs[StatDatFormat.None].Write("\n");
}
if (outputs.ContainsKey(StatDatFormat.CSV))
{
outputs[StatDatFormat.CSV].Write("\n");
}
if (outputs.ContainsKey(StatDatFormat.HTML))
{
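				// Emit a borderless spacer row between sections, widening the colspan by one for each optional baddump/nodump column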
outputs[StatDatFormat.HTML].Write("<tr border=\"0\"><td colspan=\""
+ (baddumpCol && nodumpCol
? "12"
: (baddumpCol ^ nodumpCol
? "11"
: "10")
)
+ "\"></td></tr>\n");
}
if (outputs.ContainsKey(StatDatFormat.TSV))
{
outputs[StatDatFormat.TSV].Write("\n");
}
}
/// <summary>
/// Write out the footer to the stream, if any exists
/// </summary>
/// <param name="sw">StreamWriter representing the output</param>
/// <param name="statDatFormat">StatDatFormat representing output format</param>
private static void OutputStatsWriteFooter(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat)
{
if (outputs.ContainsKey(StatDatFormat.None))
{
// Nothing
}
if (outputs.ContainsKey(StatDatFormat.CSV))
{
// Nothing
}
if (outputs.ContainsKey(StatDatFormat.HTML))
{
outputs[StatDatFormat.HTML].Write(@" </table>
</body>
</html>
");
}
if (outputs.ContainsKey(StatDatFormat.TSV))
{
// Nothing
}
}
#endregion
#endregion // Static Methods
}
}
