using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using System.Web;
using SabreTools.Library.Data;
using SabreTools.Library.FileTypes;
using SabreTools.Library.DatItems;
using SabreTools.Library.Reports;
using SabreTools.Library.Skippers;
using SabreTools.Library.Tools;
#if MONO
using System.IO;
#else
using Alphaleonis.Win32.Filesystem;
using FileStream = System.IO.FileStream;
using IOException = System.IO.IOException;
using MemoryStream = System.IO.MemoryStream;
using SearchOption = System.IO.SearchOption;
using SeekOrigin = System.IO.SeekOrigin;
using Stream = System.IO.Stream;
#endif
using NaturalSort;
namespace SabreTools.Library.DatFiles
{
/// <summary>
/// Represents a format-agnostic DAT
/// </summary>
public class DatFile
{
#region Private instance variables

// Internal DatHeader values; lazily recreated by EnsureDatHeader()
internal DatHeader _datHeader = new DatHeader();

// DatItems dictionary, keyed by the current bucketing key (game name, hash, etc.)
// Generic arguments restored: keys are strings, values are lists of DatItem
// (see the indexer, EnsureKey, and Add(string, DatItem) for the usage that fixes this)
internal SortedDictionary<string, List<DatItem>> _items = new SortedDictionary<string, List<DatItem>>();

// Internal statistical data, kept in sync by the Add/Remove accessors
internal DatStats _datStats = new DatStats();

#endregion
#region Publicly facing variables
#region Data common to most DAT types

/// <summary>
/// External name of the DAT
/// </summary>
public string FileName
{
    get { EnsureDatHeader(); return _datHeader.FileName; }
    set { EnsureDatHeader(); _datHeader.FileName = value; }
}

/// <summary>
/// Internal name of the DAT
/// </summary>
public string Name
{
    get { EnsureDatHeader(); return _datHeader.Name; }
    set { EnsureDatHeader(); _datHeader.Name = value; }
}

/// <summary>
/// DAT description
/// </summary>
public string Description
{
    get { EnsureDatHeader(); return _datHeader.Description; }
    set { EnsureDatHeader(); _datHeader.Description = value; }
}

/// <summary>
/// Root directory for the files; currently TruRip/EmuARC-exclusive
/// </summary>
public string RootDir
{
    get { EnsureDatHeader(); return _datHeader.RootDir; }
    set { EnsureDatHeader(); _datHeader.RootDir = value; }
}

/// <summary>
/// General category of items found in the DAT
/// </summary>
public string Category
{
    get { EnsureDatHeader(); return _datHeader.Category; }
    set { EnsureDatHeader(); _datHeader.Category = value; }
}

/// <summary>
/// Version of the DAT
/// </summary>
public string Version
{
    get { EnsureDatHeader(); return _datHeader.Version; }
    set { EnsureDatHeader(); _datHeader.Version = value; }
}

/// <summary>
/// Creation or modification date
/// </summary>
public string Date
{
    get { EnsureDatHeader(); return _datHeader.Date; }
    set { EnsureDatHeader(); _datHeader.Date = value; }
}

/// <summary>
/// List of authors who contributed to the DAT
/// </summary>
public string Author
{
    get { EnsureDatHeader(); return _datHeader.Author; }
    set { EnsureDatHeader(); _datHeader.Author = value; }
}

/// <summary>
/// Email address for DAT author(s)
/// </summary>
public string Email
{
    get { EnsureDatHeader(); return _datHeader.Email; }
    set { EnsureDatHeader(); _datHeader.Email = value; }
}

/// <summary>
/// Author or distribution homepage name
/// </summary>
public string Homepage
{
    get { EnsureDatHeader(); return _datHeader.Homepage; }
    set { EnsureDatHeader(); _datHeader.Homepage = value; }
}

/// <summary>
/// Author or distribution URL
/// </summary>
public string Url
{
    get { EnsureDatHeader(); return _datHeader.Url; }
    set { EnsureDatHeader(); _datHeader.Url = value; }
}

/// <summary>
/// Any comment that does not already fit an existing field
/// </summary>
public string Comment
{
    get { EnsureDatHeader(); return _datHeader.Comment; }
    set { EnsureDatHeader(); _datHeader.Comment = value; }
}

/// <summary>
/// Header skipper to be used when loading the DAT
/// </summary>
public string Header
{
    get { EnsureDatHeader(); return _datHeader.Header; }
    set { EnsureDatHeader(); _datHeader.Header = value; }
}

/// <summary>
/// Classification of the DAT. Generally only used for SuperDAT
/// </summary>
public string Type
{
    get { EnsureDatHeader(); return _datHeader.Type; }
    set { EnsureDatHeader(); _datHeader.Type = value; }
}

/// <summary>
/// Force a merging style when loaded
/// </summary>
public ForceMerging ForceMerging
{
    get { EnsureDatHeader(); return _datHeader.ForceMerging; }
    set { EnsureDatHeader(); _datHeader.ForceMerging = value; }
}

/// <summary>
/// Force nodump handling when loaded
/// </summary>
public ForceNodump ForceNodump
{
    get { EnsureDatHeader(); return _datHeader.ForceNodump; }
    set { EnsureDatHeader(); _datHeader.ForceNodump = value; }
}

/// <summary>
/// Force output packing when loaded
/// </summary>
public ForcePacking ForcePacking
{
    get { EnsureDatHeader(); return _datHeader.ForcePacking; }
    set { EnsureDatHeader(); _datHeader.ForcePacking = value; }
}

/// <summary>
/// Read or write format
/// </summary>
public DatFormat DatFormat
{
    get { EnsureDatHeader(); return _datHeader.DatFormat; }
    set { EnsureDatHeader(); _datHeader.DatFormat = value; }
}

/// <summary>
/// List of fields in machine and items to exclude from writing
/// </summary>
public bool[] ExcludeFields
{
    get { EnsureDatHeader(); return _datHeader.ExcludeFields; }
    set { EnsureDatHeader(); _datHeader.ExcludeFields = value; }
}

/// <summary>
/// Enable "One Rom, One Region (1G1R)" mode
/// </summary>
public bool OneRom
{
    get { EnsureDatHeader(); return _datHeader.OneRom; }
    set { EnsureDatHeader(); _datHeader.OneRom = value; }
}

/// <summary>
/// Keep machines that don't contain any items
/// </summary>
public bool KeepEmptyGames
{
    get { EnsureDatHeader(); return _datHeader.KeepEmptyGames; }
    set { EnsureDatHeader(); _datHeader.KeepEmptyGames = value; }
}

/// <summary>
/// Remove scene dates from the beginning of machine names
/// </summary>
public bool SceneDateStrip
{
    get { EnsureDatHeader(); return _datHeader.SceneDateStrip; }
    set { EnsureDatHeader(); _datHeader.SceneDateStrip = value; }
}

/// <summary>
/// Deduplicate items using the given method
/// </summary>
public DedupeType DedupeRoms
{
    get { EnsureDatHeader(); return _datHeader.DedupeRoms; }
    set { EnsureDatHeader(); _datHeader.DedupeRoms = value; }
}

/// <summary>
/// Strip hash types from items (read-only; set via the header directly)
/// </summary>
public Hash StripHash
{
    get { EnsureDatHeader(); return _datHeader.StripHash; }
}

/// <summary>
/// Determine the sorting key for all items; updated by BucketBy
/// </summary>
public SortedBy SortedBy { get; private set; }

/// <summary>
/// Determine merging type for all items; updated by BucketBy
/// </summary>
public DedupeType MergedBy { get; private set; }

#endregion
#region Write pre-processing

/// <summary>
/// Text to prepend to all outputted lines
/// </summary>
public string Prefix
{
    get { EnsureDatHeader(); return _datHeader.Prefix; }
    set { EnsureDatHeader(); _datHeader.Prefix = value; }
}

/// <summary>
/// Text to append to all outputted lines
/// </summary>
public string Postfix
{
    get { EnsureDatHeader(); return _datHeader.Postfix; }
    set { EnsureDatHeader(); _datHeader.Postfix = value; }
}

/// <summary>
/// Add a new extension to all items
/// </summary>
public string AddExtension
{
    get { EnsureDatHeader(); return _datHeader.AddExtension; }
    set { EnsureDatHeader(); _datHeader.AddExtension = value; }
}

/// <summary>
/// Replace all item extensions
/// </summary>
public string ReplaceExtension
{
    get { EnsureDatHeader(); return _datHeader.ReplaceExtension; }
    set { EnsureDatHeader(); _datHeader.ReplaceExtension = value; }
}

/// <summary>
/// Remove all item extensions
/// </summary>
public bool RemoveExtension
{
    get { EnsureDatHeader(); return _datHeader.RemoveExtension; }
    set { EnsureDatHeader(); _datHeader.RemoveExtension = value; }
}

/// <summary>
/// Romba output mode
/// </summary>
public bool Romba
{
    get { EnsureDatHeader(); return _datHeader.Romba; }
    set { EnsureDatHeader(); _datHeader.Romba = value; }
}

/// <summary>
/// Output the machine name
/// </summary>
public bool GameName
{
    get { EnsureDatHeader(); return _datHeader.GameName; }
    set { EnsureDatHeader(); _datHeader.GameName = value; }
}

/// <summary>
/// Wrap quotes around the entire line, sans prefix and postfix
/// </summary>
public bool Quotes
{
    get { EnsureDatHeader(); return _datHeader.Quotes; }
    set { EnsureDatHeader(); _datHeader.Quotes = value; }
}

#endregion
#region Data specific to the Miss DAT type

/// <summary>
/// Output the item name
/// </summary>
public bool UseRomName
{
    get { EnsureDatHeader(); return _datHeader.UseRomName; }
    set { EnsureDatHeader(); _datHeader.UseRomName = value; }
}

#endregion
#region Statistical data related to the DAT

/// <summary>
/// Statistics writing format
/// </summary>
public StatReportFormat ReportFormat
{
    get { EnsureDatStats(); return _datStats.ReportFormat; }
    set { EnsureDatStats(); _datStats.ReportFormat = value; }
}

/// <summary>
/// Overall item count
/// </summary>
public long Count
{
    get { EnsureDatStats(); return _datStats.Count; }
    private set { EnsureDatStats(); _datStats.Count = value; }
}

/// <summary>
/// Number of Archive items
/// </summary>
public long ArchiveCount
{
    get { EnsureDatStats(); return _datStats.ArchiveCount; }
    private set { EnsureDatStats(); _datStats.ArchiveCount = value; }
}

/// <summary>
/// Number of BiosSet items
/// </summary>
public long BiosSetCount
{
    get { EnsureDatStats(); return _datStats.BiosSetCount; }
    private set { EnsureDatStats(); _datStats.BiosSetCount = value; }
}

/// <summary>
/// Number of Disk items
/// </summary>
public long DiskCount
{
    get { EnsureDatStats(); return _datStats.DiskCount; }
    private set { EnsureDatStats(); _datStats.DiskCount = value; }
}

/// <summary>
/// Number of Release items
/// </summary>
public long ReleaseCount
{
    get { EnsureDatStats(); return _datStats.ReleaseCount; }
    private set { EnsureDatStats(); _datStats.ReleaseCount = value; }
}

/// <summary>
/// Number of Rom items
/// </summary>
public long RomCount
{
    get { EnsureDatStats(); return _datStats.RomCount; }
    private set { EnsureDatStats(); _datStats.RomCount = value; }
}

/// <summary>
/// Number of Sample items
/// </summary>
public long SampleCount
{
    get { EnsureDatStats(); return _datStats.SampleCount; }
    private set { EnsureDatStats(); _datStats.SampleCount = value; }
}

/// <summary>
/// Total uncompressed size
/// </summary>
public long TotalSize
{
    get { EnsureDatStats(); return _datStats.TotalSize; }
    private set { EnsureDatStats(); _datStats.TotalSize = value; }
}

/// <summary>
/// Number of items with a CRC hash
/// </summary>
public long CRCCount
{
    get { EnsureDatStats(); return _datStats.CRCCount; }
    private set { EnsureDatStats(); _datStats.CRCCount = value; }
}

/// <summary>
/// Number of items with an MD5 hash
/// </summary>
public long MD5Count
{
    get { EnsureDatStats(); return _datStats.MD5Count; }
    private set { EnsureDatStats(); _datStats.MD5Count = value; }
}

/// <summary>
/// Number of items with a SHA-1 hash
/// </summary>
public long SHA1Count
{
    get { EnsureDatStats(); return _datStats.SHA1Count; }
    private set { EnsureDatStats(); _datStats.SHA1Count = value; }
}

/// <summary>
/// Number of items with a SHA-256 hash
/// </summary>
public long SHA256Count
{
    get { EnsureDatStats(); return _datStats.SHA256Count; }
    private set { EnsureDatStats(); _datStats.SHA256Count = value; }
}

/// <summary>
/// Number of items with a SHA-384 hash
/// </summary>
public long SHA384Count
{
    get { EnsureDatStats(); return _datStats.SHA384Count; }
    private set { EnsureDatStats(); _datStats.SHA384Count = value; }
}

/// <summary>
/// Number of items with a SHA-512 hash
/// </summary>
public long SHA512Count
{
    get { EnsureDatStats(); return _datStats.SHA512Count; }
    private set { EnsureDatStats(); _datStats.SHA512Count = value; }
}

/// <summary>
/// Number of items with the baddump status
/// </summary>
public long BaddumpCount
{
    get { EnsureDatStats(); return _datStats.BaddumpCount; }
    private set { EnsureDatStats(); _datStats.BaddumpCount = value; }
}

/// <summary>
/// Number of items with the good status
/// </summary>
public long GoodCount
{
    get { EnsureDatStats(); return _datStats.GoodCount; }
    private set { EnsureDatStats(); _datStats.GoodCount = value; }
}

/// <summary>
/// Number of items with the nodump status
/// </summary>
public long NodumpCount
{
    get { EnsureDatStats(); return _datStats.NodumpCount; }
    private set { EnsureDatStats(); _datStats.NodumpCount = value; }
}

/// <summary>
/// Number of items with the verified status
/// </summary>
public long VerifiedCount
{
    get { EnsureDatStats(); return _datStats.VerifiedCount; }
    private set { EnsureDatStats(); _datStats.VerifiedCount = value; }
}

#endregion
#endregion
#region Instance Methods
#region Accessors
/// <summary>
/// Passthrough to access the file dictionary
/// </summary>
/// <param name="key">Key in the dictionary to reference</param>
/// <remarks>We don't want to allow direct setting of values because it bypasses the statistics</remarks>
public List<DatItem> this[string key]
{
    get
    {
        // Ensure the dictionary is created
        EnsureDictionary();

        lock (_items)
        {
            // Ensure the key exists so lookups never throw for a missing key
            EnsureKey(key);

            // Now return the live value list (not a copy)
            return _items[key];
        }
    }
}
/// <summary>
/// Add a new key to the file dictionary
/// </summary>
/// <param name="key">Key in the dictionary to add</param>
public void Add(string key)
{
    // Make sure the backing dictionary exists before touching it
    EnsureDictionary();

    // Creating the key (if missing) is the only work needed here
    lock (_items)
    {
        EnsureKey(key);
    }
}
/// <summary>
/// Add a value to the file dictionary
/// </summary>
/// <param name="key">Key in the dictionary to add to</param>
/// <param name="value">Value to add to the dictionary</param>
public void Add(string key, DatItem value)
{
    // Make sure the backing dictionary and the key both exist
    EnsureDictionary();
    Add(key);

    lock (_items)
    {
        // Store the item, then keep the statistics in sync with it
        _items[key].Add(value);
        _datStats.AddItem(value);
    }
}
/// <summary>
/// Add a range of values to the file dictionary
/// </summary>
/// <param name="key">Key in the dictionary to add to</param>
/// <param name="value">Values to add to the dictionary</param>
public void AddRange(string key, List<DatItem> value)
{
    // Ensure the dictionary is created
    EnsureDictionary();

    // Add the key, if necessary
    Add(key);

    lock (_items)
    {
        // Now add the values
        _items[key].AddRange(value);

        // Now update the statistics for each added item
        foreach (DatItem item in value)
        {
            _datStats.AddItem(item);
        }
    }
}
/// <summary>
/// Get if the file dictionary contains the key
/// </summary>
/// <param name="key">Key in the dictionary to check</param>
/// <returns>True if the key exists, false otherwise</returns>
public bool Contains(string key)
{
    // Make sure the backing dictionary exists before touching it
    EnsureDictionary();

    // Null keys can never be present, so short-circuit
    if (key == null)
        return false;

    lock (_items)
    {
        return _items.ContainsKey(key);
    }
}
/// <summary>
/// Get if the file dictionary contains the key and value
/// </summary>
/// <param name="key">Key in the dictionary to check</param>
/// <param name="value">Value in the dictionary to check</param>
/// <returns>True if the key exists and holds the value, false otherwise</returns>
public bool Contains(string key, DatItem value)
{
    // Make sure the backing dictionary exists before touching it
    EnsureDictionary();

    // Null keys can never be present, so short-circuit
    if (key == null)
        return false;

    lock (_items)
    {
        // Only probe the value list when the key itself exists
        return _items.ContainsKey(key) && _items[key].Contains(value);
    }
}
/// <summary>
/// Get the keys from the file dictionary
/// </summary>
/// <returns>List of the keys</returns>
public List<string> Keys
{
    get
    {
        // Ensure the dictionary is created
        EnsureDictionary();

        lock (_items)
        {
            // Return a snapshot copy so callers can mutate the dictionary while iterating
            return _items.Keys.Select(item => (string)item.Clone()).ToList();
        }
    }
}
/// <summary>
/// Remove a key from the file dictionary if it exists
/// </summary>
/// <param name="key">Key in the dictionary to remove</param>
public void Remove(string key)
{
    // Make sure the backing dictionary exists before touching it
    EnsureDictionary();

    // Nothing to do for an absent key
    if (!Contains(key))
        return;

    lock (_items)
    {
        // Back the statistics out before dropping the entries themselves
        foreach (DatItem item in _items[key])
            _datStats.RemoveItem(item);

        // Now drop the key from the dictionary
        _items.Remove(key);
    }
}
/// <summary>
/// Remove the first instance of a value from the file dictionary if it exists
/// </summary>
/// <param name="key">Key in the dictionary to remove from</param>
/// <param name="value">Value to remove from the dictionary</param>
public void Remove(string key, DatItem value)
{
    // Make sure the backing dictionary exists before touching it
    EnsureDictionary();

    // Nothing to do if the pair is absent
    if (!Contains(key, value))
        return;

    lock (_items)
    {
        // Back the statistics out, then drop the entry itself
        _datStats.RemoveItem(value);
        _items[key].Remove(value);
    }
}
/// <summary>
/// Remove a range of values from the file dictionary if they exist
/// </summary>
/// <param name="key">Key in the dictionary to remove from</param>
/// <param name="value">Values to remove from the dictionary</param>
public void RemoveRange(string key, List<DatItem> value)
{
    // Delegate per-item so statistics stay consistent for each removal
    foreach (DatItem item in value)
    {
        Remove(key, item);
    }
}
/// <summary>
/// Ensure the DatHeader exists, creating it lazily on first use
/// </summary>
private void EnsureDatHeader()
{
    // Replace a null header with a fresh default; otherwise keep the existing one
    _datHeader = _datHeader ?? new DatHeader();
}
/// <summary>
/// Ensure the DatStats exists, creating it lazily on first use
/// </summary>
private void EnsureDatStats()
{
    // Replace a null stats object with a fresh default; otherwise keep the existing one
    _datStats = _datStats ?? new DatStats();
}
/// <summary>
/// Ensure the items dictionary exists, creating it lazily on first use
/// </summary>
private void EnsureDictionary()
{
    // If the dictionary is null, create it
    if (_items == null)
    {
        _items = new SortedDictionary<string, List<DatItem>>();
    }
}
/// <summary>
/// Ensure the key exists in the items dictionary
/// </summary>
/// <param name="key">Key to ensure</param>
/// <remarks>Caller must hold the _items lock; this does not lock on its own</remarks>
private void EnsureKey(string key)
{
    // If the key is missing from the dictionary, add it with an empty list
    if (!_items.ContainsKey(key))
    {
        _items.Add(key, new List<DatItem>());
    }
}
#endregion
#region Bucketing
/// <summary>
/// Take the arbitrarily sorted Files Dictionary and convert to one sorted by a user-defined method
/// </summary>
/// <param name="bucketBy">SortedBy enum representing how to sort the individual items</param>
/// <param name="deduperoms">Dedupe type that should be used</param>
/// <param name="lower">True if the key should be lowercased (default), false otherwise</param>
/// <param name="norename">True if games should only be compared on game and file name, false if system and source are counted</param>
public void BucketBy(SortedBy bucketBy, DedupeType deduperoms, bool lower = true, bool norename = true)
{
    // If we have a situation where there's no dictionary or no keys at all, we skip
    if (_items == null || _items.Count == 0)
        return;

    // If the sorted type isn't the same, we want to sort the dictionary accordingly
    if (this.SortedBy != bucketBy)
    {
        Globals.Logger.User("Organizing roms by {0}", bucketBy);

        // Set the sorted type
        this.SortedBy = bucketBy;

        // Reset the merged type since this might change the merge
        this.MergedBy = DedupeType.None;

        // First do the initial sort of all of the roms inplace
        List<string> oldkeys = Keys;
        for (int k = 0; k < oldkeys.Count; k++)
        {
            string key = oldkeys[k];

            // Get the unsorted current list (live reference into the dictionary)
            List<DatItem> roms = this[key];

            // Now add each of the roms to their respective keys
            for (int i = 0; i < roms.Count; i++)
            {
                DatItem rom = roms[i];

                // We want to get the key most appropriate for the given sorting type
                string newkey = Utilities.GetKeyFromDatItem(rom, bucketBy, lower, norename);

                // If the key is different, move the item to the new key
                if (newkey != key)
                {
                    Add(newkey, rom);
                    Remove(key, rom);
                    i--; // Keep the index on the correct element since one was removed
                }
            }

            // If the key is now empty, remove it
            if (this[key].Count == 0)
                Remove(key);
        }
    }

    // If the merge type isn't the same, we want to merge the dictionary accordingly
    if (this.MergedBy != deduperoms)
    {
        Globals.Logger.User("Deduping roms by {0}", deduperoms);

        // Set the merged type
        this.MergedBy = deduperoms;

        List<string> keys = Keys;
        Parallel.ForEach(keys, Globals.ParallelOptions, key =>
        {
            // Get the possibly unsorted list
            List<DatItem> sortedlist = this[key];

            // Sort the list of items to be consistent
            DatItem.Sort(ref sortedlist, false);

            // If we're merging the roms, do so
            if (deduperoms == DedupeType.Full || (deduperoms == DedupeType.Game && bucketBy == SortedBy.Game))
            {
                sortedlist = DatItem.Merge(sortedlist);
            }

            // Add the list back to the dictionary
            Remove(key);
            AddRange(key, sortedlist);
        });
    }
    // If the merge type is the same, we want to sort the dictionary to be consistent
    else
    {
        List<string> keys = Keys;
        Parallel.ForEach(keys, Globals.ParallelOptions, key =>
        {
            // Get the possibly unsorted list
            List<DatItem> sortedlist = this[key];

            // Sort the list of items to be consistent
            DatItem.Sort(ref sortedlist, false);
        });
    }

    // Now clean up all empty keys
    CleanEmptyKeys();
}
/// <summary>
/// Take the arbitrarily sorted Files Dictionary and convert to one sorted by the highest available hash
/// </summary>
/// <param name="deduperoms">Dedupe type that should be used (default none)</param>
/// <param name="lower">True if the key should be lowercased (default), false otherwise</param>
/// <param name="norename">True if games should only be compared on game and file name, false if system and source are counted</param>
public void BucketByBestAvailable(DedupeType deduperoms = DedupeType.None, bool lower = true, bool norename = true)
{
    // Number of items that are supposed to carry hashes (nodumps may legitimately lack them)
    long expected = RomCount + DiskCount - NodumpCount;

    // Pick the strongest hash that every countable item has, falling back to CRC
    SortedBy bucketBy;
    if (expected == SHA512Count)
        bucketBy = SortedBy.SHA512;
    else if (expected == SHA384Count)
        bucketBy = SortedBy.SHA384;
    else if (expected == SHA256Count)
        bucketBy = SortedBy.SHA256;
    else if (expected == SHA1Count)
        bucketBy = SortedBy.SHA1;
    else if (expected == MD5Count)
        bucketBy = SortedBy.MD5;
    else
        bucketBy = SortedBy.CRC;

    BucketBy(bucketBy, deduperoms, lower, norename);
}
/// <summary>
/// Clean out all empty keys in the dictionary
/// </summary>
private void CleanEmptyKeys()
{
    // Iterate over a snapshot of the keys since Remove mutates the dictionary
    List<string> keys = Keys;
    foreach (string key in keys)
    {
        if (this[key].Count == 0)
        {
            Remove(key);
        }
    }
}
#endregion
#region Constructors
/// <summary>
/// Create a new, empty DatFile object
/// </summary>
public DatFile()
{
    _items = new SortedDictionary<string, List<DatItem>>();
}
/// <summary>
/// Create a new DatFile from an existing one
/// </summary>
/// <param name="datFile">DatFile to get the values from</param>
/// <param name="cloneHeader">True if only the header should be cloned (default), false if this should be a reference to another DatFile</param>
public DatFile(DatFile datFile, bool cloneHeader = true)
{
    if (cloneHeader)
    {
        // Take an independent copy of the header only; items are not carried over
        this._datHeader = (DatHeader)datFile._datHeader.Clone();
    }
    else
    {
        // Share all state with the source DatFile by reference
        this._datHeader = datFile._datHeader;
        this._items = datFile._items;
        this._datStats = datFile._datStats;
        this.SortedBy = datFile.SortedBy;
        this.MergedBy = datFile.MergedBy;
    }
}
/// <summary>
/// Create a new DatFile from an existing DatHeader
/// </summary>
/// <param name="datHeader">DatHeader to get the values from</param>
public DatFile(DatHeader datHeader)
{
    // Keep an independent copy so later edits don't touch the caller's header
    _datHeader = (DatHeader)datHeader.Clone();
}
#endregion
#region Converting and Updating
/// <summary>
/// Determine if input files should be merged, diffed, or processed individually
/// </summary>
/// <param name="inputPaths">Names of the input files and/or folders</param>
/// <param name="basePaths">Names of base files and/or folders</param>
/// <param name="outDir">Optional param for output directory</param>
/// <param name="updateMode">Non-zero flag for diffing mode, zero otherwise</param>
/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
/// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
/// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
/// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
/// <param name="descAsName">True to use game descriptions as the names, false otherwise (default)</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
/// <param name="replaceMode">ReplaceMode representing what should be updated [only for base replacement]</param>
/// <param name="onlySame">True if descriptions should only be replaced if the game name is the same, false otherwise [only for base replacement]</param>
public void DetermineUpdateType(List<string> inputPaths, List<string> basePaths, string outDir, UpdateMode updateMode, bool inplace, bool skip,
    bool clean, bool remUnicode, bool descAsName, Filter filter, SplitType splitType, ReplaceMode replaceMode, bool onlySame)
{
    // Ensure we only have files in the inputs
    List<string> inputFileNames = Utilities.GetOnlyFilesFromInputs(inputPaths, appendparent: true);
    List<string> baseFileNames = Utilities.GetOnlyFilesFromInputs(basePaths);

    // If we're in standard update mode, run through all of the inputs
    if (updateMode == UpdateMode.None)
    {
        Update(inputFileNames, outDir, inplace, clean, remUnicode, descAsName, filter, splitType);
        return;
    }

    // Reverse inputs if we're in a required mode
    if ((updateMode & UpdateMode.DiffReverseCascade) != 0)
    {
        inputFileNames.Reverse();
    }
    if ((updateMode & UpdateMode.ReverseBaseReplace) != 0)
    {
        baseFileNames.Reverse();
    }

    // If we're in merging mode
    if ((updateMode & UpdateMode.Merge) != 0)
    {
        // Populate the combined data and get the headers
        List<DatFile> datHeaders = PopulateUserData(inputFileNames, inplace, clean, remUnicode, descAsName, outDir, filter, splitType);
        MergeNoDiff(inputFileNames, datHeaders, outDir);
    }
    // If we have one of the standard diffing modes
    else if ((updateMode & UpdateMode.DiffDupesOnly) != 0
        || (updateMode & UpdateMode.DiffNoDupesOnly) != 0
        || (updateMode & UpdateMode.DiffIndividualsOnly) != 0)
    {
        // Populate the combined data
        PopulateUserData(inputFileNames, inplace, clean, remUnicode, descAsName, outDir, filter, splitType);
        DiffNoCascade(inputFileNames, outDir, filter, updateMode);
    }
    // If we have one of the cascaded diffing modes
    else if ((updateMode & UpdateMode.DiffCascade) != 0
        || (updateMode & UpdateMode.DiffReverseCascade) != 0)
    {
        // Populate the combined data and get the headers
        List<DatFile> datHeaders = PopulateUserData(inputFileNames, inplace, clean, remUnicode, descAsName, outDir, filter, splitType);
        DiffCascade(inputFileNames, datHeaders, outDir, inplace, skip);
    }
    // If we have diff against mode
    else if ((updateMode & UpdateMode.DiffAgainst) != 0)
    {
        // Populate the combined data from the base files, then diff the inputs against it
        PopulateUserData(baseFileNames, inplace, clean, remUnicode, descAsName, outDir, filter, splitType);
        DiffAgainst(inputFileNames, outDir, inplace, clean, remUnicode, descAsName, filter, splitType);
    }
    // If we have one of the base replacement modes
    else if ((updateMode & UpdateMode.BaseReplace) != 0
        || (updateMode & UpdateMode.ReverseBaseReplace) != 0)
    {
        // Populate the combined data from the base files, then replace from it
        PopulateUserData(baseFileNames, inplace, clean, remUnicode, descAsName, outDir, filter, splitType);
        BaseReplace(inputFileNames, outDir, inplace, clean, remUnicode, descAsName, filter, splitType, replaceMode, onlySame);
    }

    return;
}
/// <summary>
/// Populate the user DatData object from the input files
/// </summary>
/// <param name="inputs">Paths to DATs to parse</param>
/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
/// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
/// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
/// <param name="descAsName">True to use game descriptions as the names, false otherwise (default)</param>
/// <param name="outDir">Optional param for output directory</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
/// <returns>List of DatFile objects representing headers</returns>
private List<DatFile> PopulateUserData(List<string> inputs, bool inplace, bool clean, bool remUnicode, bool descAsName,
    string outDir, Filter filter, SplitType splitType)
{
    DatFile[] datHeaders = new DatFile[inputs.Count];
    InternalStopwatch watch = new InternalStopwatch("Processing individual DATs");

    // Parse all of the DATs into their own DatFiles in the array
    Parallel.For(0, inputs.Count, Globals.ParallelOptions, i =>
    {
        string input = inputs[i];

        // Inputs are stored as "path¬parent"; only the path portion is user-facing
        Globals.Logger.User("Adding DAT: {0}", input.Split('¬')[0]);

        datHeaders[i] = new DatFile()
        {
            DatFormat = (this.DatFormat != 0 ? this.DatFormat : 0),

            // Filtering that needs to be copied over
            ExcludeFields = (bool[])this.ExcludeFields.Clone(),
            OneRom = this.OneRom,
            KeepEmptyGames = this.KeepEmptyGames,
            SceneDateStrip = this.SceneDateStrip,
            DedupeRoms = this.DedupeRoms,
            Prefix = this.Prefix,
            Postfix = this.Postfix,
            AddExtension = this.AddExtension,
            ReplaceExtension = this.ReplaceExtension,
            RemoveExtension = this.RemoveExtension,
            Romba = this.Romba,
            GameName = this.GameName,
            Quotes = this.Quotes,
            UseRomName = this.UseRomName,
        };

        datHeaders[i].Parse(input, i, i, splitType, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName);
    });

    watch.Stop();

    watch.Start("Populating internal DAT");
    Parallel.For(0, inputs.Count, Globals.ParallelOptions, i =>
    {
        // Get the list of keys from the DAT
        List<string> keys = datHeaders[i].Keys;
        foreach (string key in keys)
        {
            // Add everything from the key to the internal DAT
            AddRange(key, datHeaders[i][key]);

            // Now remove the key from the source DAT
            datHeaders[i].Remove(key);
        }

        // Now remove the file dictionary from the source DAT to save memory
        datHeaders[i].DeleteDictionary();
    });

    // Now that we have a merged DAT, filter it
    filter.FilterDatFile(this);

    watch.Stop();

    return datHeaders.ToList();
}
/// <summary>
/// Replace item values from the base set represented by the current DAT
/// </summary>
/// <param name="inputFileNames">Names of the input files</param>
/// <param name="outDir">Optional param for output directory</param>
/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
/// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
/// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
/// <param name="descAsName">True to allow SL DATs to have game names used instead of descriptions, false otherwise (default)</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
/// <param name="replaceMode">ReplaceMode representing what should be updated</param>
/// <param name="onlySame">True if descriptions should only be replaced if the game name is the same, false otherwise</param>
public void BaseReplace(List<string> inputFileNames, string outDir, bool inplace, bool clean, bool remUnicode,
    bool descAsName, Filter filter, SplitType splitType, ReplaceMode replaceMode, bool onlySame)
{
    // We want to try to replace each item in each input DAT from the base
    foreach (string path in inputFileNames)
    {
        // Inputs are stored as "path¬parent"; only the path portion is user-facing
        Globals.Logger.User("Replacing items in '{0}' from the base DAT", path.Split('¬')[0]);

        // First we parse in the DAT internally, carrying over the current write settings
        DatFile intDat = new DatFile()
        {
            DatFormat = (this.DatFormat != 0 ? this.DatFormat : 0),

            // Filtering that needs to be copied over
            ExcludeFields = (bool[])this.ExcludeFields.Clone(),
            OneRom = this.OneRom,
            KeepEmptyGames = this.KeepEmptyGames,
            SceneDateStrip = this.SceneDateStrip,
            DedupeRoms = this.DedupeRoms,
            Prefix = this.Prefix,
            Postfix = this.Postfix,
            AddExtension = this.AddExtension,
            ReplaceExtension = this.ReplaceExtension,
            RemoveExtension = this.RemoveExtension,
            Romba = this.Romba,
            GameName = this.GameName,
            Quotes = this.Quotes,
            UseRomName = this.UseRomName,
        };
        intDat.Parse(path, 1, 1, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName);
        filter.FilterDatFile(intDat);

        // If we are matching based on hashes of any sort
        if ((replaceMode & ReplaceMode.ItemName) != 0
            || (replaceMode & ReplaceMode.Hash) != 0)
        {
            // For comparison's sake, we want to use CRC as the base ordering
            BucketBy(SortedBy.CRC, DedupeType.Full);
            intDat.BucketBy(SortedBy.CRC, DedupeType.None);

            // Then we do a hashwise comparison against the base DAT
            List<string> keys = intDat.Keys;
            Parallel.ForEach(keys, Globals.ParallelOptions, key =>
            {
                List<DatItem> datItems = intDat[key];
                List<DatItem> newDatItems = new List<DatItem>();
                foreach (DatItem datItem in datItems)
                {
                    // If we have something other than a Rom or Disk, then this doesn't do anything
                    if (datItem.ItemType != ItemType.Disk && datItem.ItemType != ItemType.Rom)
                    {
                        newDatItems.Add((DatItem)datItem.Clone());
                        continue;
                    }

                    List<DatItem> dupes = datItem.GetDuplicates(this, sorted: true);
                    DatItem newDatItem = (DatItem)datItem.Clone();

                    if (dupes.Count > 0)
                    {
                        // If we're updating names, replace using the first found name
                        if ((replaceMode & ReplaceMode.ItemName) != 0)
                        {
                            newDatItem.Name = dupes[0].Name;
                        }

                        // If we're updating hashes, only replace if the current item doesn't have them
                        if ((replaceMode & ReplaceMode.Hash) != 0)
                        {
                            if (newDatItem.ItemType == ItemType.Rom)
                            {
                                Rom newRomItem = (Rom)newDatItem;
                                if (String.IsNullOrEmpty(newRomItem.CRC) && !String.IsNullOrEmpty(((Rom)dupes[0]).CRC))
                                {
                                    newRomItem.CRC = ((Rom)dupes[0]).CRC;
                                }
                                if (String.IsNullOrEmpty(newRomItem.MD5) && !String.IsNullOrEmpty(((Rom)dupes[0]).MD5))
                                {
                                    newRomItem.MD5 = ((Rom)dupes[0]).MD5;
                                }
                                if (String.IsNullOrEmpty(newRomItem.SHA1) && !String.IsNullOrEmpty(((Rom)dupes[0]).SHA1))
                                {
                                    newRomItem.SHA1 = ((Rom)dupes[0]).SHA1;
                                }
                                if (String.IsNullOrEmpty(newRomItem.SHA256) && !String.IsNullOrEmpty(((Rom)dupes[0]).SHA256))
                                {
                                    newRomItem.SHA256 = ((Rom)dupes[0]).SHA256;
                                }
                                if (String.IsNullOrEmpty(newRomItem.SHA384) && !String.IsNullOrEmpty(((Rom)dupes[0]).SHA384))
                                {
                                    newRomItem.SHA384 = ((Rom)dupes[0]).SHA384;
                                }
                                if (String.IsNullOrEmpty(newRomItem.SHA512) && !String.IsNullOrEmpty(((Rom)dupes[0]).SHA512))
                                {
                                    newRomItem.SHA512 = ((Rom)dupes[0]).SHA512;
                                }

                                newDatItem = (Rom)newRomItem.Clone();
                            }
                            else if (newDatItem.ItemType == ItemType.Disk)
                            {
                                Disk newDiskItem = (Disk)newDatItem;
                                if (String.IsNullOrEmpty(newDiskItem.MD5) && !String.IsNullOrEmpty(((Disk)dupes[0]).MD5))
                                {
                                    newDiskItem.MD5 = ((Disk)dupes[0]).MD5;
                                }
                                if (String.IsNullOrEmpty(newDiskItem.SHA1) && !String.IsNullOrEmpty(((Disk)dupes[0]).SHA1))
                                {
                                    newDiskItem.SHA1 = ((Disk)dupes[0]).SHA1;
                                }
                                if (String.IsNullOrEmpty(newDiskItem.SHA256) && !String.IsNullOrEmpty(((Disk)dupes[0]).SHA256))
                                {
                                    newDiskItem.SHA256 = ((Disk)dupes[0]).SHA256;
                                }
                                if (String.IsNullOrEmpty(newDiskItem.SHA384) && !String.IsNullOrEmpty(((Disk)dupes[0]).SHA384))
                                {
                                    newDiskItem.SHA384 = ((Disk)dupes[0]).SHA384;
                                }
                                if (String.IsNullOrEmpty(newDiskItem.SHA512) && !String.IsNullOrEmpty(((Disk)dupes[0]).SHA512))
                                {
                                    newDiskItem.SHA512 = ((Disk)dupes[0]).SHA512;
                                }

                                newDatItem = (Disk)newDiskItem.Clone();
                            }
                        }
                    }

                    newDatItems.Add(newDatItem);
                }

                // Now add the new list to the key
                intDat.Remove(key);
                intDat.AddRange(key, newDatItems);
            });
        }

        // If we are matching based on names of any sort
        if ((replaceMode & ReplaceMode.Description) != 0
            || (replaceMode & ReplaceMode.MachineType) != 0
            || (replaceMode & ReplaceMode.Year) != 0
            || (replaceMode & ReplaceMode.Manufacturer) != 0
            || (replaceMode & ReplaceMode.Parents) != 0)
        {
            // For comparison's sake, we want to use Machine Name as the base ordering
            BucketBy(SortedBy.Game, DedupeType.Full);
            intDat.BucketBy(SortedBy.Game, DedupeType.None);

            // Then we do a namewise comparison against the base DAT
            List<string> keys = intDat.Keys;
            Parallel.ForEach(keys, Globals.ParallelOptions, key =>
            {
                List<DatItem> datItems = intDat[key];
                List<DatItem> newDatItems = new List<DatItem>();
                foreach (DatItem datItem in datItems)
                {
                    DatItem newDatItem = (DatItem)datItem.Clone();

                    // Only replace when the base DAT actually has items for this machine
                    if (Contains(key) && this[key].Count > 0)
                    {
                        if ((replaceMode & ReplaceMode.Description) != 0)
                        {
                            if (!onlySame || (onlySame && newDatItem.MachineName == newDatItem.MachineDescription))
                            {
                                newDatItem.MachineDescription = this[key][0].MachineDescription;
                            }
                        }
                        if ((replaceMode & ReplaceMode.MachineType) != 0)
                        {
                            newDatItem.MachineType = this[key][0].MachineType;
                        }
                        if ((replaceMode & ReplaceMode.Year) != 0)
                        {
                            newDatItem.Year = this[key][0].Year;
                        }
                        if ((replaceMode & ReplaceMode.Manufacturer) != 0)
                        {
                            newDatItem.Manufacturer = this[key][0].Manufacturer;
                        }
                        if ((replaceMode & ReplaceMode.Parents) != 0)
                        {
                            newDatItem.CloneOf = this[key][0].CloneOf;
                            newDatItem.RomOf = this[key][0].RomOf;
                            newDatItem.SampleOf = this[key][0].SampleOf;
                        }
                    }

                    newDatItems.Add(newDatItem);
                }

                // Now add the new list to the key
                intDat.Remove(key);
                intDat.AddRange(key, newDatItems);
            });
        }

        // Determine the output path for the DAT
        string interOutDir = Utilities.GetOutputPath(outDir, path, inplace);

        // Once we're done, try writing out
        intDat.Write(interOutDir, overwrite: inplace);

        // Due to possible memory requirements, we force a garbage collection
        GC.Collect();
    }
}
/// <summary>
/// Output diffs against a base set represented by the current DAT
/// </summary>
/// <param name="inputFileNames">Names of the input files</param>
/// <param name="outDir">Optional param for output directory</param>
/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
/// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
/// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
/// <param name="descAsName">True to use game descriptions as the names, false otherwise (default)</param>
/// <param name="filter">Filter object to be passed to the DatItem level (currently unused here)</param>
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
public void DiffAgainst(List<string> inputFileNames, string outDir, bool inplace, bool clean, bool remUnicode,
    bool descAsName, Filter filter, SplitType splitType)
{
    // For comparison's sake, we want to use CRC as the base ordering
    BucketBy(SortedBy.CRC, DedupeType.Full);

    // Now we want to compare each input DAT against the base
    foreach (string path in inputFileNames)
    {
        // BUGFIX: the message previously read "Comparing '{0}'' to base DAT"
        // with a stray doubled apostrophe after the placeholder
        Globals.Logger.User("Comparing '{0}' to base DAT", path.Split('¬')[0]);

        // First we parse in the DAT internally
        DatFile intDat = new DatFile();
        intDat.Parse(path, 1, 1, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName);

        // For comparison's sake, we want to use CRC as the base ordering
        intDat.BucketBy(SortedBy.CRC, DedupeType.Full);

        // Then we do a hashwise comparison against the base DAT, keeping only
        // the items that do NOT have a duplicate in the base set
        List<string> keys = intDat.Keys;
        Parallel.ForEach(keys, Globals.ParallelOptions, key =>
        {
            List<DatItem> datItems = intDat[key];
            List<DatItem> keepDatItems = new List<DatItem>();
            foreach (DatItem datItem in datItems)
            {
                if (!datItem.HasDuplicates(this, true))
                {
                    keepDatItems.Add(datItem);
                }
            }

            // Now add the new list to the key
            intDat.Remove(key);
            intDat.AddRange(key, keepDatItems);
        });

        // Determine the output path for the DAT
        string interOutDir = Utilities.GetOutputPath(outDir, path, inplace);

        // Once we're done, try writing out
        intDat.Write(interOutDir, overwrite: inplace);

        // Due to possible memory requirements, we force a garbage collection
        GC.Collect();
    }
}
/// <summary>
/// Output cascading diffs
/// </summary>
/// <param name="inputs">List of inputs to write out from</param>
/// <param name="datHeaders">Dat headers used optionally</param>
/// <param name="outDir">Output directory to write the DATs to</param>
/// <param name="inplace">True if cascaded diffs are outputted in-place, false otherwise</param>
/// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
// NOTE(review): generic type arguments appear stripped by extraction throughout
// this file (e.g. "List" for what is presumably "List<string>"); code left byte-identical here.
public void DiffCascade(List inputs, List datHeaders, string outDir, bool inplace, bool skip)
{
// Create a list of DatData objects representing output files
List outDats = new List();
// Loop through each of the inputs and get or create a new DatData object
InternalStopwatch watch = new InternalStopwatch("Initializing all output DATs");
DatFile[] outDatsArray = new DatFile[inputs.Count];
Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
{
string innerpost = " (" + j + " - " + Utilities.GetFilenameFromFileAndParent(inputs[j], true) + " Only)";
DatFile diffData;
// If we're in inplace mode or the output directory is set, take the appropriate DatData object already stored
if (inplace || outDir != Environment.CurrentDirectory)
{
diffData = datHeaders[j];
}
else
{
// Otherwise clone the current header and tag it with the per-input suffix
diffData = new DatFile(this);
diffData.FileName += innerpost;
diffData.Name += innerpost;
diffData.Description += innerpost;
}
// Each output starts with an empty item dictionary regardless of source
diffData.ResetDictionary();
outDatsArray[j] = diffData;
});
outDats = outDatsArray.ToList();
watch.Stop();
// Then, ensure that the internal dat can be sorted in the best possible way
BucketBy(SortedBy.CRC, DedupeType.None);
// Now, loop through the dictionary and populate the correct DATs
watch.Start("Populating all output DATs");
List keys = Keys;
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
{
List items = DatItem.Merge(this[key]);
// If the rom list is empty or null, just skip it
if (items == null || items.Count == 0)
{
return;
}
foreach (DatItem item in items)
{
// There's odd cases where there are items with System ID < 0. Skip them for now
if (item.SystemID < 0)
{
Globals.Logger.Warning("Item found with a <0 SystemID: {0}", item.Name);
continue;
}
// SystemID indexes the per-input output DAT this item belongs to
outDats[item.SystemID].Add(key, item);
}
});
watch.Stop();
// Finally, loop through and output each of the DATs
watch.Start("Outputting all created DATs");
// When skip is set, index 0 (the first cascade input) is not written
Parallel.For((skip ? 1 : 0), inputs.Count, Globals.ParallelOptions, j =>
{
string path = Utilities.GetOutputPath(outDir, inputs[j], inplace);
// Try to output the file
outDats[j].Write(path, overwrite: inplace);
});
watch.Stop();
}
/// <summary>
/// Output non-cascading diffs
/// </summary>
/// <param name="inputs">List of inputs to write out from</param>
/// <param name="outDir">Output directory to write the DATs to</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
/// <param name="diff">Non-zero flag for diffing mode, zero otherwise</param>
public void DiffNoCascade(List inputs, string outDir, Filter filter, UpdateMode diff)
{
InternalStopwatch watch = new InternalStopwatch("Initializing all output DATs");
// Default vars for use
string post = "";
DatFile outerDiffData = new DatFile();
DatFile dupeData = new DatFile();
// Fill in any information not in the base DAT
if (String.IsNullOrWhiteSpace(FileName))
{
FileName = "All DATs";
}
if (String.IsNullOrWhiteSpace(Name))
{
Name = "All DATs";
}
if (String.IsNullOrWhiteSpace(Description))
{
Description = "All DATs";
}
// Don't have External dupes: (a-b)+(b-a) output headers
if ((diff & UpdateMode.DiffNoDupesOnly) != 0)
{
post = " (No Duplicates)";
outerDiffData = new DatFile(this);
outerDiffData.FileName += post;
outerDiffData.Name += post;
outerDiffData.Description += post;
outerDiffData.ResetDictionary();
}
// Have External dupes: (ab) output headers
if ((diff & UpdateMode.DiffDupesOnly) != 0)
{
post = " (Duplicates)";
dupeData = new DatFile(this);
dupeData.FileName += post;
dupeData.Name += post;
dupeData.Description += post;
dupeData.ResetDictionary();
}
// Create a list of DatData objects representing individual output files
List outDats = new List();
// Loop through each of the inputs and get or create a new DatData object
if ((diff & UpdateMode.DiffIndividualsOnly) != 0)
{
DatFile[] outDatsArray = new DatFile[inputs.Count];
Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
{
string innerpost = " (" + j + " - " + Utilities.GetFilenameFromFileAndParent(inputs[j], true) + " Only)";
DatFile diffData = new DatFile(this);
diffData.FileName += innerpost;
diffData.Name += innerpost;
diffData.Description += innerpost;
diffData.ResetDictionary();
outDatsArray[j] = diffData;
});
outDats = outDatsArray.ToList();
}
watch.Stop();
// Now, loop through the dictionary and populate the correct DATs
watch.Start("Populating all output DATs");
List keys = Keys;
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
{
List items = DatItem.Merge(this[key]);
// If the rom list is empty or null, just skip it
if (items == null || items.Count == 0)
{
return;
}
// Loop through and add the items correctly
foreach (DatItem item in items)
{
// No duplicates: item has only internal dupes, or none at all
if ((diff & UpdateMode.DiffNoDupesOnly) != 0 || (diff & UpdateMode.DiffIndividualsOnly) != 0)
{
if ((item.DupeType & DupeType.Internal) != 0 || item.DupeType == 0x00)
{
// Individual DATs that are output
if ((diff & UpdateMode.DiffIndividualsOnly) != 0)
{
outDats[item.SystemID].Add(key, item);
}
// Merged no-duplicates DAT; the machine name is tagged with its source file
if ((diff & UpdateMode.DiffNoDupesOnly) != 0)
{
DatItem newrom = item.Clone() as DatItem;
newrom.MachineName += " (" + Path.GetFileNameWithoutExtension(inputs[item.SystemID].Split('¬')[0]) + ")";
outerDiffData.Add(key, newrom);
}
}
}
// Duplicates only: item was found in more than one input
if ((diff & UpdateMode.DiffDupesOnly) != 0)
{
if ((item.DupeType & DupeType.External) != 0)
{
DatItem newrom = item.Clone() as DatItem;
newrom.MachineName += " (" + Path.GetFileNameWithoutExtension(inputs[item.SystemID].Split('¬')[0]) + ")";
dupeData.Add(key, newrom);
}
}
}
});
watch.Stop();
// Finally, loop through and output each of the DATs
watch.Start("Outputting all created DATs");
// Output the difflist (a-b)+(b-a) diff
if ((diff & UpdateMode.DiffNoDupesOnly) != 0)
{
outerDiffData.Write(outDir, overwrite: false);
}
// Output the (ab) diff
if ((diff & UpdateMode.DiffDupesOnly) != 0)
{
dupeData.Write(outDir, overwrite: false);
}
// Output the individual (a-b) DATs
if ((diff & UpdateMode.DiffIndividualsOnly) != 0)
{
Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
{
string path = Utilities.GetOutputPath(outDir, inputs[j], false /* inplace */);
// Try to output the file
outDats[j].Write(path, overwrite: false);
});
}
watch.Stop();
}
/// <summary>
/// Output user defined merge
/// </summary>
/// <param name="inputs">List of inputs to write out from</param>
/// <param name="datHeaders">Dat headers used optionally</param>
/// <param name="outDir">Output directory to write the DATs to</param>
public void MergeNoDiff(List<string> inputs, List<DatFile> datHeaders, string outDir)
{
    // In SuperDAT mode, every machine is prefixed with the relative path of the DAT it came from
    if (Type == "SuperDAT")
    {
        Parallel.ForEach(Keys, Globals.ParallelOptions, key =>
        {
            List<DatItem> renamed = new List<DatItem>();
            foreach (DatItem item in this[key].ToList())
            {
                DatItem current = item;

                // Inputs are "<file>¬<root>" pairs; strip the root off the file path
                string sourceFile = inputs[current.SystemID].Split('¬')[0];
                string sourceRoot = inputs[current.SystemID].Split('¬')[1];
                sourceRoot += (String.IsNullOrWhiteSpace(sourceRoot) ? "" : Path.DirectorySeparatorChar.ToString());
                sourceFile = sourceFile.Remove(0, sourceRoot.Length);

                // Prefix: "<dat dir>\<dat name>\<original machine name>"
                current.MachineName = Path.GetDirectoryName(sourceFile) + Path.DirectorySeparatorChar
                    + Path.GetFileNameWithoutExtension(sourceFile) + Path.DirectorySeparatorChar
                    + current.MachineName;

                renamed.Add(current);
            }

            Remove(key);
            AddRange(key, renamed);
        });
    }

    // Try to output the file
    Write(outDir, overwrite: false);
}
/// <summary>
/// Convert, update, and filter a DAT file or set of files
/// </summary>
/// <param name="inputFileNames">Names of the input files and/or folders</param>
/// <param name="outDir">Optional param for output directory</param>
/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
/// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
/// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
/// <param name="descAsName">True to use game descriptions as the names, false otherwise (default)</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
public void Update(List<string> inputFileNames, string outDir, bool inplace, bool clean, bool remUnicode, bool descAsName,
    Filter filter, SplitType splitType)
{
    // Iterate over the files
    foreach (string file in inputFileNames)
    {
        DatFile workingDat = new DatFile(this);
        Globals.Logger.User("Processing '{0}'", Path.GetFileName(file.Split('¬')[0]));

        // Delimited output formats need to keep the original file extension
        bool keepExtension = (workingDat.DatFormat & DatFormat.TSV) != 0
            || (workingDat.DatFormat & DatFormat.CSV) != 0
            || (workingDat.DatFormat & DatFormat.SSV) != 0;

        workingDat.Parse(file, 0, 0, splitType, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName,
            keepext: keepExtension);
        filter.FilterDatFile(workingDat);

        // Get the correct output path
        string realOutDir = Utilities.GetOutputPath(outDir, file, inplace);

        // Try to output the file, overwriting only if it's not in the current directory
        workingDat.Write(realOutDir, overwrite: inplace);
    }
}
#endregion
#region Dictionary Manipulation
/// <summary>
/// Clones the files dictionary
/// </summary>
/// <returns>A new files dictionary instance containing deep clones of every item</returns>
public SortedDictionary<string, List<DatItem>> CloneDictionary()
{
    // Create the placeholder dictionary to be used
    SortedDictionary<string, List<DatItem>> sorted = new SortedDictionary<string, List<DatItem>>();

    // Now perform a deep clone on the entire dictionary. Keys come from a
    // dictionary and are unique, so each key can be added directly — the old
    // ContainsKey guard was dead code and cost a second lookup per key.
    List<string> keys = Keys;
    foreach (string key in keys)
    {
        // Clone each list of DATs in the dictionary
        List<DatItem> newItems = new List<DatItem>();
        foreach (DatItem item in this[key])
        {
            newItems.Add((DatItem)item.Clone());
        }

        sorted.Add(key, newItems);
    }

    return sorted;
}
/// <summary>
/// Delete the file dictionary
/// </summary>
public void DeleteDictionary()
{
    // Drop the dictionary entirely; sorting/merging state no longer applies
    _items = null;
    this.SortedBy = SortedBy.Default;
    this.MergedBy = DedupeType.None;

    // Reset statistics
    _datStats.Reset();
}
/// <summary>
/// Reset the file dictionary
/// </summary>
public void ResetDictionary()
{
    // Replace the dictionary with a fresh, empty one and clear sorting/merging state
    _items = new SortedDictionary<string, List<DatItem>>();
    this.SortedBy = SortedBy.Default;
    this.MergedBy = DedupeType.None;

    // Reset statistics
    _datStats.Reset();
}
#endregion
#region Filtering
/// <summary>
/// Use game descriptions as names in the DAT, updating cloneof/romof/sampleof
/// </summary>
private void MachineDescriptionToName()
{
    try
    {
        // First we want to get a mapping for all games to description
        ConcurrentDictionary<string, string> mapping = new ConcurrentDictionary<string, string>();
        List<string> keys = Keys;
        Parallel.ForEach(keys, Globals.ParallelOptions, key =>
        {
            List<DatItem> items = this[key];
            foreach (DatItem item in items)
            {
                // Skip items that can't contribute a mapping. Previously a null
                // name or description threw inside this parallel loop, and the
                // outer catch then aborted the entire renaming pass.
                if (item.MachineName == null || item.MachineDescription == null)
                {
                    continue;
                }

                // TryAdd already ignores existing keys, so no ContainsKey pre-check is needed
                mapping.TryAdd(item.MachineName, item.MachineDescription.Replace('/', '_').Replace("\"", "''").Replace(":", " -"));
            }
        });

        // Now we loop through every item and update accordingly
        keys = Keys;
        Parallel.ForEach(keys, Globals.ParallelOptions, key =>
        {
            List<DatItem> items = this[key];
            List<DatItem> newItems = new List<DatItem>();
            foreach (DatItem item in items)
            {
                // Update machine name
                if (!String.IsNullOrWhiteSpace(item.MachineName) && mapping.ContainsKey(item.MachineName))
                {
                    item.MachineName = mapping[item.MachineName];
                }

                // Update cloneof
                if (!String.IsNullOrWhiteSpace(item.CloneOf) && mapping.ContainsKey(item.CloneOf))
                {
                    item.CloneOf = mapping[item.CloneOf];
                }

                // Update romof
                if (!String.IsNullOrWhiteSpace(item.RomOf) && mapping.ContainsKey(item.RomOf))
                {
                    item.RomOf = mapping[item.RomOf];
                }

                // Update sampleof
                if (!String.IsNullOrWhiteSpace(item.SampleOf) && mapping.ContainsKey(item.SampleOf))
                {
                    item.SampleOf = mapping[item.SampleOf];
                }

                // Add the new item to the output list
                newItems.Add(item);
            }

            // Replace the old list of roms with the new one
            Remove(key);
            AddRange(key, newItems);
        });
    }
    catch (Exception ex)
    {
        Globals.Logger.Warning(ex.ToString());
    }
}
/// <summary>
/// Ensure that all roms are in their own game (or at least try to ensure)
/// </summary>
private void OneRomPerGame()
{
    // Each item's machine gets suffixed with "/<item name minus its extension>"
    Parallel.ForEach(Keys, Globals.ParallelOptions, key =>
    {
        List<DatItem> items = this[key];
        for (int index = 0; index < items.Count; index++)
        {
            // Drop the final dot-separated segment (the extension) when one exists
            string[] parts = items[index].Name.Split('.');
            int segmentsToKeep = parts.Length > 1 ? parts.Length - 1 : 1;
            items[index].MachineName += "/" + string.Join(".", parts.Take(segmentsToKeep));
        }
    });
}
/// <summary>
/// Remove all items marked for removal from the DAT
/// </summary>
private void RemoveMarkedItems()
{
    List<string> keys = Keys;
    foreach (string key in keys)
    {
        // Keep only the items that aren't flagged for removal
        List<DatItem> kept = this[key].Where(item => !item.Remove).ToList();

        Remove(key);
        AddRange(key, kept);
    }
}
/// <summary>
/// Strip the dates from the beginning of scene-style set names
/// </summary>
private void StripSceneDatesFromItems()
{
    // Output the logging statement
    Globals.Logger.User("Stripping scene-style dates");

    // Build the regex once instead of re-parsing the pattern for every item
    Regex sceneDateRegex = new Regex(@"([0-9]{2}\.[0-9]{2}\.[0-9]{2}-)(.*?-.*?)");

    // Now process all of the roms
    List<string> keys = Keys;
    Parallel.ForEach(keys, Globals.ParallelOptions, key =>
    {
        List<DatItem> items = this[key];
        for (int j = 0; j < items.Count; j++)
        {
            DatItem item = items[j];

            // Guard against null names: Regex.IsMatch(null) throws an
            // ArgumentNullException, which would take down the whole parallel loop
            if (item.MachineName != null && sceneDateRegex.IsMatch(item.MachineName))
            {
                item.MachineName = sceneDateRegex.Replace(item.MachineName, "$2");
            }
            if (item.MachineDescription != null && sceneDateRegex.IsMatch(item.MachineDescription))
            {
                item.MachineDescription = sceneDateRegex.Replace(item.MachineDescription, "$2");
            }

            items[j] = item;
        }

        Remove(key);
        AddRange(key, items);
    });
}
#endregion
#region Internal Merging/Splitting
/// <summary>
/// Use device_ref tags to get full non-merged sets and remove parenting tags
/// </summary>
/// <param name="mergeroms">Dedupe type to be used</param>
public void CreateDeviceNonMergedSets(DedupeType mergeroms)
{
    Globals.Logger.User("Creating device non-merged sets from the DAT");

    // Everything below assumes items are bucketed by game name
    BucketBy(SortedBy.Game, mergeroms, norename: true);

    // Keep pulling device roms in until a full pass adds nothing new,
    // first for non-device sets, then for device sets themselves
    while (AddRomsFromDevices(false, false)) ;
    while (AddRomsFromDevices(true, false)) ;

    // Strip romof/cloneof so the result isn't re-parented by the manager
    RemoveTagsFromChild();
}
/// <summary>
/// Use cloneof tags to create non-merged sets and remove the tags plus using the device_ref tags to get full sets
/// </summary>
/// <param name="mergeroms">Dedupe type to be used</param>
public void CreateFullyNonMergedSets(DedupeType mergeroms)
{
    Globals.Logger.User("Creating fully non-merged sets from the DAT");

    // Everything below assumes items are bucketed by game name
    BucketBy(SortedBy.Game, mergeroms, norename: true);

    // Pull in device roms (repeating for device sets until stable), then parent roms
    while (AddRomsFromDevices(true, true)) ;
    AddRomsFromDevices(false, true);
    AddRomsFromParent();

    // Now that we have looped through the cloneof tags, we loop through the romof tags
    AddRomsFromBios();

    // Strip romof/cloneof so the result isn't re-parented by the manager
    RemoveTagsFromChild();
}
/// <summary>
/// Use cloneof tags to create merged sets and remove the tags
/// </summary>
/// <param name="mergeroms">Dedupe type to be used</param>
public void CreateMergedSets(DedupeType mergeroms)
{
    Globals.Logger.User("Creating merged sets from the DAT");

    // Everything below assumes items are bucketed by game name
    BucketBy(SortedBy.Game, mergeroms, norename: true);

    // Fold every clone's items up into its parent
    AddRomsFromChildren();

    // Then strip inherited BIOS roms out of non-bios and bios children alike
    RemoveBiosRomsFromChild(false);
    RemoveBiosRomsFromChild(true);

    // Strip romof/cloneof so the result isn't re-parented by the manager
    RemoveTagsFromChild();
}
/// <summary>
/// Use cloneof tags to create non-merged sets and remove the tags
/// </summary>
/// <param name="mergeroms">Dedupe type to be used</param>
public void CreateNonMergedSets(DedupeType mergeroms)
{
    Globals.Logger.User("Creating non-merged sets from the DAT");

    // Everything below assumes items are bucketed by game name
    BucketBy(SortedBy.Game, mergeroms, norename: true);

    // Copy every parent's items down into its clones
    AddRomsFromParent();

    // Then strip inherited BIOS roms out of non-bios and bios children alike
    RemoveBiosRomsFromChild(false);
    RemoveBiosRomsFromChild(true);

    // Strip romof/cloneof so the result isn't re-parented by the manager
    RemoveTagsFromChild();
}
/// <summary>
/// Use cloneof and romof tags to create split sets and remove the tags
/// </summary>
/// <param name="mergeroms">Dedupe type to be used</param>
public void CreateSplitSets(DedupeType mergeroms)
{
    Globals.Logger.User("Creating split sets from the DAT");

    // Everything below assumes items are bucketed by game name
    BucketBy(SortedBy.Game, mergeroms, norename: true);

    // Remove any item from a clone that also exists in its parent
    RemoveRomsFromChild();

    // Then strip inherited BIOS roms out of non-bios and bios children alike
    RemoveBiosRomsFromChild(false);
    RemoveBiosRomsFromChild(true);

    // Strip romof/cloneof so the result isn't re-parented by the manager
    RemoveTagsFromChild();
}
/// <summary>
/// Use romof tags to add roms to the children
/// </summary>
private void AddRomsFromBios()
{
    List<string> games = Keys;
    foreach (string game in games)
    {
        // If the game has no items in it, we want to continue
        if (this[game].Count == 0)
        {
            continue;
        }

        // Determine if the game has a parent or not
        string parent = null;
        if (!String.IsNullOrWhiteSpace(this[game][0].RomOf))
        {
            parent = this[game][0].RomOf;
        }

        // If the parent doesn't exist, we want to continue
        if (String.IsNullOrWhiteSpace(parent))
        {
            continue;
        }

        // If the parent doesn't have any items, we want to continue
        if (this[parent].Count == 0)
        {
            continue;
        }

        // If the parent exists and has items, we copy the items from the parent to the current game
        DatItem copyFrom = this[game][0];
        List<DatItem> parentItems = this[parent];
        foreach (DatItem item in parentItems)
        {
            DatItem datItem = (DatItem)item.Clone();
            datItem.CopyMachineInformation(copyFrom);

            // Only add items whose name isn't already present in the child.
            // NOTE(review): this name check is case-sensitive, unlike the
            // ToLowerInvariant comparison used by AddRomsFromParent and
            // AddRomsFromDevices — confirm whether that difference is intentional.
            if (!this[game].Any(i => i.Name == datItem.Name) && !this[game].Contains(datItem))
            {
                Add(game, datItem);
            }
        }
    }
}
/// <summary>
/// Use device_ref and optionally slotoption tags to add roms to the children
/// </summary>
/// <param name="dev">True if only child device sets are touched, false for non-device sets (default)</param>
/// <param name="slotoptions">True if slotoptions tags are used as well, false otherwise</param>
/// <returns>True if any new item was copied into a game this pass, false otherwise</returns>
private bool AddRomsFromDevices(bool dev = false, bool slotoptions = false)
{
bool foundnew = false;
List games = Keys;
foreach (string game in games)
{
// If the game doesn't have items, we continue
if (this[game] == null || this[game].Count == 0)
{
continue;
}
// XOR gate: when dev is true, only process device sets; when false, only non-device sets
if (dev ^ (this[game][0].MachineType & MachineType.Device) != 0)
{
continue;
}
// If the game has no devices, we continue
if (this[game][0].Devices == null
|| this[game][0].Devices.Count == 0
|| (slotoptions && this[game][0].SlotOptions == null)
|| (slotoptions && this[game][0].SlotOptions.Count == 0))
{
continue;
}
// Determine if the game has any devices or not
List devices = this[game][0].Devices;
List newdevs = new List();
foreach (string device in devices)
{
// If the device doesn't exist then we continue
if (this[device].Count == 0)
{
continue;
}
// Otherwise, copy the items from the device to the current game
DatItem copyFrom = this[game][0];
List devItems = this[device];
foreach (DatItem item in devItems)
{
DatItem datItem = (DatItem)item.Clone();
// Collect the device's own transitive device list for merging below
newdevs.AddRange(datItem.Devices ?? new List());
datItem.CopyMachineInformation(copyFrom);
// Case-insensitive name check so a rom isn't added twice
if (this[game].Where(i => i.Name.ToLowerInvariant() == datItem.Name.ToLowerInvariant()).Count() == 0)
{
foundnew = true;
Add(game, datItem);
}
}
}
// Now that every device is accounted for, add the new list of devices, if they don't already exist
foreach (string device in newdevs)
{
if (!this[game][0].Devices.Contains(device))
{
this[game][0].Devices.Add(device);
}
}
// If we're checking slotoptions too, repeat the same copy process for slotoption references
if (slotoptions)
{
// Determine if the game has any slotoptions or not
List slotopts = this[game][0].SlotOptions;
List newslotopts = new List();
foreach (string slotopt in slotopts)
{
// If the slotoption doesn't exist then we continue
if (this[slotopt].Count == 0)
{
continue;
}
// Otherwise, copy the items from the slotoption to the current game
DatItem copyFrom = this[game][0];
List slotItems = this[slotopt];
foreach (DatItem item in slotItems)
{
DatItem datItem = (DatItem)item.Clone();
// Collect transitive slotoptions for merging below
newslotopts.AddRange(datItem.SlotOptions ?? new List());
datItem.CopyMachineInformation(copyFrom);
// Case-insensitive name check so a rom isn't added twice
if (this[game].Where(i => i.Name.ToLowerInvariant() == datItem.Name.ToLowerInvariant()).Count() == 0)
{
foundnew = true;
Add(game, datItem);
}
}
}
// Now that every slotoption is accounted for, add the new list of slotoptions, if they don't already exist
foreach (string slotopt in newslotopts)
{
if (!this[game][0].SlotOptions.Contains(slotopt))
{
this[game][0].SlotOptions.Add(slotopt);
}
}
}
}
return foundnew;
}
/// <summary>
/// Use cloneof tags to add roms to the children, setting the new romof tag in the process
/// </summary>
private void AddRomsFromParent()
{
    foreach (string game in Keys)
    {
        // Nothing to do for empty games
        if (this[game].Count == 0)
        {
            continue;
        }

        // Resolve the cloneof parent; skip games without a populated parent
        string parent = this[game][0].CloneOf;
        if (String.IsNullOrWhiteSpace(parent) || this[parent].Count == 0)
        {
            continue;
        }

        // Copy each parent item down into the child, skipping names already present
        DatItem machineSource = this[game][0];
        foreach (DatItem parentItem in this[parent])
        {
            DatItem cloned = (DatItem)parentItem.Clone();
            cloned.CopyMachineInformation(machineSource);

            bool nameExists = this[game].Any(i => i.Name.ToLowerInvariant() == cloned.Name.ToLowerInvariant());
            if (!nameExists && !this[game].Contains(cloned))
            {
                Add(game, cloned);
            }
        }

        // Finally, push the parent's romof value down onto every item in the child
        string parentRomOf = this[parent][0].RomOf;
        foreach (DatItem childItem in this[game])
        {
            childItem.RomOf = parentRomOf;
        }
    }
}
/// <summary>
/// Use cloneof tags to add roms to the parents, removing the child sets in the process
/// </summary>
private void AddRomsFromChildren()
{
List games = Keys;
foreach (string game in games)
{
// If the game has no items in it, we want to continue
if (this[game].Count == 0)
{
continue;
}
// Determine if the game has a parent or not
string parent = null;
if (!String.IsNullOrWhiteSpace(this[game][0].CloneOf))
{
parent = this[game][0].CloneOf;
}
// If there is no parent, then we continue
if (String.IsNullOrWhiteSpace(parent))
{
continue;
}
// Otherwise, move the items from the current game to a subfolder of the parent game.
// If the parent set is empty, fabricate a placeholder machine to copy naming from.
DatItem copyFrom = this[parent].Count == 0 ? new Rom { MachineName = parent, MachineDescription = parent } : this[parent][0];
List items = this[game];
foreach (DatItem item in items)
{
// If the disk doesn't have a valid merge tag OR the merged file doesn't exist in the parent, then add it
if (item.ItemType == ItemType.Disk && (((Disk)item).MergeTag == null || !this[parent].Select(i => i.Name).Contains(((Disk)item).MergeTag)))
{
item.CopyMachineInformation(copyFrom);
Add(parent, item);
}
// Otherwise, if the parent doesn't already contain the non-disk (or a merge-equivalent), add it
else if (item.ItemType != ItemType.Disk && !this[parent].Contains(item))
{
// Rename the child so it's in a subfolder
item.Name = item.MachineName + "\\" + item.Name;
// Update the machine to be the new parent
item.CopyMachineInformation(copyFrom);
// Add the rom to the parent set
Add(parent, item);
}
}
// Then, remove the old game so it's not picked up by the writer
Remove(game);
}
}
/// <summary>
/// Remove all BIOS and device sets
/// </summary>
private void RemoveBiosAndDeviceSets()
{
    foreach (string game in Keys)
    {
        // A set is removable when its first item is flagged as either Bios or Device
        if (this[game].Count == 0)
        {
            continue;
        }

        MachineType machineType = this[game][0].MachineType;
        if ((machineType & MachineType.Bios) != 0 || (machineType & MachineType.Device) != 0)
        {
            Remove(game);
        }
    }
}
/// <summary>
/// Use romof tags to remove bios roms from children
/// </summary>
/// <param name="bios">True if only child Bios sets are touched, false for non-bios sets (default)</param>
private void RemoveBiosRomsFromChild(bool bios = false)
{
// Loop through the romof tags
List games = Keys;
foreach (string game in games)
{
// If the game has no items in it, we want to continue
if (this[game].Count == 0)
{
continue;
}
// XOR gate: when bios is true, only process bios sets; when false, only non-bios sets
if (bios ^ (this[game][0].MachineType & MachineType.Bios) != 0)
{
continue;
}
// Determine if the game has a parent or not
string parent = null;
if (!String.IsNullOrWhiteSpace(this[game][0].RomOf))
{
parent = this[game][0].RomOf;
}
// If the parent doesnt exist, we want to continue
if (String.IsNullOrWhiteSpace(parent))
{
continue;
}
// If the parent doesn't have any items, we want to continue
if (this[parent].Count == 0)
{
continue;
}
// If the parent exists and has items, we remove the items that are in the parent from the current game
List parentItems = this[parent];
foreach (DatItem item in parentItems)
{
DatItem datItem = (DatItem)item.Clone();
// Loop because the child may contain multiple copies of the same item
while (this[game].Contains(datItem))
{
Remove(game, datItem);
}
}
}
}
/// <summary>
/// Use cloneof tags to remove roms from the children
/// </summary>
private void RemoveRomsFromChild()
{
    foreach (string game in Keys)
    {
        // Nothing to do for empty games
        if (this[game].Count == 0)
        {
            continue;
        }

        // Resolve the cloneof parent; skip games without a populated parent
        string parent = this[game][0].CloneOf;
        if (String.IsNullOrWhiteSpace(parent) || this[parent].Count == 0)
        {
            continue;
        }

        // Strip every copy of each parent item out of the child
        foreach (DatItem parentItem in this[parent])
        {
            DatItem toRemove = (DatItem)parentItem.Clone();
            while (this[game].Contains(toRemove))
            {
                Remove(game, toRemove);
            }
        }

        // The remaining child items inherit the parent's romof value
        string parentRomOf = this[parent][0].RomOf;
        foreach (DatItem childItem in this[game])
        {
            childItem.RomOf = parentRomOf;
        }
    }
}
/// <summary>
/// Remove all romof and cloneof tags from all games
/// </summary>
private void RemoveTagsFromChild()
{
    // Clear parenting tags on every item in every game
    foreach (string game in Keys)
    {
        foreach (DatItem item in this[game])
        {
            item.CloneOf = null;
            item.RomOf = null;
        }
    }
}
#endregion
#region Parsing
/// <summary>
/// Parse a DAT and return all found games and roms within
/// </summary>
/// <param name="filename">Name of the file to be parsed</param>
/// <param name="sysid">System ID for the DAT</param>
/// <param name="srcid">Source ID for the DAT</param>
/// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param>
/// <param name="clean">True if game names are sanitized, false otherwise (default)</param>
/// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
/// <param name="descAsName">True if descriptions should be used as names, false otherwise (default)</param>
/// <param name="keepext">True if original extension should be kept, false otherwise (default)</param>
/// <param name="useTags">True if tags from the DAT should be used to merge the output, false otherwise (default)</param>
public void Parse(string filename, int sysid, int srcid, bool keep = false, bool clean = false,
    bool remUnicode = false, bool descAsName = false, bool keepext = false, bool useTags = false)
{
    // Convenience overload: delegate to the full version with no forced split type
    Parse(filename, sysid, srcid, SplitType.None, keep: keep, clean: clean,
        remUnicode: remUnicode, descAsName: descAsName, keepext: keepext, useTags: useTags);
}
///
/// Parse a DAT and return all found games and roms within
///
/// Name of the file to be parsed
/// System ID for the DAT
/// Source ID for the DAT>
/// Type of the split that should be performed (split, merged, fully merged)
/// True if full pathnames are to be kept, false otherwise (default)
/// True if game names are sanitized, false otherwise (default)
/// True if we should remove non-ASCII characters from output, false otherwise (default)
/// True if descriptions should be used as names, false otherwise (default)
/// True if original extension should be kept, false otherwise (default)
/// True if tags from the DAT should be used to merge the output, false otherwise (default)
public void Parse(
// Standard Dat parsing
string filename,
int sysid,
int srcid,
// Rom renaming
SplitType splitType,
// Miscellaneous
bool keep = false,
bool clean = false,
bool remUnicode = false,
bool descAsName = false,
bool keepext = false,
bool useTags = false)
{
// Check if we have a split path and get the filename accordingly
if (filename.Contains("¬"))
{
filename = filename.Split('¬')[0];
}
// Check the file extension first as a safeguard
if (!Utilities.HasValidDatExtension(filename))
{
return;
}
// If the output filename isn't set already, get the internal filename
FileName = (String.IsNullOrWhiteSpace(FileName) ? (keepext ? Path.GetFileName(filename) : Path.GetFileNameWithoutExtension(filename)) : FileName);
// If the output type isn't set already, get the internal output type
DatFormat = (DatFormat == 0 ? Utilities.GetDatFormatFromFile(filename) : DatFormat);
this.SortedBy = SortedBy.CRC; // Setting this because it can reduce issues later
// Now parse the correct type of DAT
try
{
Utilities.GetDatFile(filename, this)?.ParseFile(filename, sysid, srcid, keep, clean, remUnicode);
}
catch (Exception ex)
{
Globals.Logger.Error("Error with file '{0}': {1}", filename, ex);
}
// If we want to use descriptions as names, update everything
if (descAsName)
{
MachineDescriptionToName();
}
// If we are using tags from the DAT, set the proper input for split type unless overridden
if (useTags && splitType == SplitType.None)
{
splitType = Utilities.GetSplitType(ForceMerging);
}
// Now we pre-process the DAT with the splitting/merging mode
switch (splitType)
{
case SplitType.None:
// No-op
break;
case SplitType.DeviceNonMerged:
CreateDeviceNonMergedSets(DedupeType.None);
break;
case SplitType.FullNonMerged:
CreateFullyNonMergedSets(DedupeType.None);
break;
case SplitType.NonMerged:
CreateNonMergedSets(DedupeType.None);
break;
case SplitType.Merged:
CreateMergedSets(DedupeType.None);
break;
case SplitType.Split:
CreateSplitSets(DedupeType.None);
break;
}
// Finally, we remove any blanks, if we aren't supposed to have any
if (!KeepEmptyGames)
{
foreach (string key in Keys)
{
List items = this[key];
List newitems = new List();
foreach (DatItem item in items)
{
if (item.ItemType != ItemType.Blank)
{
newitems.Add(item);
}
}
this.Remove(key);
this.AddRange(key, newitems);
}
}
}
///
/// Add a rom to the Dat after checking
///
/// Item data to check against
/// True if the names should be cleaned to WoD standards, false otherwise
/// True if we should remove non-ASCII characters from output, false otherwise (default)
/// The key for the item
public string ParseAddHelper(DatItem item, bool clean, bool remUnicode)
{
string key = "";
// If there's no name in the rom, we log and skip it
if (item.Name == null)
{
Globals.Logger.Warning("{0}: Rom with no name found! Skipping...", FileName);
return key;
}
// If we're in cleaning mode, sanitize the game name
item.MachineName = (clean ? Utilities.CleanGameName(item.MachineName) : item.MachineName);
// If we're stripping unicode characters, do so from all relevant things
if (remUnicode)
{
item.Name = Utilities.RemoveUnicodeCharacters(item.Name);
item.MachineName = Utilities.RemoveUnicodeCharacters(item.MachineName);
item.MachineDescription = Utilities.RemoveUnicodeCharacters(item.MachineDescription);
}
// If we have a Rom or a Disk, clean the hash data
if (item.ItemType == ItemType.Rom)
{
Rom itemRom = (Rom)item;
// Sanitize the hashes from null, hex sizes, and "true blank" strings
itemRom.CRC = Utilities.CleanHashData(itemRom.CRC, Constants.CRCLength);
itemRom.MD5 = Utilities.CleanHashData(itemRom.MD5, Constants.MD5Length);
itemRom.SHA1 = Utilities.CleanHashData(itemRom.SHA1, Constants.SHA1Length);
itemRom.SHA256 = Utilities.CleanHashData(itemRom.SHA256, Constants.SHA256Length);
itemRom.SHA384 = Utilities.CleanHashData(itemRom.SHA384, Constants.SHA384Length);
itemRom.SHA512 = Utilities.CleanHashData(itemRom.SHA512, Constants.SHA512Length);
// If we have the case where there is SHA-1 and nothing else, we don't fill in any other part of the data
if (itemRom.Size == -1
&& String.IsNullOrWhiteSpace(itemRom.CRC)
&& String.IsNullOrWhiteSpace(itemRom.MD5)
&& !String.IsNullOrWhiteSpace(itemRom.SHA1)
&& String.IsNullOrWhiteSpace(itemRom.SHA256)
&& String.IsNullOrWhiteSpace(itemRom.SHA384)
&& String.IsNullOrWhiteSpace(itemRom.SHA512))
{
// No-op, just catch it so it doesn't go further
Globals.Logger.Verbose("{0}: Entry with only SHA-1 found - '{1}'", FileName, itemRom.Name);
}
// If we have a rom and it's missing size AND the hashes match a 0-byte file, fill in the rest of the info
else if ((itemRom.Size == 0 || itemRom.Size == -1)
&& ((itemRom.CRC == Constants.CRCZero || String.IsNullOrWhiteSpace(itemRom.CRC))
|| itemRom.MD5 == Constants.MD5Zero
|| itemRom.SHA1 == Constants.SHA1Zero
|| itemRom.SHA256 == Constants.SHA256Zero
|| itemRom.SHA384 == Constants.SHA384Zero
|| itemRom.SHA512 == Constants.SHA512Zero))
{
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
itemRom.Size = Constants.SizeZero;
itemRom.CRC = Constants.CRCZero;
itemRom.MD5 = Constants.MD5Zero;
itemRom.SHA1 = Constants.SHA1Zero;
itemRom.SHA256 = null;
itemRom.SHA384 = null;
itemRom.SHA512 = null;
//itemRom.SHA256 = Constants.SHA256Zero;
//itemRom.SHA384 = Constants.SHA384Zero;
//itemRom.SHA512 = Constants.SHA512Zero;
}
// If the file has no size and it's not the above case, skip and log
else if (itemRom.ItemStatus != ItemStatus.Nodump && (itemRom.Size == 0 || itemRom.Size == -1))
{
Globals.Logger.Verbose("{0}: Incomplete entry for '{1}' will be output as nodump", FileName, itemRom.Name);
itemRom.ItemStatus = ItemStatus.Nodump;
}
// If the file has a size but aboslutely no hashes, skip and log
else if (itemRom.ItemStatus != ItemStatus.Nodump
&& itemRom.Size > 0
&& String.IsNullOrWhiteSpace(itemRom.CRC)
&& String.IsNullOrWhiteSpace(itemRom.MD5)
&& String.IsNullOrWhiteSpace(itemRom.SHA1)
&& String.IsNullOrWhiteSpace(itemRom.SHA256)
&& String.IsNullOrWhiteSpace(itemRom.SHA384)
&& String.IsNullOrWhiteSpace(itemRom.SHA512))
{
Globals.Logger.Verbose("{0}: Incomplete entry for '{1}' will be output as nodump", FileName, itemRom.Name);
itemRom.ItemStatus = ItemStatus.Nodump;
}
item = itemRom;
}
else if (item.ItemType == ItemType.Disk)
{
Disk itemDisk = (Disk)item;
// Sanitize the hashes from null, hex sizes, and "true blank" strings
itemDisk.MD5 = Utilities.CleanHashData(itemDisk.MD5, Constants.MD5Length);
itemDisk.SHA1 = Utilities.CleanHashData(itemDisk.SHA1, Constants.SHA1Length);
itemDisk.SHA256 = Utilities.CleanHashData(itemDisk.SHA256, Constants.SHA256Length);
itemDisk.SHA384 = Utilities.CleanHashData(itemDisk.SHA384, Constants.SHA384Length);
itemDisk.SHA512 = Utilities.CleanHashData(itemDisk.SHA512, Constants.SHA512Length);
// If the file has aboslutely no hashes, skip and log
if (itemDisk.ItemStatus != ItemStatus.Nodump
&& String.IsNullOrWhiteSpace(itemDisk.MD5)
&& String.IsNullOrWhiteSpace(itemDisk.SHA1)
&& String.IsNullOrWhiteSpace(itemDisk.SHA256)
&& String.IsNullOrWhiteSpace(itemDisk.SHA384)
&& String.IsNullOrWhiteSpace(itemDisk.SHA512))
{
Globals.Logger.Verbose("Incomplete entry for '{0}' will be output as nodump", itemDisk.Name);
itemDisk.ItemStatus = ItemStatus.Nodump;
}
item = itemDisk;
}
// Get the key and add the file
key = Utilities.GetKeyFromDatItem(item, SortedBy.CRC);
Add(key, item);
return key;
}
///
/// Add a rom to the Dat after checking
///
/// Item data to check against
/// True if the names should be cleaned to WoD standards, false otherwise
/// True if we should remove non-ASCII characters from output, false otherwise (default)
/// The key for the item
public async Task ParseAddHelperAsync(DatItem item, bool clean, bool remUnicode)
{
return await Task.Run(() => ParseAddHelper(item, clean, remUnicode));
}
///
/// Parse DatFile and return all found games and roms within
///
/// Name of the file to be parsed
/// System ID for the DAT
/// Source ID for the DAT
/// True if full pathnames are to be kept, false otherwise (default)
/// True if game names are sanitized, false otherwise (default)
/// True if we should remove non-ASCII characters from output, false otherwise (default)
public virtual void ParseFile(
// Standard Dat parsing
string filename,
int sysid,
int srcid,
// Miscellaneous
bool keep,
bool clean,
bool remUnicode)
{
throw new NotImplementedException();
}
#endregion
#region Populate DAT from Directory
///
/// Create a new Dat from a directory
///
/// Base folder to be used in creating the DAT
/// Hash flag saying what hashes should not be calculated
/// True if the date should be omitted from the DAT, false otherwise
/// True if archives should be treated as files, false otherwise
/// Type of files that should be skipped
/// True if blank items should be created for empty folders, false otherwise
/// True if dates should be archived for all files, false otherwise
/// Name of the directory to create a temp folder in (blank is current directory)
/// Output directory to
/// True if files should be copied to the temp directory before hashing, false otherwise
/// Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise
/// True if CHDs should be treated like regular files, false otherwise
/// Filter object to be passed to the DatItem level
public bool PopulateFromDir(string basePath, Hash omitFromScan, bool bare, bool archivesAsFiles, SkipFileType skipFileType,
bool addBlanks, bool addDate, string tempDir, bool copyFiles, string headerToCheckAgainst, bool chdsAsFiles, Filter filter)
{
// If the description is defined but not the name, set the name from the description
if (String.IsNullOrWhiteSpace(Name) && !String.IsNullOrWhiteSpace(Description))
{
Name = Description;
}
// If the name is defined but not the description, set the description from the name
else if (!String.IsNullOrWhiteSpace(Name) && String.IsNullOrWhiteSpace(Description))
{
Description = Name + (bare ? "" : " (" + Date + ")");
}
// If neither the name or description are defined, set them from the automatic values
else if (String.IsNullOrWhiteSpace(Name) && String.IsNullOrWhiteSpace(Description))
{
string[] splitpath = basePath.Split(Path.DirectorySeparatorChar);
Name = String.IsNullOrWhiteSpace(splitpath.Last()) ? splitpath[splitpath.Length - 2] : splitpath.Last();
Description = Name + (bare ? "" : " (" + Date + ")");
}
// Clean the temp directory path
tempDir = Utilities.EnsureTempDirectory(tempDir);
// Process the input
if (Directory.Exists(basePath))
{
Globals.Logger.Verbose("Folder found: {0}", basePath);
// Process the files in the main folder or any subfolder
List files = Directory.EnumerateFiles(basePath, "*", SearchOption.AllDirectories).ToList();
Parallel.ForEach(files, Globals.ParallelOptions, item =>
{
CheckFileForHashes(item, basePath, omitFromScan, bare, archivesAsFiles, skipFileType,
addBlanks, addDate, tempDir, copyFiles, headerToCheckAgainst, chdsAsFiles);
});
// Now find all folders that are empty, if we are supposed to
if (!Romba && addBlanks)
{
List empties = Utilities.GetEmptyDirectories(basePath).ToList();
Parallel.ForEach(empties, Globals.ParallelOptions, dir =>
{
// Get the full path for the directory
string fulldir = Path.GetFullPath(dir);
// Set the temporary variables
string gamename = "";
string romname = "";
// If we have a SuperDAT, we want anything that's not the base path as the game, and the file as the rom
if (Type == "SuperDAT")
{
gamename = fulldir.Remove(0, basePath.Length + 1);
romname = "_";
}
// Otherwise, we want just the top level folder as the game, and the file as everything else
else
{
gamename = fulldir.Remove(0, basePath.Length + 1).Split(Path.DirectorySeparatorChar)[0];
romname = Path.Combine(fulldir.Remove(0, basePath.Length + 1 + gamename.Length), "_");
}
// Sanitize the names
if (gamename.StartsWith(Path.DirectorySeparatorChar.ToString()))
{
gamename = gamename.Substring(1);
}
if (gamename.EndsWith(Path.DirectorySeparatorChar.ToString()))
{
gamename = gamename.Substring(0, gamename.Length - 1);
}
if (romname.StartsWith(Path.DirectorySeparatorChar.ToString()))
{
romname = romname.Substring(1);
}
if (romname.EndsWith(Path.DirectorySeparatorChar.ToString()))
{
romname = romname.Substring(0, romname.Length - 1);
}
Globals.Logger.Verbose("Adding blank empty folder: {0}", gamename);
this["null"].Add(new Rom(romname, gamename, omitFromScan));
});
}
}
else if (File.Exists(basePath))
{
CheckFileForHashes(basePath, Path.GetDirectoryName(Path.GetDirectoryName(basePath)), omitFromScan, bare, archivesAsFiles,
skipFileType, addBlanks, addDate, tempDir, copyFiles, headerToCheckAgainst, chdsAsFiles);
}
// Now that we're done, delete the temp folder (if it's not the default)
Globals.Logger.User("Cleaning temp folder");
if (tempDir != Path.GetTempPath())
{
Utilities.TryDeleteDirectory(tempDir);
}
// If we have a valid filter, perform the filtering now
if (filter != null && filter != default(Filter))
{
filter.FilterDatFile(this);
}
return true;
}
///
/// Check a given file for hashes, based on current settings
///
/// Filename of the item to be checked
/// Base folder to be used in creating the DAT
/// Hash flag saying what hashes should not be calculated
/// True if the date should be omitted from the DAT, false otherwise
/// True if archives should be treated as files, false otherwise
/// Type of files that should be skipped
/// True if blank items should be created for empty folders, false otherwise
/// True if dates should be archived for all files, false otherwise
/// Name of the directory to create a temp folder in (blank is current directory)
/// True if files should be copied to the temp directory before hashing, false otherwise
/// Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise
/// True if CHDs should be treated like regular files, false otherwise
private void CheckFileForHashes(string item, string basePath, Hash omitFromScan, bool bare, bool archivesAsFiles,
SkipFileType skipFileType, bool addBlanks, bool addDate, string tempDir, bool copyFiles, string headerToCheckAgainst, bool chdsAsFiles)
{
// Special case for if we are in Romba mode (all names are supposed to be SHA-1 hashes)
if (Romba)
{
GZipArchive gzarc = new GZipArchive(item);
BaseFile baseFile = gzarc.GetTorrentGZFileInfo();
// If the rom is valid, write it out
if (baseFile != null && baseFile.Filename != null)
{
// Add the list if it doesn't exist already
Rom rom = new Rom(baseFile);
Add(Utilities.GetKeyFromDatItem(rom, SortedBy.CRC), rom);
Globals.Logger.User("File added: {0}", Path.GetFileNameWithoutExtension(item) + Environment.NewLine);
}
else
{
Globals.Logger.User("File not added: {0}", Path.GetFileNameWithoutExtension(item) + Environment.NewLine);
return;
}
return;
}
// If we're copying files, copy it first and get the new filename
string newItem = item;
string newBasePath = basePath;
if (copyFiles)
{
newBasePath = Path.Combine(tempDir, Guid.NewGuid().ToString());
newItem = Path.GetFullPath(Path.Combine(newBasePath, Path.GetFullPath(item).Remove(0, basePath.Length + 1)));
Utilities.TryCreateDirectory(Path.GetDirectoryName(newItem));
File.Copy(item, newItem, true);
}
// Initialize possible archive variables
BaseArchive archive = Utilities.GetArchive(newItem);
List extracted = null;
// If we have an archive and we're supposed to scan it
if (archive != null && !archivesAsFiles)
{
extracted = archive.GetChildren(omitFromScan: omitFromScan, date: addDate);
}
// If the file should be skipped based on type, do so now
if ((extracted != null && skipFileType == SkipFileType.Archive)
|| (extracted == null && skipFileType == SkipFileType.File))
{
return;
}
// If the extracted list is null, just scan the item itself
if (extracted == null)
{
ProcessFile(newItem, "", newBasePath, omitFromScan, addDate, headerToCheckAgainst, chdsAsFiles);
}
// Otherwise, add all of the found items
else
{
// First take care of the found items
Parallel.ForEach(extracted, Globals.ParallelOptions, rom =>
{
DatItem datItem = Utilities.GetDatItem(rom);
ProcessFileHelper(newItem,
datItem,
basePath,
(Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item));
});
// Then, if we're looking for blanks, get all of the blank folders and add them
if (addBlanks)
{
List empties = new List();
// Now get all blank folders from the archive
if (archive != null)
{
empties = archive.GetEmptyFolders();
}
// Add add all of the found empties to the DAT
Parallel.ForEach(empties, Globals.ParallelOptions, empty =>
{
Rom emptyRom = new Rom(Path.Combine(empty, "_"), newItem, omitFromScan);
ProcessFileHelper(newItem,
emptyRom,
basePath,
(Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item));
});
}
}
// Cue to delete the file if it's a copy
if (copyFiles && item != newItem)
{
Utilities.TryDeleteDirectory(newBasePath);
}
}
///
/// Process a single file as a file
///
/// File to be added
/// Parent game to be used
/// Path the represents the parent directory
/// Hash flag saying what hashes should not be calculated
/// True if dates should be archived for all files, false otherwise
/// Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise
/// True if CHDs should be treated like regular files, false otherwise
private void ProcessFile(string item, string parent, string basePath, Hash omitFromScan,
bool addDate, string headerToCheckAgainst, bool chdsAsFiles)
{
Globals.Logger.Verbose("'{0}' treated like a file", Path.GetFileName(item));
BaseFile baseFile = Utilities.GetFileInfo(item, omitFromScan: omitFromScan, date: addDate, header: headerToCheckAgainst, chdsAsFiles: chdsAsFiles);
ProcessFileHelper(item, Utilities.GetDatItem(baseFile), basePath, parent);
}
///
/// Process a single file as a file (with found Rom data)
///
/// File to be added
/// Rom data to be used to write to file
/// Path the represents the parent directory
/// Parent game to be used
private void ProcessFileHelper(string item, DatItem datItem, string basepath, string parent)
{
// If we somehow got something other than a Rom or Disk, cancel out
if (datItem.ItemType != ItemType.Rom && datItem.ItemType != ItemType.Disk)
{
return;
}
try
{
// If the basepath ends with a directory separator, remove it
if (!basepath.EndsWith(Path.DirectorySeparatorChar.ToString()))
{
basepath += Path.DirectorySeparatorChar.ToString();
}
// Make sure we have the full item path
item = Path.GetFullPath(item);
// Process the item to sanitize names based on input
SetDatItemInfo(datItem, item, parent, basepath);
// Add the file information to the DAT
string key = Utilities.GetKeyFromDatItem(datItem, SortedBy.CRC);
Add(key, datItem);
Globals.Logger.User("File added: {0}", datItem.Name + Environment.NewLine);
}
catch (IOException ex)
{
Globals.Logger.Error(ex.ToString());
return;
}
}
///
/// Set proper Game and Rom names from user inputs
///
/// DatItem representing the input file
/// Item name to use
/// Parent name to use
/// Base path to use
private void SetDatItemInfo(DatItem datItem, string item, string parent, string basepath)
{
// Get the data to be added as game and item names
string gamename = "";
string romname = "";
// If the parent is blank, then we have a non-archive file
if (String.IsNullOrWhiteSpace(parent))
{
// If we have a SuperDAT, we want anything that's not the base path as the game, and the file as the rom
if (Type == "SuperDAT")
{
gamename = Path.GetDirectoryName(item.Remove(0, basepath.Length));
romname = Path.GetFileName(item);
}
// Otherwise, we want just the top level folder as the game, and the file as everything else
else
{
gamename = item.Remove(0, basepath.Length).Split(Path.DirectorySeparatorChar)[0];
romname = item.Remove(0, (Path.Combine(basepath, gamename).Length));
}
}
// Otherwise, we assume that we have an archive
else
{
// If we have a SuperDAT, we want the archive name as the game, and the file as everything else (?)
if (Type == "SuperDAT")
{
gamename = parent;
romname = datItem.Name;
}
// Otherwise, we want the archive name as the game, and the file as everything else
else
{
gamename = parent;
romname = datItem.Name;
}
}
// Sanitize the names
if (romname == null)
{
romname = "";
}
if (gamename.StartsWith(Path.DirectorySeparatorChar.ToString()))
{
gamename = gamename.Substring(1);
}
if (gamename.EndsWith(Path.DirectorySeparatorChar.ToString()))
{
gamename = gamename.Substring(0, gamename.Length - 1);
}
if (romname.StartsWith(Path.DirectorySeparatorChar.ToString()))
{
romname = romname.Substring(1);
}
if (romname.EndsWith(Path.DirectorySeparatorChar.ToString()))
{
romname = romname.Substring(0, romname.Length - 1);
}
if (!String.IsNullOrWhiteSpace(gamename) && String.IsNullOrWhiteSpace(romname))
{
romname = gamename;
gamename = "Default";
}
// Update rom information
datItem.Name = romname;
datItem.MachineName = gamename;
datItem.MachineDescription = gamename;
// If we have a Disk, then the ".chd" extension needs to be removed
if (datItem.ItemType == ItemType.Disk)
{
datItem.Name = datItem.Name.Replace(".chd", "");
}
}
#endregion
#region Rebuilding and Verifying
///
/// Process the DAT and find all matches in input files and folders assuming they're a depot
///
/// List of input files/folders to check
/// Output directory to use to build to
/// True if the date from the DAT should be used if available, false otherwise
/// True if input files should be deleted, false otherwise
/// True if the DAT should be used as a filter instead of a template, false otherwise
/// Output format that files should be written to
/// True if files should be output in Romba depot folders, false otherwise
/// True if the updated DAT should be output, false otherwise
/// Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise
/// True if rebuilding was a success, false otherwise
public bool RebuildDepot(List inputs, string outDir, bool date, bool delete,
bool inverse, OutputFormat outputFormat, bool romba, bool updateDat, string headerToCheckAgainst)
{
#region Perform setup
// If the DAT is not populated and inverse is not set, inform the user and quit
if (Count == 0 && !inverse)
{
Globals.Logger.User("No entries were found to rebuild, exiting...");
return false;
}
// Check that the output directory exists
outDir = Utilities.EnsureOutputDirectory(outDir, create: true);
// Now we want to get forcepack flag if it's not overridden
if (outputFormat == OutputFormat.Folder && ForcePacking != ForcePacking.None)
{
switch (ForcePacking)
{
case ForcePacking.Zip:
outputFormat = OutputFormat.TorrentZip;
break;
case ForcePacking.Unzip:
outputFormat = OutputFormat.Folder;
break;
}
}
// Preload the Skipper list
int listcount = Skipper.List.Count;
#endregion
bool success = true;
#region Rebuild from depots in order
string format = "";
switch (outputFormat)
{
case OutputFormat.Folder:
format = "directory";
break;
case OutputFormat.TapeArchive:
format = "TAR";
break;
case OutputFormat.Torrent7Zip:
format = "Torrent7Z";
break;
case OutputFormat.TorrentGzip:
format = "TorrentGZ";
break;
case OutputFormat.TorrentLRZip:
format = "TorrentLRZ";
break;
case OutputFormat.TorrentRar:
format = "TorrentRAR";
break;
case OutputFormat.TorrentXZ:
format = "TorrentXZ";
break;
case OutputFormat.TorrentZip:
format = "TorrentZip";
break;
}
InternalStopwatch watch = new InternalStopwatch("Rebuilding all files to {0}", format);
// Now loop through and get only directories from the input paths
List directories = new List();
Parallel.ForEach(inputs, Globals.ParallelOptions, input =>
{
// Add to the list if the input is a directory
if (Directory.Exists(input))
{
Globals.Logger.Verbose("Adding depot: {0}", input);
lock (directories)
{
directories.Add(input);
}
}
});
// If we don't have any directories, we want to exit
if (directories.Count == 0)
{
return success;
}
// Now that we have a list of depots, we want to sort the input DAT by SHA-1
BucketBy(SortedBy.SHA1, DedupeType.None);
// Then we want to loop through each of the hashes and see if we can rebuild
List hashes = Keys;
foreach (string hash in hashes)
{
// Pre-empt any issues that could arise from string length
if (hash.Length != Constants.SHA1Length)
{
continue;
}
Globals.Logger.User("Checking hash '{0}'", hash);
// Get the extension path for the hash
string subpath = Utilities.GetRombaPath(hash);
// Find the first depot that includes the hash
string foundpath = null;
foreach (string directory in directories)
{
if (File.Exists(Path.Combine(directory, subpath)))
{
foundpath = Path.Combine(directory, subpath);
break;
}
}
// If we didn't find a path, then we continue
if (foundpath == null)
{
continue;
}
// If we have a path, we want to try to get the rom information
GZipArchive archive = new GZipArchive(foundpath);
BaseFile fileinfo = archive.GetTorrentGZFileInfo();
// If the file information is null, then we continue
if (fileinfo == null)
{
continue;
}
// Otherwise, we rebuild that file to all locations that we need to
if (this[hash][0].ItemType == ItemType.Disk)
{
RebuildIndividualFile(new Disk(fileinfo), foundpath, outDir, date, inverse, outputFormat, romba,
updateDat, false /* isZip */, headerToCheckAgainst);
}
else
{
RebuildIndividualFile(new Rom(fileinfo), foundpath, outDir, date, inverse, outputFormat, romba,
updateDat, false /* isZip */, headerToCheckAgainst);
}
}
watch.Stop();
#endregion
// If we're updating the DAT, output to the rebuild directory
if (updateDat)
{
FileName = "fixDAT_" + FileName;
Name = "fixDAT_" + Name;
Description = "fixDAT_" + Description;
RemoveMarkedItems();
Write(outDir);
}
return success;
}
///
/// Process the DAT and find all matches in input files and folders
///
/// List of input files/folders to check
/// Output directory to use to build to
/// True to enable external scanning of archives, false otherwise
/// True if the date from the DAT should be used if available, false otherwise
/// True if input files should be deleted, false otherwise
/// True if the DAT should be used as a filter instead of a template, false otherwise
/// Output format that files should be written to
/// True if files should be output in Romba depot folders, false otherwise
/// ArchiveScanLevel representing the archive handling levels
/// True if the updated DAT should be output, false otherwise
/// Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise
/// True if CHDs should be treated like regular files, false otherwise
/// True if rebuilding was a success, false otherwise
public bool RebuildGeneric(List inputs, string outDir, bool quickScan, bool date,
bool delete, bool inverse, OutputFormat outputFormat, bool romba, ArchiveScanLevel archiveScanLevel, bool updateDat,
string headerToCheckAgainst, bool chdsAsFiles)
{
#region Perform setup
// If the DAT is not populated and inverse is not set, inform the user and quit
if (Count == 0 && !inverse)
{
Globals.Logger.User("No entries were found to rebuild, exiting...");
return false;
}
// Check that the output directory exists
if (!Directory.Exists(outDir))
{
Directory.CreateDirectory(outDir);
outDir = Path.GetFullPath(outDir);
}
// Now we want to get forcepack flag if it's not overridden
if (outputFormat == OutputFormat.Folder && ForcePacking != ForcePacking.None)
{
switch (ForcePacking)
{
case ForcePacking.Zip:
outputFormat = OutputFormat.TorrentZip;
break;
case ForcePacking.Unzip:
outputFormat = OutputFormat.Folder;
break;
}
}
// Preload the Skipper list
int listcount = Skipper.List.Count;
#endregion
bool success = true;
#region Rebuild from sources in order
string format = "";
switch (outputFormat)
{
case OutputFormat.Folder:
format = "directory";
break;
case OutputFormat.TapeArchive:
format = "TAR";
break;
case OutputFormat.Torrent7Zip:
format = "Torrent7Z";
break;
case OutputFormat.TorrentGzip:
format = "TorrentGZ";
break;
case OutputFormat.TorrentLRZip:
format = "TorrentLRZ";
break;
case OutputFormat.TorrentRar:
format = "TorrentRAR";
break;
case OutputFormat.TorrentXZ:
format = "TorrentXZ";
break;
case OutputFormat.TorrentZip:
format = "TorrentZip";
break;
}
InternalStopwatch watch = new InternalStopwatch("Rebuilding all files to {0}", format);
// Now loop through all of the files in all of the inputs
foreach (string input in inputs)
{
// If the input is a file
if (File.Exists(input))
{
Globals.Logger.User("Checking file: {0}", input);
RebuildGenericHelper(input, outDir, quickScan, date, delete, inverse,
outputFormat, romba, archiveScanLevel, updateDat, headerToCheckAgainst, chdsAsFiles);
}
// If the input is a directory
else if (Directory.Exists(input))
{
Globals.Logger.Verbose("Checking directory: {0}", input);
foreach (string file in Directory.EnumerateFiles(input, "*", SearchOption.AllDirectories))
{
Globals.Logger.User("Checking file: {0}", file);
RebuildGenericHelper(file, outDir, quickScan, date, delete, inverse,
outputFormat, romba, archiveScanLevel, updateDat, headerToCheckAgainst, chdsAsFiles);
}
}
}
watch.Stop();
#endregion
// If we're updating the DAT, output to the rebuild directory
if (updateDat)
{
FileName = "fixDAT_" + FileName;
Name = "fixDAT_" + Name;
Description = "fixDAT_" + Description;
RemoveMarkedItems();
Write(outDir);
}
return success;
}
///
/// Attempt to add a file to the output if it matches
///
/// Name of the file to process
/// Output directory to use to build to
/// True to enable external scanning of archives, false otherwise
/// True if the date from the DAT should be used if available, false otherwise
/// True if input files should be deleted, false otherwise
/// True if the DAT should be used as a filter instead of a template, false otherwise
/// Output format that files should be written to
/// True if files should be output in Romba depot folders, false otherwise
/// ArchiveScanLevel representing the archive handling levels
/// True if the updated DAT should be output, false otherwise
/// Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise
/// True if CHDs should be treated like regular files, false otherwise
private void RebuildGenericHelper(string file, string outDir, bool quickScan, bool date,
bool delete, bool inverse, OutputFormat outputFormat, bool romba, ArchiveScanLevel archiveScanLevel, bool updateDat,
string headerToCheckAgainst, bool chdsAsFiles)
{
// If we somehow have a null filename, return
if (file == null)
{
return;
}
// Set the deletion variables
bool usedExternally = false;
bool usedInternally = false;
// Get the required scanning level for the file
Utilities.GetInternalExternalProcess(file, archiveScanLevel, out bool shouldExternalProcess, out bool shouldInternalProcess);
// If we're supposed to scan the file externally
if (shouldExternalProcess)
{
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
BaseFile fileinfo = Utilities.GetFileInfo(file, omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes),
header: headerToCheckAgainst, chdsAsFiles: chdsAsFiles);
DatItem datItem = null;
if (fileinfo.Type == FileType.CHD)
{
datItem = new Disk(fileinfo);
}
else if (fileinfo.Type == FileType.None)
{
datItem = new Rom(fileinfo);
}
usedExternally = RebuildIndividualFile(datItem, file, outDir, date, inverse, outputFormat,
romba, updateDat, null /* isZip */, headerToCheckAgainst);
}
// If we're supposed to scan the file internally
if (shouldInternalProcess)
{
// Create an empty list of BaseFile for archive entries
List entries = null;
usedInternally = true;
// Get the TGZ status for later
GZipArchive tgz = new GZipArchive(file);
bool isTorrentGzip = tgz.IsTorrent();
// Get the base archive first
BaseArchive archive = Utilities.GetArchive(file);
// Now get all extracted items from the archive
if (archive != null)
{
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
entries = archive.GetChildren(omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes), date: date);
}
// If the entries list is null, we encountered an error and should scan exteranlly
if (entries == null && File.Exists(file))
{
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
BaseFile fileinfo = Utilities.GetFileInfo(file, omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes), chdsAsFiles: chdsAsFiles);
DatItem datItem = null;
if (fileinfo.Type == FileType.CHD)
{
datItem = new Disk(fileinfo);
}
else if (fileinfo.Type == FileType.None)
{
datItem = new Rom(fileinfo);
}
usedExternally = RebuildIndividualFile(datItem, file, outDir, date, inverse, outputFormat,
romba, updateDat, null /* isZip */, headerToCheckAgainst);
}
// Otherwise, loop through the entries and try to match
else
{
foreach (BaseFile entry in entries)
{
DatItem datItem = Utilities.GetDatItem(entry);
usedInternally &= RebuildIndividualFile(datItem, file, outDir, date, inverse, outputFormat,
romba, updateDat, !isTorrentGzip /* isZip */, headerToCheckAgainst);
}
}
}
// If we are supposed to delete the file, do so
if (delete && (usedExternally || usedInternally))
{
Utilities.TryDeleteFile(file);
}
}
/// <summary>
/// Find duplicates and rebuild individual files to output
/// </summary>
/// <param name="datItem">Information for the current file to rebuild from</param>
/// <param name="file">Name of the file to process</param>
/// <param name="outDir">Output directory to use to build to</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
/// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param>
/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
/// <param name="isZip">True if the input file is an archive, false if the file is TGZ, null otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <returns>True if the file was able to be rebuilt, false otherwise</returns>
private bool RebuildIndividualFile(DatItem datItem, string file, string outDir, bool date,
    bool inverse, OutputFormat outputFormat, bool romba, bool updateDat, bool? isZip, string headerToCheckAgainst)
{
    // Set the output value
    bool rebuilt = true;

    // If the DatItem is a Disk, force rebuilding to a folder except if TGZ
    if (datItem.ItemType == ItemType.Disk && outputFormat != OutputFormat.TorrentGzip)
    {
        outputFormat = OutputFormat.Folder;
    }

    // Prepopluate a few key strings based on DatItem type
    // (Disks carry no CRC, so it is left as the empty string there)
    string crc = null;
    string sha1 = null;
    if (datItem.ItemType == ItemType.Rom)
    {
        crc = ((Rom)datItem).CRC;
        sha1 = ((Rom)datItem).SHA1;
    }
    else if (datItem.ItemType == ItemType.Disk)
    {
        crc = "";
        sha1 = ((Disk)datItem).SHA1;
    }

    // Find if the file has duplicates in the DAT
    bool hasDuplicates = datItem.HasDuplicates(this);

    // If it has duplicates and we're not filtering, rebuild it
    if (hasDuplicates && !inverse)
    {
        // Get the list of duplicates to rebuild to
        List dupes = datItem.GetDuplicates(this, remove: updateDat);

        // If we don't have any duplicates, continue
        if (dupes.Count == 0)
        {
            return false;
        }

        // If we have a very specific TGZ->TGZ case, just copy it accordingly
        // NOTE(review): this TGZ short-circuit is duplicated verbatim in the
        // inverse branch below — candidate for extraction into a helper
        GZipArchive tgz = new GZipArchive(file);
        BaseFile rom = tgz.GetTorrentGZFileInfo();
        if (isZip == false && rom != null && outputFormat == OutputFormat.TorrentGzip)
        {
            Globals.Logger.User("Matches found for '{0}', rebuilding accordingly...", Path.GetFileName(datItem.Name));

            // Get the proper output path
            if (romba)
            {
                outDir = Path.Combine(outDir, Utilities.GetRombaPath(sha1));
            }
            else
            {
                outDir = Path.Combine(outDir, sha1 + ".gz");
            }

            // Make sure the output folder is created
            Directory.CreateDirectory(Path.GetDirectoryName(outDir));

            // Now copy the file over
            try
            {
                File.Copy(file, outDir);
                rebuilt &= true; // no-op: preserves the current value of rebuilt
            }
            catch
            {
                rebuilt = false;
            }

            return rebuilt;
        }

        // Get a generic stream for the file
        Stream fileStream = new MemoryStream();

        // If we have a zipfile, extract the stream to memory
        if (isZip != null)
        {
            string realName = null;
            BaseArchive archive = Utilities.GetArchive(file);
            if (archive != null)
            {
                (fileStream, realName) = archive.CopyToStream(datItem.Name);
            }
        }
        // Otherwise, just open the filestream
        else
        {
            fileStream = Utilities.TryOpenRead(file);
        }

        // If the stream is null, then continue
        if (fileStream == null)
        {
            return false;
        }

        // Seek to the beginning of the stream
        fileStream.Seek(0, SeekOrigin.Begin);

        Globals.Logger.User("Matches found for '{0}', rebuilding accordingly...", Path.GetFileName(datItem.Name));
        rebuilt = true;

        // Now loop through the list and rebuild accordingly
        foreach (DatItem item in dupes)
        {
            // Get the output archive, if possible
            Folder outputArchive = Utilities.GetArchive(outputFormat);

            // Now rebuild to the output file
            // NOTE(review): unlike the loops below, the Write result is not
            // folded into rebuilt here — confirm whether that is intended
            outputArchive.Write(fileStream, outDir, (Rom)item, date: date, romba: romba);
        }

        // Close the input stream
        fileStream?.Dispose();
    }

    // If we have no duplicates and we're filtering, rebuild it
    else if (!hasDuplicates && inverse)
    {
        // NOTE(review): machinename is never assigned before its use below,
        // so the "coming from an archive" branch is currently dead code
        string machinename = null;

        // If we have a very specific TGZ->TGZ case, just copy it accordingly
        GZipArchive tgz = new GZipArchive(file);
        BaseFile rom = tgz.GetTorrentGZFileInfo();
        if (isZip == false && rom != null && outputFormat == OutputFormat.TorrentGzip)
        {
            // NOTE(review): this message reads "Matches found" but we are in
            // the no-match (inverse) branch — looks copy-pasted from above
            Globals.Logger.User("Matches found for '{0}', rebuilding accordingly...", Path.GetFileName(datItem.Name));

            // Get the proper output path
            if (romba)
            {
                outDir = Path.Combine(outDir, Utilities.GetRombaPath(sha1));
            }
            else
            {
                outDir = Path.Combine(outDir, sha1 + ".gz");
            }

            // Make sure the output folder is created
            Directory.CreateDirectory(Path.GetDirectoryName(outDir));

            // Now copy the file over
            try
            {
                File.Copy(file, outDir);
                rebuilt &= true; // no-op: preserves the current value of rebuilt
            }
            catch
            {
                rebuilt = false;
            }

            return rebuilt;
        }

        // Get a generic stream for the file
        Stream fileStream = new MemoryStream();

        // If we have a zipfile, extract the stream to memory
        if (isZip != null)
        {
            string realName = null;
            BaseArchive archive = Utilities.GetArchive(file);
            if (archive != null)
            {
                (fileStream, realName) = archive.CopyToStream(datItem.Name);
            }
        }
        // Otherwise, just open the filestream
        else
        {
            fileStream = Utilities.TryOpenRead(file);
        }

        // If the stream is null, then continue
        if (fileStream == null)
        {
            return false;
        }

        // Get the item from the current file, using its own filename as both
        // the machine name and description
        Rom item = new Rom(Utilities.GetStreamInfo(fileStream, fileStream.Length, keepReadOpen: true));
        item.MachineName = Path.GetFileNameWithoutExtension(item.Name);
        item.MachineDescription = Path.GetFileNameWithoutExtension(item.Name);

        // If we are coming from an archive, set the correct machine name
        if (machinename != null)
        {
            item.MachineName = machinename;
            item.MachineDescription = machinename;
        }

        Globals.Logger.User("No matches found for '{0}', rebuilding accordingly from inverse flag...", Path.GetFileName(datItem.Name));

        // Get the output archive, if possible
        Folder outputArchive = Utilities.GetArchive(outputFormat);

        // Now rebuild to the output file
        if (outputArchive == null)
        {
            string outfile = Path.Combine(outDir, Utilities.RemovePathUnsafeCharacters(item.MachineName), item.Name);

            // Make sure the output folder is created
            Directory.CreateDirectory(Path.GetDirectoryName(outfile));

            // Now copy the file over
            try
            {
                FileStream writeStream = Utilities.TryCreate(outfile);

                // Copy the input stream to the output in 512 KiB chunks
                int bufferSize = 4096 * 128;
                byte[] ibuffer = new byte[bufferSize];
                int ilen;
                while ((ilen = fileStream.Read(ibuffer, 0, bufferSize)) > 0)
                {
                    writeStream.Write(ibuffer, 0, ilen);
                    writeStream.Flush();
                }
                writeStream.Dispose();

                // Restore the DAT-provided date on the output, if requested
                if (date && !String.IsNullOrWhiteSpace(item.Date))
                {
                    File.SetCreationTime(outfile, DateTime.Parse(item.Date));
                }

                rebuilt &= true; // no-op: preserves the current value of rebuilt
            }
            catch
            {
                rebuilt &= false;
            }
        }
        else
        {
            rebuilt &= outputArchive.Write(fileStream, outDir, item, date: date, romba: romba);
        }

        // Close the input stream
        fileStream?.Dispose();
    }

    // Now we want to take care of headers, if applicable
    if (headerToCheckAgainst != null)
    {
        // Get a generic stream for the file
        Stream fileStream = new MemoryStream();

        // If we have a zipfile, extract the stream to memory
        if (isZip != null)
        {
            string realName = null;
            BaseArchive archive = Utilities.GetArchive(file);
            if (archive != null)
            {
                (fileStream, realName) = archive.CopyToStream(datItem.Name);
            }
        }
        // Otherwise, just open the filestream
        else
        {
            fileStream = Utilities.TryOpenRead(file);
        }

        // If the stream is null, then continue
        if (fileStream == null)
        {
            return false;
        }

        // Check to see if we have a matching header first
        SkipperRule rule = Skipper.GetMatchingRule(fileStream, Path.GetFileNameWithoutExtension(headerToCheckAgainst));

        // If there's a match, create the new file to write
        if (rule.Tests != null && rule.Tests.Count != 0)
        {
            // If the file could be transformed correctly
            MemoryStream transformStream = new MemoryStream();
            if (rule.TransformStream(fileStream, transformStream, keepReadOpen: true, keepWriteOpen: true))
            {
                // Get the file informations that we will be using
                Rom headerless = new Rom(Utilities.GetStreamInfo(transformStream, transformStream.Length, keepReadOpen: true));

                // Find if the headerless file has duplicates in the DAT
                hasDuplicates = headerless.HasDuplicates(this);

                // If it has duplicates and we're not filtering, rebuild it
                if (hasDuplicates && !inverse)
                {
                    // Get the list of duplicates to rebuild to
                    List dupes = headerless.GetDuplicates(this, remove: updateDat);

                    // If we don't have any duplicates, continue
                    if (dupes.Count == 0)
                    {
                        return false;
                    }

                    Globals.Logger.User("Headerless matches found for '{0}', rebuilding accordingly...", Path.GetFileName(datItem.Name));
                    rebuilt = true;

                    // Now loop through the list and rebuild accordingly
                    foreach (DatItem item in dupes)
                    {
                        // Create a headered item to use as well
                        datItem.CopyMachineInformation(item);
                        // NOTE(review): this append runs once per duplicate,
                        // so the name grows with each iteration — confirm
                        datItem.Name += "_" + crc;

                        // If either copy succeeds, then we want to set rebuilt to true
                        bool eitherSuccess = false;

                        // Get the output archive, if possible
                        Folder outputArchive = Utilities.GetArchive(outputFormat);

                        // Now rebuild to the output file, both headerless and headered
                        eitherSuccess |= outputArchive.Write(transformStream, outDir, (Rom)item, date: date, romba: romba);
                        eitherSuccess |= outputArchive.Write(fileStream, outDir, (Rom)datItem, date: date, romba: romba);

                        // Now add the success of either rebuild
                        rebuilt &= eitherSuccess;
                    }
                }
            }

            // Dispose of the stream
            transformStream?.Dispose();
        }

        // Dispose of the stream
        fileStream?.Dispose();
    }

    return rebuilt;
}
/// <summary>
/// Process the DAT and verify from the depots
/// </summary>
/// <param name="inputs">List of input directories to compare against</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <returns>True if verification was a success, false otherwise</returns>
public bool VerifyDepot(List<string> inputs, string headerToCheckAgainst)
{
    bool success = true;

    InternalStopwatch watch = new InternalStopwatch("Verifying all from supplied depots");

    // Now loop through and get only directories from the input paths
    List<string> directories = new List<string>();
    foreach (string input in inputs)
    {
        // Add to the list if the input is a directory
        if (Directory.Exists(input))
        {
            Globals.Logger.Verbose("Adding depot: {0}", input);
            directories.Add(input);
        }
    }

    // If we don't have any directories, we want to exit
    if (directories.Count == 0)
    {
        return success;
    }

    // Now that we have a list of depots, we want to sort the input DAT by SHA-1
    BucketBy(SortedBy.SHA1, DedupeType.None);

    // Then we want to loop through each of the hashes and see if we can rebuild
    List<string> hashes = Keys;
    foreach (string hash in hashes)
    {
        // Pre-empt any issues that could arise from string length
        if (hash.Length != Constants.SHA1Length)
        {
            continue;
        }

        Globals.Logger.User("Checking hash '{0}'", hash);

        // Get the extension path for the hash
        string subpath = Utilities.GetRombaPath(hash);

        // Find the first depot that includes the hash
        string foundpath = null;
        foreach (string directory in directories)
        {
            if (File.Exists(Path.Combine(directory, subpath)))
            {
                foundpath = Path.Combine(directory, subpath);
                break;
            }
        }

        // If we didn't find a path, then we continue
        if (foundpath == null)
        {
            continue;
        }

        // If we have a path, we want to try to get the rom information
        GZipArchive tgz = new GZipArchive(foundpath);
        BaseFile fileinfo = tgz.GetTorrentGZFileInfo();

        // If the file information is null, then we continue
        if (fileinfo == null)
        {
            continue;
        }

        // Now remove all matching items from the DAT. Both calls are made
        // purely for their side effect (remove: true); the returned duplicate
        // lists are intentionally discarded.
        new Rom(fileinfo).GetDuplicates(this, remove: true);
        new Disk(fileinfo).GetDuplicates(this, remove: true);
    }

    watch.Stop();

    // Whatever remains in the DAT was not found in any depot; output it as a
    // fixdat so the user knows what is missing
    FileName = "fixDAT_" + FileName;
    Name = "fixDAT_" + Name;
    Description = "fixDAT_" + Description;
    RemoveMarkedItems();
    Write();

    return success;
}
/// <summary>
/// Process the DAT and verify the output directory
/// </summary>
/// <param name="inputs">List of input directories to compare against</param>
/// <param name="hashOnly">True if only hashes should be checked, false for full file information</param>
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
/// <returns>True if verification was a success, false otherwise</returns>
public bool VerifyGeneric(List<string> inputs, bool hashOnly, bool quickScan, string headerToCheckAgainst, bool chdsAsFiles, Filter filter)
{
    // TODO: We want the cross section of what's the folder and what's in the DAT. Right now, it just has what's in the DAT that's not in the folder
    bool success = true;

    // Then, loop through and check each of the inputs
    Globals.Logger.User("Processing files:\n");
    foreach (string input in inputs)
    {
        // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
        PopulateFromDir(input, (quickScan ? Hash.SecureHashes : Hash.DeepHashes) /* omitFromScan */, true /* bare */, false /* archivesAsFiles */,
            SkipFileType.None, false /* addBlanks */, false /* addDate */, "" /* tempDir */, false /* copyFiles */, headerToCheckAgainst, chdsAsFiles, filter);
    }

    // Setup the fixdat
    DatFile matched = new DatFile(this);
    matched.ResetDictionary();
    matched.FileName = "fixDat_" + matched.FileName;
    matched.Name = "fixDat_" + matched.Name;
    matched.Description = "fixDat_" + matched.Description;
    matched.DatFormat = DatFormat.Logiqx;

    // If we are checking hashes only, essentially diff the inputs
    if (hashOnly)
    {
        // First we need to sort and dedupe by hash to get duplicates
        BucketBy(SortedBy.CRC, DedupeType.Full);

        // Then follow the same tactics as before
        foreach (string key in Keys)
        {
            List<DatItem> roms = this[key];
            foreach (DatItem rom in roms)
            {
                if (rom.SourceID == 99)
                {
                    // BUGFIX: the original unconditionally cast to Disk here,
                    // which threw InvalidCastException for Rom items; use the
                    // SHA-1 from the correct concrete type instead
                    if (rom.ItemType == ItemType.Disk)
                    {
                        matched.Add(((Disk)rom).SHA1, rom);
                    }
                    else if (rom.ItemType == ItemType.Rom)
                    {
                        matched.Add(((Rom)rom).SHA1, rom);
                    }
                }
            }
        }
    }
    // If we are checking full names, get only files found in directory
    else
    {
        foreach (string key in Keys)
        {
            List<DatItem> roms = this[key];
            List<DatItem> newroms = DatItem.Merge(roms);
            foreach (Rom rom in newroms)
            {
                if (rom.SourceID == 99)
                {
                    matched.Add(rom.Size + "-" + rom.CRC, rom);
                }
            }
        }
    }

    // Now output the fixdat to the main folder
    RemoveMarkedItems();
    success &= matched.Write(stats: true);

    return success;
}
#endregion
#region Splitting
/// <summary>
/// Split a set of input DATs based on the given information
/// </summary>
/// <param name="inputs">List of inputs to be used</param>
/// <param name="outDir">Output directory for the split files</param>
/// <param name="inplace">True if files should be written to the source folders, false otherwise</param>
/// <param name="splittingMode">Type of split to perform, if any</param>
/// <param name="exta">First extension to split on (Extension Split only)</param>
/// <param name="extb">Second extension to split on (Extension Split only)</param>
/// <param name="shortname">True if short filenames should be used, false otherwise (Level Split only)</param>
/// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise (Level Split only)</param>
/// <param name="radix">Long value representing the split point (Size Split only)</param>
public void DetermineSplitType(List<string> inputs, string outDir, bool inplace, SplittingMode splittingMode,
    List<string> exta, List<string> extb, bool shortname, bool basedat, long radix)
{
    // A "none" split type means there is nothing to do
    if (splittingMode == SplittingMode.None)
    {
        return;
    }

    // Reduce the inputs to only the actual files
    List<string> inputFiles = Utilities.GetOnlyFilesFromInputs(inputs, appendparent: true);

    foreach (string path in inputFiles)
    {
        // Create and fill the new DAT
        Parse(path, 0, 0);

        // Resolve the output directory for this particular input
        outDir = Utilities.GetOutputPath(outDir, path, inplace);

        // Run every requested split mode in turn and write the results
        if (splittingMode.HasFlag(SplittingMode.Extension))
        {
            SplitByExtension(outDir, exta, extb);
        }
        if (splittingMode.HasFlag(SplittingMode.Hash))
        {
            SplitByHash(outDir);
        }
        if (splittingMode.HasFlag(SplittingMode.Level))
        {
            SplitByLevel(outDir, shortname, basedat);
        }
        if (splittingMode.HasFlag(SplittingMode.Size))
        {
            SplitBySize(outDir, radix);
        }
        if (splittingMode.HasFlag(SplittingMode.Type))
        {
            SplitByType(outDir);
        }

        // Empty the DAT for the next input, preserving only the format
        DatFormat preservedFormat = DatFormat;
        _datHeader = new DatHeader();
        ResetDictionary();
        DatFormat = preservedFormat;
    }
}
/// <summary>
/// Split a DAT by input extensions
/// </summary>
/// <param name="outDir">Name of the directory to write the DATs out to</param>
/// <param name="extA">List of extensions to split on (first DAT)</param>
/// <param name="extB">List of extensions to split on (second DAT)</param>
/// <returns>True if split succeeded, false otherwise</returns>
public bool SplitByExtension(string outDir, List<string> extA, List<string> extB)
{
    // Normalize both extension lists: strip any leading dot and uppercase
    List<string> newExtA = extA
        .Select(s => (s.StartsWith(".") ? s.Substring(1) : s).ToUpperInvariant())
        .ToList();
    string newExtAString = string.Join(",", newExtA);

    List<string> newExtB = extB
        .Select(s => (s.StartsWith(".") ? s.Substring(1) : s).ToUpperInvariant())
        .ToList();
    string newExtBString = string.Join(",", newExtB);

    // Set all of the appropriate outputs for each of the subsets
    DatFile datdataA = new DatFile
    {
        FileName = this.FileName + " (" + newExtAString + ")",
        Name = this.Name + " (" + newExtAString + ")",
        Description = this.Description + " (" + newExtAString + ")",
        Category = this.Category,
        Version = this.Version,
        Date = this.Date,
        Author = this.Author,
        Email = this.Email,
        Homepage = this.Homepage,
        Url = this.Url,
        Comment = this.Comment,
        DatFormat = this.DatFormat,
    };
    DatFile datdataB = new DatFile
    {
        FileName = this.FileName + " (" + newExtBString + ")",
        Name = this.Name + " (" + newExtBString + ")",
        Description = this.Description + " (" + newExtBString + ")",
        Category = this.Category,
        Version = this.Version,
        Date = this.Date,
        Author = this.Author,
        Email = this.Email,
        Homepage = this.Homepage,
        Url = this.Url,
        Comment = this.Comment,
        DatFormat = this.DatFormat,
    };

    // If there is nothing to split, bail out
    if (Count == 0)
    {
        return false;
    }

    // Now separate the items: a match goes only to its own DAT, while an
    // item matching neither list is duplicated into both
    List<string> keys = Keys;
    Parallel.ForEach(keys, Globals.ParallelOptions, key =>
    {
        foreach (DatItem item in this[key])
        {
            string ext = Utilities.GetExtension(item.Name.ToUpperInvariant());
            bool inA = newExtA.Contains(ext);
            bool inB = newExtB.Contains(ext);

            // A receives its own matches plus everything matching neither
            if (inA || !inB)
            {
                datdataA.Add(key, item);
            }
            // B receives everything except A's exclusive matches
            if (!inA)
            {
                datdataB.Add(key, item);
            }
        }
    });

    // Then write out both files
    bool success = datdataA.Write(outDir);
    success &= datdataB.Write(outDir);

    return success;
}
/// <summary>
/// Create an empty DatFile that clones this DAT's header data, appending a
/// parenthesized suffix to the identifying fields. Used by SplitByHash to
/// avoid repeating the same eighteen-field initializer eight times.
/// </summary>
/// <param name="suffix">Suffix to append to FileName, Name, and Description</param>
/// <returns>The newly created, empty DatFile</returns>
private DatFile CreateHashSplitDat(string suffix)
{
    return new DatFile
    {
        FileName = this.FileName + " (" + suffix + ")",
        Name = this.Name + " (" + suffix + ")",
        Description = this.Description + " (" + suffix + ")",
        Category = this.Category,
        Version = this.Version,
        Date = this.Date,
        Author = this.Author,
        Email = this.Email,
        Homepage = this.Homepage,
        Url = this.Url,
        Comment = this.Comment,
        Header = this.Header,
        Type = this.Type,
        ForceMerging = this.ForceMerging,
        ForceNodump = this.ForceNodump,
        ForcePacking = this.ForcePacking,
        DatFormat = this.DatFormat,
        DedupeRoms = this.DedupeRoms,
    };
}

/// <summary>
/// Split a DAT by best available hashes
/// </summary>
/// <param name="outDir">Name of the directory to write the DATs out to</param>
/// <returns>True if split succeeded, false otherwise</returns>
public bool SplitByHash(string outDir)
{
    // Create each of the respective output DATs
    Globals.Logger.User("Creating and populating new DATs");
    DatFile nodump = CreateHashSplitDat("Nodump");
    DatFile sha512 = CreateHashSplitDat("SHA-512");
    DatFile sha384 = CreateHashSplitDat("SHA-384");
    DatFile sha256 = CreateHashSplitDat("SHA-256");
    DatFile sha1 = CreateHashSplitDat("SHA-1");
    DatFile md5 = CreateHashSplitDat("MD5");
    DatFile crc = CreateHashSplitDat("CRC");
    DatFile other = CreateHashSplitDat("Other");

    // Now populate each of the DAT objects in turn
    List<string> keys = Keys;
    Parallel.ForEach(keys, Globals.ParallelOptions, key =>
    {
        List<DatItem> items = this[key];
        foreach (DatItem item in items)
        {
            // If the file is not a Rom or Disk, skip it
            // (BUGFIX: this was `return`, which silently dropped the rest of
            // this key's items instead of just the current one)
            if (item.ItemType != ItemType.Disk && item.ItemType != ItemType.Rom)
            {
                continue;
            }

            // If the file is a nodump
            if ((item.ItemType == ItemType.Rom && ((Rom)item).ItemStatus == ItemStatus.Nodump)
                || (item.ItemType == ItemType.Disk && ((Disk)item).ItemStatus == ItemStatus.Nodump))
            {
                nodump.Add(key, item);
            }
            // If the file has a SHA-512
            else if ((item.ItemType == ItemType.Rom && !String.IsNullOrWhiteSpace(((Rom)item).SHA512))
                || (item.ItemType == ItemType.Disk && !String.IsNullOrWhiteSpace(((Disk)item).SHA512)))
            {
                sha512.Add(key, item);
            }
            // If the file has a SHA-384
            else if ((item.ItemType == ItemType.Rom && !String.IsNullOrWhiteSpace(((Rom)item).SHA384))
                || (item.ItemType == ItemType.Disk && !String.IsNullOrWhiteSpace(((Disk)item).SHA384)))
            {
                sha384.Add(key, item);
            }
            // If the file has a SHA-256
            else if ((item.ItemType == ItemType.Rom && !String.IsNullOrWhiteSpace(((Rom)item).SHA256))
                || (item.ItemType == ItemType.Disk && !String.IsNullOrWhiteSpace(((Disk)item).SHA256)))
            {
                sha256.Add(key, item);
            }
            // If the file has a SHA-1
            else if ((item.ItemType == ItemType.Rom && !String.IsNullOrWhiteSpace(((Rom)item).SHA1))
                || (item.ItemType == ItemType.Disk && !String.IsNullOrWhiteSpace(((Disk)item).SHA1)))
            {
                sha1.Add(key, item);
            }
            // If the file has no SHA-1 but has an MD5
            else if ((item.ItemType == ItemType.Rom && !String.IsNullOrWhiteSpace(((Rom)item).MD5))
                || (item.ItemType == ItemType.Disk && !String.IsNullOrWhiteSpace(((Disk)item).MD5)))
            {
                md5.Add(key, item);
            }
            // If the file has no MD5 but a CRC
            else if ((item.ItemType == ItemType.Rom && !String.IsNullOrWhiteSpace(((Rom)item).CRC)))
            {
                crc.Add(key, item);
            }
            else
            {
                other.Add(key, item);
            }
        }
    });

    // Now, output all of the files to the output directory
    Globals.Logger.User("DAT information created, outputting new files");
    bool success = true;
    success &= nodump.Write(outDir);
    success &= sha512.Write(outDir);
    success &= sha384.Write(outDir);
    success &= sha256.Write(outDir);
    success &= sha1.Write(outDir);
    success &= md5.Write(outDir);
    success &= crc.Write(outDir);
    // BUGFIX: the "Other" DAT was populated but never written, silently
    // dropping any hashless items
    success &= other.Write(outDir);

    return success;
}
/// <summary>
/// Split a SuperDAT by lowest available directory level
/// </summary>
/// <param name="outDir">Name of the directory to write the DATs out to</param>
/// <param name="shortname">True if short names should be used, false otherwise</param>
/// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise</param>
/// <returns>True if split succeeded, false otherwise</returns>
public bool SplitByLevel(string outDir, bool shortname, bool basedat)
{
    // First, organize by games so that we can do the right thing
    BucketBy(SortedBy.Game, DedupeType.None, lower: false, norename: true);

    // Create a temporary DAT to add things to
    DatFile tempDat = new DatFile(this)
    {
        Name = null,
    };

    // Sort the input keys
    List<string> keys = Keys;
    keys.Sort(SplitByLevelSort);

    // Then, we loop over the games sequentially: the algorithm depends on the
    // sorted key order and accumulates into the shared tempDat, so it cannot
    // be parallelized safely (the original Parallel.ForEach raced on tempDat)
    foreach (string key in keys)
    {
        // Here, the key is the name of the game to be used for comparison
        if (tempDat.Name != null && tempDat.Name != Path.GetDirectoryName(key))
        {
            // BUGFIX: the accumulated DAT was previously discarded without
            // ever being written; flush it out before starting the next one
            SplitByLevelHelper(tempDat, outDir, shortname, basedat);

            // Reset the DAT for the next items
            tempDat = new DatFile(this)
            {
                Name = null,
            };
        }

        // Clean the input list and set all games to be pathless
        List<DatItem> items = this[key];
        items.ForEach(item => item.MachineName = Path.GetFileName(item.MachineName));
        items.ForEach(item => item.MachineDescription = Path.GetFileName(item.MachineDescription));

        // Now add the game to the output DAT
        tempDat.AddRange(key, items);

        // Then set the DAT name to be the parent directory name
        tempDat.Name = Path.GetDirectoryName(key);
    }

    // Write out whatever remains in the final accumulated DAT
    SplitByLevelHelper(tempDat, outDir, shortname, basedat);

    return true;
}
/// <summary>
/// Helper function for SplitByLevel to sort the input game names
/// </summary>
/// <param name="a">First string to compare</param>
/// <param name="b">Second string to compare</param>
/// <returns>-1 for a coming before b, 0 for a == b, 1 for a coming after b</returns>
private int SplitByLevelSort(string a, string b)
{
    // Order primarily by directory depth (number of path separators);
    // names at equal depth fall back to a natural string comparison
    int depthA = a.Count(c => c == '/' || c == '\\');
    int depthB = b.Count(c => c == '/' || c == '\\');

    if (depthA != depthB)
    {
        return depthA - depthB;
    }

    return new NaturalComparer().Compare(a, b);
}
/// <summary>
/// Helper function for SplitByLevel to clean and write out a DAT
/// </summary>
/// <param name="datFile">DAT to clean and write out</param>
/// <param name="outDir">Directory to write out to</param>
/// <param name="shortname">True if short naming scheme should be used, false otherwise</param>
/// <param name="restore">True if original filenames should be used as the base for output filename, false otherwise</param>
private void SplitByLevelHelper(DatFile datFile, string outDir, bool shortname, bool restore)
{
    // Get the name from the DAT to use separately, and a flattened variant
    // with path separators replaced for display
    string name = datFile.Name;
    string expName = name.Replace("/", " - ").Replace("\\", " - ");

    // Pick the base output filename: fall back to this DAT's FileName when
    // the name is blank, otherwise use either the short or expanded form
    string baseName;
    if (String.IsNullOrWhiteSpace(name))
    {
        baseName = FileName;
    }
    else if (shortname)
    {
        baseName = Path.GetFileName(name);
    }
    else
    {
        baseName = expName;
    }
    datFile.FileName = HttpUtility.HtmlDecode(baseName);

    // Optionally prefix with the original filename
    if (restore)
    {
        datFile.FileName = FileName + " (" + datFile.FileName + ")";
    }

    datFile.Name = Name + " (" + expName + ")";
    datFile.Description = String.IsNullOrWhiteSpace(Description)
        ? datFile.Name
        : Description + " (" + expName + ")";
    datFile.Type = null;

    // Write out the temporary DAT to the proper directory
    datFile.Write(outDir);
}
/// <summary>
/// Split a DAT by size of Rom
/// </summary>
/// <param name="outDir">Name of the directory to write the DATs out to</param>
/// <param name="radix">Long value representing the split point</param>
/// <returns>True if split succeeded, false otherwise</returns>
public bool SplitBySize(string outDir, long radix)
{
    // Create each of the respective output DATs
    Globals.Logger.User("Creating and populating new DATs");
    DatFile lessDat = new DatFile
    {
        FileName = this.FileName + " (less than " + radix + " )",
        Name = this.Name + " (less than " + radix + " )",
        Description = this.Description + " (less than " + radix + " )",
        Category = this.Category,
        Version = this.Version,
        Date = this.Date,
        Author = this.Author,
        Email = this.Email,
        Homepage = this.Homepage,
        Url = this.Url,
        Comment = this.Comment,
        Header = this.Header,
        Type = this.Type,
        ForceMerging = this.ForceMerging,
        ForceNodump = this.ForceNodump,
        ForcePacking = this.ForcePacking,
        DatFormat = this.DatFormat,
        DedupeRoms = this.DedupeRoms,
    };
    DatFile greaterEqualDat = new DatFile
    {
        FileName = this.FileName + " (equal-greater than " + radix + " )",
        Name = this.Name + " (equal-greater than " + radix + " )",
        Description = this.Description + " (equal-greater than " + radix + " )",
        Category = this.Category,
        Version = this.Version,
        Date = this.Date,
        Author = this.Author,
        Email = this.Email,
        Homepage = this.Homepage,
        Url = this.Url,
        Comment = this.Comment,
        Header = this.Header,
        Type = this.Type,
        ForceMerging = this.ForceMerging,
        ForceNodump = this.ForceNodump,
        ForcePacking = this.ForcePacking,
        DatFormat = this.DatFormat,
        DedupeRoms = this.DedupeRoms,
    };

    // Now populate each of the DAT objects in turn
    List<string> keys = Keys;
    Parallel.ForEach(keys, Globals.ParallelOptions, key =>
    {
        foreach (DatItem item in this[key])
        {
            // Anything that is not a Rom, and any Rom smaller than the radix,
            // lands in the "lesser" DAT; all remaining Roms (size >= radix)
            // go in the "greater" one
            if (item.ItemType != ItemType.Rom || ((Rom)item).Size < radix)
            {
                lessDat.Add(key, item);
            }
            else
            {
                greaterEqualDat.Add(key, item);
            }
        }
    });

    // Now, output all of the files to the output directory
    Globals.Logger.User("DAT information created, outputting new files");
    bool success = true;
    success &= lessDat.Write(outDir);
    success &= greaterEqualDat.Write(outDir);

    return success;
}
/// <summary>
/// Split a DAT into three DATs by type of DatItem (Rom, Disk, Sample)
/// </summary>
/// <param name="outDir">Name of the directory to write the DATs out to</param>
/// <returns>True if split succeeded, false otherwise</returns>
public bool SplitByType(string outDir)
{
    // Create each of the respective output DATs
    Globals.Logger.User("Creating and populating new DATs");
    DatFile romdat = CreateSplitDatWithSuffix(" (ROM)");
    DatFile diskdat = CreateSplitDatWithSuffix(" (Disk)");
    DatFile sampledat = CreateSplitDatWithSuffix(" (Sample)");

    // Now populate each of the DAT objects in turn
    List<string> keys = Keys;
    Parallel.ForEach(keys, Globals.ParallelOptions, key =>
    {
        List<DatItem> items = this[key];
        foreach (DatItem item in items)
        {
            // Route each item to the DAT matching its type; any other
            // item type is intentionally dropped from all three outputs
            if (item.ItemType == ItemType.Rom)
            {
                romdat.Add(key, item);
            }
            else if (item.ItemType == ItemType.Disk)
            {
                diskdat.Add(key, item);
            }
            else if (item.ItemType == ItemType.Sample)
            {
                sampledat.Add(key, item);
            }
        }
    });

    // Now, output all of the files to the output directory
    Globals.Logger.User("DAT information created, outputting new files");
    bool success = true;
    success &= romdat.Write(outDir);
    success &= diskdat.Write(outDir);
    success &= sampledat.Write(outDir);
    return success;
}

/// <summary>
/// Create a new, empty DatFile whose header values are copied from this DAT,
/// with <paramref name="suffix"/> appended to the naming fields. Shared by
/// the Split* operations to avoid repeating the header-copy boilerplate.
/// </summary>
/// <param name="suffix">Suffix to append to FileName, Name, and Description</param>
/// <returns>A new DatFile carrying the copied header and no items</returns>
private DatFile CreateSplitDatWithSuffix(string suffix)
{
    return new DatFile
    {
        FileName = this.FileName + suffix,
        Name = this.Name + suffix,
        Description = this.Description + suffix,
        Category = this.Category,
        Version = this.Version,
        Date = this.Date,
        Author = this.Author,
        Email = this.Email,
        Homepage = this.Homepage,
        Url = this.Url,
        Comment = this.Comment,
        Header = this.Header,
        Type = this.Type,
        ForceMerging = this.ForceMerging,
        ForceNodump = this.ForceNodump,
        ForcePacking = this.ForcePacking,
        DatFormat = this.DatFormat,
        DedupeRoms = this.DedupeRoms,
    };
}
#endregion
#region Statistics
/// <summary>
/// Output the stats for the DAT in a human-readable format to the logger
/// </summary>
/// <param name="recalculate">True if numbers should be recalculated for the DAT, false otherwise (default)</param>
/// <param name="game">Number of games to report; -1 means use the bucketed key count instead (default)</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise (default)</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise (default)</param>
public void WriteStatsToScreen(bool recalculate = false, long game = -1, bool baddumpCol = false, bool nodumpCol = false)
{
// If we're supposed to recalculate the statistics, do so
if (recalculate)
{
RecalculateStats();
}
// Bucket by game (no dedupe) so Keys.Count() below reflects the game count
BucketBy(SortedBy.Game, DedupeType.None, norename: true);
// NOTE(review): a negative TotalSize is treated as unchecked Int64 overflow
// and wrapped back around MaxValue -- confirm that is the only way it goes
// negative, rather than a genuinely bad size sum
if (TotalSize < 0)
{
TotalSize = Int64.MaxValue + TotalSize;
}
// Log the results to screen
string results = @"For '" + FileName + @"':
--------------------------------------------------
Uncompressed size: " + Utilities.GetBytesReadable(TotalSize) + @"
Games found: " + (game == -1 ? Keys.Count() : game) + @"
Roms found: " + RomCount + @"
Disks found: " + DiskCount + @"
Roms with CRC: " + CRCCount + @"
Roms with MD5: " + MD5Count + @"
Roms with SHA-1: " + SHA1Count + @"
Roms with SHA-256: " + SHA256Count + @"
Roms with SHA-384: " + SHA384Count + @"
Roms with SHA-512: " + SHA512Count + "\n";
// Optional columns are appended only on request
if (baddumpCol)
{
results += " Roms with BadDump status: " + BaddumpCount + "\n";
}
if (nodumpCol)
{
results += " Roms with Nodump status: " + NodumpCount + "\n";
}
// For spacing between DATs
results += "\n\n";
Globals.Logger.User(results);
}
/// <summary>
/// Recalculate the statistics for the DAT from the item dictionary
/// </summary>
private void RecalculateStats()
{
    // Wipe out any stats already there
    _datStats.Reset();

    // If we have a blank DAT in any way, return
    // (the previous "this == null" guard was removed: an instance method's
    // receiver can never be null in safe C#)
    if (Count == 0)
    {
        return;
    }

    // Loop through every item and fold it into the stats
    List<string> keys = Keys;
    Parallel.ForEach(keys, Globals.ParallelOptions, key =>
    {
        List<DatItem> items = this[key];
        foreach (DatItem item in items)
        {
            // _datStats is shared across all parallel iterations; serialize
            // the accumulation so concurrent AddItem calls cannot race on
            // its counters (DatStats is not known to be thread-safe)
            lock (_datStats)
            {
                _datStats.AddItem(item);
            }
        }
    });
}
#endregion
#region Writing
/// <summary>
/// Create and open an output file for writing direct from a dictionary
/// </summary>
/// <param name="outDir">Set the output directory (default current directory)</param>
/// <param name="norename">True if games should only be compared on game and file name (default), false if system and source are counted</param>
/// <param name="stats">True if DAT statistics should be output on write, false otherwise (default)</param>
/// <param name="ignoreblanks">True if blank roms should be skipped on output, false otherwise (default)</param>
/// <param name="overwrite">True if files should be overwritten (default), false if they should be renamed instead</param>
/// <returns>True if the DAT was written correctly, false otherwise</returns>
public bool Write(string outDir = null, bool norename = true, bool stats = false, bool ignoreblanks = false, bool overwrite = true)
{
// If there's nothing there, abort
if (Count == 0)
{
Globals.Logger.User("There were no items to write out!");
return false;
}
// Ensure the output directory is set and created
outDir = Utilities.EnsureOutputDirectory(outDir, create: true);
// If the DAT has no output format, default to XML
if (DatFormat == 0)
{
Globals.Logger.Verbose("No DAT format defined, defaulting to XML");
DatFormat = DatFormat.Logiqx;
}
// Make sure that the three essential fields are filled in.
// The eight branches cover every blank/non-blank combination of
// FileName/Name/Description: when all are blank everything becomes
// "Default"; otherwise blanks are filled from whichever fields were
// provided (Description preferred, then Name, then FileName).
if (String.IsNullOrWhiteSpace(FileName) && String.IsNullOrWhiteSpace(Name) && String.IsNullOrWhiteSpace(Description))
{
FileName = Name = Description = "Default";
}
else if (String.IsNullOrWhiteSpace(FileName) && String.IsNullOrWhiteSpace(Name) && !String.IsNullOrWhiteSpace(Description))
{
FileName = Name = Description;
}
else if (String.IsNullOrWhiteSpace(FileName) && !String.IsNullOrWhiteSpace(Name) && String.IsNullOrWhiteSpace(Description))
{
FileName = Description = Name;
}
else if (String.IsNullOrWhiteSpace(FileName) && !String.IsNullOrWhiteSpace(Name) && !String.IsNullOrWhiteSpace(Description))
{
FileName = Description;
}
else if (!String.IsNullOrWhiteSpace(FileName) && String.IsNullOrWhiteSpace(Name) && String.IsNullOrWhiteSpace(Description))
{
Name = Description = FileName;
}
else if (!String.IsNullOrWhiteSpace(FileName) && String.IsNullOrWhiteSpace(Name) && !String.IsNullOrWhiteSpace(Description))
{
Name = Description;
}
else if (!String.IsNullOrWhiteSpace(FileName) && !String.IsNullOrWhiteSpace(Name) && String.IsNullOrWhiteSpace(Description))
{
Description = Name;
}
else if (!String.IsNullOrWhiteSpace(FileName) && !String.IsNullOrWhiteSpace(Name) && !String.IsNullOrWhiteSpace(Description))
{
// Nothing is needed
}
// Output initial statistics, for kicks
if (stats)
{
WriteStatsToScreen(recalculate: (RomCount + DiskCount == 0), baddumpCol: true, nodumpCol: true);
}
// Run the one rom per game logic, if required
if (OneRom)
{
OneRomPerGame();
}
// Bucket and dedupe according to the flag; dedupe happens during the
// bucketing pass, so this must run before the plain game bucketing below
if (DedupeRoms == DedupeType.Full)
{
BucketBy(SortedBy.CRC, DedupeRoms, norename: norename);
}
else if (DedupeRoms == DedupeType.Game)
{
BucketBy(SortedBy.Game, DedupeRoms, norename: norename);
}
// Bucket roms by game name, if not already
BucketBy(SortedBy.Game, DedupeType.None, norename: norename);
// Output the number of items we're going to be writing
Globals.Logger.User("A total of {0} items will be written out to '{1}'", Count, FileName);
// If we are removing scene dates, do that now
if (SceneDateStrip)
{
StripSceneDatesFromItems();
}
// Get the outfile names
Dictionary outfiles = CreateOutfileNames(outDir, overwrite);
try
{
// Write out all required formats in parallel; a failure in one format
// is logged but does not abort the others
Parallel.ForEach(outfiles.Keys, Globals.ParallelOptions, datFormat =>
{
string outfile = outfiles[datFormat];
try
{
Utilities.GetDatFile(datFormat, this)?.WriteToFile(outfile, ignoreblanks);
}
catch (Exception ex)
{
Globals.Logger.Error("Datfile {0} could not be written out: {1}", outfile, ex.ToString());
}
});
}
catch (Exception ex)
{
Globals.Logger.Error(ex.ToString());
return false;
}
return true;
}
/// <summary>
/// Create and open an output file for writing direct from a dictionary
/// </summary>
/// <param name="outfile">Name of the file to write to</param>
/// <param name="ignoreblanks">True if blank roms should be skipped on output, false otherwise (default)</param>
/// <returns>True if the DAT was written correctly, false otherwise</returns>
/// <exception cref="NotImplementedException">Always thrown by this base implementation</exception>
public virtual bool WriteToFile(string outfile, bool ignoreblanks = false)
{
// Base class has no format; format-specific subclasses must override this
throw new NotImplementedException();
}
/// <summary>
/// Generate a proper outfile name based on a DAT and output directory
/// </summary>
/// <param name="outDir">Output directory</param>
/// <param name="overwrite">True if we ignore existing files (default), false otherwise</param>
/// <returns>Dictionary of output formats mapped to file names</returns>
private Dictionary<DatFormat, string> CreateOutfileNames(string outDir, bool overwrite = true)
{
    // Create the output dictionary
    Dictionary<DatFormat, string> outfileNames = new Dictionary<DatFormat, string>();

    // Double check the outDir for the end delim
    if (!outDir.EndsWith(Path.DirectorySeparatorChar.ToString()))
    {
        outDir += Path.DirectorySeparatorChar;
    }

    // Get the extensions from the output type. Formats whose default
    // extension would collide with another selected format fall back to a
    // disambiguating extension (".dc.dat", ".mame.xml", and so on).

    // AttractMode
    if ((DatFormat & DatFormat.AttractMode) != 0)
    {
        outfileNames.Add(DatFormat.AttractMode, CreateOutfileNamesHelper(outDir, ".txt", overwrite));
    }

    // ClrMamePro
    if ((DatFormat & DatFormat.ClrMamePro) != 0)
    {
        outfileNames.Add(DatFormat.ClrMamePro, CreateOutfileNamesHelper(outDir, ".dat", overwrite));
    }

    // CSV
    if ((DatFormat & DatFormat.CSV) != 0)
    {
        outfileNames.Add(DatFormat.CSV, CreateOutfileNamesHelper(outDir, ".csv", overwrite));
    }

    // DOSCenter
    if ((DatFormat & DatFormat.DOSCenter) != 0
        && (DatFormat & DatFormat.ClrMamePro) == 0
        && (DatFormat & DatFormat.RomCenter) == 0)
    {
        outfileNames.Add(DatFormat.DOSCenter, CreateOutfileNamesHelper(outDir, ".dat", overwrite));
    }
    if ((DatFormat & DatFormat.DOSCenter) != 0
        && ((DatFormat & DatFormat.ClrMamePro) != 0
            || (DatFormat & DatFormat.RomCenter) != 0))
    {
        outfileNames.Add(DatFormat.DOSCenter, CreateOutfileNamesHelper(outDir, ".dc.dat", overwrite));
    }

    // Logiqx XML
    // NOTE(review): when both Logiqx and LogiqxDepreciated are selected they
    // resolve to the same ".xml" filename and are written concurrently --
    // confirm whether that combination is ever allowed upstream
    if ((DatFormat & DatFormat.Logiqx) != 0)
    {
        outfileNames.Add(DatFormat.Logiqx, CreateOutfileNamesHelper(outDir, ".xml", overwrite));
    }
    if ((DatFormat & DatFormat.LogiqxDepreciated) != 0)
    {
        outfileNames.Add(DatFormat.LogiqxDepreciated, CreateOutfileNamesHelper(outDir, ".xml", overwrite));
    }

    // MAME Listroms
    if ((DatFormat & DatFormat.Listrom) != 0
        && (DatFormat & DatFormat.AttractMode) == 0)
    {
        outfileNames.Add(DatFormat.Listrom, CreateOutfileNamesHelper(outDir, ".txt", overwrite));
    }
    if ((DatFormat & DatFormat.Listrom) != 0
        && (DatFormat & DatFormat.AttractMode) != 0)
    {
        outfileNames.Add(DatFormat.Listrom, CreateOutfileNamesHelper(outDir, ".lr.txt", overwrite));
    }

    // MAME Listxml
    if ((DatFormat & DatFormat.Listxml) != 0
        && (DatFormat & DatFormat.Logiqx) == 0
        && (DatFormat & DatFormat.LogiqxDepreciated) == 0
        && (DatFormat & DatFormat.SabreDat) == 0
        && (DatFormat & DatFormat.SoftwareList) == 0)
    {
        outfileNames.Add(DatFormat.Listxml, CreateOutfileNamesHelper(outDir, ".xml", overwrite));
    }
    if ((DatFormat & DatFormat.Listxml) != 0
        && ((DatFormat & DatFormat.Logiqx) != 0
            || (DatFormat & DatFormat.LogiqxDepreciated) != 0
            || (DatFormat & DatFormat.SabreDat) != 0
            || (DatFormat & DatFormat.SoftwareList) != 0))
    {
        outfileNames.Add(DatFormat.Listxml, CreateOutfileNamesHelper(outDir, ".mame.xml", overwrite));
    }

    // Missfile
    if ((DatFormat & DatFormat.MissFile) != 0
        && (DatFormat & DatFormat.AttractMode) == 0)
    {
        outfileNames.Add(DatFormat.MissFile, CreateOutfileNamesHelper(outDir, ".txt", overwrite));
    }
    if ((DatFormat & DatFormat.MissFile) != 0
        && (DatFormat & DatFormat.AttractMode) != 0)
    {
        outfileNames.Add(DatFormat.MissFile, CreateOutfileNamesHelper(outDir, ".miss.txt", overwrite));
    }

    // OfflineList
    if ((DatFormat & DatFormat.OfflineList) != 0
        && (DatFormat & DatFormat.Logiqx) == 0
        && (DatFormat & DatFormat.LogiqxDepreciated) == 0
        && (DatFormat & DatFormat.Listxml) == 0
        && (DatFormat & DatFormat.SabreDat) == 0
        && (DatFormat & DatFormat.SoftwareList) == 0)
    {
        outfileNames.Add(DatFormat.OfflineList, CreateOutfileNamesHelper(outDir, ".xml", overwrite));
    }
    if ((DatFormat & DatFormat.OfflineList) != 0
        && ((DatFormat & DatFormat.Logiqx) != 0
            || (DatFormat & DatFormat.LogiqxDepreciated) != 0
            || (DatFormat & DatFormat.Listxml) != 0
            || (DatFormat & DatFormat.SabreDat) != 0
            || (DatFormat & DatFormat.SoftwareList) != 0))
    {
        outfileNames.Add(DatFormat.OfflineList, CreateOutfileNamesHelper(outDir, ".ol.xml", overwrite));
    }

    // openMSX
    if ((DatFormat & DatFormat.OpenMSX) != 0
        && (DatFormat & DatFormat.Logiqx) == 0
        && (DatFormat & DatFormat.LogiqxDepreciated) == 0
        && (DatFormat & DatFormat.Listxml) == 0
        && (DatFormat & DatFormat.SabreDat) == 0
        && (DatFormat & DatFormat.SoftwareList) == 0
        && (DatFormat & DatFormat.OfflineList) == 0)
    {
        outfileNames.Add(DatFormat.OpenMSX, CreateOutfileNamesHelper(outDir, ".xml", overwrite));
    }
    if ((DatFormat & DatFormat.OpenMSX) != 0
        && ((DatFormat & DatFormat.Logiqx) != 0
            || (DatFormat & DatFormat.LogiqxDepreciated) != 0
            || (DatFormat & DatFormat.Listxml) != 0
            || (DatFormat & DatFormat.SabreDat) != 0
            || (DatFormat & DatFormat.SoftwareList) != 0
            || (DatFormat & DatFormat.OfflineList) != 0))
    {
        outfileNames.Add(DatFormat.OpenMSX, CreateOutfileNamesHelper(outDir, ".msx.xml", overwrite));
    }

    // Redump MD5
    if ((DatFormat & DatFormat.RedumpMD5) != 0)
    {
        outfileNames.Add(DatFormat.RedumpMD5, CreateOutfileNamesHelper(outDir, ".md5", overwrite));
    }

    // Redump SFV
    if ((DatFormat & DatFormat.RedumpSFV) != 0)
    {
        outfileNames.Add(DatFormat.RedumpSFV, CreateOutfileNamesHelper(outDir, ".sfv", overwrite));
    }

    // Redump SHA-1
    if ((DatFormat & DatFormat.RedumpSHA1) != 0)
    {
        outfileNames.Add(DatFormat.RedumpSHA1, CreateOutfileNamesHelper(outDir, ".sha1", overwrite));
    }

    // Redump SHA-256
    if ((DatFormat & DatFormat.RedumpSHA256) != 0)
    {
        outfileNames.Add(DatFormat.RedumpSHA256, CreateOutfileNamesHelper(outDir, ".sha256", overwrite));
    }

    // RomCenter
    if ((DatFormat & DatFormat.RomCenter) != 0
        && (DatFormat & DatFormat.ClrMamePro) == 0)
    {
        outfileNames.Add(DatFormat.RomCenter, CreateOutfileNamesHelper(outDir, ".dat", overwrite));
    }
    if ((DatFormat & DatFormat.RomCenter) != 0
        && (DatFormat & DatFormat.ClrMamePro) != 0)
    {
        outfileNames.Add(DatFormat.RomCenter, CreateOutfileNamesHelper(outDir, ".rc.dat", overwrite));
    }

    // SabreDAT
    // BUGFIX: this guard previously used "(Logiqx) == 0 || (LogiqxDepreciated) == 0",
    // which is true whenever at least one of the two is unset; with exactly one
    // set, BOTH SabreDat branches ran and the second Add threw on the duplicate
    // key. Use && (matching the SoftwareList case below) so the plain ".xml"
    // name is only chosen when neither Logiqx variant is selected.
    if ((DatFormat & DatFormat.SabreDat) != 0
        && (DatFormat & DatFormat.Logiqx) == 0
        && (DatFormat & DatFormat.LogiqxDepreciated) == 0)
    {
        outfileNames.Add(DatFormat.SabreDat, CreateOutfileNamesHelper(outDir, ".xml", overwrite));
    }
    if ((DatFormat & DatFormat.SabreDat) != 0
        && ((DatFormat & DatFormat.Logiqx) != 0
            || (DatFormat & DatFormat.LogiqxDepreciated) != 0))
    {
        outfileNames.Add(DatFormat.SabreDat, CreateOutfileNamesHelper(outDir, ".sd.xml", overwrite));
    }

    // Software List
    if ((DatFormat & DatFormat.SoftwareList) != 0
        && (DatFormat & DatFormat.Logiqx) == 0
        && (DatFormat & DatFormat.LogiqxDepreciated) == 0
        && (DatFormat & DatFormat.SabreDat) == 0)
    {
        outfileNames.Add(DatFormat.SoftwareList, CreateOutfileNamesHelper(outDir, ".xml", overwrite));
    }
    if ((DatFormat & DatFormat.SoftwareList) != 0
        && ((DatFormat & DatFormat.Logiqx) != 0
            || (DatFormat & DatFormat.LogiqxDepreciated) != 0
            || (DatFormat & DatFormat.SabreDat) != 0))
    {
        outfileNames.Add(DatFormat.SoftwareList, CreateOutfileNamesHelper(outDir, ".sl.xml", overwrite));
    }

    // SSV
    if ((DatFormat & DatFormat.SSV) != 0)
    {
        outfileNames.Add(DatFormat.SSV, CreateOutfileNamesHelper(outDir, ".ssv", overwrite));
    }

    // TSV
    if ((DatFormat & DatFormat.TSV) != 0)
    {
        outfileNames.Add(DatFormat.TSV, CreateOutfileNamesHelper(outDir, ".tsv", overwrite));
    }

    return outfileNames;
}
/// <summary>
/// Help generating the outfile name
/// </summary>
/// <param name="outDir">Output directory</param>
/// <param name="extension">Extension to use for the file</param>
/// <param name="overwrite">True if we ignore existing files, false otherwise</param>
/// <returns>String containing the new filename</returns>
private string CreateOutfileNamesHelper(string outDir, string extension, bool overwrite)
{
    // Prefer the external FileName; fall back to the description when blank
    string filename = (String.IsNullOrWhiteSpace(FileName) ? Description : FileName);

    // String.Replace already no-ops when the pattern is absent, so the
    // original Contains guard was redundant; collapse doubled separators
    string doubled = Path.DirectorySeparatorChar.ToString() + Path.DirectorySeparatorChar;
    string outfile = (outDir + filename + extension)
        .Replace(doubled, Path.DirectorySeparatorChar.ToString());

    // If we're not overwriting, append "_N" until the name is unused
    if (!overwrite)
    {
        int i = 1;
        while (File.Exists(outfile))
        {
            outfile = (outDir + filename + "_" + i + extension)
                .Replace(doubled, Path.DirectorySeparatorChar.ToString());
            i++;
        }
    }

    return outfile;
}
/// <summary>
/// Process an item and correctly set the item name
/// </summary>
/// <param name="item">DatItem to update</param>
/// <param name="forceRemoveQuotes">True if the Quotes flag should be ignored, false otherwise</param>
/// <param name="forceRomName">True if the UseRomName should be always on (default), false otherwise</param>
protected void ProcessItemName(DatItem item, bool forceRemoveQuotes, bool forceRomName = true)
{
    string name = item.Name;

    // Backup relevant values and set new ones accordingly
    bool quotesBackup = Quotes;
    bool useRomNameBackup = UseRomName;
    if (forceRemoveQuotes)
    {
        Quotes = false;
    }
    if (forceRomName)
    {
        UseRomName = true;
    }

    // Create the proper Prefix and Postfix
    string pre = CreatePrefixPostfix(item, true);
    string post = CreatePrefixPostfix(item, false);

    // If we're in Romba mode, take care of that instead: the name becomes a
    // depot-style "aa/bb/cc/dd/<sha1>.gz" path derived from the SHA-1
    if (Romba)
    {
        if (item.ItemType == ItemType.Rom)
        {
            // We can only write out if there's a SHA-1
            // (assumes SHA1 is a full 40-char hex string; Substring would
            // throw on anything shorter)
            if (!String.IsNullOrWhiteSpace(((Rom)item).SHA1))
            {
                name = ((Rom)item).SHA1.Substring(0, 2)
                    + "/" + ((Rom)item).SHA1.Substring(2, 2)
                    + "/" + ((Rom)item).SHA1.Substring(4, 2)
                    + "/" + ((Rom)item).SHA1.Substring(6, 2)
                    + "/" + ((Rom)item).SHA1 + ".gz";
                item.Name = pre + name + post;
            }
        }
        else if (item.ItemType == ItemType.Disk)
        {
            // We can only write out if there's a SHA-1
            if (!String.IsNullOrWhiteSpace(((Disk)item).SHA1))
            {
                name = ((Disk)item).SHA1.Substring(0, 2)
                    + "/" + ((Disk)item).SHA1.Substring(2, 2)
                    + "/" + ((Disk)item).SHA1.Substring(4, 2)
                    + "/" + ((Disk)item).SHA1.Substring(6, 2)
                    + "/" + ((Disk)item).SHA1 + ".gz";
                item.Name = pre + name + post;
            }
        }

        // BUGFIX: the early return previously skipped the restore below,
        // leaking the forced Quotes/UseRomName settings into later calls.
        // Restore the backed-up values before returning.
        if (forceRemoveQuotes)
        {
            Quotes = quotesBackup;
        }
        if (forceRomName)
        {
            UseRomName = useRomNameBackup;
        }
        return;
    }

    // Replace or strip the extension as configured
    if (!String.IsNullOrWhiteSpace(ReplaceExtension) || RemoveExtension)
    {
        if (RemoveExtension)
        {
            ReplaceExtension = "";
        }
        string dir = Path.GetDirectoryName(name);
        dir = (dir.StartsWith(Path.DirectorySeparatorChar.ToString()) ? dir.Remove(0, 1) : dir);
        name = Path.Combine(dir, Path.GetFileNameWithoutExtension(name) + ReplaceExtension);
    }

    // Append an extra extension as configured
    if (!String.IsNullOrWhiteSpace(AddExtension))
    {
        name += AddExtension;
    }

    // Prepend the machine name when requested
    if (UseRomName && GameName)
    {
        name = Path.Combine(item.MachineName, name);
    }

    // Now assign back the item name
    item.Name = pre + name + post;

    // Restore all relevant values
    if (forceRemoveQuotes)
    {
        Quotes = quotesBackup;
    }
    if (forceRomName)
    {
        UseRomName = useRomNameBackup;
    }
}
/// <summary>
/// Create a prefix or postfix from inputs
/// </summary>
/// <param name="item">DatItem to create a prefix/postfix for</param>
/// <param name="prefix">True for prefix, false for postfix</param>
/// <returns>Sanitized string representing the postfix or prefix</returns>
protected string CreatePrefixPostfix(DatItem item, bool prefix)
{
    // Start from the configured prefix or postfix, applying the optional quote
    string quote = (Quotes ? "\"" : "");
    string fix = prefix ? (Prefix + quote) : (quote + Postfix);

    // Hash and size placeholders default to empty and are only filled in
    // for the item types that carry them
    string crc = string.Empty;
    string md5 = string.Empty;
    string sha1 = string.Empty;
    string sha256 = string.Empty;
    string sha384 = string.Empty;
    string sha512 = string.Empty;
    string size = string.Empty;

    if (item.ItemType == ItemType.Rom)
    {
        Rom rom = (Rom)item;
        crc = rom.CRC;
        md5 = rom.MD5;
        sha1 = rom.SHA1;
        sha256 = rom.SHA256;
        sha384 = rom.SHA384;
        sha512 = rom.SHA512;
        size = rom.Size.ToString();
    }
    else if (item.ItemType == ItemType.Disk)
    {
        Disk disk = (Disk)item;
        md5 = disk.MD5;
        sha1 = disk.SHA1;
        sha256 = disk.SHA256;
        sha384 = disk.SHA384;
        sha512 = disk.SHA512;
    }

    // Substitute every supported placeholder with its value
    // TODO: Add GameName logic here too?
    return fix
        .Replace("%game%", item.MachineName)
        .Replace("%machine%", item.MachineName)
        .Replace("%name%", item.Name)
        .Replace("%manufacturer%", item.Manufacturer)
        .Replace("%publisher%", item.Publisher)
        .Replace("%crc%", crc)
        .Replace("%md5%", md5)
        .Replace("%sha1%", sha1)
        .Replace("%sha256%", sha256)
        .Replace("%sha384%", sha384)
        .Replace("%sha512%", sha512)
        .Replace("%size%", size);
}
#endregion
#endregion // Instance Methods
#region Static Methods
#region Statistics
/// <summary>
/// Output the stats for a list of input dats as files in a human-readable format
/// </summary>
/// <param name="inputs">List of input files and folders</param>
/// <param name="reportName">Name of the output report (defaults to "report" when blank)</param>
/// <param name="outDir">Output directory for the report files</param>
/// <param name="single">True if single DAT stats are output, false otherwise</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
/// <param name="statDatFormat">Set the statistics output format to use</param>
public static void OutputStats(List inputs, string reportName, string outDir, bool single,
bool baddumpCol, bool nodumpCol, StatReportFormat statDatFormat)
{
// If there's no output format, set the default
if (statDatFormat == StatReportFormat.None)
{
statDatFormat = StatReportFormat.Textfile;
}
// Get the proper output file name
if (String.IsNullOrWhiteSpace(reportName))
{
reportName = "report";
}
// Get the proper output directory name
outDir = Utilities.EnsureOutputDirectory(outDir);
// Get the dictionary of desired output report names
Dictionary outputs = CreateOutStatsNames(outDir, statDatFormat, reportName);
// Make sure we have all files and then order them by directory, then name,
// so per-directory subtotals below see each directory's files contiguously
List files = Utilities.GetOnlyFilesFromInputs(inputs);
files = files
.OrderBy(i => Path.GetDirectoryName(i))
.ThenBy(i => Path.GetFileName(i))
.ToList();
// Get all of the writers that we need
List reports = new List();
// Loop through and output based on the inputs
foreach (KeyValuePair kvp in outputs)
{
reports.Add(Utilities.GetBaseReport(kvp.Key, kvp.Value, baddumpCol, nodumpCol));
}
// Write the header, if any
reports.ForEach(report => report.WriteHeader());
// Init all total variables
DatStats totalStats = new DatStats();
// Init directory-level variables
string lastdir = null;
string basepath = null;
DatStats dirStats = new DatStats();
// Now process each of the input files
foreach (string file in files)
{
// Get the directory for the current file
string thisdir = Path.GetDirectoryName(file);
// NOTE(review): basepath is assigned each iteration but never read -- confirm
basepath = Path.GetDirectoryName(Path.GetDirectoryName(file));
// If we don't have the first file and the directory has changed, show the previous directory stats and reset
if (lastdir != null && thisdir != lastdir)
{
// Output separator if needed
reports.ForEach(report => report.WriteMidSeparator());
DatFile lastdirdat = new DatFile
{
FileName = "DIR: " + HttpUtility.HtmlEncode(lastdir),
_datStats = dirStats,
};
lastdirdat.WriteStatsToScreen(recalculate: false, game: dirStats.GameCount, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
reports.ForEach(report => report.ReplaceDatFile(lastdirdat));
reports.ForEach(report => report.Write(game: dirStats.GameCount));
// Write the mid-footer, if any
reports.ForEach(report => report.WriteFooterSeparator());
// Write the header, if any
reports.ForEach(report => report.WriteMidHeader());
// Reset the directory stats
dirStats.Reset();
}
Globals.Logger.Verbose("Beginning stat collection for '{0}'", false, file);
// NOTE(review): this list is declared but never used -- confirm it can be removed
List games = new List();
DatFile datdata = new DatFile();
datdata.Parse(file, 0, 0);
datdata.BucketBy(SortedBy.Game, DedupeType.None, norename: true);
// Output single DAT stats (if asked)
Globals.Logger.User("Adding stats for file '{0}'\n", false, file);
if (single)
{
datdata.WriteStatsToScreen(recalculate: false, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
reports.ForEach(report => report.ReplaceDatFile(datdata));
reports.ForEach(report => report.Write());
}
// Add single DAT stats to dir
dirStats.AddStats(datdata._datStats);
dirStats.GameCount += datdata.Keys.Count();
// Add single DAT stats to totals
totalStats.AddStats(datdata._datStats);
totalStats.GameCount += datdata.Keys.Count();
// Make sure to assign the new directory
lastdir = thisdir;
}
// Output the directory stats one last time
reports.ForEach(report => report.WriteMidSeparator());
if (single)
{
DatFile dirdat = new DatFile
{
FileName = "DIR: " + HttpUtility.HtmlEncode(lastdir),
_datStats = dirStats,
};
dirdat.WriteStatsToScreen(recalculate: false, game: dirStats.GameCount, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
reports.ForEach(report => report.ReplaceDatFile(dirdat));
reports.ForEach(report => report.Write(dirStats.GameCount));
}
// Write the mid-footer, if any
reports.ForEach(report => report.WriteFooterSeparator());
// Write the header, if any
reports.ForEach(report => report.WriteMidHeader());
// Reset the directory stats
dirStats.Reset();
// Output total DAT stats
DatFile totaldata = new DatFile
{
FileName = "DIR: All DATs",
_datStats = totalStats,
};
totaldata.WriteStatsToScreen(recalculate: false, game: totalStats.GameCount, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
reports.ForEach(report => report.ReplaceDatFile(totaldata));
reports.ForEach(report => report.Write(totalStats.GameCount));
// Output footer if needed
reports.ForEach(report => report.WriteFooter());
Globals.Logger.User(@"
Please check the log folder if the stats scrolled offscreen", false);
}
/// <summary>
/// Get the proper report file name(s) for the stat output format(s)
/// </summary>
/// <param name="outDir">Output path to use</param>
/// <param name="statDatFormat">StatDatFormat to get the extension for</param>
/// <param name="reportName">Name of the input file to use</param>
/// <param name="overwrite">True if we ignore existing files, false otherwise</param>
/// <returns>Dictionary of output formats mapped to file names</returns>
private static Dictionary CreateOutStatsNames(string outDir, StatReportFormat statDatFormat, string reportName, bool overwrite = true)
{
    Dictionary output = new Dictionary();

    // First try to create the output directory if we need to
    if (!Directory.Exists(outDir))
    {
        Directory.CreateDirectory(outDir);
    }

    // Make sure the output directory ends with a path separator
    string separator = Path.DirectorySeparatorChar.ToString();
    if (!outDir.EndsWith(separator))
    {
        outDir += separator;
    }

    // Map each report format flag to its conventional extension, in the same
    // order the formats were previously checked
    KeyValuePair<StatReportFormat, string>[] formatExtensions = new KeyValuePair<StatReportFormat, string>[]
    {
        new KeyValuePair<StatReportFormat, string>(StatReportFormat.Textfile, ".txt"),
        new KeyValuePair<StatReportFormat, string>(StatReportFormat.CSV, ".csv"),
        new KeyValuePair<StatReportFormat, string>(StatReportFormat.HTML, ".html"),
        new KeyValuePair<StatReportFormat, string>(StatReportFormat.SSV, ".ssv"),
        new KeyValuePair<StatReportFormat, string>(StatReportFormat.TSV, ".tsv"),
    };

    // For each requested output format, resolve the appropriate file name
    foreach (KeyValuePair<StatReportFormat, string> pair in formatExtensions)
    {
        if ((statDatFormat & pair.Key) != 0)
        {
            output.Add(pair.Key, CreateOutStatsNamesHelper(outDir, pair.Value, reportName, overwrite));
        }
    }

    return output;
}
/// <summary>
/// Help generating the outstats name
/// </summary>
/// <param name="outDir">Output directory</param>
/// <param name="extension">Extension to use for the file</param>
/// <param name="reportName">Name of the input file to use</param>
/// <param name="overwrite">True if we ignore existing files, false otherwise</param>
/// <returns>String containing the new filename</returns>
private static string CreateOutStatsNamesHelper(string outDir, string extension, string reportName, bool overwrite)
{
    // String.Replace already no-ops when the pattern is absent, so the
    // original Contains guard was redundant; collapse doubled separators
    string doubled = Path.DirectorySeparatorChar.ToString() + Path.DirectorySeparatorChar;
    string outfile = (outDir + reportName + extension)
        .Replace(doubled, Path.DirectorySeparatorChar.ToString());

    // If we're not overwriting, append "_N" until the name is unused
    if (!overwrite)
    {
        int i = 1;
        while (File.Exists(outfile))
        {
            outfile = (outDir + reportName + "_" + i + extension)
                .Replace(doubled, Path.DirectorySeparatorChar.ToString());
            i++;
        }
    }

    return outfile;
}
#endregion
#endregion // Static Methods
}
}