diff --git a/SabreTools.Library/DatFiles/DatFile.cs b/SabreTools.Library/DatFiles/DatFile.cs
index 34d927d8..27fe8e42 100644
--- a/SabreTools.Library/DatFiles/DatFile.cs
+++ b/SabreTools.Library/DatFiles/DatFile.cs
@@ -5,7 +5,6 @@ using System.Linq;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using System.Web;
-
using SabreTools.Library.Data;
using SabreTools.Library.FileTypes;
using SabreTools.Library.DatItems;
@@ -29,3552 +28,3776 @@ using NaturalSort;
namespace SabreTools.Library.DatFiles
{
- /// <summary>
- /// Represents a format-agnostic DAT
- /// </summary>
- public class DatFile
- {
- #region Private instance variables
-
- // Internal DatHeader values
- internal DatHeader _datHeader = new DatHeader();
-
- // DatItems dictionary
- internal SortedDictionary<string, List<DatItem>> _items = new SortedDictionary<string, List<DatItem>>();
- internal SortedBy _sortedBy;
- internal DedupeType _mergedBy;
-
- // Internal statistical data
- internal DatStats _datStats = new DatStats();
-
- #endregion
-
- #region Publicly facing variables
-
- // Data common to most DAT types
- public string FileName
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.FileName;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.FileName = value;
- }
- }
- public string Name
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.Name;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.Name = value;
- }
- }
- public string Description
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.Description;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.Description = value;
- }
- }
- public string RootDir
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.RootDir;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.RootDir = value;
- }
- }
- public string Category
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.Category;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.Category = value;
- }
- }
- public string Version
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.Version;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.Version = value;
- }
- }
- public string Date
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.Date;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.Date = value;
- }
- }
- public string Author
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.Author;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.Author = value;
- }
- }
- public string Email
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.Email;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.Email = value;
- }
- }
- public string Homepage
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.Homepage;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.Homepage = value;
- }
- }
- public string Url
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.Url;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.Url = value;
- }
- }
- public string Comment
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.Comment;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.Comment = value;
- }
- }
- public string Header
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.Header;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.Header = value;
- }
- }
- public string Type // Generally only used for SuperDAT
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.Type;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.Type = value;
- }
- }
- public ForceMerging ForceMerging
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.ForceMerging;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.ForceMerging = value;
- }
- }
- public ForceNodump ForceNodump
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.ForceNodump;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.ForceNodump = value;
- }
- }
- public ForcePacking ForcePacking
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.ForcePacking;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.ForcePacking = value;
- }
- }
- public DatFormat DatFormat
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.DatFormat;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.DatFormat = value;
- }
- }
- public bool[] ExcludeFields
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.ExcludeFields;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.ExcludeFields = value;
- }
- }
- public bool OneRom
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.OneRom;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.OneRom = value;
- }
- }
- public bool KeepEmptyGames
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.KeepEmptyGames;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.KeepEmptyGames = value;
- }
- }
- public bool SceneDateStrip
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.SceneDateStrip;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.SceneDateStrip = value;
- }
- }
- public DedupeType DedupeRoms
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.DedupeRoms;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.DedupeRoms = value;
- }
- }
- public SortedBy SortedBy
- {
- get { return _sortedBy; }
- }
- public DedupeType MergedBy
- {
- get { return _mergedBy; }
- }
-
- // Write pre-processing
- public string Prefix
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.Prefix;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.Prefix = value;
- }
- }
- public string Postfix
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.Postfix;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.Postfix = value;
- }
- }
- public string AddExtension
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.AddExtension;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.AddExtension = value;
- }
- }
- public string ReplaceExtension
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.ReplaceExtension;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.ReplaceExtension = value;
- }
- }
- public bool RemoveExtension
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.RemoveExtension;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.RemoveExtension = value;
- }
- }
- public bool Romba
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.Romba;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.Romba = value;
- }
- }
- public bool GameName
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.GameName;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.GameName = value;
- }
- }
- public bool Quotes
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.Quotes;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.Quotes = value;
- }
- }
-
- // Data specific to the Miss DAT type
- public bool UseRomName
- {
- get
- {
- EnsureDatHeader();
- return _datHeader.UseRomName;
- }
- set
- {
- EnsureDatHeader();
- _datHeader.UseRomName = value;
- }
- }
-
- // Statistical data related to the DAT
- public StatReportFormat ReportFormat
- {
- get
- {
- EnsureDatStats();
- return _datStats.ReportFormat;
- }
- set
- {
- EnsureDatStats();
- _datStats.ReportFormat = value;
- }
- }
- public long Count
- {
- get
- {
- EnsureDatStats();
- return _datStats.Count;
- }
- private set
- {
- EnsureDatStats();
- _datStats.Count = value;
- }
- }
- public long ArchiveCount
- {
- get
- {
- EnsureDatStats();
- return _datStats.ArchiveCount;
- }
- private set
- {
- EnsureDatStats();
- _datStats.ArchiveCount = value;
- }
- }
- public long BiosSetCount
- {
- get
- {
- EnsureDatStats();
- return _datStats.BiosSetCount;
- }
- private set
- {
- EnsureDatStats();
- _datStats.BiosSetCount = value;
- }
- }
- public long DiskCount
- {
- get
- {
- EnsureDatStats();
- return _datStats.DiskCount;
- }
- private set
- {
- EnsureDatStats();
- _datStats.DiskCount = value;
- }
- }
- public long ReleaseCount
- {
- get
- {
- EnsureDatStats();
- return _datStats.ReleaseCount;
- }
- private set
- {
- EnsureDatStats();
- _datStats.ReleaseCount = value;
- }
- }
- public long RomCount
- {
- get
- {
- EnsureDatStats();
- return _datStats.RomCount;
- }
- private set
- {
- EnsureDatStats();
- _datStats.RomCount = value;
- }
- }
- public long SampleCount
- {
- get
- {
- EnsureDatStats();
- return _datStats.SampleCount;
- }
- private set
- {
- EnsureDatStats();
- _datStats.SampleCount = value;
- }
- }
- public long TotalSize
- {
- get
- {
- EnsureDatStats();
- return _datStats.TotalSize;
- }
- private set
- {
- EnsureDatStats();
- _datStats.TotalSize = value;
- }
- }
- public long CRCCount
- {
- get
- {
- EnsureDatStats();
- return _datStats.CRCCount;
- }
- private set
- {
- EnsureDatStats();
- _datStats.CRCCount = value;
- }
- }
- public long MD5Count
- {
- get
- {
- EnsureDatStats();
- return _datStats.MD5Count;
- }
- private set
- {
- EnsureDatStats();
- _datStats.MD5Count = value;
- }
- }
- public long SHA1Count
- {
- get
- {
- EnsureDatStats();
- return _datStats.SHA1Count;
- }
- private set
- {
- EnsureDatStats();
- _datStats.SHA1Count = value;
- }
- }
- public long SHA256Count
- {
- get
- {
- EnsureDatStats();
- return _datStats.SHA256Count;
- }
- private set
- {
- EnsureDatStats();
- _datStats.SHA256Count = value;
- }
- }
- public long SHA384Count
- {
- get
- {
- EnsureDatStats();
- return _datStats.SHA384Count;
- }
- private set
- {
- EnsureDatStats();
- _datStats.SHA384Count = value;
- }
- }
- public long SHA512Count
- {
- get
- {
- EnsureDatStats();
- return _datStats.SHA512Count;
- }
- private set
- {
- EnsureDatStats();
- _datStats.SHA512Count = value;
- }
- }
- public long BaddumpCount
- {
- get
- {
- EnsureDatStats();
- return _datStats.BaddumpCount;
- }
- private set
- {
- EnsureDatStats();
- _datStats.BaddumpCount = value;
- }
- }
- public long GoodCount
- {
- get
- {
- EnsureDatStats();
- return _datStats.GoodCount;
- }
- private set
- {
- EnsureDatStats();
- _datStats.GoodCount = value;
- }
- }
- public long NodumpCount
- {
- get
- {
- EnsureDatStats();
- return _datStats.NodumpCount;
- }
- private set
- {
- EnsureDatStats();
- _datStats.NodumpCount = value;
- }
- }
- public long VerifiedCount
- {
- get
- {
- EnsureDatStats();
- return _datStats.VerifiedCount;
- }
- private set
- {
- EnsureDatStats();
- _datStats.VerifiedCount = value;
- }
- }
-
- #endregion
-
- #region Instance Methods
-
- #region Accessors
-
- /// <summary>
- /// Passthrough to access the file dictionary
- /// </summary>
- /// <param name="key">Key in the dictionary to reference</param>
- /// <remarks>We don't want to allow direct setting of values because it bypasses the statistics</remarks>
- public List<DatItem> this[string key]
- {
- get
- {
- // Ensure the dictionary is created
- EnsureDictionary();
-
- lock (_items)
- {
- // Ensure the key exists
- EnsureKey(key);
-
- // Now return the value
- return _items[key];
- }
- }
- }
-
- /// <summary>
- /// Add a new key to the file dictionary
- /// </summary>
- /// <param name="key">Key in the dictionary to add</param>
- public void Add(string key)
- {
- // Ensure the dictionary is created
- EnsureDictionary();
-
- lock (_items)
- {
- // Ensure the key exists
- EnsureKey(key);
- }
- }
-
- /// <summary>
- /// Add a value to the file dictionary
- /// </summary>
- /// <param name="key">Key in the dictionary to add to</param>
- /// <param name="value">Value to add to the dictionary</param>
- public void Add(string key, DatItem value)
- {
- // Ensure the dictionary is created
- EnsureDictionary();
-
- // Add the key, if necessary
- Add(key);
-
- lock (_items)
- {
- // Now add the value
- _items[key].Add(value);
-
- // Now update the statistics
- _datStats.AddItem(value);
- }
- }
-
- /// <summary>
- /// Add a range of values to the file dictionary
- /// </summary>
- /// <param name="key">Key in the dictionary to add to</param>
- /// <param name="value">Value to add to the dictionary</param>
- public void AddRange(string key, List<DatItem> value)
- {
- // Ensure the dictionary is created
- EnsureDictionary();
-
- // Add the key, if necessary
- Add(key);
-
- lock (_items)
- {
- // Now add the value
- _items[key].AddRange(value);
-
- // Now update the statistics
- foreach (DatItem item in value)
- {
- _datStats.AddItem(item);
- }
- }
- }
-
- /// <summary>
- /// Get if the file dictionary contains the key
- /// </summary>
- /// <param name="key">Key in the dictionary to check</param>
- /// <returns>True if the key exists, false otherwise</returns>
- public bool Contains(string key)
- {
- bool contains = false;
-
- // Ensure the dictionary is created
- EnsureDictionary();
-
- // If the key is null, we return false since keys can't be null
- if (key == null)
- {
- return contains;
- }
-
- lock (_items)
- {
- contains = _items.ContainsKey(key);
- }
-
- return contains;
- }
-
- /// <summary>
- /// Get if the file dictionary contains the key and value
- /// </summary>
- /// <param name="key">Key in the dictionary to check</param>
- /// <param name="value">Value in the dictionary to check</param>
- /// <returns>True if both the key and value exist, false otherwise</returns>
- public bool Contains(string key, DatItem value)
- {
- bool contains = false;
-
- // Ensure the dictionary is created
- EnsureDictionary();
-
- // If the key is null, we return false since keys can't be null
- if (key == null)
- {
- return contains;
- }
-
- lock (_items)
- {
- if (_items.ContainsKey(key))
- {
- contains = _items[key].Contains(value);
- }
- }
-
- return contains;
- }
-
- /// <summary>
- /// Get the keys from the file dictionary
- /// </summary>
- /// <returns>List of the keys</returns>
- public List<string> Keys
- {
- get
- {
- // Ensure the dictionary is created
- EnsureDictionary();
-
- lock (_items)
- {
- return _items.Keys.Select(item => (String)item.Clone()).ToList();
- }
- }
- }
-
- /// <summary>
- /// Remove a key from the file dictionary if it exists
- /// </summary>
- /// <param name="key">Key in the dictionary to remove</param>
- public void Remove(string key)
- {
- // Ensure the dictionary is created
- EnsureDictionary();
-
- // If the key doesn't exist, return
- if (!Contains(key))
- {
- return;
- }
-
- lock (_items)
- {
- // Remove the statistics first
- foreach (DatItem item in _items[key])
- {
- _datStats.RemoveItem(item);
- }
-
- // Remove the key from the dictionary
- _items.Remove(key);
- }
- }
-
- /// <summary>
- /// Remove the first instance of a value from the file dictionary if it exists
- /// </summary>
- /// <param name="key">Key in the dictionary to remove from</param>
- /// <param name="value">Value to remove from the dictionary</param>
- public void Remove(string key, DatItem value)
- {
- // Ensure the dictionary is created
- EnsureDictionary();
-
- // If the key and value doesn't exist, return
- if (!Contains(key, value))
- {
- return;
- }
-
- lock (_items)
- {
- // Remove the statistics first
- _datStats.RemoveItem(value);
-
- _items[key].Remove(value);
- }
- }
-
- /// <summary>
- /// Remove a range of values from the file dictionary if they exist
- /// </summary>
- /// <param name="key">Key in the dictionary to remove from</param>
- /// <param name="value">Value to remove from the dictionary</param>
- public void RemoveRange(string key, List<DatItem> value)
- {
- foreach(DatItem item in value)
- {
- Remove(key, item);
- }
- }
-
- /// <summary>
- /// Ensure the DatHeader
- /// </summary>
- private void EnsureDatHeader()
- {
- if (_datHeader == null)
- {
- _datHeader = new DatHeader();
- }
- }
-
- /// <summary>
- /// Ensure the DatStats
- /// </summary>
- private void EnsureDatStats()
- {
- if (_datStats == null)
- {
- _datStats = new DatStats();
- }
- }
-
- /// <summary>
- /// Ensure the items dictionary
- /// </summary>
- private void EnsureDictionary()
- {
- // If the dictionary is null, create it
- if (_items == null)
- {
- _items = new SortedDictionary<string, List<DatItem>>();
- }
- }
-
- /// <summary>
- /// Ensure the key exists in the items dictionary
- /// </summary>
- /// <param name="key">Key to ensure</param>
- private void EnsureKey(string key)
- {
- // If the key is missing from the dictionary, add it
- if (!_items.ContainsKey(key))
- {
- _items.Add(key, new List<DatItem>());
- }
- }
-
- #endregion
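Illustrative aside (not part of the diff): a minimal usage sketch of the accessor API above, assuming a `DatFile datFile` and a `DatItem item` that already exist elsewhere and the SabreTools.Library namespaces are in scope. The point is that Add and Remove keep the internal statistics in sync, which is why direct assignment through the indexer is not exposed.

    string key = "some-bucket-key";            // hypothetical bucketing key
    datFile.Add(key, item);                    // adds the item and updates DatStats
    if (datFile.Contains(key, item))
    {
        List<DatItem> bucket = datFile[key];   // read-only passthrough to the bucket
        datFile.Remove(key, item);             // removal rolls the statistics back too
    }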
-
- #region Bucketing
-
- /// <summary>
- /// Take the arbitrarily sorted Files Dictionary and convert to one sorted by a user-defined method
- /// </summary>
- /// <param name="bucketBy">SortedBy enum representing how to sort the individual items</param>
- /// <param name="deduperoms">Dedupe type that should be used</param>
- /// <param name="lower">True if the key should be lowercased (default), false otherwise</param>
- /// <param name="norename">True if games should only be compared on game and file name, false if system and source are counted</param>
- public void BucketBy(SortedBy bucketBy, DedupeType deduperoms, bool lower = true, bool norename = true)
- {
- // If we have a situation where there's no dictionary or no keys at all, we skip
- if (_items == null || _items.Count == 0)
- {
- return;
- }
-
- // If the sorted type isn't the same, we want to sort the dictionary accordingly
- if (_sortedBy != bucketBy)
- {
- Globals.Logger.User("Organizing roms by {0}", bucketBy);
-
- // Set the sorted type
- _sortedBy = bucketBy;
-
- // Reset the merged type since this might change the merge
- _mergedBy = DedupeType.None;
-
- // First do the initial sort of all of the roms inplace
- List<string> oldkeys = Keys;
- for (int k = 0; k < oldkeys.Count; k++)
- {
- string key = oldkeys[k];
-
- // Get the unsorted current list
- List<DatItem> roms = this[key];
-
- // Now add each of the roms to their respective keys
- for (int i = 0; i < roms.Count; i++)
- {
- DatItem rom = roms[i];
-
- // We want to get the key most appropriate for the given sorting type
- string newkey = Utilities.GetKeyFromDatItem(rom, bucketBy, lower, norename);
-
- // If the key is different, move the item to the new key
- if (newkey != key)
- {
- Add(newkey, rom);
- Remove(key, rom);
- i--; // This makes sure the index stays correct since one item was removed
- }
- }
-
- // If the key is now empty, remove it
- if (this[key].Count == 0)
- {
- Remove(key);
- }
- }
- }
-
- // If the merge type isn't the same, we want to merge the dictionary accordingly
- if (_mergedBy != deduperoms)
- {
- Globals.Logger.User("Deduping roms by {0}", deduperoms);
-
- // Set the merged type
- _mergedBy = deduperoms;
-
- List<string> keys = Keys;
- Parallel.ForEach(keys, Globals.ParallelOptions, key =>
- {
- // Get the possibly unsorted list
- List<DatItem> sortedlist = this[key];
-
- // Sort the list of items to be consistent
- DatItem.Sort(ref sortedlist, false);
-
- // If we're merging the roms, do so
- if (deduperoms == DedupeType.Full || (deduperoms == DedupeType.Game && bucketBy == SortedBy.Game))
- {
- sortedlist = DatItem.Merge(sortedlist);
- }
-
- // Add the list back to the dictionary
- Remove(key);
- AddRange(key, sortedlist);
- });
- }
- // If the merge type is the same, we want to sort the dictionary to be consistent
- else
- {
- List<string> keys = Keys;
- Parallel.ForEach(keys, Globals.ParallelOptions, key =>
- {
- // Get the possibly unsorted list
- List<DatItem> sortedlist = this[key];
-
- // Sort the list of items to be consistent
- DatItem.Sort(ref sortedlist, false);
- });
- }
-
- // Now clean up all empty keys
- CleanEmptyKeys();
- }
-
- /// <summary>
- /// Take the arbitrarily sorted Files Dictionary and convert to one sorted by the highest available hash
- /// </summary>
- /// <param name="deduperoms">Dedupe type that should be used (default none)</param>
- /// <param name="lower">True if the key should be lowercased (default), false otherwise</param>
- /// <param name="norename">True if games should only be compared on game and file name, false if system and source are counted</param>
- public void BucketByBestAvailable(DedupeType deduperoms = DedupeType.None, bool lower = true, bool norename = true)
- {
- // If all items are supposed to have a SHA-512, we sort by that
- if (RomCount + DiskCount - NodumpCount == SHA512Count)
- {
- BucketBy(SortedBy.SHA512, deduperoms, lower, norename);
- }
-
- // If all items are supposed to have a SHA-384, we sort by that
- else if (RomCount + DiskCount - NodumpCount == SHA384Count)
- {
- BucketBy(SortedBy.SHA384, deduperoms, lower, norename);
- }
-
- // If all items are supposed to have a SHA-256, we sort by that
- else if (RomCount + DiskCount - NodumpCount == SHA256Count)
- {
- BucketBy(SortedBy.SHA256, deduperoms, lower, norename);
- }
-
- // If all items are supposed to have a SHA-1, we sort by that
- else if (RomCount + DiskCount - NodumpCount == SHA1Count)
- {
- BucketBy(SortedBy.SHA1, deduperoms, lower, norename);
- }
-
- // If all items are supposed to have an MD5, we sort by that
- else if (RomCount + DiskCount - NodumpCount == MD5Count)
- {
- BucketBy(SortedBy.MD5, deduperoms, lower, norename);
- }
-
- // Otherwise, we sort by CRC
- else
- {
- BucketBy(SortedBy.CRC, deduperoms, lower, norename);
- }
- }
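Illustrative aside (not part of the diff): how a caller might combine the two bucketing entry points above; a sketch only, assuming `datFile` already has items parsed in.

    // Re-bucket by CRC with full dedupe, as the diff/replace code below does,
    // or let the DatFile pick the strongest hash that every item carries.
    datFile.BucketBy(SortedBy.CRC, DedupeType.Full);
    datFile.BucketByBestAvailable(DedupeType.None);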
-
- /// <summary>
- /// Clean out all empty keys in the dictionary
- /// </summary>
- private void CleanEmptyKeys()
- {
- List<string> keys = Keys;
- foreach(string key in keys)
- {
- if (this[key].Count == 0)
- {
- Remove(key);
- }
- }
- }
-
- #endregion
-
- #region Constructors
-
- /// <summary>
- /// Create a new, empty DatFile object
- /// </summary>
- public DatFile()
- {
- _items = new SortedDictionary<string, List<DatItem>>();
- }
-
- /// <summary>
- /// Create a new DatFile from an existing one
- /// </summary>
- /// <param name="datFile">DatFile to get the values from</param>
- /// <param name="cloneHeader">True if only the header should be cloned (default), false if this should be a reference to another DatFile</param>
- public DatFile(DatFile datFile, bool cloneHeader = true)
- {
- if (cloneHeader)
- {
- this._datHeader = (DatHeader)datFile._datHeader.Clone();
- }
- else
- {
- this._datHeader = datFile._datHeader;
- this._items = datFile._items;
- this._sortedBy = datFile._sortedBy;
- this._mergedBy = datFile._mergedBy;
- this._datStats = datFile._datStats;
- }
- }
-
- /// <summary>
- /// Create a new DatFile from an existing DatHeader
- /// </summary>
- /// <param name="datHeader">DatHeader to get the values from</param>
- public DatFile(DatHeader datHeader)
- {
- _datHeader = (DatHeader)datHeader.Clone();
- }
-
- #endregion
-
- #region Converting and Updating
-
- /// <summary>
- /// Determine if input files should be merged, diffed, or processed individually
- /// </summary>
- /// <param name="inputPaths">Names of the input files and/or folders</param>
- /// <param name="basePaths">Names of base files and/or folders</param>
- /// <param name="outDir">Optional param for output directory</param>
- /// <param name="updateMode">Non-zero flag for diffing mode, zero otherwise</param>
- /// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
- /// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
- /// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
- /// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
- /// <param name="descAsName">True to use game descriptions as the names, false otherwise (default)</param>
- /// <param name="filter">Filter object to be passed to the DatItem level</param>
- /// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
- /// <param name="replaceMode">ReplaceMode representing what should be updated [only for base replacement]</param>
- /// <param name="onlySame">True if descriptions should only be replaced if the game name is the same, false otherwise [only for base replacement]</param>
- public void DetermineUpdateType(List<string> inputPaths, List<string> basePaths, string outDir, UpdateMode updateMode, bool inplace, bool skip,
- bool clean, bool remUnicode, bool descAsName, Filter filter, SplitType splitType, ReplaceMode replaceMode, bool onlySame)
- {
- // Ensure we only have files in the inputs
- List<string> inputFileNames = Utilities.GetOnlyFilesFromInputs(inputPaths, appendparent: true);
- List<string> baseFileNames = Utilities.GetOnlyFilesFromInputs(basePaths);
-
- // If we're in standard update mode, run through all of the inputs
- if (updateMode == UpdateMode.None)
- {
- Update(inputFileNames, outDir, inplace, clean, remUnicode, descAsName, filter, splitType);
- return;
- }
-
- // Reverse inputs if we're in a required mode
- if ((updateMode & UpdateMode.DiffReverseCascade) != 0)
- {
- inputFileNames.Reverse();
- }
- if ((updateMode & UpdateMode.ReverseBaseReplace) != 0)
- {
- baseFileNames.Reverse();
- }
-
- // If we're in merging mode
- if ((updateMode & UpdateMode.Merge) != 0)
- {
- // Populate the combined data and get the headers
- List<DatFile> datHeaders = PopulateUserData(inputFileNames, inplace, clean, remUnicode, descAsName, outDir, filter, splitType);
- MergeNoDiff(inputFileNames, datHeaders, outDir);
- }
- // If we have one of the standard diffing modes
- else if ((updateMode & UpdateMode.DiffDupesOnly) != 0
- || (updateMode & UpdateMode.DiffNoDupesOnly) != 0
- || (updateMode & UpdateMode.DiffIndividualsOnly) != 0)
- {
- // Populate the combined data
- PopulateUserData(inputFileNames, inplace, clean, remUnicode, descAsName, outDir, filter, splitType);
- DiffNoCascade(inputFileNames, outDir, filter, updateMode);
- }
- // If we have one of the cascaded diffing modes
- else if ((updateMode & UpdateMode.DiffCascade) != 0
- || (updateMode & UpdateMode.DiffReverseCascade) != 0)
- {
- // Populate the combined data and get the headers
- List<DatFile> datHeaders = PopulateUserData(inputFileNames, inplace, clean, remUnicode, descAsName, outDir, filter, splitType);
- DiffCascade(inputFileNames, datHeaders, outDir, inplace, skip);
- }
- // If we have diff against mode
- else if ((updateMode & UpdateMode.DiffAgainst) != 0)
- {
- // Populate the combined data
- PopulateUserData(baseFileNames, inplace, clean, remUnicode, descAsName, outDir, filter, splitType);
- DiffAgainst(inputFileNames, outDir, inplace, clean, remUnicode, descAsName, filter, splitType);
- }
- // If we have one of the base replacement modes
- else if ((updateMode & UpdateMode.BaseReplace) != 0
- || (updateMode & UpdateMode.ReverseBaseReplace) != 0)
- {
- // Populate the combined data
- PopulateUserData(baseFileNames, inplace, clean, remUnicode, descAsName, outDir, filter, splitType);
- BaseReplace(inputFileNames, outDir, inplace, clean, remUnicode, descAsName, filter, splitType, replaceMode, onlySame);
- }
-
- return;
- }
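Illustrative aside (not part of the diff): UpdateMode is treated as a combinable flags value throughout the dispatch above, so callers test membership with bitwise AND rather than equality. A hypothetical example:

    // DiffCascade and DiffReverseCascade share a code path in the dispatch above.
    UpdateMode mode = UpdateMode.DiffReverseCascade;
    bool cascaded = (mode & UpdateMode.DiffCascade) != 0
        || (mode & UpdateMode.DiffReverseCascade) != 0;   // true here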
-
- /// <summary>
- /// Populate the user DatData object from the input files
- /// </summary>
- /// <param name="inputs">Paths to DATs to parse</param>
- /// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
- /// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
- /// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
- /// <param name="descAsName">True to use game descriptions as the names, false otherwise (default)</param>
- /// <param name="outDir">Optional param for output directory</param>
- /// <param name="filter">Filter object to be passed to the DatItem level</param>
- /// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
- /// <returns>List of DatData objects representing headers</returns>
- private List<DatFile> PopulateUserData(List<string> inputs, bool inplace, bool clean, bool remUnicode, bool descAsName,
- string outDir, Filter filter, SplitType splitType)
- {
- DatFile[] datHeaders = new DatFile[inputs.Count];
- InternalStopwatch watch = new InternalStopwatch("Processing individual DATs");
-
- // Parse all of the DATs into their own DatFiles in the array
- Parallel.For(0, inputs.Count, Globals.ParallelOptions, i =>
- {
- string input = inputs[i];
- Globals.Logger.User("Adding DAT: {0}", input.Split('¬')[0]);
- datHeaders[i] = new DatFile()
- {
- DatFormat = (this.DatFormat != 0 ? this.DatFormat : 0),
-
- // Filtering that needs to be copied over
- ExcludeFields = (bool[])this.ExcludeFields.Clone(),
- OneRom = this.OneRom,
- KeepEmptyGames = this.KeepEmptyGames,
- SceneDateStrip = this.SceneDateStrip,
- DedupeRoms = this.DedupeRoms,
- Prefix = this.Prefix,
- Postfix = this.Postfix,
- AddExtension = this.AddExtension,
- ReplaceExtension = this.ReplaceExtension,
- RemoveExtension = this.RemoveExtension,
- Romba = this.Romba,
- GameName = this.GameName,
- Quotes = this.Quotes,
- UseRomName = this.UseRomName,
- };
-
- datHeaders[i].Parse(input, i, i, splitType, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName);
- });
-
- watch.Stop();
-
- watch.Start("Populating internal DAT");
- Parallel.For(0, inputs.Count, Globals.ParallelOptions, i =>
- {
- // Get the list of keys from the DAT
- List<string> keys = datHeaders[i].Keys;
- foreach (string key in keys)
- {
- // Add everything from the key to the internal DAT
- AddRange(key, datHeaders[i][key]);
-
- // Now remove the key from the source DAT
- datHeaders[i].Remove(key);
- }
-
- // Now remove the file dictionary from the source DAT to save memory
- datHeaders[i].DeleteDictionary();
- });
-
- // Now that we have a merged DAT, filter it
- filter.FilterDatFile(this);
-
- watch.Stop();
-
- return datHeaders.ToList();
- }
-
- /// <summary>
- /// Replace item values from the base set represented by the current DAT
- /// </summary>
- /// <param name="inputFileNames">Names of the input files</param>
- /// <param name="outDir">Optional param for output directory</param>
- /// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
- /// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
- /// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
- /// <param name="descAsName">True to allow SL DATs to have game names used instead of descriptions, false otherwise (default)</param>
- /// <param name="filter">Filter object to be passed to the DatItem level</param>
- /// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
- /// <param name="replaceMode">ReplaceMode representing what should be updated</param>
- /// <param name="onlySame">True if descriptions should only be replaced if the game name is the same, false otherwise</param>
- public void BaseReplace(List<string> inputFileNames, string outDir, bool inplace, bool clean, bool remUnicode,
- bool descAsName, Filter filter, SplitType splitType, ReplaceMode replaceMode, bool onlySame)
- {
- // We want to try to replace each item in each input DAT from the base
- foreach (string path in inputFileNames)
- {
- Globals.Logger.User("Replacing items in '{0}' from the base DAT", path.Split('¬')[0]);
-
- // First we parse in the DAT internally
- DatFile intDat = new DatFile()
- {
- DatFormat = (this.DatFormat != 0 ? this.DatFormat : 0),
-
- // Filtering that needs to be copied over
- ExcludeFields = (bool[])this.ExcludeFields.Clone(),
- OneRom = this.OneRom,
- KeepEmptyGames = this.KeepEmptyGames,
- SceneDateStrip = this.SceneDateStrip,
- DedupeRoms = this.DedupeRoms,
- Prefix = this.Prefix,
- Postfix = this.Postfix,
- AddExtension = this.AddExtension,
- ReplaceExtension = this.ReplaceExtension,
- RemoveExtension = this.RemoveExtension,
- Romba = this.Romba,
- GameName = this.GameName,
- Quotes = this.Quotes,
- UseRomName = this.UseRomName,
- };
-
- intDat.Parse(path, 1, 1, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName);
- filter.FilterDatFile(intDat);
-
- // If we are matching based on hashes of any sort
- if ((replaceMode & ReplaceMode.ItemName) != 0
- || (replaceMode & ReplaceMode.Hash) != 0)
- {
- // For comparison's sake, we want to use CRC as the base ordering
- BucketBy(SortedBy.CRC, DedupeType.Full);
- intDat.BucketBy(SortedBy.CRC, DedupeType.None);
-
- // Then we do a hashwise comparison against the base DAT
- List<string> keys = intDat.Keys;
- Parallel.ForEach(keys, Globals.ParallelOptions, key =>
- {
- List<DatItem> datItems = intDat[key];
- List<DatItem> newDatItems = new List<DatItem>();
- foreach (DatItem datItem in datItems)
- {
- // If we have something other than a Rom or Disk, then this doesn't do anything
- if (datItem.Type != ItemType.Disk && datItem.Type != ItemType.Rom)
- {
- newDatItems.Add((DatItem)datItem.Clone());
- continue;
- }
-
- List dupes = datItem.GetDuplicates(this, sorted: true);
- DatItem newDatItem = (DatItem)datItem.Clone();
-
- if (dupes.Count > 0)
- {
- // If we're updating names, replace using the first found name
- if ((replaceMode & ReplaceMode.ItemName) != 0)
- {
- newDatItem.Name = dupes[0].Name;
- }
-
- // If we're updating hashes, only replace if the current item doesn't have them
- if ((replaceMode & ReplaceMode.Hash) != 0)
- {
- if (newDatItem.Type == ItemType.Rom)
- {
- Rom newRomItem = (Rom)newDatItem;
- if (String.IsNullOrEmpty(newRomItem.CRC) && !String.IsNullOrEmpty(((Rom)dupes[0]).CRC))
- {
- newRomItem.CRC = ((Rom)dupes[0]).CRC;
- }
- if (String.IsNullOrEmpty(newRomItem.MD5) && !String.IsNullOrEmpty(((Rom)dupes[0]).MD5))
- {
- newRomItem.MD5 = ((Rom)dupes[0]).MD5;
- }
- if (String.IsNullOrEmpty(newRomItem.SHA1) && !String.IsNullOrEmpty(((Rom)dupes[0]).SHA1))
- {
- newRomItem.SHA1 = ((Rom)dupes[0]).SHA1;
- }
- if (String.IsNullOrEmpty(newRomItem.SHA256) && !String.IsNullOrEmpty(((Rom)dupes[0]).SHA256))
- {
- newRomItem.SHA256 = ((Rom)dupes[0]).SHA256;
- }
- if (String.IsNullOrEmpty(newRomItem.SHA384) && !String.IsNullOrEmpty(((Rom)dupes[0]).SHA384))
- {
- newRomItem.SHA384 = ((Rom)dupes[0]).SHA384;
- }
- if (String.IsNullOrEmpty(newRomItem.SHA512) && !String.IsNullOrEmpty(((Rom)dupes[0]).SHA512))
- {
- newRomItem.SHA512 = ((Rom)dupes[0]).SHA512;
- }
-
- newDatItem = (Rom)newRomItem.Clone();
- }
- else if (newDatItem.Type == ItemType.Disk)
- {
- Disk newDiskItem = (Disk)newDatItem;
- if (String.IsNullOrEmpty(newDiskItem.MD5) && !String.IsNullOrEmpty(((Disk)dupes[0]).MD5))
- {
- newDiskItem.MD5 = ((Disk)dupes[0]).MD5;
- }
- if (String.IsNullOrEmpty(newDiskItem.SHA1) && !String.IsNullOrEmpty(((Disk)dupes[0]).SHA1))
- {
- newDiskItem.SHA1 = ((Disk)dupes[0]).SHA1;
- }
- if (String.IsNullOrEmpty(newDiskItem.SHA256) && !String.IsNullOrEmpty(((Disk)dupes[0]).SHA256))
- {
- newDiskItem.SHA256 = ((Disk)dupes[0]).SHA256;
- }
- if (String.IsNullOrEmpty(newDiskItem.SHA384) && !String.IsNullOrEmpty(((Disk)dupes[0]).SHA384))
- {
- newDiskItem.SHA384 = ((Disk)dupes[0]).SHA384;
- }
- if (String.IsNullOrEmpty(newDiskItem.SHA512) && !String.IsNullOrEmpty(((Disk)dupes[0]).SHA512))
- {
- newDiskItem.SHA512 = ((Disk)dupes[0]).SHA512;
- }
-
- newDatItem = (Disk)newDiskItem.Clone();
- }
- }
- }
-
- newDatItems.Add(newDatItem);
- }
-
- // Now add the new list to the key
- intDat.Remove(key);
- intDat.AddRange(key, newDatItems);
- });
- }
-
- // If we are matching based on names of any sort
- if ((replaceMode & ReplaceMode.Description) != 0
- || (replaceMode & ReplaceMode.MachineType) != 0
- || (replaceMode & ReplaceMode.Year) != 0
- || (replaceMode & ReplaceMode.Manufacturer) != 0
- || (replaceMode & ReplaceMode.Parents) != 0)
- {
- // For comparison's sake, we want to use Machine Name as the base ordering
- BucketBy(SortedBy.Game, DedupeType.Full);
- intDat.BucketBy(SortedBy.Game, DedupeType.None);
-
- // Then we do a namewise comparison against the base DAT
- List<string> keys = intDat.Keys;
- Parallel.ForEach(keys, Globals.ParallelOptions, key =>
- {
- List<DatItem> datItems = intDat[key];
- List<DatItem> newDatItems = new List<DatItem>();
- foreach (DatItem datItem in datItems)
- {
- DatItem newDatItem = (DatItem)datItem.Clone();
- if (Contains(key) && this[key].Count() > 0)
- {
- if ((replaceMode & ReplaceMode.Description) != 0)
- {
- if (!onlySame || (onlySame && newDatItem.MachineName == newDatItem.MachineDescription))
- {
- newDatItem.MachineDescription = this[key][0].MachineDescription;
- }
- }
- if ((replaceMode & ReplaceMode.MachineType) != 0)
- {
- newDatItem.MachineType = this[key][0].MachineType;
- }
- if ((replaceMode & ReplaceMode.Year) != 0)
- {
- newDatItem.Year = this[key][0].Year;
- }
- if ((replaceMode & ReplaceMode.Manufacturer) != 0)
- {
- newDatItem.Manufacturer = this[key][0].Manufacturer;
- }
- if ((replaceMode & ReplaceMode.Parents) != 0)
- {
- newDatItem.CloneOf = this[key][0].CloneOf;
- newDatItem.RomOf = this[key][0].RomOf;
- newDatItem.SampleOf = this[key][0].SampleOf;
- }
- }
-
- newDatItems.Add(newDatItem);
- }
-
- // Now add the new list to the key
- intDat.Remove(key);
- intDat.AddRange(key, newDatItems);
- });
- }
-
- // Determine the output path for the DAT
- string interOutDir = Utilities.GetOutputPath(outDir, path, inplace);
-
- // Once we're done, try writing out
- intDat.Write(interOutDir, overwrite: inplace);
-
- // Due to possible memory requirements, we force a garbage collection
- GC.Collect();
- }
- }
-
- /// <summary>
- /// Output diffs against a base set represented by the current DAT
- /// </summary>
- /// <param name="inputFileNames">Names of the input files</param>
- /// <param name="outDir">Optional param for output directory</param>
- /// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
- /// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
- /// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
- /// <param name="descAsName">True to use game descriptions as the names, false otherwise (default)</param>
- /// <param name="filter">Filter object to be passed to the DatItem level</param>
- /// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
- public void DiffAgainst(List<string> inputFileNames, string outDir, bool inplace, bool clean, bool remUnicode,
- bool descAsName, Filter filter, SplitType splitType)
- {
- // For comparison's sake, we want to use CRC as the base ordering
- BucketBy(SortedBy.CRC, DedupeType.Full);
-
- // Now we want to compare each input DAT against the base
- foreach (string path in inputFileNames)
- {
- Globals.Logger.User("Comparing '{0}'' to base DAT", path.Split('¬')[0]);
-
- // First we parse in the DAT internally
- DatFile intDat = new DatFile();
- intDat.Parse(path, 1, 1, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName);
-
- // For comparison's sake, we want to use CRC as the base ordering
- intDat.BucketBy(SortedBy.CRC, DedupeType.Full);
-
- // Then we do a hashwise comparison against the base DAT
- List<string> keys = intDat.Keys;
- Parallel.ForEach(keys, Globals.ParallelOptions, key =>
- {
- List<DatItem> datItems = intDat[key];
- List<DatItem> keepDatItems = new List<DatItem>();
- foreach (DatItem datItem in datItems)
- {
- if (!datItem.HasDuplicates(this, true))
- {
- keepDatItems.Add(datItem);
- }
- }
-
- // Now add the new list to the key
- intDat.Remove(key);
- intDat.AddRange(key, keepDatItems);
- });
-
- // Determine the output path for the DAT
- string interOutDir = Utilities.GetOutputPath(outDir, path, inplace);
-
- // Once we're done, try writing out
- intDat.Write(interOutDir, overwrite: inplace);
-
- // Due to possible memory requirements, we force a garbage collection
- GC.Collect();
- }
- }
-
- /// <summary>
- /// Output cascading diffs
- /// </summary>
- /// <param name="inputs">List of inputs to write out from</param>
- /// <param name="datHeaders">Dat headers used optionally</param>
- /// <param name="outDir">Output directory to write the DATs to</param>
- /// <param name="inplace">True if cascaded diffs are output in-place, false otherwise</param>
- /// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
- public void DiffCascade(List<string> inputs, List<DatFile> datHeaders, string outDir, bool inplace, bool skip)
- {
- // Create a list of DatData objects representing output files
- List<DatFile> outDats = new List<DatFile>();
-
- // Loop through each of the inputs and get or create a new DatData object
- InternalStopwatch watch = new InternalStopwatch("Initializing all output DATs");
-
- DatFile[] outDatsArray = new DatFile[inputs.Count];
- Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
- {
- string innerpost = " (" + j + " - " + Utilities.GetFilenameFromFileAndParent(inputs[j], true) + " Only)";
- DatFile diffData;
-
- // If we're in inplace mode or the output directory is set, take the appropriate DatData object already stored
- if (inplace || outDir != Environment.CurrentDirectory)
- {
- diffData = datHeaders[j];
- }
- else
- {
- diffData = new DatFile(this);
- diffData.FileName += innerpost;
- diffData.Name += innerpost;
- diffData.Description += innerpost;
- }
-
- diffData.ResetDictionary();
- outDatsArray[j] = diffData;
- });
-
- outDats = outDatsArray.ToList();
- watch.Stop();
-
- // Then, ensure that the internal dat can be sorted in the best possible way
- BucketBy(SortedBy.CRC, DedupeType.None);
-
- // Now, loop through the dictionary and populate the correct DATs
- watch.Start("Populating all output DATs");
- List<string> keys = Keys;
-
- Parallel.ForEach(keys, Globals.ParallelOptions, key =>
- {
- List<DatItem> items = DatItem.Merge(this[key]);
-
- // If the rom list is empty or null, just skip it
- if (items == null || items.Count == 0)
- {
- return;
- }
-
- foreach (DatItem item in items)
- {
- // There's odd cases where there are items with System ID < 0. Skip them for now
- if (item.SystemID < 0)
- {
- Globals.Logger.Warning("Item found with a <0 SystemID: {0}", item.Name);
- continue;
- }
-
- outDats[item.SystemID].Add(key, item);
- }
- });
-
- watch.Stop();
-
- // Finally, loop through and output each of the DATs
- watch.Start("Outputting all created DATs");
-
- Parallel.For((skip ? 1 : 0), inputs.Count, Globals.ParallelOptions, j =>
- {
- string path = Utilities.GetOutputPath(outDir, inputs[j], inplace);
-
- // Try to output the file
- outDats[j].Write(path, overwrite: inplace);
- });
-
- watch.Stop();
- }
-
- /// <summary>
- /// Output non-cascading diffs
- /// </summary>
- /// <param name="inputs">List of inputs to write out from</param>
- /// <param name="outDir">Output directory to write the DATs to</param>
- /// <param name="filter">Filter object to be passed to the DatItem level</param>
- /// <param name="diff">Non-zero flag for diffing mode, zero otherwise</param>
- public void DiffNoCascade(List<string> inputs, string outDir, Filter filter, UpdateMode diff)
- {
- InternalStopwatch watch = new InternalStopwatch("Initializing all output DATs");
-
- // Default vars for use
- string post = "";
- DatFile outerDiffData = new DatFile();
- DatFile dupeData = new DatFile();
-
- // Fill in any information not in the base DAT
- if (String.IsNullOrWhiteSpace(FileName))
- {
- FileName = "All DATs";
- }
- if (String.IsNullOrWhiteSpace(Name))
- {
- Name = "All DATs";
- }
- if (String.IsNullOrWhiteSpace(Description))
- {
- Description = "All DATs";
- }
-
- // Don't have External dupes
- if ((diff & UpdateMode.DiffNoDupesOnly) != 0)
- {
- post = " (No Duplicates)";
- outerDiffData = new DatFile(this);
- outerDiffData.FileName += post;
- outerDiffData.Name += post;
- outerDiffData.Description += post;
- outerDiffData.ResetDictionary();
- }
-
- // Have External dupes
- if ((diff & UpdateMode.DiffDupesOnly) != 0)
- {
- post = " (Duplicates)";
- dupeData = new DatFile(this);
- dupeData.FileName += post;
- dupeData.Name += post;
- dupeData.Description += post;
- dupeData.ResetDictionary();
- }
-
- // Create a list of DatData objects representing individual output files
- List<DatFile> outDats = new List<DatFile>();
-
- // Loop through each of the inputs and get or create a new DatData object
- if ((diff & UpdateMode.DiffIndividualsOnly) != 0)
- {
- DatFile[] outDatsArray = new DatFile[inputs.Count];
-
- Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
- {
- string innerpost = " (" + j + " - " + Utilities.GetFilenameFromFileAndParent(inputs[j], true) + " Only)";
- DatFile diffData = new DatFile(this);
- diffData.FileName += innerpost;
- diffData.Name += innerpost;
- diffData.Description += innerpost;
- diffData.ResetDictionary();
- outDatsArray[j] = diffData;
- });
-
- outDats = outDatsArray.ToList();
- }
-
- watch.Stop();
-
- // Now, loop through the dictionary and populate the correct DATs
- watch.Start("Populating all output DATs");
-
- List<string> keys = Keys;
- Parallel.ForEach(keys, Globals.ParallelOptions, key =>
- {
- List<DatItem> items = DatItem.Merge(this[key]);
-
- // If the rom list is empty or null, just skip it
- if (items == null || items.Count == 0)
- {
- return;
- }
-
- // Loop through and add the items correctly
- foreach (DatItem item in items)
- {
- // No duplicates
- if ((diff & UpdateMode.DiffNoDupesOnly) != 0 || (diff & UpdateMode.DiffIndividualsOnly) != 0)
- {
- if ((item.Dupe & DupeType.Internal) != 0 || item.Dupe == 0x00)
- {
- // Individual DATs that are output
- if ((diff & UpdateMode.DiffIndividualsOnly) != 0)
- {
- outDats[item.SystemID].Add(key, item);
- }
-
- // Merged no-duplicates DAT
- if ((diff & UpdateMode.DiffNoDupesOnly) != 0)
- {
- DatItem newrom = item.Clone() as DatItem;
- newrom.MachineName += " (" + Path.GetFileNameWithoutExtension(inputs[item.SystemID].Split('¬')[0]) + ")";
-
- outerDiffData.Add(key, newrom);
- }
- }
- }
-
- // Duplicates only
- if ((diff & UpdateMode.DiffDupesOnly) != 0)
- {
- if ((item.Dupe & DupeType.External) != 0)
- {
- DatItem newrom = item.Clone() as DatItem;
- newrom.MachineName += " (" + Path.GetFileNameWithoutExtension(inputs[item.SystemID].Split('¬')[0]) + ")";
-
- dupeData.Add(key, newrom);
- }
- }
- }
- });
-
- watch.Stop();
-
- // Finally, loop through and output each of the DATs
- watch.Start("Outputting all created DATs");
-
- // Output the difflist (a-b)+(b-a) diff
- if ((diff & UpdateMode.DiffNoDupesOnly) != 0)
- {
- outerDiffData.Write(outDir, overwrite: false);
- }
-
- // Output the (ab) diff
- if ((diff & UpdateMode.DiffDupesOnly) != 0)
- {
- dupeData.Write(outDir, overwrite: false);
- }
-
- // Output the individual (a-b) DATs
- if ((diff & UpdateMode.DiffIndividualsOnly) != 0)
- {
- Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
- {
- string path = Utilities.GetOutputPath(outDir, inputs[j], false /* inplace */);
-
- // Try to output the file
- outDats[j].Write(path, overwrite: false);
- });
- }
-
- watch.Stop();
- }
-
- /// <summary>
- /// Output user defined merge
- /// </summary>
- /// <param name="inputs">List of inputs to write out from</param>
- /// <param name="datHeaders">Dat headers used optionally</param>
- /// <param name="outDir">Output directory to write the DATs to</param>
- public void MergeNoDiff(List<string> inputs, List<DatFile> datHeaders, string outDir)
- {
- // If we're in SuperDAT mode, prefix all games with their respective DATs
- if (Type == "SuperDAT")
- {
- List<string> keys = Keys;
- Parallel.ForEach(keys, Globals.ParallelOptions, key =>
- {
- List<DatItem> items = this[key].ToList();
- List<DatItem> newItems = new List<DatItem>();
- foreach (DatItem item in items)
- {
- DatItem newItem = item;
- string filename = inputs[newItem.SystemID].Split('¬')[0];
- string rootpath = inputs[newItem.SystemID].Split('¬')[1];
-
- rootpath += (String.IsNullOrWhiteSpace(rootpath) ? "" : Path.DirectorySeparatorChar.ToString());
- filename = filename.Remove(0, rootpath.Length);
- newItem.MachineName = Path.GetDirectoryName(filename) + Path.DirectorySeparatorChar
- + Path.GetFileNameWithoutExtension(filename) + Path.DirectorySeparatorChar
- + newItem.MachineName;
-
- newItems.Add(newItem);
- }
-
- Remove(key);
- AddRange(key, newItems);
- });
- }
-
- // Try to output the file
- Write(outDir, overwrite: false);
- }
-
- /// <summary>
- /// Convert, update, and filter a DAT file or set of files
- /// </summary>
- /// <param name="inputFileNames">Names of the input files and/or folders</param>
- /// <param name="outDir">Optional param for output directory</param>
- /// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
- /// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
- /// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
- /// <param name="descAsName">True to use game descriptions as the names, false otherwise (default)</param>
- /// <param name="filter">Filter object to be passed to the DatItem level</param>
- /// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
- public void Update(List<string> inputFileNames, string outDir, bool inplace, bool clean, bool remUnicode, bool descAsName,
- Filter filter, SplitType splitType)
- {
- // Iterate over the files
- foreach (string file in inputFileNames)
- {
- DatFile innerDatdata = new DatFile(this);
- Globals.Logger.User("Processing '{0}'", Path.GetFileName(file.Split('¬')[0]));
- innerDatdata.Parse(file, 0, 0, splitType, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName,
- keepext: ((innerDatdata.DatFormat & DatFormat.TSV) != 0
- || (innerDatdata.DatFormat & DatFormat.CSV) != 0
- || (innerDatdata.DatFormat & DatFormat.SSV) != 0));
- filter.FilterDatFile(innerDatdata);
-
- // Get the correct output path
- string realOutDir = Utilities.GetOutputPath(outDir, file, inplace);
-
- // Try to output the file, overwriting only if it's not in the current directory
- innerDatdata.Write(realOutDir, overwrite: inplace);
- }
- }
-
- #endregion
-
- #region Dictionary Manipulation
-
- /// <summary>
- /// Clones the files dictionary
- /// </summary>
- /// <returns>A new files dictionary instance</returns>
- public SortedDictionary<string, List<DatItem>> CloneDictionary()
- {
- // Create the placeholder dictionary to be used
- SortedDictionary<string, List<DatItem>> sorted = new SortedDictionary<string, List<DatItem>>();
-
- // Now perform a deep clone on the entire dictionary
- List<string> keys = Keys;
- foreach (string key in keys)
- {
- // Clone each list of DATs in the dictionary
- List<DatItem> olditems = this[key];
- List<DatItem> newitems = new List<DatItem>();
- foreach (DatItem item in olditems)
- {
- newitems.Add((DatItem)item.Clone());
- }
-
- // If the key is missing from the new dictionary, add it
- if (!sorted.ContainsKey(key))
- {
- sorted.Add(key, new List<DatItem>());
- }
-
- // Now add the list of items
- sorted[key].AddRange(newitems);
- }
-
- return sorted;
- }
-
- /// <summary>
- /// Delete the file dictionary
- /// </summary>
- public void DeleteDictionary()
- {
- _items = null;
- _sortedBy = SortedBy.Default;
- _mergedBy = DedupeType.None;
-
- // Reset statistics
- _datStats.Reset();
- }
-
- /// <summary>
- /// Reset the file dictionary
- /// </summary>
- public void ResetDictionary()
- {
- _items = new SortedDictionary<string, List<DatItem>>();
- _sortedBy = SortedBy.Default;
- _mergedBy = DedupeType.None;
-
- // Reset statistics
- _datStats.Reset();
- }
-
- #endregion
-
- #region Filtering
-
- /// <summary>
- /// Use game descriptions as names in the DAT, updating cloneof/romof/sampleof
- /// </summary>
- private void MachineDescriptionToName()
- {
- try
- {
- // First we want to get a mapping for all games to description
- ConcurrentDictionary<string, string> mapping = new ConcurrentDictionary<string, string>();
- List<string> keys = Keys;
- Parallel.ForEach(keys, Globals.ParallelOptions, key =>
- {
- List<DatItem> items = this[key];
- foreach (DatItem item in items)
- {
- // If the key mapping doesn't exist, add it
- if (!mapping.ContainsKey(item.MachineName))
- {
- mapping.TryAdd(item.MachineName, item.MachineDescription.Replace('/', '_').Replace("\"", "''").Replace(":", " -"));
- }
- }
- });
-
- // Now we loop through every item and update accordingly
- keys = Keys;
- Parallel.ForEach(keys, Globals.ParallelOptions, key =>
- {
- List<DatItem> items = this[key];
- List<DatItem> newItems = new List<DatItem>();
- foreach (DatItem item in items)
- {
- // Update machine name
- if (!String.IsNullOrWhiteSpace(item.MachineName) && mapping.ContainsKey(item.MachineName))
- {
- item.MachineName = mapping[item.MachineName];
- }
-
- // Update cloneof
- if (!String.IsNullOrWhiteSpace(item.CloneOf) && mapping.ContainsKey(item.CloneOf))
- {
- item.CloneOf = mapping[item.CloneOf];
- }
-
- // Update romof
- if (!String.IsNullOrWhiteSpace(item.RomOf) && mapping.ContainsKey(item.RomOf))
- {
- item.RomOf = mapping[item.RomOf];
- }
-
- // Update sampleof
- if (!String.IsNullOrWhiteSpace(item.SampleOf) && mapping.ContainsKey(item.SampleOf))
- {
- item.SampleOf = mapping[item.SampleOf];
- }
-
- // Add the new item to the output list
- newItems.Add(item);
- }
-
- // Replace the old list of roms with the new one
- Remove(key);
- AddRange(key, newItems);
- });
- }
- catch (Exception ex)
- {
- Globals.Logger.Warning(ex.ToString());
- }
- }
-
- /// <summary>
- /// Ensure that all roms are in their own game (or at least try to ensure)
- /// </summary>
- private void OneRomPerGame()
- {
- // For each rom, we want to update the game name to be "<game name>/<rom name without extension>"
- Parallel.ForEach(Keys, Globals.ParallelOptions, key =>
- {
- List<DatItem> items = this[key];
- for (int i = 0; i < items.Count; i++)
- {
- string[] splitname = items[i].Name.Split('.');
- items[i].MachineName += "/" + string.Join(".", splitname.Take(splitname.Length > 1 ? splitname.Length - 1 : 1));
- }
- });
- }
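Illustrative aside (not part of the diff): the renaming done in OneRomPerGame, replayed on a standalone string; the example names are assumptions, not taken from any real DAT. The extension is dropped from the rom name before it is appended to the machine name.

    string machineName = "somegame";
    string romName = "title.rom";
    string[] splitname = romName.Split('.');
    machineName += "/" + string.Join(".", splitname.Take(splitname.Length > 1 ? splitname.Length - 1 : 1));
    // machineName == "somegame/title"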
-
- /// <summary>
- /// Remove all items marked for removal from the DAT
- /// </summary>
- private void RemoveMarkedItems()
- {
- List<string> keys = Keys;
- foreach (string key in keys)
- {
- List<DatItem> items = this[key];
- List<DatItem> newItems = new List<DatItem>();
- foreach (DatItem item in items)
- {
- if (!item.Remove)
- {
- newItems.Add(item);
- }
- }
-
- Remove(key);
- AddRange(key, newItems);
- }
- }
-
- /// <summary>
- /// Strip the dates from the beginning of scene-style set names
- /// </summary>
- private void StripSceneDatesFromItems()
- {
- // Output the logging statement
- Globals.Logger.User("Stripping scene-style dates");
-
- // Set the regex pattern to use
- string pattern = @"([0-9]{2}\.[0-9]{2}\.[0-9]{2}-)(.*?-.*?)";
-
- // Now process all of the roms
- List<string> keys = Keys;
- Parallel.ForEach(keys, Globals.ParallelOptions, key =>
- {
- List<DatItem> items = this[key];
- for (int j = 0; j < items.Count; j++)
- {
- DatItem item = items[j];
- if (Regex.IsMatch(item.MachineName, pattern))
- {
- item.MachineName = Regex.Replace(item.MachineName, pattern, "$2");
- }
- if (Regex.IsMatch(item.MachineDescription, pattern))
- {
- item.MachineDescription = Regex.Replace(item.MachineDescription, pattern, "$2");
- }
-
- items[j] = item;
- }
-
- Remove(key);
- AddRange(key, items);
- });
- }
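Illustrative aside (not part of the diff): what the scene-date pattern above does to a made-up name (the name itself is an assumption):

    string pattern = @"([0-9]{2}\.[0-9]{2}\.[0-9]{2}-)(.*?-.*?)";
    string stripped = Regex.Replace("01.02.03-Some_Game-GROUP", pattern, "$2");
    // stripped == "Some_Game-GROUP"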
-
- #endregion
-
- #region Internal Merging/Splitting
-
- /// <summary>
- /// Use device_ref tags to get full non-merged sets and remove parenting tags
- /// </summary>
- /// <param name="mergeroms">Dedupe type to be used</param>
- public void CreateDeviceNonMergedSets(DedupeType mergeroms)
- {
- Globals.Logger.User("Creating device non-merged sets from the DAT");
-
- // For sake of ease, the first thing we want to do is sort by game
- BucketBy(SortedBy.Game, mergeroms, norename: true);
-
- // Now we want to loop through all of the games and set the correct information
- while (AddRomsFromDevices(false, false));
- while (AddRomsFromDevices(true, false));
-
- // Then, remove the romof and cloneof tags so it's not picked up by the manager
- RemoveTagsFromChild();
- }
-
- /// <summary>
- /// Use cloneof tags to create non-merged sets and remove the tags plus using the device_ref tags to get full sets
- /// </summary>
- /// <param name="mergeroms">Dedupe type to be used</param>
- public void CreateFullyNonMergedSets(DedupeType mergeroms)
- {
- Globals.Logger.User("Creating fully non-merged sets from the DAT");
-
- // For sake of ease, the first thing we want to do is sort by game
- BucketBy(SortedBy.Game, mergeroms, norename: true);
-
- // Now we want to loop through all of the games and set the correct information
- while (AddRomsFromDevices(true, true));
- AddRomsFromDevices(false, true);
- AddRomsFromParent();
-
- // Now that we have looped through the cloneof tags, we loop through the romof tags
- AddRomsFromBios();
-
- // Then, remove the romof and cloneof tags so it's not picked up by the manager
- RemoveTagsFromChild();
- }
-
- /// <summary>
- /// Use cloneof tags to create merged sets and remove the tags
- /// </summary>
- /// <param name="mergeroms">Dedupe type to be used</param>
- public void CreateMergedSets(DedupeType mergeroms)
- {
- Globals.Logger.User("Creating merged sets from the DAT");
-
- // For sake of ease, the first thing we want to do is sort by game
- BucketBy(SortedBy.Game, mergeroms, norename: true);
-
- // Now we want to loop through all of the games and set the correct information
- AddRomsFromChildren();
-
- // Now that we have looped through the cloneof tags, we loop through the romof tags
- RemoveBiosRomsFromChild(false);
- RemoveBiosRomsFromChild(true);
-
- // Finally, remove the romof and cloneof tags so it's not picked up by the manager
- RemoveTagsFromChild();
- }
-
- /// <summary>
- /// Use cloneof tags to create non-merged sets and remove the tags
- /// </summary>
- /// <param name="mergeroms">Dedupe type to be used</param>
- public void CreateNonMergedSets(DedupeType mergeroms)
- {
- Globals.Logger.User("Creating non-merged sets from the DAT");
-
- // For sake of ease, the first thing we want to do is sort by game
- BucketBy(SortedBy.Game, mergeroms, norename: true);
-
- // Now we want to loop through all of the games and set the correct information
- AddRomsFromParent();
-
- // Now that we have looped through the cloneof tags, we loop through the romof tags
- RemoveBiosRomsFromChild(false);
- RemoveBiosRomsFromChild(true);
-
- // Finally, remove the romof and cloneof tags so it's not picked up by the manager
- RemoveTagsFromChild();
- }
-
- /// <summary>
- /// Use cloneof and romof tags to create split sets and remove the tags
- /// </summary>
- /// <param name="mergeroms">Dedupe type to be used</param>
- public void CreateSplitSets(DedupeType mergeroms)
- {
- Globals.Logger.User("Creating split sets from the DAT");
-
- // For sake of ease, the first thing we want to do is sort by game
- BucketBy(SortedBy.Game, mergeroms, norename: true);
-
- // Now we want to loop through all of the games and set the correct information
- RemoveRomsFromChild();
-
- // Now that we have looped through the cloneof tags, we loop through the romof tags
- RemoveBiosRomsFromChild(false);
- RemoveBiosRomsFromChild(true);
-
- // Finally, remove the romof and cloneof tags so it's not picked up by the manager
- RemoveTagsFromChild();
- }
-
- /// <summary>
- /// Use romof tags to add roms to the children
- /// </summary>
- private void AddRomsFromBios()
- {
- List<string> games = Keys;
- foreach (string game in games)
- {
- // If the game has no items in it, we want to continue
- if (this[game].Count == 0)
- {
- continue;
- }
-
- // Determine if the game has a parent or not
- string parent = null;
- if (!String.IsNullOrWhiteSpace(this[game][0].RomOf))
- {
- parent = this[game][0].RomOf;
- }
-
- // If the parent doesn't exist, we want to continue
- if (String.IsNullOrWhiteSpace(parent))
- {
- continue;
- }
-
- // If the parent doesn't have any items, we want to continue
- if (this[parent].Count == 0)
- {
- continue;
- }
-
- // If the parent exists and has items, we copy the items from the parent to the current game
- DatItem copyFrom = this[game][0];
- List<DatItem> parentItems = this[parent];
- foreach (DatItem item in parentItems)
- {
- DatItem datItem = (DatItem)item.Clone();
- datItem.CopyMachineInformation(copyFrom);
- if (this[game].Where(i => i.Name == datItem.Name).Count() == 0 && !this[game].Contains(datItem))
- {
- Add(game, datItem);
- }
- }
- }
- }
-
- /// <summary>
- /// Use device_ref and optionally slotoption tags to add roms to the children
- /// </summary>
- /// <param name="dev">True if only child device sets are touched, false for non-device sets (default)</param>
- /// <param name="slotoptions">True if slotoptions tags are used as well, false otherwise</param>
- private bool AddRomsFromDevices(bool dev = false, bool slotoptions = false)
- {
- bool foundnew = false;
- List<string> games = Keys;
- foreach (string game in games)
- {
- // If the game doesn't have items, we continue
- if (this[game] == null || this[game].Count == 0)
- {
- continue;
- }
-
- // If the game (is/is not) a device, we want to continue
- if (dev ^ (this[game][0].MachineType & MachineType.Device) != 0)
- {
- continue;
- }
-
- // If the game has no devices, we continue
- if (this[game][0].Devices == null
- || this[game][0].Devices.Count == 0
- || (slotoptions && this[game][0].SlotOptions == null)
- || (slotoptions && this[game][0].SlotOptions.Count == 0))
- {
- continue;
- }
-
- // Determine if the game has any devices or not
- List<string> devices = this[game][0].Devices;
- List<string> newdevs = new List<string>();
- foreach (string device in devices)
- {
- // If the device doesn't exist then we continue
- if (this[device].Count == 0)
- {
- continue;
- }
-
- // Otherwise, copy the items from the device to the current game
- DatItem copyFrom = this[game][0];
- List<DatItem> devItems = this[device];
- foreach (DatItem item in devItems)
- {
- DatItem datItem = (DatItem)item.Clone();
- newdevs.AddRange(datItem.Devices ?? new List<string>());
- datItem.CopyMachineInformation(copyFrom);
- if (this[game].Where(i => i.Name.ToLowerInvariant() == datItem.Name.ToLowerInvariant()).Count() == 0)
- {
- foundnew = true;
- Add(game, datItem);
- }
- }
- }
-
- // Now that every device is accounted for, add the new list of devices, if they don't already exist
- foreach (string device in newdevs)
- {
- if (!this[game][0].Devices.Contains(device))
- {
- this[game][0].Devices.Add(device);
- }
- }
-
- // If we're checking slotoptions too
- if (slotoptions)
- {
- // Determine if the game has any slotoptions or not
- List<string> slotopts = this[game][0].SlotOptions;
- List<string> newslotopts = new List<string>();
- foreach (string slotopt in slotopts)
- {
- // If the slotoption doesn't exist then we continue
- if (this[slotopt].Count == 0)
- {
- continue;
- }
-
- // Otherwise, copy the items from the slotoption to the current game
- DatItem copyFrom = this[game][0];
- List<DatItem> slotItems = this[slotopt];
- foreach (DatItem item in slotItems)
- {
- DatItem datItem = (DatItem)item.Clone();
- newslotopts.AddRange(datItem.SlotOptions ?? new List<string>());
- datItem.CopyMachineInformation(copyFrom);
- if (this[game].Where(i => i.Name.ToLowerInvariant() == datItem.Name.ToLowerInvariant()).Count() == 0)
- {
- foundnew = true;
- Add(game, datItem);
- }
- }
- }
-
- // Now that every slotoption is accounted for, add the new list of slotoptions, if they don't already exist
- foreach (string slotopt in newslotopts)
- {
- if (!this[game][0].SlotOptions.Contains(slotopt))
- {
- this[game][0].SlotOptions.Add(slotopt);
- }
- }
- }
- }
-
- return foundnew;
- }
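-
- // Editorial sketch (not part of the original source): the "dev ^ ..." check above skips a
- // machine whenever the requested pass and the machine's device flag disagree, equivalent to:
- //
- //     bool isDevice = (machineType & MachineType.Device) != 0;
- //     bool skip = dev ^ isDevice;  // process device sets when dev == true, non-device sets otherwise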
-
- ///
- /// Use cloneof tags to add roms to the children, setting the new romof tag in the process
- ///
- private void AddRomsFromParent()
- {
- List<string> games = Keys;
- foreach (string game in games)
- {
- // If the game has no items in it, we want to continue
- if (this[game].Count == 0)
- {
- continue;
- }
-
- // Determine if the game has a parent or not
- string parent = null;
- if (!String.IsNullOrWhiteSpace(this[game][0].CloneOf))
- {
- parent = this[game][0].CloneOf;
- }
-
- // If the parent doesn't exist, we want to continue
- if (String.IsNullOrWhiteSpace(parent))
- {
- continue;
- }
-
- // If the parent doesn't have any items, we want to continue
- if (this[parent].Count == 0)
- {
- continue;
- }
-
- // If the parent exists and has items, we copy the items from the parent to the current game
- DatItem copyFrom = this[game][0];
- List<DatItem> parentItems = this[parent];
- foreach (DatItem item in parentItems)
- {
- DatItem datItem = (DatItem)item.Clone();
- datItem.CopyMachineInformation(copyFrom);
- if (this[game].Where(i => i.Name.ToLowerInvariant() == datItem.Name.ToLowerInvariant()).Count() == 0
- && !this[game].Contains(datItem))
- {
- Add(game, datItem);
- }
- }
-
- // Now we want to get the parent romof tag and put it in each of the items
- List<DatItem> items = this[game];
- string romof = this[parent][0].RomOf;
- foreach (DatItem item in items)
- {
- item.RomOf = romof;
- }
- }
- }
-
- ///
- /// Use cloneof tags to add roms to the parents, removing the child sets in the process
- ///
- private void AddRomsFromChildren()
- {
- List<string> games = Keys;
- foreach (string game in games)
- {
- // If the game has no items in it, we want to continue
- if (this[game].Count == 0)
- {
- continue;
- }
-
- // Determine if the game has a parent or not
- string parent = null;
- if (!String.IsNullOrWhiteSpace(this[game][0].CloneOf))
- {
- parent = this[game][0].CloneOf;
- }
-
- // If there is no parent, then we continue
- if (String.IsNullOrWhiteSpace(parent))
- {
- continue;
- }
-
- // Otherwise, move the items from the current game to a subfolder of the parent game
- DatItem copyFrom = this[parent].Count == 0 ? new Rom { MachineName = parent, MachineDescription = parent } : this[parent][0];
- List<DatItem> items = this[game];
- foreach (DatItem item in items)
- {
- // If the disk doesn't have a valid merge tag OR the merged file doesn't exist in the parent, then add it
- if (item.Type == ItemType.Disk && (((Disk)item).MergeTag == null || !this[parent].Select(i => i.Name).Contains(((Disk)item).MergeTag)))
- {
- item.CopyMachineInformation(copyFrom);
- Add(parent, item);
- }
-
- // Otherwise, if the parent doesn't already contain the non-disk (or a merge-equivalent), add it
- else if (item.Type != ItemType.Disk && !this[parent].Contains(item))
- {
- // Rename the child so it's in a subfolder
- item.Name = item.MachineName + "\\" + item.Name;
-
- // Update the machine to be the new parent
- item.CopyMachineInformation(copyFrom);
-
- // Add the rom to the parent set
- Add(parent, item);
- }
- }
-
- // Then, remove the old game so it's not picked up by the writer
- Remove(game);
- }
- }
-
- ///
- /// Remove all BIOS and device sets
- ///
- private void RemoveBiosAndDeviceSets()
- {
- List<string> games = Keys;
- foreach (string game in games)
- {
- if (this[game].Count > 0
- && ((this[game][0].MachineType & MachineType.Bios) != 0
- || (this[game][0].MachineType & MachineType.Device) != 0))
- {
- Remove(game);
- }
- }
- }
-
- ///
- /// Use romof tags to remove bios roms from children
- ///
- /// True if only child Bios sets are touched, false for non-bios sets (default)
- private void RemoveBiosRomsFromChild(bool bios = false)
- {
- // Loop through the romof tags
- List<string> games = Keys;
- foreach (string game in games)
- {
- // If the game has no items in it, we want to continue
- if (this[game].Count == 0)
- {
- continue;
- }
-
- // If the machine's BIOS flag doesn't match the current pass, we want to continue
- if (bios ^ (this[game][0].MachineType & MachineType.Bios) != 0)
- {
- continue;
- }
-
- // Determine if the game has a parent or not
- string parent = null;
- if (!String.IsNullOrWhiteSpace(this[game][0].RomOf))
- {
- parent = this[game][0].RomOf;
- }
-
- // If the parent doesn't exist, we want to continue
- if (String.IsNullOrWhiteSpace(parent))
- {
- continue;
- }
-
- // If the parent doesn't have any items, we want to continue
- if (this[parent].Count == 0)
- {
- continue;
- }
-
- // If the parent exists and has items, we remove the items that are in the parent from the current game
- List<DatItem> parentItems = this[parent];
- foreach (DatItem item in parentItems)
- {
- DatItem datItem = (DatItem)item.Clone();
- while (this[game].Contains(datItem))
- {
- Remove(game, datItem);
- }
- }
- }
- }
-
- ///
- /// Use cloneof tags to remove roms from the children
- ///
- private void RemoveRomsFromChild()
- {
- List<string> games = Keys;
- foreach (string game in games)
- {
- // If the game has no items in it, we want to continue
- if (this[game].Count == 0)
- {
- continue;
- }
-
- // Determine if the game has a parent or not
- string parent = null;
- if (!String.IsNullOrWhiteSpace(this[game][0].CloneOf))
- {
- parent = this[game][0].CloneOf;
- }
-
- // If the parent doesn't exist, we want to continue
- if (String.IsNullOrWhiteSpace(parent))
- {
- continue;
- }
-
- // If the parent doesn't have any items, we want to continue
- if (this[parent].Count == 0)
- {
- continue;
- }
-
- // If the parent exists and has items, we remove the parent items from the current game
- List<DatItem> parentItems = this[parent];
- foreach (DatItem item in parentItems)
- {
- DatItem datItem = (DatItem)item.Clone();
- while (this[game].Contains(datItem))
- {
- Remove(game, datItem);
- }
- }
-
- // Now we want to get the parent romof tag and put it in each of the remaining items
- List<DatItem> items = this[game];
- string romof = this[parent][0].RomOf;
- foreach (DatItem item in items)
- {
- item.RomOf = romof;
- }
- }
- }
-
- ///
- /// Remove all romof and cloneof tags from all games
- ///
- private void RemoveTagsFromChild()
- {
- List<string> games = Keys;
- foreach (string game in games)
- {
- List<DatItem> items = this[game];
- foreach (DatItem item in items)
- {
- item.CloneOf = null;
- item.RomOf = null;
- }
- }
- }
-
- #endregion
-
- #region Parsing
-
- ///
- /// Parse a DAT and return all found games and roms within
- ///
- /// Name of the file to be parsed
- /// System ID for the DAT
- /// Source ID for the DAT
- /// The DatData object representing found roms to this point
- /// True if full pathnames are to be kept, false otherwise (default)
- /// True if game names are sanitized, false otherwise (default)
- /// True if we should remove non-ASCII characters from output, false otherwise (default)
- /// True if descriptions should be used as names, false otherwise (default)
- /// True if original extension should be kept, false otherwise (default)
- /// True if tags from the DAT should be used to merge the output, false otherwise (default)
- public void Parse(string filename, int sysid, int srcid, bool keep = false, bool clean = false,
- bool remUnicode = false, bool descAsName = false, bool keepext = false, bool useTags = false)
- {
- Parse(filename, sysid, srcid, SplitType.None, keep: keep, clean: clean,
- remUnicode: remUnicode, descAsName: descAsName, keepext: keepext, useTags: useTags);
- }
-
- /// <summary>
- /// Parse a DAT and return all found games and roms within
- /// </summary>
- /// <param name="filename">Name of the file to be parsed</param>
- /// <param name="sysid">System ID for the DAT</param>
- /// <param name="srcid">Source ID for the DAT</param>
- /// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
- /// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param>
- /// <param name="clean">True if game names are sanitized, false otherwise (default)</param>
- /// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
- /// <param name="descAsName">True if descriptions should be used as names, false otherwise (default)</param>
- /// <param name="keepext">True if original extension should be kept, false otherwise (default)</param>
- /// <param name="useTags">True if tags from the DAT should be used to merge the output, false otherwise (default)</param>
- public void Parse(
- // Standard Dat parsing
- string filename,
- int sysid,
- int srcid,
-
- // Rom renaming
- SplitType splitType,
-
- // Miscellaneous
- bool keep = false,
- bool clean = false,
- bool remUnicode = false,
- bool descAsName = false,
- bool keepext = false,
- bool useTags = false)
- {
- // Check if we have a split path and get the filename accordingly
- if (filename.Contains("¬"))
- {
- filename = filename.Split('¬')[0];
- }
-
- // Check the file extension first as a safeguard
- if (!Utilities.HasValidDatExtension(filename))
- {
- return;
- }
-
- // If the output filename isn't set already, get the internal filename
- FileName = (String.IsNullOrWhiteSpace(FileName) ? (keepext ? Path.GetFileName(filename) : Path.GetFileNameWithoutExtension(filename)) : FileName);
-
- // If the output type isn't set already, get the internal output type
- DatFormat = (DatFormat == 0 ? Utilities.GetDatFormatFromFile(filename) : DatFormat);
- _sortedBy = SortedBy.CRC; // Setting this because it can reduce issues later
-
- // Now parse the correct type of DAT
- try
- {
- Utilities.GetDatFile(filename, this)?.ParseFile(filename, sysid, srcid, keep, clean, remUnicode);
- }
- catch (Exception ex)
- {
- Globals.Logger.Error("Error with file '{0}': {1}", filename, ex);
- }
-
- // If we want to use descriptions as names, update everything
- if (descAsName)
- {
- MachineDescriptionToName();
- }
-
- // If we are using tags from the DAT, set the proper input for split type unless overridden
- if (useTags && splitType == SplitType.None)
- {
- splitType = Utilities.GetSplitType(ForceMerging);
- }
-
- // Now we pre-process the DAT with the splitting/merging mode
- switch (splitType)
- {
- case SplitType.None:
- // No-op
- break;
- case SplitType.DeviceNonMerged:
- CreateDeviceNonMergedSets(DedupeType.None);
- break;
- case SplitType.FullNonMerged:
- CreateFullyNonMergedSets(DedupeType.None);
- break;
- case SplitType.NonMerged:
- CreateNonMergedSets(DedupeType.None);
- break;
- case SplitType.Merged:
- CreateMergedSets(DedupeType.None);
- break;
- case SplitType.Split:
- CreateSplitSets(DedupeType.None);
- break;
- }
-
- // Finally, we remove any blanks, if we aren't supposed to have any
- if (!KeepEmptyGames)
- {
- foreach (string key in Keys)
- {
- List<DatItem> items = this[key];
- List<DatItem> newitems = new List<DatItem>();
- foreach (DatItem item in items)
- {
- if (item.Type != ItemType.Blank)
- {
- newitems.Add(item);
- }
- }
-
- this.Remove(key);
- this.AddRange(key, newitems);
- }
- }
- }
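-
- // Illustrative usage sketch (editorial addition, not part of the original source); the path
- // and IDs below are hypothetical:
- //
- //     DatFile datFile = new DatFile();
- //     datFile.Parse(@"C:\dats\example.xml", 0, 0, SplitType.NonMerged, keep: true, clean: true);
- //
- // The format is detected from the file itself, the requested split type is applied, and blank
- // items are dropped unless KeepEmptyGames is set.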
-
- /// <summary>
- /// Add a rom to the Dat after checking
- /// </summary>
- /// <param name="item">Item data to check against</param>
- /// <param name="clean">True if the names should be cleaned to WoD standards, false otherwise</param>
- /// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
- /// <returns>The key for the item</returns>
- public string ParseAddHelper(DatItem item, bool clean, bool remUnicode)
- {
- string key = "";
-
- // If there's no name in the rom, we log and skip it
- if (item.Name == null)
- {
- Globals.Logger.Warning("{0}: Rom with no name found! Skipping...", FileName);
- return key;
- }
-
- // If we're in cleaning mode, sanitize the game name
- item.MachineName = (clean ? Utilities.CleanGameName(item.MachineName) : item.MachineName);
-
- // If we're stripping unicode characters, do so from all relevant things
- if (remUnicode)
- {
- item.Name = Utilities.RemoveUnicodeCharacters(item.Name);
- item.MachineName = Utilities.RemoveUnicodeCharacters(item.MachineName);
- item.MachineDescription = Utilities.RemoveUnicodeCharacters(item.MachineDescription);
- }
-
- // If we have a Rom or a Disk, clean the hash data
- if (item.Type == ItemType.Rom)
- {
- Rom itemRom = (Rom)item;
-
- // Sanitize the hashes from null, hex sizes, and "true blank" strings
- itemRom.CRC = Utilities.CleanHashData(itemRom.CRC, Constants.CRCLength);
- itemRom.MD5 = Utilities.CleanHashData(itemRom.MD5, Constants.MD5Length);
- itemRom.SHA1 = Utilities.CleanHashData(itemRom.SHA1, Constants.SHA1Length);
- itemRom.SHA256 = Utilities.CleanHashData(itemRom.SHA256, Constants.SHA256Length);
- itemRom.SHA384 = Utilities.CleanHashData(itemRom.SHA384, Constants.SHA384Length);
- itemRom.SHA512 = Utilities.CleanHashData(itemRom.SHA512, Constants.SHA512Length);
-
- // If we have the case where there is SHA-1 and nothing else, we don't fill in any other part of the data
- if (itemRom.Size == -1
- && String.IsNullOrWhiteSpace(itemRom.CRC)
- && String.IsNullOrWhiteSpace(itemRom.MD5)
- && !String.IsNullOrWhiteSpace(itemRom.SHA1)
- && String.IsNullOrWhiteSpace(itemRom.SHA256)
- && String.IsNullOrWhiteSpace(itemRom.SHA384)
- && String.IsNullOrWhiteSpace(itemRom.SHA512))
- {
- // No-op, just catch it so it doesn't go further
- Globals.Logger.Verbose("{0}: Entry with only SHA-1 found - '{1}'", FileName, itemRom.Name);
- }
-
- // If we have a rom and it's missing size AND the hashes match a 0-byte file, fill in the rest of the info
- else if ((itemRom.Size == 0 || itemRom.Size == -1)
- && ((itemRom.CRC == Constants.CRCZero || String.IsNullOrWhiteSpace(itemRom.CRC))
- || itemRom.MD5 == Constants.MD5Zero
- || itemRom.SHA1 == Constants.SHA1Zero
- || itemRom.SHA256 == Constants.SHA256Zero
- || itemRom.SHA384 == Constants.SHA384Zero
- || itemRom.SHA512 == Constants.SHA512Zero))
- {
- // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
- itemRom.Size = Constants.SizeZero;
- itemRom.CRC = Constants.CRCZero;
- itemRom.MD5 = Constants.MD5Zero;
- itemRom.SHA1 = Constants.SHA1Zero;
- itemRom.SHA256 = null;
- itemRom.SHA384 = null;
- itemRom.SHA512 = null;
- //itemRom.SHA256 = Constants.SHA256Zero;
- //itemRom.SHA384 = Constants.SHA384Zero;
- //itemRom.SHA512 = Constants.SHA512Zero;
- }
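- // Editorial note (not part of the original source): the *Zero constants above are the
- // well-known hashes of a zero-byte file (CRC32 00000000, MD5 d41d8cd98f00b204e9800998ecf8427e,
- // SHA-1 da39a3ee5e6b4b0d3255bfef95601890afd80709).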
- // If the file has no size and it's not the above case, skip and log
- else if (itemRom.ItemStatus != ItemStatus.Nodump && (itemRom.Size == 0 || itemRom.Size == -1))
- {
- Globals.Logger.Verbose("{0}: Incomplete entry for '{1}' will be output as nodump", FileName, itemRom.Name);
- itemRom.ItemStatus = ItemStatus.Nodump;
- }
- // If the file has a size but absolutely no hashes, skip and log
- else if (itemRom.ItemStatus != ItemStatus.Nodump
- && itemRom.Size > 0
- && String.IsNullOrWhiteSpace(itemRom.CRC)
- && String.IsNullOrWhiteSpace(itemRom.MD5)
- && String.IsNullOrWhiteSpace(itemRom.SHA1)
- && String.IsNullOrWhiteSpace(itemRom.SHA256)
- && String.IsNullOrWhiteSpace(itemRom.SHA384)
- && String.IsNullOrWhiteSpace(itemRom.SHA512))
- {
- Globals.Logger.Verbose("{0}: Incomplete entry for '{1}' will be output as nodump", FileName, itemRom.Name);
- itemRom.ItemStatus = ItemStatus.Nodump;
- }
-
- item = itemRom;
- }
- else if (item.Type == ItemType.Disk)
- {
- Disk itemDisk = (Disk)item;
-
- // Sanitize the hashes from null, hex sizes, and "true blank" strings
- itemDisk.MD5 = Utilities.CleanHashData(itemDisk.MD5, Constants.MD5Length);
- itemDisk.SHA1 = Utilities.CleanHashData(itemDisk.SHA1, Constants.SHA1Length);
- itemDisk.SHA256 = Utilities.CleanHashData(itemDisk.SHA256, Constants.SHA256Length);
- itemDisk.SHA384 = Utilities.CleanHashData(itemDisk.SHA384, Constants.SHA384Length);
- itemDisk.SHA512 = Utilities.CleanHashData(itemDisk.SHA512, Constants.SHA512Length);
-
- // If the file has absolutely no hashes, skip and log
- if (itemDisk.ItemStatus != ItemStatus.Nodump
- && String.IsNullOrWhiteSpace(itemDisk.MD5)
- && String.IsNullOrWhiteSpace(itemDisk.SHA1)
- && String.IsNullOrWhiteSpace(itemDisk.SHA256)
- && String.IsNullOrWhiteSpace(itemDisk.SHA384)
- && String.IsNullOrWhiteSpace(itemDisk.SHA512))
- {
- Globals.Logger.Verbose("Incomplete entry for '{0}' will be output as nodump", itemDisk.Name);
- itemDisk.ItemStatus = ItemStatus.Nodump;
- }
-
- item = itemDisk;
- }
-
- // Get the key and add the file
- key = Utilities.GetKeyFromDatItem(item, SortedBy.CRC);
- Add(key, item);
-
- return key;
- }
-
- ///
- /// Add a rom to the Dat after checking
- ///
- /// Item data to check against
- /// True if the names should be cleaned to WoD standards, false otherwise
- /// True if we should remove non-ASCII characters from output, false otherwise (default)
- /// The key for the item
- public async Task<string> ParseAddHelperAsync(DatItem item, bool clean, bool remUnicode)
- {
- return await Task.Run(() => ParseAddHelper(item, clean, remUnicode));
- }
-
- ///
- /// Parse DatFile and return all found games and roms within
- ///
- /// Name of the file to be parsed
- /// System ID for the DAT
- /// Source ID for the DAT
- /// True if full pathnames are to be kept, false otherwise (default)
- /// True if game names are sanitized, false otherwise (default)
- /// True if we should remove non-ASCII characters from output, false otherwise (default)
- public virtual void ParseFile(
- // Standard Dat parsing
- string filename,
- int sysid,
- int srcid,
-
- // Miscellaneous
- bool keep,
- bool clean,
- bool remUnicode)
- {
- throw new NotImplementedException();
- }
-
- #endregion
-
- #region Populate DAT from Directory
-
- ///
- /// Create a new Dat from a directory
- ///
- /// Base folder to be used in creating the DAT
- /// Hash flag saying what hashes should not be calculated
- /// True if the date should be omitted from the DAT, false otherwise
- /// True if archives should be treated as files, false otherwise
- /// Type of files that should be skipped
- /// True if blank items should be created for empty folders, false otherwise
- /// True if dates should be archived for all files, false otherwise
- /// Name of the directory to create a temp folder in (blank is current directory)
- /// Output directory to
- /// True if files should be copied to the temp directory before hashing, false otherwise
- /// Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise
- /// True if CHDs should be treated like regular files, false otherwise
- public bool PopulateFromDir(string basePath, Hash omitFromScan, bool bare, bool archivesAsFiles, SkipFileType skipFileType,
- bool addBlanks, bool addDate, string tempDir, bool copyFiles, string headerToCheckAgainst, bool chdsAsFiles)
- {
- // If the description is defined but not the name, set the name from the description
- if (String.IsNullOrWhiteSpace(Name) && !String.IsNullOrWhiteSpace(Description))
- {
- Name = Description;
- }
-
- // If the name is defined but not the description, set the description from the name
- else if (!String.IsNullOrWhiteSpace(Name) && String.IsNullOrWhiteSpace(Description))
- {
- Description = Name + (bare ? "" : " (" + Date + ")");
- }
-
- // If neither the name or description are defined, set them from the automatic values
- else if (String.IsNullOrWhiteSpace(Name) && String.IsNullOrWhiteSpace(Description))
- {
- string[] splitpath = basePath.Split(Path.DirectorySeparatorChar);
- Name = String.IsNullOrWhiteSpace(splitpath.Last()) ? splitpath[splitpath.Length - 2] : splitpath.Last();
- Description = Name + (bare ? "" : " (" + Date + ")");
- }
-
- // Clean the temp directory path
- tempDir = Utilities.EnsureTempDirectory(tempDir);
-
- // Process the input
- if (Directory.Exists(basePath))
- {
- Globals.Logger.Verbose("Folder found: {0}", basePath);
-
- // Process the files in the main folder or any subfolder
- List<string> files = Directory.EnumerateFiles(basePath, "*", SearchOption.AllDirectories).ToList();
- Parallel.ForEach(files, Globals.ParallelOptions, item =>
- {
- CheckFileForHashes(item, basePath, omitFromScan, bare, archivesAsFiles, skipFileType,
- addBlanks, addDate, tempDir, copyFiles, headerToCheckAgainst, chdsAsFiles);
- });
-
- // Now find all folders that are empty, if we are supposed to
- if (!Romba && addBlanks)
- {
- List<string> empties = Utilities.GetEmptyDirectories(basePath).ToList();
- Parallel.ForEach(empties, Globals.ParallelOptions, dir =>
- {
- // Get the full path for the directory
- string fulldir = Path.GetFullPath(dir);
-
- // Set the temporary variables
- string gamename = "";
- string romname = "";
-
- // If we have a SuperDAT, we want anything that's not the base path as the game, and the file as the rom
- if (Type == "SuperDAT")
- {
- gamename = fulldir.Remove(0, basePath.Length + 1);
- romname = "_";
- }
-
- // Otherwise, we want just the top level folder as the game, and the file as everything else
- else
- {
- gamename = fulldir.Remove(0, basePath.Length + 1).Split(Path.DirectorySeparatorChar)[0];
- romname = Path.Combine(fulldir.Remove(0, basePath.Length + 1 + gamename.Length), "_");
- }
-
- // Sanitize the names
- if (gamename.StartsWith(Path.DirectorySeparatorChar.ToString()))
- {
- gamename = gamename.Substring(1);
- }
- if (gamename.EndsWith(Path.DirectorySeparatorChar.ToString()))
- {
- gamename = gamename.Substring(0, gamename.Length - 1);
- }
- if (romname.StartsWith(Path.DirectorySeparatorChar.ToString()))
- {
- romname = romname.Substring(1);
- }
- if (romname.EndsWith(Path.DirectorySeparatorChar.ToString()))
- {
- romname = romname.Substring(0, romname.Length - 1);
- }
-
- Globals.Logger.Verbose("Adding blank empty folder: {0}", gamename);
- this["null"].Add(new Rom(romname, gamename, omitFromScan));
- });
- }
- }
- else if (File.Exists(basePath))
- {
- CheckFileForHashes(basePath, Path.GetDirectoryName(Path.GetDirectoryName(basePath)), omitFromScan, bare, archivesAsFiles,
- skipFileType, addBlanks, addDate, tempDir, copyFiles, headerToCheckAgainst, chdsAsFiles);
- }
-
- // Now that we're done, delete the temp folder (if it's not the default)
- Globals.Logger.User("Cleaning temp folder");
- if (tempDir != Path.GetTempPath())
- {
- Utilities.TryDeleteDirectory(tempDir);
- }
-
- return true;
- }
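-
- // Illustrative usage sketch (editorial addition, not part of the original source); the folder
- // path is hypothetical and parameter names follow the signature above:
- //
- //     DatFile datFile = new DatFile { Name = "example" };
- //     datFile.PopulateFromDir(@"C:\roms", omitFromScan: Hash.DeepHashes, bare: false,
- //         archivesAsFiles: false, skipFileType: SkipFileType.None, addBlanks: false,
- //         addDate: false, tempDir: "", copyFiles: false, headerToCheckAgainst: null,
- //         chdsAsFiles: false);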
-
- ///
- /// Check a given file for hashes, based on current settings
- ///
- /// Filename of the item to be checked
- /// Base folder to be used in creating the DAT
- /// Hash flag saying what hashes should not be calculated
- /// True if the date should be omitted from the DAT, false otherwise
- /// True if archives should be treated as files, false otherwise
- /// Type of files that should be skipped
- /// True if blank items should be created for empty folders, false otherwise
- /// True if dates should be archived for all files, false otherwise
- /// Name of the directory to create a temp folder in (blank is current directory)
- /// True if files should be copied to the temp directory before hashing, false otherwise
- /// Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise
- /// True if CHDs should be treated like regular files, false otherwise
- private void CheckFileForHashes(string item, string basePath, Hash omitFromScan, bool bare, bool archivesAsFiles,
- SkipFileType skipFileType, bool addBlanks, bool addDate, string tempDir, bool copyFiles, string headerToCheckAgainst, bool chdsAsFiles)
- {
- // Special case for if we are in Romba mode (all names are supposed to be SHA-1 hashes)
- if (Romba)
- {
- GZipArchive gzarc = new GZipArchive(item);
- BaseFile baseFile = gzarc.GetTorrentGZFileInfo();
-
- // If the rom is valid, write it out
- if (baseFile != null && baseFile.Filename != null)
- {
- // Add the list if it doesn't exist already
- Rom rom = new Rom(baseFile);
- Add(Utilities.GetKeyFromDatItem(rom, SortedBy.CRC), rom);
- Globals.Logger.User("File added: {0}", Path.GetFileNameWithoutExtension(item) + Environment.NewLine);
- }
- else
- {
- Globals.Logger.User("File not added: {0}", Path.GetFileNameWithoutExtension(item) + Environment.NewLine);
- return;
- }
-
- return;
- }
-
- // If we're copying files, copy it first and get the new filename
- string newItem = item;
- string newBasePath = basePath;
- if (copyFiles)
- {
- newBasePath = Path.Combine(tempDir, Guid.NewGuid().ToString());
- newItem = Path.GetFullPath(Path.Combine(newBasePath, Path.GetFullPath(item).Remove(0, basePath.Length + 1)));
- Utilities.TryCreateDirectory(Path.GetDirectoryName(newItem));
- File.Copy(item, newItem, true);
- }
-
- // Initialize possible archive variables
- BaseArchive archive = Utilities.GetArchive(newItem);
- List<BaseFile> extracted = null;
-
- // If we have an archive and we're supposed to scan it
- if (archive != null && !archivesAsFiles)
- {
- extracted = archive.GetChildren(omitFromScan: omitFromScan, date: addDate);
- }
-
- // If the file should be skipped based on type, do so now
- if ((extracted != null && skipFileType == SkipFileType.Archive)
- || (extracted == null && skipFileType == SkipFileType.File))
- {
- return;
- }
-
- // If the extracted list is null, just scan the item itself
- if (extracted == null)
- {
- ProcessFile(newItem, "", newBasePath, omitFromScan, addDate, headerToCheckAgainst, chdsAsFiles);
- }
- // Otherwise, add all of the found items
- else
- {
- // First take care of the found items
- Parallel.ForEach(extracted, Globals.ParallelOptions, rom =>
- {
- DatItem datItem = Utilities.GetDatItem(rom);
- ProcessFileHelper(newItem,
- datItem,
- basePath,
- (Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item));
- });
-
- // Then, if we're looking for blanks, get all of the blank folders and add them
- if (addBlanks)
- {
- List<string> empties = new List<string>();
-
- // Now get all blank folders from the archive
- if (archive != null)
- {
- empties = archive.GetEmptyFolders();
- }
-
- // Add all of the found empties to the DAT
- Parallel.ForEach(empties, Globals.ParallelOptions, empty =>
- {
- Rom emptyRom = new Rom(Path.Combine(empty, "_"), newItem, omitFromScan);
- ProcessFileHelper(newItem,
- emptyRom,
- basePath,
- (Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item));
- });
- }
- }
-
- // Clean up the temporary copy directory if we made one
- if (copyFiles && item != newItem)
- {
- Utilities.TryDeleteDirectory(newBasePath);
- }
- }
-
- ///
- /// Process a single file as a file
- ///
- /// File to be added
- /// Parent game to be used
- /// Path that represents the parent directory
- /// Hash flag saying what hashes should not be calculated
- /// True if dates should be archived for all files, false otherwise
- /// Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise
- /// True if CHDs should be treated like regular files, false otherwise
- private void ProcessFile(string item, string parent, string basePath, Hash omitFromScan,
- bool addDate, string headerToCheckAgainst, bool chdsAsFiles)
- {
- Globals.Logger.Verbose("'{0}' treated like a file", Path.GetFileName(item));
- BaseFile baseFile = Utilities.GetFileInfo(item, omitFromScan: omitFromScan, date: addDate, header: headerToCheckAgainst, chdsAsFiles: chdsAsFiles);
- ProcessFileHelper(item, Utilities.GetDatItem(baseFile), basePath, parent);
- }
-
- ///
- /// Process a single file as a file (with found Rom data)
- ///
- /// File to be added
- /// Rom data to be used to write to file
- /// Path that represents the parent directory
- /// Parent game to be used
- private void ProcessFileHelper(string item, DatItem datItem, string basepath, string parent)
- {
- // If we somehow got something other than a Rom or Disk, cancel out
- if (datItem.Type != ItemType.Rom && datItem.Type != ItemType.Disk)
- {
- return;
- }
-
- try
- {
- // If the basepath ends with a directory separator, remove it
- if (!basepath.EndsWith(Path.DirectorySeparatorChar.ToString()))
- {
- basepath += Path.DirectorySeparatorChar.ToString();
- }
-
- // Make sure we have the full item path
- item = Path.GetFullPath(item);
-
- // Process the item to sanitize names based on input
- SetDatItemInfo(datItem, item, parent, basepath);
-
- // Add the file information to the DAT
- string key = Utilities.GetKeyFromDatItem(datItem, SortedBy.CRC);
- Add(key, datItem);
-
- Globals.Logger.User("File added: {0}", datItem.Name + Environment.NewLine);
- }
- catch (IOException ex)
- {
- Globals.Logger.Error(ex.ToString());
- return;
- }
- }
-
- ///
- /// Set proper Game and Rom names from user inputs
- ///
- /// DatItem representing the input file
- /// Item name to use
- /// Parent name to use
- /// Base path to use
- private void SetDatItemInfo(DatItem datItem, string item, string parent, string basepath)
- {
- // Get the data to be added as game and item names
- string gamename = "";
- string romname = "";
-
- // If the parent is blank, then we have a non-archive file
- if (String.IsNullOrWhiteSpace(parent))
- {
- // If we have a SuperDAT, we want anything that's not the base path as the game, and the file as the rom
- if (Type == "SuperDAT")
- {
- gamename = Path.GetDirectoryName(item.Remove(0, basepath.Length));
- romname = Path.GetFileName(item);
- }
-
- // Otherwise, we want just the top level folder as the game, and the file as everything else
- else
- {
- gamename = item.Remove(0, basepath.Length).Split(Path.DirectorySeparatorChar)[0];
- romname = item.Remove(0, (Path.Combine(basepath, gamename).Length));
- }
- }
-
- // Otherwise, we assume that we have an archive
- else
- {
- // If we have a SuperDAT, we want the archive name as the game, and the file as everything else (?)
- if (Type == "SuperDAT")
- {
- gamename = parent;
- romname = datItem.Name;
- }
-
- // Otherwise, we want the archive name as the game, and the file as everything else
- else
- {
- gamename = parent;
- romname = datItem.Name;
- }
- }
-
- // Sanitize the names
- if (romname == null)
- {
- romname = "";
- }
- if (gamename.StartsWith(Path.DirectorySeparatorChar.ToString()))
- {
- gamename = gamename.Substring(1);
- }
- if (gamename.EndsWith(Path.DirectorySeparatorChar.ToString()))
- {
- gamename = gamename.Substring(0, gamename.Length - 1);
- }
- if (romname.StartsWith(Path.DirectorySeparatorChar.ToString()))
- {
- romname = romname.Substring(1);
- }
- if (romname.EndsWith(Path.DirectorySeparatorChar.ToString()))
- {
- romname = romname.Substring(0, romname.Length - 1);
- }
- if (!String.IsNullOrWhiteSpace(gamename) && String.IsNullOrWhiteSpace(romname))
- {
- romname = gamename;
- gamename = "Default";
- }
-
- // Update rom information
- datItem.Name = romname;
- datItem.MachineName = gamename;
- datItem.MachineDescription = gamename;
-
- // If we have a Disk, then the ".chd" extension needs to be removed
- if (datItem.Type == ItemType.Disk)
- {
- datItem.Name = datItem.Name.Replace(".chd", "");
- }
- }
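-
- // Editorial example (not part of the original source), assuming a basepath of "C:\roms" and a
- // loose file "C:\roms\Puzzle\level1\a.bin":
- //
- //     normal DAT -> MachineName "Puzzle",        Name "level1\a.bin"
- //     SuperDAT   -> MachineName "Puzzle\level1", Name "a.bin"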
-
- #endregion
-
- #region Rebuilding and Verifying
-
- /// <summary>
- /// Process the DAT and find all matches in input files and folders assuming they're a depot
- /// </summary>
- /// <param name="inputs">List of input files/folders to check</param>
- /// <param name="outDir">Output directory to use to build to</param>
- /// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
- /// <param name="delete">True if input files should be deleted, false otherwise</param>
- /// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
- /// <param name="outputFormat">Output format that files should be written to</param>
- /// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param>
- /// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
- /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
- /// <returns>True if rebuilding was a success, false otherwise</returns>
- public bool RebuildDepot(List<string> inputs, string outDir, bool date, bool delete,
- bool inverse, OutputFormat outputFormat, bool romba, bool updateDat, string headerToCheckAgainst)
- {
- #region Perform setup
-
- // If the DAT is not populated and inverse is not set, inform the user and quit
- if (Count == 0 && !inverse)
- {
- Globals.Logger.User("No entries were found to rebuild, exiting...");
- return false;
- }
-
- // Check that the output directory exists
- outDir = Utilities.EnsureOutputDirectory(outDir, create: true);
-
- // Now we want to get forcepack flag if it's not overridden
- if (outputFormat == OutputFormat.Folder && ForcePacking != ForcePacking.None)
- {
- switch (ForcePacking)
- {
- case ForcePacking.Zip:
- outputFormat = OutputFormat.TorrentZip;
- break;
- case ForcePacking.Unzip:
- outputFormat = OutputFormat.Folder;
- break;
- }
- }
-
- // Preload the Skipper list
- int listcount = Skipper.List.Count;
-
- #endregion
-
- bool success = true;
-
- #region Rebuild from depots in order
-
- string format = "";
- switch (outputFormat)
- {
- case OutputFormat.Folder:
- format = "directory";
- break;
- case OutputFormat.TapeArchive:
- format = "TAR";
- break;
- case OutputFormat.Torrent7Zip:
- format = "Torrent7Z";
- break;
- case OutputFormat.TorrentGzip:
- format = "TorrentGZ";
- break;
- case OutputFormat.TorrentLRZip:
- format = "TorrentLRZ";
- break;
- case OutputFormat.TorrentRar:
- format = "TorrentRAR";
- break;
- case OutputFormat.TorrentXZ:
- format = "TorrentXZ";
- break;
- case OutputFormat.TorrentZip:
- format = "TorrentZip";
- break;
- }
-
- InternalStopwatch watch = new InternalStopwatch("Rebuilding all files to {0}", format);
-
- // Now loop through and get only directories from the input paths
- List<string> directories = new List<string>();
- Parallel.ForEach(inputs, Globals.ParallelOptions, input =>
- {
- // Add to the list if the input is a directory
- if (Directory.Exists(input))
- {
- Globals.Logger.Verbose("Adding depot: {0}", input);
- lock (directories)
- {
- directories.Add(input);
- }
- }
- });
-
- // If we don't have any directories, we want to exit
- if (directories.Count == 0)
- {
- return success;
- }
-
- // Now that we have a list of depots, we want to sort the input DAT by SHA-1
- BucketBy(SortedBy.SHA1, DedupeType.None);
-
- // Then we want to loop through each of the hashes and see if we can rebuild
- List<string> hashes = Keys;
- foreach (string hash in hashes)
- {
- // Pre-empt any issues that could arise from string length
- if (hash.Length != Constants.SHA1Length)
- {
- continue;
- }
-
- Globals.Logger.User("Checking hash '{0}'", hash);
-
- // Get the extension path for the hash
- string subpath = Utilities.GetRombaPath(hash);
-
- // Find the first depot that includes the hash
- string foundpath = null;
- foreach (string directory in directories)
- {
- if (File.Exists(Path.Combine(directory, subpath)))
- {
- foundpath = Path.Combine(directory, subpath);
- break;
- }
- }
-
- // If we didn't find a path, then we continue
- if (foundpath == null)
- {
- continue;
- }
-
- // If we have a path, we want to try to get the rom information
- GZipArchive archive = new GZipArchive(foundpath);
- BaseFile fileinfo = archive.GetTorrentGZFileInfo();
-
- // If the file information is null, then we continue
- if (fileinfo == null)
- {
- continue;
- }
+ ///
+ /// Represents a format-agnostic DAT
+ ///
+ public class DatFile
+ {
+ #region Private instance variables
+
+ // Internal DatHeader values
+ internal DatHeader _datHeader = new DatHeader();
+
+ // DatItems dictionary
+ internal SortedDictionary<string, List<DatItem>> _items = new SortedDictionary<string, List<DatItem>>();
+
+ // Internal statistical data
+ internal DatStats _datStats = new DatStats();
+
+ #endregion
+
+ #region Publicly facing variables
+
+ #region Data common to most DAT types
+
+ ///
+ /// External name of the DAT
+ ///
+ public string FileName
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.FileName;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.FileName = value;
+ }
+ }
+
+ ///
+ /// Internal name of the DAT
+ ///
+ public string Name
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.Name;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.Name = value;
+ }
+ }
+
+ ///
+ /// DAT description
+ ///
+ public string Description
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.Description;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.Description = value;
+ }
+ }
+
+ ///
+ /// Root directory for the files; currently TruRip/EmuARC-exclusive
+ ///
+ public string RootDir
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.RootDir;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.RootDir = value;
+ }
+ }
+
+ ///
+ /// General category of items found in the DAT
+ ///
+ public string Category
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.Category;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.Category = value;
+ }
+ }
+
+ ///
+ /// Version of the DAT
+ ///
+ public string Version
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.Version;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.Version = value;
+ }
+ }
+
+ ///
+ /// Creation or modification date
+ ///
+ public string Date
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.Date;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.Date = value;
+ }
+ }
+
+ ///
+ /// List of authors who contributed to the DAT
+ ///
+ public string Author
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.Author;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.Author = value;
+ }
+ }
+
+ ///
+ /// Email address for DAT author(s)
+ ///
+ public string Email
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.Email;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.Email = value;
+ }
+ }
+
+ ///
+ /// Author or distribution homepage name
+ ///
+ public string Homepage
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.Homepage;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.Homepage = value;
+ }
+ }
+
+ ///
+ /// Author or distribution URL
+ ///
+ public string Url
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.Url;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.Url = value;
+ }
+ }
+
+ ///
+ /// Any comment that does not already fit an existing field
+ ///
+ public string Comment
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.Comment;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.Comment = value;
+ }
+ }
+
+ ///
+ /// Header skipper to be used when loading the DAT
+ ///
+ public string Header
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.Header;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.Header = value;
+ }
+ }
+
+ ///
+ /// Classification of the DAT. Generally only used for SuperDAT
+ ///
+ public string Type
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.Type;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.Type = value;
+ }
+ }
+
+ ///
+ /// Force a merging style when loaded
+ ///
+ public ForceMerging ForceMerging
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.ForceMerging;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.ForceMerging = value;
+ }
+ }
+
+ ///
+ /// Force nodump handling when loaded
+ ///
+ public ForceNodump ForceNodump
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.ForceNodump;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.ForceNodump = value;
+ }
+ }
+
+ ///
+ /// Force output packing when loaded
+ ///
+ public ForcePacking ForcePacking
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.ForcePacking;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.ForcePacking = value;
+ }
+ }
+
+ ///
+ /// Read or write format
+ ///
+ public DatFormat DatFormat
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.DatFormat;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.DatFormat = value;
+ }
+ }
+
+ ///
+ /// List of fields in machine and items to exclude from writing
+ ///
+ public bool[] ExcludeFields
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.ExcludeFields;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.ExcludeFields = value;
+ }
+ }
+
+ ///
+ /// Enable "One Rom, One Region (1G1R)" mode
+ ///
+ public bool OneRom
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.OneRom;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.OneRom = value;
+ }
+ }
+
+ ///
+ /// Keep machines that don't contain any items
+ ///
+ public bool KeepEmptyGames
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.KeepEmptyGames;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.KeepEmptyGames = value;
+ }
+ }
+
+ ///
+ /// Remove scene dates from the beginning of machine names
+ ///
+ public bool SceneDateStrip
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.SceneDateStrip;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.SceneDateStrip = value;
+ }
+ }
+
+ ///
+ /// Deduplicate items using the given method
+ ///
+ public DedupeType DedupeRoms
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.DedupeRoms;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.DedupeRoms = value;
+ }
+ }
+
+ ///
+ /// Strip hash types from items
+ ///
+ public Hash StripHash
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.StripHash;
+ }
+ }
+
+ ///
+ /// Determine the sorting key for all items
+ ///
+ public SortedBy SortedBy { get; private set; }
+
+ ///
+ /// Determine merging type for all items
+ ///
+ public DedupeType MergedBy { get; private set; }
+
+ #endregion
+
+ #region Write pre-processing
+
+ ///
+ /// Text to prepend to all outputted lines
+ ///
+ public string Prefix
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.Prefix;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.Prefix = value;
+ }
+ }
+
+ ///
+ /// Text to append to all outputted lines
+ ///
+ public string Postfix
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.Postfix;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.Postfix = value;
+ }
+ }
+
+ ///
+ /// Add a new extension to all items
+ ///
+ public string AddExtension
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.AddExtension;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.AddExtension = value;
+ }
+ }
+
+ ///
+ /// Replace all item extensions
+ ///
+ public string ReplaceExtension
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.ReplaceExtension;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.ReplaceExtension = value;
+ }
+ }
+
+ ///
+ /// Remove all item extensions
+ ///
+ public bool RemoveExtension
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.RemoveExtension;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.RemoveExtension = value;
+ }
+ }
+
+ ///
+ /// Romba output mode
+ ///
+ public bool Romba
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.Romba;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.Romba = value;
+ }
+ }
+
+ ///
+ /// Output the machine name
+ ///
+ public bool GameName
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.GameName;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.GameName = value;
+ }
+ }
+
+ ///
+ /// Wrap quotes around the entire line, sans prefix and postfix
+ ///
+ public bool Quotes
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.Quotes;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.Quotes = value;
+ }
+ }
+
+ #endregion
+
+ #region Data specific to the Miss DAT type
+
+ ///
+ /// Output the item name
+ ///
+ public bool UseRomName
+ {
+ get
+ {
+ EnsureDatHeader();
+ return _datHeader.UseRomName;
+ }
+ set
+ {
+ EnsureDatHeader();
+ _datHeader.UseRomName = value;
+ }
+ }
+
+ #endregion
+
+ #region Statistical data related to the DAT
+
+ ///
+ /// Statistics writing format
+ ///
+ public StatReportFormat ReportFormat
+ {
+ get
+ {
+ EnsureDatStats();
+ return _datStats.ReportFormat;
+ }
+ set
+ {
+ EnsureDatStats();
+ _datStats.ReportFormat = value;
+ }
+ }
+
+ ///
+ /// Overall item count
+ ///
+ public long Count
+ {
+ get
+ {
+ EnsureDatStats();
+ return _datStats.Count;
+ }
+ private set
+ {
+ EnsureDatStats();
+ _datStats.Count = value;
+ }
+ }
+
+ ///
+ /// Number of Archive items
+ ///
+ public long ArchiveCount
+ {
+ get
+ {
+ EnsureDatStats();
+ return _datStats.ArchiveCount;
+ }
+ private set
+ {
+ EnsureDatStats();
+ _datStats.ArchiveCount = value;
+ }
+ }
+
+ ///
+ /// Number of BiosSet items
+ ///
+ public long BiosSetCount
+ {
+ get
+ {
+ EnsureDatStats();
+ return _datStats.BiosSetCount;
+ }
+ private set
+ {
+ EnsureDatStats();
+ _datStats.BiosSetCount = value;
+ }
+ }
+
+ ///
+ /// Number of Disk items
+ ///
+ public long DiskCount
+ {
+ get
+ {
+ EnsureDatStats();
+ return _datStats.DiskCount;
+ }
+ private set
+ {
+ EnsureDatStats();
+ _datStats.DiskCount = value;
+ }
+ }
+
+ ///
+ /// Number of Release items
+ ///
+ public long ReleaseCount
+ {
+ get
+ {
+ EnsureDatStats();
+ return _datStats.ReleaseCount;
+ }
+ private set
+ {
+ EnsureDatStats();
+ _datStats.ReleaseCount = value;
+ }
+ }
+
+ ///
+ /// Number of Rom items
+ ///
+ public long RomCount
+ {
+ get
+ {
+ EnsureDatStats();
+ return _datStats.RomCount;
+ }
+ private set
+ {
+ EnsureDatStats();
+ _datStats.RomCount = value;
+ }
+ }
+
+ ///
+ /// Number of Sample items
+ ///
+ public long SampleCount
+ {
+ get
+ {
+ EnsureDatStats();
+ return _datStats.SampleCount;
+ }
+ private set
+ {
+ EnsureDatStats();
+ _datStats.SampleCount = value;
+ }
+ }
+
+ ///
+ /// Total uncompressed size
+ ///
+ public long TotalSize
+ {
+ get
+ {
+ EnsureDatStats();
+ return _datStats.TotalSize;
+ }
+ private set
+ {
+ EnsureDatStats();
+ _datStats.TotalSize = value;
+ }
+ }
+
+ ///
+ /// Number of items with a CRC hash
+ ///
+ public long CRCCount
+ {
+ get
+ {
+ EnsureDatStats();
+ return _datStats.CRCCount;
+ }
+ private set
+ {
+ EnsureDatStats();
+ _datStats.CRCCount = value;
+ }
+ }
+
+ ///
+ /// Number of items with an MD5 hash
+ ///
+ public long MD5Count
+ {
+ get
+ {
+ EnsureDatStats();
+ return _datStats.MD5Count;
+ }
+ private set
+ {
+ EnsureDatStats();
+ _datStats.MD5Count = value;
+ }
+ }
+
+ ///
+ /// Number of items with a SHA-1 hash
+ ///
+ public long SHA1Count
+ {
+ get
+ {
+ EnsureDatStats();
+ return _datStats.SHA1Count;
+ }
+ private set
+ {
+ EnsureDatStats();
+ _datStats.SHA1Count = value;
+ }
+ }
+
+ ///
+ /// Number of items with a SHA-256 hash
+ ///
+ public long SHA256Count
+ {
+ get
+ {
+ EnsureDatStats();
+ return _datStats.SHA256Count;
+ }
+ private set
+ {
+ EnsureDatStats();
+ _datStats.SHA256Count = value;
+ }
+ }
+
+ ///
+ /// Number of items with a SHA-384 hash
+ ///
+ public long SHA384Count
+ {
+ get
+ {
+ EnsureDatStats();
+ return _datStats.SHA384Count;
+ }
+ private set
+ {
+ EnsureDatStats();
+ _datStats.SHA384Count = value;
+ }
+ }
+
+ ///
+ /// Number of items with a SHA-512 hash
+ ///
+ public long SHA512Count
+ {
+ get
+ {
+ EnsureDatStats();
+ return _datStats.SHA512Count;
+ }
+ private set
+ {
+ EnsureDatStats();
+ _datStats.SHA512Count = value;
+ }
+ }
+
+ ///
+ /// Number of items with the baddump status
+ ///
+ public long BaddumpCount
+ {
+ get
+ {
+ EnsureDatStats();
+ return _datStats.BaddumpCount;
+ }
+ private set
+ {
+ EnsureDatStats();
+ _datStats.BaddumpCount = value;
+ }
+ }
+
+ ///
+ /// Number of items with the good status
+ ///
+ public long GoodCount
+ {
+ get
+ {
+ EnsureDatStats();
+ return _datStats.GoodCount;
+ }
+ private set
+ {
+ EnsureDatStats();
+ _datStats.GoodCount = value;
+ }
+ }
+
+ ///
+ /// Number of items with the nodump status
+ ///
+ public long NodumpCount
+ {
+ get
+ {
+ EnsureDatStats();
+ return _datStats.NodumpCount;
+ }
+ private set
+ {
+ EnsureDatStats();
+ _datStats.NodumpCount = value;
+ }
+ }
+
+ ///
+ /// Number of items with the verified status
+ ///
+ public long VerifiedCount
+ {
+ get
+ {
+ EnsureDatStats();
+ return _datStats.VerifiedCount;
+ }
+ private set
+ {
+ EnsureDatStats();
+ _datStats.VerifiedCount = value;
+ }
+ }
+
+ #endregion
+
+ #endregion
+
+ #region Instance Methods
+
+ #region Accessors
+
+ /// <summary>
+ /// Passthrough to access the file dictionary
+ /// </summary>
+ /// <param name="key">Key in the dictionary to reference</param>
+ /// <remarks>We don't want to allow direct setting of values because it bypasses the statistics</remarks>
+ public List<DatItem> this[string key]
+ {
+ get
+ {
+ // Ensure the dictionary is created
+ EnsureDictionary();
+
+ lock (_items)
+ {
+ // Ensure the key exists
+ EnsureKey(key);
+
+ // Now return the value
+ return _items[key];
+ }
+ }
+ }
+
+ ///
+ /// Add a new key to the file dictionary
+ ///
+ /// Key in the dictionary to add
+ public void Add(string key)
+ {
+ // Ensure the dictionary is created
+ EnsureDictionary();
+
+ lock (_items)
+ {
+ // Ensure the key exists
+ EnsureKey(key);
+ }
+ }
+
+ ///
+ /// Add a value to the file dictionary
+ ///
+ /// Key in the dictionary to add to
+ /// Value to add to the dictionary
+ public void Add(string key, DatItem value)
+ {
+ // Ensure the dictionary is created
+ EnsureDictionary();
+
+ // Add the key, if necessary
+ Add(key);
+
+ lock (_items)
+ {
+ // Now add the value
+ _items[key].Add(value);
+
+ // Now update the statistics
+ _datStats.AddItem(value);
+ }
+ }
+
+ ///
+ /// Add a range of values to the file dictionary
+ ///
+ /// Key in the dictionary to add to
+ /// Value to add to the dictionary
+ public void AddRange(string key, List<DatItem> value)
+ {
+ // Ensure the dictionary is created
+ EnsureDictionary();
+
+ // Add the key, if necessary
+ Add(key);
+
+ lock (_items)
+ {
+ // Now add the value
+ _items[key].AddRange(value);
+
+ // Now update the statistics
+ foreach (DatItem item in value)
+ {
+ _datStats.AddItem(item);
+ }
+ }
+ }
+
+ ///
+ /// Get if the file dictionary contains the key
+ ///
+ /// Key in the dictionary to check
+ /// True if the key exists, false otherwise
+ public bool Contains(string key)
+ {
+ bool contains = false;
+
+ // Ensure the dictionary is created
+ EnsureDictionary();
+
+ // If the key is null, we return false since keys can't be null
+ if (key == null)
+ {
+ return contains;
+ }
+
+ lock (_items)
+ {
+ contains = _items.ContainsKey(key);
+ }
+
+ return contains;
+ }
+
+ ///
+ /// Get if the file dictionary contains the key and value
+ ///
+ /// Key in the dictionary to check
+ /// Value in the dictionary to check
+ /// True if the key exists, false otherwise
+ public bool Contains(string key, DatItem value)
+ {
+ bool contains = false;
+
+ // Ensure the dictionary is created
+ EnsureDictionary();
+
+ // If the key is null, we return false since keys can't be null
+ if (key == null)
+ {
+ return contains;
+ }
+
+ lock (_items)
+ {
+ if (_items.ContainsKey(key))
+ {
+ contains = _items[key].Contains(value);
+ }
+ }
+
+ return contains;
+ }
+
+ ///
+ /// Get the keys from the file dictionary
+ ///
+ /// List of the keys
+ public List<string> Keys
+ {
+ get
+ {
+ // Ensure the dictionary is created
+ EnsureDictionary();
+
+ lock (_items)
+ {
+ return _items.Keys.Select(item => (String)item.Clone()).ToList();
+ }
+ }
+ }
+
+ ///
+ /// Remove a key from the file dictionary if it exists
+ ///
+ /// Key in the dictionary to remove
+ public void Remove(string key)
+ {
+ // Ensure the dictionary is created
+ EnsureDictionary();
+
+ // If the key doesn't exist, return
+ if (!Contains(key))
+ {
+ return;
+ }
+
+ lock (_items)
+ {
+ // Remove the statistics first
+ foreach (DatItem item in _items[key])
+ {
+ _datStats.RemoveItem(item);
+ }
+
+ // Remove the key from the dictionary
+ _items.Remove(key);
+ }
+ }
+
+ ///
+ /// Remove the first instance of a value from the file dictionary if it exists
+ ///
+ /// Key in the dictionary to remove from
+ /// Value to remove from the dictionary
+ public void Remove(string key, DatItem value)
+ {
+ // Ensure the dictionary is created
+ EnsureDictionary();
+
+ // If the key and value doesn't exist, return
+ if (!Contains(key, value))
+ {
+ return;
+ }
+
+ lock (_items)
+ {
+ // Remove the statistics first
+ _datStats.RemoveItem(value);
+
+ _items[key].Remove(value);
+ }
+ }
+
+ ///
+ /// Remove a range of values from the file dictionary if they exists
+ ///
+ /// Key in the dictionary to remove from
+ /// Value to remove from the dictionary
+ public void RemoveRange(string key, List<DatItem> value)
+ {
+ foreach(DatItem item in value)
+ {
+ Remove(key, item);
+ }
+ }
+
+ ///
+ /// Ensure the DatHeader
+ ///
+ private void EnsureDatHeader()
+ {
+ if (_datHeader == null)
+ {
+ _datHeader = new DatHeader();
+ }
+ }
+
+ ///
+ /// Ensure the DatStats
+ ///
+ private void EnsureDatStats()
+ {
+ if (_datStats == null)
+ {
+ _datStats = new DatStats();
+ }
+ }
+
+ ///
+ /// Ensure the items dictionary
+ ///
+ private void EnsureDictionary()
+ {
+ // If the dictionary is null, create it
+ if (_items == null)
+ {
+ _items = new SortedDictionary<string, List<DatItem>>();
+ }
+ }
+
+ ///
+ /// Ensure the key exists in the items dictionary
+ ///
+ /// Key to ensure
+ private void EnsureKey(string key)
+ {
+ // If the key is missing from the dictionary, add it
+ if (!_items.ContainsKey(key))
+ {
+ _items.Add(key, new List<DatItem>());
+ }
+ }
+
+ #endregion
+
+ #region Bucketing
+
+ /// <summary>
+ /// Take the arbitrarily sorted Files Dictionary and convert to one sorted by a user-defined method
+ /// </summary>
+ /// <param name="bucketBy">SortedBy enum representing how to sort the individual items</param>
+ /// <param name="deduperoms">Dedupe type that should be used</param>
+ /// <param name="lower">True if the key should be lowercased (default), false otherwise</param>
+ /// <param name="norename">True if games should only be compared on game and file name, false if system and source are counted</param>
+ public void BucketBy(SortedBy bucketBy, DedupeType deduperoms, bool lower = true, bool norename = true)
+ {
+ // If we have a situation where there's no dictionary or no keys at all, we skip
+ if (_items == null || _items.Count == 0)
+ {
+ return;
+ }
+
+ // If the sorted type isn't the same, we want to sort the dictionary accordingly
+ if (this.SortedBy != bucketBy)
+ {
+ Globals.Logger.User("Organizing roms by {0}", bucketBy);
+
+ // Set the sorted type
+ this.SortedBy = bucketBy;
+
+ // Reset the merged type since this might change the merge
+ this.MergedBy = DedupeType.None;
+
+ // First do the initial sort of all of the roms inplace
+ List<string> oldkeys = Keys;
+ for (int k = 0; k < oldkeys.Count; k++)
+ {
+ string key = oldkeys[k];
+
+ // Get the unsorted current list
+ List<DatItem> roms = this[key];
+
+ // Now add each of the roms to their respective keys
+ for (int i = 0; i < roms.Count; i++)
+ {
+ DatItem rom = roms[i];
+
+ // We want to get the key most appropriate for the given sorting type
+ string newkey = Utilities.GetKeyFromDatItem(rom, bucketBy, lower, norename);
+
+ // If the key is different, move the item to the new key
+ if (newkey != key)
+ {
+ Add(newkey, rom);
+ Remove(key, rom);
+ i--; // This makes sure the index stays correct since an item was removed
+ }
+ }
+
+ // If the key is now empty, remove it
+ if (this[key].Count == 0)
+ {
+ Remove(key);
+ }
+ }
+ }
+
+ // If the merge type isn't the same, we want to merge the dictionary accordingly
+ if (this.MergedBy != deduperoms)
+ {
+ Globals.Logger.User("Deduping roms by {0}", deduperoms);
+
+ // Set the sorted type
+ this.MergedBy = deduperoms;
+
+ List<string> keys = Keys;
+ Parallel.ForEach(keys, Globals.ParallelOptions, key =>
+ {
+ // Get the possibly unsorted list
+ List<DatItem> sortedlist = this[key];
+
+ // Sort the list of items to be consistent
+ DatItem.Sort(ref sortedlist, false);
+
+ // If we're merging the roms, do so
+ if (deduperoms == DedupeType.Full || (deduperoms == DedupeType.Game && bucketBy == SortedBy.Game))
+ {
+ sortedlist = DatItem.Merge(sortedlist);
+ }
+
+ // Add the list back to the dictionary
+ Remove(key);
+ AddRange(key, sortedlist);
+ });
+ }
+ // If the merge type is the same, we want to sort the dictionary to be consistent
+ else
+ {
+ List<string> keys = Keys;
+ Parallel.ForEach(keys, Globals.ParallelOptions, key =>
+ {
+ // Get the possibly unsorted list
+ List<DatItem> sortedlist = this[key];
+
+ // Sort the list of items to be consistent
+ DatItem.Sort(ref sortedlist, false);
+ });
+ }
+
+ // Now clean up all empty keys
+ CleanEmptyKeys();
+ }
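+
+ // Illustrative usage sketch (editorial addition, not part of the original source): re-keying
+ // the dictionary by game name and fully deduping within each game could look like:
+ //
+ //     datFile.BucketBy(SortedBy.Game, DedupeType.Full, lower: true, norename: true);
+ //
+ // Changing the sort key resets MergedBy, so a later call can re-merge with a different dedupe type.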
+
+ ///
+ /// Take the arbitrarily sorted Files Dictionary and convert to one sorted by the highest available hash
+ ///
+ /// Dedupe type that should be used (default none)
+ /// True if the key should be lowercased (default), false otherwise
+ /// True if games should only be compared on game and file name, false if system and source are counted
+ public void BucketByBestAvailable(DedupeType deduperoms = DedupeType.None, bool lower = true, bool norename = true)
+ {
+ // If all items are supposed to have a SHA-512, we sort by that
+ if (RomCount + DiskCount - NodumpCount == SHA512Count)
+ {
+ BucketBy(SortedBy.SHA512, deduperoms, lower, norename);
+ }
+
+ // If all items are supposed to have a SHA-384, we sort by that
+ else if (RomCount + DiskCount - NodumpCount == SHA384Count)
+ {
+ BucketBy(SortedBy.SHA384, deduperoms, lower, norename);
+ }
+
+ // If all items are supposed to have a SHA-256, we sort by that
+ else if (RomCount + DiskCount - NodumpCount == SHA256Count)
+ {
+ BucketBy(SortedBy.SHA256, deduperoms, lower, norename);
+ }
+
+ // If all items are supposed to have a SHA-1, we sort by that
+ else if (RomCount + DiskCount - NodumpCount == SHA1Count)
+ {
+ BucketBy(SortedBy.SHA1, deduperoms, lower, norename);
+ }
+
+ // If all items are supposed to have a MD5, we sort by that
+ else if (RomCount + DiskCount - NodumpCount == MD5Count)
+ {
+ BucketBy(SortedBy.MD5, deduperoms, lower, norename);
+ }
+
+ // Otherwise, we sort by CRC
+ else
+ {
+ BucketBy(SortedBy.CRC, deduperoms, lower, norename);
+ }
+ }
+
+ ///
+ /// Clean out all empty keys in the dictionary
+ ///
+ private void CleanEmptyKeys()
+ {
+ List<string> keys = Keys;
+ foreach(string key in keys)
+ {
+ if (this[key].Count == 0)
+ {
+ Remove(key);
+ }
+ }
+ }
+
+ #endregion
+
+ #region Constructors
+
+ ///
+ /// Create a new, empty DatFile object
+ ///
+ public DatFile()
+ {
+ _items = new SortedDictionary<string, List<DatItem>>();
+ }
+
+ ///
+ /// Create a new DatFile from an existing one
+ ///
+ /// DatFile to get the values from
+ /// True if only the header should be cloned (default), false if this should be a reference to another DatFile
+ public DatFile(DatFile datFile, bool cloneHeader = true)
+ {
+ if (cloneHeader)
+ {
+ this._datHeader = (DatHeader)datFile._datHeader.Clone();
+ }
+ else
+ {
+ this._datHeader = datFile._datHeader;
+ this._items = datFile._items;
+ this.SortedBy = datFile.SortedBy;
+ this.MergedBy = datFile.MergedBy;
+ this._datStats = datFile._datStats;
+ }
+ }
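+
+ // Illustrative usage sketch (not from the original source), with "existingDat" as a placeholder
+ // name: the default call clones only the header, while passing cloneHeader: false shares the
+ // item dictionary, sorting state, and statistics with the source object.
+ //
+ // DatFile headerOnly = new DatFile(existingDat);
+ // DatFile reference = new DatFile(existingDat, cloneHeader: false);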
+
+ ///
+ /// Create a new DatFile from an existing DatHeader
+ ///
+ /// DatHeader to get the values from
+ public DatFile(DatHeader datHeader)
+ {
+ _datHeader = (DatHeader)datHeader.Clone();
+ }
+
+ #endregion
+
+ #region Converting and Updating
+
+ ///
+ /// Determine if input files should be merged, diffed, or processed individually
+ ///
+ /// Names of the input files and/or folders
+ /// Names of base files and/or folders
+ /// Optional param for output directory
+ /// Non-zero flag for diffing mode, zero otherwise
+ /// True if the output files should overwrite their inputs, false otherwise
+ /// True if the first cascaded diff file should be skipped on output, false otherwise
+ /// True to clean the game names to WoD standard, false otherwise (default)
+ /// True if we should remove non-ASCII characters from output, false otherwise (default)
+ /// True to use game descriptions as the names, false otherwise (default)
+ /// Filter object to be passed to the DatItem level
+ /// Type of the split that should be performed (split, merged, fully merged)
+ /// ReplaceMode representing what should be updated [only for base replacement]
+ /// True if descriptions should only be replaced if the game name is the same, false otherwise [only for base replacement]
+ public void DetermineUpdateType(List<string> inputPaths, List<string> basePaths, string outDir, UpdateMode updateMode, bool inplace, bool skip,
+ bool clean, bool remUnicode, bool descAsName, Filter filter, SplitType splitType, ReplaceMode replaceMode, bool onlySame)
+ {
+ // Ensure we only have files in the inputs
+ List<string> inputFileNames = Utilities.GetOnlyFilesFromInputs(inputPaths, appendparent: true);
+ List<string> baseFileNames = Utilities.GetOnlyFilesFromInputs(basePaths);
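+
+ // Note: the helper above appears to return '¬'-delimited "file¬parent" entries when
+ // appendparent is set; later code reads index 0 as the file path and index 1 as the
+ // parent root (see the Split('¬') calls below and in MergeNoDiff).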
+
+ // If we're in standard update mode, run through all of the inputs
+ if (updateMode == UpdateMode.None)
+ {
+ Update(inputFileNames, outDir, inplace, clean, remUnicode, descAsName, filter, splitType);
+ return;
+ }
+
+ // Reverse inputs if we're in a required mode
+ if ((updateMode & UpdateMode.DiffReverseCascade) != 0)
+ {
+ inputFileNames.Reverse();
+ }
+ if ((updateMode & UpdateMode.ReverseBaseReplace) != 0)
+ {
+ baseFileNames.Reverse();
+ }
+
+ // If we're in merging mode
+ if ((updateMode & UpdateMode.Merge) != 0)
+ {
+ // Populate the combined data and get the headers
+ List<DatFile> datHeaders = PopulateUserData(inputFileNames, inplace, clean, remUnicode, descAsName, outDir, filter, splitType);
+ MergeNoDiff(inputFileNames, datHeaders, outDir);
+ }
+ // If we have one of the standard diffing modes
+ else if ((updateMode & UpdateMode.DiffDupesOnly) != 0
+ || (updateMode & UpdateMode.DiffNoDupesOnly) != 0
+ || (updateMode & UpdateMode.DiffIndividualsOnly) != 0)
+ {
+ // Populate the combined data
+ PopulateUserData(inputFileNames, inplace, clean, remUnicode, descAsName, outDir, filter, splitType);
+ DiffNoCascade(inputFileNames, outDir, filter, updateMode);
+ }
+ // If we have one of the cascaded diffing modes
+ else if ((updateMode & UpdateMode.DiffCascade) != 0
+ || (updateMode & UpdateMode.DiffReverseCascade) != 0)
+ {
+ // Populate the combined data and get the headers
+ List<DatFile> datHeaders = PopulateUserData(inputFileNames, inplace, clean, remUnicode, descAsName, outDir, filter, splitType);
+ DiffCascade(inputFileNames, datHeaders, outDir, inplace, skip);
+ }
+ // If we have diff against mode
+ else if ((updateMode & UpdateMode.DiffAgainst) != 0)
+ {
+ // Populate the combined data
+ PopulateUserData(baseFileNames, inplace, clean, remUnicode, descAsName, outDir, filter, splitType);
+ DiffAgainst(inputFileNames, outDir, inplace, clean, remUnicode, descAsName, filter, splitType);
+ }
+ // If we have one of the base replacement modes
+ else if ((updateMode & UpdateMode.BaseReplace) != 0
+ || (updateMode & UpdateMode.ReverseBaseReplace) != 0)
+ {
+ // Populate the combined data
+ PopulateUserData(baseFileNames, inplace, clean, remUnicode, descAsName, outDir, filter, splitType);
+ BaseReplace(inputFileNames, outDir, inplace, clean, remUnicode, descAsName, filter, splitType, replaceMode, onlySame);
+ }
+
+ return;
+ }
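+
+ // Illustrative usage sketch (not from the original source): run a "diff against" update where
+ // the base DATs are loaded into this object and each input is reduced to items not in the base.
+ // "inputPaths", "basePaths", "outDir", "filter", and "replaceMode" are assumed to be prepared
+ // by the caller.
+ //
+ // DatFile baseDat = new DatFile();
+ // baseDat.DetermineUpdateType(inputPaths, basePaths, outDir, UpdateMode.DiffAgainst,
+ //     inplace: false, skip: false, clean: false, remUnicode: false, descAsName: false,
+ //     filter: filter, splitType: SplitType.None, replaceMode: replaceMode, onlySame: false);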
+
+ ///
+ /// Populate the user DatData object from the input files
+ ///
+ /// Paths to DATs to parse
+ /// True if the output files should overwrite their inputs, false otherwise
+ /// True to clean the game names to WoD standard, false otherwise (default)
+ /// True if we should remove non-ASCII characters from output, false otherwise (default)
+ /// True to use game descriptions as the names, false otherwise (default)
+ /// Optional param for output directory
+ /// Filter object to be passed to the DatItem level
+ /// Type of the split that should be performed (split, merged, fully merged)
+ /// List of DatData objects representing headers
+ private List<DatFile> PopulateUserData(List<string> inputs, bool inplace, bool clean, bool remUnicode, bool descAsName,
+ string outDir, Filter filter, SplitType splitType)
+ {
+ DatFile[] datHeaders = new DatFile[inputs.Count];
+ InternalStopwatch watch = new InternalStopwatch("Processing individual DATs");
+
+ // Parse all of the DATs into their own DatFiles in the array
+ Parallel.For(0, inputs.Count, Globals.ParallelOptions, i =>
+ {
+ string input = inputs[i];
+ Globals.Logger.User("Adding DAT: {0}", input.Split('¬')[0]);
+ datHeaders[i] = new DatFile()
+ {
+ DatFormat = (this.DatFormat != 0 ? this.DatFormat : 0),
+
+ // Filtering that needs to be copied over
+ ExcludeFields = (bool[])this.ExcludeFields.Clone(),
+ OneRom = this.OneRom,
+ KeepEmptyGames = this.KeepEmptyGames,
+ SceneDateStrip = this.SceneDateStrip,
+ DedupeRoms = this.DedupeRoms,
+ Prefix = this.Prefix,
+ Postfix = this.Postfix,
+ AddExtension = this.AddExtension,
+ ReplaceExtension = this.ReplaceExtension,
+ RemoveExtension = this.RemoveExtension,
+ Romba = this.Romba,
+ GameName = this.GameName,
+ Quotes = this.Quotes,
+ UseRomName = this.UseRomName,
+ };
+
+ datHeaders[i].Parse(input, i, i, splitType, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName);
+ });
+
+ watch.Stop();
+
+ watch.Start("Populating internal DAT");
+ Parallel.For(0, inputs.Count, Globals.ParallelOptions, i =>
+ {
+ // Get the list of keys from the DAT
+ List<string> keys = datHeaders[i].Keys;
+ foreach (string key in keys)
+ {
+ // Add everything from the key to the internal DAT
+ AddRange(key, datHeaders[i][key]);
+
+ // Now remove the key from the source DAT
+ datHeaders[i].Remove(key);
+ }
+
+ // Now remove the file dictionary from the source DAT to save memory
+ datHeaders[i].DeleteDictionary();
+ });
+
+ // Now that we have a merged DAT, filter it
+ filter.FilterDatFile(this);
+
+ watch.Stop();
+
+ return datHeaders.ToList();
+ }
+
+ ///
+ /// Replace item values from the base set represented by the current DAT
+ ///
+ /// Names of the input files
+ /// Optional param for output directory
+ /// True if the output files should overwrite their inputs, false otherwise
+ /// True to clean the game names to WoD standard, false otherwise (default)
+ /// True if we should remove non-ASCII characters from output, false otherwise (default)
+ /// True to allow SL DATs to have game names used instead of descriptions, false otherwise (default)
+ /// Filter object to be passed to the DatItem level
+ /// Type of the split that should be performed (split, merged, fully merged)
+ /// ReplaceMode representing what should be updated
+ /// True if descriptions should only be replaced if the game name is the same, false otherwise
+ public void BaseReplace(List<string> inputFileNames, string outDir, bool inplace, bool clean, bool remUnicode,
+ bool descAsName, Filter filter, SplitType splitType, ReplaceMode replaceMode, bool onlySame)
+ {
+ // We want to try to replace each item in each input DAT from the base
+ foreach (string path in inputFileNames)
+ {
+ Globals.Logger.User("Replacing items in '{0}' from the base DAT", path.Split('¬')[0]);
+
+ // First we parse in the DAT internally
+ DatFile intDat = new DatFile()
+ {
+ DatFormat = (this.DatFormat != 0 ? this.DatFormat : 0),
+
+ // Filtering that needs to be copied over
+ ExcludeFields = (bool[])this.ExcludeFields.Clone(),
+ OneRom = this.OneRom,
+ KeepEmptyGames = this.KeepEmptyGames,
+ SceneDateStrip = this.SceneDateStrip,
+ DedupeRoms = this.DedupeRoms,
+ Prefix = this.Prefix,
+ Postfix = this.Postfix,
+ AddExtension = this.AddExtension,
+ ReplaceExtension = this.ReplaceExtension,
+ RemoveExtension = this.RemoveExtension,
+ Romba = this.Romba,
+ GameName = this.GameName,
+ Quotes = this.Quotes,
+ UseRomName = this.UseRomName,
+ };
+
+ intDat.Parse(path, 1, 1, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName);
+ filter.FilterDatFile(intDat);
+
+ // If we are matching based on hashes of any sort
+ if ((replaceMode & ReplaceMode.ItemName) != 0
+ || (replaceMode & ReplaceMode.Hash) != 0)
+ {
+ // For comparison's sake, we want to use CRC as the base ordering
+ BucketBy(SortedBy.CRC, DedupeType.Full);
+ intDat.BucketBy(SortedBy.CRC, DedupeType.None);
+
+ // Then we do a hashwise comparison against the base DAT
+ List<string> keys = intDat.Keys;
+ Parallel.ForEach(keys, Globals.ParallelOptions, key =>
+ {
+ List<DatItem> datItems = intDat[key];
+ List<DatItem> newDatItems = new List<DatItem>();
+ foreach (DatItem datItem in datItems)
+ {
+ // If we have something other than a Rom or Disk, then this doesn't do anything
+ if (datItem.Type != ItemType.Disk && datItem.Type != ItemType.Rom)
+ {
+ newDatItems.Add((DatItem)datItem.Clone());
+ continue;
+ }
+
+ List<DatItem> dupes = datItem.GetDuplicates(this, sorted: true);
+ DatItem newDatItem = (DatItem)datItem.Clone();
+
+ if (dupes.Count > 0)
+ {
+ // If we're updating names, replace using the first found name
+ if ((replaceMode & ReplaceMode.ItemName) != 0)
+ {
+ newDatItem.Name = dupes[0].Name;
+ }
+
+ // If we're updating hashes, only replace if the current item doesn't have them
+ if ((replaceMode & ReplaceMode.Hash) != 0)
+ {
+ if (newDatItem.Type == ItemType.Rom)
+ {
+ Rom newRomItem = (Rom)newDatItem;
+ if (String.IsNullOrEmpty(newRomItem.CRC) && !String.IsNullOrEmpty(((Rom)dupes[0]).CRC))
+ {
+ newRomItem.CRC = ((Rom)dupes[0]).CRC;
+ }
+ if (String.IsNullOrEmpty(newRomItem.MD5) && !String.IsNullOrEmpty(((Rom)dupes[0]).MD5))
+ {
+ newRomItem.MD5 = ((Rom)dupes[0]).MD5;
+ }
+ if (String.IsNullOrEmpty(newRomItem.SHA1) && !String.IsNullOrEmpty(((Rom)dupes[0]).SHA1))
+ {
+ newRomItem.SHA1 = ((Rom)dupes[0]).SHA1;
+ }
+ if (String.IsNullOrEmpty(newRomItem.SHA256) && !String.IsNullOrEmpty(((Rom)dupes[0]).SHA256))
+ {
+ newRomItem.SHA256 = ((Rom)dupes[0]).SHA256;
+ }
+ if (String.IsNullOrEmpty(newRomItem.SHA384) && !String.IsNullOrEmpty(((Rom)dupes[0]).SHA384))
+ {
+ newRomItem.SHA384 = ((Rom)dupes[0]).SHA384;
+ }
+ if (String.IsNullOrEmpty(newRomItem.SHA512) && !String.IsNullOrEmpty(((Rom)dupes[0]).SHA512))
+ {
+ newRomItem.SHA512 = ((Rom)dupes[0]).SHA512;
+ }
+
+ newDatItem = (Rom)newRomItem.Clone();
+ }
+ else if (newDatItem.Type == ItemType.Disk)
+ {
+ Disk newDiskItem = (Disk)newDatItem;
+ if (String.IsNullOrEmpty(newDiskItem.MD5) && !String.IsNullOrEmpty(((Disk)dupes[0]).MD5))
+ {
+ newDiskItem.MD5 = ((Disk)dupes[0]).MD5;
+ }
+ if (String.IsNullOrEmpty(newDiskItem.SHA1) && !String.IsNullOrEmpty(((Disk)dupes[0]).SHA1))
+ {
+ newDiskItem.SHA1 = ((Disk)dupes[0]).SHA1;
+ }
+ if (String.IsNullOrEmpty(newDiskItem.SHA256) && !String.IsNullOrEmpty(((Disk)dupes[0]).SHA256))
+ {
+ newDiskItem.SHA256 = ((Disk)dupes[0]).SHA256;
+ }
+ if (String.IsNullOrEmpty(newDiskItem.SHA384) && !String.IsNullOrEmpty(((Disk)dupes[0]).SHA384))
+ {
+ newDiskItem.SHA384 = ((Disk)dupes[0]).SHA384;
+ }
+ if (String.IsNullOrEmpty(newDiskItem.SHA512) && !String.IsNullOrEmpty(((Disk)dupes[0]).SHA512))
+ {
+ newDiskItem.SHA512 = ((Disk)dupes[0]).SHA512;
+ }
+
+ newDatItem = (Disk)newDiskItem.Clone();
+ }
+ }
+ }
+
+ newDatItems.Add(newDatItem);
+ }
+
+ // Now add the new list to the key
+ intDat.Remove(key);
+ intDat.AddRange(key, newDatItems);
+ });
+ }
+
+ // If we are matching based on names of any sort
+ if ((replaceMode & ReplaceMode.Description) != 0
+ || (replaceMode & ReplaceMode.MachineType) != 0
+ || (replaceMode & ReplaceMode.Year) != 0
+ || (replaceMode & ReplaceMode.Manufacturer) != 0
+ || (replaceMode & ReplaceMode.Parents) != 0)
+ {
+ // For comparison's sake, we want to use Machine Name as the base ordering
+ BucketBy(SortedBy.Game, DedupeType.Full);
+ intDat.BucketBy(SortedBy.Game, DedupeType.None);
+
+ // Then we do a namewise comparison against the base DAT
+ List<string> keys = intDat.Keys;
+ Parallel.ForEach(keys, Globals.ParallelOptions, key =>
+ {
+ List<DatItem> datItems = intDat[key];
+ List<DatItem> newDatItems = new List<DatItem>();
+ foreach (DatItem datItem in datItems)
+ {
+ DatItem newDatItem = (DatItem)datItem.Clone();
+ if (Contains(key) && this[key].Count() > 0)
+ {
+ if ((replaceMode & ReplaceMode.Description) != 0)
+ {
+ if (!onlySame || (onlySame && newDatItem.MachineName == newDatItem.MachineDescription))
+ {
+ newDatItem.MachineDescription = this[key][0].MachineDescription;
+ }
+ }
+ if ((replaceMode & ReplaceMode.MachineType) != 0)
+ {
+ newDatItem.MachineType = this[key][0].MachineType;
+ }
+ if ((replaceMode & ReplaceMode.Year) != 0)
+ {
+ newDatItem.Year = this[key][0].Year;
+ }
+ if ((replaceMode & ReplaceMode.Manufacturer) != 0)
+ {
+ newDatItem.Manufacturer = this[key][0].Manufacturer;
+ }
+ if ((replaceMode & ReplaceMode.Parents) != 0)
+ {
+ newDatItem.CloneOf = this[key][0].CloneOf;
+ newDatItem.RomOf = this[key][0].RomOf;
+ newDatItem.SampleOf = this[key][0].SampleOf;
+ }
+ }
+
+ newDatItems.Add(newDatItem);
+ }
+
+ // Now add the new list to the key
+ intDat.Remove(key);
+ intDat.AddRange(key, newDatItems);
+ });
+ }
+
+ // Determine the output path for the DAT
+ string interOutDir = Utilities.GetOutputPath(outDir, path, inplace);
+
+ // Once we're done, try writing out
+ intDat.Write(interOutDir, overwrite: inplace);
+
+ // Due to possible memory requirements, we force a garbage collection
+ GC.Collect();
+ }
+ }
+
+ ///
+ /// Output diffs against a base set represented by the current DAT
+ ///
+ /// Names of the input files
+ /// Optional param for output directory
+ /// True if the output files should overwrite their inputs, false otherwise
+ /// True to clean the game names to WoD standard, false otherwise (default)
+ /// True if we should remove non-ASCII characters from output, false otherwise (default)
+ /// True to use game descriptions as the names, false otherwise (default)
+ /// Filter object to be passed to the DatItem level
+ /// Type of the split that should be performed (split, merged, fully merged)
+ public void DiffAgainst(List<string> inputFileNames, string outDir, bool inplace, bool clean, bool remUnicode,
+ bool descAsName, Filter filter, SplitType splitType)
+ {
+ // For comparison's sake, we want to use CRC as the base ordering
+ BucketBy(SortedBy.CRC, DedupeType.Full);
+
+ // Now we want to compare each input DAT against the base
+ foreach (string path in inputFileNames)
+ {
+ Globals.Logger.User("Comparing '{0}' to base DAT", path.Split('¬')[0]);
+
+ // First we parse in the DAT internally
+ DatFile intDat = new DatFile();
+ intDat.Parse(path, 1, 1, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName);
+
+ // For comparison's sake, we want to use CRC as the base ordering
+ intDat.BucketBy(SortedBy.CRC, DedupeType.Full);
+
+ // Then we do a hashwise comparison against the base DAT
+ List<string> keys = intDat.Keys;
+ Parallel.ForEach(keys, Globals.ParallelOptions, key =>
+ {
+ List<DatItem> datItems = intDat[key];
+ List<DatItem> keepDatItems = new List<DatItem>();
+ foreach (DatItem datItem in datItems)
+ {
+ if (!datItem.HasDuplicates(this, true))
+ {
+ keepDatItems.Add(datItem);
+ }
+ }
+
+ // Now add the new list to the key
+ intDat.Remove(key);
+ intDat.AddRange(key, keepDatItems);
+ });
+
+ // Determine the output path for the DAT
+ string interOutDir = Utilities.GetOutputPath(outDir, path, inplace);
+
+ // Once we're done, try writing out
+ intDat.Write(interOutDir, overwrite: inplace);
+
+ // Due to possible memory requirements, we force a garbage collection
+ GC.Collect();
+ }
+ }
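+
+ // Illustrative usage sketch (not from the original source): with the current DatFile acting as
+ // the base set, keep only the items from each input DAT that have no match in the base.
+ // "inputFileNames", "outDir", and "filter" are assumed to be prepared by the caller.
+ //
+ // baseDat.DiffAgainst(inputFileNames, outDir, inplace: false, clean: false, remUnicode: false,
+ //     descAsName: false, filter: filter, splitType: SplitType.None);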
+
+ ///
+ /// Output cascading diffs
+ ///
+ /// List of inputs to write out from
+ /// Dat headers used optionally
+ /// Output directory to write the DATs to
+ /// True if cascaded diffs are outputted in-place, false otherwise
+ /// True if the first cascaded diff file should be skipped on output, false otherwise
+ public void DiffCascade(List<string> inputs, List<DatFile> datHeaders, string outDir, bool inplace, bool skip)
+ {
+ // Create a list of DatData objects representing output files
+ List<DatFile> outDats = new List<DatFile>();
+
+ // Loop through each of the inputs and get or create a new DatData object
+ InternalStopwatch watch = new InternalStopwatch("Initializing all output DATs");
+
+ DatFile[] outDatsArray = new DatFile[inputs.Count];
+ Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
+ {
+ string innerpost = " (" + j + " - " + Utilities.GetFilenameFromFileAndParent(inputs[j], true) + " Only)";
+ DatFile diffData;
+
+ // If we're in inplace mode or the output directory is set, take the appropriate DatData object already stored
+ if (inplace || outDir != Environment.CurrentDirectory)
+ {
+ diffData = datHeaders[j];
+ }
+ else
+ {
+ diffData = new DatFile(this);
+ diffData.FileName += innerpost;
+ diffData.Name += innerpost;
+ diffData.Description += innerpost;
+ }
+
+ diffData.ResetDictionary();
+ outDatsArray[j] = diffData;
+ });
+
+ outDats = outDatsArray.ToList();
+ watch.Stop();
+
+ // Then, ensure that the internal dat can be sorted in the best possible way
+ BucketBy(SortedBy.CRC, DedupeType.None);
+
+ // Now, loop through the dictionary and populate the correct DATs
+ watch.Start("Populating all output DATs");
+ List<string> keys = Keys;
+
+ Parallel.ForEach(keys, Globals.ParallelOptions, key =>
+ {
+ List<DatItem> items = DatItem.Merge(this[key]);
+
+ // If the rom list is empty or null, just skip it
+ if (items == null || items.Count == 0)
+ {
+ return;
+ }
+
+ foreach (DatItem item in items)
+ {
+ // There's odd cases where there are items with System ID < 0. Skip them for now
+ if (item.SystemID < 0)
+ {
+ Globals.Logger.Warning("Item found with a <0 SystemID: {0}", item.Name);
+ continue;
+ }
+
+ outDats[item.SystemID].Add(key, item);
+ }
+ });
+
+ watch.Stop();
+
+ // Finally, loop through and output each of the DATs
+ watch.Start("Outputting all created DATs");
+
+ Parallel.For((skip ? 1 : 0), inputs.Count, Globals.ParallelOptions, j =>
+ {
+ string path = Utilities.GetOutputPath(outDir, inputs[j], inplace);
+
+ // Try to output the file
+ outDats[j].Write(path, overwrite: inplace);
+ });
+
+ watch.Stop();
+ }
+
+ ///
+ /// Output non-cascading diffs
+ ///
+ /// List of inputs to write out from
+ /// Output directory to write the DATs to
+ /// Filter object to be passed to the DatItem level
+ /// Non-zero flag for diffing mode, zero otherwise
+ public void DiffNoCascade(List<string> inputs, string outDir, Filter filter, UpdateMode diff)
+ {
+ InternalStopwatch watch = new InternalStopwatch("Initializing all output DATs");
+
+ // Default vars for use
+ string post = "";
+ DatFile outerDiffData = new DatFile();
+ DatFile dupeData = new DatFile();
+
+ // Fill in any information not in the base DAT
+ if (String.IsNullOrWhiteSpace(FileName))
+ {
+ FileName = "All DATs";
+ }
+ if (String.IsNullOrWhiteSpace(Name))
+ {
+ Name = "All DATs";
+ }
+ if (String.IsNullOrWhiteSpace(Description))
+ {
+ Description = "All DATs";
+ }
+
+ // Don't have External dupes
+ if ((diff & UpdateMode.DiffNoDupesOnly) != 0)
+ {
+ post = " (No Duplicates)";
+ outerDiffData = new DatFile(this);
+ outerDiffData.FileName += post;
+ outerDiffData.Name += post;
+ outerDiffData.Description += post;
+ outerDiffData.ResetDictionary();
+ }
+
+ // Have External dupes
+ if ((diff & UpdateMode.DiffDupesOnly) != 0)
+ {
+ post = " (Duplicates)";
+ dupeData = new DatFile(this);
+ dupeData.FileName += post;
+ dupeData.Name += post;
+ dupeData.Description += post;
+ dupeData.ResetDictionary();
+ }
+
+ // Create a list of DatData objects representing individual output files
+ List<DatFile> outDats = new List<DatFile>();
+
+ // Loop through each of the inputs and get or create a new DatData object
+ if ((diff & UpdateMode.DiffIndividualsOnly) != 0)
+ {
+ DatFile[] outDatsArray = new DatFile[inputs.Count];
+
+ Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
+ {
+ string innerpost = " (" + j + " - " + Utilities.GetFilenameFromFileAndParent(inputs[j], true) + " Only)";
+ DatFile diffData = new DatFile(this);
+ diffData.FileName += innerpost;
+ diffData.Name += innerpost;
+ diffData.Description += innerpost;
+ diffData.ResetDictionary();
+ outDatsArray[j] = diffData;
+ });
+
+ outDats = outDatsArray.ToList();
+ }
+
+ watch.Stop();
+
+ // Now, loop through the dictionary and populate the correct DATs
+ watch.Start("Populating all output DATs");
+
+ List<string> keys = Keys;
+ Parallel.ForEach(keys, Globals.ParallelOptions, key =>
+ {
+ List<DatItem> items = DatItem.Merge(this[key]);
+
+ // If the rom list is empty or null, just skip it
+ if (items == null || items.Count == 0)
+ {
+ return;
+ }
+
+ // Loop through and add the items correctly
+ foreach (DatItem item in items)
+ {
+ // No duplicates
+ if ((diff & UpdateMode.DiffNoDupesOnly) != 0 || (diff & UpdateMode.DiffIndividualsOnly) != 0)
+ {
+ if ((item.Dupe & DupeType.Internal) != 0 || item.Dupe == 0x00)
+ {
+ // Individual DATs that are output
+ if ((diff & UpdateMode.DiffIndividualsOnly) != 0)
+ {
+ outDats[item.SystemID].Add(key, item);
+ }
+
+ // Merged no-duplicates DAT
+ if ((diff & UpdateMode.DiffNoDupesOnly) != 0)
+ {
+ DatItem newrom = item.Clone() as DatItem;
+ newrom.MachineName += " (" + Path.GetFileNameWithoutExtension(inputs[item.SystemID].Split('¬')[0]) + ")";
+
+ outerDiffData.Add(key, newrom);
+ }
+ }
+ }
+
+ // Duplicates only
+ if ((diff & UpdateMode.DiffDupesOnly) != 0)
+ {
+ if ((item.Dupe & DupeType.External) != 0)
+ {
+ DatItem newrom = item.Clone() as DatItem;
+ newrom.MachineName += " (" + Path.GetFileNameWithoutExtension(inputs[item.SystemID].Split('¬')[0]) + ")";
+
+ dupeData.Add(key, newrom);
+ }
+ }
+ }
+ });
+
+ watch.Stop();
+
+ // Finally, loop through and output each of the DATs
+ watch.Start("Outputting all created DATs");
+
+ // Output the difflist (a-b)+(b-a) diff
+ if ((diff & UpdateMode.DiffNoDupesOnly) != 0)
+ {
+ outerDiffData.Write(outDir, overwrite: false);
+ }
+
+ // Output the (ab) diff
+ if ((diff & UpdateMode.DiffDupesOnly) != 0)
+ {
+ dupeData.Write(outDir, overwrite: false);
+ }
+
+ // Output the individual (a-b) DATs
+ if ((diff & UpdateMode.DiffIndividualsOnly) != 0)
+ {
+ Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
+ {
+ string path = Utilities.GetOutputPath(outDir, inputs[j], false /* inplace */);
+
+ // Try to output the file
+ outDats[j].Write(path, overwrite: false);
+ });
+ }
+
+ watch.Stop();
+ }
+
+ ///
+ /// Output user defined merge
+ ///
+ /// List of inputs to write out from
+ /// Dat headers used optionally
+ /// Output directory to write the DATs to
+ public void MergeNoDiff(List<string> inputs, List<DatFile> datHeaders, string outDir)
+ {
+ // If we're in SuperDAT mode, prefix all games with their respective DATs
+ if (Type == "SuperDAT")
+ {
+ List<string> keys = Keys;
+ Parallel.ForEach(keys, Globals.ParallelOptions, key =>
+ {
+ List<DatItem> items = this[key].ToList();
+ List<DatItem> newItems = new List<DatItem>();
+ foreach (DatItem item in items)
+ {
+ DatItem newItem = item;
+ string filename = inputs[newItem.SystemID].Split('¬')[0];
+ string rootpath = inputs[newItem.SystemID].Split('¬')[1];
+
+ rootpath += (String.IsNullOrWhiteSpace(rootpath) ? "" : Path.DirectorySeparatorChar.ToString());
+ filename = filename.Remove(0, rootpath.Length);
+ newItem.MachineName = Path.GetDirectoryName(filename) + Path.DirectorySeparatorChar
+ + Path.GetFileNameWithoutExtension(filename) + Path.DirectorySeparatorChar
+ + newItem.MachineName;
+
+ newItems.Add(newItem);
+ }
+
+ Remove(key);
+ AddRange(key, newItems);
+ });
+ }
+
+ // Try to output the file
+ Write(outDir, overwrite: false);
+ }
+
+ ///
+ /// Convert, update, and filter a DAT file or set of files
+ ///
+ /// Names of the input files and/or folders
+ /// Optional param for output directory
+ /// True if the output files should overwrite their inputs, false otherwise
+ /// True to clean the game names to WoD standard, false otherwise (default)
+ /// True if we should remove non-ASCII characters from output, false otherwise (default)
+ /// True to use game descriptions as the names, false otherwise (default)
+ /// Filter object to be passed to the DatItem level
+ /// Type of the split that should be performed (split, merged, fully merged)
+ public void Update(List<string> inputFileNames, string outDir, bool inplace, bool clean, bool remUnicode, bool descAsName,
+ Filter filter, SplitType splitType)
+ {
+ // Iterate over the files
+ foreach (string file in inputFileNames)
+ {
+ DatFile innerDatdata = new DatFile(this);
+ Globals.Logger.User("Processing '{0}'", Path.GetFileName(file.Split('¬')[0]));
+ innerDatdata.Parse(file, 0, 0, splitType, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName,
+ keepext: ((innerDatdata.DatFormat & DatFormat.TSV) != 0
+ || (innerDatdata.DatFormat & DatFormat.CSV) != 0
+ || (innerDatdata.DatFormat & DatFormat.SSV) != 0));
+ filter.FilterDatFile(innerDatdata);
+
+ // Get the correct output path
+ string realOutDir = Utilities.GetOutputPath(outDir, file, inplace);
+
+ // Try to output the file, overwriting only if it's not in the current directory
+ innerDatdata.Write(realOutDir, overwrite: inplace);
+ }
+ }
+
+ #endregion
+
+ #region Dictionary Manipulation
+
+ ///
+ /// Clones the files dictionary
+ ///
+ /// A new files dictionary instance
+ public SortedDictionary<string, List<DatItem>> CloneDictionary()
+ {
+ // Create the placeholder dictionary to be used
+ SortedDictionary<string, List<DatItem>> sorted = new SortedDictionary<string, List<DatItem>>();
+
+ // Now perform a deep clone on the entire dictionary
+ List<string> keys = Keys;
+ foreach (string key in keys)
+ {
+ // Clone each list of DATs in the dictionary
+ List<DatItem> olditems = this[key];
+ List<DatItem> newitems = new List<DatItem>();
+ foreach (DatItem item in olditems)
+ {
+ newitems.Add((DatItem)item.Clone());
+ }
+
+ // If the key is missing from the new dictionary, add it
+ if (!sorted.ContainsKey(key))
+ {
+ sorted.Add(key, new List<DatItem>());
+ }
+
+ // Now add the list of items
+ sorted[key].AddRange(newitems);
+ }
+
+ return sorted;
+ }
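+
+ // Illustrative usage sketch (not from the original source): take a deep, independent copy of
+ // the item dictionary before a destructive operation such as ResetDictionary().
+ //
+ // SortedDictionary<string, List<DatItem>> backup = datFile.CloneDictionary();
+ // datFile.ResetDictionary();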
+
+ ///
+ /// Delete the file dictionary
+ ///
+ public void DeleteDictionary()
+ {
+ _items = null;
+ this.SortedBy = SortedBy.Default;
+ this.MergedBy = DedupeType.None;
+
+ // Reset statistics
+ _datStats.Reset();
+ }
+
+ ///
+ /// Reset the file dictionary
+ ///
+ public void ResetDictionary()
+ {
+ _items = new SortedDictionary<string, List<DatItem>>();
+ this.SortedBy = SortedBy.Default;
+ this.MergedBy = DedupeType.None;
+
+ // Reset statistics
+ _datStats.Reset();
+ }
+
+ #endregion
+
+ #region Filtering
+
+ ///
+ /// Use game descriptions as names in the DAT, updating cloneof/romof/sampleof
+ ///
+ private void MachineDescriptionToName()
+ {
+ try
+ {
+ // First we want to get a mapping for all games to description
+ ConcurrentDictionary<string, string> mapping = new ConcurrentDictionary<string, string>();
+ List<string> keys = Keys;
+ Parallel.ForEach(keys, Globals.ParallelOptions, key =>
+ {
+ List<DatItem> items = this[key];
+ foreach (DatItem item in items)
+ {
+ // If the key mapping doesn't exist, add it
+ if (!mapping.ContainsKey(item.MachineName))
+ {
+ mapping.TryAdd(item.MachineName, item.MachineDescription.Replace('/', '_').Replace("\"", "''").Replace(":", " -"));
+ }
+ }
+ });
+
+ // Now we loop through every item and update accordingly
+ keys = Keys;
+ Parallel.ForEach(keys, Globals.ParallelOptions, key =>
+ {
+ List<DatItem> items = this[key];
+ List<DatItem> newItems = new List<DatItem>();
+ foreach (DatItem item in items)
+ {
+ // Update machine name
+ if (!String.IsNullOrWhiteSpace(item.MachineName) && mapping.ContainsKey(item.MachineName))
+ {
+ item.MachineName = mapping[item.MachineName];
+ }
+
+ // Update cloneof
+ if (!String.IsNullOrWhiteSpace(item.CloneOf) && mapping.ContainsKey(item.CloneOf))
+ {
+ item.CloneOf = mapping[item.CloneOf];
+ }
+
+ // Update romof
+ if (!String.IsNullOrWhiteSpace(item.RomOf) && mapping.ContainsKey(item.RomOf))
+ {
+ item.RomOf = mapping[item.RomOf];
+ }
+
+ // Update sampleof
+ if (!String.IsNullOrWhiteSpace(item.SampleOf) && mapping.ContainsKey(item.SampleOf))
+ {
+ item.SampleOf = mapping[item.SampleOf];
+ }
+
+ // Add the new item to the output list
+ newItems.Add(item);
+ }
+
+ // Replace the old list of roms with the new one
+ Remove(key);
+ AddRange(key, newItems);
+ });
+ }
+ catch (Exception ex)
+ {
+ Globals.Logger.Warning(ex.ToString());
+ }
+ }
+
+ ///
+ /// Ensure that all roms are in their own game (or at least try to ensure)
+ ///
+ private void OneRomPerGame()
+ {
+ // For each rom, we want to update the game to be "<game name>/<rom name>"
+ Parallel.ForEach(Keys, Globals.ParallelOptions, key =>
+ {
+ List<DatItem> items = this[key];
+ for (int i = 0; i < items.Count; i++)
+ {
+ string[] splitname = items[i].Name.Split('.');
+ items[i].MachineName += "/" + string.Join(".", splitname.Take(splitname.Length > 1 ? splitname.Length - 1 : 1));
+ }
+ });
+ }
+
+ ///
+ /// Remove all items marked for removal from the DAT
+ ///
+ private void RemoveMarkedItems()
+ {
+ List<string> keys = Keys;
+ foreach (string key in keys)
+ {
+ List<DatItem> items = this[key];
+ List<DatItem> newItems = new List<DatItem>();
+ foreach (DatItem item in items)
+ {
+ if (!item.Remove)
+ {
+ newItems.Add(item);
+ }
+ }
+
+ Remove(key);
+ AddRange(key, newItems);
+ }
+ }
+
+ ///
+ /// Strip the dates from the beginning of scene-style set names
+ ///
+ private void StripSceneDatesFromItems()
+ {
+ // Output the logging statement
+ Globals.Logger.User("Stripping scene-style dates");
+
+ // Set the regex pattern to use
+ string pattern = @"([0-9]{2}\.[0-9]{2}\.[0-9]{2}-)(.*?-.*?)";
+
+ // Now process all of the roms
+ List<string> keys = Keys;
+ Parallel.ForEach(keys, Globals.ParallelOptions, key =>
+ {
+ List<DatItem> items = this[key];
+ for (int j = 0; j < items.Count; j++)
+ {
+ DatItem item = items[j];
+ if (Regex.IsMatch(item.MachineName, pattern))
+ {
+ item.MachineName = Regex.Replace(item.MachineName, pattern, "$2");
+ }
+ if (Regex.IsMatch(item.MachineDescription, pattern))
+ {
+ item.MachineDescription = Regex.Replace(item.MachineDescription, pattern, "$2");
+ }
+
+ items[j] = item;
+ }
+
+ Remove(key);
+ AddRange(key, items);
+ });
+ }
+
+ #endregion
+
+ #region Internal Merging/Splitting
+
+ ///
+ /// Use device_ref tags to get full non-merged sets and remove parenting tags
+ ///
+ /// Dedupe type to be used
+ public void CreateDeviceNonMergedSets(DedupeType mergeroms)
+ {
+ Globals.Logger.User("Creating device non-merged sets from the DAT");
+
+ // For sake of ease, the first thing we want to do is sort by game
+ BucketBy(SortedBy.Game, mergeroms, norename: true);
+
+ // Now we want to loop through all of the games and set the correct information
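+ // AddRomsFromDevices returns true whenever it copies something new, and copied items can pull
+ // in further device references, so repeat each pass until a run finds nothing left to add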
+ while (AddRomsFromDevices(false, false));
+ while (AddRomsFromDevices(true, false));
+
+ // Then, remove the romof and cloneof tags so it's not picked up by the manager
+ RemoveTagsFromChild();
+ }
+
+ ///
+ /// Use cloneof tags to create non-merged sets and remove the tags plus using the device_ref tags to get full sets
+ ///
+ /// Dedupe type to be used
+ public void CreateFullyNonMergedSets(DedupeType mergeroms)
+ {
+ Globals.Logger.User("Creating fully non-merged sets from the DAT");
+
+ // For sake of ease, the first thing we want to do is sort by game
+ BucketBy(SortedBy.Game, mergeroms, norename: true);
+
+ // Now we want to loop through all of the games and set the correct information
+ while (AddRomsFromDevices(true, true));
+ AddRomsFromDevices(false, true);
+ AddRomsFromParent();
+
+ // Now that we have looped through the cloneof tags, we loop through the romof tags
+ AddRomsFromBios();
+
+ // Then, remove the romof and cloneof tags so it's not picked up by the manager
+ RemoveTagsFromChild();
+ }
+
+ ///
+ /// Use cloneof tags to create merged sets and remove the tags
+ ///
+ /// Dedupe type to be used
+ public void CreateMergedSets(DedupeType mergeroms)
+ {
+ Globals.Logger.User("Creating merged sets from the DAT");
+
+ // For sake of ease, the first thing we want to do is sort by game
+ BucketBy(SortedBy.Game, mergeroms, norename: true);
+
+ // Now we want to loop through all of the games and set the correct information
+ AddRomsFromChildren();
+
+ // Now that we have looped through the cloneof tags, we loop through the romof tags
+ RemoveBiosRomsFromChild(false);
+ RemoveBiosRomsFromChild(true);
+
+ // Finally, remove the romof and cloneof tags so it's not picked up by the manager
+ RemoveTagsFromChild();
+ }
+
+ ///
+ /// Use cloneof tags to create non-merged sets and remove the tags
+ ///
+ /// Dedupe type to be used
+ public void CreateNonMergedSets(DedupeType mergeroms)
+ {
+ Globals.Logger.User("Creating non-merged sets from the DAT");
+
+ // For sake of ease, the first thing we want to do is sort by game
+ BucketBy(SortedBy.Game, mergeroms, norename: true);
+
+ // Now we want to loop through all of the games and set the correct information
+ AddRomsFromParent();
+
+ // Now that we have looped through the cloneof tags, we loop through the romof tags
+ RemoveBiosRomsFromChild(false);
+ RemoveBiosRomsFromChild(true);
+
+ // Finally, remove the romof and cloneof tags so it's not picked up by the manager
+ RemoveTagsFromChild();
+ }
+
+ ///
+ /// Use cloneof and romof tags to create split sets and remove the tags
+ ///
+ /// Dedupe type to be used
+ public void CreateSplitSets(DedupeType mergeroms)
+ {
+ Globals.Logger.User("Creating split sets from the DAT");
+
+ // For sake of ease, the first thing we want to do is sort by game
+ BucketBy(SortedBy.Game, mergeroms, norename: true);
+
+ // Now we want to loop through all of the games and set the correct information
+ RemoveRomsFromChild();
+
+ // Now that we have looped through the cloneof tags, we loop through the romof tags
+ RemoveBiosRomsFromChild(false);
+ RemoveBiosRomsFromChild(true);
+
+ // Finally, remove the romof and cloneof tags so it's not picked up by the manager
+ RemoveTagsFromChild();
+ }
+
+ ///
+ /// Use romof tags to add roms to the children
+ ///
+ private void AddRomsFromBios()
+ {
+ List<string> games = Keys;
+ foreach (string game in games)
+ {
+ // If the game has no items in it, we want to continue
+ if (this[game].Count == 0)
+ {
+ continue;
+ }
+
+ // Determine if the game has a parent or not
+ string parent = null;
+ if (!String.IsNullOrWhiteSpace(this[game][0].RomOf))
+ {
+ parent = this[game][0].RomOf;
+ }
+
+ // If the parent doesn't exist, we want to continue
+ if (String.IsNullOrWhiteSpace(parent))
+ {
+ continue;
+ }
+
+ // If the parent doesn't have any items, we want to continue
+ if (this[parent].Count == 0)
+ {
+ continue;
+ }
+
+ // If the parent exists and has items, we copy the items from the parent to the current game
+ DatItem copyFrom = this[game][0];
+ List<DatItem> parentItems = this[parent];
+ foreach (DatItem item in parentItems)
+ {
+ DatItem datItem = (DatItem)item.Clone();
+ datItem.CopyMachineInformation(copyFrom);
+ if (this[game].Where(i => i.Name == datItem.Name).Count() == 0 && !this[game].Contains(datItem))
+ {
+ Add(game, datItem);
+ }
+ }
+ }
+ }
+
+ ///
+ /// Use device_ref and optionally slotoption tags to add roms to the children
+ ///
+ /// True if only child device sets are touched, false for non-device sets (default)
+ /// True if slotoptions tags are used as well, false otherwise
+ private bool AddRomsFromDevices(bool dev = false, bool slotoptions = false)
+ {
+ bool foundnew = false;
+ List<string> games = Keys;
+ foreach (string game in games)
+ {
+ // If the game doesn't have items, we continue
+ if (this[game] == null || this[game].Count == 0)
+ {
+ continue;
+ }
+
+ // If the game (is/is not) a device, we want to continue
+ if (dev ^ (this[game][0].MachineType & MachineType.Device) != 0)
+ {
+ continue;
+ }
+
+ // If the game has no devices, we continue
+ if (this[game][0].Devices == null
+ || this[game][0].Devices.Count == 0
+ || (slotoptions && this[game][0].SlotOptions == null)
+ || (slotoptions && this[game][0].SlotOptions.Count == 0))
+ {
+ continue;
+ }
+
+ // Determine if the game has any devices or not
+ List<string> devices = this[game][0].Devices;
+ List<string> newdevs = new List<string>();
+ foreach (string device in devices)
+ {
+ // If the device doesn't exist then we continue
+ if (this[device].Count == 0)
+ {
+ continue;
+ }
+
+ // Otherwise, copy the items from the device to the current game
+ DatItem copyFrom = this[game][0];
+ List<DatItem> devItems = this[device];
+ foreach (DatItem item in devItems)
+ {
+ DatItem datItem = (DatItem)item.Clone();
+ newdevs.AddRange(datItem.Devices ?? new List<string>());
+ datItem.CopyMachineInformation(copyFrom);
+ if (this[game].Where(i => i.Name.ToLowerInvariant() == datItem.Name.ToLowerInvariant()).Count() == 0)
+ {
+ foundnew = true;
+ Add(game, datItem);
+ }
+ }
+ }
+
+ // Now that every device is accounted for, add the new list of devices, if they don't already exist
+ foreach (string device in newdevs)
+ {
+ if (!this[game][0].Devices.Contains(device))
+ {
+ this[game][0].Devices.Add(device);
+ }
+ }
+
+ // If we're checking slotoptions too
+ if (slotoptions)
+ {
+ // Determine if the game has any slotoptions or not
+ List<string> slotopts = this[game][0].SlotOptions;
+ List<string> newslotopts = new List<string>();
+ foreach (string slotopt in slotopts)
+ {
+ // If the slotoption doesn't exist then we continue
+ if (this[slotopt].Count == 0)
+ {
+ continue;
+ }
+
+ // Otherwise, copy the items from the slotoption to the current game
+ DatItem copyFrom = this[game][0];
+ List<DatItem> slotItems = this[slotopt];
+ foreach (DatItem item in slotItems)
+ {
+ DatItem datItem = (DatItem)item.Clone();
+ newslotopts.AddRange(datItem.SlotOptions ?? new List<string>());
+ datItem.CopyMachineInformation(copyFrom);
+ if (this[game].Where(i => i.Name.ToLowerInvariant() == datItem.Name.ToLowerInvariant()).Count() == 0)
+ {
+ foundnew = true;
+ Add(game, datItem);
+ }
+ }
+ }
+
+ // Now that every slotoption is accounted for, add the new list of slotoptions, if they don't already exist
+ foreach (string slotopt in newslotopts)
+ {
+ if (!this[game][0].SlotOptions.Contains(slotopt))
+ {
+ this[game][0].SlotOptions.Add(slotopt);
+ }
+ }
+ }
+ }
+
+ return foundnew;
+ }
+
+ ///
+ /// Use cloneof tags to add roms to the children, setting the new romof tag in the process
+ ///
+ private void AddRomsFromParent()
+ {
+ List<string> games = Keys;
+ foreach (string game in games)
+ {
+ // If the game has no items in it, we want to continue
+ if (this[game].Count == 0)
+ {
+ continue;
+ }
+
+ // Determine if the game has a parent or not
+ string parent = null;
+ if (!String.IsNullOrWhiteSpace(this[game][0].CloneOf))
+ {
+ parent = this[game][0].CloneOf;
+ }
+
+ // If the parent doesn't exist, we want to continue
+ if (String.IsNullOrWhiteSpace(parent))
+ {
+ continue;
+ }
+
+ // If the parent doesn't have any items, we want to continue
+ if (this[parent].Count == 0)
+ {
+ continue;
+ }
+
+ // If the parent exists and has items, we copy the items from the parent to the current game
+ DatItem copyFrom = this[game][0];
+ List<DatItem> parentItems = this[parent];
+ foreach (DatItem item in parentItems)
+ {
+ DatItem datItem = (DatItem)item.Clone();
+ datItem.CopyMachineInformation(copyFrom);
+ if (this[game].Where(i => i.Name.ToLowerInvariant() == datItem.Name.ToLowerInvariant()).Count() == 0
+ && !this[game].Contains(datItem))
+ {
+ Add(game, datItem);
+ }
+ }
+
+ // Now we want to get the parent romof tag and put it in each of the items
+ List<DatItem> items = this[game];
+ string romof = this[parent][0].RomOf;
+ foreach (DatItem item in items)
+ {
+ item.RomOf = romof;
+ }
+ }
+ }
+
+ ///
+ /// Use cloneof tags to add roms to the parents, removing the child sets in the process
+ ///
+ private void AddRomsFromChildren()
+ {
+ List<string> games = Keys;
+ foreach (string game in games)
+ {
+ // If the game has no items in it, we want to continue
+ if (this[game].Count == 0)
+ {
+ continue;
+ }
+
+ // Determine if the game has a parent or not
+ string parent = null;
+ if (!String.IsNullOrWhiteSpace(this[game][0].CloneOf))
+ {
+ parent = this[game][0].CloneOf;
+ }
+
+ // If there is no parent, then we continue
+ if (String.IsNullOrWhiteSpace(parent))
+ {
+ continue;
+ }
+
+ // Otherwise, move the items from the current game to a subfolder of the parent game
+ DatItem copyFrom = this[parent].Count == 0 ? new Rom { MachineName = parent, MachineDescription = parent } : this[parent][0];
+ List<DatItem> items = this[game];
+ foreach (DatItem item in items)
+ {
+ // If the disk doesn't have a valid merge tag OR the merged file doesn't exist in the parent, then add it
+ if (item.Type == ItemType.Disk && (((Disk)item).MergeTag == null || !this[parent].Select(i => i.Name).Contains(((Disk)item).MergeTag)))
+ {
+ item.CopyMachineInformation(copyFrom);
+ Add(parent, item);
+ }
+
+ // Otherwise, if the parent doesn't already contain the non-disk (or a merge-equivalent), add it
+ else if (item.Type != ItemType.Disk && !this[parent].Contains(item))
+ {
+ // Rename the child so it's in a subfolder
+ item.Name = item.MachineName + "\\" + item.Name;
+
+ // Update the machine to be the new parent
+ item.CopyMachineInformation(copyFrom);
+
+ // Add the rom to the parent set
+ Add(parent, item);
+ }
+ }
+
+ // Then, remove the old game so it's not picked up by the writer
+ Remove(game);
+ }
+ }
+
+ ///
+ /// Remove all BIOS and device sets
+ ///
+ private void RemoveBiosAndDeviceSets()
+ {
+ List<string> games = Keys;
+ foreach (string game in games)
+ {
+ if (this[game].Count > 0
+ && ((this[game][0].MachineType & MachineType.Bios) != 0
+ || (this[game][0].MachineType & MachineType.Device) != 0))
+ {
+ Remove(game);
+ }
+ }
+ }
+
+ ///
+ /// Use romof tags to remove bios roms from children
+ ///
+ /// True if only child Bios sets are touched, false for non-bios sets (default)
+ private void RemoveBiosRomsFromChild(bool bios = false)
+ {
+ // Loop through the romof tags
+ List<string> games = Keys;
+ foreach (string game in games)
+ {
+ // If the game has no items in it, we want to continue
+ if (this[game].Count == 0)
+ {
+ continue;
+ }
+
+ // If the game (is/is not) a bios, we want to continue
+ if (bios ^ (this[game][0].MachineType & MachineType.Bios) != 0)
+ {
+ continue;
+ }
+
+ // Determine if the game has a parent or not
+ string parent = null;
+ if (!String.IsNullOrWhiteSpace(this[game][0].RomOf))
+ {
+ parent = this[game][0].RomOf;
+ }
+
+ // If the parent doesn't exist, we want to continue
+ if (String.IsNullOrWhiteSpace(parent))
+ {
+ continue;
+ }
+
+ // If the parent doesn't have any items, we want to continue
+ if (this[parent].Count == 0)
+ {
+ continue;
+ }
+
+ // If the parent exists and has items, we remove the items that are in the parent from the current game
+ List<DatItem> parentItems = this[parent];
+ foreach (DatItem item in parentItems)
+ {
+ DatItem datItem = (DatItem)item.Clone();
+ while (this[game].Contains(datItem))
+ {
+ Remove(game, datItem);
+ }
+ }
+ }
+ }
+
+ ///
+ /// Use cloneof tags to remove roms from the children
+ ///
+ private void RemoveRomsFromChild()
+ {
+ List<string> games = Keys;
+ foreach (string game in games)
+ {
+ // If the game has no items in it, we want to continue
+ if (this[game].Count == 0)
+ {
+ continue;
+ }
+
+ // Determine if the game has a parent or not
+ string parent = null;
+ if (!String.IsNullOrWhiteSpace(this[game][0].CloneOf))
+ {
+ parent = this[game][0].CloneOf;
+ }
+
+ // If the parent doesn't exist, we want to continue
+ if (String.IsNullOrWhiteSpace(parent))
+ {
+ continue;
+ }
+
+ // If the parent doesn't have any items, we want to continue
+ if (this[parent].Count == 0)
+ {
+ continue;
+ }
+
+ // If the parent exists and has items, we remove the parent items from the current game
+ List<DatItem> parentItems = this[parent];
+ foreach (DatItem item in parentItems)
+ {
+ DatItem datItem = (DatItem)item.Clone();
+ while (this[game].Contains(datItem))
+ {
+ Remove(game, datItem);
+ }
+ }
+
+ // Now we want to get the parent romof tag and put it in each of the remaining items
+ List<DatItem> items = this[game];
+ string romof = this[parent][0].RomOf;
+ foreach (DatItem item in items)
+ {
+ item.RomOf = romof;
+ }
+ }
+ }
+
+ ///
+ /// Remove all romof and cloneof tags from all games
+ ///
+ private void RemoveTagsFromChild()
+ {
+ List<string> games = Keys;
+ foreach (string game in games)
+ {
+ List<DatItem> items = this[game];
+ foreach (DatItem item in items)
+ {
+ item.CloneOf = null;
+ item.RomOf = null;
+ }
+ }
+ }
+
+ #endregion
+
+ #region Parsing
+
+ ///
+ /// Parse a DAT and return all found games and roms within
+ ///
+ /// Name of the file to be parsed
+ /// System ID for the DAT
+ /// Source ID for the DAT
+ /// The DatData object representing found roms to this point
+ /// True if full pathnames are to be kept, false otherwise (default)
+ /// True if game names are sanitized, false otherwise (default)
+ /// True if we should remove non-ASCII characters from output, false otherwise (default)
+ /// True if descriptions should be used as names, false otherwise (default)
+ /// True if original extension should be kept, false otherwise (default)
+ /// True if tags from the DAT should be used to merge the output, false otherwise (default)
+ public void Parse(string filename, int sysid, int srcid, bool keep = false, bool clean = false,
+ bool remUnicode = false, bool descAsName = false, bool keepext = false, bool useTags = false)
+ {
+ Parse(filename, sysid, srcid, SplitType.None, keep: keep, clean: clean,
+ remUnicode: remUnicode, descAsName: descAsName, keepext: keepext, useTags: useTags);
+ }
+
+ ///
+ /// Parse a DAT and return all found games and roms within
+ ///
+ /// Name of the file to be parsed
+ /// System ID for the DAT
+ /// Source ID for the DAT
+ /// Type of the split that should be performed (split, merged, fully merged)
+ /// True if full pathnames are to be kept, false otherwise (default)
+ /// True if game names are sanitized, false otherwise (default)
+ /// True if we should remove non-ASCII characters from output, false otherwise (default)
+ /// True if descriptions should be used as names, false otherwise (default)
+ /// True if original extension should be kept, false otherwise (default)
+ /// True if tags from the DAT should be used to merge the output, false otherwise (default)
+ public void Parse(
+ // Standard Dat parsing
+ string filename,
+ int sysid,
+ int srcid,
+
+ // Rom renaming
+ SplitType splitType,
+
+ // Miscellaneous
+ bool keep = false,
+ bool clean = false,
+ bool remUnicode = false,
+ bool descAsName = false,
+ bool keepext = false,
+ bool useTags = false)
+ {
+ // Check if we have a split path and get the filename accordingly
+ if (filename.Contains("¬"))
+ {
+ filename = filename.Split('¬')[0];
+ }
+
+ // Check the file extension first as a safeguard
+ if (!Utilities.HasValidDatExtension(filename))
+ {
+ return;
+ }
+
+ // If the output filename isn't set already, get the internal filename
+ FileName = (String.IsNullOrWhiteSpace(FileName) ? (keepext ? Path.GetFileName(filename) : Path.GetFileNameWithoutExtension(filename)) : FileName);
+
+ // If the output type isn't set already, get the internal output type
+ DatFormat = (DatFormat == 0 ? Utilities.GetDatFormatFromFile(filename) : DatFormat);
+ this.SortedBy = SortedBy.CRC; // Setting this because it can reduce issues later
+
+ // Now parse the correct type of DAT
+ try
+ {
+ Utilities.GetDatFile(filename, this)?.ParseFile(filename, sysid, srcid, keep, clean, remUnicode);
+ }
+ catch (Exception ex)
+ {
+ Globals.Logger.Error("Error with file '{0}': {1}", filename, ex);
+ }
+
+ // If we want to use descriptions as names, update everything
+ if (descAsName)
+ {
+ MachineDescriptionToName();
+ }
+
+ // If we are using tags from the DAT, set the proper input for split type unless overridden
+ if (useTags && splitType == SplitType.None)
+ {
+ splitType = Utilities.GetSplitType(ForceMerging);
+ }
+
+ // Now we pre-process the DAT with the splitting/merging mode
+ switch (splitType)
+ {
+ case SplitType.None:
+ // No-op
+ break;
+ case SplitType.DeviceNonMerged:
+ CreateDeviceNonMergedSets(DedupeType.None);
+ break;
+ case SplitType.FullNonMerged:
+ CreateFullyNonMergedSets(DedupeType.None);
+ break;
+ case SplitType.NonMerged:
+ CreateNonMergedSets(DedupeType.None);
+ break;
+ case SplitType.Merged:
+ CreateMergedSets(DedupeType.None);
+ break;
+ case SplitType.Split:
+ CreateSplitSets(DedupeType.None);
+ break;
+ }
+
+ // Finally, we remove any blanks, if we aren't supposed to have any
+ if (!KeepEmptyGames)
+ {
+ foreach (string key in Keys)
+ {
+ List<DatItem> items = this[key];
+ List<DatItem> newitems = new List<DatItem>();
+ foreach (DatItem item in items)
+ {
+ if (item.Type != ItemType.Blank)
+ {
+ newitems.Add(item);
+ }
+ }
+
+ this.Remove(key);
+ this.AddRange(key, newitems);
+ }
+ }
+ }
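+
+ // Illustrative usage sketch (not from the original source); the file and directory names are
+ // placeholders: parse a DAT, let its header tags pick the split type, and write it back out.
+ //
+ // DatFile dat = new DatFile();
+ // dat.Parse("input.dat", 0, 0, SplitType.None, keep: true, useTags: true);
+ // dat.Write("output", overwrite: false);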
+
+ ///
+ /// Add a rom to the Dat after checking
+ ///
+ /// Item data to check against
+ /// True if the names should be cleaned to WoD standards, false otherwise
+ /// True if we should remove non-ASCII characters from output, false otherwise (default)
+ /// The key for the item
+ public string ParseAddHelper(DatItem item, bool clean, bool remUnicode)
+ {
+ string key = "";
+
+ // If there's no name in the rom, we log and skip it
+ if (item.Name == null)
+ {
+ Globals.Logger.Warning("{0}: Rom with no name found! Skipping...", FileName);
+ return key;
+ }
+
+ // If we're in cleaning mode, sanitize the game name
+ item.MachineName = (clean ? Utilities.CleanGameName(item.MachineName) : item.MachineName);
+
+ // If we're stripping unicode characters, do so from all relevant things
+ if (remUnicode)
+ {
+ item.Name = Utilities.RemoveUnicodeCharacters(item.Name);
+ item.MachineName = Utilities.RemoveUnicodeCharacters(item.MachineName);
+ item.MachineDescription = Utilities.RemoveUnicodeCharacters(item.MachineDescription);
+ }
+
+ // If we have a Rom or a Disk, clean the hash data
+ if (item.Type == ItemType.Rom)
+ {
+ Rom itemRom = (Rom)item;
+
+ // Sanitize the hashes from null, hex sizes, and "true blank" strings
+ itemRom.CRC = Utilities.CleanHashData(itemRom.CRC, Constants.CRCLength);
+ itemRom.MD5 = Utilities.CleanHashData(itemRom.MD5, Constants.MD5Length);
+ itemRom.SHA1 = Utilities.CleanHashData(itemRom.SHA1, Constants.SHA1Length);
+ itemRom.SHA256 = Utilities.CleanHashData(itemRom.SHA256, Constants.SHA256Length);
+ itemRom.SHA384 = Utilities.CleanHashData(itemRom.SHA384, Constants.SHA384Length);
+ itemRom.SHA512 = Utilities.CleanHashData(itemRom.SHA512, Constants.SHA512Length);
+
+ // If we have the case where there is SHA-1 and nothing else, we don't fill in any other part of the data
+ if (itemRom.Size == -1
+ && String.IsNullOrWhiteSpace(itemRom.CRC)
+ && String.IsNullOrWhiteSpace(itemRom.MD5)
+ && !String.IsNullOrWhiteSpace(itemRom.SHA1)
+ && String.IsNullOrWhiteSpace(itemRom.SHA256)
+ && String.IsNullOrWhiteSpace(itemRom.SHA384)
+ && String.IsNullOrWhiteSpace(itemRom.SHA512))
+ {
+ // No-op, just catch it so it doesn't go further
+ Globals.Logger.Verbose("{0}: Entry with only SHA-1 found - '{1}'", FileName, itemRom.Name);
+ }
+
+ // If we have a rom and it's missing size AND the hashes match a 0-byte file, fill in the rest of the info
+ else if ((itemRom.Size == 0 || itemRom.Size == -1)
+ && ((itemRom.CRC == Constants.CRCZero || String.IsNullOrWhiteSpace(itemRom.CRC))
+ || itemRom.MD5 == Constants.MD5Zero
+ || itemRom.SHA1 == Constants.SHA1Zero
+ || itemRom.SHA256 == Constants.SHA256Zero
+ || itemRom.SHA384 == Constants.SHA384Zero
+ || itemRom.SHA512 == Constants.SHA512Zero))
+ {
+ // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
+ itemRom.Size = Constants.SizeZero;
+ itemRom.CRC = Constants.CRCZero;
+ itemRom.MD5 = Constants.MD5Zero;
+ itemRom.SHA1 = Constants.SHA1Zero;
+ itemRom.SHA256 = null;
+ itemRom.SHA384 = null;
+ itemRom.SHA512 = null;
+ //itemRom.SHA256 = Constants.SHA256Zero;
+ //itemRom.SHA384 = Constants.SHA384Zero;
+ //itemRom.SHA512 = Constants.SHA512Zero;
+ }
+ // If the file has no size and it's not the above case, skip and log
+ else if (itemRom.ItemStatus != ItemStatus.Nodump && (itemRom.Size == 0 || itemRom.Size == -1))
+ {
+ Globals.Logger.Verbose("{0}: Incomplete entry for '{1}' will be output as nodump", FileName, itemRom.Name);
+ itemRom.ItemStatus = ItemStatus.Nodump;
+ }
+ // If the file has a size but absolutely no hashes, skip and log
+ else if (itemRom.ItemStatus != ItemStatus.Nodump
+ && itemRom.Size > 0
+ && String.IsNullOrWhiteSpace(itemRom.CRC)
+ && String.IsNullOrWhiteSpace(itemRom.MD5)
+ && String.IsNullOrWhiteSpace(itemRom.SHA1)
+ && String.IsNullOrWhiteSpace(itemRom.SHA256)
+ && String.IsNullOrWhiteSpace(itemRom.SHA384)
+ && String.IsNullOrWhiteSpace(itemRom.SHA512))
+ {
+ Globals.Logger.Verbose("{0}: Incomplete entry for '{1}' will be output as nodump", FileName, itemRom.Name);
+ itemRom.ItemStatus = ItemStatus.Nodump;
+ }
+
+ item = itemRom;
+ }
+ else if (item.Type == ItemType.Disk)
+ {
+ Disk itemDisk = (Disk)item;
+
+ // Sanitize the hashes from null, hex sizes, and "true blank" strings
+ itemDisk.MD5 = Utilities.CleanHashData(itemDisk.MD5, Constants.MD5Length);
+ itemDisk.SHA1 = Utilities.CleanHashData(itemDisk.SHA1, Constants.SHA1Length);
+ itemDisk.SHA256 = Utilities.CleanHashData(itemDisk.SHA256, Constants.SHA256Length);
+ itemDisk.SHA384 = Utilities.CleanHashData(itemDisk.SHA384, Constants.SHA384Length);
+ itemDisk.SHA512 = Utilities.CleanHashData(itemDisk.SHA512, Constants.SHA512Length);
+
+ // If the file has absolutely no hashes, skip and log
+ if (itemDisk.ItemStatus != ItemStatus.Nodump
+ && String.IsNullOrWhiteSpace(itemDisk.MD5)
+ && String.IsNullOrWhiteSpace(itemDisk.SHA1)
+ && String.IsNullOrWhiteSpace(itemDisk.SHA256)
+ && String.IsNullOrWhiteSpace(itemDisk.SHA384)
+ && String.IsNullOrWhiteSpace(itemDisk.SHA512))
+ {
+ Globals.Logger.Verbose("Incomplete entry for '{0}' will be output as nodump", itemDisk.Name);
+ itemDisk.ItemStatus = ItemStatus.Nodump;
+ }
+
+ item = itemDisk;
+ }
+
+ // Get the key and add the file
+ key = Utilities.GetKeyFromDatItem(item, SortedBy.CRC);
+ Add(key, item);
+
+ return key;
+ }
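+
+ // Illustrative usage sketch (not from the original source): a format-specific ParseFile
+ // implementation would build a DatItem and hand it to ParseAddHelper, which sanitizes the
+ // hashes, marks incomplete entries as nodump, and buckets the item by CRC. The Rom values
+ // below are placeholders.
+ //
+ // Rom rom = new Rom { Name = "example.bin", MachineName = "example", Size = 0 };
+ // string key = ParseAddHelper(rom, clean: false, remUnicode: false);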
+
+ ///
+ /// Add a rom to the Dat after checking
+ ///
+ /// Item data to check against
+ /// True if the names should be cleaned to WoD standards, false otherwise
+ /// True if we should remove non-ASCII characters from output, false otherwise (default)
+ /// The key for the item
+ public async Task ParseAddHelperAsync(DatItem item, bool clean, bool remUnicode)
+ {
+ return await Task.Run(() => ParseAddHelper(item, clean, remUnicode));
+ }
+
+ /// <summary>
+ /// Parse DatFile and return all found games and roms within
+ /// </summary>
+ /// <param name="filename">Name of the file to be parsed</param>
+ /// <param name="sysid">System ID for the DAT</param>
+ /// <param name="srcid">Source ID for the DAT</param>
+ /// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param>
+ /// <param name="clean">True if game names are sanitized, false otherwise (default)</param>
+ /// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
+ public virtual void ParseFile(
+ // Standard Dat parsing
+ string filename,
+ int sysid,
+ int srcid,
+
+ // Miscellaneous
+ bool keep,
+ bool clean,
+ bool remUnicode)
+ {
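+ // The base implementation is format-agnostic; format-specific subclasses are expected to override this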
+ throw new NotImplementedException();
+ }
+
+ #endregion
+
+ #region Populate DAT from Directory
+
+ /// <summary>
+ /// Create a new Dat from a directory
+ /// </summary>
+ /// <param name="basePath">Base folder to be used in creating the DAT</param>
+ /// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
+ /// <param name="bare">True if the date should be omitted from the DAT, false otherwise</param>
+ /// <param name="archivesAsFiles">True if archives should be treated as files, false otherwise</param>
+ /// <param name="skipFileType">Type of files that should be skipped</param>
+ /// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
+ /// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
+ /// <param name="tempDir">Name of the directory to create a temp folder in (blank is current directory)</param>
+ /// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
+ /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
+ /// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
+ public bool PopulateFromDir(string basePath, Hash omitFromScan, bool bare, bool archivesAsFiles, SkipFileType skipFileType,
+ bool addBlanks, bool addDate, string tempDir, bool copyFiles, string headerToCheckAgainst, bool chdsAsFiles)
+ {
+ // If the description is defined but not the name, set the name from the description
+ if (String.IsNullOrWhiteSpace(Name) && !String.IsNullOrWhiteSpace(Description))
+ {
+ Name = Description;
+ }
+
+ // If the name is defined but not the description, set the description from the name
+ else if (!String.IsNullOrWhiteSpace(Name) && String.IsNullOrWhiteSpace(Description))
+ {
+ Description = Name + (bare ? "" : " (" + Date + ")");
+ }
+
+ // If neither the name nor the description is defined, set both based on the base path
+ else if (String.IsNullOrWhiteSpace(Name) && String.IsNullOrWhiteSpace(Description))
+ {
+ string[] splitpath = basePath.Split(Path.DirectorySeparatorChar);
+ Name = String.IsNullOrWhiteSpace(splitpath.Last()) ? splitpath[splitpath.Length - 2] : splitpath.Last();
+ Description = Name + (bare ? "" : " (" + Date + ")");
+ }
+
+ // Clean the temp directory path
+ tempDir = Utilities.EnsureTempDirectory(tempDir);
+
+ // Process the input
+ if (Directory.Exists(basePath))
+ {
+ Globals.Logger.Verbose("Folder found: {0}", basePath);
+
+ // Process the files in the main folder or any subfolder
+ List<string> files = Directory.EnumerateFiles(basePath, "*", SearchOption.AllDirectories).ToList();
+ Parallel.ForEach(files, Globals.ParallelOptions, item =>
+ {
+ CheckFileForHashes(item, basePath, omitFromScan, bare, archivesAsFiles, skipFileType,
+ addBlanks, addDate, tempDir, copyFiles, headerToCheckAgainst, chdsAsFiles);
+ });
+
+ // Now find all folders that are empty, if we are supposed to
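+ // (Blank entries are skipped for Romba inputs, since a depot of hash-named files has no meaningful empty folders)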
+ if (!Romba && addBlanks)
+ {
+ List<string> empties = Utilities.GetEmptyDirectories(basePath).ToList();
+ Parallel.ForEach(empties, Globals.ParallelOptions, dir =>
+ {
+ // Get the full path for the directory
+ string fulldir = Path.GetFullPath(dir);
+
+ // Set the temporary variables
+ string gamename = "";
+ string romname = "";
+
+ // If we have a SuperDAT, we want anything that's not the base path as the game, and the file as the rom
+ if (Type == "SuperDAT")
+ {
+ gamename = fulldir.Remove(0, basePath.Length + 1);
+ romname = "_";
+ }
+
+ // Otherwise, we want just the top level folder as the game, and the file as everything else
+ else
+ {
+ gamename = fulldir.Remove(0, basePath.Length + 1).Split(Path.DirectorySeparatorChar)[0];
+ romname = Path.Combine(fulldir.Remove(0, basePath.Length + 1 + gamename.Length), "_");
+ }
+
+ // Sanitize the names
+ if (gamename.StartsWith(Path.DirectorySeparatorChar.ToString()))
+ {
+ gamename = gamename.Substring(1);
+ }
+ if (gamename.EndsWith(Path.DirectorySeparatorChar.ToString()))
+ {
+ gamename = gamename.Substring(0, gamename.Length - 1);
+ }
+ if (romname.StartsWith(Path.DirectorySeparatorChar.ToString()))
+ {
+ romname = romname.Substring(1);
+ }
+ if (romname.EndsWith(Path.DirectorySeparatorChar.ToString()))
+ {
+ romname = romname.Substring(0, romname.Length - 1);
+ }
+
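+ // Blank entries carry no hashes, so they are filed under the special "null" key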
+ Globals.Logger.Verbose("Adding blank empty folder: {0}", gamename);
+ this["null"].Add(new Rom(romname, gamename, omitFromScan));
+ });
+ }
+ }
+ else if (File.Exists(basePath))
+ {
+ CheckFileForHashes(basePath, Path.GetDirectoryName(Path.GetDirectoryName(basePath)), omitFromScan, bare, archivesAsFiles,
+ skipFileType, addBlanks, addDate, tempDir, copyFiles, headerToCheckAgainst, chdsAsFiles);
+ }
+
+ // Now that we're done, delete the temp folder (if it's not the default)
+ Globals.Logger.User("Cleaning temp folder");
+ if (tempDir != Path.GetTempPath())
+ {
+ Utilities.TryDeleteDirectory(tempDir);
+ }
+
+ return true;
+ }
+
+ /// <summary>
+ /// Check a given file for hashes, based on current settings
+ /// </summary>
+ /// <param name="item">Filename of the item to be checked</param>
+ /// <param name="basePath">Base folder to be used in creating the DAT</param>
+ /// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
+ /// <param name="bare">True if the date should be omitted from the DAT, false otherwise</param>
+ /// <param name="archivesAsFiles">True if archives should be treated as files, false otherwise</param>
+ /// <param name="skipFileType">Type of files that should be skipped</param>
+ /// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
+ /// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
+ /// <param name="tempDir">Name of the directory to create a temp folder in (blank is current directory)</param>
+ /// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
+ /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
+ /// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
+ private void CheckFileForHashes(string item, string basePath, Hash omitFromScan, bool bare, bool archivesAsFiles,
+ SkipFileType skipFileType, bool addBlanks, bool addDate, string tempDir, bool copyFiles, string headerToCheckAgainst, bool chdsAsFiles)
+ {
+ // Special case for if we are in Romba mode (all names are supposed to be SHA-1 hashes)
+ if (Romba)
+ {
+ GZipArchive gzarc = new GZipArchive(item);
+ BaseFile baseFile = gzarc.GetTorrentGZFileInfo();
+
+ // If the rom is valid, write it out
+ if (baseFile != null && baseFile.Filename != null)
+ {
+ // Add the list if it doesn't exist already
+ Rom rom = new Rom(baseFile);
+ Add(Utilities.GetKeyFromDatItem(rom, SortedBy.CRC), rom);
+ Globals.Logger.User("File added: {0}", Path.GetFileNameWithoutExtension(item) + Environment.NewLine);
+ }
+ else
+ {
+ Globals.Logger.User("File not added: {0}", Path.GetFileNameWithoutExtension(item) + Environment.NewLine);
+ return;
+ }
+
+ return;
+ }
+
+ // If we're copying files, copy it first and get the new filename
+ string newItem = item;
+ string newBasePath = basePath;
+ if (copyFiles)
+ {
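+ // Copy into a GUID-named subfolder of the temp directory so parallel scans cannot collide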
+ newBasePath = Path.Combine(tempDir, Guid.NewGuid().ToString());
+ newItem = Path.GetFullPath(Path.Combine(newBasePath, Path.GetFullPath(item).Remove(0, basePath.Length + 1)));
+ Utilities.TryCreateDirectory(Path.GetDirectoryName(newItem));
+ File.Copy(item, newItem, true);
+ }
+
+ // Initialize possible archive variables
+ BaseArchive archive = Utilities.GetArchive(newItem);
+ List<BaseFile> extracted = null;
+
+ // If we have an archive and we're supposed to scan it
+ if (archive != null && !archivesAsFiles)
+ {
+ extracted = archive.GetChildren(omitFromScan: omitFromScan, date: addDate);
+ }
+
+ // If the file should be skipped based on type, do so now
+ if ((extracted != null && skipFileType == SkipFileType.Archive)
+ || (extracted == null && skipFileType == SkipFileType.File))
+ {
+ return;
+ }
+
+ // If the extracted list is null, just scan the item itself
+ if (extracted == null)
+ {
+ ProcessFile(newItem, "", newBasePath, omitFromScan, addDate, headerToCheckAgainst, chdsAsFiles);
+ }
+ // Otherwise, add all of the found items
+ else
+ {
+ // First take care of the found items
+ Parallel.ForEach(extracted, Globals.ParallelOptions, rom =>
+ {
+ DatItem datItem = Utilities.GetDatItem(rom);
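+ // The parent name is the archive's path relative to the base path, with the archive extension stripped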
+ ProcessFileHelper(newItem,
+ datItem,
+ basePath,
+ (Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item));
+ });
+
+ // Then, if we're looking for blanks, get all of the blank folders and add them
+ if (addBlanks)
+ {
+ List<string> empties = new List<string>();
+
+ // Now get all blank folders from the archive
+ if (archive != null)
+ {
+ empties = archive.GetEmptyFolders();
+ }
+
+ // Add all of the found empties to the DAT
+ Parallel.ForEach(empties, Globals.ParallelOptions, empty =>
+ {
+ Rom emptyRom = new Rom(Path.Combine(empty, "_"), newItem, omitFromScan);
+ ProcessFileHelper(newItem,
+ emptyRom,
+ basePath,
+ (Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item));
+ });
+ }
+ }
+
+ // If we copied the file to a temp location, clean up that temp copy now
+ if (copyFiles && item != newItem)
+ {
+ Utilities.TryDeleteDirectory(newBasePath);
+ }
+ }
+
+ /// <summary>
+ /// Process a single file as a file
+ /// </summary>
+ /// <param name="item">File to be added</param>
+ /// <param name="parent">Parent game to be used</param>
+ /// <param name="basePath">Path that represents the parent directory</param>
+ /// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
+ /// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
+ /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
+ /// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
+ private void ProcessFile(string item, string parent, string basePath, Hash omitFromScan,
+ bool addDate, string headerToCheckAgainst, bool chdsAsFiles)
+ {
+ Globals.Logger.Verbose("'{0}' treated like a file", Path.GetFileName(item));
+ BaseFile baseFile = Utilities.GetFileInfo(item, omitFromScan: omitFromScan, date: addDate, header: headerToCheckAgainst, chdsAsFiles: chdsAsFiles);
+ ProcessFileHelper(item, Utilities.GetDatItem(baseFile), basePath, parent);
+ }
+
+ /// <summary>
+ /// Process a single file as a file (with found Rom data)
+ /// </summary>
+ /// <param name="item">File to be added</param>
+ /// <param name="datItem">Rom data to be used to write to file</param>
+ /// <param name="basepath">Path that represents the parent directory</param>
+ /// <param name="parent">Parent game to be used</param>
+ private void ProcessFileHelper(string item, DatItem datItem, string basepath, string parent)
+ {
+ // If we somehow got something other than a Rom or Disk, cancel out
+ if (datItem.Type != ItemType.Rom && datItem.Type != ItemType.Disk)
+ {
+ return;
+ }
+
+ try
+ {
+ // If the basepath doesn't end with a directory separator, append one
+ if (!basepath.EndsWith(Path.DirectorySeparatorChar.ToString()))
+ {
+ basepath += Path.DirectorySeparatorChar.ToString();
+ }
+
+ // Make sure we have the full item path
+ item = Path.GetFullPath(item);
+
+ // Process the item to sanitize names based on input
+ SetDatItemInfo(datItem, item, parent, basepath);
+
+ // Add the file information to the DAT
+ string key = Utilities.GetKeyFromDatItem(datItem, SortedBy.CRC);
+ Add(key, datItem);
+
+ Globals.Logger.User("File added: {0}", datItem.Name + Environment.NewLine);
+ }
+ catch (IOException ex)
+ {
+ Globals.Logger.Error(ex.ToString());
+ return;
+ }
+ }
+
+ /// <summary>
+ /// Set proper Game and Rom names from user inputs
+ /// </summary>
+ /// <param name="datItem">DatItem representing the input file</param>
+ /// <param name="item">Item name to use</param>
+ /// <param name="parent">Parent name to use</param>
+ /// <param name="basepath">Base path to use</param>
+ private void SetDatItemInfo(DatItem datItem, string item, string parent, string basepath)
+ {
+ // Get the data to be added as game and item names
+ string gamename = "";
+ string romname = "";
+
+ // If the parent is blank, then we have a non-archive file
+ if (String.IsNullOrWhiteSpace(parent))
+ {
+ // If we have a SuperDAT, we want anything that's not the base path as the game, and the file as the rom
+ if (Type == "SuperDAT")
+ {
+ gamename = Path.GetDirectoryName(item.Remove(0, basepath.Length));
+ romname = Path.GetFileName(item);
+ }
+
+ // Otherwise, we want just the top level folder as the game, and the file as everything else
+ else
+ {
+ gamename = item.Remove(0, basepath.Length).Split(Path.DirectorySeparatorChar)[0];
+ romname = item.Remove(0, (Path.Combine(basepath, gamename).Length));
+ }
+ }
+
+ // Otherwise, we assume that we have an archive
+ else
+ {
+ // If we have a SuperDAT, we want the archive name as the game, and the file as everything else (?)
+ if (Type == "SuperDAT")
+ {
+ gamename = parent;
+ romname = datItem.Name;
+ }
+
+ // Otherwise, we want the archive name as the game, and the file as everything else
+ else
+ {
+ gamename = parent;
+ romname = datItem.Name;
+ }
+ }
+
+ // Sanitize the names
+ if (romname == null)
+ {
+ romname = "";
+ }
+ if (gamename.StartsWith(Path.DirectorySeparatorChar.ToString()))
+ {
+ gamename = gamename.Substring(1);
+ }
+ if (gamename.EndsWith(Path.DirectorySeparatorChar.ToString()))
+ {
+ gamename = gamename.Substring(0, gamename.Length - 1);
+ }
+ if (romname.StartsWith(Path.DirectorySeparatorChar.ToString()))
+ {
+ romname = romname.Substring(1);
+ }
+ if (romname.EndsWith(Path.DirectorySeparatorChar.ToString()))
+ {
+ romname = romname.Substring(0, romname.Length - 1);
+ }
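+ // A file sitting directly in the base path has no folder of its own, so use its name as the rom and file it under "Default"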
+ if (!String.IsNullOrWhiteSpace(gamename) && String.IsNullOrWhiteSpace(romname))
+ {
+ romname = gamename;
+ gamename = "Default";
+ }
+
+ // Update rom information
+ datItem.Name = romname;
+ datItem.MachineName = gamename;
+ datItem.MachineDescription = gamename;
+
+ // If we have a Disk, then the ".chd" extension needs to be removed
+ if (datItem.Type == ItemType.Disk)
+ {
+ datItem.Name = datItem.Name.Replace(".chd", "");
+ }
+ }
+
+ #endregion
+
+ #region Rebuilding and Verifying
+
+ /// <summary>
+ /// Process the DAT and find all matches in input files and folders assuming they're a depot
+ /// </summary>
+ /// <param name="inputs">List of input files/folders to check</param>
+ /// <param name="outDir">Output directory to use to build to</param>
+ /// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
+ /// <param name="delete">True if input files should be deleted, false otherwise</param>
+ /// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
+ /// <param name="outputFormat">Output format that files should be written to</param>
+ /// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param>
+ /// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
+ /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
+ /// <returns>True if rebuilding was a success, false otherwise</returns>
+ public bool RebuildDepot(List<string> inputs, string outDir, bool date, bool delete,
+ bool inverse, OutputFormat outputFormat, bool romba, bool updateDat, string headerToCheckAgainst)
+ {
+ #region Perform setup
+
+ // If the DAT is not populated and inverse is not set, inform the user and quit
+ if (Count == 0 && !inverse)
+ {
+ Globals.Logger.User("No entries were found to rebuild, exiting...");
+ return false;
+ }
+
+ // Check that the output directory exists
+ outDir = Utilities.EnsureOutputDirectory(outDir, create: true);
+
+ // Now we want to get forcepack flag if it's not overridden
+ if (outputFormat == OutputFormat.Folder && ForcePacking != ForcePacking.None)
+ {
+ switch (ForcePacking)
+ {
+ case ForcePacking.Zip:
+ outputFormat = OutputFormat.TorrentZip;
+ break;
+ case ForcePacking.Unzip:
+ outputFormat = OutputFormat.Folder;
+ break;
+ }
+ }
+
+ // Preload the Skipper list
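+ // (Touching the static list here forces the skipper definitions to load before the rebuild loop needs them)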
+ int listcount = Skipper.List.Count;
+
+ #endregion
+
+ bool success = true;
+
+ #region Rebuild from depots in order
+
+ string format = "";
+ switch (outputFormat)
+ {
+ case OutputFormat.Folder:
+ format = "directory";
+ break;
+ case OutputFormat.TapeArchive:
+ format = "TAR";
+ break;
+ case OutputFormat.Torrent7Zip:
+ format = "Torrent7Z";
+ break;
+ case OutputFormat.TorrentGzip:
+ format = "TorrentGZ";
+ break;
+ case OutputFormat.TorrentLRZip:
+ format = "TorrentLRZ";
+ break;
+ case OutputFormat.TorrentRar:
+ format = "TorrentRAR";
+ break;
+ case OutputFormat.TorrentXZ:
+ format = "TorrentXZ";
+ break;
+ case OutputFormat.TorrentZip:
+ format = "TorrentZip";
+ break;
+ }
+
+ InternalStopwatch watch = new InternalStopwatch("Rebuilding all files to {0}", format);
+
+ // Now loop through and get only directories from the input paths
+ List<string> directories = new List<string>();
+ Parallel.ForEach(inputs, Globals.ParallelOptions, input =>
+ {
+ // Add to the list if the input is a directory
+ if (Directory.Exists(input))
+ {
+ Globals.Logger.Verbose("Adding depot: {0}", input);
+ lock (directories)
+ {
+ directories.Add(input);
+ }
+ }
+ });
+
+ // If we don't have any directories, we want to exit
+ if (directories.Count == 0)
+ {
+ return success;
+ }
+
+ // Now that we have a list of depots, we want to sort the input DAT by SHA-1
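+ // (Depot paths are derived from SHA-1, so SHA-1 buckets map directly onto depot locations)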
+ BucketBy(SortedBy.SHA1, DedupeType.None);
+
+ // Then we want to loop through each of the hashes and see if we can rebuild
+ List<string> hashes = Keys;
+ foreach (string hash in hashes)
+ {
+ // Pre-empt any issues that could arise from string length
+ if (hash.Length != Constants.SHA1Length)
+ {
+ continue;
+ }
+
+ Globals.Logger.User("Checking hash '{0}'", hash);
+
+ // Get the extension path for the hash
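+ // (A Romba depot nests each file under folders named for the leading bytes of its SHA-1)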
+ string subpath = Utilities.GetRombaPath(hash);
+
+ // Find the first depot that includes the hash
+ string foundpath = null;
+ foreach (string directory in directories)
+ {
+ if (File.Exists(Path.Combine(directory, subpath)))
+ {
+ foundpath = Path.Combine(directory, subpath);
+ break;
+ }
+ }
+
+ // If we didn't find a path, then we continue
+ if (foundpath == null)
+ {
+ continue;
+ }
+
+ // If we have a path, we want to try to get the rom information
+ GZipArchive archive = new GZipArchive(foundpath);
+ BaseFile fileinfo = archive.GetTorrentGZFileInfo();
+
+ // If the file information is null, then we continue
+ if (fileinfo == null)
+ {
+ continue;
+ }
// Otherwise, we rebuild that file to all locations that we need to
if (this[hash][0].Type == ItemType.Disk)
@@ -3587,1521 +3810,1521 @@ namespace SabreTools.Library.DatFiles
RebuildIndividualFile(new Rom(fileinfo), foundpath, outDir, date, inverse, outputFormat, romba,
updateDat, false /* isZip */, headerToCheckAgainst);
}
- }
-
- watch.Stop();
-
- #endregion
-
- // If we're updating the DAT, output to the rebuild directory
- if (updateDat)
- {
- FileName = "fixDAT_" + FileName;
- Name = "fixDAT_" + Name;
- Description = "fixDAT_" + Description;
- RemoveMarkedItems();
- Write(outDir);
- }
-
- return success;
- }
-
- ///
- /// Process the DAT and find all matches in input files and folders
- ///
- /// List of input files/folders to check
- /// Output directory to use to build to
- /// True to enable external scanning of archives, false otherwise
- /// True if the date from the DAT should be used if available, false otherwise
- /// True if input files should be deleted, false otherwise
- /// True if the DAT should be used as a filter instead of a template, false otherwise
- /// Output format that files should be written to
- /// True if files should be output in Romba depot folders, false otherwise
- /// ArchiveScanLevel representing the archive handling levels
- /// True if the updated DAT should be output, false otherwise
- /// Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise
- /// True if CHDs should be treated like regular files, false otherwise
- /// True if rebuilding was a success, false otherwise
- public bool RebuildGeneric(List inputs, string outDir, bool quickScan, bool date,
- bool delete, bool inverse, OutputFormat outputFormat, bool romba, ArchiveScanLevel archiveScanLevel, bool updateDat,
- string headerToCheckAgainst, bool chdsAsFiles)
- {
- #region Perform setup
-
- // If the DAT is not populated and inverse is not set, inform the user and quit
- if (Count == 0 && !inverse)
- {
- Globals.Logger.User("No entries were found to rebuild, exiting...");
- return false;
- }
-
- // Check that the output directory exists
- if (!Directory.Exists(outDir))
- {
- Directory.CreateDirectory(outDir);
- outDir = Path.GetFullPath(outDir);
- }
-
- // Now we want to get forcepack flag if it's not overridden
- if (outputFormat == OutputFormat.Folder && ForcePacking != ForcePacking.None)
- {
- switch (ForcePacking)
- {
- case ForcePacking.Zip:
- outputFormat = OutputFormat.TorrentZip;
- break;
- case ForcePacking.Unzip:
- outputFormat = OutputFormat.Folder;
- break;
- }
- }
-
- // Preload the Skipper list
- int listcount = Skipper.List.Count;
-
- #endregion
-
- bool success = true;
-
- #region Rebuild from sources in order
-
- string format = "";
- switch (outputFormat)
- {
- case OutputFormat.Folder:
- format = "directory";
- break;
- case OutputFormat.TapeArchive:
- format = "TAR";
- break;
- case OutputFormat.Torrent7Zip:
- format = "Torrent7Z";
- break;
- case OutputFormat.TorrentGzip:
- format = "TorrentGZ";
- break;
- case OutputFormat.TorrentLRZip:
- format = "TorrentLRZ";
- break;
- case OutputFormat.TorrentRar:
- format = "TorrentRAR";
- break;
- case OutputFormat.TorrentXZ:
- format = "TorrentXZ";
- break;
- case OutputFormat.TorrentZip:
- format = "TorrentZip";
- break;
- }
-
- InternalStopwatch watch = new InternalStopwatch("Rebuilding all files to {0}", format);
-
- // Now loop through all of the files in all of the inputs
- foreach (string input in inputs)
- {
- // If the input is a file
- if (File.Exists(input))
- {
- Globals.Logger.User("Checking file: {0}", input);
- RebuildGenericHelper(input, outDir, quickScan, date, delete, inverse,
- outputFormat, romba, archiveScanLevel, updateDat, headerToCheckAgainst, chdsAsFiles);
- }
-
- // If the input is a directory
- else if (Directory.Exists(input))
- {
- Globals.Logger.Verbose("Checking directory: {0}", input);
- foreach (string file in Directory.EnumerateFiles(input, "*", SearchOption.AllDirectories))
- {
- Globals.Logger.User("Checking file: {0}", file);
- RebuildGenericHelper(file, outDir, quickScan, date, delete, inverse,
- outputFormat, romba, archiveScanLevel, updateDat, headerToCheckAgainst, chdsAsFiles);
- }
- }
- }
-
- watch.Stop();
-
- #endregion
-
- // If we're updating the DAT, output to the rebuild directory
- if (updateDat)
- {
- FileName = "fixDAT_" + FileName;
- Name = "fixDAT_" + Name;
- Description = "fixDAT_" + Description;
- RemoveMarkedItems();
- Write(outDir);
- }
-
- return success;
- }
-
- ///
- /// Attempt to add a file to the output if it matches
- ///
- /// Name of the file to process
- /// Output directory to use to build to
- /// True to enable external scanning of archives, false otherwise
- /// True if the date from the DAT should be used if available, false otherwise
- /// True if input files should be deleted, false otherwise
- /// True if the DAT should be used as a filter instead of a template, false otherwise
- /// Output format that files should be written to
- /// True if files should be output in Romba depot folders, false otherwise
- /// ArchiveScanLevel representing the archive handling levels
- /// True if the updated DAT should be output, false otherwise
- /// Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise
- /// True if CHDs should be treated like regular files, false otherwise
- private void RebuildGenericHelper(string file, string outDir, bool quickScan, bool date,
- bool delete, bool inverse, OutputFormat outputFormat, bool romba, ArchiveScanLevel archiveScanLevel, bool updateDat,
- string headerToCheckAgainst, bool chdsAsFiles)
- {
- // If we somehow have a null filename, return
- if (file == null)
- {
- return;
- }
-
- // Set the deletion variables
- bool usedExternally = false;
- bool usedInternally = false;
-
- // Get the required scanning level for the file
- Utilities.GetInternalExternalProcess(file, archiveScanLevel, out bool shouldExternalProcess, out bool shouldInternalProcess);
-
- // If we're supposed to scan the file externally
- if (shouldExternalProcess)
- {
- // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
- BaseFile fileinfo = Utilities.GetFileInfo(file, omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes),
- header: headerToCheckAgainst, chdsAsFiles: chdsAsFiles);
- DatItem datItem = null;
- if (fileinfo.Type == FileType.CHD)
- {
- datItem = new Disk(fileinfo);
- }
- else if (fileinfo.Type == FileType.None)
- {
- datItem = new Rom(fileinfo);
- }
-
- usedExternally = RebuildIndividualFile(datItem, file, outDir, date, inverse, outputFormat,
- romba, updateDat, null /* isZip */, headerToCheckAgainst);
- }
-
- // If we're supposed to scan the file internally
- if (shouldInternalProcess)
- {
- // Create an empty list of BaseFile for archive entries
- List entries = null;
- usedInternally = true;
-
- // Get the TGZ status for later
- GZipArchive tgz = new GZipArchive(file);
- bool isTorrentGzip = tgz.IsTorrent();
-
- // Get the base archive first
- BaseArchive archive = Utilities.GetArchive(file);
-
- // Now get all extracted items from the archive
- if (archive != null)
- {
- // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
- entries = archive.GetChildren(omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes), date: date);
- }
-
- // If the entries list is null, we encountered an error and should scan exteranlly
- if (entries == null && File.Exists(file))
- {
- // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
- BaseFile fileinfo = Utilities.GetFileInfo(file, omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes), chdsAsFiles: chdsAsFiles);
- DatItem datItem = null;
- if (fileinfo.Type == FileType.CHD)
- {
- datItem = new Disk(fileinfo);
- }
- else if (fileinfo.Type == FileType.None)
- {
- datItem = new Rom(fileinfo);
- }
-
- usedExternally = RebuildIndividualFile(datItem, file, outDir, date, inverse, outputFormat,
- romba, updateDat, null /* isZip */, headerToCheckAgainst);
- }
- // Otherwise, loop through the entries and try to match
- else
- {
- foreach (BaseFile entry in entries)
- {
- DatItem datItem = Utilities.GetDatItem(entry);
- usedInternally &= RebuildIndividualFile(datItem, file, outDir, date, inverse, outputFormat,
- romba, updateDat, !isTorrentGzip /* isZip */, headerToCheckAgainst);
- }
- }
- }
-
- // If we are supposed to delete the file, do so
- if (delete && (usedExternally || usedInternally))
- {
- Utilities.TryDeleteFile(file);
- }
- }
-
- ///
- /// Find duplicates and rebuild individual files to output
- ///
- /// Information for the current file to rebuild from
- /// Name of the file to process
- /// Output directory to use to build to
- /// True if the date from the DAT should be used if available, false otherwise
- /// True if the DAT should be used as a filter instead of a template, false otherwise
- /// Output format that files should be written to
- /// True if files should be output in Romba depot folders, false otherwise
- /// True if the updated DAT should be output, false otherwise
- /// True if the input file is an archive, false if the file is TGZ, null otherwise
- /// Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise
- /// True if the file was able to be rebuilt, false otherwise
- private bool RebuildIndividualFile(DatItem datItem, string file, string outDir, bool date,
- bool inverse, OutputFormat outputFormat, bool romba, bool updateDat, bool? isZip, string headerToCheckAgainst)
- {
- // Set the output value
- bool rebuilt = true;
-
- // If the DatItem is a Disk, force rebuilding to a folder except if TGZ
- if (datItem.Type == ItemType.Disk && outputFormat != OutputFormat.TorrentGzip)
- {
- outputFormat = OutputFormat.Folder;
- }
-
- // Prepopluate a few key strings based on DatItem type
- string crc = null;
- string sha1 = null;
- if (datItem.Type == ItemType.Rom)
- {
- crc = ((Rom)datItem).CRC;
- sha1 = ((Rom)datItem).SHA1;
- }
- else if (datItem.Type == ItemType.Disk)
- {
- crc = "";
- sha1 = ((Disk)datItem).SHA1;
- }
-
- // Find if the file has duplicates in the DAT
- bool hasDuplicates = datItem.HasDuplicates(this);
-
- // If it has duplicates and we're not filtering, rebuild it
- if (hasDuplicates && !inverse)
- {
- // Get the list of duplicates to rebuild to
- List dupes = datItem.GetDuplicates(this, remove: updateDat);
-
- // If we don't have any duplicates, continue
- if (dupes.Count == 0)
- {
- return false;
- }
-
- // If we have a very specific TGZ->TGZ case, just copy it accordingly
- GZipArchive tgz = new GZipArchive(file);
- BaseFile rom = tgz.GetTorrentGZFileInfo();
- if (isZip == false && rom != null && outputFormat == OutputFormat.TorrentGzip)
- {
- Globals.Logger.User("Matches found for '{0}', rebuilding accordingly...", Path.GetFileName(datItem.Name));
-
- // Get the proper output path
- if (romba)
- {
- outDir = Path.Combine(outDir, Utilities.GetRombaPath(sha1));
- }
- else
- {
- outDir = Path.Combine(outDir, sha1 + ".gz");
- }
-
- // Make sure the output folder is created
- Directory.CreateDirectory(Path.GetDirectoryName(outDir));
-
- // Now copy the file over
- try
- {
- File.Copy(file, outDir);
- rebuilt &= true;
- }
- catch
- {
- rebuilt = false;
- }
-
- return rebuilt;
- }
-
- // Get a generic stream for the file
- Stream fileStream = new MemoryStream();
-
- // If we have a zipfile, extract the stream to memory
- if (isZip != null)
- {
- string realName = null;
- BaseArchive archive = Utilities.GetArchive(file);
- if (archive != null)
- {
- (fileStream, realName) = archive.CopyToStream(datItem.Name);
- }
- }
- // Otherwise, just open the filestream
- else
- {
- fileStream = Utilities.TryOpenRead(file);
- }
-
- // If the stream is null, then continue
- if (fileStream == null)
- {
- return false;
- }
-
- // Seek to the beginning of the stream
- fileStream.Seek(0, SeekOrigin.Begin);
-
- Globals.Logger.User("Matches found for '{0}', rebuilding accordingly...", Path.GetFileName(datItem.Name));
- rebuilt = true;
-
- // Now loop through the list and rebuild accordingly
- foreach (DatItem item in dupes)
- {
- // Get the output archive, if possible
- Folder outputArchive = Utilities.GetArchive(outputFormat);
-
- // Now rebuild to the output file
- outputArchive.Write(fileStream, outDir, (Rom)item, date: date, romba: romba);
- }
-
- // Close the input stream
- fileStream?.Dispose();
- }
-
- // If we have no duplicates and we're filtering, rebuild it
- else if (!hasDuplicates && inverse)
- {
- string machinename = null;
-
- // If we have a very specific TGZ->TGZ case, just copy it accordingly
- GZipArchive tgz = new GZipArchive(file);
- BaseFile rom = tgz.GetTorrentGZFileInfo();
- if (isZip == false && rom != null && outputFormat == OutputFormat.TorrentGzip)
- {
- Globals.Logger.User("Matches found for '{0}', rebuilding accordingly...", Path.GetFileName(datItem.Name));
-
- // Get the proper output path
- if (romba)
- {
- outDir = Path.Combine(outDir, Utilities.GetRombaPath(sha1));
- }
- else
- {
- outDir = Path.Combine(outDir, sha1 + ".gz");
- }
-
- // Make sure the output folder is created
- Directory.CreateDirectory(Path.GetDirectoryName(outDir));
-
- // Now copy the file over
- try
- {
- File.Copy(file, outDir);
- rebuilt &= true;
- }
- catch
- {
- rebuilt = false;
- }
-
- return rebuilt;
- }
-
- // Get a generic stream for the file
- Stream fileStream = new MemoryStream();
-
- // If we have a zipfile, extract the stream to memory
- if (isZip != null)
- {
- string realName = null;
- BaseArchive archive = Utilities.GetArchive(file);
- if (archive != null)
- {
- (fileStream, realName) = archive.CopyToStream(datItem.Name);
- }
- }
- // Otherwise, just open the filestream
- else
- {
- fileStream = Utilities.TryOpenRead(file);
- }
-
- // If the stream is null, then continue
- if (fileStream == null)
- {
- return false;
- }
-
- // Get the item from the current file
- Rom item = new Rom(Utilities.GetStreamInfo(fileStream, fileStream.Length, keepReadOpen: true));
- item.MachineName = Path.GetFileNameWithoutExtension(item.Name);
- item.MachineDescription = Path.GetFileNameWithoutExtension(item.Name);
-
- // If we are coming from an archive, set the correct machine name
- if (machinename != null)
- {
- item.MachineName = machinename;
- item.MachineDescription = machinename;
- }
-
- Globals.Logger.User("No matches found for '{0}', rebuilding accordingly from inverse flag...", Path.GetFileName(datItem.Name));
-
- // Get the output archive, if possible
- Folder outputArchive = Utilities.GetArchive(outputFormat);
-
- // Now rebuild to the output file
- if (outputArchive == null)
- {
- string outfile = Path.Combine(outDir, Utilities.RemovePathUnsafeCharacters(item.MachineName), item.Name);
-
- // Make sure the output folder is created
- Directory.CreateDirectory(Path.GetDirectoryName(outfile));
-
- // Now copy the file over
- try
- {
- FileStream writeStream = Utilities.TryCreate(outfile);
-
- // Copy the input stream to the output
- int bufferSize = 4096 * 128;
- byte[] ibuffer = new byte[bufferSize];
- int ilen;
- while ((ilen = fileStream.Read(ibuffer, 0, bufferSize)) > 0)
- {
- writeStream.Write(ibuffer, 0, ilen);
- writeStream.Flush();
- }
- writeStream.Dispose();
-
- if (date && !String.IsNullOrWhiteSpace(item.Date))
- {
- File.SetCreationTime(outfile, DateTime.Parse(item.Date));
- }
-
- rebuilt &= true;
- }
- catch
- {
- rebuilt &= false;
- }
- }
- else
- {
- rebuilt &= outputArchive.Write(fileStream, outDir, item, date: date, romba: romba);
- }
-
- // Close the input stream
- fileStream?.Dispose();
- }
-
- // Now we want to take care of headers, if applicable
- if (headerToCheckAgainst != null)
- {
- // Get a generic stream for the file
- Stream fileStream = new MemoryStream();
-
- // If we have a zipfile, extract the stream to memory
- if (isZip != null)
- {
- string realName = null;
- BaseArchive archive = Utilities.GetArchive(file);
- if (archive != null)
- {
- (fileStream, realName) = archive.CopyToStream(datItem.Name);
- }
- }
- // Otherwise, just open the filestream
- else
- {
- fileStream = Utilities.TryOpenRead(file);
- }
-
- // If the stream is null, then continue
- if (fileStream == null)
- {
- return false;
- }
-
- // Check to see if we have a matching header first
- SkipperRule rule = Skipper.GetMatchingRule(fileStream, Path.GetFileNameWithoutExtension(headerToCheckAgainst));
-
- // If there's a match, create the new file to write
- if (rule.Tests != null && rule.Tests.Count != 0)
- {
- // If the file could be transformed correctly
- MemoryStream transformStream = new MemoryStream();
- if (rule.TransformStream(fileStream, transformStream, keepReadOpen: true, keepWriteOpen: true))
- {
- // Get the file informations that we will be using
- Rom headerless = new Rom(Utilities.GetStreamInfo(transformStream, transformStream.Length, keepReadOpen: true));
-
- // Find if the file has duplicates in the DAT
- hasDuplicates = headerless.HasDuplicates(this);
-
- // If it has duplicates and we're not filtering, rebuild it
- if (hasDuplicates && !inverse)
- {
- // Get the list of duplicates to rebuild to
- List dupes = headerless.GetDuplicates(this, remove: updateDat);
-
- // If we don't have any duplicates, continue
- if (dupes.Count == 0)
- {
- return false;
- }
-
- Globals.Logger.User("Headerless matches found for '{0}', rebuilding accordingly...", Path.GetFileName(datItem.Name));
- rebuilt = true;
-
- // Now loop through the list and rebuild accordingly
- foreach (DatItem item in dupes)
- {
- // Create a headered item to use as well
- datItem.CopyMachineInformation(item);
- datItem.Name += "_" + crc;
-
- // If either copy succeeds, then we want to set rebuilt to true
- bool eitherSuccess = false;
-
- // Get the output archive, if possible
- Folder outputArchive = Utilities.GetArchive(outputFormat);
-
- // Now rebuild to the output file
- eitherSuccess |= outputArchive.Write(transformStream, outDir, (Rom)item, date: date, romba: romba);
- eitherSuccess |= outputArchive.Write(fileStream, outDir, (Rom)datItem, date: date, romba: romba);
-
- // Now add the success of either rebuild
- rebuilt &= eitherSuccess;
- }
- }
- }
-
- // Dispose of the stream
- transformStream?.Dispose();
- }
-
- // Dispose of the stream
- fileStream?.Dispose();
- }
-
- return rebuilt;
- }
-
- ///
- /// Process the DAT and verify from the depots
- ///
- /// List of input directories to compare against
- /// Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise
- /// True if verification was a success, false otherwise
- public bool VerifyDepot(List inputs, string headerToCheckAgainst)
- {
- bool success = true;
-
- InternalStopwatch watch = new InternalStopwatch("Verifying all from supplied depots");
-
- // Now loop through and get only directories from the input paths
- List directories = new List();
- foreach (string input in inputs)
- {
- // Add to the list if the input is a directory
- if (Directory.Exists(input))
- {
- Globals.Logger.Verbose("Adding depot: {0}", input);
- directories.Add(input);
- }
- }
-
- // If we don't have any directories, we want to exit
- if (directories.Count == 0)
- {
- return success;
- }
-
- // Now that we have a list of depots, we want to sort the input DAT by SHA-1
- BucketBy(SortedBy.SHA1, DedupeType.None);
-
- // Then we want to loop through each of the hashes and see if we can rebuild
- List hashes = Keys;
- foreach (string hash in hashes)
- {
- // Pre-empt any issues that could arise from string length
- if (hash.Length != Constants.SHA1Length)
- {
- continue;
- }
-
- Globals.Logger.User("Checking hash '{0}'", hash);
-
- // Get the extension path for the hash
- string subpath = Utilities.GetRombaPath(hash);
-
- // Find the first depot that includes the hash
- string foundpath = null;
- foreach (string directory in directories)
- {
- if (File.Exists(Path.Combine(directory, subpath)))
- {
- foundpath = Path.Combine(directory, subpath);
- break;
- }
- }
-
- // If we didn't find a path, then we continue
- if (foundpath == null)
- {
- continue;
- }
-
- // If we have a path, we want to try to get the rom information
- GZipArchive tgz = new GZipArchive(foundpath);
- BaseFile fileinfo = tgz.GetTorrentGZFileInfo();
-
- // If the file information is null, then we continue
- if (fileinfo == null)
- {
- continue;
- }
-
- // Now we want to remove all duplicates from the DAT
- new Rom(fileinfo).GetDuplicates(this, remove: true)
+ }
+
+ watch.Stop();
+
+ #endregion
+
+ // If we're updating the DAT, output to the rebuild directory
+ if (updateDat)
+ {
+ FileName = "fixDAT_" + FileName;
+ Name = "fixDAT_" + Name;
+ Description = "fixDAT_" + Description;
+ RemoveMarkedItems();
+ Write(outDir);
+ }
+
+ return success;
+ }
+
+ /// <summary>
+ /// Process the DAT and find all matches in input files and folders
+ /// </summary>
+ /// <param name="inputs">List of input files/folders to check</param>
+ /// <param name="outDir">Output directory to use to build to</param>
+ /// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
+ /// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
+ /// <param name="delete">True if input files should be deleted, false otherwise</param>
+ /// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
+ /// <param name="outputFormat">Output format that files should be written to</param>
+ /// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param>
+ /// <param name="archiveScanLevel">ArchiveScanLevel representing the archive handling levels</param>
+ /// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
+ /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
+ /// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
+ /// <returns>True if rebuilding was a success, false otherwise</returns>
+ public bool RebuildGeneric(List<string> inputs, string outDir, bool quickScan, bool date,
+ bool delete, bool inverse, OutputFormat outputFormat, bool romba, ArchiveScanLevel archiveScanLevel, bool updateDat,
+ string headerToCheckAgainst, bool chdsAsFiles)
+ {
+ #region Perform setup
+
+ // If the DAT is not populated and inverse is not set, inform the user and quit
+ if (Count == 0 && !inverse)
+ {
+ Globals.Logger.User("No entries were found to rebuild, exiting...");
+ return false;
+ }
+
+ // Check that the output directory exists
+ if (!Directory.Exists(outDir))
+ {
+ Directory.CreateDirectory(outDir);
+ outDir = Path.GetFullPath(outDir);
+ }
+
+ // Now we want to get forcepack flag if it's not overridden
+ if (outputFormat == OutputFormat.Folder && ForcePacking != ForcePacking.None)
+ {
+ switch (ForcePacking)
+ {
+ case ForcePacking.Zip:
+ outputFormat = OutputFormat.TorrentZip;
+ break;
+ case ForcePacking.Unzip:
+ outputFormat = OutputFormat.Folder;
+ break;
+ }
+ }
+
+ // Preload the Skipper list
+ int listcount = Skipper.List.Count;
+
+ #endregion
+
+ bool success = true;
+
+ #region Rebuild from sources in order
+
+ string format = "";
+ switch (outputFormat)
+ {
+ case OutputFormat.Folder:
+ format = "directory";
+ break;
+ case OutputFormat.TapeArchive:
+ format = "TAR";
+ break;
+ case OutputFormat.Torrent7Zip:
+ format = "Torrent7Z";
+ break;
+ case OutputFormat.TorrentGzip:
+ format = "TorrentGZ";
+ break;
+ case OutputFormat.TorrentLRZip:
+ format = "TorrentLRZ";
+ break;
+ case OutputFormat.TorrentRar:
+ format = "TorrentRAR";
+ break;
+ case OutputFormat.TorrentXZ:
+ format = "TorrentXZ";
+ break;
+ case OutputFormat.TorrentZip:
+ format = "TorrentZip";
+ break;
+ }
+
+ InternalStopwatch watch = new InternalStopwatch("Rebuilding all files to {0}", format);
+
+ // Now loop through all of the files in all of the inputs
+ foreach (string input in inputs)
+ {
+ // If the input is a file
+ if (File.Exists(input))
+ {
+ Globals.Logger.User("Checking file: {0}", input);
+ RebuildGenericHelper(input, outDir, quickScan, date, delete, inverse,
+ outputFormat, romba, archiveScanLevel, updateDat, headerToCheckAgainst, chdsAsFiles);
+ }
+
+ // If the input is a directory
+ else if (Directory.Exists(input))
+ {
+ Globals.Logger.Verbose("Checking directory: {0}", input);
+ foreach (string file in Directory.EnumerateFiles(input, "*", SearchOption.AllDirectories))
+ {
+ Globals.Logger.User("Checking file: {0}", file);
+ RebuildGenericHelper(file, outDir, quickScan, date, delete, inverse,
+ outputFormat, romba, archiveScanLevel, updateDat, headerToCheckAgainst, chdsAsFiles);
+ }
+ }
+ }
+
+ watch.Stop();
+
+ #endregion
+
+ // If we're updating the DAT, output to the rebuild directory
+ if (updateDat)
+ {
+ FileName = "fixDAT_" + FileName;
+ Name = "fixDAT_" + Name;
+ Description = "fixDAT_" + Description;
+ RemoveMarkedItems();
+ Write(outDir);
+ }
+
+ return success;
+ }
+
+ /// <summary>
+ /// Attempt to add a file to the output if it matches
+ /// </summary>
+ /// <param name="file">Name of the file to process</param>
+ /// <param name="outDir">Output directory to use to build to</param>
+ /// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
+ /// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
+ /// <param name="delete">True if input files should be deleted, false otherwise</param>
+ /// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
+ /// <param name="outputFormat">Output format that files should be written to</param>
+ /// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param>
+ /// <param name="archiveScanLevel">ArchiveScanLevel representing the archive handling levels</param>
+ /// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
+ /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
+ /// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
+ private void RebuildGenericHelper(string file, string outDir, bool quickScan, bool date,
+ bool delete, bool inverse, OutputFormat outputFormat, bool romba, ArchiveScanLevel archiveScanLevel, bool updateDat,
+ string headerToCheckAgainst, bool chdsAsFiles)
+ {
+ // If we somehow have a null filename, return
+ if (file == null)
+ {
+ return;
+ }
+
+ // Set the deletion variables
+ bool usedExternally = false;
+ bool usedInternally = false;
+
+ // Get the required scanning level for the file
+ Utilities.GetInternalExternalProcess(file, archiveScanLevel, out bool shouldExternalProcess, out bool shouldInternalProcess);
+
+ // If we're supposed to scan the file externally
+ if (shouldExternalProcess)
+ {
+ // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
+ BaseFile fileinfo = Utilities.GetFileInfo(file, omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes),
+ header: headerToCheckAgainst, chdsAsFiles: chdsAsFiles);
+ DatItem datItem = null;
+ if (fileinfo.Type == FileType.CHD)
+ {
+ datItem = new Disk(fileinfo);
+ }
+ else if (fileinfo.Type == FileType.None)
+ {
+ datItem = new Rom(fileinfo);
+ }
+
+ usedExternally = RebuildIndividualFile(datItem, file, outDir, date, inverse, outputFormat,
+ romba, updateDat, null /* isZip */, headerToCheckAgainst);
+ }
+
+ // If we're supposed to scan the file internally
+ if (shouldInternalProcess)
+ {
+ // Create an empty list of BaseFile for archive entries
+ List<BaseFile> entries = null;
+ usedInternally = true;
+
+ // Get the TGZ status for later
+ GZipArchive tgz = new GZipArchive(file);
+ bool isTorrentGzip = tgz.IsTorrent();
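+ // (TorrentGZ inputs are flagged so the rebuilder can fall back to a direct file copy for TGZ-to-TGZ output)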
+
+ // Get the base archive first
+ BaseArchive archive = Utilities.GetArchive(file);
+
+ // Now get all extracted items from the archive
+ if (archive != null)
+ {
+ // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
+ entries = archive.GetChildren(omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes), date: date);
+ }
+
+ // If the entries list is null, we encountered an error and should scan externally
+ if (entries == null && File.Exists(file))
+ {
+ // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
+ BaseFile fileinfo = Utilities.GetFileInfo(file, omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes), chdsAsFiles: chdsAsFiles);
+ DatItem datItem = null;
+ if (fileinfo.Type == FileType.CHD)
+ {
+ datItem = new Disk(fileinfo);
+ }
+ else if (fileinfo.Type == FileType.None)
+ {
+ datItem = new Rom(fileinfo);
+ }
+
+ usedExternally = RebuildIndividualFile(datItem, file, outDir, date, inverse, outputFormat,
+ romba, updateDat, null /* isZip */, headerToCheckAgainst);
+ }
+ // Otherwise, loop through the entries and try to match
+ else
+ {
+ foreach (BaseFile entry in entries)
+ {
+ DatItem datItem = Utilities.GetDatItem(entry);
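+ // Only keep the "used internally" flag if every entry in the archive is rebuilt successfully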
+ usedInternally &= RebuildIndividualFile(datItem, file, outDir, date, inverse, outputFormat,
+ romba, updateDat, !isTorrentGzip /* isZip */, headerToCheckAgainst);
+ }
+ }
+ }
+
+ // If we are supposed to delete the file, do so
+ if (delete && (usedExternally || usedInternally))
+ {
+ Utilities.TryDeleteFile(file);
+ }
+ }
+
+ ///