[SabreTools.Library] Update folder name

This commit is contained in:
Matt Nadareski
2017-05-04 02:38:56 -07:00
parent 6b5a23247c
commit f72b06a561
111 changed files with 98 additions and 3 deletions

View File

@@ -0,0 +1,70 @@
using System;
using SabreTools.Helper.Data;
namespace SabreTools.Helper.Dats
{
public class Archive : DatItem, ICloneable
{
#region Constructors
/// <summary>
/// Create a default, empty Archive object
/// </summary>
public Archive()
{
_name = "";
_itemType = ItemType.Archive;
}
#endregion
#region Cloning Methods
public object Clone()
{
return new Archive()
{
Name = this.Name,
Type = this.Type,
Dupe = this.Dupe,
Machine = this.Machine,
Supported = this.Supported,
Publisher = this.Publisher,
Infos = this.Infos,
PartName = this.PartName,
PartInterface = this.PartInterface,
Features = this.Features,
AreaName = this.AreaName,
AreaSize = this.AreaSize,
SystemID = this.SystemID,
System = this.System,
SourceID = this.SourceID,
Source = this.Source,
};
}
#endregion
#region Comparison Methods
public override bool Equals(DatItem other)
{
// If we don't have an archive, return false
if (_itemType != other.Type)
{
return false;
}
// Otherwise, treat it as an archive
Archive newOther = (Archive)other;
// If the archive information matches
return (_name == newOther.Name);
}
#endregion
}
}
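
For context only, here is a minimal usage sketch (not part of this commit) of how Archive's Clone and Equals interact, assuming the SabreTools.Helper.Dats types behave as written above: Clone copies every shared DatItem field, while Equals compares only the item type and the name. The wrapper class and method names below are hypothetical.

using SabreTools.Helper.Dats;

public static class ArchiveExample
{
    // Hypothetical harness for illustration only
    public static void Demonstrate()
    {
        // Clone produces a new Archive with all shared DatItem fields copied over
        Archive original = new Archive { Name = "game" };
        Archive copy = (Archive)original.Clone();

        // Equality for Archive items is type + name only
        bool isDupe = original.Equals(copy);   // true: same type, same name
        copy.Name = "other";
        isDupe = original.Equals(copy);        // false: names differ
    }
}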

View File

@@ -0,0 +1,95 @@
using System;
using SabreTools.Helper.Data;
namespace SabreTools.Helper.Dats
{
public class BiosSet : DatItem, ICloneable
{
#region Private instance variables
private string _description;
private bool? _default;
#endregion
#region Publicly facing variables
public string Description
{
get { return _description; }
set { _description = value; }
}
public bool? Default
{
get { return _default; }
set { _default = value; }
}
#endregion
#region Constructors
/// <summary>
/// Create a default, empty BiosSet object
/// </summary>
public BiosSet()
{
_name = "";
_itemType = ItemType.BiosSet;
}
#endregion
#region Cloning Methods
public object Clone()
{
return new BiosSet()
{
Name = this.Name,
Type = this.Type,
Dupe = this.Dupe,
Machine = this.Machine,
Supported = this.Supported,
Publisher = this.Publisher,
Infos = this.Infos,
PartName = this.PartName,
PartInterface = this.PartInterface,
Features = this.Features,
AreaName = this.AreaName,
AreaSize = this.AreaSize,
SystemID = this.SystemID,
System = this.System,
SourceID = this.SourceID,
Source = this.Source,
Description = this.Description,
Default = this.Default,
};
}
#endregion
#region Comparison Methods
public override bool Equals(DatItem other)
{
// If we don't have a biosset, return false
if (_itemType != other.Type)
{
return false;
}
// Otherwise, treat it as a biosset
BiosSet newOther = (BiosSet)other;
// If the biosset information matches
return (_name == newOther.Name && _description == newOther.Description && _default == newOther.Default);
}
#endregion
}
}
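
A similar sketch (again, illustrative only and not part of the commit) for BiosSet, which extends the comparison to the description and the nullable default flag; the wrapper names are hypothetical.

using SabreTools.Helper.Dats;

public static class BiosSetExample
{
    // Hypothetical harness for illustration only
    public static void Demonstrate()
    {
        // BiosSet equality checks the name, description, and nullable default flag together
        BiosSet first = new BiosSet { Name = "neogeo", Description = "NEO-GEO BIOS", Default = true };
        BiosSet second = (BiosSet)first.Clone();

        bool isDupe = first.Equals(second);   // true: all three fields match
        second.Default = null;
        isDupe = first.Equals(second);        // false: the default flag differs
    }
}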

View File

@@ -0,0 +1,596 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.Helper.Data;
namespace SabreTools.Helper.Dats
{
public partial class DatFile
{
#region Private instance variables
// Data common to most DAT types
private string _fileName;
private string _name;
private string _description;
private string _rootDir;
private string _category;
private string _version;
private string _date;
private string _author;
private string _email;
private string _homepage;
private string _url;
private string _comment;
private string _header;
private string _type; // Generally only used for SuperDAT
private ForceMerging _forceMerging;
private ForceNodump _forceNodump;
private ForcePacking _forcePacking;
private DatFormat _datFormat;
private bool _excludeOf;
private bool _mergeRoms;
private Hash _stripHash;
private bool _oneGameOneRegion;
private List<string> _regions = new List<string>();
private SortedDictionary<string, List<DatItem>> _files = new SortedDictionary<string, List<DatItem>>();
private SortedBy _sortedBy;
// Data specific to the Miss DAT type
private bool _useGame;
private string _prefix;
private string _postfix;
private bool _quotes;
private string _repExt;
private string _addExt;
private bool _remExt;
private bool _gameName;
private bool _romba;
// Statistical data related to the DAT
private long _romCount;
private long _diskCount;
private long _totalSize;
private long _crcCount;
private long _md5Count;
private long _sha1Count;
private long _sha256Count;
private long _sha384Count;
private long _sha512Count;
private long _baddumpCount;
private long _nodumpCount;
#endregion
#region Publicly facing variables
// Data common to most DAT types
public string FileName
{
get { return _fileName; }
set { _fileName = value; }
}
public string Name
{
get { return _name; }
set { _name = value; }
}
public string Description
{
get { return _description; }
set { _description = value; }
}
public string RootDir
{
get { return _rootDir; }
set { _rootDir = value; }
}
public string Category
{
get { return _category; }
set { _category = value; }
}
public string Version
{
get { return _version; }
set { _version = value; }
}
public string Date
{
get { return _date; }
set { _date = value; }
}
public string Author
{
get { return _author; }
set { _author = value; }
}
public string Email
{
get { return _email; }
set { _email = value; }
}
public string Homepage
{
get { return _homepage; }
set { _homepage = value; }
}
public string Url
{
get { return _url; }
set { _url = value; }
}
public string Comment
{
get { return _comment; }
set { _comment = value; }
}
public string Header
{
get { return _header; }
set { _header = value; }
}
public string Type // Generally only used for SuperDAT
{
get { return _type; }
set { _type = value; }
}
public ForceMerging ForceMerging
{
get { return _forceMerging; }
set { _forceMerging = value; }
}
public ForceNodump ForceNodump
{
get { return _forceNodump; }
set { _forceNodump = value; }
}
public ForcePacking ForcePacking
{
get { return _forcePacking; }
set { _forcePacking = value; }
}
public DatFormat DatFormat
{
get { return _datFormat; }
set { _datFormat = value; }
}
public bool ExcludeOf
{
get { return _excludeOf; }
set { _excludeOf = value; }
}
public bool MergeRoms
{
get { return _mergeRoms; }
set { _mergeRoms = value; }
}
public Hash StripHash
{
get { return _stripHash; }
set { _stripHash = value; }
}
public bool OneGameOneRegion
{
get { return _oneGameOneRegion; }
set { _oneGameOneRegion = value; }
}
public List<string> Regions
{
get { return _regions; }
set { _regions = value; }
}
public SortedBy SortedBy
{
get { return _sortedBy; }
set { _sortedBy = value; }
}
// Data specific to the Miss DAT type
public bool UseGame
{
get { return _useGame; }
set { _useGame = value; }
}
public string Prefix
{
get { return _prefix; }
set { _prefix = value; }
}
public string Postfix
{
get { return _postfix; }
set { _postfix = value; }
}
public bool Quotes
{
get { return _quotes; }
set { _quotes = value; }
}
public string RepExt
{
get { return _repExt; }
set { _repExt = value; }
}
public string AddExt
{
get { return _addExt; }
set { _addExt = value; }
}
public bool RemExt
{
get { return _remExt; }
set { _remExt = value; }
}
public bool GameName
{
get { return _gameName; }
set { _gameName = value; }
}
public bool Romba
{
get { return _romba; }
set { _romba = value; }
}
// Statistical data related to the DAT
public long RomCount
{
get { return _romCount; }
set { _romCount = value; }
}
public long DiskCount
{
get { return _diskCount; }
set { _diskCount = value; }
}
public long TotalSize
{
get { return _totalSize; }
set { _totalSize = value; }
}
public long CRCCount
{
get { return _crcCount; }
set { _crcCount = value; }
}
public long MD5Count
{
get { return _md5Count; }
set { _md5Count = value; }
}
public long SHA1Count
{
get { return _sha1Count; }
set { _sha1Count = value; }
}
public long SHA256Count
{
get { return _sha256Count; }
set { _sha256Count = value; }
}
public long SHA384Count
{
get { return _sha384Count; }
set { _sha384Count = value; }
}
public long SHA512Count
{
get { return _sha512Count; }
set { _sha512Count = value; }
}
public long BaddumpCount
{
get { return _baddumpCount; }
set { _baddumpCount = value; }
}
public long NodumpCount
{
get { return _nodumpCount; }
set { _nodumpCount = value; }
}
#endregion
#region Instance Methods
#region Accessors
/// <summary>
/// Passthrough to access the file dictionary
/// </summary>
/// <param name="key">Key in the dictionary to reference</param>
public List<DatItem> this[string key]
{
get
{
// If the dictionary is null, create it
if (_files == null)
{
_files = new SortedDictionary<string, List<DatItem>>();
}
lock (_files)
{
// If the key is missing from the dictionary, add it
if (!_files.ContainsKey(key))
{
_files.Add(key, new List<DatItem>());
}
// Now return the value
return _files[key];
}
}
set
{
// If the dictionary is null, create it
if (_files == null)
{
_files = new SortedDictionary<string, List<DatItem>>();
}
lock (_files)
{
// If the key is missing from the dictionary, add it
if (!_files.ContainsKey(key))
{
_files.Add(key, new List<DatItem>());
}
// Now set the value
_files[key] = value;
}
}
}
/// <summary>
/// Add a new key to the file dictionary
/// </summary>
/// <param name="key">Key in the dictionary to add to</param>
public void Add(string key)
{
// If the dictionary is null, create it
if (_files == null)
{
_files = new SortedDictionary<string, List<DatItem>>();
}
lock (_files)
{
// If the key is missing from the dictionary, add it
if (!_files.ContainsKey(key))
{
_files.Add(key, new List<DatItem>());
}
}
}
/// <summary>
/// Add a value to the file dictionary
/// </summary>
/// <param name="key">Key in the dictionary to add to</param>
/// <param name="value">Value to add to the dictionary</param>
public void Add(string key, DatItem value)
{
// If the dictionary is null, create it
if (_files == null)
{
_files = new SortedDictionary<string, List<DatItem>>();
}
lock (_files)
{
// If the key is missing from the dictionary, add it
if (!_files.ContainsKey(key))
{
_files.Add(key, new List<DatItem>());
}
// Now add the value
_files[key].Add(value);
}
}
/// <summary>
/// Add a range of values to the file dictionary
/// </summary>
/// <param name="key">Key in the dictionary to add to</param>
/// <param name="value">Value to add to the dictionary</param>
public void AddRange(string key, List<DatItem> value)
{
// If the dictionary is null, create it
if (_files == null)
{
_files = new SortedDictionary<string, List<DatItem>>();
}
lock (_files)
{
// If the key is missing from the dictionary, add it
if (!_files.ContainsKey(key))
{
_files.Add(key, new List<DatItem>());
}
// Now add the value
_files[key].AddRange(value);
}
}
/// <summary>
/// Get if the file dictionary contains the key
/// </summary>
/// <param name="key">Key in the dictionary to check</param>
/// <returns>True if the key exists, false otherwise</returns>
public bool ContainsKey(string key)
{
// If the dictionary is null, create it
if (_files == null)
{
_files = new SortedDictionary<string, List<DatItem>>();
}
// If the key is null, we return false since keys can't be null
if (key == null)
{
return false;
}
lock (_files)
{
return _files.ContainsKey(key);
}
}
/// <summary>
/// Get the number of DatItems in the file dictionary
/// </summary>
/// <returns>Number of DatItems in the file dictionary</returns>
public long Count
{
get
{
// If the dictionary is null, create it
if (_files == null)
{
_files = new SortedDictionary<string, List<DatItem>>();
}
lock (_files)
{
int count = 0;
foreach (string key in _files.Keys)
{
count += _files[key].Count;
}
return count;
}
}
}
/// <summary>
/// Delete the file dictionary
/// </summary>
public void Delete()
{
_files = null;
}
/// <summary>
/// Get the keys from the file dictionary
/// </summary>
/// <returns>IEnumerable of the keys</returns>
public IEnumerable<string> Keys
{
get
{
// If the dictionary is null, create it
if (_files == null)
{
_files = new SortedDictionary<string, List<DatItem>>();
}
lock (_files)
{
return _files.Keys;
}
}
}
/// <summary>
/// Remove a key from the file dictionary
/// </summary>
/// <param name="key">Key in the dictionary to remove</param>
public void Remove(string key)
{
// If the dictionary is null, create it
if (_files == null)
{
_files = new SortedDictionary<string, List<DatItem>>();
}
lock (_files)
{
// If the key is in the dictionary, remove it
if (_files.ContainsKey(key))
{
_files.Remove(key);
}
}
}
/// <summary>
/// Reset the file dictionary
/// </summary>
public void Reset()
{
_files = new SortedDictionary<string, List<DatItem>>();
}
/// <summary>
/// Set a new file dictionary from an existing one
/// </summary>
/// <param name="newdict">New dictionary to use as the file dictionary</param>
public void Set(SortedDictionary<string, List<DatItem>> newdict)
{
_files = newdict;
}
#endregion
#region Constructors
/// <summary>
/// Create a new, empty DatFile object
/// </summary>
public DatFile()
{
_files = new SortedDictionary<string, List<DatItem>>();
}
/// <summary>
/// Create a new DatFile from an existing one
/// </summary>
/// <param name="datFile">DatFile to copy the header values from</param>
public DatFile(DatFile datFile)
{
_fileName = datFile.FileName;
_name = datFile.Name;
_description = datFile.Description;
_rootDir = datFile.RootDir;
_category = datFile.Category;
_version = datFile.Version;
_date = datFile.Date;
_author = datFile.Author;
_email = datFile.Email;
_homepage = datFile.Homepage;
_url = datFile.Url;
_comment = datFile.Comment;
_header = datFile.Header;
_type = datFile.Type;
_forceMerging = datFile.ForceMerging;
_forceNodump = datFile.ForceNodump;
_forcePacking = datFile.ForcePacking;
_excludeOf = datFile.ExcludeOf;
_datFormat = datFile.DatFormat;
_mergeRoms = datFile.MergeRoms;
_stripHash = datFile.StripHash;
_sortedBy = SortedBy.Default;
_useGame = datFile.UseGame;
_prefix = datFile.Prefix;
_postfix = datFile.Postfix;
_quotes = datFile.Quotes;
_repExt = datFile.RepExt;
_addExt = datFile.AddExt;
_remExt = datFile.RemExt;
_gameName = datFile.GameName;
_romba = datFile.Romba;
}
#endregion
#endregion // Instance Methods
}
}
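
For orientation, a small sketch (not part of the commit) of the dictionary passthrough API defined above. It assumes the Rom and Disk item types from elsewhere in this commit expose the parameterless constructors the merge code already uses; the wrapper names are hypothetical.

using System.Collections.Generic;
using SabreTools.Helper.Dats;

public static class DatFileExample
{
    // Hypothetical harness for illustration only
    public static void Demonstrate()
    {
        DatFile dat = new DatFile();

        // Add and AddRange create the key's bucket on demand before appending
        dat.Add("key1", new Rom());
        dat.AddRange("key1", new List<DatItem> { new Rom(), new Disk() });

        long total = dat.Count;                 // 3: items are summed across all keys
        bool exists = dat.ContainsKey("key2");  // false, and no bucket is created
        List<DatItem> bucket = dat["key2"];     // the indexer creates an empty bucket on read
    }
}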

View File

@@ -0,0 +1,723 @@
using System;
using System.Collections.Generic;
using System.Linq;
using SabreTools.Helper.Data;
using SabreTools.Helper.Tools;
#if MONO
using System.IO;
#else
using Alphaleonis.Win32.Filesystem;
#endif
using NaturalSort;
namespace SabreTools.Helper.Dats
{
public abstract class DatItem : IEquatable<DatItem>, IComparable<DatItem>
{
#region Protected instance variables
// Standard item information
protected string _name;
private string _merge;
protected ItemType _itemType;
protected DupeType _dupeType;
// Machine information
protected Machine _machine;
// Software list information
protected bool? _supported;
protected string _publisher;
protected List<Tuple<string, string>> _infos;
protected string _partName;
protected string _partInterface;
protected List<Tuple<string, string>> _features;
protected string _areaName;
protected long? _areaSize;
// Source metadata information
protected int _systemId;
protected string _systemName;
protected int _sourceId;
protected string _sourceName;
#endregion
#region Publicly facing variables
// Standard item information
public string Name
{
get { return _name; }
set { _name = value; }
}
public string MergeTag
{
get { return _merge; }
set { _merge = value; }
}
public ItemType Type
{
get { return _itemType; }
set { _itemType = value; }
}
public DupeType Dupe
{
get { return _dupeType; }
set { _dupeType = value; }
}
// Machine information
public Machine Machine
{
get { return _machine; }
set { _machine = value; }
}
// Software list information
public bool? Supported
{
get { return _supported; }
set { _supported = value; }
}
public string Publisher
{
get { return _publisher; }
set { _publisher = value; }
}
public List<Tuple<string, string>> Infos
{
get { return _infos; }
set { _infos = value; }
}
public string PartName
{
get { return _partName; }
set { _partName = value; }
}
public string PartInterface
{
get { return _partInterface; }
set { _partInterface = value; }
}
public List<Tuple<string, string>> Features
{
get { return _features; }
set { _features = value; }
}
public string AreaName
{
get { return _areaName; }
set { _areaName = value; }
}
public long? AreaSize
{
get { return _areaSize; }
set { _areaSize = value; }
}
// Source metadata information
public int SystemID
{
get { return _systemId; }
set { _systemId = value; }
}
public string System
{
get { return _systemName; }
set { _systemName = value; }
}
public int SourceID
{
get { return _sourceId; }
set { _sourceId = value; }
}
public string Source
{
get { return _sourceName; }
set { _sourceName = value; }
}
#endregion
#region Instance Methods
#region Comparison Methods
public int CompareTo(DatItem other)
{
int ret = 0;
try
{
if (_name == other.Name)
{
ret = (this.Equals(other) ? 0 : 1);
}
else
{
ret = String.Compare(_name, other.Name);
}
}
catch
{
ret = 1;
}
return ret;
}
/// <summary>
/// Determine if an item is a duplicate using partial matching logic
/// </summary>
/// <param name="other">DatItem to use as a baseline</param>
/// <returns>True if the roms are duplicates, false otherwise</returns>
public abstract bool Equals(DatItem other);
/// <summary>
/// Return the duplicate status of two items
/// </summary>
/// <param name="lastItem">DatItem to check against</param>
/// <returns>The DupeType corresponding to the relationship between the two</returns>
public DupeType GetDuplicateStatus(DatItem lastItem)
{
DupeType output = 0x00;
// If we don't have a duplicate at all, return none
if (!this.Equals(lastItem))
{
return output;
}
// If the duplicate is external already or should be, set it
if ((lastItem.Dupe & DupeType.External) != 0 || lastItem.SystemID != this.SystemID || lastItem.SourceID != this.SourceID)
{
if (lastItem.Machine.Name == this.Machine.Name && lastItem.Name == this.Name)
{
output = DupeType.External | DupeType.All;
}
else
{
output = DupeType.External | DupeType.Hash;
}
}
// Otherwise, it's considered an internal dupe
else
{
if (lastItem.Machine.Name == this.Machine.Name && lastItem.Name == this.Name)
{
output = DupeType.Internal | DupeType.All;
}
else
{
output = DupeType.Internal | DupeType.Hash;
}
}
return output;
}
#endregion
#region Sorting and Merging
/// <summary>
/// Check if a DAT contains the given rom
/// </summary>
/// <param name="datdata">Dat to match against</param>
/// <returns>True if it contains the rom, false otherwise</returns>
public bool HasDuplicates(DatFile datdata)
{
// Check for an empty rom list first
if (datdata.Count == 0)
{
return false;
}
// We want to get the proper key for the DatItem
string key = SortAndGetKey(datdata);
// If the key doesn't exist, return the empty list
if (!datdata.ContainsKey(key))
{
return false;
}
// Try to find duplicates
List<DatItem> roms = datdata[key];
foreach (DatItem rom in roms)
{
if (this.Equals(rom))
{
return true;
}
}
return false;
}
/// <summary>
/// List all duplicates found in a DAT based on a rom
/// </summary>
/// <param name="datdata">Dat to match against</param>
/// <param name="remove">True to remove matched roms from the input, false otherwise (default)</param>
/// <returns>List of matched DatItem objects</returns>
public List<DatItem> GetDuplicates(DatFile datdata, bool remove = false)
{
List<DatItem> output = new List<DatItem>();
// Check for an empty rom list first
if (datdata.Count == 0)
{
return output;
}
// We want to get the proper key for the DatItem
string key = SortAndGetKey(datdata);
// If the key doesn't exist, return the empty list
if (!datdata.ContainsKey(key))
{
return output;
}
// Try to find duplicates
List<DatItem> roms = datdata[key];
List<DatItem> left = new List<DatItem>();
foreach (DatItem rom in roms)
{
if (this.Equals(rom))
{
output.Add(rom);
}
else
{
left.Add(rom);
}
}
// If we're in removal mode, replace the list with the new one
if (remove)
{
datdata[key] = left;
}
return output;
}
/// <summary>
/// Sort the input DAT and get the key to be used by the item
/// </summary>
/// <param name="datdata">Dat to match against</param>
/// <returns>Key to try to use</returns>
private string SortAndGetKey(DatFile datdata)
{
string key = null;
// If all items are supposed to have a SHA-512, we sort by that
if (datdata.RomCount + datdata.DiskCount - datdata.NodumpCount == datdata.SHA512Count
&& ((_itemType == ItemType.Rom && !String.IsNullOrEmpty(((Rom)this).SHA512))
|| (_itemType == ItemType.Disk && !String.IsNullOrEmpty(((Disk)this).SHA512))))
{
if (_itemType == ItemType.Rom)
{
key = ((Rom)this).SHA512;
datdata.BucketBy(SortedBy.SHA512, false /* mergeroms */);
}
else
{
key = ((Disk)this).SHA512;
datdata.BucketBy(SortedBy.SHA512, false /* mergeroms */);
}
}
// If all items are supposed to have a SHA-384, we sort by that
else if (datdata.RomCount + datdata.DiskCount - datdata.NodumpCount == datdata.SHA384Count
&& ((_itemType == ItemType.Rom && !String.IsNullOrEmpty(((Rom)this).SHA384))
|| (_itemType == ItemType.Disk && !String.IsNullOrEmpty(((Disk)this).SHA384))))
{
if (_itemType == ItemType.Rom)
{
key = ((Rom)this).SHA384;
datdata.BucketBy(SortedBy.SHA384, false /* mergeroms */);
}
else
{
key = ((Disk)this).SHA384;
datdata.BucketBy(SortedBy.SHA384, false /* mergeroms */);
}
}
// If all items are supposed to have a SHA-256, we sort by that
else if (datdata.RomCount + datdata.DiskCount - datdata.NodumpCount == datdata.SHA256Count
&& ((_itemType == ItemType.Rom && !String.IsNullOrEmpty(((Rom)this).SHA256))
|| (_itemType == ItemType.Disk && !String.IsNullOrEmpty(((Disk)this).SHA256))))
{
if (_itemType == ItemType.Rom)
{
key = ((Rom)this).SHA256;
datdata.BucketBy(SortedBy.SHA256, false /* mergeroms */);
}
else
{
key = ((Disk)this).SHA256;
datdata.BucketBy(SortedBy.SHA256, false /* mergeroms */);
}
}
// If all items are supposed to have a SHA-1, we sort by that
else if (datdata.RomCount + datdata.DiskCount - datdata.NodumpCount == datdata.SHA1Count
&& ((_itemType == ItemType.Rom && !String.IsNullOrEmpty(((Rom)this).SHA1))
|| (_itemType == ItemType.Disk && !String.IsNullOrEmpty(((Disk)this).SHA1))))
{
if (_itemType == ItemType.Rom)
{
key = ((Rom)this).SHA1;
datdata.BucketBy(SortedBy.SHA1, false /* mergeroms */);
}
else
{
key = ((Disk)this).SHA1;
datdata.BucketBy(SortedBy.SHA1, false /* mergeroms */);
}
}
// If all items are supposed to have an MD5, we sort by that
else if (datdata.RomCount + datdata.DiskCount - datdata.NodumpCount == datdata.MD5Count
&& ((_itemType == ItemType.Rom && !String.IsNullOrEmpty(((Rom)this).MD5))
|| (_itemType == ItemType.Disk && !String.IsNullOrEmpty(((Disk)this).MD5))))
{
if (_itemType == ItemType.Rom)
{
key = ((Rom)this).MD5;
datdata.BucketBy(SortedBy.MD5, false /* mergeroms */);
}
else
{
key = ((Disk)this).MD5;
datdata.BucketBy(SortedBy.MD5, false /* mergeroms */);
}
}
// If we've gotten here and we have a Disk, sort by MD5
else if (_itemType == ItemType.Disk)
{
key = ((Disk)this).MD5;
datdata.BucketBy(SortedBy.MD5, false /* mergeroms */);
}
// If we've gotten here and we have a Rom, sort by CRC
else if (_itemType == ItemType.Rom)
{
key = ((Rom)this).CRC;
datdata.BucketBy(SortedBy.CRC, false /* mergeroms */);
}
// Otherwise, we use -1 as the key
else
{
key = "-1";
datdata.BucketBy(SortedBy.Size, false /* mergeroms */);
}
return key;
}
#endregion
#endregion // Instance Methods
#region Static Methods
#region Sorting and Merging
/// <summary>
/// Merge an arbitrary set of ROMs based on the supplied information
/// </summary>
/// <param name="infiles">List of File objects representing the roms to be merged</param>
/// <returns>A List of RomData objects representing the merged roms</returns>
public static List<DatItem> Merge(List<DatItem> infiles)
{
// Check for null or blank roms first
if (infiles == null || infiles.Count == 0)
{
return new List<DatItem>();
}
// Create output list
List<DatItem> outfiles = new List<DatItem>();
// Then deduplicate them by checking to see if data matches previous saved roms
foreach (DatItem file in infiles)
{
// If it's a nodump, add and skip
if (file.Type == ItemType.Rom && ((Rom)file).ItemStatus == ItemStatus.Nodump)
{
outfiles.Add(file);
continue;
}
else if (file.Type == ItemType.Disk && ((Disk)file).ItemStatus == ItemStatus.Nodump)
{
outfiles.Add(file);
continue;
}
// If it's not the first rom in the list, check it against the saved roms
if (outfiles.Count != 0)
{
// Check if the rom is a duplicate
DupeType dupetype = 0x00;
DatItem saveditem = new Rom();
int pos = -1;
for (int i = 0; i < outfiles.Count; i++)
{
DatItem lastrom = outfiles[i];
// Get the duplicate status
dupetype = file.GetDuplicateStatus(lastrom);
// If it's a duplicate, skip adding it to the output but add any missing information
if (dupetype != 0x00)
{
// If we don't have a rom or disk, then just skip adding
if (file.Type != ItemType.Rom && file.Type != ItemType.Disk)
{
continue;
}
saveditem = lastrom;
pos = i;
// Roms have more information to save
if (file.Type == ItemType.Rom)
{
((Rom)saveditem).Size = ((Rom)saveditem).Size;
((Rom)saveditem).CRC = (String.IsNullOrEmpty(((Rom)saveditem).CRC) && !String.IsNullOrEmpty(((Rom)file).CRC)
? ((Rom)file).CRC
: ((Rom)saveditem).CRC);
((Rom)saveditem).MD5 = (String.IsNullOrEmpty(((Rom)saveditem).MD5) && !String.IsNullOrEmpty(((Rom)file).MD5)
? ((Rom)file).MD5
: ((Rom)saveditem).MD5);
((Rom)saveditem).SHA1 = (String.IsNullOrEmpty(((Rom)saveditem).SHA1) && !String.IsNullOrEmpty(((Rom)file).SHA1)
? ((Rom)file).SHA1
: ((Rom)saveditem).SHA1);
}
else
{
((Disk)saveditem).MD5 = (String.IsNullOrEmpty(((Disk)saveditem).MD5) && !String.IsNullOrEmpty(((Disk)file).MD5)
? ((Disk)file).MD5
: ((Disk)saveditem).MD5);
((Disk)saveditem).SHA1 = (String.IsNullOrEmpty(((Disk)saveditem).SHA1) && !String.IsNullOrEmpty(((Disk)file).SHA1)
? ((Disk)file).SHA1
: ((Disk)saveditem).SHA1);
}
saveditem.Dupe = dupetype;
// If the current system has a lower ID than the previous, set the system accordingly
if (file.SystemID < saveditem.SystemID)
{
saveditem.SystemID = file.SystemID;
saveditem.System = file.System;
saveditem.Machine = (Machine)file.Machine.Clone();
saveditem.Name = file.Name;
}
// If the current source has a lower ID than the previous, set the source accordingly
if (file.SourceID < saveditem.SourceID)
{
saveditem.SourceID = file.SourceID;
saveditem.Source = file.Source;
saveditem.Machine = (Machine)file.Machine.Clone();
saveditem.Name = file.Name;
}
break;
}
}
// If no duplicate is found, add it to the list
if (dupetype == 0x00)
{
outfiles.Add(file);
}
// Otherwise, if updated rom information was found, replace the existing entry
else
{
outfiles.RemoveAt(pos);
outfiles.Insert(pos, saveditem);
}
}
else
{
outfiles.Add(file);
}
}
// Then return the result
return outfiles;
}
/// <summary>
/// Resolve name duplicates in an arbitrary set of ROMs based on the supplied information
/// </summary>
/// <param name="infiles">List of File objects representing the roms to be merged</param>
/// <returns>A List of RomData objects representing the renamed roms</returns>
public static List<DatItem> ResolveNames(List<DatItem> infiles)
{
// Create the output list
List<DatItem> output = new List<DatItem>();
// First we want to make sure the list is in alphabetical order
Sort(ref infiles, true);
// Now we want to loop through and check names
DatItem lastItem = null;
string lastrenamed = null;
int lastid = 0;
for (int i = 0; i < infiles.Count; i++)
{
DatItem datItem = infiles[i];
// If we have the first item, we automatically add it
if (lastItem == null)
{
output.Add(datItem);
lastItem = datItem;
continue;
}
// If the current item exactly matches the last item, then we don't add it
if ((datItem.GetDuplicateStatus(lastItem) & DupeType.All) != 0)
{
Globals.Logger.Verbose("Exact duplicate found for '" + datItem.Name + "'");
continue;
}
// If the current name matches the previous name, rename the current item
else if (datItem.Name == lastItem.Name)
{
Globals.Logger.Verbose("Name duplicate found for '" + datItem.Name + "'");
if (datItem.Type == ItemType.Disk)
{
Disk disk = (Disk)datItem;
disk.Name += "_" + (!String.IsNullOrEmpty(disk.MD5)
? disk.MD5
: !String.IsNullOrEmpty(disk.SHA1)
? disk.SHA1
: "1");
datItem = disk;
lastrenamed = lastrenamed ?? datItem.Name;
}
else if (datItem.Type == ItemType.Rom)
{
Rom rom = (Rom)datItem;
rom.Name += "_" + (!String.IsNullOrEmpty(rom.CRC)
? rom.CRC
: !String.IsNullOrEmpty(rom.MD5)
? rom.MD5
: !String.IsNullOrEmpty(rom.SHA1)
? rom.SHA1
: "1");
datItem = rom;
lastrenamed = lastrenamed ?? datItem.Name;
}
// If we have a conflict with the last renamed item, do the right thing
if (datItem.Name == lastrenamed)
{
lastrenamed = datItem.Name;
datItem.Name += (lastid == 0 ? "" : "_" + lastid);
lastid++;
}
// If we have no conflict, then we want to reset the lastrenamed and id
else
{
lastrenamed = null;
lastid = 0;
}
output.Add(datItem);
}
// Otherwise, we say that we have a valid named file
else
{
output.Add(datItem);
lastItem = datItem;
lastrenamed = null;
lastid = 0;
}
}
return output;
}
/// <summary>
/// Sort a list of File objects by SystemID, SourceID, Game, and Name (in order)
/// </summary>
/// <param name="roms">List of File objects representing the roms to be sorted</param>
/// <param name="norename">True if files are not renamed, false otherwise</param>
/// <returns>True if it sorted correctly, false otherwise</returns>
public static bool Sort(ref List<DatItem> roms, bool norename)
{
roms.Sort(delegate (DatItem x, DatItem y)
{
try
{
NaturalComparer nc = new NaturalComparer();
if (x.SystemID == y.SystemID)
{
if (x.SourceID == y.SourceID)
{
if (x.Machine != null && y.Machine != null && x.Machine.Name == y.Machine.Name)
{
if ((x.Type == ItemType.Rom || x.Type == ItemType.Disk) && (y.Type == ItemType.Rom || y.Type == ItemType.Disk))
{
if (Path.GetDirectoryName(Style.RemovePathUnsafeCharacters(x.Name)) == Path.GetDirectoryName(Style.RemovePathUnsafeCharacters(y.Name)))
{
return nc.Compare(Path.GetFileName(Style.RemovePathUnsafeCharacters(x.Name)), Path.GetFileName(Style.RemovePathUnsafeCharacters(y.Name)));
}
return nc.Compare(Path.GetDirectoryName(Style.RemovePathUnsafeCharacters(x.Name)), Path.GetDirectoryName(Style.RemovePathUnsafeCharacters(y.Name)));
}
else if ((x.Type == ItemType.Rom || x.Type == ItemType.Disk) && (y.Type != ItemType.Rom && y.Type != ItemType.Disk))
{
return -1;
}
else if ((x.Type != ItemType.Rom && x.Type != ItemType.Disk) && (y.Type == ItemType.Rom || y.Type == ItemType.Disk))
{
return 1;
}
else
{
if (Path.GetDirectoryName(x.Name) == Path.GetDirectoryName(y.Name))
{
return nc.Compare(Path.GetFileName(x.Name), Path.GetFileName(y.Name));
}
return nc.Compare(Path.GetDirectoryName(x.Name), Path.GetDirectoryName(y.Name));
}
}
return nc.Compare(x.Machine.Name, y.Machine.Name);
}
return (norename ? nc.Compare(x.Machine.Name, y.Machine.Name) : x.SourceID - y.SourceID);
}
return (norename ? nc.Compare(x.Machine.Name, y.Machine.Name) : x.SystemID - y.SystemID);
}
catch (Exception)
{
// Absorb the error
return 0;
}
});
return true;
}
#endregion
#endregion // Static Methods
}
}
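
To make the duplicate logic above concrete, a short sketch (illustrative only, not part of the commit) using the Disk type from this commit, whose Equals is hash-based. The wrapper names are hypothetical.

using System.Collections.Generic;
using SabreTools.Helper.Data;
using SabreTools.Helper.Dats;

public static class DatItemExample
{
    // Hypothetical harness for illustration only
    public static void Demonstrate()
    {
        Machine machine = new Machine("pacman", "Pac-Man");
        Disk first = new Disk { Name = "pacman", MD5 = "0123456789abcdef0123456789abcdef", Machine = machine };
        Disk second = (Disk)first.Clone();

        // Same system, source, machine, and name: an internal, all-fields duplicate
        DupeType status = first.GetDuplicateStatus(second);   // DupeType.Internal | DupeType.All

        // Merge collapses the matching pair down to a single surviving item
        List<DatItem> merged = DatItem.Merge(new List<DatItem> { first, second });

        // Sort orders by SystemID, SourceID, machine name, then item name
        DatItem.Sort(ref merged, norename: true);
    }
}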

View File

@@ -0,0 +1,144 @@
using System;
using SabreTools.Helper.Data;
namespace SabreTools.Helper.Dats
{
public class Disk : DatItem, ICloneable
{
#region Private instance variables
// Disk information
protected string _md5;
protected string _sha1;
protected string _sha256;
protected string _sha384;
protected string _sha512;
protected ItemStatus _itemStatus;
#endregion
#region Publicly facing variables
// Disk information
public string MD5
{
get { return _md5; }
set { _md5 = value; }
}
public string SHA1
{
get { return _sha1; }
set { _sha1 = value; }
}
public string SHA256
{
get { return _sha256; }
set { _sha256 = value; }
}
public string SHA384
{
get { return _sha384; }
set { _sha384 = value; }
}
public string SHA512
{
get { return _sha512; }
set { _sha512 = value; }
}
public ItemStatus ItemStatus
{
get { return _itemStatus; }
set { _itemStatus = value; }
}
#endregion
#region Constructors
/// <summary>
/// Create a default, empty Disk object
/// </summary>
public Disk()
{
_name = "";
_itemType = ItemType.Disk;
_dupeType = 0x00;
_itemStatus = ItemStatus.None;
}
#endregion
#region Cloning Methods
public object Clone()
{
return new Disk()
{
Name = this.Name,
Type = this.Type,
Dupe = this.Dupe,
Machine = this.Machine,
Supported = this.Supported,
Publisher = this.Publisher,
Infos = this.Infos,
PartName = this.PartName,
PartInterface = this.PartInterface,
Features = this.Features,
AreaName = this.AreaName,
AreaSize = this.AreaSize,
SystemID = this.SystemID,
System = this.System,
SourceID = this.SourceID,
Source = this.Source,
MD5 = this.MD5,
SHA1 = this.SHA1,
SHA256 = this.SHA256,
SHA384 = this.SHA384,
SHA512 = this.SHA512,
ItemStatus = this.ItemStatus,
};
}
#endregion
#region Comparison Methods
public override bool Equals(DatItem other)
{
bool dupefound = false;
// If we don't have a disk, return false
if (_itemType != other.Type)
{
return dupefound;
}
// Otherwise, treat it as a disk
Disk newOther = (Disk)other;
// If either is a nodump, it's never a match
if (_itemStatus == ItemStatus.Nodump || newOther.ItemStatus == ItemStatus.Nodump)
{
return dupefound;
}
if (((String.IsNullOrEmpty(_md5) || String.IsNullOrEmpty(newOther.MD5)) || this.MD5 == newOther.MD5)
&& ((String.IsNullOrEmpty(this.SHA1) || String.IsNullOrEmpty(newOther.SHA1)) || this.SHA1 == newOther.SHA1)
&& ((String.IsNullOrEmpty(this.SHA256) || String.IsNullOrEmpty(newOther.SHA256)) || this.SHA256 == newOther.SHA256)
&& ((String.IsNullOrEmpty(this.SHA384) || String.IsNullOrEmpty(newOther.SHA384)) || this.SHA384 == newOther.SHA384)
&& ((String.IsNullOrEmpty(this.SHA512) || String.IsNullOrEmpty(newOther.SHA512)) || this.SHA512 == newOther.SHA512))
{
dupefound = true;
}
return dupefound;
}
#endregion
}
}
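
One detail of Disk.Equals worth calling out, shown here as a sketch (not part of the commit): a hash missing on either side is skipped, so items match on whichever hashes both carry, and a nodump disk never matches anything. The wrapper names are hypothetical.

using SabreTools.Helper.Data;
using SabreTools.Helper.Dats;

public static class DiskExample
{
    // Hypothetical harness for illustration only
    public static void Demonstrate()
    {
        // Only hashes present on both sides have to agree
        Disk md5Only = new Disk { Name = "disk1", MD5 = "aaaa" };
        Disk md5AndSha1 = new Disk { Name = "disk1", MD5 = "aaaa", SHA1 = "bbbb" };
        bool match = md5Only.Equals(md5AndSha1);   // true: SHA-1 is missing on one side, so it is ignored

        // A nodump disk never counts as a duplicate, even with identical hashes
        md5AndSha1.ItemStatus = ItemStatus.Nodump;
        match = md5Only.Equals(md5AndSha1);        // false
    }
}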

View File

@@ -0,0 +1,715 @@
using System;
using System.Collections.Generic;
using System.Text.RegularExpressions;
using SabreTools.Helper.Data;
namespace SabreTools.Helper.Dats
{
public class Filter
{
#region Private instance variables
#region Positive
private List<string> _gameNames;
private List<string> _romNames;
private List<string> _romTypes;
private List<string> _crcs;
private List<string> _md5s;
private List<string> _sha1s;
private List<string> _sha256s;
private List<string> _sha384s;
private List<string> _sha512s;
private ItemStatus _itemStatuses;
private MachineType _machineTypes;
#endregion
#region Negative
private List<string> _notGameNames;
private List<string> _notRomNames;
private List<string> _notRomTypes;
private List<string> _notCrcs;
private List<string> _notMd5s;
private List<string> _notSha1s;
private List<string> _notSha256s;
private List<string> _notSha384s;
private List<string> _notSha512s;
private ItemStatus _itemNotStatuses;
private MachineType _machineNotTypes;
#endregion
#region Neutral
private long _sizeGreaterThanOrEqual;
private long _sizeLessThanOrEqual;
private long _sizeEqualTo;
private bool _includeOfInGame;
private bool? _runnable;
#endregion
#endregion // Private instance variables
#region Publicly facing variables
#region Positive
public List<string> GameNames
{
get { return _gameNames; }
set { _gameNames = value; }
}
public List<string> RomNames
{
get { return _romNames; }
set { _romNames = value; }
}
public List<string> RomTypes
{
get { return _romTypes; }
set { _romTypes = value; }
}
public List<string> CRCs
{
get { return _crcs; }
set { _crcs = value; }
}
public List<string> MD5s
{
get { return _md5s; }
set { _md5s = value; }
}
public List<string> SHA1s
{
get { return _sha1s; }
set { _sha1s = value; }
}
public List<string> SHA256s
{
get { return _sha256s; }
set { _sha256s = value; }
}
public List<string> SHA384s
{
get { return _sha384s; }
set { _sha384s = value; }
}
public List<string> SHA512s
{
get { return _sha512s; }
set { _sha512s = value; }
}
public ItemStatus ItemStatuses
{
get { return _itemStatuses; }
set { _itemStatuses = value; }
}
public MachineType MachineTypes
{
get { return _machineTypes; }
set { _machineTypes = value; }
}
#endregion
#region Negative
public List<string> NotGameNames
{
get { return _notGameNames; }
set { _notGameNames = value; }
}
public List<string> NotRomNames
{
get { return _notRomNames; }
set { _notRomNames = value; }
}
public List<string> NotRomTypes
{
get { return _notRomTypes; }
set { _notRomTypes = value; }
}
public List<string> NotCRCs
{
get { return _notCrcs; }
set { _notCrcs = value; }
}
public List<string> NotMD5s
{
get { return _notMd5s; }
set { _notMd5s = value; }
}
public List<string> NotSHA1s
{
get { return _notSha1s; }
set { _notSha1s = value; }
}
public List<string> NotSHA256s
{
get { return _notSha256s; }
set { _notSha256s = value; }
}
public List<string> NotSHA384s
{
get { return _notSha384s; }
set { _notSha384s = value; }
}
public List<string> NotSHA512s
{
get { return _notSha512s; }
set { _notSha512s = value; }
}
public ItemStatus NotItemStatuses
{
get { return _itemNotStatuses; }
set { _itemNotStatuses = value; }
}
public MachineType NotMachineTypes
{
get { return _machineNotTypes; }
set { _machineNotTypes = value; }
}
#endregion
#region Neutral
public long SizeGreaterThanOrEqual
{
get { return _sizeGreaterThanOrEqual; }
set { _sizeGreaterThanOrEqual = value; }
}
public long SizeLessThanOrEqual
{
get { return _sizeLessThanOrEqual; }
set { _sizeLessThanOrEqual = value; }
}
public long SizeEqualTo
{
get { return _sizeEqualTo; }
set { _sizeEqualTo = value; }
}
public bool IncludeOfInGame
{
get { return _includeOfInGame; }
set { _includeOfInGame = value; }
}
public bool? Runnable
{
get { return _runnable; }
set { _runnable = value; }
}
#endregion
#endregion // Publicly facing variables
#region Constructors
/// <summary>
/// Create an empty Filter object
/// </summary>
public Filter()
{
// Positive
_gameNames = new List<string>();
_romNames = new List<string>();
_romTypes = new List<string>();
_crcs = new List<string>();
_md5s = new List<string>();
_sha1s = new List<string>();
_sha256s = new List<string>();
_sha384s = new List<string>();
_sha512s = new List<string>();
_itemStatuses = ItemStatus.NULL;
_machineTypes = MachineType.NULL;
// Negative
_notGameNames = new List<string>();
_notRomNames = new List<string>();
_notRomTypes = new List<string>();
_notCrcs = new List<string>();
_notMd5s = new List<string>();
_notSha1s = new List<string>();
_notSha256s = new List<string>();
_notSha384s = new List<string>();
_notSha512s = new List<string>();
_itemNotStatuses = ItemStatus.NULL;
_machineNotTypes = MachineType.NULL;
// Neutral
_sizeGreaterThanOrEqual = -1;
_sizeLessThanOrEqual = -1;
_sizeEqualTo = -1;
_includeOfInGame = false;
_runnable = null;
}
#endregion
#region Instance methods
/// <summary>
/// Check to see if a DatItem passes the filter
/// </summary>
/// <param name="item">DatItem to check</param>
/// <returns>True if the file passed the filter, false otherwise</returns>
public bool ItemPasses(DatItem item)
{
// If the item is null, we automatically fail it
if (item == null)
{
return false;
}
// If the item's machine is null, we automatically fail it
if (item.Machine == null)
{
return false;
}
// Filter on machine type
if (_machineTypes != MachineType.NULL && (item.Machine.MachineType & _machineTypes) == 0)
{
return false;
}
if (_machineNotTypes != MachineType.NULL && (item.Machine.MachineType & _machineNotTypes) != 0)
{
return false;
}
// Filter on machine runability
if (_runnable != null && item.Machine.Runnable != _runnable)
{
return false;
}
// Take care of Rom and Disk specific differences
if (item.Type == ItemType.Rom)
{
Rom rom = (Rom)item;
// Filter on status
if (_itemStatuses != ItemStatus.NULL && (rom.ItemStatus & _itemStatuses) == 0)
{
return false;
}
if (_itemNotStatuses != ItemStatus.NULL && (rom.ItemStatus & _itemNotStatuses) != 0)
{
return false;
}
// Filter on rom size
if (_sizeEqualTo != -1 && rom.Size != _sizeEqualTo)
{
return false;
}
else
{
if (_sizeGreaterThanOrEqual != -1 && rom.Size < _sizeGreaterThanOrEqual)
{
return false;
}
if (_sizeLessThanOrEqual != -1 && rom.Size > _sizeLessThanOrEqual)
{
return false;
}
}
// Filter on CRC
if (_crcs.Count > 0)
{
// If the CRC isn't in the list, return false
if (!FindValueInList(_crcs, rom.CRC))
{
return false;
}
}
if (_notCrcs.Count > 0)
{
// If the CRC is in the list, return false
if (FindValueInList(_notCrcs, rom.CRC))
{
return false;
}
}
// Filter on MD5
if (_md5s.Count > 0)
{
// If the MD5 isn't in the list, return false
if (!FindValueInList(_md5s, rom.MD5))
{
return false;
}
}
if (_notMd5s.Count > 0)
{
// If the MD5 is in the list, return false
if (FindValueInList(_notMd5s, rom.MD5))
{
return false;
}
}
// Filter on SHA1
if (_sha1s.Count > 0)
{
// If the SHA-1 isn't in the list, return false
if (!FindValueInList(_sha1s, rom.SHA1))
{
return false;
}
}
if (_notSha1s.Count > 0)
{
// If the SHA-1 is in the list, return false
if (FindValueInList(_notSha1s, rom.SHA1))
{
return false;
}
}
// Filter on SHA256
if (_sha256s.Count > 0)
{
// If the SHA-256 isn't in the list, return false
if (!FindValueInList(_sha256s, rom.SHA256))
{
return false;
}
}
if (_notSha256s.Count > 0)
{
// If the SHA-256 is in the list, return false
if (FindValueInList(_notSha256s, rom.SHA256))
{
return false;
}
}
// Filter on SHA384
if (_sha384s.Count > 0)
{
// If the SHA-384 isn't in the list, return false
if (!FindValueInList(_sha384s, rom.SHA384))
{
return false;
}
}
if (_notSha384s.Count > 0)
{
// If the SHA-384 is in the list, return false
if (FindValueInList(_notSha384s, rom.SHA384))
{
return false;
}
}
// Filter on SHA512
if (_sha512s.Count > 0)
{
// If the SHA-512 isn't in the list, return false
if (!FindValueInList(_sha512s, rom.SHA512))
{
return false;
}
}
if (_notSha512s.Count > 0)
{
// If the SHA-512 is in the list, return false
if (FindValueInList(_notSha512s, rom.SHA512))
{
return false;
}
}
}
else if (item.Type == ItemType.Disk)
{
Disk rom = (Disk)item;
// Filter on status
if (_itemStatuses != ItemStatus.NULL && (rom.ItemStatus & _itemStatuses) == 0)
{
return false;
}
if (_itemNotStatuses != ItemStatus.NULL && (rom.ItemStatus & _itemNotStatuses) != 0)
{
return false;
}
// Filter on MD5
if (_md5s.Count > 0)
{
// If the MD5 isn't in the list, return false
if (!FindValueInList(_md5s, rom.MD5))
{
return false;
}
}
if (_notMd5s.Count > 0)
{
// If the MD5 is in the list, return false
if (FindValueInList(_notMd5s, rom.MD5))
{
return false;
}
}
// Filter on SHA1
if (_sha1s.Count > 0)
{
// If the SHA-1 isn't in the list, return false
if (!FindValueInList(_sha1s, rom.SHA1))
{
return false;
}
}
if (_notSha1s.Count > 0)
{
// If the SHA-1 is in the list, return false
if (FindValueInList(_notSha1s, rom.SHA1))
{
return false;
}
}
// Filter on SHA256
if (_sha256s.Count > 0)
{
// If the SHA-256 isn't in the list, return false
if (!FindValueInList(_sha256s, rom.SHA256))
{
return false;
}
}
if (_notSha256s.Count > 0)
{
// If the SHA-256 is in the list, return false
if (FindValueInList(_notSha256s, rom.SHA256))
{
return false;
}
}
// Filter on SHA384
if (_sha384s.Count > 0)
{
// If the SHA-384 isn't in the list, return false
if (!FindValueInList(_sha384s, rom.SHA384))
{
return false;
}
}
if (_notSha384s.Count > 0)
{
// If the SHA-384 is in the list, return false
if (FindValueInList(_notSha384s, rom.SHA384))
{
return false;
}
}
// Filter on SHA512
if (_sha512s.Count > 0)
{
// If the SHA-512 isn't in the list, return false
if (!FindValueInList(_sha512s, rom.SHA512))
{
return false;
}
}
if (_notSha512s.Count > 0)
{
// If the SHA-512 is in the list, return false
if (FindValueInList(_notSha512s, rom.SHA512))
{
return false;
}
}
}
// Filter on game name
if (_gameNames.Count > 0)
{
bool found = FindValueInList(_gameNames, item.Machine.Name);
// If we are checking CloneOf and RomOf, add them in as well
if (_includeOfInGame)
{
found |= FindValueInList(_gameNames, item.Machine.CloneOf);
found |= FindValueInList(_gameNames, item.Machine.RomOf);
}
// If the game name was not found in the list, return false
if (!found)
{
return false;
}
}
if (_notGameNames.Count > 0)
{
bool found = FindValueInList(_notGameNames, item.Machine.Name);
// If we are checking CloneOf and RomOf, add them in as well
if (_includeOfInGame)
{
found |= FindValueInList(_notGameNames, item.Machine.CloneOf);
found |= FindValueInList(_notGameNames, item.Machine.RomOf);
}
// If the game name was found in the list, return false
if (found)
{
return false;
}
}
// Filter on rom name
if (_romNames.Count > 0)
{
// If the rom name was not found in the list, return false
if (!FindValueInList(_romNames, item.Name))
{
return false;
}
}
if (_notRomNames.Count > 0)
{
// If the rom name was found in the list, return false
if (FindValueInList(_notRomNames, item.Name))
{
return false;
}
}
// Filter on rom type
if (_romTypes.Count == 0 && _notRomTypes.Count == 0 && item.Type != ItemType.Rom && item.Type != ItemType.Disk)
{
return false;
}
if (_romTypes.Count > 0)
{
// If the rom type was not found in the list, return false
if (!FindValueInList(_romTypes, item.Type.ToString()))
{
return false;
}
}
if (_notRomTypes.Count > 0)
{
// If the rom type was found in the list, return false
if (FindValueInList(_notRomTypes, item.Type.ToString()))
{
return false;
}
}
return true;
}
/// <summary>
/// Generic code to check if a specific value is in the list given
/// </summary>
/// <param name="haystack">List to search for the value in</param>
/// <param name="needle">Value to search the list for</param>
/// <returns>True if the value could be found, false otherwise</returns>
private bool FindValueInList(List<string> haystack, string needle)
{
bool found = false;
foreach (string straw in haystack)
{
if (!String.IsNullOrEmpty(straw))
{
string regexStraw = straw;
// If the straw has no special characters at all, treat it as an exact match
if (regexStraw == Regex.Escape(regexStraw))
{
regexStraw = "^" + regexStraw + "$";
}
// Check if a match is found with the regex
found |= Regex.IsMatch(needle, regexStraw, RegexOptions.IgnoreCase | RegexOptions.CultureInvariant);
}
}
return found;
}
#endregion
#region Static methods
/// <summary>
/// Get the machine type from a string
/// </summary>
/// <param name="gametype">Machine type as a string</param>
/// <returns>A machine type based on the input</returns>
public static MachineType GetMachineTypeFromString(string gametype)
{
MachineType machineType = MachineType.NULL;
switch (gametype.ToLowerInvariant())
{
case "none":
machineType |= MachineType.None;
break;
case "bios":
machineType |= MachineType.Bios;
break;
case "dev":
case "device":
machineType |= MachineType.Device;
break;
case "mech":
case "mechanical":
machineType |= MachineType.Mechanical;
break;
default:
Globals.Logger.Warning(gametype + " is not a valid type");
break;
}
return machineType;
}
/// <summary>
/// Get the item status from a string
/// </summary>
/// <param name="status">Item status as a string</param>
/// <returns>An item status based on the input</returns>
public static ItemStatus GetStatusFromString(string status)
{
ItemStatus itemStatus = ItemStatus.NULL;
switch (status.ToLowerInvariant())
{
case "none":
itemStatus |= ItemStatus.None;
break;
case "good":
itemStatus |= ItemStatus.Good;
break;
case "baddump":
itemStatus |= ItemStatus.BadDump;
break;
case "nodump":
itemStatus |= ItemStatus.Nodump;
break;
case "verified":
itemStatus |= ItemStatus.Verified;
break;
default:
Globals.Logger.Warning(status + " is not a valid status");
break;
}
return itemStatus;
}
#endregion
}
}
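
A sketch of how the filter above might be used (not part of the commit). It assumes the Rom item type from this commit exposes a CRC property, as the merge code earlier implies; the wrapper names are hypothetical. Note that values without regex metacharacters are matched exactly, while anything else is treated as a case-insensitive pattern.

using System.Collections.Generic;
using SabreTools.Helper.Dats;

public static class FilterExample
{
    // Hypothetical harness for illustration only
    public static void Demonstrate()
    {
        // Keep items whose machine name matches "pac.*" and whose CRC is not on the block list
        Filter filter = new Filter
        {
            GameNames = new List<string> { "pac.*" },
            NotCRCs = new List<string> { "deadbeef" },
        };

        Rom rom = new Rom
        {
            Name = "pacman.6e",
            CRC = "0f536f6b",
            Machine = new Machine("pacman", "Pac-Man"),
        };

        bool keep = filter.ItemPasses(rom);   // true: the name matches the pattern and the CRC is not blocked
    }
}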

View File

@@ -0,0 +1,157 @@
using System;
using System.Collections.Generic;
using SabreTools.Helper.Data;
namespace SabreTools.Helper.Dats
{
public class Machine : ICloneable
{
#region Protected instance variables
// Machine information
protected string _name;
protected string _comment;
protected string _description;
protected string _year;
protected string _manufacturer;
protected string _romOf;
protected string _cloneOf;
protected string _sampleOf;
protected string _sourceFile;
protected bool? _runnable;
protected string _board;
protected string _rebuildTo;
protected List<string> _devices;
protected MachineType _machineType;
#endregion
#region Publicly facing variables
// Machine information
public string Name
{
get { return _name; }
set { _name = value; }
}
public string Comment
{
get { return _comment; }
set { _comment = value; }
}
public string Description
{
get { return _description; }
set { _description = value; }
}
public string Year
{
get { return _year; }
set { _year = value; }
}
public string Manufacturer
{
get { return _manufacturer; }
set { _manufacturer = value; }
}
public string RomOf
{
get { return _romOf; }
set { _romOf = value; }
}
public string CloneOf
{
get { return _cloneOf; }
set { _cloneOf = value; }
}
public string SampleOf
{
get { return _sampleOf; }
set { _sampleOf = value; }
}
public string SourceFile
{
get { return _sourceFile; }
set { _sourceFile = value; }
}
public bool? Runnable
{
get { return _runnable; }
set { _runnable = value; }
}
public string Board
{
get { return _board; }
set { _board = value; }
}
public string RebuildTo
{
get { return _rebuildTo; }
set { _rebuildTo = value; }
}
public List<string> Devices
{
get { return _devices; }
set { _devices = value; }
}
public MachineType MachineType
{
get { return _machineType; }
set { _machineType = value; }
}
#endregion
#region Constructors
/// <summary>
/// Create a default, empty Machine object
/// </summary>
public Machine()
{
_name = "";
_description = "";
_runnable = null;
}
/// <summary>
/// Create a new Machine object with the included information
/// </summary>
/// <param name="name">Name of the machine</param>
/// <param name="description">Description of the machine</param>
public Machine(string name, string description)
{
_name = name;
_description = description;
_runnable = null;
}
#endregion
#region Cloning Methods
public object Clone()
{
return new Machine()
{
Name = _name,
Comment = _comment,
Description = _description,
Year = _year,
Manufacturer = _manufacturer,
RomOf = _romOf,
CloneOf = _cloneOf,
SampleOf = _sampleOf,
SourceFile = _sourceFile,
Runnable = _runnable,
Board = _board,
RebuildTo = _rebuildTo,
Devices = _devices,
MachineType = _machineType,
};
}
#endregion
}
}
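
One behavior of Machine.Clone worth noting, sketched below (not part of the commit): the scalar fields are copied, but the Devices list is copied by reference, so the clone and the original share the same list instance. The wrapper names are hypothetical.

using System.Collections.Generic;
using SabreTools.Helper.Dats;

public static class MachineExample
{
    // Hypothetical harness for illustration only
    public static void Demonstrate()
    {
        Machine original = new Machine("pacman", "Pac-Man")
        {
            Devices = new List<string> { "joystick" },
        };

        // Clone copies each field; the Devices list reference is shared, not deep-copied
        Machine copy = (Machine)original.Clone();
        copy.Devices.Add("dial");
        int count = original.Devices.Count;   // 2: the original sees the device added to the clone
    }
}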

View File

@@ -0,0 +1,539 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using SabreTools.Helper.Data;
using SabreTools.Helper.Tools;
#if MONO
using System.IO;
#else
using Alphaleonis.Win32.Filesystem;
using SearchOption = System.IO.SearchOption;
#endif
using NaturalSort;
namespace SabreTools.Helper.Dats
{
public partial class DatFile
{
#region Converting and Updating
/// <summary>
/// Determine if input files should be merged, diffed, or processed individually
/// </summary>
/// <param name="inputPaths">Names of the input files and/or folders</param>
/// <param name="outDir">Optional param for output directory</param>
/// <param name="merge">True if input files should be merged into a single file, false otherwise</param>
/// <param name="diff">Non-zero flag for diffing mode, zero otherwise</param>
/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
/// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
/// <param name="bare">True if the date should not be appended to the default name, false otherwise [OBSOLETE]</param>
/// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
/// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
/// <param name="descAsName">True to allow SL DATs to have game names used instead of descriptions, false otherwise (default)</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
/// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param>
public void DetermineUpdateType(List<string> inputPaths, string outDir, bool merge, DiffMode diff, bool inplace, bool skip,
bool bare, bool clean, bool remUnicode, bool descAsName, Filter filter, SplitType splitType, bool trim, bool single, string root)
{
// If we're in merging or diffing mode, use the full list of inputs
if (merge || diff != 0)
{
// Make sure there are no folders in inputs
List<string> newInputFileNames = FileTools.GetOnlyFilesFromInputs(inputPaths, appendparent: true);
// If we're in inverse cascade, reverse the list
if ((diff & DiffMode.ReverseCascade) != 0)
{
newInputFileNames.Reverse();
}
// Create a dictionary of all ROMs from the input DATs
List<DatFile> datHeaders = PopulateUserData(newInputFileNames, inplace, clean,
remUnicode, descAsName, outDir, filter, splitType, trim, single, root);
// Modify the Dictionary if necessary and output the results
if (diff != 0 && diff < DiffMode.Cascade)
{
DiffNoCascade(diff, outDir, newInputFileNames);
}
// If we're in cascade and diff, output only cascaded diffs
else if (diff != 0 && diff >= DiffMode.Cascade)
{
DiffCascade(outDir, inplace, newInputFileNames, datHeaders, skip);
}
// Output all entries with user-defined merge
else
{
MergeNoDiff(outDir, newInputFileNames, datHeaders);
}
}
// Otherwise, loop through all of the inputs individually
else
{
Update(inputPaths, outDir, inplace, clean, remUnicode, descAsName, filter, splitType, trim, single, root);
}
return;
}
/// <summary>
/// Populate the user DatData object from the input files
/// </summary>
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
/// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param>
/// <returns>List of DatData objects representing headers</returns>
private List<DatFile> PopulateUserData(List<string> inputs, bool inplace, bool clean, bool remUnicode, bool descAsName,
string outDir, Filter filter, SplitType splitType, bool trim, bool single, string root)
{
DatFile[] datHeaders = new DatFile[inputs.Count];
DateTime start = DateTime.Now;
Globals.Logger.User("Processing individual DATs");
// Parse all of the DATs into their own DatFiles in the array
Parallel.For(0, inputs.Count, Globals.ParallelOptions, i =>
{
string input = inputs[i];
Globals.Logger.User("Adding DAT: " + input.Split('¬')[0]);
datHeaders[i] = new DatFile
{
DatFormat = (DatFormat != 0 ? DatFormat : 0),
MergeRoms = MergeRoms,
};
datHeaders[i].Parse(input.Split('¬')[0], i, 0, splitType, true, clean, descAsName);
});
Globals.Logger.User("Processing complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
Globals.Logger.User("Populating internal DAT");
Parallel.For(0, inputs.Count, Globals.ParallelOptions, i =>
{
// Get the list of keys from the DAT
List<string> keys = datHeaders[i].Keys.ToList();
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
{
// Add everything from the key to the internal DAT
AddRange(key, datHeaders[i][key]);
// Now remove the key from the source DAT
lock (datHeaders)
{
datHeaders[i].Remove(key);
}
});
// Now remove the file dictionary from the source DAT to save memory
datHeaders[i].Delete();
});
// Now that we have a merged DAT, filter it
Filter(filter, single, trim, root);
Globals.Logger.User("Processing and populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
return datHeaders.ToList();
}
/// <summary>
/// Output non-cascading diffs
/// </summary>
/// <param name="diff">Non-zero flag for diffing mode, zero otherwise</param>
/// <param name="outDir">Output directory to write the DATs to</param>
/// <param name="inputs">List of inputs to write out from</param>
public void DiffNoCascade(DiffMode diff, string outDir, List<string> inputs)
{
DateTime start = DateTime.Now;
Globals.Logger.User("Initializing all output DATs");
// Default vars for use
string post = "";
DatFile outerDiffData = new DatFile();
DatFile dupeData = new DatFile();
// Fill in any information not in the base DAT
if (String.IsNullOrEmpty(_fileName))
{
_fileName = "All DATs";
}
if (String.IsNullOrEmpty(_name))
{
_name = "All DATs";
}
if (String.IsNullOrEmpty(_description))
{
_description = "All DATs";
}
// Don't have External dupes
if ((diff & DiffMode.NoDupes) != 0)
{
post = " (No Duplicates)";
outerDiffData = new DatFile(this);
outerDiffData.FileName += post;
outerDiffData.Name += post;
outerDiffData.Description += post;
outerDiffData.Reset();
}
// Have External dupes
if ((diff & DiffMode.Dupes) != 0)
{
post = " (Duplicates)";
dupeData = new DatFile(this);
dupeData.FileName += post;
dupeData.Name += post;
dupeData.Description += post;
dupeData.Reset();
}
// Create a list of DatData objects representing individual output files
List<DatFile> outDats = new List<DatFile>();
// Loop through each of the inputs and get or create a new DatData object
if ((diff & DiffMode.Individuals) != 0)
{
DatFile[] outDatsArray = new DatFile[inputs.Count];
Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
{
string innerpost = " (" + Path.GetFileNameWithoutExtension(inputs[j].Split('¬')[0]) + " Only)";
DatFile diffData = new DatFile(this);
diffData.FileName += innerpost;
diffData.Name += innerpost;
diffData.Description += innerpost;
diffData.Reset();
outDatsArray[j] = diffData;
});
outDats = outDatsArray.ToList();
}
Globals.Logger.User("Initializing complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
// Now, loop through the dictionary and populate the correct DATs
start = DateTime.Now;
Globals.Logger.User("Populating all output DATs");
List<string> keys = Keys.ToList();
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = DatItem.Merge(this[key]);
// If the rom list is empty or null, just skip it
if (items == null || items.Count == 0)
{
return;
}
// Loop through and add the items correctly
Parallel.ForEach(items, Globals.ParallelOptions, item =>
{
// No duplicates
if ((diff & DiffMode.NoDupes) != 0 || (diff & DiffMode.Individuals) != 0)
{
if ((item.Dupe & DupeType.Internal) != 0)
{
// Individual DATs that are output
if ((diff & DiffMode.Individuals) != 0)
{
outDats[item.SystemID].Add(key, item);
}
// Merged no-duplicates DAT
if ((diff & DiffMode.NoDupes) != 0)
{
DatItem newrom = item;
newrom.Machine.Name += " (" + Path.GetFileNameWithoutExtension(inputs[newrom.SystemID].Split('¬')[0]) + ")";
outerDiffData.Add(key, newrom);
}
}
}
// Duplicates only
if ((diff & DiffMode.Dupes) != 0)
{
if ((item.Dupe & DupeType.External) != 0)
{
DatItem newrom = item;
newrom.Machine.Name += " (" + Path.GetFileNameWithoutExtension(inputs[newrom.SystemID].Split('¬')[0]) + ")";
dupeData.Add(key, newrom);
}
}
});
});
Globals.Logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
// Finally, loop through and output each of the DATs
start = DateTime.Now;
Globals.Logger.User("Outputting all created DATs");
// Output the difflist (a-b)+(b-a) diff
if ((diff & DiffMode.NoDupes) != 0)
{
outerDiffData.WriteToFile(outDir);
}
			// Output the duplicates (a∩b) diff
if ((diff & DiffMode.Dupes) != 0)
{
dupeData.WriteToFile(outDir);
}
// Output the individual (a-b) DATs
if ((diff & DiffMode.Individuals) != 0)
{
Parallel.For(0, inputs.Count, j =>
{
// If we have an output directory set, replace the path
string[] split = inputs[j].Split('¬');
string path = outDir + (split[0] == split[1]
? Path.GetFileName(split[0])
: (Path.GetDirectoryName(split[0]).Remove(0, split[1].Length)));
// Try to output the file
outDats[j].WriteToFile(path);
});
}
Globals.Logger.User("Outputting complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
}
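		/// <summary>
		/// Illustrative sketch only (hypothetical helper, not part of the original source):
		/// shows how the DiffMode flags checked above can be combined for a single call
		/// </summary>
		private void ExampleDiffNoCascade(List<string> inputs, string outDir)
		{
			// Request the per-input (a-b) DATs, the merged no-duplicates DAT, and the duplicates-only DAT at once
			DiffMode mode = DiffMode.Individuals | DiffMode.NoDupes | DiffMode.Dupes;
			DiffNoCascade(mode, outDir, inputs);
		}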
/// <summary>
/// Output cascading diffs
/// </summary>
/// <param name="outDir">Output directory to write the DATs to</param>
/// <param name="inplace">True if cascaded diffs are outputted in-place, false otherwise</param>
/// <param name="inputs">List of inputs to write out from</param>
/// <param name="datHeaders">Dat headers used optionally</param>
/// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
public void DiffCascade(string outDir, bool inplace, List<string> inputs, List<DatFile> datHeaders, bool skip)
{
string post = "";
// Create a list of DatData objects representing output files
List<DatFile> outDats = new List<DatFile>();
// Loop through each of the inputs and get or create a new DatData object
DateTime start = DateTime.Now;
Globals.Logger.User("Initializing all output DATs");
DatFile[] outDatsArray = new DatFile[inputs.Count];
Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
{
string innerpost = " (" + Path.GetFileNameWithoutExtension(inputs[j].Split('¬')[0]) + " Only)";
DatFile diffData;
// If we're in inplace mode, take the appropriate DatData object already stored
if (inplace || !String.IsNullOrEmpty(outDir))
{
diffData = datHeaders[j];
}
else
{
diffData = new DatFile(this);
diffData.FileName += post;
diffData.Name += post;
diffData.Description += post;
}
diffData.Reset();
outDatsArray[j] = diffData;
});
outDats = outDatsArray.ToList();
Globals.Logger.User("Initializing complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
// Now, loop through the dictionary and populate the correct DATs
start = DateTime.Now;
Globals.Logger.User("Populating all output DATs");
List<string> keys = Keys.ToList();
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = DatItem.Merge(this[key]);
// If the rom list is empty or null, just skip it
if (items == null || items.Count == 0)
{
return;
}
Parallel.ForEach(items, Globals.ParallelOptions, item =>
{
				// There are odd cases where items have a SystemID < 0. Skip them for now
if (item.SystemID < 0)
{
Globals.Logger.Warning("Item found with a <0 SystemID: " + item.Name);
return;
}
outDats[item.SystemID].Add(key, item);
});
});
Globals.Logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
// Finally, loop through and output each of the DATs
start = DateTime.Now;
Globals.Logger.User("Outputting all created DATs");
Parallel.For((skip ? 1 : 0), inputs.Count, j =>
{
// If we have an output directory set, replace the path
string path = "";
if (inplace)
{
path = Path.GetDirectoryName(inputs[j].Split('¬')[0]);
}
else if (!String.IsNullOrEmpty(outDir))
{
string[] split = inputs[j].Split('¬');
path = outDir + (split[0] == split[1]
? Path.GetFileName(split[0])
					: (Path.GetDirectoryName(split[0]).Remove(0, split[1].Length)));
}
// Try to output the file
outDats[j].WriteToFile(path);
});
Globals.Logger.User("Outputting complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
}
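		/// <summary>
		/// Illustrative sketch only (hypothetical helper, not part of the original source):
		/// shows how the headers returned by PopulateUserData feed the cascading diff
		/// </summary>
		private void ExampleDiffCascade(List<string> inputs, Filter filter, SplitType splitType, string outDir)
		{
			// Merge the inputs into this DAT; the returned list holds one header per input
			List<DatFile> datHeaders = PopulateUserData(inputs, false, true, false, false, outDir, filter, splitType, false, false, "");
			// skip: true leaves the first (lowest) input out of the cascaded output
			DiffCascade(outDir, false, inputs, datHeaders, true);
		}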
/// <summary>
/// Output user defined merge
/// </summary>
/// <param name="outDir">Output directory to write the DATs to</param>
/// <param name="inputs">List of inputs to write out from</param>
/// <param name="datHeaders">Dat headers used optionally</param>
public void MergeNoDiff(string outDir, List<string> inputs, List<DatFile> datHeaders)
{
// If we're in SuperDAT mode, prefix all games with their respective DATs
if (Type == "SuperDAT")
{
List<string> keys = Keys.ToList();
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = this[key].ToList();
List<DatItem> newItems = new List<DatItem>();
Parallel.ForEach(items, Globals.ParallelOptions, item =>
{
DatItem newItem = item;
string filename = inputs[newItem.SystemID].Split('¬')[0];
string rootpath = inputs[newItem.SystemID].Split('¬')[1];
rootpath += (rootpath == "" ? "" : Path.DirectorySeparatorChar.ToString());
filename = filename.Remove(0, rootpath.Length);
newItem.Machine.Name = Path.GetDirectoryName(filename) + Path.DirectorySeparatorChar
+ Path.GetFileNameWithoutExtension(filename) + Path.DirectorySeparatorChar
+ newItem.Machine.Name;
lock (newItems)
{
newItems.Add(newItem);
}
});
Remove(key);
AddRange(key, newItems);
});
}
// Try to output the file
WriteToFile(outDir);
}
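		/// <summary>
		/// Illustrative sketch only (hypothetical helper, not part of the original source):
		/// a plain merge of several inputs; in SuperDAT mode each game is prefixed with the
		/// relative path of the DAT it came from before the single output is written
		/// </summary>
		private void ExampleMergeNoDiff(List<string> inputs, Filter filter, SplitType splitType, string outDir)
		{
			List<DatFile> datHeaders = PopulateUserData(inputs, false, true, false, false, outDir, filter, splitType, false, false, "");
			MergeNoDiff(outDir, inputs, datHeaders);
		}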
/// <summary>
/// Convert, update, and filter a DAT file or set of files using a base
/// </summary>
/// <param name="inputFileNames">Names of the input files and/or folders</param>
/// <param name="outDir">Optional param for output directory</param>
/// <param name="merge">True if input files should be merged into a single file, false otherwise</param>
/// <param name="diff">Non-zero flag for diffing mode, zero otherwise</param>
/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
/// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
/// <param name="bare">True if the date should not be appended to the default name, false otherwise [OBSOLETE]</param>
/// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
/// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
/// <param name="descAsName">True to allow SL DATs to have game names used instead of descriptions, false otherwise (default)</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
/// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param>
public void Update(List<string> inputFileNames, string outDir, bool inplace, bool clean, bool remUnicode, bool descAsName,
Filter filter, SplitType splitType, bool trim, bool single, string root)
{
Parallel.ForEach(inputFileNames, Globals.ParallelOptions, inputFileName =>
{
// Clean the input string
if (inputFileName != "")
{
inputFileName = Path.GetFullPath(inputFileName);
}
if (File.Exists(inputFileName))
{
// If inplace is set, override the output dir
string realOutDir = outDir;
if (inplace)
{
realOutDir = Path.GetDirectoryName(inputFileName);
}
DatFile innerDatdata = new DatFile(this);
Globals.Logger.User("Processing \"" + Path.GetFileName(inputFileName) + "\"");
innerDatdata.Parse(inputFileName, 0, 0, splitType, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName,
keepext: ((innerDatdata.DatFormat & DatFormat.TSV) != 0 || (innerDatdata.DatFormat & DatFormat.CSV) != 0));
innerDatdata.Filter(filter, trim, single, root);
// Try to output the file
innerDatdata.WriteToFile((realOutDir == "" ? Path.GetDirectoryName(inputFileName) : realOutDir), overwrite: (realOutDir != ""));
}
else if (Directory.Exists(inputFileName))
{
inputFileName = Path.GetFullPath(inputFileName) + Path.DirectorySeparatorChar;
// If inplace is set, override the output dir
string realOutDir = outDir;
if (inplace)
{
realOutDir = Path.GetDirectoryName(inputFileName);
}
List<string> subFiles = Directory.EnumerateFiles(inputFileName, "*", SearchOption.AllDirectories).ToList();
Parallel.ForEach(subFiles, Globals.ParallelOptions, file =>
{
Globals.Logger.User("Processing \"" + Path.GetFullPath(file).Remove(0, inputFileName.Length) + "\"");
DatFile innerDatdata = new DatFile(this);
innerDatdata.Parse(file, 0, 0, splitType, true, clean, descAsName,
keepext: ((innerDatdata.DatFormat & DatFormat.TSV) != 0 || (innerDatdata.DatFormat & DatFormat.CSV) != 0));
innerDatdata.Filter(filter, trim, single, root);
// Try to output the file
innerDatdata.WriteToFile((realOutDir == "" ? Path.GetDirectoryName(file) : realOutDir + Path.GetDirectoryName(file).Remove(0, inputFileName.Length - 1)),
overwrite: (realOutDir != ""));
});
}
else
{
Globals.Logger.Error("I'm sorry but " + inputFileName + " doesn't exist!");
return;
}
});
}
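		/// <summary>
		/// Illustrative sketch only (hypothetical helper, not part of the original source):
		/// converts every DAT under a folder in place, cleaning game names; the folder path is a placeholder
		/// </summary>
		private void ExampleUpdate(Filter filter, SplitType splitType)
		{
			List<string> inputs = new List<string> { @"C:\dats\to-convert" };
			// inplace: true writes each converted DAT next to its source
			Update(inputs, "", true, true, false, false, filter, splitType, false, false, "");
		}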
#endregion
}
}

View File

@@ -0,0 +1,433 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using SabreTools.Helper.Data;
using SabreTools.Helper.Tools;
#if MONO
using System.IO;
#else
using Alphaleonis.Win32.Filesystem;
using IOException = System.IO.IOException;
using SearchOption = System.IO.SearchOption;
#endif
using SharpCompress.Common;
namespace SabreTools.Helper.Dats
{
public partial class DatFile
{
#region Populate DAT from Directory [MODULAR DONE, FOR NOW]
/// <summary>
/// Create a new Dat from a directory
/// </summary>
/// <param name="basePath">Base folder to be used in creating the DAT</param>
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
/// <param name="bare">True if the date should be omitted from the DAT, false otherwise</param>
/// <param name="archivesAsFiles">True if archives should be treated as files, false otherwise</param>
/// <param name="enableGzip">True if GZIP archives should be treated as files, false otherwise</param>
/// <param name="skipFileType">Type of files that should be skipped</param>
/// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
/// <param name="tempDir">Name of the directory to create a temp folder in (blank is current directory)</param>
/// <param name="outDir">Output directory to </param>
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
public bool PopulateFromDir(string basePath, Hash omitFromScan, bool bare, bool archivesAsFiles, bool enableGzip,
SkipFileType skipFileType, bool addBlanks, bool addDate, string tempDir, bool copyFiles, string headerToCheckAgainst)
{
// If the description is defined but not the name, set the name from the description
if (String.IsNullOrEmpty(Name) && !String.IsNullOrEmpty(Description))
{
Name = Description;
}
// If the name is defined but not the description, set the description from the name
else if (!String.IsNullOrEmpty(Name) && String.IsNullOrEmpty(Description))
{
Description = Name + (bare ? "" : " (" + Date + ")");
}
// If neither the name or description are defined, set them from the automatic values
else if (String.IsNullOrEmpty(Name) && String.IsNullOrEmpty(Description))
{
Name = basePath.Split(Path.DirectorySeparatorChar).Last();
Description = Name + (bare ? "" : " (" + Date + ")");
}
// Process the input
if (Directory.Exists(basePath))
{
Globals.Logger.Verbose("Folder found: " + basePath);
// Process the files in the main folder
List<string> files = Directory.EnumerateFiles(basePath, "*", SearchOption.TopDirectoryOnly).ToList();
Parallel.ForEach(files, Globals.ParallelOptions, item =>
{
PopulateFromDirCheckFile(item, basePath, omitFromScan, bare, archivesAsFiles, enableGzip, skipFileType,
addBlanks, addDate, tempDir, copyFiles, headerToCheckAgainst);
});
// Find all top-level subfolders
files = Directory.EnumerateDirectories(basePath, "*", SearchOption.TopDirectoryOnly).ToList();
Parallel.ForEach(files, Globals.ParallelOptions, item =>
{
List<string> subfiles = Directory.EnumerateFiles(item, "*", SearchOption.AllDirectories).ToList();
Parallel.ForEach(subfiles, Globals.ParallelOptions, subitem =>
{
PopulateFromDirCheckFile(subitem, basePath, omitFromScan, bare, archivesAsFiles, enableGzip, skipFileType,
addBlanks, addDate, tempDir, copyFiles, headerToCheckAgainst);
});
});
// Now find all folders that are empty, if we are supposed to
if (!Romba && addBlanks)
{
List<string> empties = FileTools.GetEmptyDirectories(basePath).ToList();
Parallel.ForEach(empties, Globals.ParallelOptions, dir =>
{
// Get the full path for the directory
string fulldir = Path.GetFullPath(dir);
// Set the temporary variables
string gamename = "";
string romname = "";
// If we have a SuperDAT, we want anything that's not the base path as the game, and the file as the rom
if (Type == "SuperDAT")
{
gamename = fulldir.Remove(0, basePath.Length + 1);
romname = "_";
}
// Otherwise, we want just the top level folder as the game, and the file as everything else
else
{
gamename = fulldir.Remove(0, basePath.Length + 1).Split(Path.DirectorySeparatorChar)[0];
romname = Path.Combine(fulldir.Remove(0, basePath.Length + 1 + gamename.Length), "_");
}
// Sanitize the names
if (gamename.StartsWith(Path.DirectorySeparatorChar.ToString()))
{
gamename = gamename.Substring(1);
}
if (gamename.EndsWith(Path.DirectorySeparatorChar.ToString()))
{
gamename = gamename.Substring(0, gamename.Length - 1);
}
if (romname.StartsWith(Path.DirectorySeparatorChar.ToString()))
{
romname = romname.Substring(1);
}
if (romname.EndsWith(Path.DirectorySeparatorChar.ToString()))
{
romname = romname.Substring(0, romname.Length - 1);
}
Globals.Logger.Verbose("Adding blank empty folder: " + gamename);
this["null"].Add(new Rom(romname, gamename, omitFromScan));
});
}
}
else if (File.Exists(basePath))
{
PopulateFromDirCheckFile(basePath, Path.GetDirectoryName(Path.GetDirectoryName(basePath)), omitFromScan, bare, archivesAsFiles, enableGzip,
skipFileType, addBlanks, addDate, tempDir, copyFiles, headerToCheckAgainst);
}
// Now that we're done, delete the temp folder (if it's not the default)
Globals.Logger.User("Cleaning temp folder");
if (tempDir != Path.GetTempPath())
{
FileTools.TryDeleteDirectory(tempDir);
}
return true;
}
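		/// <summary>
		/// Illustrative sketch only (hypothetical helper, not part of the original source):
		/// a quickscan of a folder using the options handled above; Hash.SecureHashes skips
		/// the deep hashes and the system temp folder is used for extraction
		/// </summary>
		private bool ExamplePopulateFromDir(string folder, SkipFileType skipFileType)
		{
			return PopulateFromDir(folder, Hash.SecureHashes, false, false, false, skipFileType,
				true, false, Path.GetTempPath(), false, null);
		}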
/// <summary>
/// Check a given file for hashes, based on current settings
/// </summary>
/// <param name="item">Filename of the item to be checked</param>
/// <param name="basePath">Base folder to be used in creating the DAT</param>
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
/// <param name="bare">True if the date should be omitted from the DAT, false otherwise</param>
/// <param name="archivesAsFiles">True if archives should be treated as files, false otherwise</param>
/// <param name="enableGzip">True if GZIP archives should be treated as files, false otherwise</param>
/// <param name="skipFileType">Type of files that should be skipped</param>
/// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
/// <param name="tempDir">Name of the directory to create a temp folder in (blank is current directory)</param>
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
private void PopulateFromDirCheckFile(string item, string basePath, Hash omitFromScan, bool bare, bool archivesAsFiles,
bool enableGzip, SkipFileType skipFileType, bool addBlanks, bool addDate, string tempDir, bool copyFiles, string headerToCheckAgainst)
{
// Define the temporary directory
string tempSubDir = Path.GetFullPath(Path.Combine(tempDir, Path.GetRandomFileName())) + Path.DirectorySeparatorChar;
// Special case for if we are in Romba mode (all names are supposed to be SHA-1 hashes)
if (Romba)
{
Rom rom = ArchiveTools.GetTorrentGZFileInfo(item);
// If the rom is valid, write it out
if (rom != null && rom.Name != null)
{
// Add the list if it doesn't exist already
Add(rom.Size + "-" + rom.CRC, rom);
Globals.Logger.User("File added: " + Path.GetFileNameWithoutExtension(item) + Environment.NewLine);
}
else
{
Globals.Logger.User("File not added: " + Path.GetFileNameWithoutExtension(item) + Environment.NewLine);
return;
}
return;
}
// If we're copying files, copy it first and get the new filename
string newItem = item;
string newBasePath = basePath;
if (copyFiles)
{
newBasePath = Path.Combine(tempDir, Path.GetRandomFileName());
newItem = Path.GetFullPath(Path.Combine(newBasePath, Path.GetFullPath(item).Remove(0, basePath.Length + 1)));
Directory.CreateDirectory(Path.GetDirectoryName(newItem));
File.Copy(item, newItem, true);
}
// Create a list for all found items
List<Rom> extracted = null;
			// Temporarily unset archivesAsFiles if we have a GZip archive that should not be treated as a file
if (archivesAsFiles && !enableGzip && newItem.EndsWith(".gz"))
{
archivesAsFiles = false;
}
// If we don't have archives as files, try to scan the file as an archive
if (!archivesAsFiles)
{
// If all deep hash skip flags are set, do a quickscan
if (omitFromScan == Hash.SecureHashes)
{
extracted = ArchiveTools.GetArchiveFileInfo(newItem, date: addDate);
}
// Otherwise, get the list with whatever hashes are wanted
else
{
extracted = ArchiveTools.GetExtendedArchiveFileInfo(newItem, omitFromScan: omitFromScan, date: addDate);
}
}
// If the file should be skipped based on type, do so now
if ((extracted != null && skipFileType == SkipFileType.Archive)
|| (extracted == null && skipFileType == SkipFileType.File))
{
return;
}
// If the extracted list is null, just scan the item itself
if (extracted == null || archivesAsFiles)
{
PopulateFromDirProcessFile(newItem, "", newBasePath, omitFromScan, addDate, headerToCheckAgainst);
}
// Otherwise, add all of the found items
else
{
// First take care of the found items
Parallel.ForEach(extracted, Globals.ParallelOptions, rom =>
{
PopulateFromDirProcessFileHelper(newItem,
rom,
basePath,
(Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item));
});
// Then, if we're looking for blanks, get all of the blank folders and add them
if (addBlanks)
{
List<string> empties = ArchiveTools.GetEmptyFoldersInArchive(newItem);
Parallel.ForEach(empties, Globals.ParallelOptions, empty =>
{
Rom emptyRom = new Rom(Path.Combine(empty, "_"), newItem, omitFromScan);
PopulateFromDirProcessFileHelper(newItem,
emptyRom,
basePath,
(Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item));
});
}
}
// Cue to delete the file if it's a copy
if (copyFiles && item != newItem)
{
FileTools.TryDeleteDirectory(newBasePath);
}
// Delete the sub temp directory
FileTools.TryDeleteDirectory(tempSubDir);
}
/// <summary>
/// Process a single file as a file
/// </summary>
/// <param name="item">File to be added</param>
/// <param name="parent">Parent game to be used</param>
/// <param name="basePath">Path the represents the parent directory</param>
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
private void PopulateFromDirProcessFile(string item, string parent, string basePath, Hash omitFromScan,
bool addDate, string headerToCheckAgainst)
{
Globals.Logger.Verbose(Path.GetFileName(item) + " treated like a file");
Rom rom = FileTools.GetFileInfo(item, omitFromScan: omitFromScan, date: addDate, header: headerToCheckAgainst);
PopulateFromDirProcessFileHelper(item, rom, basePath, parent);
}
/// <summary>
/// Process a single file as a file (with found Rom data)
/// </summary>
/// <param name="item">File to be added</param>
/// <param name="item">Rom data to be used to write to file</param>
/// <param name="basepath">Path the represents the parent directory</param>
/// <param name="parent">Parent game to be used</param>
private void PopulateFromDirProcessFileHelper(string item, DatItem datItem, string basepath, string parent)
{
// If the datItem isn't a Rom or Disk, return
if (datItem.Type != ItemType.Rom && datItem.Type != ItemType.Disk)
{
return;
}
string key = "";
if (datItem.Type == ItemType.Rom)
{
key = ((Rom)datItem).Size + "-" + ((Rom)datItem).CRC;
}
else
{
key = ((Disk)datItem).MD5;
}
// Add the list if it doesn't exist already
Add(key);
try
{
				// If the basepath doesn't end with a directory separator, add one
if (!basepath.EndsWith(Path.DirectorySeparatorChar.ToString()))
{
basepath += Path.DirectorySeparatorChar.ToString();
}
// Make sure we have the full item path
item = Path.GetFullPath(item);
// Get the data to be added as game and item names
string gamename = "";
string romname = "";
// If the parent is blank, then we have a non-archive file
if (parent == "")
{
// If we have a SuperDAT, we want anything that's not the base path as the game, and the file as the rom
if (Type == "SuperDAT")
{
gamename = Path.GetDirectoryName(item.Remove(0, basepath.Length));
romname = Path.GetFileName(item);
}
// Otherwise, we want just the top level folder as the game, and the file as everything else
else
{
gamename = item.Remove(0, basepath.Length).Split(Path.DirectorySeparatorChar)[0];
romname = item.Remove(0, (Path.Combine(basepath, gamename).Length));
}
}
// Otherwise, we assume that we have an archive
else
{
// If we have a SuperDAT, we want the archive name as the game, and the file as everything else (?)
if (Type == "SuperDAT")
{
gamename = parent;
romname = datItem.Name;
}
// Otherwise, we want the archive name as the game, and the file as everything else
else
{
gamename = parent;
romname = datItem.Name;
}
}
// Sanitize the names
if (romname == null)
{
romname = "";
}
if (gamename.StartsWith(Path.DirectorySeparatorChar.ToString()))
{
gamename = gamename.Substring(1);
}
if (gamename.EndsWith(Path.DirectorySeparatorChar.ToString()))
{
gamename = gamename.Substring(0, gamename.Length - 1);
}
if (romname.StartsWith(Path.DirectorySeparatorChar.ToString()))
{
romname = romname.Substring(1);
}
if (romname.EndsWith(Path.DirectorySeparatorChar.ToString()))
{
romname = romname.Substring(0, romname.Length - 1);
}
if (!String.IsNullOrEmpty(gamename) && String.IsNullOrEmpty(romname))
{
romname = gamename;
gamename = "Default";
}
// Update rom information
datItem.Name = romname;
if (datItem.Machine == null)
{
datItem.Machine = new Machine
{
Name = gamename,
Description = gamename,
};
}
else
{
datItem.Machine.Name = gamename;
datItem.Machine.Description = gamename;
}
// Add the file information to the DAT
Add(key, datItem);
Globals.Logger.User("File added: " + romname + Environment.NewLine);
}
catch (IOException ex)
{
Globals.Logger.Error(ex.ToString());
return;
}
}
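		/// <summary>
		/// Illustrative sketch only (hypothetical helper, not part of the original source):
		/// mirrors the bucketing key used above, "size-crc" for roms and the MD5 for disks
		/// </summary>
		private string ExampleDatItemKey(DatItem datItem)
		{
			if (datItem.Type == ItemType.Rom)
			{
				return ((Rom)datItem).Size + "-" + ((Rom)datItem).CRC;
			}
			return ((Disk)datItem).MD5;
		}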
#endregion
}
}

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,638 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using System.Web;
using SabreTools.Helper.Data;
using SabreTools.Helper.Tools;
#if MONO
using System.IO;
#else
using Alphaleonis.Win32.Filesystem;
#endif
using NaturalSort;
namespace SabreTools.Helper.Dats
{
public partial class DatFile
{
#region Splitting
/// <summary>
/// Split a DAT by input extensions
/// </summary>
/// <param name="outDir">Name of the directory to write the DATs out to</param>
/// <param name="basepath">Parent path for replacement</param>
/// <param name="extA">List of extensions to split on (first DAT)</param>
/// <param name="extB">List of extensions to split on (second DAT)</param>
/// <returns>True if split succeeded, false otherwise</returns>
public bool SplitByExt(string outDir, string basepath, List<string> extA, List<string> extB)
{
// Make sure all of the extensions have a dot at the beginning
List<string> newExtA = new List<string>();
Parallel.ForEach(extA, Globals.ParallelOptions, s =>
{
lock (newExtA)
{
newExtA.Add((s.StartsWith(".") ? s : "." + s).ToUpperInvariant());
}
});
string newExtAString = string.Join(",", newExtA);
List<string> newExtB = new List<string>();
Parallel.ForEach(extB, Globals.ParallelOptions, s =>
{
lock (newExtB)
{
newExtB.Add((s.StartsWith(".") ? s : "." + s).ToUpperInvariant());
}
});
string newExtBString = string.Join(",", newExtB);
// Set all of the appropriate outputs for each of the subsets
DatFile datdataA = new DatFile
{
FileName = this.FileName + " (" + newExtAString + ")",
Name = this.Name + " (" + newExtAString + ")",
Description = this.Description + " (" + newExtAString + ")",
Category = this.Category,
Version = this.Version,
Date = this.Date,
Author = this.Author,
Email = this.Email,
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
DatFormat = this.DatFormat,
};
DatFile datdataB = new DatFile
{
FileName = this.FileName + " (" + newExtBString + ")",
Name = this.Name + " (" + newExtBString + ")",
Description = this.Description + " (" + newExtBString + ")",
Category = this.Category,
Version = this.Version,
Date = this.Date,
Author = this.Author,
Email = this.Email,
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
DatFormat = this.DatFormat,
};
			// If the DAT has no items, return false
if (Count == 0)
{
return false;
}
// Now separate the roms accordingly
List<string> keys = Keys.ToList();
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = this[key];
Parallel.ForEach(items, Globals.ParallelOptions, item =>
{
if (newExtA.Contains(Path.GetExtension(item.Name.ToUpperInvariant())))
{
datdataA.Add(key, item);
}
else if (newExtB.Contains(Path.GetExtension(item.Name.ToUpperInvariant())))
{
datdataB.Add(key, item);
}
else
{
datdataA.Add(key, item);
datdataB.Add(key, item);
}
});
});
// Get the output directory
if (outDir != "")
{
outDir = outDir + Path.GetDirectoryName(this.FileName).Remove(0, basepath.Length - 1);
}
else
{
outDir = Path.GetDirectoryName(this.FileName);
}
// Then write out both files
bool success = datdataA.WriteToFile(outDir);
success &= datdataB.WriteToFile(outDir);
return success;
}
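		/// <summary>
		/// Illustrative sketch only (hypothetical helper, not part of the original source):
		/// extensions may be passed with or without the leading dot, and items matching
		/// neither list are written to both outputs
		/// </summary>
		private bool ExampleSplitByExt(string outDir, string basepath)
		{
			List<string> cueLike = new List<string> { "cue", "gdi" };
			List<string> binLike = new List<string> { ".bin", ".raw" };
			return SplitByExt(outDir, basepath, cueLike, binLike);
		}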
/// <summary>
/// Split a DAT by best available hashes
/// </summary>
/// <param name="outDir">Name of the directory to write the DATs out to</param>
/// <param name="basepath">Parent path for replacement</param>
/// <returns>True if split succeeded, false otherwise</returns>
public bool SplitByHash(string outDir, string basepath)
{
// Sanitize the basepath to be more predictable
basepath = (basepath.EndsWith(Path.DirectorySeparatorChar.ToString()) ? basepath : basepath + Path.DirectorySeparatorChar);
// Create each of the respective output DATs
Globals.Logger.User("Creating and populating new DATs");
DatFile nodump = new DatFile
{
FileName = this.FileName + " (Nodump)",
Name = this.Name + " (Nodump)",
Description = this.Description + " (Nodump)",
Category = this.Category,
Version = this.Version,
Date = this.Date,
Author = this.Author,
Email = this.Email,
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
Header = this.Header,
Type = this.Type,
ForceMerging = this.ForceMerging,
ForceNodump = this.ForceNodump,
ForcePacking = this.ForcePacking,
DatFormat = this.DatFormat,
MergeRoms = this.MergeRoms,
};
DatFile sha512 = new DatFile
{
FileName = this.FileName + " (SHA-512)",
Name = this.Name + " (SHA-512)",
Description = this.Description + " (SHA-512)",
Category = this.Category,
Version = this.Version,
Date = this.Date,
Author = this.Author,
Email = this.Email,
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
Header = this.Header,
Type = this.Type,
ForceMerging = this.ForceMerging,
ForceNodump = this.ForceNodump,
ForcePacking = this.ForcePacking,
DatFormat = this.DatFormat,
MergeRoms = this.MergeRoms,
};
DatFile sha384 = new DatFile
{
FileName = this.FileName + " (SHA-384)",
Name = this.Name + " (SHA-384)",
Description = this.Description + " (SHA-384)",
Category = this.Category,
Version = this.Version,
Date = this.Date,
Author = this.Author,
Email = this.Email,
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
Header = this.Header,
Type = this.Type,
ForceMerging = this.ForceMerging,
ForceNodump = this.ForceNodump,
ForcePacking = this.ForcePacking,
DatFormat = this.DatFormat,
MergeRoms = this.MergeRoms,
};
DatFile sha256 = new DatFile
{
FileName = this.FileName + " (SHA-256)",
Name = this.Name + " (SHA-256)",
Description = this.Description + " (SHA-256)",
Category = this.Category,
Version = this.Version,
Date = this.Date,
Author = this.Author,
Email = this.Email,
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
Header = this.Header,
Type = this.Type,
ForceMerging = this.ForceMerging,
ForceNodump = this.ForceNodump,
ForcePacking = this.ForcePacking,
DatFormat = this.DatFormat,
MergeRoms = this.MergeRoms,
};
DatFile sha1 = new DatFile
{
FileName = this.FileName + " (SHA-1)",
Name = this.Name + " (SHA-1)",
Description = this.Description + " (SHA-1)",
Category = this.Category,
Version = this.Version,
Date = this.Date,
Author = this.Author,
Email = this.Email,
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
Header = this.Header,
Type = this.Type,
ForceMerging = this.ForceMerging,
ForceNodump = this.ForceNodump,
ForcePacking = this.ForcePacking,
DatFormat = this.DatFormat,
MergeRoms = this.MergeRoms,
};
DatFile md5 = new DatFile
{
FileName = this.FileName + " (MD5)",
Name = this.Name + " (MD5)",
Description = this.Description + " (MD5)",
Category = this.Category,
Version = this.Version,
Date = this.Date,
Author = this.Author,
Email = this.Email,
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
Header = this.Header,
Type = this.Type,
ForceMerging = this.ForceMerging,
ForceNodump = this.ForceNodump,
ForcePacking = this.ForcePacking,
DatFormat = this.DatFormat,
MergeRoms = this.MergeRoms,
};
DatFile crc = new DatFile
{
FileName = this.FileName + " (CRC)",
Name = this.Name + " (CRC)",
Description = this.Description + " (CRC)",
Category = this.Category,
Version = this.Version,
Date = this.Date,
Author = this.Author,
Email = this.Email,
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
Header = this.Header,
Type = this.Type,
ForceMerging = this.ForceMerging,
ForceNodump = this.ForceNodump,
ForcePacking = this.ForcePacking,
DatFormat = this.DatFormat,
MergeRoms = this.MergeRoms,
};
DatFile other = new DatFile
{
FileName = this.FileName + " (Other)",
Name = this.Name + " (Other)",
Description = this.Description + " (Other)",
Category = this.Category,
Version = this.Version,
Date = this.Date,
Author = this.Author,
Email = this.Email,
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
Header = this.Header,
Type = this.Type,
ForceMerging = this.ForceMerging,
ForceNodump = this.ForceNodump,
ForcePacking = this.ForcePacking,
DatFormat = this.DatFormat,
MergeRoms = this.MergeRoms,
};
// Now populate each of the DAT objects in turn
List<string> keys = Keys.ToList();
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = this[key];
Parallel.ForEach(items, Globals.ParallelOptions, item =>
{
// If the file is not a Rom or Disk, continue
if (item.Type != ItemType.Disk && item.Type != ItemType.Rom)
{
return;
}
// If the file is a nodump
if ((item.Type == ItemType.Rom && ((Rom)item).ItemStatus == ItemStatus.Nodump)
|| (item.Type == ItemType.Disk && ((Disk)item).ItemStatus == ItemStatus.Nodump))
{
nodump.Add(key, item);
}
// If the file has a SHA-512
else if ((item.Type == ItemType.Rom && !String.IsNullOrEmpty(((Rom)item).SHA512))
|| (item.Type == ItemType.Disk && !String.IsNullOrEmpty(((Disk)item).SHA512)))
{
sha512.Add(key, item);
}
// If the file has a SHA-384
else if ((item.Type == ItemType.Rom && !String.IsNullOrEmpty(((Rom)item).SHA384))
|| (item.Type == ItemType.Disk && !String.IsNullOrEmpty(((Disk)item).SHA384)))
{
sha384.Add(key, item);
}
// If the file has a SHA-256
else if ((item.Type == ItemType.Rom && !String.IsNullOrEmpty(((Rom)item).SHA256))
|| (item.Type == ItemType.Disk && !String.IsNullOrEmpty(((Disk)item).SHA256)))
{
sha256.Add(key, item);
}
// If the file has a SHA-1
else if ((item.Type == ItemType.Rom && !String.IsNullOrEmpty(((Rom)item).SHA1))
|| (item.Type == ItemType.Disk && !String.IsNullOrEmpty(((Disk)item).SHA1)))
{
sha1.Add(key, item);
}
// If the file has no SHA-1 but has an MD5
else if ((item.Type == ItemType.Rom && !String.IsNullOrEmpty(((Rom)item).MD5))
|| (item.Type == ItemType.Disk && !String.IsNullOrEmpty(((Disk)item).MD5)))
{
md5.Add(key, item);
}
					// If the file has no MD5 but has a CRC
					else if ((item.Type == ItemType.Rom && !String.IsNullOrEmpty(((Rom)item).CRC))
						|| (item.Type == ItemType.Disk && !String.IsNullOrEmpty(((Disk)item).SHA1)))
{
crc.Add(key, item);
}
else
{
other.Add(key, item);
}
});
});
// Get the output directory
if (outDir != "")
{
outDir = outDir + Path.GetDirectoryName(this.FileName).Remove(0, basepath.Length - 1);
}
else
{
outDir = Path.GetDirectoryName(this.FileName);
}
// Now, output all of the files to the output directory
Globals.Logger.User("DAT information created, outputting new files");
bool success = true;
success &= nodump.WriteToFile(outDir);
success &= sha512.WriteToFile(outDir);
success &= sha384.WriteToFile(outDir);
success &= sha256.WriteToFile(outDir);
success &= sha1.WriteToFile(outDir);
success &= md5.WriteToFile(outDir);
			success &= crc.WriteToFile(outDir);
			success &= other.WriteToFile(outDir);
return success;
}
/// <summary>
/// Split a SuperDAT by lowest available directory level
/// </summary>
/// <param name="outDir">Name of the directory to write the DATs out to</param>
/// <param name="basepath">Parent path for replacement</param>
/// <param name="shortname">True if short names should be used, false otherwise</param>
/// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise</param>
/// <returns>True if split succeeded, false otherwise</returns>
public bool SplitByLevel(string outDir, string basepath, bool shortname, bool basedat)
{
// Sanitize the basepath to be more predictable
basepath = (basepath.EndsWith(Path.DirectorySeparatorChar.ToString()) ? basepath : basepath + Path.DirectorySeparatorChar);
// First, organize by games so that we can do the right thing
BucketBy(SortedBy.Game, false /* mergeroms */, lower: false, norename: true);
// Create a temporary DAT to add things to
DatFile tempDat = new DatFile(this)
{
Name = null,
};
// Sort the input keys
List<string> keys = Keys.ToList();
keys.Sort(SplitByLevelSort);
// Then, we loop over the games
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
{
// Here, the key is the name of the game to be used for comparison
if (tempDat.Name != null && tempDat.Name != Style.GetDirectoryName(key))
{
// Process and output the DAT
SplitByLevelHelper(tempDat, outDir, shortname, basedat);
// Reset the DAT for the next items
tempDat = new DatFile(this)
{
Name = null,
};
}
// Clean the input list and set all games to be pathless
List<DatItem> items = this[key];
items.ForEach(item => item.Machine.Name = Style.GetFileName(item.Machine.Name));
items.ForEach(item => item.Machine.Description = Style.GetFileName(item.Machine.Description));
// Now add the game to the output DAT
tempDat.AddRange(key, items);
// Then set the DAT name to be the parent directory name
tempDat.Name = Style.GetDirectoryName(key);
});
// Then we write the last DAT out since it would be skipped otherwise
SplitByLevelHelper(tempDat, outDir, shortname, basedat);
return true;
}
/// <summary>
/// Helper function for SplitByLevel to sort the input game names
/// </summary>
/// <param name="a">First string to compare</param>
/// <param name="b">Second string to compare</param>
/// <returns>-1 for a coming before b, 0 for a == b, 1 for a coming after b</returns>
private int SplitByLevelSort(string a, string b)
{
NaturalComparer nc = new NaturalComparer();
int adeep = a.Count(c => c == '/' || c == '\\');
int bdeep = b.Count(c => c == '/' || c == '\\');
if (adeep == bdeep)
{
return nc.Compare(a, b);
}
return adeep - bdeep;
}
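		/// <summary>
		/// Illustrative sketch only (hypothetical helper, not part of the original source):
		/// demonstrates the ordering produced by SplitByLevelSort, shallower paths first
		/// and natural-sort order within a depth
		/// </summary>
		private List<string> ExampleSplitByLevelOrdering()
		{
			List<string> keys = new List<string> { "snes/a", "nes", "snes", "nes/z" };
			// After sorting: "nes", "snes", "nes/z", "snes/a"
			keys.Sort(SplitByLevelSort);
			return keys;
		}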
/// <summary>
/// Helper function for SplitByLevel to clean and write out a DAT
/// </summary>
/// <param name="datFile">DAT to clean and write out</param>
/// <param name="outDir">Directory to write out to</param>
/// <param name="shortname">True if short naming scheme should be used, false otherwise</param>
/// <param name="restore">True if original filenames should be used as the base for output filename, false otherwise</param>
private void SplitByLevelHelper(DatFile datFile, string outDir, bool shortname, bool restore)
{
// Get the name from the DAT to use separately
string name = datFile.Name;
string expName = name.Replace("/", " - ").Replace("\\", " - ");
// Get the path that the file will be written out to
string path = HttpUtility.HtmlDecode(String.IsNullOrEmpty(name)
? outDir
: Path.Combine(outDir, name));
// Now set the new output values
datFile.FileName = HttpUtility.HtmlDecode(String.IsNullOrEmpty(name)
? FileName
: (shortname
? Style.GetFileName(name)
: expName
)
);
datFile.FileName = (restore ? FileName + " (" + datFile.FileName + ")" : datFile.FileName);
datFile.Name = Name + " (" + expName + ")";
datFile.Description = (String.IsNullOrEmpty(Description) ? datFile.Name : Description + " (" + expName + ")");
datFile.Type = null;
// Write out the temporary DAT to the proper directory
datFile.WriteToFile(path);
}
/// <summary>
/// Split a DAT by type of Rom
/// </summary>
/// <param name="outDir">Name of the directory to write the DATs out to</param>
/// <param name="basepath">Parent path for replacement</param>
/// <returns>True if split succeeded, false otherwise</returns>
public bool SplitByType(string outDir, string basepath)
{
// Sanitize the basepath to be more predictable
basepath = (basepath.EndsWith(Path.DirectorySeparatorChar.ToString()) ? basepath : basepath + Path.DirectorySeparatorChar);
// Create each of the respective output DATs
Globals.Logger.User("Creating and populating new DATs");
DatFile romdat = new DatFile
{
FileName = this.FileName + " (ROM)",
Name = this.Name + " (ROM)",
Description = this.Description + " (ROM)",
Category = this.Category,
Version = this.Version,
Date = this.Date,
Author = this.Author,
Email = this.Email,
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
Header = this.Header,
Type = this.Type,
ForceMerging = this.ForceMerging,
ForceNodump = this.ForceNodump,
ForcePacking = this.ForcePacking,
DatFormat = this.DatFormat,
MergeRoms = this.MergeRoms,
};
DatFile diskdat = new DatFile
{
FileName = this.FileName + " (Disk)",
Name = this.Name + " (Disk)",
Description = this.Description + " (Disk)",
Category = this.Category,
Version = this.Version,
Date = this.Date,
Author = this.Author,
Email = this.Email,
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
Header = this.Header,
Type = this.Type,
ForceMerging = this.ForceMerging,
ForceNodump = this.ForceNodump,
ForcePacking = this.ForcePacking,
DatFormat = this.DatFormat,
MergeRoms = this.MergeRoms,
};
DatFile sampledat = new DatFile
{
FileName = this.FileName + " (Sample)",
Name = this.Name + " (Sample)",
Description = this.Description + " (Sample)",
Category = this.Category,
Version = this.Version,
Date = this.Date,
Author = this.Author,
Email = this.Email,
Homepage = this.Homepage,
Url = this.Url,
Comment = this.Comment,
Header = this.Header,
Type = this.Type,
ForceMerging = this.ForceMerging,
ForceNodump = this.ForceNodump,
ForcePacking = this.ForcePacking,
DatFormat = this.DatFormat,
MergeRoms = this.MergeRoms,
};
// Now populate each of the DAT objects in turn
List<string> keys = Keys.ToList();
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = this[key];
Parallel.ForEach(items, Globals.ParallelOptions, item =>
{
// If the file is a Rom
if (item.Type == ItemType.Rom)
{
romdat.Add(key, item);
}
// If the file is a Disk
else if (item.Type == ItemType.Disk)
{
diskdat.Add(key, item);
}
// If the file is a Sample
else if (item.Type == ItemType.Sample)
{
sampledat.Add(key, item);
}
});
});
// Get the output directory
if (outDir != "")
{
outDir = outDir + Path.GetDirectoryName(this.FileName).Remove(0, basepath.Length - 1);
}
else
{
outDir = Path.GetDirectoryName(this.FileName);
}
// Now, output all of the files to the output directory
Globals.Logger.User("DAT information created, outputting new files");
bool success = true;
success &= romdat.WriteToFile(outDir);
success &= diskdat.WriteToFile(outDir);
success &= sampledat.WriteToFile(outDir);
return success;
}
#endregion
}
}

View File

@@ -0,0 +1,752 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using System.Web;
using SabreTools.Helper.Data;
using SabreTools.Helper.Tools;
#if MONO
using System.IO;
#else
using Alphaleonis.Win32.Filesystem;
using SearchOption = System.IO.SearchOption;
using StreamWriter = System.IO.StreamWriter;
#endif
namespace SabreTools.Helper.Dats
{
/*
* TODO: Make output standard width (HTML, without making the entire thing a table)
* TODO: Multithreading? Either StringBuilder or locking
*/
public partial class DatFile
{
#region Instance Methods
#region Statistics
/// <summary>
/// Recalculate the statistics for the Dat
/// </summary>
public void RecalculateStats()
{
// Wipe out any stats already there
RomCount = 0;
DiskCount = 0;
TotalSize = 0;
CRCCount = 0;
MD5Count = 0;
SHA1Count = 0;
			SHA256Count = 0;
			SHA384Count = 0;
			SHA512Count = 0;
BaddumpCount = 0;
NodumpCount = 0;
// If we have a blank Dat in any way, return
if (this == null || Count == 0)
{
return;
}
// Loop through and add
List<string> keys = Keys.ToList();
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = this[key];
Parallel.ForEach(items, Globals.ParallelOptions, item =>
{
switch (item.Type)
{
case ItemType.Archive:
break;
case ItemType.BiosSet:
break;
case ItemType.Disk:
Disk disk = (Disk)item;
DiskCount += 1;
MD5Count += (String.IsNullOrEmpty(disk.MD5) ? 0 : 1);
SHA1Count += (String.IsNullOrEmpty(disk.SHA1) ? 0 : 1);
SHA256Count += (String.IsNullOrEmpty(disk.SHA256) ? 0 : 1);
SHA384Count += (String.IsNullOrEmpty(disk.SHA384) ? 0 : 1);
SHA512Count += (String.IsNullOrEmpty(disk.SHA512) ? 0 : 1);
BaddumpCount += (disk.ItemStatus == ItemStatus.BadDump ? 1 : 0);
NodumpCount += (disk.ItemStatus == ItemStatus.Nodump ? 1 : 0);
break;
case ItemType.Release:
break;
case ItemType.Rom:
Rom rom = (Rom)item;
RomCount += 1;
TotalSize += (rom.ItemStatus == ItemStatus.Nodump ? 0 : rom.Size);
CRCCount += (String.IsNullOrEmpty(rom.CRC) ? 0 : 1);
MD5Count += (String.IsNullOrEmpty(rom.MD5) ? 0 : 1);
SHA1Count += (String.IsNullOrEmpty(rom.SHA1) ? 0 : 1);
SHA256Count += (String.IsNullOrEmpty(rom.SHA256) ? 0 : 1);
SHA384Count += (String.IsNullOrEmpty(rom.SHA384) ? 0 : 1);
SHA512Count += (String.IsNullOrEmpty(rom.SHA512) ? 0 : 1);
BaddumpCount += (rom.ItemStatus == ItemStatus.BadDump ? 1 : 0);
NodumpCount += (rom.ItemStatus == ItemStatus.Nodump ? 1 : 0);
break;
case ItemType.Sample:
break;
}
});
});
}
/// <summary>
/// Output the stats for the Dat in a human-readable format
/// </summary>
/// <param name="outputs">Dictionary representing the outputs</param>
/// <param name="statDatFormat">Set the statistics output format to use</param>
/// <param name="recalculate">True if numbers should be recalculated for the DAT, false otherwise (default)</param>
/// <param name="game">Number of games to use, -1 means recalculate games (default)</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise (default)</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise (default)</param>
public void OutputStats(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat,
bool recalculate = false, long game = -1, bool baddumpCol = false, bool nodumpCol = false)
{
// If we're supposed to recalculate the statistics, do so
if (recalculate)
{
RecalculateStats();
}
BucketBy(SortedBy.Game, false /* mergeroms */, norename: true);
if (TotalSize < 0)
{
TotalSize = Int64.MaxValue + TotalSize;
}
// Log the results to screen
string results = @"For '" + FileName + @"':
--------------------------------------------------
Uncompressed size: " + Style.GetBytesReadable(TotalSize) + @"
Games found: " + (game == -1 ? Keys.Count() : game) + @"
Roms found: " + RomCount + @"
Disks found: " + DiskCount + @"
Roms with CRC: " + CRCCount + @"
Roms with MD5: " + MD5Count + @"
Roms with SHA-1: " + SHA1Count + @"
Roms with SHA-256: " + SHA256Count + @"
Roms with SHA-384: " + SHA384Count + @"
Roms with SHA-512: " + SHA512Count + "\n";
if (baddumpCol)
{
results += " Roms with BadDump status: " + BaddumpCount + "\n";
}
if (nodumpCol)
{
results += " Roms with Nodump status: " + NodumpCount + "\n";
}
// For spacing between DATs
results += "\n\n";
Globals.Logger.User(results);
// Now write it out to file as well
string line = "";
if (outputs.ContainsKey(StatDatFormat.None))
{
line = @"'" + FileName + @"':
--------------------------------------------------
Uncompressed size: " + Style.GetBytesReadable(TotalSize) + @"
Games found: " + (game == -1 ? Keys.Count() : game) + @"
Roms found: " + RomCount + @"
Disks found: " + DiskCount + @"
    Roms with CRC: " + CRCCount + @"
    Roms with MD5: " + MD5Count + @"
    Roms with SHA-1: " + SHA1Count + @"
Roms with SHA-256: " + SHA256Count + @"
Roms with SHA-384: " + SHA384Count + @"
Roms with SHA-512: " + SHA512Count + "\n";
if (baddumpCol)
{
line += " Roms with BadDump status: " + BaddumpCount + "\n";
}
if (nodumpCol)
{
line += " Roms with Nodump status: " + NodumpCount + "\n";
}
// For spacing between DATs
line += "\n\n";
outputs[StatDatFormat.None].Write(line);
}
if (outputs.ContainsKey(StatDatFormat.CSV))
{
line = "\"" + FileName + "\","
+ "\"" + TotalSize + "\","
+ "\"" + (game == -1 ? Keys.Count() : game) + "\","
+ "\"" + RomCount + "\","
+ "\"" + DiskCount + "\","
+ "\"" + CRCCount + "\","
+ "\"" + MD5Count + "\","
+ "\"" + SHA1Count + "\","
+ "\"" + SHA256Count + "\","
+ "\"" + SHA384Count + "\","
+ "\"" + SHA512Count + "\"";
if (baddumpCol)
{
line += ",\"" + BaddumpCount + "\"";
}
if (nodumpCol)
{
line += ",\"" + NodumpCount + "\"";
}
line += "\n";
outputs[StatDatFormat.CSV].Write(line);
}
if (outputs.ContainsKey(StatDatFormat.HTML))
{
line = "\t\t\t<tr" + (FileName.StartsWith("DIR: ")
? " class=\"dir\"><td>" + HttpUtility.HtmlEncode(FileName.Remove(0, 5))
: "><td>" + HttpUtility.HtmlEncode(FileName)) + "</td>"
+ "<td align=\"right\">" + Style.GetBytesReadable(TotalSize) + "</td>"
+ "<td align=\"right\">" + (game == -1 ? Keys.Count() : game) + "</td>"
+ "<td align=\"right\">" + RomCount + "</td>"
+ "<td align=\"right\">" + DiskCount + "</td>"
+ "<td align=\"right\">" + CRCCount + "</td>"
+ "<td align=\"right\">" + MD5Count + "</td>"
+ "<td align=\"right\">" + SHA1Count + "</td>"
+ "<td align=\"right\">" + SHA256Count + "</td>";
if (baddumpCol)
{
line += "<td align=\"right\">" + BaddumpCount + "</td>";
}
if (nodumpCol)
{
line += "<td align=\"right\">" + NodumpCount + "</td>";
}
line += "</tr>\n";
outputs[StatDatFormat.HTML].Write(line);
}
if (outputs.ContainsKey(StatDatFormat.TSV))
{
line = "\"" + FileName + "\"\t"
+ "\"" + TotalSize + "\"\t"
+ "\"" + (game == -1 ? Keys.Count() : game) + "\"\t"
+ "\"" + RomCount + "\"\t"
+ "\"" + DiskCount + "\"\t"
+ "\"" + CRCCount + "\"\t"
+ "\"" + MD5Count + "\"\t"
+ "\"" + SHA1Count + "\"\t"
+ "\"" + SHA256Count + "\"\t"
+ "\"" + SHA384Count + "\"\t"
+ "\"" + SHA512Count + "\"";
if (baddumpCol)
{
line += "\t\"" + BaddumpCount + "\"";
}
if (nodumpCol)
{
line += "\t\"" + NodumpCount + "\"";
}
line += "\n";
outputs[StatDatFormat.TSV].Write(line);
}
}
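		/// <summary>
		/// Illustrative sketch only (hypothetical helper, not part of the original source):
		/// builds the writer dictionary for a combined text and CSV report, emits this
		/// DAT's statistics, then flushes the writers as the static OutputStats does
		/// </summary>
		private void ExampleOutputStats(string outDir)
		{
			StatDatFormat format = StatDatFormat.None | StatDatFormat.CSV;
			Dictionary<StatDatFormat, StreamWriter> outputs = OutputStatsGetOutputWriters(format, "report", outDir);
			OutputStatsWriteHeader(outputs, format, true, true);
			OutputStats(outputs, format, recalculate: true, baddumpCol: true, nodumpCol: true);
			foreach (StatDatFormat key in outputs.Keys)
			{
				outputs[key].Flush();
				outputs[key].Dispose();
			}
		}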
#endregion
#endregion // Instance Methods
#region Static Methods
#region Statistics
/// <summary>
/// Output the stats for a list of input dats as files in a human-readable format
/// </summary>
/// <param name="inputs">List of input files and folders</param>
/// <param name="reportName">Name of the output file</param>
/// <param name="single">True if single DAT stats are output, false otherwise</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
/// <param name="statDatFormat" > Set the statistics output format to use</param>
public static void OutputStats(List<string> inputs, string reportName, string outDir, bool single,
bool baddumpCol, bool nodumpCol, StatDatFormat statDatFormat)
{
// If there's no output format, set the default
if (statDatFormat == 0x0)
{
statDatFormat = StatDatFormat.None;
}
// Get the proper output file name
if (String.IsNullOrEmpty(outDir))
{
outDir = Environment.CurrentDirectory;
}
if (String.IsNullOrEmpty(reportName))
{
reportName = "report";
}
outDir = Path.GetFullPath(outDir);
// Get the dictionary of desired outputs
Dictionary<StatDatFormat, StreamWriter> outputs = OutputStatsGetOutputWriters(statDatFormat, reportName, outDir);
// Make sure we have all files
List<Tuple<string, string>> newinputs = new List<Tuple<string, string>>(); // item, basepath
Parallel.ForEach(inputs, Globals.ParallelOptions, input =>
{
if (File.Exists(input))
{
lock (newinputs)
{
newinputs.Add(Tuple.Create(Path.GetFullPath(input), Path.GetDirectoryName(Path.GetFullPath(input))));
}
}
if (Directory.Exists(input))
{
foreach (string file in Directory.GetFiles(input, "*", SearchOption.AllDirectories))
{
lock (newinputs)
{
newinputs.Add(Tuple.Create(Path.GetFullPath(file), Path.GetFullPath(input)));
}
}
}
});
newinputs = newinputs
.OrderBy(i => Path.GetDirectoryName(i.Item1))
.ThenBy(i => Path.GetFileName(i.Item1))
.ToList();
// Write the header, if any
OutputStatsWriteHeader(outputs, statDatFormat, baddumpCol, nodumpCol);
// Init all total variables
long totalSize = 0;
long totalGame = 0;
long totalRom = 0;
long totalDisk = 0;
long totalCRC = 0;
long totalMD5 = 0;
long totalSHA1 = 0;
long totalSHA256 = 0;
long totalBaddump = 0;
long totalNodump = 0;
// Init directory-level variables
string lastdir = null;
string basepath = null;
long dirSize = 0;
long dirGame = 0;
long dirRom = 0;
long dirDisk = 0;
long dirCRC = 0;
long dirMD5 = 0;
long dirSHA1 = 0;
long dirSHA256 = 0;
long dirBaddump = 0;
long dirNodump = 0;
// Now process each of the input files
foreach (Tuple<string, string> filename in newinputs)
{
// Get the directory for the current file
string thisdir = Path.GetDirectoryName(filename.Item1);
basepath = Path.GetDirectoryName(filename.Item2);
// If we don't have the first file and the directory has changed, show the previous directory stats and reset
if (lastdir != null && thisdir != lastdir)
{
// Output separator if needed
OutputStatsWriteMidSeparator(outputs, statDatFormat, baddumpCol, nodumpCol);
DatFile lastdirdat = new DatFile
{
FileName = "DIR: " + HttpUtility.HtmlEncode(lastdir.Remove(0, basepath.Length + (basepath.Length == 0 ? 0 : 1))),
TotalSize = dirSize,
RomCount = dirRom,
DiskCount = dirDisk,
CRCCount = dirCRC,
MD5Count = dirMD5,
SHA1Count = dirSHA1,
SHA256Count = dirSHA256,
BaddumpCount = dirBaddump,
NodumpCount = dirNodump,
};
lastdirdat.OutputStats(outputs, statDatFormat,
game: dirGame, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
// Write the mid-footer, if any
OutputStatsWriteMidFooter(outputs, statDatFormat, baddumpCol, nodumpCol);
// Write the header, if any
OutputStatsWriteMidHeader(outputs, statDatFormat, baddumpCol, nodumpCol);
// Reset the directory stats
dirSize = 0;
dirGame = 0;
dirRom = 0;
dirDisk = 0;
dirCRC = 0;
dirMD5 = 0;
dirSHA1 = 0;
dirSHA256 = 0;
dirBaddump = 0;
dirNodump = 0;
}
Globals.Logger.Verbose("Beginning stat collection for '" + filename.Item1 + "'", false);
List<string> games = new List<string>();
DatFile datdata = new DatFile();
datdata.Parse(filename.Item1, 0, 0);
datdata.BucketBy(SortedBy.Game, false /* mergeroms */, norename: true);
// Output single DAT stats (if asked)
Globals.Logger.User("Adding stats for file '" + filename.Item1 + "'\n", false);
if (single)
{
datdata.OutputStats(outputs, statDatFormat,
baddumpCol: baddumpCol, nodumpCol: nodumpCol);
}
// Add single DAT stats to dir
dirSize += datdata.TotalSize;
dirGame += datdata.Keys.Count();
dirRom += datdata.RomCount;
dirDisk += datdata.DiskCount;
dirCRC += datdata.CRCCount;
dirMD5 += datdata.MD5Count;
dirSHA1 += datdata.SHA1Count;
dirSHA256 += datdata.SHA256Count;
dirBaddump += datdata.BaddumpCount;
dirNodump += datdata.NodumpCount;
// Add single DAT stats to totals
totalSize += datdata.TotalSize;
totalGame += datdata.Keys.Count();
totalRom += datdata.RomCount;
totalDisk += datdata.DiskCount;
totalCRC += datdata.CRCCount;
totalMD5 += datdata.MD5Count;
totalSHA1 += datdata.SHA1Count;
totalSHA256 += datdata.SHA256Count;
totalBaddump += datdata.BaddumpCount;
totalNodump += datdata.NodumpCount;
// Make sure to assign the new directory
lastdir = thisdir;
}
// Output the directory stats one last time
OutputStatsWriteMidSeparator(outputs, statDatFormat, baddumpCol, nodumpCol);
if (single)
{
DatFile dirdat = new DatFile
{
FileName = "DIR: " + HttpUtility.HtmlEncode(lastdir.Remove(0, basepath.Length + (basepath.Length == 0 ? 0 : 1))),
TotalSize = dirSize,
RomCount = dirRom,
DiskCount = dirDisk,
CRCCount = dirCRC,
MD5Count = dirMD5,
SHA1Count = dirSHA1,
SHA256Count = dirSHA256,
BaddumpCount = dirBaddump,
NodumpCount = dirNodump,
};
dirdat.OutputStats(outputs, statDatFormat,
game: dirGame, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
}
// Write the mid-footer, if any
OutputStatsWriteMidFooter(outputs, statDatFormat, baddumpCol, nodumpCol);
// Write the header, if any
OutputStatsWriteMidHeader(outputs, statDatFormat, baddumpCol, nodumpCol);
// Reset the directory stats
dirSize = 0;
dirGame = 0;
dirRom = 0;
dirDisk = 0;
dirCRC = 0;
dirMD5 = 0;
dirSHA1 = 0;
dirSHA256 = 0;
dirNodump = 0;
// Output total DAT stats
DatFile totaldata = new DatFile
{
FileName = "DIR: All DATs",
TotalSize = totalSize,
RomCount = totalRom,
DiskCount = totalDisk,
CRCCount = totalCRC,
MD5Count = totalMD5,
SHA1Count = totalSHA1,
SHA256Count = totalSHA256,
BaddumpCount = totalBaddump,
NodumpCount = totalNodump,
};
totaldata.OutputStats(outputs, statDatFormat,
game: totalGame, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
// Output footer if needed
OutputStatsWriteFooter(outputs, statDatFormat);
// Flush and dispose of the stream writers
foreach (StatDatFormat format in outputs.Keys)
{
outputs[format].Flush();
outputs[format].Dispose();
}
Globals.Logger.User(@"
Please check the log folder if the stats scrolled offscreen", false);
}
/// <summary>
/// Get the proper extension for the stat output format
/// </summary>
/// <param name="statDatFormat">StatDatFormat to get the extension for</param>
/// <param name="reportName">Name of the input file to use</param>
/// <param name="outDir">Output path to use</param>
/// <returns>Dictionary of file types to StreamWriters</returns>
private static Dictionary<StatDatFormat, StreamWriter> OutputStatsGetOutputWriters(StatDatFormat statDatFormat, string reportName, string outDir)
{
Dictionary<StatDatFormat, StreamWriter> output = new Dictionary<StatDatFormat, StreamWriter>();
// First try to create the output directory if we need to
if (!Directory.Exists(outDir))
{
Directory.CreateDirectory(outDir);
}
// For each output format, get the appropriate stream writer
if ((statDatFormat & StatDatFormat.None) != 0)
{
reportName = Style.GetFileNameWithoutExtension(reportName) + ".txt";
reportName = Path.Combine(outDir, reportName);
// Create the StreamWriter for this file
output.Add(StatDatFormat.None, new StreamWriter(FileTools.TryCreate(reportName)));
}
if ((statDatFormat & StatDatFormat.CSV) != 0)
{
reportName = Style.GetFileNameWithoutExtension(reportName) + ".csv";
reportName = Path.Combine(outDir, reportName);
// Create the StreamWriter for this file
output.Add(StatDatFormat.CSV, new StreamWriter(FileTools.TryCreate(reportName)));
}
if ((statDatFormat & StatDatFormat.HTML) != 0)
{
reportName = Style.GetFileNameWithoutExtension(reportName) + ".html";
reportName = Path.Combine(outDir, reportName);
// Create the StreamWriter for this file
output.Add(StatDatFormat.HTML, new StreamWriter(FileTools.TryCreate(reportName)));
}
if ((statDatFormat & StatDatFormat.TSV) != 0)
{
reportName = Style.GetFileNameWithoutExtension(reportName) + ".tsv";
reportName = Path.Combine(outDir, reportName);
// Create the StreamWriter for this file
output.Add(StatDatFormat.TSV, new StreamWriter(FileTools.TryCreate(reportName)));
}
return output;
}
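// Illustrative usage only (hypothetical names and paths, not part of the original file):
// a caller can request several formats at once and hand the resulting writers to the
// section methods that follow, e.g.
//
//   var outputs = OutputStatsGetOutputWriters(
//       StatDatFormat.CSV | StatDatFormat.HTML, "report", "stats");
//   OutputStatsWriteHeader(outputs, StatDatFormat.CSV | StatDatFormat.HTML,
//       baddumpCol: true, nodumpCol: false);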
/// <summary>
/// Write out the header to the stream, if any exists
/// </summary>
/// <param name="outputs">Dictionary representing the outputs</param>
/// <param name="statDatFormat">StatDatFormat representing output format</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
private static void OutputStatsWriteHeader(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat, bool baddumpCol, bool nodumpCol)
{
if (outputs.ContainsKey(StatDatFormat.None))
{
// Nothing
}
if (outputs.ContainsKey(StatDatFormat.CSV))
{
outputs[StatDatFormat.CSV].Write("\"File Name\",\"Total Size\",\"Games\",\"Roms\",\"Disks\",\"# with CRC\",\"# with MD5\",\"# with SHA-1\",\"# with SHA-256\""
+ (baddumpCol ? ",\"BadDumps\"" : "") + (nodumpCol ? ",\"Nodumps\"" : "") + "\n");
}
if (outputs.ContainsKey(StatDatFormat.HTML))
{
outputs[StatDatFormat.HTML].Write(@"<!DOCTYPE html>
<html>
<head>
<title>DAT Statistics Report</title>
<style>
body {
background-color: lightgray;
}
.dir {
color: #0088FF;
}
.right {
text-align: right;
}
</style>
</head>
<body>
<h2>DAT Statistics Report (" + DateTime.Now.ToShortDateString() + @")</h2>
<table border=""1"" cellpadding=""5"" cellspacing=""0"">
");
}
if (outputs.ContainsKey(StatDatFormat.TSV))
{
outputs[StatDatFormat.TSV].Write("\"File Name\"\t\"Total Size\"\t\"Games\"\t\"Roms\"\t\"Disks\"\t\"# with CRC\"\t\"# with MD5\"\t\"# with SHA-1\"\t\"# with SHA-256\""
+ (baddumpCol ? "\t\"BadDumps\"" : "") + (nodumpCol ? "\t\"Nodumps\"" : "") + "\n");
}
// Now write the mid header for those who need it
OutputStatsWriteMidHeader(outputs, statDatFormat, baddumpCol, nodumpCol);
}
/// <summary>
/// Write out the mid-header to the stream, if any exists
/// </summary>
/// <param name="outputs">Dictionary representing the outputs</param>
/// <param name="statDatFormat">StatDatFormat representing output format</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
private static void OutputStatsWriteMidHeader(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat, bool baddumpCol, bool nodumpCol)
{
if (outputs.ContainsKey(StatDatFormat.None))
{
// Nothing
}
if (outputs.ContainsKey(StatDatFormat.CSV))
{
// Nothing
}
if (outputs.ContainsKey(StatDatFormat.HTML))
{
outputs[StatDatFormat.HTML].Write(@" <tr bgcolor=""gray""><th>File Name</th><th align=""right"">Total Size</th><th align=""right"">Games</th><th align=""right"">Roms</th>"
+ @"<th align=""right"">Disks</th><th align=""right"">&#35; with CRC</th><th align=""right"">&#35; with MD5</th><th align=""right"">&#35; with SHA-1</th><th align=""right"">&#35; with SHA-256</th>"
+ (baddumpCol ? "<th class=\".right\">Baddumps</th>" : "") + (nodumpCol ? "<th class=\".right\">Nodumps</th>" : "") + "</tr>\n");
}
if (outputs.ContainsKey(StatDatFormat.TSV))
{
// Nothing
}
}
/// <summary>
/// Write out the separator to the stream, if any exists
/// </summary>
/// <param name="outputs">Dictionary representing the outputs</param>
/// <param name="statDatFormat">StatDatFormat representing output format</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
private static void OutputStatsWriteMidSeparator(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat, bool baddumpCol, bool nodumpCol)
{
if (outputs.ContainsKey(StatDatFormat.None))
{
// Nothing
}
if (outputs.ContainsKey(StatDatFormat.CSV))
{
// Nothing
}
if (outputs.ContainsKey(StatDatFormat.HTML))
{
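// The spacer row spans 10 columns by default, plus one for each optional column enabled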
outputs[StatDatFormat.HTML].Write("<tr><td colspan=\""
+ (baddumpCol && nodumpCol
? "12"
: (baddumpCol ^ nodumpCol
? "11"
: "10")
)
+ "\"></td></tr>\n");
}
if (outputs.ContainsKey(StatDatFormat.TSV))
{
// Nothing
}
}
/// <summary>
/// Write out the footer-separator to the stream, if any exists
/// </summary>
/// <param name="outputs">Dictionary representing the outputs</param>
/// <param name="statDatFormat">StatDatFormat representing output format</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
private static void OutputStatsWriteMidFooter(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat, bool baddumpCol, bool nodumpCol)
{
if (outputs.ContainsKey(StatDatFormat.None))
{
outputs[StatDatFormat.None].Write("\n");
}
if (outputs.ContainsKey(StatDatFormat.CSV))
{
outputs[StatDatFormat.CSV].Write("\n");
}
if (outputs.ContainsKey(StatDatFormat.HTML))
{
outputs[StatDatFormat.HTML].Write("<tr border=\"0\"><td colspan=\""
+ (baddumpCol && nodumpCol
? "12"
: (baddumpCol ^ nodumpCol
? "11"
: "10")
)
+ "\"></td></tr>\n");
}
if (outputs.ContainsKey(StatDatFormat.TSV))
{
outputs[StatDatFormat.TSV].Write("\n");
}
}
/// <summary>
/// Write out the footer to the stream, if any exists
/// </summary>
/// <param name="sw">StreamWriter representing the output</param>
/// <param name="statDatFormat">StatDatFormat representing output format</param>
private static void OutputStatsWriteFooter(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat)
{
if (outputs.ContainsKey(StatDatFormat.None))
{
// Nothing
}
if (outputs.ContainsKey(StatDatFormat.CSV))
{
// Nothing
}
if (outputs.ContainsKey(StatDatFormat.HTML))
{
outputs[StatDatFormat.HTML].Write(@" </table>
</body>
</html>
");
}
if (outputs.ContainsKey(StatDatFormat.TSV))
{
// Nothing
}
}
#endregion
#endregion // Static Methods
}
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,118 @@
using System;
using SabreTools.Helper.Data;
namespace SabreTools.Helper.Dats
{
public class Release : DatItem, ICloneable
{
#region Private instance variables
private string _region;
private string _language;
private string _date;
private bool? _default;
#endregion
#region Publicly facing variables
public string Region
{
get { return _region; }
set { _region = value; }
}
public string Language
{
get { return _language; }
set { _language = value; }
}
public string Date
{
get { return _date; }
set { _date = value; }
}
public bool? Default
{
get { return _default; }
set { _default = value; }
}
#endregion
#region Constructors
/// <summary>
/// Create a default, empty Release object
/// </summary>
public Release()
{
_name = "";
_itemType = ItemType.Release;
_region = "";
_language = "";
_date = "";
_default = null;
}
#endregion
#region Cloning Methods
public object Clone()
{
return new Release()
{
Name = this.Name,
Type = this.Type,
Dupe = this.Dupe,
Machine = this.Machine,
Supported = this.Supported,
Publisher = this.Publisher,
Infos = this.Infos,
PartName = this.PartName,
PartInterface = this.PartInterface,
Features = this.Features,
AreaName = this.AreaName,
AreaSize = this.AreaSize,
SystemID = this.SystemID,
System = this.System,
SourceID = this.SourceID,
Source = this.Source,
Region = this.Region,
Language = this.Language,
Date = this.Date,
Default = this.Default,
};
}
#endregion
#region Comparison Methods
public override bool Equals(DatItem other)
{
// If we don't have a release, return false
if (_itemType != other.Type)
{
return false;
}
// Otherwise, treat it as a release
Release newOther = (Release)other;
// If the release information matches
return (_name == newOther.Name
&& _region == newOther.Region
&& _language == newOther.Language
&& _date == newOther.Date
&& _default == newOther.Default);
}
#endregion
}
}

View File

@@ -0,0 +1,177 @@
using System;
using SabreTools.Helper.Data;
namespace SabreTools.Helper.Dats
{
public class Rom : Disk, ICloneable
{
#region Private instance variables
// Rom information
protected long _size;
protected string _crc;
private string _date;
#endregion
#region Publicly facing variables
// Rom information
public long Size
{
get { return _size; }
set { _size = value; }
}
public string CRC
{
get { return _crc; }
set { _crc = value; }
}
public string Date
{
get { return _date; }
set { _date = value; }
}
#endregion
#region Constructors
/// <summary>
/// Create a default, empty Rom object
/// </summary>
public Rom()
{
_name = "";
_itemType = ItemType.Rom;
_dupeType = 0x00;
_itemStatus = ItemStatus.None;
_date = "";
}
/// <summary>
/// Create a "blank" Rom object
/// </summary>
/// <param name="name"></param>
/// <param name="machineName"></param>
/// <param name="omitFromScan"></param>
/// <remarks>TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually</remarks>
public Rom(string name, string machineName, Hash omitFromScan = Hash.DeepHashes)
{
_name = name;
_itemType = ItemType.Rom;
_size = -1;
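// Any hash type not flagged for omission is seeded with the literal "null" placeholder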
if ((omitFromScan & Hash.CRC) == 0)
{
_crc = "null";
}
if ((omitFromScan & Hash.MD5) == 0)
{
_md5 = "null";
}
if ((omitFromScan & Hash.SHA1) == 0)
{
_sha1 = "null";
}
if ((omitFromScan & Hash.SHA256) == 0)
{
_sha256 = "null";
}
if ((omitFromScan & Hash.SHA384) == 0)
{
_sha384 = "null";
}
if ((omitFromScan & Hash.SHA512) == 0)
{
_sha512 = "null";
}
_itemStatus = ItemStatus.None;
_machine = new Machine
{
Name = machineName,
Description = machineName,
};
}
#endregion
#region Cloning Methods
public new object Clone()
{
return new Rom()
{
Name = this.Name,
Type = this.Type,
Dupe = this.Dupe,
Machine = this.Machine,
Supported = this.Supported,
Publisher = this.Publisher,
Infos = this.Infos,
PartName = this.PartName,
PartInterface = this.PartInterface,
Features = this.Features,
AreaName = this.AreaName,
AreaSize = this.AreaSize,
SystemID = this.SystemID,
System = this.System,
SourceID = this.SourceID,
Source = this.Source,
MD5 = this.MD5,
SHA1 = this.SHA1,
SHA256 = this.SHA256,
SHA384 = this.SHA384,
SHA512 = this.SHA512,
ItemStatus = this.ItemStatus,
Size = this.Size,
CRC = this.CRC,
Date = this.Date,
};
}
#endregion
#region Comparison Methods
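// Illustrative only (hypothetical values, not part of the original file): two Rom entries
// with the same size count as duplicates when no hash carried by both sides disagrees, e.g.
//
//   var a = new Rom { Name = "game.bin", Size = 1024, CRC = "deadbeef" };
//   var b = new Rom { Name = "game.bin", Size = 1024, SHA1 = "0123456789abcdef0123456789abcdef01234567" };
//   bool dupe = a.Equals(b);   // true: the sizes match and no hash is present on both sides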
public override bool Equals(DatItem other)
{
bool dupefound = false;
// If we don't have a rom, return false
if (_itemType != other.Type)
{
return dupefound;
}
// Otherwise, treat it as a rom
Rom newOther = (Rom)other;
// If either is a nodump, it's never a match
if (_itemStatus == ItemStatus.Nodump || newOther.ItemStatus == ItemStatus.Nodump)
{
return dupefound;
}
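// Sizes must match exactly; each hash only has to agree when both sides carry a value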
if ((this.Size == newOther.Size)
&& ((String.IsNullOrEmpty(this.CRC) || String.IsNullOrEmpty(newOther.CRC)) || this.CRC == newOther.CRC)
&& ((String.IsNullOrEmpty(this.MD5) || String.IsNullOrEmpty(newOther.MD5)) || this.MD5 == newOther.MD5)
&& ((String.IsNullOrEmpty(this.SHA1) || String.IsNullOrEmpty(newOther.SHA1)) || this.SHA1 == newOther.SHA1)
&& ((String.IsNullOrEmpty(this.SHA256) || String.IsNullOrEmpty(newOther.SHA256)) || this.SHA256 == newOther.SHA256)
&& ((String.IsNullOrEmpty(this.SHA384) || String.IsNullOrEmpty(newOther.SHA384)) || this.SHA384 == newOther.SHA384)
&& ((String.IsNullOrEmpty(this.SHA512) || String.IsNullOrEmpty(newOther.SHA512)) || this.SHA512 == newOther.SHA512))
{
dupefound = true;
}
return dupefound;
}
#endregion
}
}

View File

@@ -0,0 +1,71 @@
using System;
using SabreTools.Helper.Data;
namespace SabreTools.Helper.Dats
{
public class Sample : DatItem, ICloneable
{
#region Constructors
/// <summary>
/// Create a default, empty Sample object
/// </summary>
public Sample()
{
_name = "";
_itemType = ItemType.Sample;
}
#endregion
#region Cloning Methods
public object Clone()
{
return new Sample()
{
Name = this.Name,
Type = this.Type,
Dupe = this.Dupe,
Machine = this.Machine,
Supported = this.Supported,
Publisher = this.Publisher,
Infos = this.Infos,
PartName = this.PartName,
PartInterface = this.PartInterface,
Features = this.Features,
AreaName = this.AreaName,
AreaSize = this.AreaSize,
SystemID = this.SystemID,
System = this.System,
SourceID = this.SourceID,
Source = this.Source,
};
}
#endregion
#region Comparison Methods
public override bool Equals(DatItem other)
{
// If we don't have a sample, return false
if (_itemType != other.Type)
{
return false;
}
// Otherwise, treat it as a sample
Sample newOther = (Sample)other;
// If the sample information matches
return (_name == newOther.Name);
}
#endregion
}
}