using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using System.Web;

using SabreTools.Library.Data;
using SabreTools.Library.FileTypes;
using SabreTools.Library.DatItems;
using SabreTools.Library.Reports;
using SabreTools.Library.Skippers;
using SabreTools.Library.Tools;

#if MONO
using System.IO;
#else
using Alphaleonis.Win32.Filesystem;

using FileStream = System.IO.FileStream;
using IOException = System.IO.IOException;
using MemoryStream = System.IO.MemoryStream;
using SearchOption = System.IO.SearchOption;
using SeekOrigin = System.IO.SeekOrigin;
using Stream = System.IO.Stream;
#endif

using NaturalSort;
namespace SabreTools.Library.DatFiles
{
    /// <summary>
    /// Represents a format-agnostic DAT
    /// </summary>
    public class DatFile
    {
        #region Private instance variables

        // Internal DatHeader values
        internal DatHeader _datHeader = new DatHeader();

        // DatItems dictionary
        internal SortedDictionary<string, List<DatItem>> _items = new SortedDictionary<string, List<DatItem>>();
        internal SortedBy _sortedBy;
        internal DedupeType _mergedBy;

        // Internal statistical data
        internal DatStats _datStats = new DatStats();

        #endregion

        #region Publicly facing variables

        // Data common to most DAT types
        public string FileName
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.FileName; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.FileName = value; }
        }

        public string Name
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.Name; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.Name = value; }
        }

        public string Description
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.Description; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.Description = value; }
        }

        public string RootDir
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.RootDir; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.RootDir = value; }
        }

        public string Category
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.Category; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.Category = value; }
        }

        public string Version
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.Version; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.Version = value; }
        }

        public string Date
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.Date; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.Date = value; }
        }

        public string Author
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.Author; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.Author = value; }
        }

        public string Email
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.Email; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.Email = value; }
        }

        public string Homepage
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.Homepage; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.Homepage = value; }
        }

        public string Url
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.Url; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.Url = value; }
        }

        public string Comment
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.Comment; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.Comment = value; }
        }

        public string Header
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.Header; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.Header = value; }
        }

        public string Type // Generally only used for SuperDAT
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.Type; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.Type = value; }
        }

        public ForceMerging ForceMerging
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.ForceMerging; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.ForceMerging = value; }
        }

        public ForceNodump ForceNodump
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.ForceNodump; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.ForceNodump = value; }
        }

        public ForcePacking ForcePacking
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.ForcePacking; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.ForcePacking = value; }
        }

        public DatFormat DatFormat
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.DatFormat; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.DatFormat = value; }
        }

        public bool ExcludeOf
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.ExcludeOf; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.ExcludeOf = value; }
        }

        public bool SceneDateStrip
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.SceneDateStrip; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.SceneDateStrip = value; }
        }

        public DedupeType DedupeRoms
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.DedupeRoms; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.DedupeRoms = value; }
        }

        public Hash StripHash
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.StripHash; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.StripHash = value; }
        }

        public SortedBy SortedBy
        {
            get { return _sortedBy; }
        }

        // Data specific to the Miss DAT type
        public bool UseGame
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.UseGame; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.UseGame = value; }
        }

        public string Prefix
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.Prefix; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.Prefix = value; }
        }

        public string Postfix
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.Postfix; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.Postfix = value; }
        }

        public bool Quotes
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.Quotes; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.Quotes = value; }
        }

        public string ReplaceExtension
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.ReplaceExtension; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.ReplaceExtension = value; }
        }

        public string AddExtension
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.AddExtension; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.AddExtension = value; }
        }

        public bool RemoveExtension
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.RemoveExtension; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.RemoveExtension = value; }
        }

        public bool GameName
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.GameName; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.GameName = value; }
        }

        public bool Romba
        {
            get { if (_datHeader == null) { _datHeader = new DatHeader(); } return _datHeader.Romba; }
            set { if (_datHeader == null) { _datHeader = new DatHeader(); } _datHeader.Romba = value; }
        }

        // Statistical data related to the DAT
        public StatReportFormat ReportFormat
        {
            get { if (_datStats == null) { _datStats = new DatStats(); } return _datStats.ReportFormat; }
            set { if (_datStats == null) { _datStats = new DatStats(); } _datStats.ReportFormat = value; }
        }

        public long Count
        {
            get { if (_datStats == null) { _datStats = new DatStats(); } return _datStats.Count; }
            private set { if (_datStats == null) { _datStats = new DatStats(); } _datStats.Count = value; }
        }

        public long ArchiveCount
        {
            get { if (_datStats == null) { _datStats = new DatStats(); } return _datStats.ArchiveCount; }
            private set { if (_datStats == null) { _datStats = new DatStats(); } _datStats.ArchiveCount = value; }
        }

        public long BiosSetCount
        {
            get { if (_datStats == null) { _datStats = new DatStats(); } return _datStats.BiosSetCount; }
            private set { if (_datStats == null) { _datStats = new DatStats(); } _datStats.BiosSetCount = value; }
        }

        public long DiskCount
        {
            get { if (_datStats == null) { _datStats = new DatStats(); } return _datStats.DiskCount; }
            private set { if (_datStats == null) { _datStats = new DatStats(); } _datStats.DiskCount = value; }
        }

        public long ReleaseCount
        {
            get { if (_datStats == null) { _datStats = new DatStats(); } return _datStats.ReleaseCount; }
            private set { if (_datStats == null) { _datStats = new DatStats(); } _datStats.ReleaseCount = value; }
        }

        public long RomCount
        {
            get { if (_datStats == null) { _datStats = new DatStats(); } return _datStats.RomCount; }
            private set { if (_datStats == null) { _datStats = new DatStats(); } _datStats.RomCount = value; }
        }

        public long SampleCount
        {
            get { if (_datStats == null) { _datStats = new DatStats(); } return _datStats.SampleCount; }
            private set { if (_datStats == null) { _datStats = new DatStats(); } _datStats.SampleCount = value; }
        }

        public long TotalSize
        {
            get { if (_datStats == null) { _datStats = new DatStats(); } return _datStats.TotalSize; }
            private set { if (_datStats == null) { _datStats = new DatStats(); } _datStats.TotalSize = value; }
        }

        public long CRCCount
        {
            get { if (_datStats == null) { _datStats = new DatStats(); } return _datStats.CRCCount; }
            private set { if (_datStats == null) { _datStats = new DatStats(); } _datStats.CRCCount = value; }
        }

        public long MD5Count
        {
            get { if (_datStats == null) { _datStats = new DatStats(); } return _datStats.MD5Count; }
            private set { if (_datStats == null) { _datStats = new DatStats(); } _datStats.MD5Count = value; }
        }

        public long SHA1Count
        {
            get { if (_datStats == null) { _datStats = new DatStats(); } return _datStats.SHA1Count; }
            private set { if (_datStats == null) { _datStats = new DatStats(); } _datStats.SHA1Count = value; }
        }

        public long SHA256Count
        {
            get { if (_datStats == null) { _datStats = new DatStats(); } return _datStats.SHA256Count; }
            private set { if (_datStats == null) { _datStats = new DatStats(); } _datStats.SHA256Count = value; }
        }

        public long SHA384Count
        {
            get { if (_datStats == null) { _datStats = new DatStats(); } return _datStats.SHA384Count; }
            private set { if (_datStats == null) { _datStats = new DatStats(); } _datStats.SHA384Count = value; }
        }

        public long SHA512Count
        {
            get { if (_datStats == null) { _datStats = new DatStats(); } return _datStats.SHA512Count; }
            private set { if (_datStats == null) { _datStats = new DatStats(); } _datStats.SHA512Count = value; }
        }

        public long BaddumpCount
        {
            get { if (_datStats == null) { _datStats = new DatStats(); } return _datStats.BaddumpCount; }
            private set { if (_datStats == null) { _datStats = new DatStats(); } _datStats.BaddumpCount = value; }
        }

        public long GoodCount
        {
            get { if (_datStats == null) { _datStats = new DatStats(); } return _datStats.GoodCount; }
            private set { if (_datStats == null) { _datStats = new DatStats(); } _datStats.GoodCount = value; }
        }

        public long NodumpCount
        {
            get { if (_datStats == null) { _datStats = new DatStats(); } return _datStats.NodumpCount; }
            private set { if (_datStats == null) { _datStats = new DatStats(); } _datStats.NodumpCount = value; }
        }

        public long VerifiedCount
        {
            get { if (_datStats == null) { _datStats = new DatStats(); } return _datStats.VerifiedCount; }
            private set { if (_datStats == null) { _datStats = new DatStats(); } _datStats.VerifiedCount = value; }
        }

        #endregion

        #region Instance Methods

        #region Accessors

        /// <summary>
        /// Passthrough to access the file dictionary
        /// </summary>
        /// <param name="key">Key in the dictionary to reference</param>
        /// <remarks>We don't want to allow direct setting of values because it bypasses the statistics</remarks>
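        /// <example>
        /// An illustrative read (a sketch; "datFile" and the key are hypothetical):
        /// <code>
        /// List&lt;DatItem&gt; items = datFile["machine-key"]; // creates the key if it is missing
        /// </code>
        /// </example>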
        public List<DatItem> this[string key]
        {
            get
            {
                // Ensure the dictionary is created
                EnsureDictionary();

                lock (_items)
                {
                    // Ensure the key exists
                    EnsureKey(key);

                    // Now return the value
                    return _items[key];
                }
            }
        }

        /// <summary>
        /// Add a new key to the file dictionary
        /// </summary>
        /// <param name="key">Key in the dictionary to add</param>
        public void Add(string key)
        {
            // Ensure the dictionary is created
            EnsureDictionary();

            lock (_items)
            {
                // Ensure the key exists
                EnsureKey(key);
            }
        }

        /// <summary>
        /// Add a value to the file dictionary
        /// </summary>
        /// <param name="key">Key in the dictionary to add to</param>
        /// <param name="value">Value to add to the dictionary</param>
        public void Add(string key, DatItem value)
        {
            // Ensure the dictionary is created
            EnsureDictionary();

            // Add the key, if necessary
            Add(key);

            lock (_items)
            {
                // Now add the value
                _items[key].Add(value);

                // Now update the statistics
                _datStats.AddItem(value);
            }
        }

        /// <summary>
        /// Add a range of values to the file dictionary
        /// </summary>
        /// <param name="key">Key in the dictionary to add to</param>
        /// <param name="value">Value to add to the dictionary</param>
        public void AddRange(string key, List<DatItem> value)
        {
            // Ensure the dictionary is created
            EnsureDictionary();

            // Add the key, if necessary
            Add(key);

            lock (_items)
            {
                // Now add the value
                _items[key].AddRange(value);

                // Now update the statistics
                foreach (DatItem item in value)
                {
                    _datStats.AddItem(item);
                }
            }
        }

        /// <summary>
        /// Get if the file dictionary contains the key
        /// </summary>
        /// <param name="key">Key in the dictionary to check</param>
        /// <returns>True if the key exists, false otherwise</returns>
        public bool Contains(string key)
        {
            bool contains = false;

            // Ensure the dictionary is created
            EnsureDictionary();

            // If the key is null, we return false since keys can't be null
            if (key == null)
            {
                return contains;
            }

            lock (_items)
            {
                contains = _items.ContainsKey(key);
            }

            return contains;
        }

        /// <summary>
        /// Get if the file dictionary contains the key and value
        /// </summary>
        /// <param name="key">Key in the dictionary to check</param>
        /// <param name="value">Value in the dictionary to check</param>
        /// <returns>True if both the key and the value exist, false otherwise</returns>
        public bool Contains(string key, DatItem value)
        {
            bool contains = false;

            // Ensure the dictionary is created
            EnsureDictionary();

            // If the key is null, we return false since keys can't be null
            if (key == null)
            {
                return contains;
            }

            lock (_items)
            {
                // Check that the key exists and that its list actually holds the value
                if (_items.ContainsKey(key))
                {
                    contains = _items[key].Contains(value);
                }
            }

            return contains;
        }

        /// <summary>
        /// Get the keys from the file dictionary
        /// </summary>
        /// <returns>List of the keys</returns>
        public List<string> Keys
        {
            get
            {
                // Ensure the dictionary is created
                EnsureDictionary();

                lock (_items)
                {
                    return _items.Keys.Select(item => (String)item.Clone()).ToList();
                }
            }
        }

        /// <summary>
        /// Remove a key from the file dictionary if it exists
        /// </summary>
        /// <param name="key">Key in the dictionary to remove</param>
        public void Remove(string key)
        {
            // Ensure the dictionary is created
            EnsureDictionary();

            // If the key doesn't exist, return
            if (!Contains(key))
            {
                return;
            }

            lock (_items)
            {
                // Remove the statistics first
                foreach (DatItem item in _items[key])
                {
                    _datStats.RemoveItem(item);
                }

                // Remove the key from the dictionary
                _items.Remove(key);
            }
        }

        /// <summary>
        /// Remove a value from the file dictionary if it exists
        /// </summary>
        /// <param name="key">Key in the dictionary to remove from</param>
        /// <param name="value">Value to remove from the dictionary</param>
        public void Remove(string key, DatItem value)
        {
            // Ensure the dictionary is created
            EnsureDictionary();

            // If the key and value don't exist, return
            if (!Contains(key, value))
            {
                return;
            }

            lock (_items)
            {
                // While the key is in the dictionary and the item is there, remove it
                while (_items.ContainsKey(key) && _items[key].Contains(value))
                {
                    // Remove the statistics first
                    _datStats.RemoveItem(value);

                    _items[key].Remove(value);
                }
            }
        }

        /// <summary>
        /// Remove a range of values from the file dictionary if they exist
        /// </summary>
        /// <param name="key">Key in the dictionary to remove from</param>
        /// <param name="value">Values to remove from the dictionary</param>
        public void RemoveRange(string key, List<DatItem> value)
        {
            foreach (DatItem item in value)
            {
                Remove(key, item);
            }
        }

        /// <summary>
        /// Ensure the items dictionary exists
        /// </summary>
        private void EnsureDictionary()
        {
            // If the dictionary is null, create it
            if (_items == null)
            {
                _items = new SortedDictionary<string, List<DatItem>>();
            }
        }

        /// <summary>
        /// Ensure the key exists in the items dictionary
        /// </summary>
        /// <param name="key">Key to ensure</param>
        private void EnsureKey(string key)
        {
            // If the key is missing from the dictionary, add it
            if (!_items.ContainsKey(key))
            {
                _items.Add(key, new List<DatItem>());
            }
        }

        #endregion

        #region Bucketing

        /// <summary>
        /// Take the arbitrarily sorted Files Dictionary and convert to one sorted by a user-defined method
        /// </summary>
        /// <param name="bucketBy">SortedBy enum representing how to sort the individual items</param>
        /// <param name="deduperoms">Dedupe type that should be used</param>
        /// <param name="lower">True if the key should be lowercased (default), false otherwise</param>
        /// <param name="norename">True if games should only be compared on game and file name, false if system and source are counted</param>
        public void BucketBy(SortedBy bucketBy, DedupeType deduperoms, bool lower = true, bool norename = true)
        {
            // If we have a situation where there's no dictionary or no keys at all, we skip
            if (_items == null || _items.Count == 0)
            {
                return;
            }

            // If the sorted type isn't the same, we want to sort the dictionary accordingly
            if (_sortedBy != bucketBy)
            {
                Globals.Logger.User("Organizing roms by {0}", bucketBy);

                // Set the sorted type
                _sortedBy = bucketBy;

                // First do the initial sort of all of the roms inplace
                List<string> oldkeys = Keys;
                Parallel.ForEach(oldkeys, Globals.ParallelOptions, key =>
                {
                    // Get the unsorted current list
                    List<DatItem> roms = this[key];

                    // Now add each of the roms to their respective keys
                    for (int i = 0; i < roms.Count; i++)
                    {
                        DatItem rom = roms[i];

                        // We want to get the key most appropriate for the given sorting type
                        string newkey = GetKey(rom, bucketBy, lower, norename);

                        // If the key is different, move the item to the new key
                        if (newkey != key)
                        {
                            Add(newkey, rom);
                            Remove(key, rom);
                            i--; // This makes sure that the index stays on the correct item since one was removed
                        }
                    }
                });
            }

            // If the merge type isn't the same, we want to merge the dictionary accordingly
            if (_mergedBy != deduperoms)
            {
                Globals.Logger.User("Deduping roms by {0}", deduperoms);

                // Set the merged type
                _mergedBy = deduperoms;

                List<string> keys = Keys;
                Parallel.ForEach(keys, Globals.ParallelOptions, key =>
                {
                    // Get the possibly unsorted list
                    List<DatItem> sortedlist = this[key];

                    // Sort the list of items to be consistent
                    DatItem.Sort(ref sortedlist, false);

                    // If we're merging the roms, do so
                    if (deduperoms == DedupeType.Full || (deduperoms == DedupeType.Game && bucketBy == SortedBy.Game))
                    {
                        sortedlist = DatItem.Merge(sortedlist);
                    }

                    // Add the list back to the dictionary
                    Remove(key);
                    AddRange(key, sortedlist);
                });
            }
        }

        /// <summary>
        /// Get the dictionary key that should be used for a given item and sorting type
        /// </summary>
        /// <param name="item">DatItem to get the key for</param>
        /// <param name="sortedBy">SortedBy enum representing what key to get</param>
        /// <param name="lower">True if the key should be lowercased (default), false otherwise</param>
        /// <param name="norename">True if games should only be compared on game and file name, false if system and source are counted</param>
        /// <returns>String representing the key to be used for the DatItem</returns>
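        /// <example>
        /// A sketch of the resulting keys ("rom" and the values are hypothetical):
        /// <code>
        /// GetKey(rom, SortedBy.CRC);  // -> the rom's CRC, e.g. "abcd1234"
        /// GetKey(rom, SortedBy.Game); // -> lowercased, HTML-encoded machine name, e.g. "some game"
        /// </code>
        /// </example>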
        private string GetKey(DatItem item, SortedBy sortedBy, bool lower = true, bool norename = true)
        {
            // Set the output key as the default blank string
            string key = "";

            // Now determine what the key should be based on the sortedBy value
            switch (sortedBy)
            {
                case SortedBy.CRC:
                    key = (item.Type == ItemType.Rom ? ((Rom)item).CRC : Constants.CRCZero);
                    break;
                case SortedBy.Game:
                    key = (norename ? ""
                        : item.SystemID.ToString().PadLeft(10, '0')
                            + "-"
                            + item.SourceID.ToString().PadLeft(10, '0') + "-")
                        + (String.IsNullOrWhiteSpace(item.MachineName)
                            ? "Default"
                            : item.MachineName);
                    if (lower)
                    {
                        key = key.ToLowerInvariant();
                    }

                    if (key == null)
                    {
                        key = "null";
                    }

                    key = HttpUtility.HtmlEncode(key);
                    break;
                case SortedBy.MD5:
                    key = (item.Type == ItemType.Rom
                        ? ((Rom)item).MD5
                        : (item.Type == ItemType.Disk
                            ? ((Disk)item).MD5
                            : Constants.MD5Zero));
                    break;
                case SortedBy.SHA1:
                    key = (item.Type == ItemType.Rom
                        ? ((Rom)item).SHA1
                        : (item.Type == ItemType.Disk
                            ? ((Disk)item).SHA1
                            : Constants.SHA1Zero));
                    break;
                case SortedBy.SHA256:
                    key = (item.Type == ItemType.Rom
                        ? ((Rom)item).SHA256
                        : (item.Type == ItemType.Disk
                            ? ((Disk)item).SHA256
                            : Constants.SHA256Zero));
                    break;
                case SortedBy.SHA384:
                    key = (item.Type == ItemType.Rom
                        ? ((Rom)item).SHA384
                        : (item.Type == ItemType.Disk
                            ? ((Disk)item).SHA384
                            : Constants.SHA384Zero));
                    break;
                case SortedBy.SHA512:
                    key = (item.Type == ItemType.Rom
                        ? ((Rom)item).SHA512
                        : (item.Type == ItemType.Disk
                            ? ((Disk)item).SHA512
                            : Constants.SHA512Zero));
                    break;
            }

            // Double and triple check the key for corner cases
            if (key == null)
            {
                key = "";
            }

            return key;
        }

        #endregion

        #region Constructors

        /// <summary>
        /// Create a new, empty DatFile object
        /// </summary>
        public DatFile()
        {
            _items = new SortedDictionary<string, List<DatItem>>();
        }

        /// <summary>
        /// Create a new DatFile from an existing one using the header values only
        /// </summary>
        /// <param name="datFile">DatFile to get the header values from</param>
        public DatFile(DatFile datFile)
        {
            _datHeader = (DatHeader)datFile._datHeader.Clone();
        }

        /// <summary>
        /// Create a new DatFile from an existing DatHeader
        /// </summary>
        /// <param name="datHeader">DatHeader to copy the values from</param>
        public DatFile(DatHeader datHeader)
        {
            _datHeader = (DatHeader)datHeader.Clone();
        }

        #endregion

        #region Converting and Updating

        /// <summary>
        /// Determine if input files should be merged, diffed, or processed individually
        /// </summary>
        /// <param name="inputPaths">Names of the input files and/or folders</param>
        /// <param name="basePaths">Names of base files and/or folders</param>
        /// <param name="outDir">Optional param for output directory</param>
        /// <param name="updateMode">Non-zero flag for diffing mode, zero otherwise</param>
        /// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
        /// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
        /// <param name="bare">True if the date should not be appended to the default name, false otherwise [OBSOLETE]</param>
        /// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
        /// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
        /// <param name="descAsName">True to allow SL DATs to have game names used instead of descriptions, false otherwise (default)</param>
        /// <param name="filter">Filter object to be passed to the DatItem level</param>
        /// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
        /// <param name="updateNames">True if names should be updated, false otherwise [only for base replacement]</param>
        /// <param name="updateHashes">True if hashes should be updated, false otherwise [only for base replacement]</param>
        public void DetermineUpdateType(List<string> inputPaths, List<string> basePaths, string outDir, UpdateMode updateMode, bool inplace, bool skip,
            bool bare, bool clean, bool remUnicode, bool descAsName, Filter filter, SplitType splitType, bool updateNames, bool updateHashes)
        {
            // Ensure we only have files in the inputs
            List<string> inputFileNames = Utilities.GetOnlyFilesFromInputs(inputPaths, appendparent: true);
            List<string> baseFileNames = Utilities.GetOnlyFilesFromInputs(basePaths);

            // If we're in standard update mode, run through all of the inputs
            if (updateMode == UpdateMode.None)
            {
                Update(inputFileNames, outDir, inplace, clean, remUnicode, descAsName, filter, splitType);
                return;
            }

            // Reverse inputs if we're in a required mode
            if ((updateMode & UpdateMode.DiffReverseCascade) != 0)
            {
                inputFileNames.Reverse();
            }
            if ((updateMode & UpdateMode.ReverseBaseReplace) != 0)
            {
                baseFileNames.Reverse();
            }

            // If we're in merging mode
            if ((updateMode & UpdateMode.Merge) != 0)
            {
                // Populate the combined data and get the headers
                List<DatFile> datHeaders = PopulateUserData(inputFileNames, inplace, clean, remUnicode, descAsName, outDir, filter, splitType);
                MergeNoDiff(outDir, inputFileNames, datHeaders);
            }
            // If we have one of the standard diffing modes
            else if ((updateMode & UpdateMode.DiffDupesOnly) != 0
                || (updateMode & UpdateMode.DiffNoDupesOnly) != 0
                || (updateMode & UpdateMode.DiffIndividualsOnly) != 0)
            {
                DiffNoCascade(updateMode, outDir, inputFileNames);
            }
            // If we have one of the cascaded diffing modes
            else if ((updateMode & UpdateMode.DiffCascade) != 0
                || (updateMode & UpdateMode.DiffReverseCascade) != 0)
            {
                // Populate the combined data and get the headers
                List<DatFile> datHeaders = PopulateUserData(inputFileNames, inplace, clean, remUnicode, descAsName, outDir, filter, splitType);
                DiffCascade(outDir, inplace, inputFileNames, datHeaders, skip);
            }
            // If we have diff against mode
            else if ((updateMode & UpdateMode.DiffAgainst) != 0)
            {
                DiffAgainst(inputFileNames, baseFileNames, outDir, inplace, clean, remUnicode, descAsName, filter, splitType);
            }
            // If we have one of the base replacement modes
            else if ((updateMode & UpdateMode.BaseReplace) != 0
                || (updateMode & UpdateMode.ReverseBaseReplace) != 0)
            {
                BaseReplace(inputFileNames, baseFileNames, outDir, inplace, clean, remUnicode, descAsName, filter, splitType, updateNames, updateHashes);
            }

            return;
        }

        /// <summary>
        /// Populate the user DatData object from the input files
        /// </summary>
        /// <param name="inputs">Paths to DATs to parse</param>
        /// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
        /// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
        /// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
        /// <param name="descAsName">True to allow SL DATs to have game names used instead of descriptions, false otherwise (default)</param>
        /// <param name="outDir">Optional param for output directory</param>
        /// <param name="filter">Filter object to be passed to the DatItem level</param>
        /// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
        /// <returns>List of DatData objects representing headers</returns>
        private List<DatFile> PopulateUserData(List<string> inputs, bool inplace, bool clean, bool remUnicode, bool descAsName,
            string outDir, Filter filter, SplitType splitType)
        {
            DatFile[] datHeaders = new DatFile[inputs.Count];
            InternalStopwatch watch = new InternalStopwatch("Processing individual DATs");

            // Parse all of the DATs into their own DatFiles in the array
            Parallel.For(0, inputs.Count, Globals.ParallelOptions, i =>
            {
                string input = inputs[i];
                Globals.Logger.User("Adding DAT: {0}", input.Split('¬')[0]);
                datHeaders[i] = new DatFile
                {
                    DatFormat = (DatFormat != 0 ? DatFormat : 0),
                    DedupeRoms = DedupeRoms,
                };

                datHeaders[i].Parse(input, i, 0, splitType, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName);
            });

            watch.Stop();
            watch.Start("Populating internal DAT");

            Parallel.For(0, inputs.Count, Globals.ParallelOptions, i =>
            {
                // Get the list of keys from the DAT
                List<string> keys = datHeaders[i].Keys;
                foreach (string key in keys)
                {
                    // Add everything from the key to the internal DAT
                    AddRange(key, datHeaders[i][key]);

                    // Now remove the key from the source DAT
                    datHeaders[i].Remove(key);
                }

                // Now remove the file dictionary from the source DAT to save memory
                datHeaders[i].DeleteDictionary();
            });

            // Now that we have a merged DAT, filter it
            filter.FilterDatFile(this);

            watch.Stop();

            return datHeaders.ToList();
        }

        /// <summary>
        /// Replace item values from the base set
        /// </summary>
        /// <param name="inputFileNames">Names of the input files</param>
        /// <param name="baseFileNames">Names of base files</param>
        /// <param name="outDir">Optional param for output directory</param>
        /// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
        /// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
        /// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
        /// <param name="descAsName">True to allow SL DATs to have game names used instead of descriptions, false otherwise (default)</param>
        /// <param name="filter">Filter object to be passed to the DatItem level</param>
        /// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
        /// <param name="updateNames">True if names should be updated, false otherwise</param>
        /// <param name="updateHashes">True if hashes should be updated, false otherwise</param>
        public void BaseReplace(List<string> inputFileNames, List<string> baseFileNames, string outDir, bool inplace, bool clean, bool remUnicode,
            bool descAsName, Filter filter, SplitType splitType, bool updateNames, bool updateHashes)
        {
            // First we want to parse all of the base DATs into the input
            InternalStopwatch watch = new InternalStopwatch("Populating base DAT for replacement...");

            Parallel.For(0, baseFileNames.Count, Globals.ParallelOptions, i =>
            {
                string path = "";
                int id = 0;
                lock (baseFileNames)
                {
                    path = baseFileNames[i];
                    id = baseFileNames.Count - i;
                }

                Parse(path, id, id, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName);
            });

            watch.Stop();

            // For comparison's sake, we want to use CRC as the base ordering
            BucketBy(SortedBy.CRC, DedupeType.Full);

            // Now we want to try to replace each item in each input DAT from the base
            foreach (string path in inputFileNames)
            {
                Globals.Logger.User("Replacing items in '{0}' from the base DAT", path.Split('¬')[0]);

                // First we parse in the DAT internally
                DatFile intDat = new DatFile();
                intDat.Parse(path, 1, 1, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName);

                // For comparison's sake, we want to use CRC as the base ordering
                intDat.BucketBy(SortedBy.CRC, DedupeType.None);

                // Then we do a hashwise comparison against the base DAT
                List<string> keys = intDat.Keys;
                Parallel.ForEach(keys, Globals.ParallelOptions, key =>
                {
                    List<DatItem> datItems = intDat[key];
                    List<DatItem> newDatItems = new List<DatItem>();
                    foreach (DatItem datItem in datItems)
                    {
                        // If we have something other than a Rom or Disk, then this doesn't do anything
                        if (datItem.Type != ItemType.Disk && datItem.Type != ItemType.Rom)
                        {
                            newDatItems.Add((DatItem)datItem.Clone());
                            continue;
                        }

                        List<DatItem> dupes = datItem.GetDuplicates(this, sorted: true);
                        DatItem newDatItem = (DatItem)datItem.Clone();

                        if (dupes.Count > 0)
                        {
                            // If we're updating names, replace using the first found name
                            if (updateNames)
                            {
                                newDatItem.Name = dupes[0].Name;
                            }

                            // If we're updating hashes, only replace if the current item doesn't have them
                            if (updateHashes)
                            {
                                if (newDatItem.Type == ItemType.Rom)
                                {
                                    Rom newRomItem = (Rom)newDatItem;
                                    if (String.IsNullOrEmpty(newRomItem.CRC) && !String.IsNullOrEmpty(((Rom)dupes[0]).CRC))
                                    {
                                        newRomItem.CRC = ((Rom)dupes[0]).CRC;
                                    }
                                    if (String.IsNullOrEmpty(newRomItem.MD5) && !String.IsNullOrEmpty(((Rom)dupes[0]).MD5))
                                    {
                                        newRomItem.MD5 = ((Rom)dupes[0]).MD5;
                                    }
                                    if (String.IsNullOrEmpty(newRomItem.SHA1) && !String.IsNullOrEmpty(((Rom)dupes[0]).SHA1))
                                    {
                                        newRomItem.SHA1 = ((Rom)dupes[0]).SHA1;
                                    }
                                    if (String.IsNullOrEmpty(newRomItem.SHA256) && !String.IsNullOrEmpty(((Rom)dupes[0]).SHA256))
                                    {
                                        newRomItem.SHA256 = ((Rom)dupes[0]).SHA256;
                                    }
                                    if (String.IsNullOrEmpty(newRomItem.SHA384) && !String.IsNullOrEmpty(((Rom)dupes[0]).SHA384))
                                    {
                                        newRomItem.SHA384 = ((Rom)dupes[0]).SHA384;
                                    }
                                    if (String.IsNullOrEmpty(newRomItem.SHA512) && !String.IsNullOrEmpty(((Rom)dupes[0]).SHA512))
                                    {
                                        newRomItem.SHA512 = ((Rom)dupes[0]).SHA512;
                                    }

                                    newDatItem = (Rom)newRomItem.Clone();
                                }
                                else if (newDatItem.Type == ItemType.Disk)
                                {
                                    // Duplicates of a Disk are Disks themselves, so cast accordingly
                                    Disk newDiskItem = (Disk)newDatItem;
                                    if (String.IsNullOrEmpty(newDiskItem.MD5) && !String.IsNullOrEmpty(((Disk)dupes[0]).MD5))
                                    {
                                        newDiskItem.MD5 = ((Disk)dupes[0]).MD5;
                                    }
                                    if (String.IsNullOrEmpty(newDiskItem.SHA1) && !String.IsNullOrEmpty(((Disk)dupes[0]).SHA1))
                                    {
                                        newDiskItem.SHA1 = ((Disk)dupes[0]).SHA1;
                                    }
                                    if (String.IsNullOrEmpty(newDiskItem.SHA256) && !String.IsNullOrEmpty(((Disk)dupes[0]).SHA256))
                                    {
                                        newDiskItem.SHA256 = ((Disk)dupes[0]).SHA256;
                                    }
                                    if (String.IsNullOrEmpty(newDiskItem.SHA384) && !String.IsNullOrEmpty(((Disk)dupes[0]).SHA384))
                                    {
                                        newDiskItem.SHA384 = ((Disk)dupes[0]).SHA384;
                                    }
                                    if (String.IsNullOrEmpty(newDiskItem.SHA512) && !String.IsNullOrEmpty(((Disk)dupes[0]).SHA512))
                                    {
                                        newDiskItem.SHA512 = ((Disk)dupes[0]).SHA512;
                                    }

                                    newDatItem = (Disk)newDiskItem.Clone();
                                }
                            }
                        }

                        newDatItems.Add(newDatItem);
                    }

                    // Now add the new list to the key
                    intDat.Remove(key);
                    intDat.AddRange(key, newDatItems);
                });

                // Determine the output path for the DAT
                string interOutDir = Utilities.GetOutputPath(outDir, path, inplace);

                // Once we're done, try writing out
                intDat.Write(interOutDir);

                // Due to possible memory requirements, we force a garbage collection
                GC.Collect();
            }
        }

        /// <summary>
        /// Output diffs against a base set
        /// </summary>
        /// <param name="inputFileNames">Names of the input files</param>
        /// <param name="baseFileNames">Names of base files</param>
        /// <param name="outDir">Optional param for output directory</param>
        /// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
        /// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
        /// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
        /// <param name="descAsName">True to allow SL DATs to have game names used instead of descriptions, false otherwise (default)</param>
        /// <param name="filter">Filter object to be passed to the DatItem level</param>
        /// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
        public void DiffAgainst(List<string> inputFileNames, List<string> baseFileNames, string outDir, bool inplace, bool clean, bool remUnicode,
            bool descAsName, Filter filter, SplitType splitType)
        {
            // First we want to parse all of the base DATs into the input
            InternalStopwatch watch = new InternalStopwatch("Populating base DAT for comparison...");

            Parallel.ForEach(baseFileNames, Globals.ParallelOptions, path =>
            {
                Parse(path, 0, 0, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName);
            });

            watch.Stop();

            // For comparison's sake, we want to use CRC as the base ordering
            BucketBy(SortedBy.CRC, DedupeType.Full);

            // Now we want to compare each input DAT against the base
            foreach (string path in inputFileNames)
            {
                Globals.Logger.User("Comparing '{0}' to base DAT", path.Split('¬')[0]);

                // First we parse in the DAT internally
                DatFile intDat = new DatFile();
                intDat.Parse(path, 1, 1, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName);

                // For comparison's sake, we want to use CRC as the base ordering
                intDat.BucketBy(SortedBy.CRC, DedupeType.Full);

                // Then we do a hashwise comparison against the base DAT
                List<string> keys = intDat.Keys;
                Parallel.ForEach(keys, Globals.ParallelOptions, key =>
                {
                    List<DatItem> datItems = intDat[key];
                    List<DatItem> keepDatItems = new List<DatItem>();
                    foreach (DatItem datItem in datItems)
                    {
                        if (!datItem.HasDuplicates(this, true))
                        {
                            keepDatItems.Add(datItem);
                        }
                    }

                    // Now add the new list to the key
                    intDat.Remove(key);
                    intDat.AddRange(key, keepDatItems);
                });

                // Determine the output path for the DAT
                string interOutDir = Utilities.GetOutputPath(outDir, path, inplace);

                // Once we're done, try writing out
                intDat.Write(interOutDir);

                // Due to possible memory requirements, we force a garbage collection
                GC.Collect();
            }
        }

        /// <summary>
        /// Output cascading diffs
        /// </summary>
        /// <param name="outDir">Output directory to write the DATs to</param>
        /// <param name="inplace">True if cascaded diffs are written in-place, false otherwise</param>
        /// <param name="inputs">List of inputs to write out from</param>
        /// <param name="datHeaders">Dat headers used optionally</param>
        /// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
        public void DiffCascade(string outDir, bool inplace, List<string> inputs, List<DatFile> datHeaders, bool skip)
        {
            string post = "";

            // Create a list of DatData objects representing output files
            List<DatFile> outDats = new List<DatFile>();

            // Loop through each of the inputs and get or create a new DatData object
            InternalStopwatch watch = new InternalStopwatch("Initializing all output DATs");

            DatFile[] outDatsArray = new DatFile[inputs.Count];
            Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
            {
                string innerpost = " (" + Path.GetFileNameWithoutExtension(inputs[j].Split('¬')[0]) + " Only)";
                DatFile diffData;

                // If we're in inplace mode, take the appropriate DatData object already stored
                if (inplace || outDir != Environment.CurrentDirectory)
                {
                    diffData = datHeaders[j];
                }
                else
                {
                    diffData = new DatFile(this);
                    diffData.FileName += post;
                    diffData.Name += post;
                    diffData.Description += post;
                }

                diffData.ResetDictionary();
                outDatsArray[j] = diffData;
            });

            outDats = outDatsArray.ToList();
            watch.Stop();

            // Now, loop through the dictionary and populate the correct DATs
            watch.Start("Populating all output DATs");

            List<string> keys = Keys;
            Parallel.ForEach(keys, Globals.ParallelOptions, key =>
            {
                List<DatItem> items = DatItem.Merge(this[key]);

                // If the rom list is empty or null, just skip it
                if (items == null || items.Count == 0)
                {
                    return;
                }

                foreach (DatItem item in items)
                {
                    // There are odd cases where there are items with System ID < 0. Skip them for now
                    if (item.SystemID < 0)
                    {
                        Globals.Logger.Warning("Item found with a <0 SystemID: {0}", item.Name);
                        continue;
                    }

                    outDats[item.SystemID].Add(key, item);
                }
            });

            watch.Stop();

            // Finally, loop through and output each of the DATs
            watch.Start("Outputting all created DATs");

            Parallel.For((skip ? 1 : 0), inputs.Count, Globals.ParallelOptions, j =>
            {
                string path = Utilities.GetOutputPath(outDir, inputs[j], inplace);

                // Try to output the file
                outDats[j].Write(path);
            });

            watch.Stop();
        }

        /// <summary>
        /// Output non-cascading diffs
        /// </summary>
        /// <param name="diff">Non-zero flag for diffing mode, zero otherwise</param>
        /// <param name="outDir">Output directory to write the DATs to</param>
        /// <param name="inputs">List of inputs to write out from</param>
2017-10-30 16:29:34 -07:00
public void DiffNoCascade ( UpdateMode diff , string outDir , List < string > inputs )
2017-10-09 20:38:15 -07:00
{
InternalStopwatch watch = new InternalStopwatch ( "Initializing all output DATs" ) ;
2017-10-09 18:04:49 -07:00
2017-10-09 20:38:15 -07:00
// Default vars for use
string post = "" ;
DatFile outerDiffData = new DatFile ( ) ;
DatFile dupeData = new DatFile ( ) ;
2017-10-09 18:04:49 -07:00
2017-10-09 20:38:15 -07:00
// Fill in any information not in the base DAT
2017-11-08 13:15:44 -08:00
if ( String . IsNullOrWhiteSpace ( FileName ) )
2017-10-09 18:04:49 -07:00
{
2017-10-09 20:38:15 -07:00
FileName = "All DATs" ;
2017-10-09 18:04:49 -07:00
}
2017-11-08 13:15:44 -08:00
if ( String . IsNullOrWhiteSpace ( Name ) )
2017-10-09 18:04:49 -07:00
{
2017-10-09 20:38:15 -07:00
Name = "All DATs" ;
}
2017-11-08 13:15:44 -08:00
if ( String . IsNullOrWhiteSpace ( Description ) )
2017-10-09 20:38:15 -07:00
{
Description = "All DATs" ;
2017-10-09 18:04:49 -07:00
}
2017-10-09 20:38:15 -07:00
// Don't have External dupes
2017-10-30 16:32:59 -07:00
if ( ( diff & UpdateMode . DiffNoDupesOnly ) ! = 0 )
2017-10-09 20:38:15 -07:00
{
post = " (No Duplicates)" ;
outerDiffData = new DatFile ( this ) ;
outerDiffData . FileName + = post ;
outerDiffData . Name + = post ;
outerDiffData . Description + = post ;
outerDiffData . ResetDictionary ( ) ;
}
2017-10-09 18:04:49 -07:00
2017-10-09 20:38:15 -07:00
// Have External dupes
2017-10-30 16:32:59 -07:00
if ( ( diff & UpdateMode . DiffDupesOnly ) ! = 0 )
2017-10-09 20:38:15 -07:00
{
post = " (Duplicates)" ;
dupeData = new DatFile ( this ) ;
dupeData . FileName + = post ;
dupeData . Name + = post ;
dupeData . Description + = post ;
dupeData . ResetDictionary ( ) ;
}
// Create a list of DatData objects representing individual output files
List < DatFile > outDats = new List < DatFile > ( ) ;
// Loop through each of the inputs and get or create a new DatData object
if ( ( diff & UpdateMode . DiffIndividualsOnly ) ! = 0 )
{
DatFile [ ] outDatsArray = new DatFile [ inputs . Count ] ;
Parallel . For ( 0 , inputs . Count , Globals . ParallelOptions , j = >
{
string innerpost = " (" + Path . GetFileNameWithoutExtension ( inputs [ j ] . Split ( '¬' ) [ 0 ] ) + " Only)" ;
DatFile diffData = new DatFile ( this ) ;
diffData . FileName + = innerpost ;
diffData . Name + = innerpost ;
diffData . Description + = innerpost ;
diffData . ResetDictionary ( ) ;
outDatsArray [ j ] = diffData ;
} ) ;
outDats = outDatsArray . ToList ( ) ;
}
watch . Stop ( ) ;
// Now, loop through the dictionary and populate the correct DATs
watch . Start ( "Populating all output DATs" ) ;
List < string > keys = Keys ;
Parallel . ForEach ( keys , Globals . ParallelOptions , key = >
{
List < DatItem > items = DatItem . Merge ( this [ key ] ) ;
// If the rom list is empty or null, just skip it
if ( items = = null | | items . Count = = 0 )
{
return ;
}
// Loop through and add the items correctly
foreach ( DatItem item in items )
{
// No duplicates
if ( ( diff & UpdateMode . DiffNoDupesOnly ) ! = 0 | | ( diff & UpdateMode . DiffIndividualsOnly ) ! = 0 )
{
if ( ( item . Dupe & DupeType . Internal ) ! = 0 )
{
// Individual DATs that are output
if ( ( diff & UpdateMode . DiffIndividualsOnly ) ! = 0 )
{
outDats [ item . SystemID ] . Add ( key , item ) ;
}
// Merged no-duplicates DAT
if ( ( diff & UpdateMode . DiffNoDupesOnly ) ! = 0 )
{
DatItem newrom = item . Clone ( ) as DatItem ;
newrom . MachineName + = " (" + Path . GetFileNameWithoutExtension ( inputs [ newrom . SystemID ] . Split ( '¬' ) [ 0 ] ) + ")" ;
outerDiffData . Add ( key , newrom ) ;
}
}
}
// Duplicates only
if ( ( diff & UpdateMode . DiffDupesOnly ) ! = 0 )
{
if ( ( item . Dupe & DupeType . External ) ! = 0 )
{
DatItem newrom = item . Clone ( ) as DatItem ;
newrom . MachineName + = " (" + Path . GetFileNameWithoutExtension ( inputs [ newrom . SystemID ] . Split ( '¬' ) [ 0 ] ) + ")" ;
dupeData . Add ( key , newrom ) ;
}
}
}
} ) ;
watch . Stop ( ) ;
// Finally, loop through and output each of the DATs
watch . Start ( "Outputting all created DATs" ) ;
// Output the difflist (a-b)+(b-a) diff
if ( ( diff & UpdateMode . DiffNoDupesOnly ) ! = 0 )
{
outerDiffData . Write ( outDir ) ;
}
// Output the (a∩b) duplicates diff
if ( ( diff & UpdateMode . DiffDupesOnly ) ! = 0 )
{
dupeData . Write ( outDir ) ;
}
// Output the individual (a-b) DATs
if ( ( diff & UpdateMode . DiffIndividualsOnly ) ! = 0 )
{
Parallel . For ( 0 , inputs . Count , Globals . ParallelOptions , j = >
{
string path = Utilities . GetOutputPath ( outDir , inputs [ j ] , false /* inplace */ ) ;

// Try to output the file
outDats [ j ] . Write ( path ) ;
} ) ;
}
watch . Stop ( ) ;
}
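// Illustrative usage of DiffNoCascade (a sketch, not part of the library):
// it assumes this DatFile was already populated from the same "path¬root"
// formatted inputs, so that each item's SystemID indexes into the inputs list.
// The paths below are hypothetical:
//
//     List<string> inputs = new List<string> { @"C:\dats\a.dat¬C:\dats", @"C:\dats\b.dat¬C:\dats" };
//     datFile.DiffNoCascade(UpdateMode.DiffNoDupesOnly | UpdateMode.DiffIndividualsOnly, @"C:\out", inputs);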
/// <summary>
/// Output user defined merge
/// </summary>
/// <param name="outDir">Output directory to write the DATs to</param>
/// <param name="inputs">List of inputs to write out from</param>
/// <param name="datHeaders">Dat headers used optionally</param>
public void MergeNoDiff ( string outDir , List < string > inputs , List < DatFile > datHeaders )
{
// If we're in SuperDAT mode, prefix all games with their respective DATs
if ( Type = = "SuperDAT" )
{
List < string > keys = Keys ;
Parallel . ForEach ( keys , Globals . ParallelOptions , key = >
{
List < DatItem > items = this [ key ] . ToList ( ) ;
List < DatItem > newItems = new List < DatItem > ( ) ;
foreach ( DatItem item in items )
{
DatItem newItem = item ;
string filename = inputs [ newItem . SystemID ] . Split ( '¬' ) [ 0 ] ;
string rootpath = inputs [ newItem . SystemID ] . Split ( '¬' ) [ 1 ] ;
rootpath + = ( String . IsNullOrWhiteSpace ( rootpath ) ? "" : Path . DirectorySeparatorChar . ToString ( ) ) ;
filename = filename . Remove ( 0 , rootpath . Length ) ;
newItem . MachineName = Path . GetDirectoryName ( filename ) + Path . DirectorySeparatorChar
+ Path . GetFileNameWithoutExtension ( filename ) + Path . DirectorySeparatorChar
+ newItem . MachineName ;
newItems . Add ( newItem ) ;
}
Remove ( key ) ;
AddRange ( key , newItems ) ;
} ) ;
}
// Try to output the file
Write ( outDir ) ;
}
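// Worked example of the SuperDAT renaming above (hypothetical values): for the
// input "C:\dats\mame.dat¬C:\dats", the root is stripped to leave "mame.dat",
// so a machine named "puckman" is rewritten to roughly "mame\puckman"
// (DAT-relative directory + DAT name + original machine name).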
/// <summary>
/// Convert, update, and filter a DAT file or set of files
/// </summary>
/// <param name="inputFileNames">Names of the input files and/or folders</param>
/// <param name="outDir">Optional param for output directory</param>
/// <param name="merge">True if input files should be merged into a single file, false otherwise</param>
/// <param name="diff">Non-zero flag for diffing mode, zero otherwise</param>
/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
/// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
/// <param name="bare">True if the date should not be appended to the default name, false otherwise [OBSOLETE]</param>
/// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
/// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
/// <param name="descAsName">True to allow SL DATs to have game names used instead of descriptions, false otherwise (default)</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
/// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param>
public void Update ( List < string > inputFileNames , string outDir , bool inplace , bool clean , bool remUnicode , bool descAsName ,
2017-12-05 12:53:15 -08:00
Filter filter , SplitType splitType )
2017-10-09 20:38:15 -07:00
{
// Iterate over the files
foreach ( string file in inputFileNames )
{
DatFile innerDatdata = new DatFile ( this ) ;
Globals . Logger . User ( "Processing '{0}'" , Path . GetFileName ( file . Split ( '¬' ) [ 0 ] ) ) ;
innerDatdata . Parse ( file , 0 , 0 , splitType , keep : true , clean : clean , remUnicode : remUnicode , descAsName : descAsName ,
keepext : ( ( innerDatdata . DatFormat & DatFormat . TSV ) ! = 0 | | ( innerDatdata . DatFormat & DatFormat . CSV ) ! = 0 ) ) ;
filter . FilterDatFile ( innerDatdata ) ;

// Get the correct output path
string realOutDir = Utilities . GetOutputPath ( outDir , file , inplace ) ;

// Try to output the file, overwriting only if it's not in the current directory
// TODO: Figure out if overwriting should always happen or if there should be a user flag
innerDatdata . Write ( realOutDir , overwrite : ( realOutDir ! = Environment . CurrentDirectory ) ) ;
}
}
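// Illustrative usage (a sketch; the path is hypothetical and Filter is assumed
// to expose a default constructor): parse, filter, and rewrite a DAT in place:
//
//     DatFile updater = new DatFile();
//     updater.Update(new List<string> { @"C:\dats\old.dat" }, null, true, true, false, false,
//         new Filter(), SplitType.None);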
#endregion
#region Dictionary Manipulation
/// <summary>
/// Clones the files dictionary
/// </summary>
/// <returns>A new files dictionary instance</returns>
public SortedDictionary < string , List < DatItem > > CloneDictionary ( )
{
// Create the placeholder dictionary to be used
SortedDictionary < string , List < DatItem > > sorted = new SortedDictionary < string , List < DatItem > > ( ) ;
// Now perform a deep clone on the entire dictionary
List < string > keys = Keys ;
foreach ( string key in keys )
{
// Clone each list of DATs in the dictionary
List < DatItem > olditems = this [ key ] ;
List < DatItem > newitems = new List < DatItem > ( ) ;
foreach ( DatItem item in olditems )
{
newitems . Add ( ( DatItem ) item . Clone ( ) ) ;
}
// If the key is missing from the new dictionary, add it
if ( ! sorted . ContainsKey ( key ) )
{
sorted . Add ( key , new List < DatItem > ( ) ) ;
}
// Now add the list of items
sorted [ key ] . AddRange ( newitems ) ;
}
return sorted ;
}
/// <summary>
/// Delete the file dictionary
/// </summary>
public void DeleteDictionary ( )
{
_items = null ;
// Reset statistics
_datStats . Reset ( ) ;
}
/// <summary>
/// Reset the file dictionary
/// </summary>
public void ResetDictionary ( )
{
_items = new SortedDictionary < string , List < DatItem > > ( ) ;
// Reset statistics
_datStats . Reset ( ) ;
}
#endregion
#region Filtering
/// <summary>
/// Use game descriptions as names in the DAT, updating cloneof/romof/sampleof
/// </summary>
private void MachineDescriptionToName ( )
{
try
{
// First we want to get a mapping for all games to description
ConcurrentDictionary < string , string > mapping = new ConcurrentDictionary < string , string > ( ) ;
List < string > keys = Keys ;
Parallel . ForEach ( keys , Globals . ParallelOptions , key = >
{
List < DatItem > items = this [ key ] ;
foreach ( DatItem item in items )
{
// If the key mapping doesn't exist, add it
if ( ! mapping . ContainsKey ( item . MachineName ) )
{
mapping . TryAdd ( item . MachineName , item . MachineDescription . Replace ( '/' , '_' ) . Replace ( "\"" , "''" ) ) ;
}
}
} ) ;
// Now we loop through every item and update accordingly
keys = Keys ;
Parallel . ForEach ( keys , Globals . ParallelOptions , key = >
{
List < DatItem > items = this [ key ] ;
List < DatItem > newItems = new List < DatItem > ( ) ;
foreach ( DatItem item in items )
{
// Update machine name
if ( ! String . IsNullOrWhiteSpace ( item . MachineName ) & & mapping . ContainsKey ( item . MachineName ) )
{
item . MachineName = mapping [ item . MachineName ] ;
}
// Update cloneof
if ( ! String . IsNullOrWhiteSpace ( item . CloneOf ) & & mapping . ContainsKey ( item . CloneOf ) )
{
item . CloneOf = mapping [ item . CloneOf ] ;
}
// Update romof
if ( ! String . IsNullOrWhiteSpace ( item . RomOf ) & & mapping . ContainsKey ( item . RomOf ) )
{
item . RomOf = mapping [ item . RomOf ] ;
}
// Update sampleof
if ( ! String . IsNullOrWhiteSpace ( item . SampleOf ) & & mapping . ContainsKey ( item . SampleOf ) )
{
item . SampleOf = mapping [ item . SampleOf ] ;
}
// Add the new item to the output list
newItems . Add ( item ) ;
}
// Replace the old list of roms with the new one
Remove ( key ) ;
AddRange ( key , newItems ) ;
} ) ;
}
catch ( Exception ex )
{
Globals . Logger . Warning ( ex . ToString ( ) ) ;
}
}
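// Example of the description-to-name mapping above (hypothetical entries): if
// machine "puckman" has the description "PuckMan (Japan set 1)", then every
// MachineName, CloneOf, RomOf, and SampleOf value of "puckman" is rewritten to
// "PuckMan (Japan set 1)", with '/' replaced by '_' and '"' by "''".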
/// <summary>
/// Remove all items marked for removal from the DAT
/// </summary>
private void RemoveMarkedItems ( )
{
List < string > keys = Keys ;
foreach ( string key in keys )
{
List < DatItem > items = this [ key ] ;
List < DatItem > newItems = new List < DatItem > ( ) ;
foreach ( DatItem item in items )
{
if ( ! item . Remove )
{
newItems . Add ( item ) ;
}
}
Remove ( key ) ;
AddRange ( key , newItems ) ;
}
}
/// <summary>
/// Strip the given hash types from the DAT
/// </summary>
private void StripHashesFromItems ( )
{
// Output the logging statement
Globals . Logger . User ( "Stripping requested hashes" ) ;
// Now process all of the roms
List < string > keys = Keys ;
Parallel . ForEach ( keys , Globals . ParallelOptions , key = >
{
List < DatItem > items = this [ key ] ;
for ( int j = 0 ; j < items . Count ; j + + )
{
DatItem item = items [ j ] ;
if ( item . Type = = ItemType . Rom )
{
Rom rom = ( Rom ) item ;
if ( ( StripHash & Hash . MD5 ) ! = 0 )
{
rom . MD5 = null ;
}
if ( ( StripHash & Hash . SHA1 ) ! = 0 )
{
rom . SHA1 = null ;
}
if ( ( StripHash & Hash . SHA256 ) ! = 0 )
{
rom . SHA256 = null ;
}
if ( ( StripHash & Hash . SHA384 ) ! = 0 )
{
rom . SHA384 = null ;
}
if ( ( StripHash & Hash . SHA512 ) ! = 0 )
{
rom . SHA512 = null ;
}
items [ j ] = rom ;
}
else if ( item . Type = = ItemType . Disk )
{
Disk disk = ( Disk ) item ;
if ( ( StripHash & Hash . MD5 ) ! = 0 )
{
disk . MD5 = null ;
}
if ( ( StripHash & Hash . SHA1 ) ! = 0 )
{
disk . SHA1 = null ;
}
if ( ( StripHash & Hash . SHA256 ) ! = 0 )
{
disk . SHA256 = null ;
}
if ( ( StripHash & Hash . SHA384 ) ! = 0 )
{
disk . SHA384 = null ;
}
if ( ( StripHash & Hash . SHA512 ) ! = 0 )
{
disk . SHA512 = null ;
}
items [ j ] = disk ;
}
}
Remove ( key ) ;
AddRange ( key , items ) ;
} ) ;
}
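// Illustrative setup (a sketch; StripHash is assumed to be a settable header
// property like FileName above): to drop everything but CRC and SHA-1 from
// the output, a caller could set
//
//     datFile.StripHash = Hash.MD5 | Hash.SHA256 | Hash.SHA384 | Hash.SHA512;
//
// before writing, and StripHashesFromItems() would then null those fields on
// every Rom and Disk.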
/// <summary>
/// Strip the dates from the beginning of scene-style set names
/// </summary>
private void StripSceneDatesFromItems ( )
{
// Output the logging statement
Globals . Logger . User ( "Stripping scene-style dates" ) ;
// Set the regex pattern to use
string pattern = @"([0-9]{2}\.[0-9]{2}\.[0-9]{2}-)(.*?-.*?)" ;
// Now process all of the roms
List < string > keys = Keys ;
Parallel . ForEach ( keys , Globals . ParallelOptions , key = >
{
List < DatItem > items = this [ key ] ;
for ( int j = 0 ; j < items . Count ; j + + )
{
DatItem item = items [ j ] ;
if ( Regex . IsMatch ( item . MachineName , pattern ) )
{
item . MachineName = Regex . Replace ( item . MachineName , pattern , "$2" ) ;
}
if ( Regex . IsMatch ( item . MachineDescription , pattern ) )
{
item . MachineDescription = Regex . Replace ( item . MachineDescription , pattern , "$2" ) ;
}
items [ j ] = item ;
}
Remove ( key ) ;
AddRange ( key , items ) ;
} ) ;
}
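// Worked example of the pattern above: the leading date block and its trailing
// dash are dropped, keeping only capture group 2:
//
//     Regex.Replace("01.02.17-Game_Name-GROUP", @"([0-9]{2}\.[0-9]{2}\.[0-9]{2}-)(.*?-.*?)", "$2")
//     // -> "Game_Name-GROUP"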
#endregion

#region Internal Merging / Splitting
/// <summary>
/// Use device_ref tags to get full non-merged sets and remove parenting tags
/// </summary>
/// <param name="mergeroms">Dedupe type to be used</param>
public void CreateDeviceNonMergedSets ( DedupeType mergeroms )
{
Globals . Logger . User ( "Creating device non-merged sets from the DAT" ) ;
// For sake of ease, the first thing we want to do is sort by game
BucketBy ( SortedBy . Game , mergeroms , norename : true ) ;
_sortedBy = SortedBy . Default ;
// Now we want to loop through all of the games and set the correct information
AddRomsFromDevices ( ) ;
// Then, remove the romof and cloneof tags so it's not picked up by the manager
RemoveTagsFromChild ( ) ;
// Finally, remove all sets that are labeled as bios or device
//RemoveBiosAndDeviceSets(logger);
}
/// <summary>
/// Use cloneof tags to create non-merged sets, plus device_ref tags to get fully non-merged sets, and remove the tags
/// </summary>
/// <param name="mergeroms">Dedupe type to be used</param>
public void CreateFullyNonMergedSets ( DedupeType mergeroms )
{
Globals . Logger . User ( "Creating fully non-merged sets from the DAT" ) ;
// For sake of ease, the first thing we want to do is sort by game
BucketBy ( SortedBy . Game , mergeroms , norename : true ) ;
_sortedBy = SortedBy . Default ;
// Now we want to loop through all of the games and set the correct information
AddRomsFromDevices ( ) ;
AddRomsFromParent ( ) ;
// Now that we have looped through the cloneof tags, we loop through the romof tags
AddRomsFromBios ( ) ;
// Then, remove the romof and cloneof tags so it's not picked up by the manager
RemoveTagsFromChild ( ) ;
// Finally, remove all sets that are labeled as bios or device
//RemoveBiosAndDeviceSets(logger);
}
/// <summary>
/// Use cloneof tags to create merged sets and remove the tags
/// </summary>
/// <param name="mergeroms">Dedupe type to be used</param>
public void CreateMergedSets ( DedupeType mergeroms )
{
Globals . Logger . User ( "Creating merged sets from the DAT" ) ;
// For sake of ease, the first thing we want to do is sort by game
BucketBy ( SortedBy . Game , mergeroms , norename : true ) ;
_sortedBy = SortedBy . Default ;
// Now we want to loop through all of the games and set the correct information
AddRomsFromChildren ( ) ;
// Now that we have looped through the cloneof tags, we loop through the romof tags
RemoveBiosRomsFromChild ( ) ;
// Finally, remove the romof and cloneof tags so it's not picked up by the manager
RemoveTagsFromChild ( ) ;
}
/// <summary>
/// Use cloneof tags to create non-merged sets and remove the tags
/// </summary>
/// <param name="mergeroms">Dedupe type to be used</param>
public void CreateNonMergedSets ( DedupeType mergeroms )
{
Globals . Logger . User ( "Creating non-merged sets from the DAT" ) ;
// For sake of ease, the first thing we want to do is sort by game
BucketBy ( SortedBy . Game , mergeroms , norename : true ) ;
_sortedBy = SortedBy . Default ;
// Now we want to loop through all of the games and set the correct information
AddRomsFromParent ( ) ;
// Now that we have looped through the cloneof tags, we loop through the romof tags
RemoveBiosRomsFromChild ( ) ;
// Finally, remove the romof and cloneof tags so it's not picked up by the manager
RemoveTagsFromChild ( ) ;
}
/// <summary>
/// Use cloneof and romof tags to create split sets and remove the tags
/// </summary>
/// <param name="mergeroms">Dedupe type to be used</param>
public void CreateSplitSets ( DedupeType mergeroms )
{
Globals . Logger . User ( "Creating split sets from the DAT" ) ;
// For sake of ease, the first thing we want to do is sort by game
BucketBy ( SortedBy . Game , mergeroms , norename : true ) ;
_sortedBy = SortedBy . Default ;
// Now we want to loop through all of the games and set the correct information
RemoveRomsFromChild ( ) ;
// Now that we have looped through the cloneof tags, we loop through the romof tags
RemoveBiosRomsFromChild ( ) ;
// Finally, remove the romof and cloneof tags so it's not picked up by the manager
RemoveTagsFromChild ( ) ;
}
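// Quick reference for the set-building methods above (standard MAME-style
// semantics, illustrated with a hypothetical parent "puckman" and clone
// "puckmanb"):
//   - Split:            "puckmanb" keeps only the roms that differ from "puckman".
//   - Merged:           "puckmanb" is folded into "puckman" as a subfolder.
//   - Non-merged:       "puckmanb" carries every rom it needs except BIOS roms.
//   - Fully non-merged: "puckmanb" also carries BIOS and device roms.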
/// <summary>
/// Use romof tags to add roms to the children
/// </summary>
private void AddRomsFromBios ( )
{
List < string > games = Keys ;
foreach ( string game in games )
{
// If the game has no items in it, we want to continue
if ( this [ game ] . Count = = 0 )
{
continue ;
}
// Determine if the game has a parent or not
string parent = null ;
if ( ! String . IsNullOrWhiteSpace ( this [ game ] [ 0 ] . RomOf ) )
{
parent = this [ game ] [ 0 ] . RomOf ;
}
// If the parent doesn't exist, we want to continue
if ( String . IsNullOrWhiteSpace ( parent ) )
{
continue ;
}
// If the parent doesn't have any items, we want to continue
if ( this [ parent ] . Count = = 0 )
{
continue ;
}
// If the parent exists and has items, we copy the items from the parent to the current game
DatItem copyFrom = this [ game ] [ 0 ] ;
List < DatItem > parentItems = this [ parent ] ;
foreach ( DatItem item in parentItems )
{
DatItem datItem = ( DatItem ) item . Clone ( ) ;
datItem . CopyMachineInformation ( copyFrom ) ;
if ( this [ game ] . Where ( i = > i . Name = = datItem . Name ) . Count ( ) = = 0 & & ! this [ game ] . Contains ( datItem ) )
{
Add ( game , datItem ) ;
}
}
}
}
/// <summary>
/// Use device_ref tags to add roms to the children
/// </summary>
private void AddRomsFromDevices ( )
{
List < string > games = Keys ;
foreach ( string game in games )
{
// If the game has no devices, we continue
if ( this [ game ] [ 0 ] . Devices = = null | | this [ game ] [ 0 ] . Devices . Count = = 0 )
{
continue ;
}
// Determine if the game has any devices or not
List < string > devices = this [ game ] [ 0 ] . Devices ;
foreach ( string device in devices )
{
// If the device doesn't exist then we continue
if ( this [ device ] . Count = = 0 )
{
continue ;
}
// Otherwise, copy the items from the device to the current game
DatItem copyFrom = this [ game ] [ 0 ] ;
List < DatItem > devItems = this [ device ] ;
foreach ( DatItem item in devItems )
{
DatItem datItem = ( DatItem ) item . Clone ( ) ;
datItem . CopyMachineInformation ( copyFrom ) ;
if ( this [ game ] . Where ( i = > i . Name = = datItem . Name ) . Count ( ) = = 0 & & ! this [ game ] . Contains ( datItem ) )
{
Add ( game , datItem ) ;
}
}
}
}
}
/// <summary>
/// Use cloneof tags to add roms to the children, setting the new romof tag in the process
/// </summary>
private void AddRomsFromParent ( )
{
List < string > games = Keys ;
foreach ( string game in games )
{
// If the game has no items in it, we want to continue
if ( this [ game ] . Count = = 0 )
{
continue ;
}
// Determine if the game has a parent or not
string parent = null ;
if ( ! String . IsNullOrWhiteSpace ( this [ game ] [ 0 ] . CloneOf ) )
{
parent = this [ game ] [ 0 ] . CloneOf ;
}
// If the parent doesn't exist, we want to continue
if ( String . IsNullOrWhiteSpace ( parent ) )
{
continue ;
}
// If the parent doesn't have any items, we want to continue
if ( this [ parent ] . Count = = 0 )
{
continue ;
}
// If the parent exists and has items, we copy the items from the parent to the current game
DatItem copyFrom = this [ game ] [ 0 ] ;
List < DatItem > parentItems = this [ parent ] ;
foreach ( DatItem item in parentItems )
{
DatItem datItem = ( DatItem ) item . Clone ( ) ;
datItem . CopyMachineInformation ( copyFrom ) ;
if ( this [ game ] . Where ( i = > i . Name = = datItem . Name ) . Count ( ) = = 0 & & ! this [ game ] . Contains ( datItem ) )
{
Add ( game , datItem ) ;
}
}
// Now we want to get the parent romof tag and put it in each of the items
List < DatItem > items = this [ game ] ;
string romof = this [ parent ] [ 0 ] . RomOf ;
foreach ( DatItem item in items )
{
item . RomOf = romof ;
}
}
}
/// <summary>
/// Use cloneof tags to add roms to the parents, removing the child sets in the process
/// </summary>
private void AddRomsFromChildren ( )
{
List < string > games = Keys ;
foreach ( string game in games )
{
// Determine if the game has a parent or not
string parent = null ;
if ( ! String . IsNullOrWhiteSpace ( this [ game ] [ 0 ] . CloneOf ) )
{
parent = this [ game ] [ 0 ] . CloneOf ;
}
// If there is no parent, then we continue
if ( String . IsNullOrWhiteSpace ( parent ) )
{
continue ;
}
// Otherwise, move the items from the current game to a subfolder of the parent game
DatItem copyFrom = this [ parent ] . Count = = 0 ? new Rom { MachineName = parent , MachineDescription = parent } : this [ parent ] [ 0 ] ;
List < DatItem > items = this [ game ] ;
foreach ( DatItem item in items )
{
// If the disk doesn't have a valid merge tag OR the merged file doesn't exist in the parent, then add it
if ( item . Type = = ItemType . Disk & & ( item . MergeTag = = null | | ! this [ parent ] . Select ( i = > i . Name ) . Contains ( item . MergeTag ) ) )
{
item . CopyMachineInformation ( copyFrom ) ;
Add ( parent , item ) ;
}
// Otherwise, if the parent doesn't already contain the non-disk, add it
else if ( item . Type ! = ItemType . Disk & & ! this [ parent ] . Contains ( item ) )
{
// Rename the child so it's in a subfolder
item . Name = item . MachineName + "\\" + item . Name ;
// Update the machine to be the new parent
item . CopyMachineInformation ( copyFrom ) ;
// Add the rom to the parent set
Add ( parent , item ) ;
}
}
// Then, remove the old game so it's not picked up by the writer
Remove ( game ) ;
}
}
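// Example of the move above (hypothetical sets): if clone "puckmanb" with rom
// "pm.bin" has cloneof "puckman", the rom is re-added to "puckman" under the
// subfolder path "puckmanb\pm.bin", and the "puckmanb" set is then removed.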
/// <summary>
/// Remove all BIOS and device sets
/// </summary>
private void RemoveBiosAndDeviceSets ( )
{
List < string > games = Keys ;
foreach ( string game in games )
{
if ( this [ game ] . Count > 0
& & ( this [ game ] [ 0 ] . MachineType = = MachineType . Bios
| | this [ game ] [ 0 ] . MachineType = = MachineType . Device ) )
{
Remove ( game ) ;
}
}
}
/// <summary>
/// Use romof tags to remove roms from the children
/// </summary>
private void RemoveBiosRomsFromChild ( )
{
// Loop through the romof tags
List < string > games = Keys ;
foreach ( string game in games )
{
// If the game has no items in it, we want to continue
if ( this [ game ] . Count = = 0 )
{
continue ;
}
// Determine if the game has a parent or not
string parent = null ;
if ( ! String . IsNullOrWhiteSpace ( this [ game ] [ 0 ] . RomOf ) )
{
parent = this [ game ] [ 0 ] . RomOf ;
}
// If the parent doesn't exist, we want to continue
if ( String . IsNullOrWhiteSpace ( parent ) )
{
continue ;
}
// If the parent doesn't have any items, we want to continue
if ( this [ parent ] . Count = = 0 )
{
continue ;
}
// If the parent exists and has items, we remove the items that are in the parent from the current game
List < DatItem > parentItems = this [ parent ] ;
foreach ( DatItem item in parentItems )
{
DatItem datItem = ( DatItem ) item . Clone ( ) ;
Remove ( game , datItem ) ;
}
}
}
/// <summary>
/// Use cloneof tags to remove roms from the children
/// </summary>
private void RemoveRomsFromChild ( )
{
List < string > games = Keys ;
foreach ( string game in games )
{
// If the game has no items in it, we want to continue
if ( this [ game ] . Count = = 0 )
{
continue ;
}
// Determine if the game has a parent or not
string parent = null ;
if ( ! String . IsNullOrWhiteSpace ( this [ game ] [ 0 ] . CloneOf ) )
{
parent = this [ game ] [ 0 ] . CloneOf ;
}
// If the parent doesn't exist, we want to continue
if ( String . IsNullOrWhiteSpace ( parent ) )
{
continue ;
}
// If the parent doesn't have any items, we want to continue
if ( this [ parent ] . Count = = 0 )
{
continue ;
}
// If the parent exists and has items, we remove the items that are in the parent from the current game
List < DatItem > parentItems = this [ parent ] ;
foreach ( DatItem item in parentItems )
{
DatItem datItem = ( DatItem ) item . Clone ( ) ;
Remove ( game , datItem ) ;
}
// Now we want to get the parent romof tag and put it in each of the items
List < DatItem > items = this [ game ] ;
string romof = this [ parent ] [ 0 ] . RomOf ;
foreach ( DatItem item in items )
{
item . RomOf = romof ;
}
}
}
/// <summary>
/// Remove all romof and cloneof tags from all games
/// </summary>
private void RemoveTagsFromChild ( )
{
List < string > games = Keys ;
foreach ( string game in games )
{
List < DatItem > items = this [ game ] ;
foreach ( DatItem item in items )
{
item . CloneOf = null ;
item . RomOf = null ;
}
}
}
#endregion
#region Parsing
/// <summary>
/// Parse a DAT and return all found games and roms within
/// </summary>
/// <param name="filename">Name of the file to be parsed</param>
/// <param name="sysid">System ID for the DAT</param>
/// <param name="srcid">Source ID for the DAT</param>
/// <param name="datdata">The DatData object representing found roms to this point</param>
/// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param>
/// <param name="clean">True if game names are sanitized, false otherwise (default)</param>
/// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
/// <param name="descAsName">True if descriptions should be used as names, false otherwise (default)</param>
/// <param name="keepext">True if original extension should be kept, false otherwise (default)</param>
/// <param name="useTags">True if tags from the DAT should be used to merge the output, false otherwise (default)</param>
public void Parse ( string filename , int sysid , int srcid , bool keep = false , bool clean = false ,
bool remUnicode = false , bool descAsName = false , bool keepext = false , bool useTags = false )
{
Parse ( filename , sysid , srcid , SplitType . None , keep : keep , clean : clean ,
remUnicode : remUnicode , descAsName : descAsName , keepext : keepext , useTags : useTags ) ;
}
/// <summary>
/// Parse a DAT and return all found games and roms within
/// </summary>
/// <param name="filename">Name of the file to be parsed</param>
/// <param name="sysid">System ID for the DAT</param>
/// <param name="srcid">Source ID for the DAT</param>>
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
/// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param>
/// <param name="clean">True if game names are sanitized, false otherwise (default)</param>
/// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
/// <param name="descAsName">True if descriptions should be used as names, false otherwise (default)</param>
/// <param name="keepext">True if original extension should be kept, false otherwise (default)</param>
/// <param name="useTags">True if tags from the DAT should be used to merge the output, false otherwise (default)</param>
public void Parse (
// Standard Dat parsing
string filename ,
int sysid ,
int srcid ,
// Rom renaming
SplitType splitType ,
// Miscellaneous
bool keep = false ,
bool clean = false ,
bool remUnicode = false ,
bool descAsName = false ,
bool keepext = false ,
bool useTags = false )
{
// Check if we have a split path and get the filename accordingly
if ( filename . Contains ( "¬" ) )
{
filename = filename . Split ( '¬' ) [ 0 ] ;
}

// Check the file extension first as a safeguard
if ( ! Utilities . HasValidDatExtension ( filename ) )
{
return ;
}

// If the output filename isn't set already, get the internal filename
FileName = ( String . IsNullOrWhiteSpace ( FileName ) ? ( keepext ? Path . GetFileName ( filename ) : Path . GetFileNameWithoutExtension ( filename ) ) : FileName ) ;

// If the output type isn't set already, get the internal output type
DatFormat = ( DatFormat = = 0 ? Utilities . GetDatFormat ( filename ) : DatFormat ) ;
// Now parse the correct type of DAT
try
{
Utilities . GetDatFile ( filename , this ) ? . ParseFile ( filename , sysid , srcid , keep , clean , remUnicode ) ;
}
catch ( Exception ex )
{
Globals . Logger . Error ( "Error with file '{0}': {1}" , filename , ex ) ;
}
// If we want to use descriptions as names, update everything
if ( descAsName )
{
MachineDescriptionToName ( ) ;
}
// If we are using tags from the DAT, set the proper input for split type unless overridden
if ( useTags & & splitType = = SplitType . None )
{
splitType = Utilities . GetSplitType ( ForceMerging ) ;
}
// Now we pre-process the DAT with the splitting/merging mode
switch ( splitType )
{
case SplitType . None :
// No-op
break ;
case SplitType . DeviceNonMerged :
CreateDeviceNonMergedSets ( DedupeType . None ) ;
break ;
case SplitType . FullNonMerged :
CreateFullyNonMergedSets ( DedupeType . None ) ;
break ;
case SplitType . NonMerged :
CreateNonMergedSets ( DedupeType . None ) ;
break ;
case SplitType . Merged :
CreateMergedSets ( DedupeType . None ) ;
break ;
case SplitType . Split :
CreateSplitSets ( DedupeType . None ) ;
break ;
}
}
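// Illustrative usage (a sketch; the path is hypothetical): parse a DAT with
// tag-driven merging and descriptions used as machine names:
//
//     DatFile datFile = new DatFile();
//     datFile.Parse(@"C:\dats\example.dat", 0, 0, descAsName: true, useTags: true);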
/// <summary>
/// Add a rom to the Dat after checking
/// </summary>
/// <param name="item">Item data to check against</param>
/// <param name="clean">True if the names should be cleaned to WoD standards, false otherwise</param>
/// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
/// <returns>The key for the item</returns>
public string ParseAddHelper ( DatItem item , bool clean , bool remUnicode )
{
string key = "" ;
// If there's no name in the rom, we log and skip it
if ( item . Name = = null )
{
Globals . Logger . Warning ( "{0}: Rom with no name found! Skipping..." , FileName ) ;
return key ;
}
// If the name ends with a directory separator, we log and skip it (DOSCenter only?)
if ( item . Name . EndsWith ( "/" ) | | item . Name . EndsWith ( "\\" ) )
{
Globals . Logger . Warning ( "{0}: Rom ending with directory separator found: '{1}'. Skipping..." , FileName , item . Name ) ;
return key ;
}
// If we're in cleaning mode, sanitize the game name
item . MachineName = ( clean ? Utilities . CleanGameName ( item . MachineName ) : item . MachineName ) ;
// If we're stripping unicode characters, do so from all relevant things
if ( remUnicode )
{
item . Name = Utilities . RemoveUnicodeCharacters ( item . Name ) ;
item . MachineName = Utilities . RemoveUnicodeCharacters ( item . MachineName ) ;
item . MachineDescription = Utilities . RemoveUnicodeCharacters ( item . MachineDescription ) ;
}
// If we have a Rom or a Disk, clean the hash data
if ( item . Type = = ItemType . Rom )
{
Rom itemRom = ( Rom ) item ;
// Sanitize the hashes from null, hex sizes, and "true blank" strings
itemRom . CRC = Utilities . CleanHashData ( itemRom . CRC , Constants . CRCLength ) ;
itemRom . MD5 = Utilities . CleanHashData ( itemRom . MD5 , Constants . MD5Length ) ;
itemRom . SHA1 = Utilities . CleanHashData ( itemRom . SHA1 , Constants . SHA1Length ) ;
itemRom . SHA256 = Utilities . CleanHashData ( itemRom . SHA256 , Constants . SHA256Length ) ;
itemRom . SHA384 = Utilities . CleanHashData ( itemRom . SHA384 , Constants . SHA384Length ) ;
itemRom . SHA512 = Utilities . CleanHashData ( itemRom . SHA512 , Constants . SHA512Length ) ;
// If we have a rom and it's missing size AND the hashes match a 0-byte file, fill in the rest of the info
if ( ( itemRom . Size = = 0 | | itemRom . Size = = - 1 )
& & ( ( itemRom . CRC = = Constants . CRCZero | | String . IsNullOrWhiteSpace ( itemRom . CRC ) )
| | itemRom . MD5 = = Constants . MD5Zero
| | itemRom . SHA1 = = Constants . SHA1Zero
| | itemRom . SHA256 = = Constants . SHA256Zero
| | itemRom . SHA384 = = Constants . SHA384Zero
| | itemRom . SHA512 = = Constants . SHA512Zero ) )
{
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
itemRom . Size = Constants . SizeZero ;
itemRom . CRC = Constants . CRCZero ;
itemRom . MD5 = Constants . MD5Zero ;
itemRom . SHA1 = Constants . SHA1Zero ;
itemRom . SHA256 = null ;
itemRom . SHA384 = null ;
itemRom . SHA512 = null ;
//itemRom.SHA256 = Constants.SHA256Zero;
//itemRom.SHA384 = Constants.SHA384Zero;
//itemRom.SHA512 = Constants.SHA512Zero;
}
// If the file has no size and it's not the above case, skip and log
else if ( itemRom . ItemStatus ! = ItemStatus . Nodump & & ( itemRom . Size = = 0 | | itemRom . Size = = - 1 ) )
{
Globals . Logger . Verbose ( "{0}: Incomplete entry for '{1}' will be output as nodump" , FileName , itemRom . Name ) ;
itemRom . ItemStatus = ItemStatus . Nodump ;
}
// If the file has a size but absolutely no hashes, skip and log
else if ( itemRom . ItemStatus ! = ItemStatus . Nodump
& & itemRom . Size > 0
& & String . IsNullOrWhiteSpace ( itemRom . CRC )
& & String . IsNullOrWhiteSpace ( itemRom . MD5 )
& & String . IsNullOrWhiteSpace ( itemRom . SHA1 )
& & String . IsNullOrWhiteSpace ( itemRom . SHA256 )
& & String . IsNullOrWhiteSpace ( itemRom . SHA384 )
& & String . IsNullOrWhiteSpace ( itemRom . SHA512 ) )
{
Globals . Logger . Verbose ( "{0}: Incomplete entry for '{1}' will be output as nodump" , FileName , itemRom . Name ) ;
itemRom . ItemStatus = ItemStatus . Nodump ;
}
item = itemRom ;
}
else if ( item . Type = = ItemType . Disk )
{
Disk itemDisk = ( Disk ) item ;
// Sanitize the hashes from null, hex sizes, and "true blank" strings
itemDisk . MD5 = Utilities . CleanHashData ( itemDisk . MD5 , Constants . MD5Length ) ;
itemDisk . SHA1 = Utilities . CleanHashData ( itemDisk . SHA1 , Constants . SHA1Length ) ;
itemDisk . SHA256 = Utilities . CleanHashData ( itemDisk . SHA256 , Constants . SHA256Length ) ;
itemDisk . SHA384 = Utilities . CleanHashData ( itemDisk . SHA384 , Constants . SHA384Length ) ;
itemDisk . SHA512 = Utilities . CleanHashData ( itemDisk . SHA512 , Constants . SHA512Length ) ;
// If the file has absolutely no hashes, skip and log
if ( itemDisk . ItemStatus ! = ItemStatus . Nodump
& & String . IsNullOrWhiteSpace ( itemDisk . MD5 )
& & String . IsNullOrWhiteSpace ( itemDisk . SHA1 )
& & String . IsNullOrWhiteSpace ( itemDisk . SHA256 )
& & String . IsNullOrWhiteSpace ( itemDisk . SHA384 )
& & String . IsNullOrWhiteSpace ( itemDisk . SHA512 ) )
{
Globals . Logger . Verbose ( "Incomplete entry for '{0}' will be output as nodump" , itemDisk . Name ) ;
itemDisk . ItemStatus = ItemStatus . Nodump ;
}
item = itemDisk ;
}
// Get the key and add statistical data
switch ( item . Type )
{
case ItemType . Archive :
case ItemType . BiosSet :
case ItemType . Release :
case ItemType . Sample :
key = item . Type . ToString ( ) ;
break ;
case ItemType . Disk :
key = ( ( Disk ) item ) . MD5 ? ? ( ( Disk ) item ) . SHA1 ;
break ;
case ItemType . Rom :
key = ( ( Rom ) item ) . Size + "-" + ( ( Rom ) item ) . CRC ;
break ;
default :
key = "default" ;
break ;
}
// Add the item to the DAT
Add ( key , item ) ;
return key ;
}
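// Bucketing example for the key logic above (hypothetical values): a Rom with
// Size 1024 and CRC "deadbeef" lands under the key "1024-deadbeef"; a Disk is
// keyed by its MD5, falling back to SHA-1; Archives, BiosSets, Releases, and
// Samples are keyed by their type name; everything else goes under "default".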
/// <summary>
/// Add a rom to the Dat after checking
/// </summary>
/// <param name="item">Item data to check against</param>
/// <param name="clean">True if the names should be cleaned to WoD standards, false otherwise</param>
/// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
/// <returns>The key for the item</returns>
public async Task < string > ParseAddHelperAsync ( DatItem item , bool clean , bool remUnicode )
{
return await Task . Run ( ( ) = > ParseAddHelper ( item , clean , remUnicode ) ) ;
}
/// <summary>
/// Parse DatFile and return all found games and roms within
/// </summary>
/// <param name="filename">Name of the file to be parsed</param>
/// <param name="sysid">System ID for the DAT</param>
/// <param name="srcid">Source ID for the DAT</param>
/// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param>
/// <param name="clean">True if game names are sanitized, false otherwise (default)</param>
/// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
public virtual void ParseFile (
// Standard Dat parsing
string filename ,
int sysid ,
int srcid ,
// Miscellaneous
bool keep ,
bool clean ,
bool remUnicode )
{
throw new NotImplementedException ( ) ;
}
#endregion
#region Populate DAT from Directory
/// <summary>
/// Create a new Dat from a directory
/// </summary>
/// <param name="basePath">Base folder to be used in creating the DAT</param>
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
/// <param name="bare">True if the date should be omitted from the DAT, false otherwise</param>
/// <param name="archivesAsFiles">True if archives should be treated as files, false otherwise</param>
/// <param name="skipFileType">Type of files that should be skipped</param>
/// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
/// <param name="tempDir">Name of the directory to create a temp folder in (blank is current directory)</param>
/// <param name="outDir">Output directory to </param>
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
public bool PopulateFromDir ( string basePath , Hash omitFromScan , bool bare , bool archivesAsFiles , SkipFileType skipFileType ,
bool addBlanks , bool addDate , string tempDir , bool copyFiles , string headerToCheckAgainst , bool chdsAsFiles )
{
// If the description is defined but not the name, set the name from the description
if ( String . IsNullOrWhiteSpace ( Name ) & & ! String . IsNullOrWhiteSpace ( Description ) )
{
Name = Description ;
}
// If the name is defined but not the description, set the description from the name
else if ( ! String . IsNullOrWhiteSpace ( Name ) & & String . IsNullOrWhiteSpace ( Description ) )
{
Description = Name + ( bare ? "" : " (" + Date + ")" ) ;
}
// If neither the name nor the description is defined, set them from the automatic values
else if ( String . IsNullOrWhiteSpace ( Name ) & & String . IsNullOrWhiteSpace ( Description ) )
{
string [ ] splitpath = basePath . Split ( Path . DirectorySeparatorChar ) ;
Name = String . IsNullOrWhiteSpace ( splitpath . Last ( ) ) ? splitpath [ splitpath . Length - 2 ] : splitpath . Last ( ) ;
Description = Name + ( bare ? "" : " (" + Date + ")" ) ;
}
// Clean the temp directory path
tempDir = ( String . IsNullOrWhiteSpace ( tempDir ) ? Path . GetTempPath ( ) : tempDir ) ;
// Process the input
if ( Directory . Exists ( basePath ) )
{
Globals . Logger . Verbose ( "Folder found: {0}" , basePath ) ;
// Process the files in the main folder
List < string > files = Directory . EnumerateFiles ( basePath , "*" , SearchOption . TopDirectoryOnly ) . ToList ( ) ;
Parallel . ForEach ( files , Globals . ParallelOptions , item = >
{
CheckFileForHashes ( item , basePath , omitFromScan , bare , archivesAsFiles , skipFileType ,
addBlanks , addDate , tempDir , copyFiles , headerToCheckAgainst , chdsAsFiles ) ;
} ) ;
// Find all top-level subfolders
files = Directory . EnumerateDirectories ( basePath , "*" , SearchOption . TopDirectoryOnly ) . ToList ( ) ;
foreach ( string item in files )
{
List < string > subfiles = Directory . EnumerateFiles ( item , "*" , SearchOption . AllDirectories ) . ToList ( ) ;
Parallel . ForEach ( subfiles , Globals . ParallelOptions , subitem = >
{
CheckFileForHashes ( subitem , basePath , omitFromScan , bare , archivesAsFiles , skipFileType ,
addBlanks , addDate , tempDir , copyFiles , headerToCheckAgainst , chdsAsFiles ) ;
} ) ;
}
// Now find all folders that are empty, if we are supposed to
if ( ! Romba & & addBlanks )
{
List < string > empties = Utilities . GetEmptyDirectories ( basePath ) . ToList ( ) ;
Parallel . ForEach ( empties , Globals . ParallelOptions , dir = >
{
// Get the full path for the directory
string fulldir = Path . GetFullPath ( dir ) ;
// Set the temporary variables
string gamename = "" ;
string romname = "" ;
// If we have a SuperDAT, we want anything that's not the base path as the game, and the file as the rom
if ( Type = = "SuperDAT" )
{
gamename = fulldir . Remove ( 0 , basePath . Length + 1 ) ;
romname = "_" ;
}
// Otherwise, we want just the top level folder as the game, and the file as everything else
else
{
gamename = fulldir . Remove ( 0 , basePath . Length + 1 ) . Split ( Path . DirectorySeparatorChar ) [ 0 ] ;
romname = Path . Combine ( fulldir . Remove ( 0 , basePath . Length + 1 + gamename . Length ) , "_" ) ;
}
// Sanitize the names
if ( gamename . StartsWith ( Path . DirectorySeparatorChar . ToString ( ) ) )
{
gamename = gamename . Substring ( 1 ) ;
}
if ( gamename . EndsWith ( Path . DirectorySeparatorChar . ToString ( ) ) )
{
gamename = gamename . Substring ( 0 , gamename . Length - 1 ) ;
}
if ( romname . StartsWith ( Path . DirectorySeparatorChar . ToString ( ) ) )
{
romname = romname . Substring ( 1 ) ;
}
if ( romname . EndsWith ( Path . DirectorySeparatorChar . ToString ( ) ) )
{
romname = romname . Substring ( 0 , romname . Length - 1 ) ;
}
Globals . Logger . Verbose ( "Adding blank empty folder: {0}" , gamename ) ;
this [ "null" ] . Add ( new Rom ( romname , gamename , omitFromScan ) ) ;
} ) ;
}
}
else if ( File . Exists ( basePath ) )
{
CheckFileForHashes ( basePath , Path . GetDirectoryName ( Path . GetDirectoryName ( basePath ) ) , omitFromScan , bare , archivesAsFiles ,
skipFileType , addBlanks , addDate , tempDir , copyFiles , headerToCheckAgainst , chdsAsFiles ) ;
}
// Now that we're done, delete the temp folder (if it's not the default)
Globals . Logger . User ( "Cleaning temp folder" ) ;
if ( tempDir ! = Path . GetTempPath ( ) )
{
Utilities . TryDeleteDirectory ( tempDir ) ;
}
return true ;
}
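// Illustrative usage (a sketch; flags and paths are hypothetical): build a DAT
// from a folder, scanning archive contents and adding blank entries for empty
// directories:
//
//     DatFile datFile = new DatFile { Name = "Example" };
//     datFile.PopulateFromDir(@"C:\roms", Hash.DeepHashes, false, false, SkipFileType.None,
//         true, false, "", false, null, false);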
/// <summary>
/// Check a given file for hashes, based on current settings
/// </summary>
/// <param name="item">Filename of the item to be checked</param>
/// <param name="basePath">Base folder to be used in creating the DAT</param>
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
/// <param name="bare">True if the date should be omitted from the DAT, false otherwise</param>
/// <param name="archivesAsFiles">True if archives should be treated as files, false otherwise</param>
/// <param name="skipFileType">Type of files that should be skipped</param>
/// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
/// <param name="tempDir">Name of the directory to create a temp folder in (blank is current directory)</param>
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
private void CheckFileForHashes ( string item , string basePath , Hash omitFromScan , bool bare , bool archivesAsFiles ,
SkipFileType skipFileType , bool addBlanks , bool addDate , string tempDir , bool copyFiles , string headerToCheckAgainst , bool chdsAsFiles )
{
// Special case for if we are in Romba mode (all names are supposed to be SHA-1 hashes)
if ( Romba )
{
GZipArchive archive = new GZipArchive ( item ) ;
Rom rom = archive . GetTorrentGZFileInfo ( ) ;
// If the rom is valid, write it out
if ( rom ! = null & & rom . Name ! = null )
{
// Add the list if it doesn't exist already
Add ( rom . Size + "-" + rom . CRC , rom ) ;
Globals . Logger . User ( "File added: {0}" , Path . GetFileNameWithoutExtension ( item ) + Environment . NewLine ) ;
}
else
{
Globals . Logger . User ( "File not added: {0}" , Path . GetFileNameWithoutExtension ( item ) + Environment . NewLine ) ;
return ;
}
return ;
}
// If we're copying files, copy it first and get the new filename
string newItem = item ;
string newBasePath = basePath ;
if ( copyFiles )
{
newBasePath = Path . Combine ( tempDir , Guid . NewGuid ( ) . ToString ( ) ) ;
newItem = Path . GetFullPath ( Path . Combine ( newBasePath , Path . GetFullPath ( item ) . Remove ( 0 , basePath . Length + 1 ) ) ) ;
Directory . CreateDirectory ( Path . GetDirectoryName ( newItem ) ) ;
File . Copy ( item , newItem , true ) ;
}
// Create a list for all found items
List < Rom > extracted = null ;
// If we don't have archives as files, try to scan the file as an archive
if ( ! archivesAsFiles )
{
// Get the base archive first
BaseArchive archive = Utilities . GetArchive ( newItem ) ;
// Now get all extracted items from the archive
if ( archive ! = null )
{
extracted = archive . GetArchiveFileInfo ( omitFromScan : omitFromScan , date : addDate ) ;
}
}
// If the file should be skipped based on type, do so now
if ( ( extracted ! = null & & skipFileType = = SkipFileType . Archive )
| | ( extracted = = null & & skipFileType = = SkipFileType . File ) )
{
return ;
}
// If the extracted list is null, just scan the item itself
if ( extracted = = null | | archivesAsFiles )
{
ProcessFile ( newItem , "" , newBasePath , omitFromScan , addDate , headerToCheckAgainst , chdsAsFiles ) ;
}
// Otherwise, add all of the found items
else
{
// First take care of the found items
Parallel . ForEach ( extracted , Globals . ParallelOptions , rom = >
{
ProcessFileHelper ( newItem ,
rom ,
basePath ,
( Path . GetDirectoryName ( Path . GetFullPath ( item ) ) + Path . DirectorySeparatorChar ) . Remove ( 0 , basePath . Length ) + Path . GetFileNameWithoutExtension ( item ) ) ;
} ) ;
// Then, if we're looking for blanks, get all of the blank folders and add them
if ( addBlanks )
{
List < string > empties = new List < string > ( ) ;
// Get the base archive first
BaseArchive archive = Utilities . GetArchive ( newItem ) ;
// Now get all blank folders from the archive
if ( archive ! = null )
{
empties = archive . GetEmptyFolders ( ) ;
}
// Add all of the found empties to the DAT
Parallel . ForEach ( empties , Globals . ParallelOptions , empty = >
{
Rom emptyRom = new Rom ( Path . Combine ( empty , "_" ) , newItem , omitFromScan ) ;
ProcessFileHelper ( newItem ,
emptyRom ,
basePath ,
( Path . GetDirectoryName ( Path . GetFullPath ( item ) ) + Path . DirectorySeparatorChar ) . Remove ( 0 , basePath . Length ) + Path . GetFileNameWithoutExtension ( item ) ) ;
} ) ;
}
}
// Clean up the copied file, if we made one
if ( copyFiles & & item ! = newItem )
{
Utilities . TryDeleteDirectory ( newBasePath ) ;
}
}
/// <summary>
/// Process a single file as a file
/// </summary>
/// <param name="item">File to be added</param>
/// <param name="parent">Parent game to be used</param>
/// <param name="basePath">Path the represents the parent directory</param>
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
private void ProcessFile ( string item , string parent , string basePath , Hash omitFromScan ,
bool addDate , string headerToCheckAgainst , bool chdsAsFiles )
{
Globals . Logger . Verbose ( "'{0}' treated like a file" , Path . GetFileName ( item ) ) ;
DatItem datItem = Utilities . GetFileInfo ( item , omitFromScan : omitFromScan , date : addDate , header : headerToCheckAgainst , chdsAsFiles : chdsAsFiles ) ;
ProcessFileHelper ( item , datItem , basePath , parent ) ;
}
/// <summary>
/// Process a single file as a file (with found Rom data)
/// </summary>
/// <param name="item">File to be added</param>
/// <param name="item">Rom data to be used to write to file</param>
/// <param name="basepath">Path the represents the parent directory</param>
/// <param name="parent">Parent game to be used</param>
2017-10-09 21:16:03 -07:00
private void ProcessFileHelper ( string item , DatItem datItem , string basepath , string parent )
2017-10-09 20:38:15 -07:00
{
// If the datItem isn't a Rom or Disk, return
if ( datItem . Type ! = ItemType . Rom & & datItem . Type ! = ItemType . Disk )
{
return ;
}
string key = "" ;
if ( datItem . Type = = ItemType . Rom )
{
key = ( ( Rom ) datItem ) . Size + "-" + ( ( Rom ) datItem ) . CRC ;
}
else if ( datItem . Type = = ItemType . Disk )
{
key = ( ( Disk ) datItem ) . SHA1 ;
}
// Add the list if it doesn't exist already
Add ( key ) ;
try
{
// If the basepath ends with a directory separator, remove it
if ( ! basepath . EndsWith ( Path . DirectorySeparatorChar . ToString ( ) ) )
{
basepath + = Path . DirectorySeparatorChar . ToString ( ) ;
}
// Make sure we have the full item path
item = Path . GetFullPath ( item ) ;
// Get the data to be added as game and item names
string gamename = "" ;
string romname = "" ;
// If the parent is blank, then we have a non-archive file
if ( String . IsNullOrWhiteSpace ( parent ) )
{
// If we have a SuperDAT, we want anything that's not the base path as the game, and the file as the rom
if ( Type = = "SuperDAT" )
{
gamename = Path . GetDirectoryName ( item . Remove ( 0 , basepath . Length ) ) ;
romname = Path . GetFileName ( item ) ;
}
// Otherwise, we want just the top level folder as the game, and the file as everything else
else
{
gamename = item . Remove ( 0 , basepath . Length ) . Split ( Path . DirectorySeparatorChar ) [ 0 ] ;
romname = item . Remove ( 0 , ( Path . Combine ( basepath , gamename ) . Length ) ) ;
}
}
// Otherwise, we assume that we have an archive
else
{
// If we have a SuperDAT, we want the archive name as the game, and the file as everything else (?)
if ( Type = = "SuperDAT" )
{
gamename = parent ;
romname = datItem . Name ;
}
// Otherwise, we want the archive name as the game, and the file as everything else
else
{
gamename = parent ;
romname = datItem . Name ;
}
}
// Sanitize the names
if ( romname = = null )
{
romname = "" ;
}
if ( gamename . StartsWith ( Path . DirectorySeparatorChar . ToString ( ) ) )
{
gamename = gamename . Substring ( 1 ) ;
}
if ( gamename . EndsWith ( Path . DirectorySeparatorChar . ToString ( ) ) )
{
gamename = gamename . Substring ( 0 , gamename . Length - 1 ) ;
}
if ( romname . StartsWith ( Path . DirectorySeparatorChar . ToString ( ) ) )
{
romname = romname . Substring ( 1 ) ;
}
if ( romname . EndsWith ( Path . DirectorySeparatorChar . ToString ( ) ) )
{
romname = romname . Substring ( 0 , romname . Length - 1 ) ;
}
if ( ! String . IsNullOrWhiteSpace ( gamename ) & & String . IsNullOrWhiteSpace ( romname ) )
{
romname = gamename ;
gamename = "Default" ;
}
// Update rom information
datItem . Name = romname ;
datItem . MachineName = gamename ;
datItem . MachineDescription = gamename ;
// If we have a Disk, then the ".chd" extension needs to be removed
if ( datItem . Type = = ItemType . Disk )
{
datItem . Name = datItem . Name . Replace ( ".chd" , "" ) ;
}
// Add the file information to the DAT
Add ( key , datItem ) ;
Globals . Logger . User ( "File added: {0}" , romname + Environment . NewLine ) ;
}
catch ( IOException ex )
{
Globals . Logger . Error ( ex . ToString ( ) ) ;
return ;
}
}
#endregion
#region Rebuilding and Verifying
/// <summary>
/// Process the DAT and find all matches in input files and folders assuming they're a depot
/// </summary>
/// <param name="inputs">List of input files/folders to check</param>
/// <param name="outDir">Output directory to use to build to</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
/// <param name="delete">True if input files should be deleted, false otherwise</param>
/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
/// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param>
/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <returns>True if rebuilding was a success, false otherwise</returns>
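/// <example>
/// A hypothetical usage sketch (assumes <c>datFile</c> was already populated via Parse
/// and <c>inputs</c> lists depot root directories):
/// <code>
/// bool ok = datFile.RebuildDepot(inputs, @"C:\rebuilt", date: false, delete: false,
///     inverse: false, outputFormat: OutputFormat.TorrentZip, romba: false,
///     updateDat: false, headerToCheckAgainst: null);
/// </code>
/// </example>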
public bool RebuildDepot ( List < string > inputs , string outDir , bool date , bool delete ,
bool inverse , OutputFormat outputFormat , bool romba , bool updateDat , string headerToCheckAgainst )
{
#region Perform setup
// If the DAT is not populated and inverse is not set, inform the user and quit
if ( Count = = 0 & & ! inverse )
{
Globals . Logger . User ( "No entries were found to rebuild, exiting..." ) ;
return false ;
}
// Check that the output directory exists
if ( ! Directory . Exists ( outDir ) )
{
Directory . CreateDirectory ( outDir ) ;
outDir = Path . GetFullPath ( outDir ) ;
}
// Now we want to get forcepack flag if it's not overridden
if ( outputFormat = = OutputFormat . Folder & & ForcePacking ! = ForcePacking . None )
{
switch ( ForcePacking )
{
case ForcePacking . Zip :
outputFormat = OutputFormat . TorrentZip ;
break ;
case ForcePacking . Unzip :
outputFormat = OutputFormat . Folder ;
break ;
}
}
// Preload the Skipper list
int listcount = Skipper . List . Count ;
#endregion
bool success = true ;
#region Rebuild from depots in order
string format = "" ;
switch ( outputFormat )
{
case OutputFormat . Folder :
format = "directory" ;
break ;
case OutputFormat . TapeArchive :
format = "TAR" ;
break ;
case OutputFormat . Torrent7Zip :
format = "Torrent7Z" ;
break ;
case OutputFormat . TorrentGzip :
format = "TorrentGZ" ;
break ;
case OutputFormat . TorrentLRZip :
format = "TorrentLRZ" ;
break ;
case OutputFormat . TorrentRar :
format = "TorrentRAR" ;
break ;
case OutputFormat . TorrentXZ :
format = "TorrentXZ" ;
break ;
case OutputFormat . TorrentZip :
format = "TorrentZip" ;
break ;
}
InternalStopwatch watch = new InternalStopwatch ( "Rebuilding all files to {0}" , format ) ;
// Now loop through and get only directories from the input paths
List < string > directories = new List < string > ( ) ;
Parallel . ForEach ( inputs , Globals . ParallelOptions , input = >
{
// Add to the list if the input is a directory
if ( Directory . Exists ( input ) )
{
Globals . Logger . Verbose ( "Adding depot: {0}" , input ) ;
lock ( directories )
{
directories . Add ( input ) ;
}
}
} ) ;
// If we don't have any directories, we want to exit
if ( directories . Count = = 0 )
{
return success ;
}
// Now that we have a list of depots, we want to sort the input DAT by SHA-1
BucketBy ( SortedBy . SHA1 , DedupeType . None ) ;
// Then we want to loop through each of the hashes and see if we can rebuild
List < string > hashes = Keys ;
foreach ( string hash in hashes )
{
// Pre-empt any issues that could arise from string length
if ( hash . Length ! = Constants . SHA1Length )
{
continue ;
}
Globals . Logger . User ( "Checking hash '{0}'" , hash ) ;
// Get the extension path for the hash
string subpath = Utilities . GetRombaPath ( hash ) ;
// Find the first depot that includes the hash
string foundpath = null ;
foreach ( string directory in directories )
{
if ( File . Exists ( Path . Combine ( directory , subpath ) ) )
{
foundpath = Path . Combine ( directory , subpath ) ;
break ;
}
}
// If we didn't find a path, then we continue
if ( foundpath = = null )
{
continue ;
}
// If we have a path, we want to try to get the rom information
GZipArchive archive = new GZipArchive ( foundpath ) ;
Rom fileinfo = archive . GetTorrentGZFileInfo ( ) ;
// If the file information is null, then we continue
if ( fileinfo = = null )
{
continue ;
}
// Otherwise, we rebuild that file to all locations that we need to
RebuildIndividualFile ( fileinfo , foundpath , outDir , date , inverse , outputFormat , romba ,
updateDat , false /* isZip */ , headerToCheckAgainst ) ;
}
watch . Stop ( ) ;
#endregion
// If we're updating the DAT, output to the rebuild directory
if ( updateDat )
{
FileName = "fixDAT_" + FileName ;
Name = "fixDAT_" + Name ;
Description = "fixDAT_" + Description ;
RemoveMarkedItems ( ) ;
Write ( outDir ) ;
}
return success ;
}
/// <summary>
/// Process the DAT and find all matches in input files and folders
/// </summary>
/// <param name="inputs">List of input files/folders to check</param>
/// <param name="outDir">Output directory to use to build to</param>
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
/// <param name="delete">True if input files should be deleted, false otherwise</param>
/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
/// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param>
/// <param name="archiveScanLevel">ArchiveScanLevel representing the archive handling levels</param>
/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
/// <returns>True if rebuilding was a success, false otherwise</returns>
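/// <example>
/// A hypothetical usage sketch (assumes <c>datFile</c> was populated via Parse and
/// <c>scanLevel</c> is whatever ArchiveScanLevel the caller wants):
/// <code>
/// bool ok = datFile.RebuildGeneric(inputs, @"C:\rebuilt", quickScan: true, date: false,
///     delete: false, inverse: false, outputFormat: OutputFormat.TorrentGzip, romba: false,
///     archiveScanLevel: scanLevel, updateDat: false, headerToCheckAgainst: null,
///     chdsAsFiles: true);
/// </code>
/// </example>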
public bool RebuildGeneric ( List < string > inputs , string outDir , bool quickScan , bool date ,
bool delete , bool inverse , OutputFormat outputFormat , bool romba , ArchiveScanLevel archiveScanLevel , bool updateDat ,
string headerToCheckAgainst , bool chdsAsFiles )
{
#region Perform setup
// If the DAT is not populated and inverse is not set, inform the user and quit
if ( Count = = 0 & & ! inverse )
{
Globals . Logger . User ( "No entries were found to rebuild, exiting..." ) ;
return false ;
}
// Check that the output directory exists
if ( ! Directory . Exists ( outDir ) )
{
Directory . CreateDirectory ( outDir ) ;
outDir = Path . GetFullPath ( outDir ) ;
}
// Now we want to get forcepack flag if it's not overridden
if ( outputFormat = = OutputFormat . Folder & & ForcePacking ! = ForcePacking . None )
{
switch ( ForcePacking )
{
case ForcePacking . Zip :
outputFormat = OutputFormat . TorrentZip ;
break ;
case ForcePacking . Unzip :
outputFormat = OutputFormat . Folder ;
break ;
}
}
// Preload the Skipper list
int listcount = Skipper . List . Count ;
#endregion
bool success = true ;
#region Rebuild from sources in order
string format = "" ;
switch ( outputFormat )
{
case OutputFormat . Folder :
format = "directory" ;
break ;
case OutputFormat . TapeArchive :
format = "TAR" ;
break ;
case OutputFormat . Torrent7Zip :
format = "Torrent7Z" ;
break ;
case OutputFormat . TorrentGzip :
format = "TorrentGZ" ;
break ;
case OutputFormat . TorrentLRZip :
format = "TorrentLRZ" ;
break ;
case OutputFormat . TorrentRar :
format = "TorrentRAR" ;
break ;
case OutputFormat . TorrentXZ :
format = "TorrentXZ" ;
break ;
case OutputFormat . TorrentZip :
format = "TorrentZip" ;
break ;
}
InternalStopwatch watch = new InternalStopwatch ( "Rebuilding all files to {0}" , format ) ;
// Now loop through all of the files in all of the inputs
foreach ( string input in inputs )
{
// If the input is a file
if ( File . Exists ( input ) )
{
Globals . Logger . User ( "Checking file: {0}" , input ) ;
RebuildGenericHelper ( input , outDir , quickScan , date , delete , inverse ,
outputFormat , romba , archiveScanLevel , updateDat , headerToCheckAgainst , chdsAsFiles ) ;
}
// If the input is a directory
else if ( Directory . Exists ( input ) )
{
Globals . Logger . Verbose ( "Checking directory: {0}" , input ) ;
foreach ( string file in Directory . EnumerateFiles ( input , "*" , SearchOption . AllDirectories ) )
{
Globals . Logger . User ( "Checking file: {0}" , file ) ;
RebuildGenericHelper ( file , outDir , quickScan , date , delete , inverse ,
outputFormat , romba , archiveScanLevel , updateDat , headerToCheckAgainst , chdsAsFiles ) ;
}
}
}
watch . Stop ( ) ;
#endregion
// If we're updating the DAT, output to the rebuild directory
if ( updateDat )
{
FileName = "fixDAT_" + FileName ;
Name = "fixDAT_" + Name ;
Description = "fixDAT_" + Description ;
RemoveMarkedItems ( ) ;
Write ( outDir ) ;
}
return success ;
}
/// <summary>
/// Attempt to add a file to the output if it matches
/// </summary>
/// <param name="file">Name of the file to process</param>
/// <param name="outDir">Output directory to use to build to</param>
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
/// <param name="delete">True if input files should be deleted, false otherwise</param>
/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
/// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param>
/// <param name="archiveScanLevel">ArchiveScanLevel representing the archive handling levels</param>
/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
private void RebuildGenericHelper ( string file , string outDir , bool quickScan , bool date ,
bool delete , bool inverse , OutputFormat outputFormat , bool romba , ArchiveScanLevel archiveScanLevel , bool updateDat ,
string headerToCheckAgainst , bool chdsAsFiles )
{
// If we somehow have a null filename, return
if ( file = = null )
{
return ;
}
// Set the deletion variables
bool usedExternally = false ;
bool usedInternally = false ;
// Get the required scanning level for the file
Utilities . GetInternalExternalProcess ( file , archiveScanLevel , out bool shouldExternalProcess , out bool shouldInternalProcess ) ;
// If we're supposed to scan the file externally
if ( shouldExternalProcess )
{
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
DatItem fileinfo = Utilities . GetFileInfo ( file , omitFromScan : ( quickScan ? Hash . SecureHashes : Hash . DeepHashes ) ,
header : headerToCheckAgainst , chdsAsFiles : chdsAsFiles ) ;
usedExternally = RebuildIndividualFile ( fileinfo , file , outDir , date , inverse , outputFormat ,
romba , updateDat , null /* isZip */ , headerToCheckAgainst ) ;
}
// If we're supposed to scan the file internally
if ( shouldInternalProcess )
{
// Create an empty list of Roms for archive entries
List < Rom > entries = null ;
usedInternally = true ;
// Get the TGZ status for later
GZipArchive tgz = new GZipArchive ( file ) ;
bool isTorrentGzip = tgz . IsTorrent ( ) ;
// Get the base archive first
BaseArchive archive = Utilities . GetArchive ( file ) ;
// Now get all extracted items from the archive
if ( archive ! = null )
2017-10-09 20:38:15 -07:00
{
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
entries = archive . GetArchiveFileInfo ( omitFromScan : ( quickScan ? Hash . SecureHashes : Hash . DeepHashes ) , date : date ) ;
}
// If the entries list is null, we encountered an error and should scan externally
if ( entries = = null & & File . Exists ( file ) )
{
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
DatItem fileinfo = Utilities . GetFileInfo ( file , omitFromScan : ( quickScan ? Hash . SecureHashes : Hash . DeepHashes ) , chdsAsFiles : chdsAsFiles ) ;
usedExternally = RebuildIndividualFile ( fileinfo , file , outDir , date , inverse , outputFormat ,
romba , updateDat , null /* isZip */ , headerToCheckAgainst ) ;
}
// Otherwise, loop through the entries and try to match
else
{
foreach ( Rom entry in entries )
{
usedInternally & = RebuildIndividualFile ( entry , file , outDir , date , inverse , outputFormat ,
romba , updateDat , ! isTorrentGzip /* isZip */ , headerToCheckAgainst ) ;
}
}
}
// If we are supposed to delete the file, do so
if ( delete & & ( usedExternally | | usedInternally ) )
{
Utilities . TryDeleteFile ( file ) ;
}
}
/// <summary>
/// Find duplicates and rebuild individual files to output
/// </summary>
/// <param name="datItem">Information for the current file to rebuild from</param>
/// <param name="file">Name of the file to process</param>
/// <param name="outDir">Output directory to use to build to</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
/// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param>
/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
/// <param name="isZip">True if the input file is an archive, false if the file is TGZ, null otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <returns>True if the file was able to be rebuilt, false otherwise</returns>
private bool RebuildIndividualFile ( DatItem datItem , string file , string outDir , bool date ,
bool inverse , OutputFormat outputFormat , bool romba , bool updateDat , bool? isZip , string headerToCheckAgainst )
{
// Set the output value
bool rebuilt = false ;
// If the DatItem is a Disk, force rebuilding to a folder except if TGZ
if ( datItem . Type = = ItemType . Disk & & outputFormat ! = OutputFormat . TorrentGzip )
{
outputFormat = OutputFormat . Folder ;
}
// Prepopulate a few key strings based on DatItem type
string crc = null ;
string sha1 = null ;
if ( datItem . Type = = ItemType . Rom )
{
crc = ( ( Rom ) datItem ) . CRC ;
sha1 = ( ( Rom ) datItem ) . SHA1 ;
}
else if ( datItem . Type = = ItemType . Disk )
{
crc = "" ;
sha1 = ( ( Disk ) datItem ) . SHA1 ;
}
// Find if the file has duplicates in the DAT
bool hasDuplicates = datItem . HasDuplicates ( this ) ;
// If it has duplicates and we're not filtering, rebuild it
if ( hasDuplicates & & ! inverse )
{
// Get the list of duplicates to rebuild to
List < DatItem > dupes = datItem . GetDuplicates ( this , remove : updateDat ) ;
// If we don't have any duplicates, return
if ( dupes . Count = = 0 )
{
return rebuilt ;
}
// If we have a very specific TGZ->TGZ case, just copy it accordingly
GZipArchive tgz = new GZipArchive ( file ) ;
Rom rom = tgz . GetTorrentGZFileInfo ( ) ;
if ( isZip = = false & & rom ! = null & & outputFormat = = OutputFormat . TorrentGzip )
{
// Get the proper output path
if ( romba )
{
outDir = Path . Combine ( outDir , Utilities . GetRombaPath ( sha1 ) ) ;
}
else
{
outDir = Path . Combine ( outDir , sha1 + ".gz" ) ;
}
// Make sure the output folder is created
Directory . CreateDirectory ( Path . GetDirectoryName ( outDir ) ) ;
// Now copy the file over
try
{
File . Copy ( file , outDir ) ;
rebuilt = true ;
}
catch
{
rebuilt = false ;
}
return rebuilt ;
}
// Get a generic stream for the file
Stream fileStream = new MemoryStream ( ) ;
// If we have a zipfile, extract the stream to memory
if ( isZip ! = null )
{
string realName = null ;
BaseArchive archive = Utilities . GetArchive ( file ) ;
if ( archive ! = null )
{
( fileStream , realName ) = archive . ExtractEntryStream ( datItem . Name ) ;
}
}
// Otherwise, just open the filestream
else
{
fileStream = Utilities . TryOpenRead ( file ) ;
}
// If the stream is null, then return
if ( fileStream = = null )
{
return rebuilt ;
}
// Seek to the beginning of the stream
fileStream . Seek ( 0 , SeekOrigin . Begin ) ;
Globals . Logger . User ( "Matches found for '{0}', rebuilding accordingly..." , Path . GetFileName ( datItem . Name ) ) ;
rebuilt = true ;
// Now loop through the list and rebuild accordingly
foreach ( DatItem item in dupes )
{
// Get the output archive, if possible
BaseArchive outputArchive = Utilities . GetArchive ( outputFormat ) ;
// Now rebuild to the output file
outputArchive . Write ( fileStream , outDir , ( Rom ) item , date : date , romba : romba ) ;
}
// Close the input stream
fileStream ? . Dispose ( ) ;
}
// If we have no duplicates and we're filtering, rebuild it
else if ( ! hasDuplicates & & inverse )
{
string machinename = null ;
// If we have a very specific TGZ->TGZ case, just copy it accordingly
GZipArchive tgz = new GZipArchive ( file ) ;
Rom rom = tgz . GetTorrentGZFileInfo ( ) ;
if ( isZip = = false & & rom ! = null & & outputFormat = = OutputFormat . TorrentGzip )
{
// Get the proper output path
if ( romba )
{
outDir = Path . Combine ( outDir , Utilities . GetRombaPath ( sha1 ) ) ;
}
else
{
outDir = Path . Combine ( outDir , sha1 + ".gz" ) ;
}
// Make sure the output folder is created
Directory . CreateDirectory ( Path . GetDirectoryName ( outDir ) ) ;
// Now copy the file over
try
{
File . Copy ( file , outDir ) ;
rebuilt = true ;
}
catch
{
rebuilt = false ;
}
return rebuilt ;
}
// Get a generic stream for the file
Stream fileStream = new MemoryStream ( ) ;
// If we have a zipfile, extract the stream to memory
if ( isZip ! = null )
{
string realName = null ;
BaseArchive archive = Utilities . GetArchive ( file ) ;
if ( archive ! = null )
{
( fileStream , realName ) = archive . ExtractEntryStream ( datItem . Name ) ;
}
}
// Otherwise, just open the filestream
else
{
fileStream = Utilities . TryOpenRead ( file ) ;
}
// If the stream is null, then return
if ( fileStream = = null )
{
return rebuilt ;
}
// Get the item from the current file
Rom item = ( Rom ) Utilities . GetStreamInfo ( fileStream , fileStream . Length , keepReadOpen : true ) ;
item . MachineName = Path . GetFileNameWithoutExtension ( item . Name ) ;
item . MachineDescription = Path . GetFileNameWithoutExtension ( item . Name ) ;
// If we are coming from an archive, set the correct machine name
if ( machinename ! = null )
{
item . MachineName = machinename ;
item . MachineDescription = machinename ;
}
Globals . Logger . User ( "No matches found for '{0}', rebuilding accordingly from inverse flag..." , Path . GetFileName ( datItem . Name ) ) ;
// Get the output archive, if possible
BaseArchive outputArchive = Utilities . GetArchive ( outputFormat ) ;
// Now rebuild to the output file
if ( outputArchive = = null )
{
string outfile = Path . Combine ( outDir , Utilities . RemovePathUnsafeCharacters ( item . MachineName ) , item . Name ) ;
// Make sure the output folder is created
Directory . CreateDirectory ( Path . GetDirectoryName ( outfile ) ) ;
// Now copy the file over
try
{
FileStream writeStream = Utilities . TryCreate ( outfile ) ;
// Copy the input stream to the output
int bufferSize = 4096 * 128 ;
byte [ ] ibuffer = new byte [ bufferSize ] ;
int ilen ;
while ( ( ilen = fileStream . Read ( ibuffer , 0 , bufferSize ) ) > 0 )
{
writeStream . Write ( ibuffer , 0 , ilen ) ;
writeStream . Flush ( ) ;
}
writeStream . Dispose ( ) ;
if ( date & & ! String . IsNullOrWhiteSpace ( item . Date ) )
{
File . SetCreationTime ( outfile , DateTime . Parse ( item . Date ) ) ;
}
rebuilt = true ;
}
catch
{
rebuilt & = false ;
}
}
else
{
rebuilt = outputArchive . Write ( fileStream , outDir , item , date : date , romba : romba ) ;
2017-10-09 20:38:15 -07:00
}
// Close the input stream
fileStream ? . Dispose ( ) ;
}
// Now we want to take care of headers, if applicable
if ( headerToCheckAgainst ! = null )
{
// Get a generic stream for the file
Stream fileStream = new MemoryStream ( ) ;
// If we have a zipfile, extract the stream to memory
if ( isZip ! = null )
{
string realName = null ;
BaseArchive archive = Utilities . GetArchive ( file ) ;
if ( archive ! = null )
{
( fileStream , realName ) = archive . ExtractEntryStream ( datItem . Name ) ;
}
}
// Otherwise, just open the filestream
else
{
fileStream = Utilities . TryOpenRead ( file ) ;
}
// If the stream is null, then return
if ( fileStream = = null )
{
return rebuilt ;
}
// Check to see if we have a matching header first
SkipperRule rule = Skipper . GetMatchingRule ( fileStream , Path . GetFileNameWithoutExtension ( headerToCheckAgainst ) ) ;
// If there's a match, create the new file to write
if ( rule . Tests ! = null & & rule . Tests . Count ! = 0 )
{
// If the file could be transformed correctly
MemoryStream transformStream = new MemoryStream ( ) ;
if ( rule . TransformStream ( fileStream , transformStream , keepReadOpen : true , keepWriteOpen : true ) )
{
// Get the file information that we will be using
Rom headerless = ( Rom ) Utilities . GetStreamInfo ( transformStream , transformStream . Length , keepReadOpen : true ) ;
// Find if the file has duplicates in the DAT
hasDuplicates = headerless . HasDuplicates ( this ) ;
// If it has duplicates and we're not filtering, rebuild it
if ( hasDuplicates & & ! inverse )
{
// Get the list of duplicates to rebuild to
List < DatItem > dupes = headerless . GetDuplicates ( this , remove : updateDat ) ;
// If we don't have any duplicates, return
if ( dupes . Count = = 0 )
{
return rebuilt ;
}
Globals . Logger . User ( "Headerless matches found for '{0}', rebuilding accordingly..." , Path . GetFileName ( datItem . Name ) ) ;
rebuilt = true ;
// Now loop through the list and rebuild accordingly
foreach ( DatItem item in dupes )
{
// Create a headered item to use as well
datItem . CopyMachineInformation ( item ) ;
datItem . Name + = "_" + crc ;
// If either copy succeeds, then we want to set rebuilt to true
bool eitherSuccess = false ;
// Get the output archive, if possible
BaseArchive outputArchive = Utilities . GetArchive ( outputFormat ) ;
// Now rebuild to the output file
eitherSuccess | = outputArchive . Write ( transformStream , outDir , ( Rom ) item , date : date , romba : romba ) ;
eitherSuccess | = outputArchive . Write ( fileStream , outDir , ( Rom ) datItem , date : date , romba : romba ) ;
// Now add the success of either rebuild
rebuilt & = eitherSuccess ;
}
}
}
// Dispose of the stream
transformStream ? . Dispose ( ) ;
}
// Dispose of the stream
fileStream ? . Dispose ( ) ;
}
return rebuilt ;
}
/// <summary>
/// Process the DAT and verify from the depots
/// </summary>
/// <param name="inputs">List of input directories to compare against</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <returns>True if verification was a success, false otherwise</returns>
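/// <example>
/// A hypothetical usage sketch (assumes <c>datFile</c> was populated via Parse):
/// <code>
/// bool ok = datFile.VerifyDepot(new List&lt;string&gt; { @"C:\depot" }, headerToCheckAgainst: null);
/// </code>
/// </example>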
public bool VerifyDepot ( List < string > inputs , string headerToCheckAgainst )
{
bool success = true ;
InternalStopwatch watch = new InternalStopwatch ( "Verifying all from supplied depots" ) ;
// Now loop through and get only directories from the input paths
List < string > directories = new List < string > ( ) ;
foreach ( string input in inputs )
{
// Add to the list if the input is a directory
if ( Directory . Exists ( input ) )
{
Globals . Logger . Verbose ( "Adding depot: {0}" , input ) ;
directories . Add ( input ) ;
}
}
// If we don't have any directories, we want to exit
if ( directories . Count = = 0 )
{
return success ;
}
// Now that we have a list of depots, we want to sort the input DAT by SHA-1
BucketBy ( SortedBy . SHA1 , DedupeType . None ) ;
// Then we want to loop through each of the hashes and see if we can rebuild
List < string > hashes = Keys ;
foreach ( string hash in hashes )
{
// Pre-empt any issues that could arise from string length
if ( hash . Length ! = Constants . SHA1Length )
{
continue ;
}
Globals . Logger . User ( "Checking hash '{0}'" , hash ) ;
// Get the extension path for the hash
string subpath = Utilities . GetRombaPath ( hash ) ;
// Find the first depot that includes the hash
string foundpath = null ;
foreach ( string directory in directories )
{
if ( File . Exists ( Path . Combine ( directory , subpath ) ) )
{
foundpath = Path . Combine ( directory , subpath ) ;
break ;
}
}
// If we didn't find a path, then we continue
if ( foundpath = = null )
{
continue ;
}
// If we have a path, we want to try to get the rom information
GZipArchive tgz = new GZipArchive ( foundpath ) ;
Rom fileinfo = tgz . GetTorrentGZFileInfo ( ) ;
// If the file information is null, then we continue
if ( fileinfo = = null )
{
continue ;
}
// Now we want to remove all duplicates from the DAT
fileinfo . GetDuplicates ( this , remove : true ) ;
}
watch . Stop ( ) ;
// If there are any entries in the DAT, output to the rebuild directory
FileName = "fixDAT_" + FileName ;
Name = "fixDAT_" + Name ;
Description = "fixDAT_" + Description ;
Write ( ) ;
return success ;
}
/// <summary>
/// Process the DAT and verify the output directory
/// </summary>
/// <param name="inputs">List of input directories to compare against</param>
/// <param name="hashOnly">True if only hashes should be checked, false for full file information</param>
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
/// <returns>True if verification was a success, false otherwise</returns>
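/// <example>
/// A hypothetical usage sketch (assumes <c>datFile</c> was populated via Parse):
/// <code>
/// bool ok = datFile.VerifyGeneric(new List&lt;string&gt; { @"C:\roms" }, hashOnly: false,
///     quickScan: true, headerToCheckAgainst: null, chdsAsFiles: true);
/// </code>
/// </example>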
public bool VerifyGeneric ( List < string > inputs , bool hashOnly , bool quickScan , string headerToCheckAgainst , bool chdsAsFiles )
{
// TODO: We want the cross section of what's the folder and what's in the DAT. Right now, it just has what's in the DAT that's not in the folder
bool success = true ;
// Then, loop through and check each of the inputs
Globals . Logger . User ( "Processing files:\n" ) ;
foreach ( string input in inputs )
{
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
PopulateFromDir ( input , ( quickScan ? Hash . SecureHashes : Hash . DeepHashes ) /* omitFromScan */ , true /* bare */ , false /* archivesAsFiles */ ,
SkipFileType . None , false /* addBlanks */ , false /* addDate */ , "" /* tempDir */ , false /* copyFiles */ , headerToCheckAgainst , chdsAsFiles ) ;
}
// Setup the fixdat
DatFile matched = new DatFile ( this ) ;
matched . ResetDictionary ( ) ;
matched . FileName = "fixDat_" + matched . FileName ;
matched . Name = "fixDat_" + matched . Name ;
matched . Description = "fixDat_" + matched . Description ;
matched . DatFormat = DatFormat . Logiqx ;
// If we are checking hashes only, essentially diff the inputs
if ( hashOnly )
{
// First we need to sort and dedupe by hash to get duplicates
BucketBy ( SortedBy . CRC , DedupeType . Full ) ;
// Then follow the same tactics as before
foreach ( string key in Keys )
{
List < DatItem > roms = this [ key ] ;
foreach ( DatItem rom in roms )
{
if ( rom . SourceID = = 99 )
{
// Use the matching cast per item type; casting a Rom to Disk would throw
if ( rom . Type = = ItemType . Disk )
{
matched . Add ( ( ( Disk ) rom ) . SHA1 , rom ) ;
}
else if ( rom . Type = = ItemType . Rom )
{
matched . Add ( ( ( Rom ) rom ) . SHA1 , rom ) ;
}
}
}
}
}
// If we are checking full names, get only files found in directory
else
{
foreach ( string key in Keys )
{
List < DatItem > roms = this [ key ] ;
List < DatItem > newroms = DatItem . Merge ( roms ) ;
foreach ( Rom rom in newroms )
{
if ( rom . SourceID = = 99 )
{
matched . Add ( rom . Size + "-" + rom . CRC , rom ) ;
}
}
}
}
// Now output the fixdat to the main folder
success & = matched . Write ( stats : true ) ;
return success ;
}
#endregion
#region Splitting
/// <summary>
/// Split a set of input DATs based on the given information
/// </summary>
/// <param name="inputs">List of inputs to be used</param>
/// <param name="outDir">Output directory for the split files</param>
/// <param name="inplace">True if files should be written to the source folders, false otherwise</param>
/// <param name="splittingMode">Type of split to perform, if any</param>
/// <param name="exta">First extension to split on (Extension Split only)</param>
/// <param name="extb">Second extension to split on (Extension Split only)</param>
/// <param name="shortname">True if short filenames should be used, false otherwise (Level Split only)</param>
/// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise (Level Split only)</param>
public void DetermineSplitType ( List < string > inputs , string outDir , bool inplace , SplittingMode splittingMode ,
List < string > exta , List < string > extb , bool shortname , bool basedat )
{
// If we somehow have the "none" split type, return
if ( splittingMode = = SplittingMode . None )
{
return ;
}
// Get only files from the inputs
List < string > files = Utilities . GetOnlyFilesFromInputs ( inputs , appendparent : true ) ;
// Loop over the input files
foreach ( string file in files )
{
// Create and fill the new DAT
Parse ( file , 0 , 0 ) ;
// Get the output directory
outDir = Utilities . GetOutputPath ( outDir , file , inplace ) ;
// Split and write the DAT
if ( ( splittingMode & SplittingMode . Extension ) ! = 0 )
{
SplitByExtension ( outDir , exta , extb ) ;
}
if ( ( splittingMode & SplittingMode . Hash ) ! = 0 )
{
SplitByHash ( outDir ) ;
}
if ( ( splittingMode & SplittingMode . Level ) ! = 0 )
{
SplitByLevel ( outDir , shortname , basedat ) ;
}
if ( ( splittingMode & SplittingMode . Type ) ! = 0 )
{
SplitByType ( outDir ) ;
}
// Now re-empty the DAT to make room for the next one
DatFormat tempFormat = DatFormat ;
_datHeader = new DatHeader ( ) ;
ResetDictionary ( ) ;
DatFormat = tempFormat ;
}
}
/// <summary>
/// Split a DAT by input extensions
/// </summary>
/// <param name="outDir">Name of the directory to write the DATs out to</param>
/// <param name="extA">List of extensions to split on (first DAT)</param>
/// <param name="extB">List of extensions to split on (second DAT)</param>
/// <returns>True if split succeeded, false otherwise</returns>
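/// <example>
/// A hypothetical usage sketch, splitting a DAT into "(BIN)" and "(CUE)" subsets;
/// items matching neither extension land in both outputs:
/// <code>
/// bool ok = datFile.SplitByExtension(@"C:\split",
///     new List&lt;string&gt; { ".bin" }, new List&lt;string&gt; { ".cue" });
/// </code>
/// </example>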
public bool SplitByExtension ( string outDir , List < string > extA , List < string > extB )
{
// Normalize the extensions: strip any leading dot and uppercase them for comparison
List < string > newExtA = new List < string > ( ) ;
foreach ( string s in extA )
{
newExtA . Add ( ( s . StartsWith ( "." ) ? s . Substring ( 1 ) : s ) . ToUpperInvariant ( ) ) ;
}
string newExtAString = string . Join ( "," , newExtA ) ;
List < string > newExtB = new List < string > ( ) ;
foreach ( string s in extB )
{
newExtB . Add ( ( s . StartsWith ( "." ) ? s . Substring ( 1 ) : s ) . ToUpperInvariant ( ) ) ;
}
string newExtBString = string . Join ( "," , newExtB ) ;
// Set all of the appropriate outputs for each of the subsets
DatFile datdataA = new DatFile
{
FileName = this . FileName + " (" + newExtAString + ")" ,
Name = this . Name + " (" + newExtAString + ")" ,
Description = this . Description + " (" + newExtAString + ")" ,
Category = this . Category ,
Version = this . Version ,
Date = this . Date ,
Author = this . Author ,
Email = this . Email ,
Homepage = this . Homepage ,
Url = this . Url ,
Comment = this . Comment ,
DatFormat = this . DatFormat ,
} ;
DatFile datdataB = new DatFile
{
FileName = this . FileName + " (" + newExtBString + ")" ,
Name = this . Name + " (" + newExtBString + ")" ,
Description = this . Description + " (" + newExtBString + ")" ,
Category = this . Category ,
Version = this . Version ,
Date = this . Date ,
Author = this . Author ,
Email = this . Email ,
Homepage = this . Homepage ,
Url = this . Url ,
Comment = this . Comment ,
DatFormat = this . DatFormat ,
} ;
// If roms is empty, return false
if ( Count = = 0 )
{
return false ;
}
// Now separate the roms accordingly
List < string > keys = Keys ;
Parallel . ForEach ( keys , Globals . ParallelOptions , key = >
{
List < DatItem > items = this [ key ] ;
foreach ( DatItem item in items )
{
if ( newExtA . Contains ( Utilities . GetExtension ( item . Name . ToUpperInvariant ( ) ) ) )
{
datdataA . Add ( key , item ) ;
}
else if ( newExtB . Contains ( Utilities . GetExtension ( item . Name . ToUpperInvariant ( ) ) ) )
{
datdataB . Add ( key , item ) ;
}
else
{
datdataA . Add ( key , item ) ;
datdataB . Add ( key , item ) ;
}
}
} ) ;
// Then write out both files
bool success = datdataA . Write ( outDir ) ;
success & = datdataB . Write ( outDir ) ;
return success ;
}
/// <summary>
/// Split a DAT by best available hashes
/// </summary>
/// <param name="outDir">Name of the directory to write the DATs out to</param>
/// <returns>True if split succeeded, false otherwise</returns>
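/// <example>
/// A hypothetical usage sketch; each Rom or Disk lands in exactly one output DAT,
/// chosen by the best hash it carries (SHA-512 down through CRC, nodumps split out):
/// <code>
/// bool ok = datFile.SplitByHash(@"C:\split");
/// </code>
/// </example>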
public bool SplitByHash ( string outDir )
{
// Create each of the respective output DATs
Globals . Logger . User ( "Creating and populating new DATs" ) ;
DatFile nodump = new DatFile
{
FileName = this . FileName + " (Nodump)" ,
Name = this . Name + " (Nodump)" ,
Description = this . Description + " (Nodump)" ,
Category = this . Category ,
Version = this . Version ,
Date = this . Date ,
Author = this . Author ,
Email = this . Email ,
Homepage = this . Homepage ,
Url = this . Url ,
Comment = this . Comment ,
Header = this . Header ,
Type = this . Type ,
ForceMerging = this . ForceMerging ,
ForceNodump = this . ForceNodump ,
ForcePacking = this . ForcePacking ,
DatFormat = this . DatFormat ,
DedupeRoms = this . DedupeRoms ,
} ;
DatFile sha512 = new DatFile
{
FileName = this . FileName + " (SHA-512)" ,
Name = this . Name + " (SHA-512)" ,
Description = this . Description + " (SHA-512)" ,
Category = this . Category ,
Version = this . Version ,
Date = this . Date ,
Author = this . Author ,
Email = this . Email ,
Homepage = this . Homepage ,
Url = this . Url ,
Comment = this . Comment ,
Header = this . Header ,
Type = this . Type ,
ForceMerging = this . ForceMerging ,
ForceNodump = this . ForceNodump ,
ForcePacking = this . ForcePacking ,
DatFormat = this . DatFormat ,
DedupeRoms = this . DedupeRoms ,
} ;
DatFile sha384 = new DatFile
{
FileName = this . FileName + " (SHA-384)" ,
Name = this . Name + " (SHA-384)" ,
Description = this . Description + " (SHA-384)" ,
Category = this . Category ,
Version = this . Version ,
Date = this . Date ,
Author = this . Author ,
Email = this . Email ,
Homepage = this . Homepage ,
Url = this . Url ,
Comment = this . Comment ,
Header = this . Header ,
Type = this . Type ,
ForceMerging = this . ForceMerging ,
ForceNodump = this . ForceNodump ,
ForcePacking = this . ForcePacking ,
DatFormat = this . DatFormat ,
DedupeRoms = this . DedupeRoms ,
} ;
DatFile sha256 = new DatFile
{
FileName = this . FileName + " (SHA-256)" ,
Name = this . Name + " (SHA-256)" ,
Description = this . Description + " (SHA-256)" ,
Category = this . Category ,
Version = this . Version ,
Date = this . Date ,
Author = this . Author ,
Email = this . Email ,
Homepage = this . Homepage ,
Url = this . Url ,
Comment = this . Comment ,
Header = this . Header ,
Type = this . Type ,
ForceMerging = this . ForceMerging ,
ForceNodump = this . ForceNodump ,
ForcePacking = this . ForcePacking ,
DatFormat = this . DatFormat ,
DedupeRoms = this . DedupeRoms ,
} ;
DatFile sha1 = new DatFile
{
FileName = this . FileName + " (SHA-1)" ,
Name = this . Name + " (SHA-1)" ,
Description = this . Description + " (SHA-1)" ,
Category = this . Category ,
Version = this . Version ,
Date = this . Date ,
Author = this . Author ,
Email = this . Email ,
Homepage = this . Homepage ,
Url = this . Url ,
Comment = this . Comment ,
Header = this . Header ,
Type = this . Type ,
ForceMerging = this . ForceMerging ,
ForceNodump = this . ForceNodump ,
ForcePacking = this . ForcePacking ,
DatFormat = this . DatFormat ,
DedupeRoms = this . DedupeRoms ,
} ;
DatFile md5 = new DatFile
{
FileName = this . FileName + " (MD5)" ,
Name = this . Name + " (MD5)" ,
Description = this . Description + " (MD5)" ,
Category = this . Category ,
Version = this . Version ,
Date = this . Date ,
Author = this . Author ,
Email = this . Email ,
Homepage = this . Homepage ,
Url = this . Url ,
Comment = this . Comment ,
Header = this . Header ,
Type = this . Type ,
ForceMerging = this . ForceMerging ,
ForceNodump = this . ForceNodump ,
ForcePacking = this . ForcePacking ,
DatFormat = this . DatFormat ,
DedupeRoms = this . DedupeRoms ,
} ;
DatFile crc = new DatFile
{
FileName = this . FileName + " (CRC)" ,
Name = this . Name + " (CRC)" ,
Description = this . Description + " (CRC)" ,
Category = this . Category ,
Version = this . Version ,
Date = this . Date ,
Author = this . Author ,
Email = this . Email ,
Homepage = this . Homepage ,
Url = this . Url ,
Comment = this . Comment ,
Header = this . Header ,
Type = this . Type ,
ForceMerging = this . ForceMerging ,
ForceNodump = this . ForceNodump ,
ForcePacking = this . ForcePacking ,
DatFormat = this . DatFormat ,
DedupeRoms = this . DedupeRoms ,
} ;
DatFile other = new DatFile
{
FileName = this . FileName + " (Other)" ,
Name = this . Name + " (Other)" ,
Description = this . Description + " (Other)" ,
Category = this . Category ,
Version = this . Version ,
Date = this . Date ,
Author = this . Author ,
Email = this . Email ,
Homepage = this . Homepage ,
Url = this . Url ,
Comment = this . Comment ,
Header = this . Header ,
Type = this . Type ,
ForceMerging = this . ForceMerging ,
ForceNodump = this . ForceNodump ,
ForcePacking = this . ForcePacking ,
DatFormat = this . DatFormat ,
DedupeRoms = this . DedupeRoms ,
} ;
// Now populate each of the DAT objects in turn
List < string > keys = Keys ;
Parallel . ForEach ( keys , Globals . ParallelOptions , key = >
{
List < DatItem > items = this [ key ] ;
foreach ( DatItem item in items )
{
// If the file is not a Rom or Disk, continue; a return here would skip the
// remaining items for this key, since we are inside a Parallel.ForEach lambda
if ( item . Type ! = ItemType . Disk & & item . Type ! = ItemType . Rom )
{
continue ;
}
// If the file is a nodump
if ( ( item . Type = = ItemType . Rom & & ( ( Rom ) item ) . ItemStatus = = ItemStatus . Nodump )
| | ( item . Type = = ItemType . Disk & & ( ( Disk ) item ) . ItemStatus = = ItemStatus . Nodump ) )
{
nodump . Add ( key , item ) ;
}
// If the file has a SHA-512
else if ( ( item . Type = = ItemType . Rom & & ! String . IsNullOrWhiteSpace ( ( ( Rom ) item ) . SHA512 ) )
| | ( item . Type = = ItemType . Disk & & ! String . IsNullOrWhiteSpace ( ( ( Disk ) item ) . SHA512 ) ) )
{
sha512 . Add ( key , item ) ;
}
// If the file has a SHA-384
else if ( ( item . Type = = ItemType . Rom & & ! String . IsNullOrWhiteSpace ( ( ( Rom ) item ) . SHA384 ) )
| | ( item . Type = = ItemType . Disk & & ! String . IsNullOrWhiteSpace ( ( ( Disk ) item ) . SHA384 ) ) )
{
sha384 . Add ( key , item ) ;
}
// If the file has a SHA-256
else if ( ( item . Type = = ItemType . Rom & & ! String . IsNullOrWhiteSpace ( ( ( Rom ) item ) . SHA256 ) )
| | ( item . Type = = ItemType . Disk & & ! String . IsNullOrWhiteSpace ( ( ( Disk ) item ) . SHA256 ) ) )
{
sha256 . Add ( key , item ) ;
}
// If the file has a SHA-1
else if ( ( item . Type = = ItemType . Rom & & ! String . IsNullOrWhiteSpace ( ( ( Rom ) item ) . SHA1 ) )
| | ( item . Type = = ItemType . Disk & & ! String . IsNullOrWhiteSpace ( ( ( Disk ) item ) . SHA1 ) ) )
{
sha1 . Add ( key , item ) ;
}
// If the file has no SHA-1 but has an MD5
else if ( ( item . Type = = ItemType . Rom & & ! String . IsNullOrWhiteSpace ( ( ( Rom ) item ) . MD5 ) )
| | ( item . Type = = ItemType . Disk & & ! String . IsNullOrWhiteSpace ( ( ( Disk ) item ) . MD5 ) ) )
{
md5 . Add ( key , item ) ;
}
// If the file has no MD5 but a CRC
else if ( ( item . Type = = ItemType . Rom & & ! String . IsNullOrWhiteSpace ( ( ( Rom ) item ) . CRC ) ) )
{
crc . Add ( key , item ) ;
}
else
{
other . Add ( key , item ) ;
}
}
} ) ;
// Now, output all of the files to the output directory
Globals . Logger . User ( "DAT information created, outputting new files" ) ;
bool success = true ;
success & = nodump . Write ( outDir ) ;
success & = sha512 . Write ( outDir ) ;
success & = sha384 . Write ( outDir ) ;
success & = sha256 . Write ( outDir ) ;
success & = sha1 . Write ( outDir ) ;
success & = md5 . Write ( outDir ) ;
success & = crc . Write ( outDir ) ;
// Write the catch-all DAT as well so unhashed items are not silently dropped
success & = other . Write ( outDir ) ;
return success ;
}
/// <summary>
/// Split a SuperDAT by lowest available directory level
/// </summary>
/// <param name="outDir">Name of the directory to write the DATs out to</param>
/// <param name="shortname">True if short names should be used, false otherwise</param>
/// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise</param>
/// <returns>True if split succeeded, false otherwise</returns>
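/// <example>
/// A hypothetical usage sketch for a SuperDAT, emitting one DAT per directory level:
/// <code>
/// bool ok = datFile.SplitByLevel(@"C:\split", shortname: false, basedat: false);
/// </code>
/// </example>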
public bool SplitByLevel ( string outDir , bool shortname , bool basedat )
{
// First, organize by games so that we can do the right thing
BucketBy ( SortedBy . Game , DedupeType . None , lower : false , norename : true ) ;
// Create a temporary DAT to add things to
DatFile tempDat = new DatFile ( this )
{
Name = null ,
} ;
// Sort the input keys
List < string > keys = Keys ;
keys . Sort ( SplitByLevelSort ) ;
// Then, we loop over the games
Parallel . ForEach ( keys , Globals . ParallelOptions , key = >
{
// Here, the key is the name of the game to be used for comparison
if ( tempDat . Name ! = null & & tempDat . Name ! = Path . GetDirectoryName ( key ) )
{
// Write out the current DAT before moving to the next directory
SplitByLevelHelper ( tempDat , outDir , shortname , basedat ) ;
// Reset the DAT for the next items
tempDat = new DatFile ( this )
{
Name = null ,
} ;
}
// Clean the input list and set all games to be pathless
List < DatItem > items = this [ key ] ;
items . ForEach ( item = > item . MachineName = Path . GetFileName ( item . MachineName ) ) ;
items . ForEach ( item = > item . MachineDescription = Path . GetFileName ( item . MachineDescription ) ) ;
// Now add the game to the output DAT
tempDat . AddRange ( key , items ) ;
// Then set the DAT name to be the parent directory name
tempDat . Name = Path . GetDirectoryName ( key ) ;
} ) ;
// Write out anything left in the final DAT
SplitByLevelHelper ( tempDat , outDir , shortname , basedat ) ;
return true ;
}
/// <summary>
/// Helper function for SplitByLevel to sort the input game names
/// </summary>
/// <param name="a">First string to compare</param>
/// <param name="b">Second string to compare</param>
/// <returns>-1 for a coming before b, 0 for a == b, 1 for a coming after b</returns>
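/// <example>
/// Keys are ordered by directory depth first, then naturally within a depth: for
/// instance "a\b" sorts before "a\b\c", and "disk2" sorts before "disk10".
/// </example>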
private int SplitByLevelSort ( string a , string b )
{
NaturalComparer nc = new NaturalComparer ( ) ;
int adeep = a . Count ( c = > c = = '/' | | c = = '\\' ) ;
int bdeep = b . Count ( c = > c = = '/' | | c = = '\\' ) ;
if ( adeep = = bdeep )
{
return nc . Compare ( a , b ) ;
}
return adeep - bdeep ;
}
/// <summary>
/// Helper function for SplitByLevel to clean and write out a DAT
/// </summary>
/// <param name="datFile">DAT to clean and write out</param>
/// <param name="outDir">Directory to write out to</param>
/// <param name="shortname">True if short naming scheme should be used, false otherwise</param>
/// <param name="restore">True if original filenames should be used as the base for output filename, false otherwise</param>
private void SplitByLevelHelper ( DatFile datFile , string outDir , bool shortname , bool restore )
{
// Get the name from the DAT to use separately
string name = datFile . Name ;
string expName = name . Replace ( "/" , " - " ) . Replace ( "\\" , " - " ) ;
// Now set the new output values
datFile . FileName = HttpUtility . HtmlDecode ( String . IsNullOrWhiteSpace ( name )
? FileName
: ( shortname
? Path . GetFileName ( name )
: expName
)
) ;
datFile . FileName = ( restore ? FileName + " (" + datFile . FileName + ")" : datFile . FileName ) ;
datFile . Name = Name + " (" + expName + ")" ;
datFile . Description = ( String . IsNullOrWhiteSpace ( Description ) ? datFile . Name : Description + " (" + expName + ")" ) ;
datFile . Type = null ;
// Write out the temporary DAT to the proper directory
datFile . Write ( outDir ) ;
}
/// <summary>
/// Split a DAT by type of Rom
/// </summary>
/// <param name="outDir">Name of the directory to write the DATs out to</param>
/// <returns>True if split succeeded, false otherwise</returns>
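/// <example>
/// A hypothetical usage sketch, producing separate (ROM), (Disk), and (Sample) DATs:
/// <code>
/// bool ok = datFile.SplitByType(@"C:\split");
/// </code>
/// </example>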
public bool SplitByType ( string outDir )
{
// Create each of the respective output DATs
Globals . Logger . User ( "Creating and populating new DATs" ) ;
DatFile romdat = new DatFile
{
FileName = this . FileName + " (ROM)" ,
Name = this . Name + " (ROM)" ,
Description = this . Description + " (ROM)" ,
Category = this . Category ,
Version = this . Version ,
Date = this . Date ,
Author = this . Author ,
Email = this . Email ,
Homepage = this . Homepage ,
Url = this . Url ,
Comment = this . Comment ,
Header = this . Header ,
Type = this . Type ,
ForceMerging = this . ForceMerging ,
ForceNodump = this . ForceNodump ,
ForcePacking = this . ForcePacking ,
DatFormat = this . DatFormat ,
DedupeRoms = this . DedupeRoms ,
} ;
DatFile diskdat = new DatFile
{
FileName = this . FileName + " (Disk)" ,
Name = this . Name + " (Disk)" ,
Description = this . Description + " (Disk)" ,
Category = this . Category ,
Version = this . Version ,
Date = this . Date ,
Author = this . Author ,
Email = this . Email ,
Homepage = this . Homepage ,
Url = this . Url ,
Comment = this . Comment ,
Header = this . Header ,
Type = this . Type ,
ForceMerging = this . ForceMerging ,
ForceNodump = this . ForceNodump ,
ForcePacking = this . ForcePacking ,
DatFormat = this . DatFormat ,
DedupeRoms = this . DedupeRoms ,
} ;
DatFile sampledat = new DatFile
{
FileName = this . FileName + " (Sample)" ,
Name = this . Name + " (Sample)" ,
Description = this . Description + " (Sample)" ,
Category = this . Category ,
Version = this . Version ,
Date = this . Date ,
Author = this . Author ,
Email = this . Email ,
Homepage = this . Homepage ,
Url = this . Url ,
Comment = this . Comment ,
Header = this . Header ,
Type = this . Type ,
ForceMerging = this . ForceMerging ,
ForceNodump = this . ForceNodump ,
ForcePacking = this . ForcePacking ,
DatFormat = this . DatFormat ,
DedupeRoms = this . DedupeRoms ,
} ;
// Now populate each of the DAT objects in turn
List < string > keys = Keys ;
Parallel . ForEach ( keys , Globals . ParallelOptions , key = >
{
List < DatItem > items = this [ key ] ;
foreach ( DatItem item in items )
{
// If the file is a Rom
if ( item . Type = = ItemType . Rom )
{
romdat . Add ( key , item ) ;
}
// If the file is a Disk
else if ( item . Type = = ItemType . Disk )
{
diskdat . Add ( key , item ) ;
}
// If the file is a Sample
else if ( item . Type = = ItemType . Sample )
{
sampledat . Add ( key , item ) ;
}
}
} ) ;
// Now, output all of the files to the output directory
Globals . Logger . User ( "DAT information created, outputting new files" ) ;
bool success = true ;
success & = romdat . Write ( outDir ) ;
success & = diskdat . Write ( outDir ) ;
success & = sampledat . Write ( outDir ) ;
return success ;
}
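
		// A minimal usage sketch (hypothetical paths; assumes this DatFile was
		// already populated, e.g. via Parse as done elsewhere in this class):
		//
		//     DatFile datFile = new DatFile();
		//     datFile.Parse("input.dat", 0, 0);
		//     bool ok = datFile.SplitByType("split-out");
		//
		// Each item type (Rom, Disk, Sample) lands in its own output DAT.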
#endregion
#region Statistics
		/// <summary>
		/// Output the stats for the Dat in a human-readable format
		/// </summary>
		/// <param name="recalculate">True if numbers should be recalculated for the DAT, false otherwise (default)</param>
		/// <param name="game">Number of games to use, -1 means recalculate games (default)</param>
		/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise (default)</param>
		/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise (default)</param>
		public void WriteStatsToScreen(bool recalculate = false, long game = -1, bool baddumpCol = false, bool nodumpCol = false)
		{
			// If we're supposed to recalculate the statistics, do so
			if (recalculate)
			{
				RecalculateStats();
			}

			BucketBy(SortedBy.Game, DedupeType.None, norename: true);
			if (TotalSize < 0)
			{
				// A negative total means the counter wrapped around; undo the overflow
				TotalSize = Int64.MaxValue + TotalSize;
			}

			// Log the results to screen
			string results = @"For '" + FileName + @"':
--------------------------------------------------
    Uncompressed size:       " + Utilities.GetBytesReadable(TotalSize) + @"
    Games found:             " + (game == -1 ? Keys.Count() : game) + @"
    Roms found:              " + RomCount + @"
    Disks found:             " + DiskCount + @"
    Roms with CRC:           " + CRCCount + @"
    Roms with MD5:           " + MD5Count + @"
    Roms with SHA-1:         " + SHA1Count + @"
    Roms with SHA-256:       " + SHA256Count + @"
    Roms with SHA-384:       " + SHA384Count + @"
    Roms with SHA-512:       " + SHA512Count + "\n";

			if (baddumpCol)
			{
				results += "    Roms with BadDump status: " + BaddumpCount + "\n";
			}
			if (nodumpCol)
			{
				results += "    Roms with Nodump status: " + NodumpCount + "\n";
			}

			// For spacing between DATs
			results += "\n\n";

			Globals.Logger.User(results);
		}
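
		// A minimal usage sketch: print stats for a parsed DAT, including the
		// baddump and nodump columns, without recalculating the counts:
		//
		//     datFile.WriteStatsToScreen(recalculate: false, baddumpCol: true, nodumpCol: true);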

		/// <summary>
		/// Recalculate the statistics for the Dat
		/// </summary>
		private void RecalculateStats()
		{
			// Wipe out any stats already there
			_datStats.Reset();

			// If we have a blank Dat in any way, return
			if (Count == 0)
			{
				return;
			}

			// Loop through and add each item to the stats
			List<string> keys = Keys;
			Parallel.ForEach(keys, Globals.ParallelOptions, key =>
			{
				List<DatItem> items = this[key];
				foreach (DatItem item in items)
				{
					_datStats.AddItem(item);
				}
			});
		}

		#endregion

		#region Writing

		/// <summary>
		/// Create and open an output file for writing directly from the internal dictionary
		/// </summary>
		/// <param name="outDir">Set the output directory (default current directory)</param>
		/// <param name="norename">True if games should only be compared on game and file name (default), false if system and source are counted</param>
		/// <param name="stats">True if DAT statistics should be output on write, false otherwise (default)</param>
		/// <param name="ignoreblanks">True if blank roms should be skipped on output, false otherwise (default)</param>
		/// <param name="overwrite">True if files should be overwritten (default), false if they should be renamed instead</param>
		/// <returns>True if the DAT was written correctly, false otherwise</returns>
		public bool Write(string outDir = null, bool norename = true, bool stats = false, bool ignoreblanks = false, bool overwrite = true)
		{
			// If there's nothing there, abort
			if (Count == 0)
			{
				Globals.Logger.User("There were no items to write out!");
				return false;
			}

			// Ensure the output directory is set and created
			outDir = Utilities.EnsureOutputDirectory(outDir, create: true);

			// If the DAT has no output format, default to XML
			if (DatFormat == 0)
			{
				Globals.Logger.Verbose("No DAT format defined, defaulting to XML");
				DatFormat = DatFormat.Logiqx;
			}

			// Make sure that the three essential fields are filled in, deriving
			// any missing one from whichever of the others is available
			if (String.IsNullOrWhiteSpace(FileName) && String.IsNullOrWhiteSpace(Name) && String.IsNullOrWhiteSpace(Description))
			{
				FileName = Name = Description = "Default";
			}
			else if (String.IsNullOrWhiteSpace(FileName) && String.IsNullOrWhiteSpace(Name) && !String.IsNullOrWhiteSpace(Description))
			{
				FileName = Name = Description;
			}
			else if (String.IsNullOrWhiteSpace(FileName) && !String.IsNullOrWhiteSpace(Name) && String.IsNullOrWhiteSpace(Description))
			{
				FileName = Description = Name;
			}
			else if (String.IsNullOrWhiteSpace(FileName) && !String.IsNullOrWhiteSpace(Name) && !String.IsNullOrWhiteSpace(Description))
			{
				FileName = Description;
			}
			else if (!String.IsNullOrWhiteSpace(FileName) && String.IsNullOrWhiteSpace(Name) && String.IsNullOrWhiteSpace(Description))
			{
				Name = Description = FileName;
			}
			else if (!String.IsNullOrWhiteSpace(FileName) && String.IsNullOrWhiteSpace(Name) && !String.IsNullOrWhiteSpace(Description))
			{
				Name = Description;
			}
			else if (!String.IsNullOrWhiteSpace(FileName) && !String.IsNullOrWhiteSpace(Name) && String.IsNullOrWhiteSpace(Description))
			{
				Description = Name;
			}
			// Otherwise, all three are already filled in and nothing is needed

			// Output initial statistics, for kicks
			if (stats)
			{
				WriteStatsToScreen(recalculate: (RomCount + DiskCount == 0), baddumpCol: true, nodumpCol: true);
			}

			// Bucket and dedupe according to the flag
			if (DedupeRoms == DedupeType.Full)
			{
				BucketBy(SortedBy.CRC, DedupeRoms, norename: norename);
			}
			else if (DedupeRoms == DedupeType.Game)
			{
				BucketBy(SortedBy.Game, DedupeRoms, norename: norename);
			}

			// Bucket roms by game name, if not already
			BucketBy(SortedBy.Game, DedupeType.None, norename: norename);

			// Output the number of items we're going to be writing
			Globals.Logger.User("A total of {0} items will be written out to '{1}'", Count, FileName);

			// If we are removing hashes, do that now
			if (StripHash != 0x0)
			{
				StripHashesFromItems();
			}

			// If we are removing scene dates, do that now
			if (SceneDateStrip)
			{
				StripSceneDatesFromItems();
			}

			// Get the outfile names
			Dictionary<DatFormat, string> outfiles = CreateOutfileNames(outDir, overwrite);

			try
			{
				// Write out all required formats
				Parallel.ForEach(outfiles.Keys, Globals.ParallelOptions, datFormat =>
				{
					string outfile = outfiles[datFormat];
					try
					{
						Utilities.GetDatFile(datFormat, this)?.WriteToFile(outfile, ignoreblanks);
					}
					catch (Exception ex)
					{
						Globals.Logger.Error("Datfile {0} could not be written out: {1}", outfile, ex.ToString());
					}
				});
			}
			catch (Exception ex)
			{
				Globals.Logger.Error(ex.ToString());
				return false;
			}

			return true;
		}
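
		// A minimal usage sketch (hypothetical path): write the current DAT to
		// "out" in whatever format(s) the DatFormat flags request, skipping
		// blank roms on output:
		//
		//     bool written = datFile.Write(outDir: "out", ignoreblanks: true);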

		/// <summary>
		/// Create and open an output file for writing directly from the internal dictionary
		/// </summary>
		/// <param name="outfile">Name of the file to write to</param>
		/// <param name="ignoreblanks">True if blank roms should be skipped on output, false otherwise (default)</param>
		/// <returns>True if the DAT was written correctly, false otherwise</returns>
		public virtual bool WriteToFile(string outfile, bool ignoreblanks = false)
		{
			throw new NotImplementedException();
		}
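
		// WriteToFile is meant to be overridden by each format-specific DatFile
		// subclass; the base implementation deliberately throws. A minimal sketch
		// of an override (hypothetical subclass, not one of the shipped formats):
		//
		//     public class ExampleDat : DatFile
		//     {
		//         public override bool WriteToFile(string outfile, bool ignoreblanks = false)
		//         {
		//             // open outfile, walk the keyed items, serialize each one
		//             return true;
		//         }
		//     }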

		/// <summary>
		/// Generate a proper outfile name based on a DAT and output directory
		/// </summary>
		/// <param name="outDir">Output directory</param>
		/// <param name="overwrite">True if we ignore existing files (default), false otherwise</param>
		/// <returns>Dictionary of output formats mapped to file names</returns>
		private Dictionary<DatFormat, string> CreateOutfileNames(string outDir, bool overwrite = true)
		{
			// Create the output dictionary
			Dictionary<DatFormat, string> outfileNames = new Dictionary<DatFormat, string>();

			// Double check the outDir for the end delim
			if (!outDir.EndsWith(Path.DirectorySeparatorChar.ToString()))
			{
				outDir += Path.DirectorySeparatorChar;
			}

			// Get the extensions from the output type

			// AttractMode
			if ((DatFormat & DatFormat.AttractMode) != 0)
			{
				outfileNames.Add(DatFormat.AttractMode, CreateOutfileNamesHelper(outDir, ".txt", overwrite));
			}

			// ClrMamePro
			if ((DatFormat & DatFormat.ClrMamePro) != 0)
			{
				outfileNames.Add(DatFormat.ClrMamePro, CreateOutfileNamesHelper(outDir, ".dat", overwrite));
			}

			// CSV
			if ((DatFormat & DatFormat.CSV) != 0)
			{
				outfileNames.Add(DatFormat.CSV, CreateOutfileNamesHelper(outDir, ".csv", overwrite));
			}

			// DOSCenter
			if ((DatFormat & DatFormat.DOSCenter) != 0
				&& (DatFormat & DatFormat.ClrMamePro) == 0
				&& (DatFormat & DatFormat.RomCenter) == 0)
			{
				outfileNames.Add(DatFormat.DOSCenter, CreateOutfileNamesHelper(outDir, ".dat", overwrite));
			}
			if ((DatFormat & DatFormat.DOSCenter) != 0
				&& ((DatFormat & DatFormat.ClrMamePro) != 0
					|| (DatFormat & DatFormat.RomCenter) != 0))
			{
				outfileNames.Add(DatFormat.DOSCenter, CreateOutfileNamesHelper(outDir, ".dc.dat", overwrite));
			}

			// MAME Listroms
			if ((DatFormat & DatFormat.Listroms) != 0
				&& (DatFormat & DatFormat.AttractMode) == 0)
			{
				outfileNames.Add(DatFormat.Listroms, CreateOutfileNamesHelper(outDir, ".txt", overwrite));
			}
			if ((DatFormat & DatFormat.Listroms) != 0
				&& (DatFormat & DatFormat.AttractMode) != 0)
			{
				outfileNames.Add(DatFormat.Listroms, CreateOutfileNamesHelper(outDir, ".lr.txt", overwrite));
			}

			// Logiqx XML
			if ((DatFormat & DatFormat.Logiqx) != 0)
			{
				outfileNames.Add(DatFormat.Logiqx, CreateOutfileNamesHelper(outDir, ".xml", overwrite));
			}

			// Missfile
			if ((DatFormat & DatFormat.MissFile) != 0
				&& (DatFormat & DatFormat.AttractMode) == 0)
			{
				outfileNames.Add(DatFormat.MissFile, CreateOutfileNamesHelper(outDir, ".txt", overwrite));
			}
			if ((DatFormat & DatFormat.MissFile) != 0
				&& (DatFormat & DatFormat.AttractMode) != 0)
			{
				outfileNames.Add(DatFormat.MissFile, CreateOutfileNamesHelper(outDir, ".miss.txt", overwrite));
			}

			// OfflineList
			if ((DatFormat & DatFormat.OfflineList) != 0
				&& (DatFormat & DatFormat.Logiqx) == 0
				&& (DatFormat & DatFormat.SabreDat) == 0
				&& (DatFormat & DatFormat.SoftwareList) == 0)
			{
				outfileNames.Add(DatFormat.OfflineList, CreateOutfileNamesHelper(outDir, ".xml", overwrite));
			}
			if ((DatFormat & DatFormat.OfflineList) != 0
				&& ((DatFormat & DatFormat.Logiqx) != 0
					|| (DatFormat & DatFormat.SabreDat) != 0
					|| (DatFormat & DatFormat.SoftwareList) != 0))
			{
				outfileNames.Add(DatFormat.OfflineList, CreateOutfileNamesHelper(outDir, ".ol.xml", overwrite));
			}

			// Redump MD5
			if ((DatFormat & DatFormat.RedumpMD5) != 0)
			{
				outfileNames.Add(DatFormat.RedumpMD5, CreateOutfileNamesHelper(outDir, ".md5", overwrite));
			}

			// Redump SFV
			if ((DatFormat & DatFormat.RedumpSFV) != 0)
			{
				outfileNames.Add(DatFormat.RedumpSFV, CreateOutfileNamesHelper(outDir, ".sfv", overwrite));
			}

			// Redump SHA-1
			if ((DatFormat & DatFormat.RedumpSHA1) != 0)
			{
				outfileNames.Add(DatFormat.RedumpSHA1, CreateOutfileNamesHelper(outDir, ".sha1", overwrite));
			}

			// Redump SHA-256
			if ((DatFormat & DatFormat.RedumpSHA256) != 0)
			{
				outfileNames.Add(DatFormat.RedumpSHA256, CreateOutfileNamesHelper(outDir, ".sha256", overwrite));
			}

			// RomCenter
			if ((DatFormat & DatFormat.RomCenter) != 0
				&& (DatFormat & DatFormat.ClrMamePro) == 0)
			{
				outfileNames.Add(DatFormat.RomCenter, CreateOutfileNamesHelper(outDir, ".dat", overwrite));
			}
			if ((DatFormat & DatFormat.RomCenter) != 0
				&& (DatFormat & DatFormat.ClrMamePro) != 0)
			{
				outfileNames.Add(DatFormat.RomCenter, CreateOutfileNamesHelper(outDir, ".rc.dat", overwrite));
			}

			// SabreDAT
			if ((DatFormat & DatFormat.SabreDat) != 0 && (DatFormat & DatFormat.Logiqx) == 0)
			{
				outfileNames.Add(DatFormat.SabreDat, CreateOutfileNamesHelper(outDir, ".xml", overwrite));
			}
			if ((DatFormat & DatFormat.SabreDat) != 0 && (DatFormat & DatFormat.Logiqx) != 0)
			{
				outfileNames.Add(DatFormat.SabreDat, CreateOutfileNamesHelper(outDir, ".sd.xml", overwrite));
			}

			// Software List
			if ((DatFormat & DatFormat.SoftwareList) != 0
				&& (DatFormat & DatFormat.Logiqx) == 0
				&& (DatFormat & DatFormat.SabreDat) == 0)
			{
				outfileNames.Add(DatFormat.SoftwareList, CreateOutfileNamesHelper(outDir, ".xml", overwrite));
			}
			if ((DatFormat & DatFormat.SoftwareList) != 0
				&& ((DatFormat & DatFormat.Logiqx) != 0
					|| (DatFormat & DatFormat.SabreDat) != 0))
			{
				outfileNames.Add(DatFormat.SoftwareList, CreateOutfileNamesHelper(outDir, ".sl.xml", overwrite));
			}

			// TSV
			if ((DatFormat & DatFormat.TSV) != 0)
			{
				outfileNames.Add(DatFormat.TSV, CreateOutfileNamesHelper(outDir, ".tsv", overwrite));
			}

			return outfileNames;
		}
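
		// For example, since the DatFormat flags combine bitwise, requesting
		// DatFormat.Logiqx | DatFormat.SabreDat yields "<name>.xml" for Logiqx and
		// "<name>.sd.xml" for SabreDat, so the two XML outputs never collide.
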
		/// <summary>
		/// Help generating the outfile name
		/// </summary>
		/// <param name="outDir">Output directory</param>
		/// <param name="extension">Extension to use for the file</param>
		/// <param name="overwrite">True if we ignore existing files, false otherwise</param>
		/// <returns>String containing the new filename</returns>
		private string CreateOutfileNamesHelper(string outDir, string extension, bool overwrite)
		{
			string filename = (String.IsNullOrWhiteSpace(FileName) ? Description : FileName);
			string outfile = outDir + filename + extension;

			// Collapse any doubled directory separators
			outfile = outfile.Replace(
				Path.DirectorySeparatorChar.ToString() + Path.DirectorySeparatorChar.ToString(),
				Path.DirectorySeparatorChar.ToString());

			if (!overwrite)
			{
				int i = 1;
				while (File.Exists(outfile))
				{
					outfile = outDir + filename + "_" + i + extension;
					outfile = outfile.Replace(
						Path.DirectorySeparatorChar.ToString() + Path.DirectorySeparatorChar.ToString(),
						Path.DirectorySeparatorChar.ToString());
					i++;
				}
			}

			return outfile;
		}
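
		// With overwrite == false and "out/Example.dat" already on disk, the helper
		// probes "out/Example_1.dat", "out/Example_2.dat", ... until it finds a free
		// name (hypothetical file names for illustration).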

		#endregion

		#endregion // Instance Methods

		#region Static Methods

		#region Statistics

		/// <summary>
		/// Output the stats for a list of input dats as files in a human-readable format
		/// </summary>
		/// <param name="inputs">List of input files and folders</param>
		/// <param name="reportName">Name of the output file</param>
		/// <param name="outDir">Output directory to write the reports to</param>
		/// <param name="single">True if single DAT stats are output, false otherwise</param>
		/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
		/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
		/// <param name="statDatFormat">Set the statistics output format to use</param>
		public static void OutputStats(List<string> inputs, string reportName, string outDir, bool single,
			bool baddumpCol, bool nodumpCol, StatReportFormat statDatFormat)
		{
			// If there's no output format, set the default
			if (statDatFormat == StatReportFormat.None)
			{
				statDatFormat = StatReportFormat.Textfile;
			}

			// Get the proper output file name
			if (String.IsNullOrWhiteSpace(reportName))
			{
				reportName = "report";
			}

			// Get the proper output directory name
			outDir = Utilities.EnsureOutputDirectory(outDir);

			// Get the dictionary of desired output report names
			Dictionary<StatReportFormat, string> outputs = CreateOutStatsNames(outDir, statDatFormat, reportName);

			// Make sure we have all files and then order them
			List<string> files = Utilities.GetOnlyFilesFromInputs(inputs);
			files = files
				.OrderBy(i => Path.GetDirectoryName(i))
				.ThenBy(i => Path.GetFileName(i))
				.ToList();

			// Get all of the writers that we need
			List<BaseReport> reports = new List<BaseReport>();

			// Loop through and output based on the inputs
			foreach (KeyValuePair<StatReportFormat, string> kvp in outputs)
			{
				reports.Add(Utilities.GetBaseReport(kvp.Key, kvp.Value, baddumpCol, nodumpCol));
			}

			// Write the header, if any
			reports.ForEach(report => report.WriteHeader());

			// Init all total variables
			DatStats totalStats = new DatStats();

			// Init directory-level variables
			string lastdir = null;
			string basepath = null;
			DatStats dirStats = new DatStats();

			// Now process each of the input files
			foreach (string file in files)
			{
				// Get the directory for the current file
				string thisdir = Path.GetDirectoryName(file);
				basepath = Path.GetDirectoryName(Path.GetDirectoryName(file));

				// If we don't have the first file and the directory has changed, show the previous directory stats and reset
				if (lastdir != null && thisdir != lastdir)
				{
					// Output separator if needed
					reports.ForEach(report => report.WriteMidSeparator());

					DatFile lastdirdat = new DatFile
					{
						FileName = "DIR: " + HttpUtility.HtmlEncode(lastdir),
						_datStats = dirStats,
					};
					lastdirdat.WriteStatsToScreen(recalculate: false, game: dirStats.GameCount, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
					reports.ForEach(report => report.ReplaceDatFile(lastdirdat));
					reports.ForEach(report => report.Write(game: dirStats.GameCount));

					// Write the mid-footer, if any
					reports.ForEach(report => report.WriteFooterSeparator());

					// Write the header, if any
					reports.ForEach(report => report.WriteMidHeader());

					// Reset the directory stats
					dirStats.Reset();
				}

				Globals.Logger.Verbose("Beginning stat collection for '{0}'", false, file);
				List<string> games = new List<string>();
				DatFile datdata = new DatFile();
				datdata.Parse(file, 0, 0);
				datdata.BucketBy(SortedBy.Game, DedupeType.None, norename: true);

				// Output single DAT stats (if asked)
				Globals.Logger.User("Adding stats for file '{0}'\n", false, file);
				if (single)
				{
					datdata.WriteStatsToScreen(recalculate: false, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
					reports.ForEach(report => report.ReplaceDatFile(datdata));
					reports.ForEach(report => report.Write());
				}

				// Add single DAT stats to dir
				dirStats.AddStats(datdata._datStats);
				dirStats.GameCount += datdata.Keys.Count();

				// Add single DAT stats to totals
				totalStats.AddStats(datdata._datStats);
				totalStats.GameCount += datdata.Keys.Count();

				// Make sure to assign the new directory
				lastdir = thisdir;
			}

			// Output the directory stats one last time
			reports.ForEach(report => report.WriteMidSeparator());

			if (single)
			{
				DatFile dirdat = new DatFile
				{
					FileName = "DIR: " + HttpUtility.HtmlEncode(lastdir),
					_datStats = dirStats,
				};
				dirdat.WriteStatsToScreen(recalculate: false, game: dirStats.GameCount, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
				reports.ForEach(report => report.ReplaceDatFile(dirdat));
				reports.ForEach(report => report.Write(dirStats.GameCount));
			}

			// Write the mid-footer, if any
			reports.ForEach(report => report.WriteFooterSeparator());

			// Write the header, if any
			reports.ForEach(report => report.WriteMidHeader());

			// Reset the directory stats
			dirStats.Reset();

			// Output total DAT stats
			DatFile totaldata = new DatFile
			{
				FileName = "DIR: All DATs",
				_datStats = totalStats,
			};
			totaldata.WriteStatsToScreen(recalculate: false, game: totalStats.GameCount, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
			reports.ForEach(report => report.ReplaceDatFile(totaldata));
			reports.ForEach(report => report.Write(totalStats.GameCount));

			// Output footer if needed
			reports.ForEach(report => report.WriteFooter());

			Globals.Logger.User(@"
Please check the log folder if the stats scrolled offscreen", false);
		}
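
		// A minimal usage sketch (hypothetical paths): gather stats across every
		// DAT under "dats", writing per-DAT and per-directory breakdowns to an
		// HTML report named "report.html" in "stats":
		//
		//     DatFile.OutputStats(new List<string> { "dats" }, "report", "stats",
		//         single: true, baddumpCol: true, nodumpCol: true,
		//         statDatFormat: StatReportFormat.HTML);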

		/// <summary>
		/// Get the proper file name for each requested stat output format
		/// </summary>
		/// <param name="outDir">Output path to use</param>
		/// <param name="statDatFormat">StatReportFormat to get the extensions for</param>
		/// <param name="reportName">Name of the input file to use</param>
		/// <param name="overwrite">True if we ignore existing files (default), false otherwise</param>
		/// <returns>Dictionary of output formats mapped to file names</returns>
		private static Dictionary<StatReportFormat, string> CreateOutStatsNames(string outDir, StatReportFormat statDatFormat, string reportName, bool overwrite = true)
		{
			Dictionary<StatReportFormat, string> output = new Dictionary<StatReportFormat, string>();

			// First try to create the output directory if we need to
			if (!Directory.Exists(outDir))
			{
				Directory.CreateDirectory(outDir);
			}

			// Double check the outDir for the end delim
			if (!outDir.EndsWith(Path.DirectorySeparatorChar.ToString()))
			{
				outDir += Path.DirectorySeparatorChar;
			}

			// For each output format, get the appropriate file name
			if ((statDatFormat & StatReportFormat.Textfile) != 0)
			{
				output.Add(StatReportFormat.Textfile, CreateOutStatsNamesHelper(outDir, ".txt", reportName, overwrite));
			}
			if ((statDatFormat & StatReportFormat.CSV) != 0)
			{
				output.Add(StatReportFormat.CSV, CreateOutStatsNamesHelper(outDir, ".csv", reportName, overwrite));
			}
			if ((statDatFormat & StatReportFormat.HTML) != 0)
			{
				output.Add(StatReportFormat.HTML, CreateOutStatsNamesHelper(outDir, ".html", reportName, overwrite));
			}
			if ((statDatFormat & StatReportFormat.TSV) != 0)
			{
				output.Add(StatReportFormat.TSV, CreateOutStatsNamesHelper(outDir, ".tsv", reportName, overwrite));
			}

			return output;
		}

		/// <summary>
		/// Help generating the outstats name
		/// </summary>
		/// <param name="outDir">Output directory</param>
		/// <param name="extension">Extension to use for the file</param>
		/// <param name="reportName">Name of the input file to use</param>
		/// <param name="overwrite">True if we ignore existing files, false otherwise</param>
		/// <returns>String containing the new filename</returns>
		private static string CreateOutStatsNamesHelper(string outDir, string extension, string reportName, bool overwrite)
		{
			string outfile = outDir + reportName + extension;

			// Collapse any doubled directory separators
			outfile = outfile.Replace(
				Path.DirectorySeparatorChar.ToString() + Path.DirectorySeparatorChar.ToString(),
				Path.DirectorySeparatorChar.ToString());

			if (!overwrite)
			{
				int i = 1;
				while (File.Exists(outfile))
				{
					outfile = outDir + reportName + "_" + i + extension;
					outfile = outfile.Replace(
						Path.DirectorySeparatorChar.ToString() + Path.DirectorySeparatorChar.ToString(),
						Path.DirectorySeparatorChar.ToString());
					i++;
				}
			}

			return outfile;
		}

		#endregion

		#endregion // Static Methods
}
}