using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using System.Xml.Serialization;

using SabreTools.Library.Data;
using SabreTools.Library.DatItems;
using SabreTools.Library.FileTypes;
using SabreTools.Library.Filtering;
using SabreTools.Library.IO;
using SabreTools.Library.Reports;
using SabreTools.Library.Skippers;
using SabreTools.Library.Tools;
using NaturalSort;
using Newtonsoft.Json;

namespace SabreTools.Library.DatFiles
{
    /// <summary>
    /// Represents a format-agnostic DAT
    /// </summary>
    [JsonObject("datfile"), XmlRoot("datfile")]
    public abstract class DatFile
    {
        #region Fields

        /// <summary>
        /// Header values
        /// </summary>
        [JsonProperty("header")]
        [XmlElement("header")]
        public DatHeader Header { get; set; } = new DatHeader();

        /// <summary>
        /// DatItems and related statistics
        /// </summary>
        [JsonProperty("items")]
        [XmlElement("items")]
        public ItemDictionary Items { get; set; } = new ItemDictionary();

        #endregion

        #region Constructors

        /// <summary>
        /// Create a new DatFile from an existing one
        /// </summary>
        /// <param name="datFile">DatFile to get the values from</param>
        public DatFile(DatFile datFile)
        {
            if (datFile != null)
            {
                Header = datFile.Header;
                Items = datFile.Items;
            }
        }

        /// <summary>
        /// Create a specific type of DatFile to be used based on a format and a base DAT
        /// </summary>
        /// <param name="datFormat">Format of the DAT to be created</param>
        /// <param name="baseDat">DatFile containing the information to use in specific operations</param>
        /// <param name="quotes">For relevant types, assume the usage of quotes</param>
        /// <returns>DatFile of the specific internal type that corresponds to the inputs</returns>
        public static DatFile Create(DatFormat? datFormat = null, DatFile baseDat = null, bool quotes = true)
        {
            switch (datFormat)
            {
                case DatFormat.AttractMode:
                    return new AttractMode(baseDat);

                case DatFormat.ClrMamePro:
                    return new ClrMamePro(baseDat, quotes);

                case DatFormat.CSV:
                    return new SeparatedValue(baseDat, ',');

                case DatFormat.DOSCenter:
                    return new DosCenter(baseDat);

                case DatFormat.EverdriveSMDB:
                    return new EverdriveSMDB(baseDat);

                case DatFormat.Listrom:
                    return new Listrom(baseDat);

                case DatFormat.Listxml:
                    return new Listxml(baseDat);

                case DatFormat.Logiqx:
                    return new Logiqx(baseDat, false);

                case DatFormat.LogiqxDeprecated:
                    return new Logiqx(baseDat, true);

                case DatFormat.MissFile:
                    return new Missfile(baseDat);

                case DatFormat.OfflineList:
                    return new OfflineList(baseDat);

                case DatFormat.OpenMSX:
                    return new OpenMSX(baseDat);

                case DatFormat.RedumpMD5:
                    return new Hashfile(baseDat, Hash.MD5);

#if NET_FRAMEWORK
                case DatFormat.RedumpRIPEMD160:
                    return new Hashfile(baseDat, Hash.RIPEMD160);
#endif

                case DatFormat.RedumpSFV:
                    return new Hashfile(baseDat, Hash.CRC);

                case DatFormat.RedumpSHA1:
                    return new Hashfile(baseDat, Hash.SHA1);

                case DatFormat.RedumpSHA256:
                    return new Hashfile(baseDat, Hash.SHA256);

                case DatFormat.RedumpSHA384:
                    return new Hashfile(baseDat, Hash.SHA384);

                case DatFormat.RedumpSHA512:
                    return new Hashfile(baseDat, Hash.SHA512);

                case DatFormat.RedumpSpamSum:
                    return new Hashfile(baseDat, Hash.SpamSum);

                case DatFormat.RomCenter:
                    return new RomCenter(baseDat);

                case DatFormat.SabreJSON:
                    return new SabreJSON(baseDat);

                case DatFormat.SabreXML:
                    return new SabreXML(baseDat);

                case DatFormat.SoftwareList:
                    return new SoftwareList(baseDat);

                case DatFormat.SSV:
                    return new SeparatedValue(baseDat, ';');

                case DatFormat.TSV:
                    return new SeparatedValue(baseDat, '\t');

                // We use new-style Logiqx as a backup for generic DatFile
                case null:
                default:
                    return new Logiqx(baseDat, false);
            }
        }
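
        // Example usage (illustrative sketch, using only the formats handled above): the factory
        // maps a format flag to the matching concrete type, so callers never construct the
        // subclasses directly.
        //
        //     DatFile csv = DatFile.Create(DatFormat.CSV, baseDat); // SeparatedValue with ',' delimiter
        //     DatFile fallback = DatFile.Create();                  // new-style Logiqx by default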

        /// <summary>
        /// Create a new DatFile from an existing DatHeader
        /// </summary>
        /// <param name="datHeader">DatHeader to get the values from</param>
        public static DatFile Create(DatHeader datHeader)
        {
            DatFile datFile = Create(datHeader.DatFormat);
            datFile.Header = (DatHeader)datHeader.Clone();
            return datFile;
        }

        /// <summary>
        /// Add items from another DatFile to the existing DatFile
        /// </summary>
        /// <param name="datFile">DatFile to add from</param>
        /// <param name="delete">If items should be deleted from the source DatFile</param>
        public void AddFromExisting(DatFile datFile, bool delete = false)
        {
            // Get the list of keys from the DAT
            var keys = datFile.Items.Keys.ToList();
            foreach (string key in keys)
            {
                // Add everything from the key to the internal DAT
                Items.AddRange(key, datFile.Items[key]);

                // Now remove the key from the source DAT
                if (delete)
                    datFile.Items.Remove(key);
            }

            // Now remove the file dictionary from the source DAT
            if (delete)
                datFile.Items = null;
        }

        /// <summary>
        /// Apply a DatHeader to an existing DatFile
        /// </summary>
        /// <param name="datHeader">DatHeader to get the values from</param>
        public void ApplyDatHeader(DatHeader datHeader)
        {
            Header.ConditionalCopy(datHeader);
        }

        /// <summary>
        /// Fill the header values based on existing Header and path
        /// </summary>
        /// <param name="path">Path used for creating a name, if necessary</param>
        /// <param name="bare">True if the date should be omitted from name and description, false otherwise</param>
        public void FillHeaderFromPath(string path, bool bare)
        {
            // If the description is defined but not the name, set the name from the description
            if (string.IsNullOrWhiteSpace(Header.Name) && !string.IsNullOrWhiteSpace(Header.Description))
            {
                Header.Name = Header.Description;
            }

            // If the name is defined but not the description, set the description from the name
            else if (!string.IsNullOrWhiteSpace(Header.Name) && string.IsNullOrWhiteSpace(Header.Description))
            {
                Header.Description = Header.Name + (bare ? string.Empty : $" ({Header.Date})");
            }

            // If neither the name nor the description is defined, set them from the automatic values
            else if (string.IsNullOrWhiteSpace(Header.Name) && string.IsNullOrWhiteSpace(Header.Description))
            {
                string[] splitpath = path.TrimEnd(Path.DirectorySeparatorChar).Split(Path.DirectorySeparatorChar);
                Header.Name = splitpath.Last();
                Header.Description = Header.Name + (bare ? string.Empty : $" ({Header.Date})");
            }
        }
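
        // Example (illustrative): with an empty header, path = @"C:\DATs\MAME 0.224" and
        // bare: false, FillHeaderFromPath sets Name to "MAME 0.224" and Description to
        // "MAME 0.224 (<Header.Date>)".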

        #endregion

        #region Converting and Updating

        /// <summary>
        /// Replace item values from the base set represented by the current DAT
        /// </summary>
        /// <param name="intDat">DatFile to replace the values in</param>
        /// <param name="updateFields">List of Fields representing what should be updated</param>
        /// <param name="onlySame">True if descriptions should only be replaced if the game name is the same, false otherwise</param>
        public void BaseReplace(DatFile intDat, List<Field> updateFields, bool onlySame)
        {
            Globals.Logger.User($"Replacing items in '{intDat.Header.FileName}' from the base DAT");

            // If we are matching based on DatItem fields of any sort
            if (updateFields.Intersect(DatItem.DatItemFields).Any())
            {
                // For comparison's sake, we want to use CRC as the base bucketing
                Items.BucketBy(Field.DatItem_CRC, DedupeType.Full);
                intDat.Items.BucketBy(Field.DatItem_CRC, DedupeType.None);

                // Then we do a hashwise comparison against the base DAT
                Parallel.ForEach(intDat.Items.Keys, Globals.ParallelOptions, key =>
                {
                    List<DatItem> datItems = intDat.Items[key];
                    List<DatItem> newDatItems = new List<DatItem>();
                    foreach (DatItem datItem in datItems)
                    {
                        List<DatItem> dupes = Items.GetDuplicates(datItem, sorted: true);
                        DatItem newDatItem = datItem.Clone() as DatItem;

                        // Replace fields from the first duplicate, if we have one
                        if (dupes.Count > 0)
                            newDatItem.ReplaceFields(dupes.First(), updateFields);

                        newDatItems.Add(newDatItem);
                    }

                    // Now add the new list to the key
                    intDat.Items.Remove(key);
                    intDat.Items.AddRange(key, newDatItems);
                });
            }

            // If we are matching based on Machine fields of any sort
            if (updateFields.Intersect(DatItem.MachineFields).Any())
            {
                // For comparison's sake, we want to use Machine Name as the base bucketing
                Items.BucketBy(Field.Machine_Name, DedupeType.Full);
                intDat.Items.BucketBy(Field.Machine_Name, DedupeType.None);

                // Then we do a namewise comparison against the base DAT
                Parallel.ForEach(intDat.Items.Keys, Globals.ParallelOptions, key =>
                {
                    List<DatItem> datItems = intDat.Items[key];
                    List<DatItem> newDatItems = new List<DatItem>();
                    foreach (DatItem datItem in datItems)
                    {
                        DatItem newDatItem = datItem.Clone() as DatItem;
                        if (Items.ContainsKey(key) && Items[key].Count() > 0)
                            newDatItem.Machine.ReplaceFields(Items[key][0].Machine, updateFields, onlySame);

                        newDatItems.Add(newDatItem);
                    }

                    // Now add the new list to the key
                    intDat.Items.Remove(key);
                    intDat.Items.AddRange(key, newDatItems);
                });
            }
        }
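
        // Example (illustrative sketch; assumes a Field.Machine_Description value exists alongside
        // the Field.Machine_Name used above):
        //
        //     baseDat.BaseReplace(intDat, new List<Field> { Field.Machine_Description }, onlySame: false);
        //
        // This rewrites each machine description in intDat from the first matching machine in the base DAT.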
2019-01-08 11:49:31 -08:00
/// <summary>
2020-07-15 09:41:59 -07:00
/// Output diffs against a base set represented by the current DAT
2019-01-08 11:49:31 -08:00
/// </summary>
2020-08-27 21:40:08 -07:00
/// <param name="intDat">DatFile to replace the values in</param>
2020-08-01 15:03:00 -07:00
/// <param name="useGames">True to diff using games, false to use hashes</param>
2020-08-27 21:40:08 -07:00
public void DiffAgainst ( DatFile intDat , bool useGames )
2019-01-08 11:49:31 -08:00
{
2020-08-01 15:03:00 -07:00
// For comparison's sake, we want to use a base ordering
if ( useGames )
2020-08-24 22:25:47 -07:00
Items . BucketBy ( Field . Machine_Name , DedupeType . None ) ;
2020-08-01 15:03:00 -07:00
else
2020-08-25 11:20:50 -07:00
Items . BucketBy ( Field . DatItem_CRC , DedupeType . None ) ;
2020-07-15 09:41:59 -07:00
2020-08-27 21:40:08 -07:00
Globals . Logger . User ( $"Comparing '{intDat.Header.FileName}' to base DAT" ) ;
2020-07-15 09:41:59 -07:00
2020-08-27 21:40:08 -07:00
// For comparison's sake, we want to a the base bucketing
if ( useGames )
intDat . Items . BucketBy ( Field . Machine_Name , DedupeType . None ) ;
else
intDat . Items . BucketBy ( Field . DatItem_CRC , DedupeType . Full ) ;
2020-07-15 09:41:59 -07:00
2020-08-27 21:40:08 -07:00
// Then we compare against the base DAT
List < string > keys = intDat . Items . Keys . ToList ( ) ;
Parallel . ForEach ( keys , Globals . ParallelOptions , key = >
{
// Game Against uses game names
2020-08-01 15:03:00 -07:00
if ( useGames )
2019-01-08 11:49:31 -08:00
{
2020-08-27 21:40:08 -07:00
// If the base DAT doesn't contain the key, keep it
if ( ! Items . ContainsKey ( key ) )
return ;
2020-08-01 15:03:00 -07:00
2020-08-27 21:40:08 -07:00
// If the number of items is different, then keep it
if ( Items [ key ] . Count ! = intDat . Items [ key ] . Count )
return ;
2019-01-08 11:49:31 -08:00
2020-08-27 21:40:08 -07:00
// Otherwise, compare by name and hash the remaining files
bool exactMatch = true ;
foreach ( DatItem item in intDat . Items [ key ] )
2020-08-01 15:03:00 -07:00
{
2020-08-27 21:40:08 -07:00
// TODO: Make this granular to name as well
if ( ! Items [ key ] . Contains ( item ) )
2020-08-01 15:03:00 -07:00
{
2020-08-27 21:40:08 -07:00
exactMatch = false ;
break ;
2020-08-01 15:03:00 -07:00
}
}
2019-01-08 11:49:31 -08:00
2020-08-27 21:40:08 -07:00
// If we have an exact match, remove the game
if ( exactMatch )
intDat . Items . Remove ( key ) ;
}
2019-01-08 11:49:31 -08:00
2020-08-27 21:40:08 -07:00
// Standard Against uses hashes
else
{
List < DatItem > datItems = intDat . Items [ key ] ;
List < DatItem > keepDatItems = new List < DatItem > ( ) ;
foreach ( DatItem datItem in datItems )
{
if ( ! Items . HasDuplicates ( datItem , true ) )
keepDatItems . Add ( datItem ) ;
}
2019-01-08 11:49:31 -08:00
2020-08-27 21:40:08 -07:00
// Now add the new list to the key
intDat . Items . Remove ( key ) ;
intDat . Items . AddRange ( key , keepDatItems ) ;
}
} ) ;
2019-01-08 11:49:31 -08:00
}
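
        // Example (illustrative): baseDat.DiffAgainst(newDat, useGames: false) strips from newDat
        // every item whose hashes already exist in baseDat, leaving only the truly new files.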

        /// <summary>
        /// Output cascading diffs
        /// </summary>
        /// <param name="datHeaders">Dat headers used optionally</param>
        /// <returns>List of DatFiles representing the individually indexed items</returns>
        public List<DatFile> DiffCascade(List<DatHeader> datHeaders)
        {
            // Create a list of DatData objects representing output files
            List<DatFile> outDats = new List<DatFile>();

            // Ensure the current DatFile is sorted optimally
            Items.BucketBy(Field.DatItem_CRC, DedupeType.None);

            // Loop through each of the inputs and get or create a new DatData object
            InternalStopwatch watch = new InternalStopwatch("Initializing and filling all output DATs");

            // Create the DatFiles from the set of headers
            DatFile[] outDatsArray = new DatFile[datHeaders.Count];
            Parallel.For(0, datHeaders.Count, Globals.ParallelOptions, j =>
            {
                DatFile diffData = Create(datHeaders[j]);
                diffData.Items = new ItemDictionary();
                FillWithSourceIndex(diffData, j);
                outDatsArray[j] = diffData;
            });

            outDats = outDatsArray.ToList();
            watch.Stop();

            return outDats;
        }
/// <summary>
2020-08-01 21:42:28 -07:00
/// Output duplicate item diff
2019-01-08 11:49:31 -08:00
/// </summary>
2020-07-15 09:41:59 -07:00
/// <param name="inputs">List of inputs to write out from</param>
2020-08-27 22:27:23 -07:00
public DatFile DiffDuplicates ( List < string > inputs )
2019-01-08 11:49:31 -08:00
{
2020-08-01 21:42:28 -07:00
List < ParentablePath > paths = inputs . Select ( i = > new ParentablePath ( i ) ) . ToList ( ) ;
2020-08-27 22:27:23 -07:00
return DiffDuplicates ( paths ) ;
2020-08-01 21:42:28 -07:00
}
2019-01-08 11:49:31 -08:00
2020-08-01 21:42:28 -07:00
/// <summary>
/// Output duplicate item diff
/// </summary>
/// <param name="inputs">List of inputs to write out from</param>
2020-08-27 22:27:23 -07:00
public DatFile DiffDuplicates ( List < ParentablePath > inputs )
2020-08-01 21:42:28 -07:00
{
InternalStopwatch watch = new InternalStopwatch ( "Initializing duplicate DAT" ) ;
2019-01-08 11:49:31 -08:00
2020-07-15 09:41:59 -07:00
// Fill in any information not in the base DAT
2020-07-27 10:26:08 -07:00
if ( string . IsNullOrWhiteSpace ( Header . FileName ) )
Header . FileName = "All DATs" ;
2019-01-08 11:49:31 -08:00
2020-07-27 10:26:08 -07:00
if ( string . IsNullOrWhiteSpace ( Header . Name ) )
Header . Name = "All DATs" ;
2019-01-08 11:49:31 -08:00
2020-07-27 10:26:08 -07:00
if ( string . IsNullOrWhiteSpace ( Header . Description ) )
Header . Description = "All DATs" ;
2019-01-08 11:49:31 -08:00
2020-08-01 21:42:28 -07:00
string post = " (Duplicates)" ;
DatFile dupeData = Create ( Header ) ;
dupeData . Header . FileName + = post ;
dupeData . Header . Name + = post ;
dupeData . Header . Description + = post ;
dupeData . Items = new ItemDictionary ( ) ;
2019-01-08 11:49:31 -08:00
2020-08-01 21:42:28 -07:00
watch . Stop ( ) ;
2019-01-08 11:49:31 -08:00
2020-08-01 21:42:28 -07:00
// Now, loop through the dictionary and populate the correct DATs
watch . Start ( "Populating duplicate DAT" ) ;
2019-01-08 11:49:31 -08:00
2020-08-01 21:42:28 -07:00
Parallel . ForEach ( Items . Keys , Globals . ParallelOptions , key = >
2019-01-08 11:49:31 -08:00
{
2020-08-01 21:42:28 -07:00
List < DatItem > items = DatItem . Merge ( Items [ key ] ) ;
2019-01-08 11:49:31 -08:00
2020-08-01 21:42:28 -07:00
// If the rom list is empty or null, just skip it
if ( items = = null | | items . Count = = 0 )
return ;
// Loop through and add the items correctly
foreach ( DatItem item in items )
2020-07-15 09:41:59 -07:00
{
2020-08-01 21:42:28 -07:00
if ( item . DupeType . HasFlag ( DupeType . External ) )
{
DatItem newrom = item . Clone ( ) as DatItem ;
2020-08-20 13:17:14 -07:00
newrom . Machine . Name + = $" ({Path.GetFileNameWithoutExtension(inputs[item.Source.Index].CurrentPath)})" ;
2019-01-08 11:49:31 -08:00
2020-08-01 21:42:28 -07:00
dupeData . Items . Add ( key , newrom ) ;
}
}
} ) ;
watch . Stop ( ) ;
2020-08-27 22:27:23 -07:00
return dupeData ;
2020-08-01 21:42:28 -07:00
}
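
        // Example (illustrative): items flagged DupeType.External across the inputs are cloned into
        // the "(Duplicates)" DAT, with the source file name appended, e.g. "machine (setA)".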

        /// <summary>
        /// Output non-cascading diffs
        /// </summary>
        /// <param name="inputs">List of inputs to write out from</param>
        public List<DatFile> DiffIndividuals(List<string> inputs)
        {
            List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
            return DiffIndividuals(paths);
        }

        /// <summary>
        /// Output non-cascading diffs
        /// </summary>
        /// <param name="inputs">List of inputs to write out from</param>
        public List<DatFile> DiffIndividuals(List<ParentablePath> inputs)
        {
            InternalStopwatch watch = new InternalStopwatch("Initializing all individual DATs");

            // Fill in any information not in the base DAT
            if (string.IsNullOrWhiteSpace(Header.FileName))
                Header.FileName = "All DATs";

            if (string.IsNullOrWhiteSpace(Header.Name))
                Header.Name = "All DATs";

            if (string.IsNullOrWhiteSpace(Header.Description))
                Header.Description = "All DATs";

            // Loop through each of the inputs and get or create a new DatData object
            DatFile[] outDatsArray = new DatFile[inputs.Count];
            Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
            {
                string innerpost = $" ({j} - {inputs[j].GetNormalizedFileName(true)} Only)";
                DatFile diffData = Create(Header);
                diffData.Header.FileName += innerpost;
                diffData.Header.Name += innerpost;
                diffData.Header.Description += innerpost;
                diffData.Items = new ItemDictionary();
                outDatsArray[j] = diffData;
            });

            // Create a list of DatData objects representing individual output files
            List<DatFile> outDats = outDatsArray.ToList();

            watch.Stop();

            // Now, loop through the dictionary and populate the correct DATs
            watch.Start("Populating all individual DATs");

            Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
            {
                List<DatItem> items = DatItem.Merge(Items[key]);

                // If the rom list is empty or null, just skip it
                if (items == null || items.Count == 0)
                    return;

                // Loop through and add the items correctly
                foreach (DatItem item in items)
                {
                    if (item.DupeType.HasFlag(DupeType.Internal) || item.DupeType == 0x00)
                        outDats[item.Source.Index].Items.Add(key, item);
                }
            });

            watch.Stop();

            return outDats.ToList();
        }

        /// <summary>
        /// Output non-duplicate item diff
        /// </summary>
        /// <param name="inputs">List of inputs to write out from</param>
        public DatFile DiffNoDuplicates(List<string> inputs)
        {
            List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
            return DiffNoDuplicates(paths);
        }

        /// <summary>
        /// Output non-duplicate item diff
        /// </summary>
        /// <param name="inputs">List of inputs to write out from</param>
        public DatFile DiffNoDuplicates(List<ParentablePath> inputs)
        {
            InternalStopwatch watch = new InternalStopwatch("Initializing no duplicate DAT");

            // Fill in any information not in the base DAT
            if (string.IsNullOrWhiteSpace(Header.FileName))
                Header.FileName = "All DATs";

            if (string.IsNullOrWhiteSpace(Header.Name))
                Header.Name = "All DATs";

            if (string.IsNullOrWhiteSpace(Header.Description))
                Header.Description = "All DATs";

            string post = " (No Duplicates)";
            DatFile outerDiffData = Create(Header);
            outerDiffData.Header.FileName += post;
            outerDiffData.Header.Name += post;
            outerDiffData.Header.Description += post;
            outerDiffData.Items = new ItemDictionary();

            watch.Stop();

            // Now, loop through the dictionary and populate the correct DATs
            watch.Start("Populating no duplicate DAT");

            Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
            {
                List<DatItem> items = DatItem.Merge(Items[key]);

                // If the rom list is empty or null, just skip it
                if (items == null || items.Count == 0)
                    return;

                // Loop through and add the items correctly
                foreach (DatItem item in items)
                {
                    if (item.DupeType.HasFlag(DupeType.Internal) || item.DupeType == 0x00)
                    {
                        DatItem newrom = item.Clone() as DatItem;
                        newrom.Machine.Name += $" ({Path.GetFileNameWithoutExtension(inputs[item.Source.Index].CurrentPath)})";

                        outerDiffData.Items.Add(key, newrom);
                    }
                }
            });

            watch.Stop();

            return outerDiffData;
        }

        /// <summary>
        /// Fill a DatFile with all items with a particular ItemType
        /// </summary>
        /// <param name="indexDat">DatFile to add found items to</param>
        /// <param name="itemType">ItemType to retrieve items for</param>
        public void FillWithItemType(DatFile indexDat, ItemType itemType)
        {
            // Loop through and add the items for this index to the output
            Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
            {
                List<DatItem> items = DatItem.Merge(Items[key]);

                // If the rom list is empty or null, just skip it
                if (items == null || items.Count == 0)
                    return;

                foreach (DatItem item in items)
                {
                    if (item.ItemType == itemType)
                        indexDat.Items.Add(key, item);
                }
            });
        }

        /// <summary>
        /// Fill a DatFile with all items with a particular source index ID
        /// </summary>
        /// <param name="indexDat">DatFile to add found items to</param>
        /// <param name="index">Source index ID to retrieve items for</param>
        public void FillWithSourceIndex(DatFile indexDat, int index)
        {
            // Loop through and add the items for this index to the output
            Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
            {
                List<DatItem> items = DatItem.Merge(Items[key]);

                // If the rom list is empty or null, just skip it
                if (items == null || items.Count == 0)
                    return;

                foreach (DatItem item in items)
                {
                    if (item.Source.Index == index)
                        indexDat.Items.Add(key, item);
                }
            });
        }

        /// <summary>
        /// Populate from multiple paths while returning the individual headers
        /// </summary>
        /// <param name="inputs">Paths to DATs to parse</param>
        /// <returns>List of DatHeader objects representing headers</returns>
        public List<DatHeader> PopulateUserData(List<string> inputs)
        {
            List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
            return PopulateUserData(paths);
        }

        /// <summary>
        /// Populate from multiple paths while returning the individual headers
        /// </summary>
        /// <param name="inputs">Paths to DATs to parse</param>
        /// <returns>List of DatHeader objects representing headers</returns>
        public List<DatHeader> PopulateUserData(List<ParentablePath> inputs)
        {
            DatFile[] datFiles = new DatFile[inputs.Count];
            InternalStopwatch watch = new InternalStopwatch("Processing individual DATs");

            // Parse all of the DATs into their own DatFiles in the array
            Parallel.For(0, inputs.Count, Globals.ParallelOptions, i =>
            {
                var input = inputs[i];
                Globals.Logger.User($"Adding DAT: {input.CurrentPath}");
                datFiles[i] = Create(Header.CloneFiltering());
                datFiles[i].Parse(input, i, keep: true);
            });

            watch.Stop();

            watch.Start("Populating internal DAT");
            for (int i = 0; i < inputs.Count; i++)
            {
                AddFromExisting(datFiles[i], true);
            }

            watch.Stop();

            return datFiles.Select(d => d.Header).ToList();
        }
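
        // Example (illustrative): a merge flow parses every input into the current DAT while
        // keeping the per-file headers for later use:
        //
        //     List<DatHeader> headers = datFile.PopulateUserData(new List<string> { "a.dat", "b.dat" });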

        #endregion

        #region Filtering

        /// <summary>
        /// Apply cleaning methods to the DatFile
        /// </summary>
        /// <param name="cleaner">Cleaner to use</param>
        /// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
        /// <returns>True if cleaning was successful, false on error</returns>
        public bool ApplyCleaning(Cleaner cleaner, bool throwOnError = false)
        {
            try
            {
                // Perform item-level cleaning
                CleanDatItems(cleaner);

                // Bucket and dedupe according to the flag
                if (cleaner?.DedupeRoms == DedupeType.Full)
                    Items.BucketBy(Field.DatItem_CRC, cleaner.DedupeRoms);
                else if (cleaner?.DedupeRoms == DedupeType.Game)
                    Items.BucketBy(Field.Machine_Name, cleaner.DedupeRoms);

                // Process description to machine name
                if (cleaner?.DescriptionAsName == true)
                    MachineDescriptionToName();

                // If we are removing scene dates, do that now
                if (cleaner?.SceneDateStrip == true)
                    StripSceneDatesFromItems();

                // Run the one game per region logic, if required
                if (cleaner?.OneGamePerRegion == true)
                    OneGamePerRegion(cleaner.RegionList);

                // Run the one rom per game logic, if required
                if (cleaner?.OneRomPerGame == true)
                    OneRomPerGame();

                // If we are removing fields, do that now
                if (cleaner.ExcludeFields != null && cleaner.ExcludeFields.Any())
                    RemoveFieldsFromItems(cleaner.ExcludeFields);

                // Remove all marked items
                Items.ClearMarked();

                // We remove any blanks, if we aren't supposed to have any
                if (cleaner?.KeepEmptyGames == false)
                    Items.ClearEmpty();
            }
            catch (Exception ex)
            {
                Globals.Logger.Error(ex);
                if (throwOnError) throw ex;
                return false;
            }

            return true;
        }
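
        // Note (illustrative): cleaning runs in a fixed order: item-level cleaning, dedupe,
        // renames (description-as-name, scene dates, 1G1R, one-rom-per-game), field removal,
        // then pruning of marked and empty entries.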

        /// <summary>
        /// Apply a set of Extra INIs on the DatFile
        /// </summary>
        /// <param name="extras">ExtrasIni to use</param>
        /// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
        /// <returns>True if the extras were applied, false on error</returns>
        public bool ApplyExtras(ExtraIni extras, bool throwOnError = false)
        {
            try
            {
                // Bucket by game first
                Items.BucketBy(Field.Machine_Name, DedupeType.None);

                // Create a new set of mappings based on the items
                var map = new Dictionary<string, Dictionary<Field, string>>();

                // Loop through each of the extras
                foreach (ExtraIniItem item in extras.Items)
                {
                    foreach (var mapping in item.Mappings)
                    {
                        string key = mapping.Key;
                        List<string> machineNames = mapping.Value;

                        // Loop through the machines and add the new mappings
                        foreach (string machine in machineNames)
                        {
                            if (!map.ContainsKey(machine))
                                map[machine] = new Dictionary<Field, string>();

                            map[machine][item.Field] = key;
                        }
                    }
                }

                // Now apply the new set of mappings
                foreach (string key in map.Keys)
                {
                    // If the key doesn't exist, continue
                    if (!Items.ContainsKey(key))
                        continue;

                    List<DatItem> datItems = Items[key];
                    var mappings = map[key];
                    foreach (var datItem in datItems)
                    {
                        datItem.SetFields(mappings);
                    }
                }
            }
            catch (Exception ex)
            {
                Globals.Logger.Error(ex);
                if (throwOnError) throw ex;
                return false;
            }

            return true;
        }

        /// <summary>
        /// Apply a Filter on the DatFile
        /// </summary>
        /// <param name="filter">Filter to use</param>
        /// <param name="perMachine">True if entire machines are considered, false otherwise (default)</param>
        /// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
        /// <returns>True if the DatFile was filtered, false on error</returns>
        public bool ApplyFilter(Filter filter, bool perMachine = false, bool throwOnError = false)
        {
            // If we have a null filter, return false
            if (filter == null)
                return false;

            // If we're filtering per machine, bucket by machine first
            if (perMachine)
                Items.BucketBy(Field.Machine_Name, DedupeType.None);

            try
            {
                // Loop over every key in the dictionary
                List<string> keys = Items.Keys.ToList();
                foreach (string key in keys)
                {
                    // For every item in the current key
                    bool machinePass = true;
                    List<DatItem> items = Items[key];
                    foreach (DatItem item in items)
                    {
                        // If we have a null item, we can't pass it
                        if (item == null)
                            continue;

                        // If the item is already filtered out, we skip
                        if (item.Remove)
                            continue;

                        // If the rom doesn't pass the filter, mark for removal
                        if (!item.PassesFilter(filter))
                        {
                            item.Remove = true;

                            // If we're in machine mode, set and break
                            if (perMachine)
                            {
                                machinePass = false;
                                break;
                            }
                        }
                    }

                    // If we didn't pass and we're in machine mode, set all items as remove
                    if (perMachine && !machinePass)
                    {
                        foreach (DatItem item in items)
                        {
                            item.Remove = true;
                        }
                    }

                    // Assign back for caution
                    Items[key] = items;
                }
            }
            catch (Exception ex)
            {
                Globals.Logger.Error(ex);
                if (throwOnError) throw ex;
                return false;
            }

            return true;
        }
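
        // Example (illustrative): with perMachine: true, a single failing item marks every item in
        // that machine for removal; with the default, items are filtered individually.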

        /// <summary>
        /// Apply splitting on the DatFile
        /// </summary>
        /// <param name="splitType">Split type to try</param>
        /// <param name="useTags">True if DatFile tags override splitting, false otherwise</param>
        /// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
        /// <returns>True if the DatFile was split, false on error</returns>
        public bool ApplySplitting(MergingFlag splitType, bool useTags, bool throwOnError = false)
        {
            try
            {
                // If we are using tags from the DAT, set the proper input for split type unless overridden
                if (useTags && splitType == MergingFlag.None)
                    splitType = Header.ForceMerging;

                // Run internal splitting
                switch (splitType)
                {
                    case MergingFlag.None:
                        // No-op
                        break;

                    case MergingFlag.Device:
                        CreateDeviceNonMergedSets(DedupeType.None);
                        break;

                    case MergingFlag.Full:
                        CreateFullyNonMergedSets(DedupeType.None);
                        break;

                    case MergingFlag.NonMerged:
                        CreateNonMergedSets(DedupeType.None);
                        break;

                    case MergingFlag.Merged:
                        CreateMergedSets(DedupeType.None);
                        break;

                    case MergingFlag.Split:
                        CreateSplitSets(DedupeType.None);
                        break;
                }
            }
            catch (Exception ex)
            {
                Globals.Logger.Error(ex);
                if (throwOnError) throw ex;
                return false;
            }

            return true;
        }
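
        // Example (illustrative): ApplySplitting(MergingFlag.None, useTags: true) defers to the
        // DAT's forcemerging header value, while an explicit non-None flag always wins.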

        /// <summary>
        /// Apply SuperDAT naming logic to a merged DatFile
        /// </summary>
        /// <param name="inputs">List of inputs to use for renaming</param>
        public void ApplySuperDAT(List<ParentablePath> inputs)
        {
            List<string> keys = Items.Keys.ToList();
            Parallel.ForEach(keys, Globals.ParallelOptions, key =>
            {
                List<DatItem> items = Items[key].ToList();
                List<DatItem> newItems = new List<DatItem>();
                foreach (DatItem item in items)
                {
                    DatItem newItem = item;
                    string filename = inputs[newItem.Source.Index].CurrentPath;
                    string rootpath = inputs[newItem.Source.Index].ParentPath;

                    if (!string.IsNullOrWhiteSpace(rootpath))
                        rootpath += Path.DirectorySeparatorChar.ToString();

                    filename = filename.Remove(0, rootpath.Length);
                    newItem.Machine.Name = Path.GetDirectoryName(filename) + Path.DirectorySeparatorChar
                        + Path.GetFileNameWithoutExtension(filename) + Path.DirectorySeparatorChar
                        + newItem.Machine.Name;

                    newItems.Add(newItem);
                }

                Items.Remove(key);
                Items.AddRange(key, newItems);
            });
        }

        /// <summary>
        /// Clean individual items based on the current filter
        /// </summary>
        /// <param name="cleaner">Cleaner to use</param>
        public void CleanDatItems(Cleaner cleaner)
        {
            List<string> keys = Items.Keys.ToList();
            foreach (string key in keys)
            {
                // For every item in the current key
                List<DatItem> items = Items[key];
                foreach (DatItem item in items)
                {
                    // If we have a null item, we can't clean it
                    if (item == null)
                        continue;

                    // Run cleaning per item
                    item.Clean(cleaner);
                }

                // Assign back for caution
                Items[key] = items;
            }
        }

        /// <summary>
        /// Use game descriptions as names in the DAT, updating cloneof/romof/sampleof
        /// </summary>
        /// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
        public void MachineDescriptionToName(bool throwOnError = false)
        {
            try
            {
                // First we want to get a mapping for all games to description
                ConcurrentDictionary<string, string> mapping = new ConcurrentDictionary<string, string>();
                Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
                {
                    List<DatItem> items = Items[key];
                    foreach (DatItem item in items)
                    {
                        // If the key mapping doesn't exist, add it
                        mapping.TryAdd(item.Machine.Name, item.Machine.Description.Replace('/', '_').Replace("\"", "''").Replace(":", " -"));
                    }
                });

                // Now we loop through every item and update accordingly
                Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
                {
                    List<DatItem> items = Items[key];
                    List<DatItem> newItems = new List<DatItem>();
                    foreach (DatItem item in items)
                    {
                        // Update machine name
                        if (!string.IsNullOrWhiteSpace(item.Machine.Name) && mapping.ContainsKey(item.Machine.Name))
                            item.Machine.Name = mapping[item.Machine.Name];

                        // Update cloneof
                        if (!string.IsNullOrWhiteSpace(item.Machine.CloneOf) && mapping.ContainsKey(item.Machine.CloneOf))
                            item.Machine.CloneOf = mapping[item.Machine.CloneOf];

                        // Update romof
                        if (!string.IsNullOrWhiteSpace(item.Machine.RomOf) && mapping.ContainsKey(item.Machine.RomOf))
                            item.Machine.RomOf = mapping[item.Machine.RomOf];

                        // Update sampleof
                        if (!string.IsNullOrWhiteSpace(item.Machine.SampleOf) && mapping.ContainsKey(item.Machine.SampleOf))
                            item.Machine.SampleOf = mapping[item.Machine.SampleOf];

                        // Add the new item to the output list
                        newItems.Add(item);
                    }

                    // Replace the old list of roms with the new one
                    Items.Remove(key);
                    Items.AddRange(key, newItems);
                });
            }
            catch (Exception ex)
            {
                Globals.Logger.Warning(ex.ToString());
                if (throwOnError) throw ex;
            }
        }

        /// <summary>
        /// Filter a DAT using 1G1R logic given an ordered set of regions
        /// </summary>
        /// <param name="regions">Ordered list of regions to use</param>
        /// <remarks>
        /// In the most technical sense, the way that the region list is being used does not
        /// confine its values to be just regions. Since it's essentially acting like a
        /// specialized version of the machine name filter, anything that is usually encapsulated
        /// in parenthesis would be matched on, including disc numbers, languages, editions,
        /// and anything else commonly used. Please note that, unlike other existing 1G1R
        /// solutions, this does not have the ability to contain custom mappings of parent
        /// to clone sets based on name, nor does it have the ability to match on the
        /// Release DatItem type.
        /// </remarks>
        public void OneGamePerRegion(List<string> regions)
        {
            // If we have a null region list, make it empty
            if (regions == null)
                regions = new List<string>();

            // For sake of ease, the first thing we want to do is bucket by game
            Items.BucketBy(Field.Machine_Name, DedupeType.None, norename: true);

            // Then we want to get a mapping of all machines to parents
            Dictionary<string, List<string>> parents = new Dictionary<string, List<string>>();
            foreach (string key in Items.Keys)
            {
                DatItem item = Items[key][0];

                // Match on CloneOf first
                if (!string.IsNullOrEmpty(item.Machine.CloneOf))
                {
                    if (!parents.ContainsKey(item.Machine.CloneOf.ToLowerInvariant()))
                        parents.Add(item.Machine.CloneOf.ToLowerInvariant(), new List<string>());

                    parents[item.Machine.CloneOf.ToLowerInvariant()].Add(item.Machine.Name.ToLowerInvariant());
                }

                // Then by RomOf
                else if (!string.IsNullOrEmpty(item.Machine.RomOf))
                {
                    if (!parents.ContainsKey(item.Machine.RomOf.ToLowerInvariant()))
                        parents.Add(item.Machine.RomOf.ToLowerInvariant(), new List<string>());

                    parents[item.Machine.RomOf.ToLowerInvariant()].Add(item.Machine.Name.ToLowerInvariant());
                }

                // Otherwise, treat it as a parent
                else
                {
                    if (!parents.ContainsKey(item.Machine.Name.ToLowerInvariant()))
                        parents.Add(item.Machine.Name.ToLowerInvariant(), new List<string>());

                    parents[item.Machine.Name.ToLowerInvariant()].Add(item.Machine.Name.ToLowerInvariant());
                }
            }

            // Once we have the full list of mappings, filter out games to keep
            foreach (string key in parents.Keys)
            {
                // Find the first machine that matches the regions in order, if possible
                string machine = default;
                foreach (string region in regions)
                {
                    machine = parents[key].FirstOrDefault(m => Regex.IsMatch(m, @"\(.*" + region + @".*\)", RegexOptions.IgnoreCase));
                    if (machine != default)
                        break;
                }

                // If we didn't get a match, use the parent
                if (machine == default)
                    machine = key;

                // Remove the key from the list
                parents[key].Remove(machine);

                // Remove the rest of the items from this key
                parents[key].ForEach(k => Items.Remove(k));
            }

            // Finally, strip out the parent tags
            RemoveTagsFromChild();
        }
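
        // Example (illustrative): given regions ["USA", "Europe"], a parent/clone family containing
        // "game (USA)", "game (Europe)", and "game (Japan)" keeps only "game (USA)".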

        /// <summary>
        /// Ensure that all roms are in their own game (or at least try to ensure)
        /// </summary>
        public void OneRomPerGame()
        {
            // Because this introduces subfolders, we need to set the SuperDAT type
            Header.Type = "SuperDAT";

            // For each rom, we want to update the game to be "<game name>/<rom name>"
            Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
            {
                List<DatItem> items = Items[key];
                for (int i = 0; i < items.Count; i++)
                {
                    items[i].SetOneRomPerGame();
                }
            });
        }

        /// <summary>
        /// Remove fields as per the header
        /// </summary>
        /// <param name="fields">List of fields to use</param>
        public void RemoveFieldsFromItems(List<Field> fields)
        {
            // If we have a null field list, make it empty
            if (fields == null)
                fields = new List<Field>();

            // Output the logging statement
            Globals.Logger.User("Removing filtered fields");

            // Now process all of the roms
            Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
            {
                List<DatItem> items = Items[key];
                for (int j = 0; j < items.Count; j++)
                {
                    items[j].RemoveFields(fields);
                }

                Items.Remove(key);
                Items.AddRange(key, items);
            });
        }

        /// <summary>
        /// Strip the dates from the beginning of scene-style set names
        /// </summary>
        public void StripSceneDatesFromItems()
        {
            // Output the logging statement
            Globals.Logger.User("Stripping scene-style dates");

            // Set the regex pattern to use
            string pattern = @"([0-9]{2}\.[0-9]{2}\.[0-9]{2}-)(.*?-.*?)";

            // Now process all of the roms
            Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
            {
                List<DatItem> items = Items[key];
                for (int j = 0; j < items.Count; j++)
                {
                    DatItem item = items[j];
                    if (Regex.IsMatch(item.Machine.Name, pattern))
                        item.Machine.Name = Regex.Replace(item.Machine.Name, pattern, "$2");

                    if (Regex.IsMatch(item.Machine.Description, pattern))
                        item.Machine.Description = Regex.Replace(item.Machine.Description, pattern, "$2");

                    items[j] = item;
                }

                Items.Remove(key);
                Items.AddRange(key, items);
            });
        }
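
        // Example (illustrative): a scene-style name "20.08.15-Some_Title-GROUP" matches the
        // pattern and is renamed to "Some_Title-GROUP".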

        #endregion

        // TODO: Should any of these create a new DatFile in the process?
        // The reason this comes up is that doing any of the splits or merges
        // is an inherently destructive process. Making it output a new DatFile
        // might make it easier to deal with multiple internal steps. On the other
        // hand, this will increase memory usage significantly and would force the
        // existing paths to behave entirely differently
        #region Internal Splitting / Merging

        /// <summary>
        /// Use device_ref tags to get full non-merged sets and remove parenting tags
        /// </summary>
        /// <param name="mergeroms">Dedupe type to be used</param>
        private void CreateDeviceNonMergedSets(DedupeType mergeroms)
        {
            Globals.Logger.User("Creating device non-merged sets from the DAT");

            // For sake of ease, the first thing we want to do is bucket by game
            Items.BucketBy(Field.Machine_Name, mergeroms, norename: true);

            // Now we want to loop through all of the games and set the correct information
            while (AddRomsFromDevices(false, false)) ;
            while (AddRomsFromDevices(true, false)) ;

            // Then, remove the romof and cloneof tags so they're not picked up by the manager
            RemoveTagsFromChild();
        }

        /// <summary>
        /// Use cloneof tags to create non-merged sets and remove the tags plus using the device_ref tags to get full sets
        /// </summary>
        /// <param name="mergeroms">Dedupe type to be used</param>
        private void CreateFullyNonMergedSets(DedupeType mergeroms)
        {
            Globals.Logger.User("Creating fully non-merged sets from the DAT");

            // For sake of ease, the first thing we want to do is bucket by game
            Items.BucketBy(Field.Machine_Name, mergeroms, norename: true);

            // Now we want to loop through all of the games and set the correct information
            while (AddRomsFromDevices(true, true)) ;
            AddRomsFromDevices(false, true);
            AddRomsFromParent();

            // Now that we have looped through the cloneof tags, we loop through the romof tags
            AddRomsFromBios();

            // Then, remove the romof and cloneof tags so they're not picked up by the manager
            RemoveTagsFromChild();
        }

        /// <summary>
        /// Use cloneof tags to create merged sets and remove the tags
        /// </summary>
        /// <param name="mergeroms">Dedupe type to be used</param>
        private void CreateMergedSets(DedupeType mergeroms)
        {
            Globals.Logger.User("Creating merged sets from the DAT");

            // For sake of ease, the first thing we want to do is bucket by game
            Items.BucketBy(Field.Machine_Name, mergeroms, norename: true);

            // Now we want to loop through all of the games and set the correct information
            AddRomsFromChildren();

            // Now that we have looped through the cloneof tags, we loop through the romof tags
            RemoveBiosRomsFromChild(false);
            RemoveBiosRomsFromChild(true);

            // Finally, remove the romof and cloneof tags so they're not picked up by the manager
            RemoveTagsFromChild();
        }

        /// <summary>
        /// Use cloneof tags to create non-merged sets and remove the tags
        /// </summary>
        /// <param name="mergeroms">Dedupe type to be used</param>
        private void CreateNonMergedSets(DedupeType mergeroms)
        {
            Globals.Logger.User("Creating non-merged sets from the DAT");

            // For sake of ease, the first thing we want to do is bucket by game
            Items.BucketBy(Field.Machine_Name, mergeroms, norename: true);

            // Now we want to loop through all of the games and set the correct information
            AddRomsFromParent();

            // Now that we have looped through the cloneof tags, we loop through the romof tags
            RemoveBiosRomsFromChild(false);
            RemoveBiosRomsFromChild(true);

            // Finally, remove the romof and cloneof tags so they're not picked up by the manager
            RemoveTagsFromChild();
        }

        /// <summary>
        /// Use cloneof and romof tags to create split sets and remove the tags
        /// </summary>
        /// <param name="mergeroms">Dedupe type to be used</param>
        private void CreateSplitSets(DedupeType mergeroms)
        {
            Globals.Logger.User("Creating split sets from the DAT");

            // For sake of ease, the first thing we want to do is bucket by game
            Items.BucketBy(Field.Machine_Name, mergeroms, norename: true);

            // Now we want to loop through all of the games and set the correct information
            RemoveRomsFromChild();

            // Now that we have looped through the cloneof tags, we loop through the romof tags
            RemoveBiosRomsFromChild(false);
            RemoveBiosRomsFromChild(true);

            // Finally, remove the romof and cloneof tags so they're not picked up by the manager
            RemoveTagsFromChild();
        }

        /// <summary>
        /// Use romof tags to add roms to the children
        /// </summary>
        private void AddRomsFromBios()
        {
            List<string> games = Items.Keys.OrderBy(g => g).ToList();
            foreach (string game in games)
            {
                // If the game has no items in it, we want to continue
                if (Items[game].Count == 0)
                    continue;

                // Determine if the game has a parent or not
                string parent = null;
                if (!string.IsNullOrWhiteSpace(Items[game][0].Machine.RomOf))
                    parent = Items[game][0].Machine.RomOf;

                // If the parent doesn't exist, we want to continue
                if (string.IsNullOrWhiteSpace(parent))
                    continue;

                // If the parent doesn't have any items, we want to continue
                if (Items[parent].Count == 0)
                    continue;

                // If the parent exists and has items, we copy the items from the parent to the current game
                DatItem copyFrom = Items[game][0];
                List<DatItem> parentItems = Items[parent];
                foreach (DatItem item in parentItems)
                {
                    DatItem datItem = (DatItem)item.Clone();
                    datItem.CopyMachineInformation(copyFrom);
                    if (Items[game].Where(i => i.GetName() == datItem.GetName()).Count() == 0 && !Items[game].Contains(datItem))
                        Items.Add(game, datItem);
                }
            }
        }
        /// <summary>
        /// Use device_ref and optionally slotoption tags to add roms to the children
        /// </summary>
        /// <param name="dev">True if only child device sets are touched, false for non-device sets (default)</param>
        /// <param name="useSlotOptions">True if slotoption tags are used as well, false otherwise</param>
        private bool AddRomsFromDevices(bool dev = false, bool useSlotOptions = false)
        {
            bool foundnew = false;
            List<string> machines = Items.Keys.OrderBy(g => g).ToList();
            foreach (string machine in machines)
            {
                // If the machine doesn't have items, we continue
                if (Items[machine] == null || Items[machine].Count == 0)
                    continue;

                // If the machine's device flag doesn't match the mode we're in (device vs. non-device), we continue
                if (dev ^ Items[machine][0].Machine.MachineType.HasFlag(MachineType.Device))
                    continue;

                // Get all device reference names from the current machine
                List<string> deviceReferences = Items[machine]
                    .Where(i => i.ItemType == ItemType.DeviceReference)
                    .Select(i => i as DeviceReference)
                    .Select(dr => dr.Name)
                    .Distinct()
                    .ToList();

                // Get all slot option names from the current machine
                List<string> slotOptions = Items[machine]
                    .Where(i => i.ItemType == ItemType.Slot)
                    .Select(i => i as Slot)
                    .Where(s => s.SlotOptionsSpecified)
                    .SelectMany(s => s.SlotOptions)
                    .Select(so => so.DeviceName)
                    .Distinct()
                    .ToList();

                // If we're checking device references
                if (deviceReferences.Any())
                {
                    // Loop through all names and check the corresponding machines
                    List<string> newDeviceReferences = new List<string>();
                    foreach (string deviceReference in deviceReferences)
                    {
                        // If the referenced machine doesn't exist, we continue
                        if (Items[deviceReference] == null || Items[deviceReference].Count == 0)
                            continue;

                        // Add to the list of new device reference names
                        List<DatItem> devItems = Items[deviceReference];
                        newDeviceReferences.AddRange(devItems
                            .Where(i => i.ItemType == ItemType.DeviceReference)
                            .Select(i => (i as DeviceReference).Name));

                        // Set new machine information and add to the current machine
                        DatItem copyFrom = Items[machine][0];
                        foreach (DatItem item in devItems)
                        {
                            // If the parent machine doesn't already contain this item, add it
                            if (!Items[machine].Any(i => i.ItemType == item.ItemType && i.GetName() == item.GetName()))
                            {
                                // Set that we found new items
                                foundnew = true;

                                // Clone the item and then add it
                                DatItem datItem = (DatItem)item.Clone();
                                datItem.CopyMachineInformation(copyFrom);
                                Items.Add(machine, datItem);
                            }
                        }
                    }

                    // Now that every device reference is accounted for, add the new list of device references, if they don't already exist
                    foreach (string deviceReference in newDeviceReferences.Distinct())
                    {
                        if (!deviceReferences.Contains(deviceReference))
                            Items[machine].Add(new DeviceReference() { Name = deviceReference });
                    }
                }

                // If we're checking slot options
                if (useSlotOptions && slotOptions.Any())
                {
                    // Loop through all names and check the corresponding machines
                    List<string> newSlotOptions = new List<string>();
                    foreach (string slotOption in slotOptions)
                    {
                        // If the referenced machine doesn't exist, we continue
                        if (Items[slotOption] == null || Items[slotOption].Count == 0)
                            continue;

                        // Add to the list of new slot option names
                        List<DatItem> slotItems = Items[slotOption];
                        newSlotOptions.AddRange(slotItems
                            .Where(i => i.ItemType == ItemType.Slot)
                            .Where(s => (s as Slot).SlotOptionsSpecified)
                            .SelectMany(s => (s as Slot).SlotOptions)
                            .Select(o => o.DeviceName));

                        // Set new machine information and add to the current machine
                        DatItem copyFrom = Items[machine][0];
                        foreach (DatItem item in slotItems)
                        {
                            // If the parent machine doesn't already contain this item, add it
                            if (!Items[machine].Any(i => i.ItemType == item.ItemType && i.GetName() == item.GetName()))
                            {
                                // Set that we found new items
                                foundnew = true;

                                // Clone the item and then add it
                                DatItem datItem = (DatItem)item.Clone();
                                datItem.CopyMachineInformation(copyFrom);
                                Items.Add(machine, datItem);
                            }
                        }
                    }

                    // Now that every device is accounted for, add the new list of slot options, if they don't already exist
                    foreach (string slotOption in newSlotOptions.Distinct())
                    {
                        if (!slotOptions.Contains(slotOption))
                            Items[machine].Add(new Slot() { SlotOptions = new List<SlotOption> { new SlotOption { DeviceName = slotOption } } });
                    }
                }
            }

            return foundnew;
        }
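
        // Illustrative sketch (hypothetical caller, not part of this method): because newly
        // copied device items can themselves reference further devices, AddRomsFromDevices
        // returns true whenever anything was added, so a caller would typically iterate it
        // to a fixed point:
        //
        //     while (AddRomsFromDevices(dev: false, useSlotOptions: true)) { }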
        /// <summary>
        /// Use cloneof tags to add roms to the children, setting the new romof tag in the process
        /// </summary>
        private void AddRomsFromParent()
        {
            List<string> games = Items.Keys.OrderBy(g => g).ToList();
            foreach (string game in games)
            {
                // If the game has no items in it, we want to continue
                if (Items[game].Count == 0)
                    continue;

                // Determine if the game has a parent or not
                string parent = null;
                if (!string.IsNullOrWhiteSpace(Items[game][0].Machine.CloneOf))
                    parent = Items[game][0].Machine.CloneOf;

                // If the parent doesn't exist, we want to continue
                if (string.IsNullOrWhiteSpace(parent))
                    continue;

                // If the parent doesn't have any items, we want to continue
                if (Items[parent].Count == 0)
                    continue;

                // If the parent exists and has items, we copy the items from the parent to the current game
                DatItem copyFrom = Items[game][0];
                List<DatItem> parentItems = Items[parent];
                foreach (DatItem item in parentItems)
                {
                    DatItem datItem = (DatItem)item.Clone();
                    datItem.CopyMachineInformation(copyFrom);
                    if (Items[game].Where(i => i.GetName()?.ToLowerInvariant() == datItem.GetName()?.ToLowerInvariant()).Count() == 0
                        && !Items[game].Contains(datItem))
                    {
                        Items.Add(game, datItem);
                    }
                }

                // Now we want to get the parent romof tag and put it in each of the items
                List<DatItem> items = Items[game];
                string romof = Items[parent][0].Machine.RomOf;
                foreach (DatItem item in items)
                {
                    item.Machine.RomOf = romof;
                }
            }
        }
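
        // Illustrative sketch (hypothetical data): with parent "parentgame" (RomOf "biosset")
        // and clone "clonegame" (CloneOf "parentgame"), AddRomsFromParent copies parentgame's
        // items into clonegame and then stamps clonegame's items with RomOf = "biosset", so
        // the clone resolves its shared BIOS roms the same way its parent does.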
        /// <summary>
        /// Use cloneof tags to add roms to the parents, removing the child sets in the process
        /// </summary>
        /// <param name="subfolder">True to add DatItems to a subfolder of the parent (not including Disk), false otherwise</param>
        private void AddRomsFromChildren(bool subfolder = true)
        {
            List<string> games = Items.Keys.OrderBy(g => g).ToList();
            foreach (string game in games)
            {
                // If the game has no items in it, we want to continue
                if (Items[game].Count == 0)
                    continue;

                // Determine if the game has a parent or not
                string parent = null;
                if (!string.IsNullOrWhiteSpace(Items[game][0].Machine.CloneOf))
                    parent = Items[game][0].Machine.CloneOf;

                // If there is no parent, then we continue
                if (string.IsNullOrWhiteSpace(parent))
                    continue;

                // Otherwise, move the items from the current game to a subfolder of the parent game
                DatItem copyFrom;
                if (Items[parent].Count == 0)
                {
                    copyFrom = new Rom();
                    copyFrom.Machine.Name = parent;
                    copyFrom.Machine.Description = parent;
                }
                else
                {
                    copyFrom = Items[parent][0];
                }

                List<DatItem> items = Items[game];
                foreach (DatItem item in items)
                {
                    // Special disk handling
                    if (item.ItemType == ItemType.Disk)
                    {
                        Disk disk = item as Disk;

                        // If the merge tag exists and the parent already contains it, skip
                        if (disk.MergeTag != null && Items[parent].Where(i => i.ItemType == ItemType.Disk).Select(i => (i as Disk).Name).Contains(disk.MergeTag))
                        {
                            continue;
                        }

                        // If the merge tag exists but the parent doesn't contain it, add to parent
                        else if (disk.MergeTag != null && !Items[parent].Where(i => i.ItemType == ItemType.Disk).Select(i => (i as Disk).Name).Contains(disk.MergeTag))
                        {
                            disk.CopyMachineInformation(copyFrom);
                            Items.Add(parent, disk);
                        }

                        // If there is no merge tag, add to parent
                        else if (disk.MergeTag == null)
                        {
                            disk.CopyMachineInformation(copyFrom);
                            Items.Add(parent, disk);
                        }
                    }

                    // Special rom handling
                    else if (item.ItemType == ItemType.Rom)
                    {
                        Rom rom = item as Rom;

                        // If the merge tag exists and the parent already contains it, skip
                        if (rom.MergeTag != null && Items[parent].Where(i => i.ItemType == ItemType.Rom).Select(i => (i as Rom).Name).Contains(rom.MergeTag))
                        {
                            continue;
                        }

                        // If the merge tag exists but the parent doesn't contain it, add to subfolder of parent
                        else if (rom.MergeTag != null && !Items[parent].Where(i => i.ItemType == ItemType.Rom).Select(i => (i as Rom).Name).Contains(rom.MergeTag))
                        {
                            if (subfolder)
                                rom.Name = $"{rom.Machine.Name}\\{rom.Name}";

                            rom.CopyMachineInformation(copyFrom);
                            Items.Add(parent, rom);
                        }

                        // If the parent doesn't already contain this item, add to subfolder of parent
                        else if (!Items[parent].Contains(item))
                        {
                            if (subfolder)
                                rom.Name = $"{item.Machine.Name}\\{rom.Name}";

                            rom.CopyMachineInformation(copyFrom);
                            Items.Add(parent, rom);
                        }
                    }

                    // All other item types that are missing get added to a subfolder of the parent
                    else if (!Items[parent].Contains(item))
                    {
                        if (subfolder)
                            item.SetFields(new Dictionary<Field, string> { [Field.DatItem_Name] = $"{item.Machine.Name}\\{item.GetName()}" });

                        item.CopyMachineInformation(copyFrom);
                        Items.Add(parent, item);
                    }
                }

                // Then, remove the old game so it's not picked up by the writer
                Items.Remove(game);
            }
        }
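
        // Illustrative sketch (hypothetical data): merging clone "clonegame" into parent
        // "parentgame" with subfolder == true renames a non-shared rom "clone.bin" to
        // "clonegame\clone.bin" inside the parent set, while merge-tagged roms already in
        // the parent are kept only once at the parent level; the clone set itself is then
        // removed from the DAT.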
        /// <summary>
        /// Remove all BIOS and device sets
        /// </summary>
        private void RemoveBiosAndDeviceSets()
        {
            List<string> games = Items.Keys.OrderBy(g => g).ToList();
            foreach (string game in games)
            {
                if (Items[game].Count > 0
                    && (Items[game][0].Machine.MachineType.HasFlag(MachineType.Bios)
                        || Items[game][0].Machine.MachineType.HasFlag(MachineType.Device)))
                {
                    Items.Remove(game);
                }
            }
        }
        /// <summary>
        /// Use romof tags to remove bios roms from children
        /// </summary>
        /// <param name="bios">True if only child Bios sets are touched, false for non-bios sets (default)</param>
        private void RemoveBiosRomsFromChild(bool bios = false)
        {
            // Loop through the romof tags
            List<string> games = Items.Keys.OrderBy(g => g).ToList();
            foreach (string game in games)
            {
                // If the game has no items in it, we want to continue
                if (Items[game].Count == 0)
                    continue;

                // If the game's BIOS flag doesn't match the mode we're in (bios vs. non-bios), we want to continue
                if (bios ^ Items[game][0].Machine.MachineType.HasFlag(MachineType.Bios))
                    continue;

                // Determine if the game has a parent or not
                string parent = null;
                if (!string.IsNullOrWhiteSpace(Items[game][0].Machine.RomOf))
                    parent = Items[game][0].Machine.RomOf;

                // If the parent doesn't exist, we want to continue
                if (string.IsNullOrWhiteSpace(parent))
                    continue;

                // If the parent doesn't have any items, we want to continue
                if (Items[parent].Count == 0)
                    continue;

                // If the parent exists and has items, we remove the items that are in the parent from the current game
                List<DatItem> parentItems = Items[parent];
                foreach (DatItem item in parentItems)
                {
                    DatItem datItem = (DatItem)item.Clone();
                    while (Items[game].Contains(datItem))
                    {
                        Items.Remove(game, datItem);
                    }
                }
            }
        }
        /// <summary>
        /// Use cloneof tags to remove roms from the children
        /// </summary>
        private void RemoveRomsFromChild()
        {
            List<string> games = Items.Keys.OrderBy(g => g).ToList();
            foreach (string game in games)
            {
                // If the game has no items in it, we want to continue
                if (Items[game].Count == 0)
                    continue;

                // Determine if the game has a parent or not
                string parent = null;
                if (!string.IsNullOrWhiteSpace(Items[game][0].Machine.CloneOf))
                    parent = Items[game][0].Machine.CloneOf;

                // If the parent doesn't exist, we want to continue
                if (string.IsNullOrWhiteSpace(parent))
                    continue;

                // If the parent doesn't have any items, we want to continue
                if (Items[parent].Count == 0)
                    continue;

                // If the parent exists and has items, we remove the parent items from the current game
                List<DatItem> parentItems = Items[parent];
                foreach (DatItem item in parentItems)
                {
                    DatItem datItem = (DatItem)item.Clone();
                    while (Items[game].Contains(datItem))
                    {
                        Items.Remove(game, datItem);
                    }
                }

                // Now we want to get the parent romof tag and put it in each of the remaining items
                List<DatItem> items = Items[game];
                string romof = Items[parent][0].Machine.RomOf;
                foreach (DatItem item in items)
                {
                    item.Machine.RomOf = romof;
                }
            }
        }
        /// <summary>
        /// Remove all romof, cloneof, and sampleof tags from all games
        /// </summary>
        private void RemoveTagsFromChild()
        {
            List<string> games = Items.Keys.OrderBy(g => g).ToList();
            foreach (string game in games)
            {
                List<DatItem> items = Items[game];
                foreach (DatItem item in items)
                {
                    item.Machine.CloneOf = null;
                    item.Machine.RomOf = null;
                    item.Machine.SampleOf = null;
                }
            }
        }
        #endregion

        #region Parsing
        /// <summary>
        /// Create a DatFile and parse a file into it
        /// </summary>
        /// <param name="filename">Name of the file to be parsed</param>
        /// <param name="throwOnError">True if parsing errors should be thrown back to the caller, false otherwise</param>
        public static DatFile CreateAndParse(string filename, bool throwOnError = false)
        {
            DatFile datFile = Create();
            datFile.Parse(new ParentablePath(filename), throwOnError: throwOnError);
            return datFile;
        }
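
        // Illustrative usage (hypothetical caller): the DAT format is inferred from the file
        // itself, so a one-liner is enough to load a DAT of any supported flavor:
        //
        //     DatFile datFile = DatFile.CreateAndParse("example.dat");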
        /// <summary>
        /// Parse a DAT and return all found games and roms within
        /// </summary>
        /// <param name="filename">Name of the file to be parsed</param>
        /// <param name="indexId">Index ID for the DAT</param>
        /// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param>
        /// <param name="keepext">True if the original extension should be kept, false otherwise (default)</param>
        /// <param name="quotes">True if quotes are assumed in supported types (default), false otherwise</param>
        /// <param name="throwOnError">True if parsing errors should be thrown back to the caller, false otherwise</param>
        public void Parse(
            string filename,
            int indexId = 0,
            bool keep = false,
            bool keepext = false,
            bool quotes = true,
            bool throwOnError = false)
        {
            ParentablePath path = new ParentablePath(filename.Trim('"'));
            Parse(path, indexId, keep, keepext, quotes, throwOnError);
        }
        /// <summary>
        /// Parse a DAT and return all found games and roms within
        /// </summary>
        /// <param name="input">Path of the file to be parsed</param>
        /// <param name="indexId">Index ID for the DAT</param>
        /// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param>
        /// <param name="keepext">True if the original extension should be kept, false otherwise (default)</param>
        /// <param name="quotes">True if quotes are assumed in supported types (default), false otherwise</param>
        /// <param name="throwOnError">True if parsing errors should be thrown back to the caller, false otherwise</param>
        public void Parse(
            ParentablePath input,
            int indexId = 0,
            bool keep = false,
            bool keepext = false,
            bool quotes = true,
            bool throwOnError = true)
        {
            // Get the current path from the filename
            string currentPath = input.CurrentPath;

            // Check the file extension first as a safeguard
            if (!PathExtensions.HasValidDatExtension(currentPath))
                return;

            // If the output filename isn't set already, get the internal filename
            Header.FileName = string.IsNullOrWhiteSpace(Header.FileName)
                ? (keepext ? Path.GetFileName(currentPath) : Path.GetFileNameWithoutExtension(currentPath))
                : Header.FileName;

            // If the output type isn't set already, get the internal output type
            Header.DatFormat = Header.DatFormat == 0 ? currentPath.GetDatFormat() : Header.DatFormat;

            // Setting the bucketing field here can reduce issues later
            Items.SetBucketedBy(Field.DatItem_CRC);

            // Now parse the correct type of DAT
            try
            {
                Create(currentPath.GetDatFormat(), this, quotes)?.ParseFile(currentPath, indexId, keep, throwOnError);
            }
            catch (Exception ex)
            {
                Globals.Logger.Error(ex, $"Error with file '{currentPath}'");

                // Rethrow with `throw;` so the original stack trace is preserved
                if (throwOnError)
                    throw;
            }
        }
        /// <summary>
        /// Add a rom to the Dat after checking
        /// </summary>
        /// <param name="item">Item data to check against</param>
        /// <returns>The key for the item</returns>
        protected string ParseAddHelper(DatItem item)
        {
            string key = string.Empty;

            // If we have a Disk, Media, or Rom, clean the hash data
            if (item.ItemType == ItemType.Disk)
            {
                Disk disk = item as Disk;

                // If the file has absolutely no hashes, skip and log
                if (disk.ItemStatus != ItemStatus.Nodump
                    && string.IsNullOrWhiteSpace(disk.MD5)
                    && string.IsNullOrWhiteSpace(disk.SHA1))
                {
                    Globals.Logger.Verbose($"Incomplete entry for '{disk.Name}' will be output as nodump");
                    disk.ItemStatus = ItemStatus.Nodump;
                }

                item = disk;
            }
            else if (item.ItemType == ItemType.Rom)
            {
                Rom rom = item as Rom;

                // If the rom has no size and no hashes, we don't fill in any other part of the data
                if (rom.Size == null && !rom.HasHashes())
                {
                    // No-op, just catch it so it doesn't go further
                    Globals.Logger.Verbose($"{Header.FileName}: Entry with only SHA-1 found - '{rom.Name}'");
                }

                // If we have a rom and it's missing size AND the hashes match a 0-byte file, fill in the rest of the info
                else if ((rom.Size == 0 || rom.Size == null)
                    && (string.IsNullOrWhiteSpace(rom.CRC) || rom.HasZeroHash()))
                {
                    // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
                    rom.Size = Constants.SizeZero;
                    rom.CRC = Constants.CRCZero;
                    rom.MD5 = Constants.MD5Zero;
#if NET_FRAMEWORK
                    rom.RIPEMD160 = null; // Constants.RIPEMD160Zero;
#endif
                    rom.SHA1 = Constants.SHA1Zero;
                    rom.SHA256 = null; // Constants.SHA256Zero;
                    rom.SHA384 = null; // Constants.SHA384Zero;
                    rom.SHA512 = null; // Constants.SHA512Zero;
                    rom.SpamSum = null; // Constants.SpamSumZero;
                }

                // If the file has no size and it's not the above case, skip and log
                else if (rom.ItemStatus != ItemStatus.Nodump && (rom.Size == 0 || rom.Size == null))
                {
                    Globals.Logger.Verbose($"{Header.FileName}: Incomplete entry for '{rom.Name}' will be output as nodump");
                    rom.ItemStatus = ItemStatus.Nodump;
                }

                // If the file has a size but absolutely no hashes, skip and log
                else if (rom.ItemStatus != ItemStatus.Nodump
                    && rom.Size != null && rom.Size > 0
                    && !rom.HasHashes())
                {
                    Globals.Logger.Verbose($"{Header.FileName}: Incomplete entry for '{rom.Name}' will be output as nodump");
                    rom.ItemStatus = ItemStatus.Nodump;
                }

                item = rom;
            }

            // Get the key and add the file
            key = item.GetKey(Field.Machine_Name);
            Items.Add(key, item);

            return key;
        }
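
        // For reference: the zero-hash constants used above correspond to the well-known
        // digests of a 0-byte file (CRC32 00000000, MD5 d41d8cd98f00b204e9800998ecf8427e,
        // SHA-1 da39a3ee5e6b4b0d3255bfef95601890afd80709), which is why a missing size plus
        // a zero hash can safely be normalized into a fully-specified empty rom.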
        /// <summary>
        /// Parse a DatFile and return all found games and roms within
        /// </summary>
        /// <param name="filename">Name of the file to be parsed</param>
        /// <param name="indexId">Index ID for the DAT</param>
        /// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param>
        /// <param name="throwOnError">True if parsing errors should be thrown back to the caller, false otherwise</param>
        protected abstract void ParseFile(string filename, int indexId, bool keep, bool throwOnError = false);

        #endregion

        // TODO: See if any of the methods can be broken up a bit more neatly

        #region Populate DAT from Directory
        /// <summary>
        /// Create a new Dat from a directory
        /// </summary>
        /// <param name="basePath">Base folder to be used in creating the DAT</param>
        /// <param name="asFiles">TreatAsFile flags representing CHD and archive scanning</param>
        /// <param name="skipFileType">Type of files that should be skipped</param>
        /// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
        /// <param name="hashes">Hashes to include in the information</param>
        public bool PopulateFromDir(
            string basePath,
            TreatAsFile asFiles = 0x00,
            SkipFileType skipFileType = SkipFileType.None,
            bool addBlanks = false,
            Hash hashes = Hash.Standard)
        {
            // Clean the temp directory path
            Globals.TempDir = DirectoryExtensions.Ensure(Globals.TempDir, temp: true);

            // Process the input
            if (Directory.Exists(basePath))
            {
                Globals.Logger.Verbose($"Folder found: {basePath}");

                // Process the files in the main folder or any subfolder
                List<string> files = Directory.EnumerateFiles(basePath, "*", SearchOption.AllDirectories).ToList();
                Parallel.ForEach(files, Globals.ParallelOptions, item =>
                {
                    CheckFileForHashes(item, basePath, asFiles, skipFileType, addBlanks, hashes);
                });

                // Now find all folders that are empty, if we are supposed to
                if (addBlanks)
                    ProcessDirectoryBlanks(basePath);
            }
            else if (File.Exists(basePath))
            {
                string parentPath = Path.GetDirectoryName(Path.GetDirectoryName(basePath));
                CheckFileForHashes(basePath, parentPath, asFiles, skipFileType, addBlanks, hashes);
            }

            // Now that we're done, delete the temp folder (if it's not the default)
            Globals.Logger.User("Cleaning temp folder");
            if (Globals.TempDir != Path.GetTempPath())
                DirectoryExtensions.TryDelete(Globals.TempDir);

            return true;
        }
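
        // Illustrative usage (hypothetical path): building a DAT from a folder, including
        // blank entries for empty directories and the standard hash set, might look like:
        //
        //     DatFile datFile = DatFile.Create();
        //     datFile.PopulateFromDir(@"C:\roms", addBlanks: true, hashes: Hash.Standard);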
        /// <summary>
        /// Check a given file for hashes, based on current settings
        /// </summary>
        /// <param name="item">Filename of the item to be checked</param>
        /// <param name="basePath">Base folder to be used in creating the DAT</param>
        /// <param name="asFiles">TreatAsFile flags representing CHD and archive scanning</param>
        /// <param name="skipFileType">Type of files that should be skipped</param>
        /// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
        /// <param name="hashes">Hashes to include in the information</param>
        private void CheckFileForHashes(string item, string basePath, TreatAsFile asFiles, SkipFileType skipFileType, bool addBlanks, Hash hashes)
        {
            // If we're in depot mode, process it separately
            if (CheckDepotFile(item))
                return;

            // Initialize possible archive variables
            BaseArchive archive = BaseArchive.Create(item);

            // Process archives according to flags
            if (archive != null)
            {
                // Set the archive flags
                archive.AvailableHashes = hashes;

                // Skip if we're treating archives as files and skipping files
                if (asFiles.HasFlag(TreatAsFile.Archive) && skipFileType == SkipFileType.File)
                {
                    return;
                }

                // Skip if we're skipping archives
                else if (skipFileType == SkipFileType.Archive)
                {
                    return;
                }

                // Process as archive if we're not treating archives as files
                else if (!asFiles.HasFlag(TreatAsFile.Archive))
                {
                    var extracted = archive.GetChildren();

                    // If we have internal items to process, do so
                    if (extracted != null)
                        ProcessArchive(item, basePath, extracted);

                    // Now find all folders that are empty, if we are supposed to
                    if (addBlanks)
                        ProcessArchiveBlanks(item, basePath, archive);
                }

                // Process as file if we're treating archives as files
                else
                {
                    ProcessFile(item, basePath, hashes, asFiles);
                }
            }

            // Process non-archives according to flags
            else
            {
                // Skip if we're skipping files
                if (skipFileType == SkipFileType.File)
                    return;

                // Process as file
                else
                    ProcessFile(item, basePath, hashes, asFiles);
            }
        }
        /// <summary>
        /// Check an item as if it's supposed to be in a depot
        /// </summary>
        /// <param name="item">Filename of the item to be checked</param>
        /// <returns>True if we checked a depot file, false otherwise</returns>
        private bool CheckDepotFile(string item)
        {
            // If we're not in Depot mode, return false
            if (Header.OutputDepot?.IsActive != true)
                return false;

            // Check the file as if it were in a depot
            GZipArchive gzarc = new GZipArchive(item);
            BaseFile baseFile = gzarc.GetTorrentGZFileInfo();

            // If the rom is valid, add it under its CRC key
            if (baseFile != null && baseFile.Filename != null)
            {
                Rom rom = new Rom(baseFile);
                Items.Add(rom.GetKey(Field.DatItem_CRC), rom);
                Globals.Logger.User($"File added: {Path.GetFileNameWithoutExtension(item)}{Environment.NewLine}");
            }
            else
            {
                Globals.Logger.User($"File not added: {Path.GetFileNameWithoutExtension(item)}{Environment.NewLine}");
                return true;
            }

            return true;
        }
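
        // For context (hedged, based on the common Romba/RomVault depot convention): a depot
        // stores each file as a torrent-gzip named by its SHA-1, nested by hash prefix, so a
        // rom with SHA-1 "abcd1234..." would live at something like "ab/cd/12/34/abcd1234....gz"
        // for a depth of 4; GetTorrentGZFileInfo reads the hash metadata back out of that container.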
        /// <summary>
        /// Process a single file as an archive
        /// </summary>
        /// <param name="item">File to be added</param>
        /// <param name="basePath">Path that represents the parent directory</param>
        /// <param name="extracted">List of BaseFiles representing the internal files</param>
        private void ProcessArchive(string item, string basePath, List<BaseFile> extracted)
        {
            // Get the parent path for all items
            string parent = (Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item);

            // First take care of the found items
            Parallel.ForEach(extracted, Globals.ParallelOptions, baseFile =>
            {
                DatItem datItem = DatItem.Create(baseFile);
                ProcessFileHelper(item, datItem, basePath, parent);
            });
        }

        /// <summary>
        /// Process blank folders in an archive
        /// </summary>
        /// <param name="item">File containing the blanks</param>
        /// <param name="basePath">Path that represents the parent directory</param>
        /// <param name="archive">BaseArchive to get blanks from</param>
        private void ProcessArchiveBlanks(string item, string basePath, BaseArchive archive)
        {
            List<string> empties = new List<string>();

            // Get the parent path for all items
            string parent = (Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item);

            // Now get all blank folders from the archive
            if (archive != null)
                empties = archive.GetEmptyFolders();

            // Add all of the found empties to the DAT
            Parallel.ForEach(empties, Globals.ParallelOptions, empty =>
            {
                Rom emptyRom = new Rom(Path.Combine(empty, "_"), item);
                ProcessFileHelper(item, emptyRom, basePath, parent);
            });
        }
        /// <summary>
        /// Process blank folders in a directory
        /// </summary>
        /// <param name="basePath">Path that represents the parent directory</param>
        private void ProcessDirectoryBlanks(string basePath)
        {
            // If we're in depot mode, we don't process blanks
            if (Header.OutputDepot?.IsActive == true)
                return;

            List<string> empties = DirectoryExtensions.ListEmpty(basePath);
            Parallel.ForEach(empties, Globals.ParallelOptions, dir =>
            {
                // Get the full path for the directory
                string fulldir = Path.GetFullPath(dir);

                // Set the temporary variables
                string gamename = string.Empty;
                string romname = string.Empty;

                // If we have a SuperDAT, we want anything that's not the base path as the game, and the file as the rom
                if (Header.Type == "SuperDAT")
                {
                    gamename = fulldir.Remove(0, basePath.Length + 1);
                    romname = "_";
                }

                // Otherwise, we want just the top level folder as the game, and the file as everything else
                else
                {
                    gamename = fulldir.Remove(0, basePath.Length + 1).Split(Path.DirectorySeparatorChar)[0];
                    romname = Path.Combine(fulldir.Remove(0, basePath.Length + 1 + gamename.Length), "_");
                }

                // Sanitize the names
                gamename = gamename.Trim(Path.DirectorySeparatorChar);
                romname = romname.Trim(Path.DirectorySeparatorChar);

                Globals.Logger.Verbose($"Adding blank empty folder: {gamename}");
                Items["null"].Add(new Rom(romname, gamename));
            });
        }
        /// <summary>
        /// Process a single file as a file
        /// </summary>
        /// <param name="item">File to be added</param>
        /// <param name="basePath">Path that represents the parent directory</param>
        /// <param name="hashes">Hashes to include in the information</param>
        /// <param name="asFiles">TreatAsFile flags representing CHD and archive scanning</param>
        private void ProcessFile(string item, string basePath, Hash hashes, TreatAsFile asFiles)
        {
            Globals.Logger.Verbose($"'{Path.GetFileName(item)}' treated like a file");
            BaseFile baseFile = FileExtensions.GetInfo(item, header: Header.HeaderSkipper, hashes: hashes, asFiles: asFiles);
            DatItem datItem = DatItem.Create(baseFile);
            ProcessFileHelper(item, datItem, basePath, string.Empty);
        }
        /// <summary>
        /// Process a single file as a file (with found Rom data)
        /// </summary>
        /// <param name="item">File to be added</param>
        /// <param name="datItem">Rom data to be used to write to file</param>
        /// <param name="basepath">Path that represents the parent directory</param>
        /// <param name="parent">Parent game to be used</param>
        private void ProcessFileHelper(string item, DatItem datItem, string basepath, string parent)
        {
            // If we didn't get an accepted parsed type somehow, cancel out
            List<ItemType> parsed = new List<ItemType> { ItemType.Disk, ItemType.Media, ItemType.Rom };
            if (!parsed.Contains(datItem.ItemType))
                return;

            try
            {
                // If the basepath doesn't end with a directory separator, add it
                if (!basepath.EndsWith(Path.DirectorySeparatorChar.ToString()))
                    basepath += Path.DirectorySeparatorChar.ToString();

                // Make sure we have the full item path
                item = Path.GetFullPath(item);

                // Process the item to sanitize names based on input
                SetDatItemInfo(datItem, item, parent, basepath);

                // Add the file information to the DAT
                string key = datItem.GetKey(Field.DatItem_CRC);
                Items.Add(key, datItem);

                Globals.Logger.User($"File added: {datItem.GetName() ?? string.Empty}{Environment.NewLine}");
            }
            catch (IOException ex)
            {
                Globals.Logger.Error(ex);
                return;
            }
        }
        /// <summary>
        /// Set proper Game and Rom names from user inputs
        /// </summary>
        /// <param name="datItem">DatItem representing the input file</param>
        /// <param name="item">Item name to use</param>
        /// <param name="parent">Parent name to use</param>
        /// <param name="basepath">Base path to use</param>
        private void SetDatItemInfo(DatItem datItem, string item, string parent, string basepath)
        {
            // Get the data to be added as game and item names
            string machineName, itemName;

            // If the parent is blank, then we have a non-archive file
            if (string.IsNullOrWhiteSpace(parent))
            {
                // If we have a SuperDAT, we want anything that's not the base path as the game, and the file as the rom
                if (Header.Type == "SuperDAT")
                {
                    machineName = Path.GetDirectoryName(item.Remove(0, basepath.Length));
                    itemName = Path.GetFileName(item);
                }

                // Otherwise, we want just the top level folder as the game, and the file as everything else
                else
                {
                    machineName = item.Remove(0, basepath.Length).Split(Path.DirectorySeparatorChar)[0];
                    itemName = item.Remove(0, Path.Combine(basepath, machineName).Length);
                }
            }

            // Otherwise, we assume that we have an archive
            else
            {
                // If we have a SuperDAT, we want the archive name as the game, and the file as everything else (?)
                if (Header.Type == "SuperDAT")
                {
                    machineName = parent;
                    itemName = datItem.GetName();
                }

                // Otherwise, we want the archive name as the game, and the file as everything else
                else
                {
                    machineName = parent;
                    itemName = datItem.GetName();
                }
            }

            // Sanitize the names
            machineName = machineName.Trim(Path.DirectorySeparatorChar);
            itemName = itemName?.Trim(Path.DirectorySeparatorChar) ?? string.Empty;

            if (!string.IsNullOrWhiteSpace(machineName) && string.IsNullOrWhiteSpace(itemName))
            {
                itemName = machineName;
                machineName = "Default";
            }

            // Update machine information
            datItem.Machine.Name = machineName;
            datItem.Machine.Description = machineName;

            // If we have a Disk, then the ".chd" extension needs to be removed
            if (datItem.ItemType == ItemType.Disk && itemName.EndsWith(".chd"))
            {
                itemName = itemName.Substring(0, itemName.Length - 4);
            }

            // If we have a Media, then the extension needs to be removed
            else if (datItem.ItemType == ItemType.Media)
            {
                if (itemName.EndsWith(".dicf"))
                    itemName = itemName.Substring(0, itemName.Length - 5);
                else if (itemName.EndsWith(".aaru"))
                    itemName = itemName.Substring(0, itemName.Length - 5);
                else if (itemName.EndsWith(".aaruformat"))
                    itemName = itemName.Substring(0, itemName.Length - 11);
                else if (itemName.EndsWith(".aaruf"))
                    itemName = itemName.Substring(0, itemName.Length - 6);
                else if (itemName.EndsWith(".aif"))
                    itemName = itemName.Substring(0, itemName.Length - 4);
            }

            // Set the item name back
            datItem.SetFields(new Dictionary<Field, string> { [Field.DatItem_Name] = itemName });
        }
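
        // Illustrative sketch (hypothetical paths): with basepath "C:\roms\" and a loose file
        // "C:\roms\pack\sub\a.bin", a SuperDAT maps machineName = "pack\sub" and itemName =
        // "a.bin", while a regular DAT maps machineName = "pack" and itemName = "sub\a.bin".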
        #endregion

        #region Rebuilding and Verifying
        /// <summary>
        /// Process the DAT and find all matches in input files and folders assuming they're a depot
        /// </summary>
        /// <param name="inputs">List of input files/folders to check</param>
        /// <param name="outDir">Output directory to use to build to</param>
        /// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
        /// <param name="delete">True if input files should be deleted, false otherwise</param>
        /// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
        /// <param name="outputFormat">Output format that files should be written to</param>
        /// <returns>True if rebuilding was a success, false otherwise</returns>
        public bool RebuildDepot(
            List<string> inputs,
            string outDir,
            bool date = false,
            bool delete = false,
            bool inverse = false,
            OutputFormat outputFormat = OutputFormat.Folder)
        {
            #region Perform setup

            // If the DAT is not populated and inverse is not set, inform the user and quit
            if (Items.TotalCount == 0 && !inverse)
            {
                Globals.Logger.User("No entries were found to rebuild, exiting...");
                return false;
            }

            // Check that the output directory exists
            outDir = DirectoryExtensions.Ensure(outDir, create: true);

            // Now we want to get the forcepacking flag if it's not overridden
            if (outputFormat == OutputFormat.Folder && Header.ForcePacking != PackingFlag.None)
                outputFormat = Header.ForcePacking.AsOutputFormat();

            // Preload the Skipper list
            Transform.Init();

            #endregion

            bool success = true;

            #region Rebuild from depots in order

            string format = outputFormat.FromOutputFormat() ?? string.Empty;
            InternalStopwatch watch = new InternalStopwatch($"Rebuilding all files to {format}");

            // Now loop through and get only directories from the input paths
            List<string> directories = new List<string>();
            Parallel.ForEach(inputs, Globals.ParallelOptions, input =>
            {
                // Add to the list if the input is a directory
                if (Directory.Exists(input))
                {
                    Globals.Logger.Verbose($"Adding depot: {input}");
                    lock (directories)
                    {
                        directories.Add(input);
                    }
                }
            });

            // If we don't have any directories, we want to exit
            if (directories.Count == 0)
                return success;

            // Now that we have a list of depots, we want to bucket the input DAT by SHA-1
            Items.BucketBy(Field.DatItem_SHA1, DedupeType.None);

            // Then we want to loop through each of the hashes and see if we can rebuild
            var keys = Items.SortedKeys.ToList();
            foreach (string hash in keys)
            {
                // Pre-empt any issues that could arise from string length
                if (hash.Length != Constants.SHA1Length)
                    continue;

                Globals.Logger.User($"Checking hash '{hash}'");

                // Get the extension path for the hash
                string subpath = PathExtensions.GetDepotPath(hash, Header.InputDepot.Depth);

                // Find the first depot that includes the hash
                string foundpath = null;
                foreach (string directory in directories)
                {
                    if (File.Exists(Path.Combine(directory, subpath)))
                    {
                        foundpath = Path.Combine(directory, subpath);
                        break;
                    }
                }

                // If we didn't find a path, then we continue
                if (foundpath == null)
                    continue;

                // If we have a path, we want to try to get the rom information
                GZipArchive archive = new GZipArchive(foundpath);
                BaseFile fileinfo = archive.GetTorrentGZFileInfo();

                // If the file information is null, then we continue
                if (fileinfo == null)
                    continue;

                // Ensure we are sorted correctly (some other calls can change this)
                Items.BucketBy(Field.DatItem_SHA1, DedupeType.None);

                // If there are no items in the hash, we continue
                if (Items[hash] == null || Items[hash].Count == 0)
                    continue;

                // Otherwise, we rebuild that file to all locations that we need to
                bool usedInternally;
                if (Items[hash][0].ItemType == ItemType.Disk)
                    usedInternally = RebuildIndividualFile(new Disk(fileinfo), foundpath, outDir, date, inverse, outputFormat, false /* isZip */);
                else if (Items[hash][0].ItemType == ItemType.Media)
                    usedInternally = RebuildIndividualFile(new Media(fileinfo), foundpath, outDir, date, inverse, outputFormat, false /* isZip */);
                else
                    usedInternally = RebuildIndividualFile(new Rom(fileinfo), foundpath, outDir, date, inverse, outputFormat, false /* isZip */);

                // If we are supposed to delete the depot file, do so
                if (delete && usedInternally)
                    FileExtensions.TryDelete(foundpath);
            }

            watch.Stop();

            #endregion

            return success;
        }
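
        // Illustrative usage (hypothetical paths): rebuilding from one or more depot roots
        // into Romba-style torrent-gzip output might look like:
        //
        //     datFile.RebuildDepot(new List<string> { @"D:\depot" }, @"D:\out",
        //         outputFormat: OutputFormat.TorrentGzipRomba);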
        /// <summary>
        /// Process the DAT and find all matches in input files and folders
        /// </summary>
        /// <param name="inputs">List of input files/folders to check</param>
        /// <param name="outDir">Output directory to use to build to</param>
        /// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
        /// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
        /// <param name="delete">True if input files should be deleted, false otherwise</param>
        /// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
        /// <param name="outputFormat">Output format that files should be written to</param>
        /// <param name="asFiles">TreatAsFile flags representing special format scanning</param>
        /// <returns>True if rebuilding was a success, false otherwise</returns>
        public bool RebuildGeneric(
            List<string> inputs,
            string outDir,
            bool quickScan = false,
            bool date = false,
            bool delete = false,
            bool inverse = false,
            OutputFormat outputFormat = OutputFormat.Folder,
            TreatAsFile asFiles = 0x00)
        {
            #region Perform setup

            // If the DAT is not populated and inverse is not set, inform the user and quit
            if (Items.TotalCount == 0 && !inverse)
            {
                Globals.Logger.User("No entries were found to rebuild, exiting...");
                return false;
            }

            // Check that the output directory exists
            if (!Directory.Exists(outDir))
            {
                Directory.CreateDirectory(outDir);
                outDir = Path.GetFullPath(outDir);
            }

            // Now we want to get the forcepacking flag if it's not overridden
            if (outputFormat == OutputFormat.Folder && Header.ForcePacking != PackingFlag.None)
                outputFormat = Header.ForcePacking.AsOutputFormat();

            // Preload the Skipper list
            Transform.Init();

            #endregion

            bool success = true;

            #region Rebuild from sources in order

            string format = outputFormat.FromOutputFormat() ?? string.Empty;
            InternalStopwatch watch = new InternalStopwatch($"Rebuilding all files to {format}");

            // Now loop through all of the files in all of the inputs
            foreach (string input in inputs)
            {
                // If the input is a file
                if (File.Exists(input))
                {
                    Globals.Logger.User($"Checking file: {input}");
                    bool rebuilt = RebuildGenericHelper(input, outDir, quickScan, date, inverse, outputFormat, asFiles);

                    // If we are supposed to delete the file, do so
                    if (delete && rebuilt)
                        FileExtensions.TryDelete(input);
                }

                // If the input is a directory
                else if (Directory.Exists(input))
                {
                    Globals.Logger.Verbose($"Checking directory: {input}");
                    foreach (string file in Directory.EnumerateFiles(input, "*", SearchOption.AllDirectories))
                    {
                        Globals.Logger.User($"Checking file: {file}");
                        bool rebuilt = RebuildGenericHelper(file, outDir, quickScan, date, inverse, outputFormat, asFiles);

                        // If we are supposed to delete the file, do so
                        // (note: deleting `file`, not `input` -- the original deleted the parent path here)
                        if (delete && rebuilt)
                            FileExtensions.TryDelete(file);
                    }
                }
            }

            watch.Stop();

            #endregion

            return success;
        }
        /// <summary>
        /// Attempt to add a file to the output if it matches
        /// </summary>
        /// <param name="file">Name of the file to process</param>
        /// <param name="outDir">Output directory to use to build to</param>
        /// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
        /// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
        /// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
        /// <param name="outputFormat">Output format that files should be written to</param>
        /// <param name="asFiles">TreatAsFile flags representing special format scanning</param>
        /// <returns>True if the file was used to rebuild, false otherwise</returns>
        private bool RebuildGenericHelper(
            string file,
            string outDir,
            bool quickScan,
            bool date,
            bool inverse,
            OutputFormat outputFormat,
            TreatAsFile asFiles)
        {
            // If we somehow have a null filename, return
            if (file == null)
                return false;

            // Set the deletion variables
            bool usedExternally = false, usedInternally = false;

            // Create an empty list of BaseFile for archive entries
            List<BaseFile> entries = null;

            // Get the TGZ and TXZ status for later
            GZipArchive tgz = new GZipArchive(file);
            XZArchive txz = new XZArchive(file);
            bool isSingleTorrent = tgz.IsTorrent() || txz.IsTorrent();

            // Get the base archive first
            BaseArchive archive = BaseArchive.Create(file);

            // Now get all extracted items from the archive
            if (archive != null)
            {
                archive.AvailableHashes = quickScan ? Hash.CRC : Hash.Standard;
                entries = archive.GetChildren();
            }

            // If the entries list is null, we encountered an error or have a file and should scan externally
            if (entries == null && File.Exists(file))
            {
                BaseFile internalFileInfo = FileExtensions.GetInfo(file, asFiles: asFiles);

                // Create the correct DatItem
                DatItem internalDatItem;
                if (internalFileInfo.Type == FileType.AaruFormat && !asFiles.HasFlag(TreatAsFile.AaruFormat))
                    internalDatItem = new Media(internalFileInfo);
                else if (internalFileInfo.Type == FileType.CHD && !asFiles.HasFlag(TreatAsFile.CHD))
                    internalDatItem = new Disk(internalFileInfo);
                else
                    internalDatItem = new Rom(internalFileInfo);

                usedExternally = RebuildIndividualFile(internalDatItem, file, outDir, date, inverse, outputFormat);
            }

            // Otherwise, loop through the entries and try to match
            else
            {
                foreach (BaseFile entry in entries)
                {
                    DatItem internalDatItem = DatItem.Create(entry);
                    usedInternally |= RebuildIndividualFile(internalDatItem, file, outDir, date, inverse, outputFormat, !isSingleTorrent /* isZip */);
                }
            }

            return usedExternally || usedInternally;
        }
/// <summary>
/// Find duplicates and rebuild individual files to output
/// </summary>
/// <param name="datItem">Information for the current file to rebuild from</param>
/// <param name="file">Name of the file to process</param>
/// <param name="outDir">Output directory to use to build to</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
/// <param name="outputFormat">Output format that files should be written to</param>
2020-09-18 16:12:29 -07:00
/// <param name="isZip">True if the input file is an archive, false if the file is TGZ/TXZ, null otherwise</param>
2019-01-08 11:49:31 -08:00
/// <returns>True if the file was able to be rebuilt, false otherwise</returns>
2020-06-10 22:37:19 -07:00
private bool RebuildIndividualFile (
DatItem datItem ,
string file ,
string outDir ,
bool date ,
bool inverse ,
OutputFormat outputFormat ,
2020-09-18 16:12:29 -07:00
bool? isZip = null )
2019-01-08 11:49:31 -08:00
        {
            // Set the initial output value
            bool rebuilt = false;

            // If the DatItem is a Disk or Media, force rebuilding to a folder except if TGZ or TXZ
            if ((datItem.ItemType == ItemType.Disk || datItem.ItemType == ItemType.Media)
                && !(outputFormat == OutputFormat.TorrentGzip || outputFormat == OutputFormat.TorrentGzipRomba)
                && !(outputFormat == OutputFormat.TorrentXZ || outputFormat == OutputFormat.TorrentXZRomba))
            {
                outputFormat = OutputFormat.Folder;
            }

            // If we have a Disk or Media, change it into a Rom for later use
            if (datItem.ItemType == ItemType.Disk)
                datItem = (datItem as Disk).ConvertToRom();
            else if (datItem.ItemType == ItemType.Media)
                datItem = (datItem as Media).ConvertToRom();

            // Prepopulate a key string
            string crc = (datItem as Rom).CRC ?? string.Empty;

            // Try to get the stream for the file
            if (!GetFileStream(datItem, file, isZip, out Stream fileStream))
                return false;

            // If either we have duplicates or we're filtering
            if (ShouldRebuild(datItem, fileStream, inverse, out List<DatItem> dupes))
            {
                // If we have a very specific TGZ->TGZ case, just copy it accordingly
                if (RebuildTorrentGzip(datItem, file, outDir, outputFormat, isZip))
                    return true;

                // If we have a very specific TXZ->TXZ case, just copy it accordingly
                if (RebuildTorrentXz(datItem, file, outDir, outputFormat, isZip))
                    return true;

                Globals.Logger.User($"{(inverse ? "No matches" : "Matches")} found for '{Path.GetFileName(datItem.GetName() ?? datItem.ItemType.ToString())}', rebuilding accordingly...");
                rebuilt = true;

                // Special case for partial packing mode
                bool shouldCheck = false;
                if (outputFormat == OutputFormat.Folder && Header.ForcePacking == PackingFlag.Partial)
                {
                    shouldCheck = true;
                    Items.BucketBy(Field.Machine_Name, DedupeType.None, lower: false);
                }

                // Now loop through the list and rebuild accordingly
                foreach (DatItem item in dupes)
                {
                    // If we should check for the items in the machine
                    if (shouldCheck && Items[item.Machine.Name].Count > 1)
                        outputFormat = OutputFormat.Folder;
                    else if (shouldCheck && Items[item.Machine.Name].Count == 1)
                        outputFormat = OutputFormat.ParentFolder;

                    // Get the output archive, if possible
                    Folder outputArchive = GetPreconfiguredFolder(date, outputFormat);

                    // Now rebuild to the output file
                    outputArchive.Write(fileStream, outDir, item as Rom);
                }
            }

            // Now we want to take care of headers, if applicable
            if (Header.HeaderSkipper != null)
            {
                // Check to see if we have a matching header first
                SkipperRule rule = Transform.GetMatchingRule(fileStream, Path.GetFileNameWithoutExtension(Header.HeaderSkipper));

                // If there's a match, create the new file to write
                if (rule.Tests != null && rule.Tests.Count != 0)
                {
                    // If the file could be transformed correctly
                    MemoryStream transformStream = new MemoryStream();
                    if (rule.TransformStream(fileStream, transformStream, keepReadOpen: true, keepWriteOpen: true))
                    {
                        // Get the file information that we will be using
                        Rom headerless = new Rom(transformStream.GetInfo(keepReadOpen: true));

                        // If we have duplicates and we're not filtering
                        if (ShouldRebuild(headerless, transformStream, false, out dupes))
                        {
                            Globals.Logger.User($"Headerless matches found for '{Path.GetFileName(datItem.GetName() ?? datItem.ItemType.ToString())}', rebuilding accordingly...");
                            rebuilt = true;

                            // Now loop through the list and rebuild accordingly
                            foreach (DatItem item in dupes)
                            {
                                // Create a headered item to use as well
                                datItem.CopyMachineInformation(item);
                                datItem.SetFields(new Dictionary<Field, string> { [Field.DatItem_Name] = $"{datItem.GetName()}_{crc}" });

                                // Get the output archive, if possible
                                Folder outputArchive = GetPreconfiguredFolder(date, outputFormat);

                                // Write out both the headerless and headered versions
                                bool eitherSuccess = false;
                                eitherSuccess |= outputArchive.Write(transformStream, outDir, item as Rom);
                                eitherSuccess |= outputArchive.Write(fileStream, outDir, datItem as Rom);

                                // Now add the success of either rebuild
                                rebuilt &= eitherSuccess;
                            }
                        }
                    }

                    // Dispose of the transformed stream
                    transformStream?.Dispose();
                }
            }

            // Dispose of the input stream only once all rebuilding is done,
            // since the header skipper block above still reads from it
            fileStream?.Dispose();

            return rebuilt;
        }

        /// <summary>
        /// Get the rebuild state for a given item
        /// </summary>
        /// <param name="datItem">Information for the current file to rebuild from</param>
        /// <param name="stream">Stream representing the input file</param>
        /// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
        /// <param name="dupes">Output list of duplicate items to rebuild to</param>
        /// <returns>True if the item should be rebuilt, false otherwise</returns>
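        /// <example>
        /// A minimal usage sketch (file name assumed, not part of this class):
        /// <code>
        /// using (Stream input = File.OpenRead("scan.bin"))
        /// {
        ///     // Hash the input and ask whether the DAT wants it rebuilt
        ///     Rom candidate = new Rom(input.GetInfo(keepReadOpen: true));
        ///     if (ShouldRebuild(candidate, input, inverse: false, out List&lt;DatItem&gt; dupes))
        ///         Globals.Logger.User($"{dupes.Count} duplicate(s) to rebuild");
        /// }
        /// </code>
        /// </example>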
        private bool ShouldRebuild(DatItem datItem, Stream stream, bool inverse, out List<DatItem> dupes)
        {
            // Find if the file has duplicates in the DAT
            dupes = Items.GetDuplicates(datItem);
            bool hasDuplicates = dupes.Count > 0;

            // If we have duplicates but we're filtering
            if (hasDuplicates && inverse)
            {
                return false;
            }

            // If we have duplicates without filtering
            else if (hasDuplicates && !inverse)
            {
                return true;
            }

            // If we have no duplicates and we're filtering
            else if (!hasDuplicates && inverse)
            {
                // Get the item from the current file, naming the machine after the file itself
                Rom item = new Rom(stream.GetInfo(keepReadOpen: true));
                item.Machine.Name = Path.GetFileNameWithoutExtension(item.Name);
                item.Machine.Description = Path.GetFileNameWithoutExtension(item.Name);

                dupes.Add(item);
                return true;
            }

            // If we have no duplicates and we're not filtering
            else
            {
                return false;
            }
        }

        /// <summary>
        /// Rebuild from TorrentGzip to TorrentGzip
        /// </summary>
        /// <param name="datItem">Information for the current file to rebuild from</param>
        /// <param name="file">Name of the file to process</param>
        /// <param name="outDir">Output directory to use to build to</param>
        /// <param name="outputFormat">Output format that files should be written to</param>
        /// <param name="isZip">True if the input file is an archive, false if the file is TGZ, null otherwise</param>
        /// <returns>True if rebuilt properly, false otherwise</returns>
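        /// <example>
        /// A sketch of the Romba-style depot path this copies to; the exact nesting
        /// comes from Header.OutputDepot.Depth, so the layout below is illustrative only:
        /// <code>
        /// // SHA-1 "abcd1234..." at depth 4 would land at roughly:
        /// //   outDir/ab/cd/12/34/abcd1234....gz
        /// </code>
        /// </example>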
        private bool RebuildTorrentGzip(DatItem datItem, string file, string outDir, OutputFormat outputFormat, bool? isZip)
        {
            // If we have a very specific TGZ->TGZ case, just copy it accordingly
            GZipArchive tgz = new GZipArchive(file);
            BaseFile tgzRom = tgz.GetTorrentGZFileInfo();
            if (isZip == false && tgzRom != null && (outputFormat == OutputFormat.TorrentGzip || outputFormat == OutputFormat.TorrentGzipRomba))
            {
                Globals.Logger.User($"Matches found for '{Path.GetFileName(datItem.GetName() ?? string.Empty)}', rebuilding accordingly...");

                // Get the proper output path
                string sha1 = (datItem as Rom).SHA1 ?? string.Empty;
                if (outputFormat == OutputFormat.TorrentGzipRomba)
                    outDir = Path.Combine(outDir, PathExtensions.GetDepotPath(sha1, Header.OutputDepot.Depth));
                else
                    outDir = Path.Combine(outDir, sha1 + ".gz");

                // Make sure the output folder is created
                Directory.CreateDirectory(Path.GetDirectoryName(outDir));

                // Now copy the file over
                try
                {
                    File.Copy(file, outDir);
                    return true;
                }
                catch
                {
                    return false;
                }
            }

            return false;
        }

        /// <summary>
        /// Rebuild from TorrentXz to TorrentXz
        /// </summary>
        /// <param name="datItem">Information for the current file to rebuild from</param>
        /// <param name="file">Name of the file to process</param>
        /// <param name="outDir">Output directory to use to build to</param>
        /// <param name="outputFormat">Output format that files should be written to</param>
        /// <param name="isZip">True if the input file is an archive, false if the file is TXZ, null otherwise</param>
        /// <returns>True if rebuilt properly, false otherwise</returns>
        private bool RebuildTorrentXz(DatItem datItem, string file, string outDir, OutputFormat outputFormat, bool? isZip)
        {
            // If we have a very specific TXZ->TXZ case, just copy it accordingly
            XZArchive txz = new XZArchive(file);
            BaseFile txzRom = txz.GetTorrentXZFileInfo();
            if (isZip == false && txzRom != null && (outputFormat == OutputFormat.TorrentXZ || outputFormat == OutputFormat.TorrentXZRomba))
            {
                Globals.Logger.User($"Matches found for '{Path.GetFileName(datItem.GetName() ?? string.Empty)}', rebuilding accordingly...");

                // Get the proper output path
                string sha1 = (datItem as Rom).SHA1 ?? string.Empty;
                if (outputFormat == OutputFormat.TorrentXZRomba)
                    outDir = Path.Combine(outDir, PathExtensions.GetDepotPath(sha1, Header.OutputDepot.Depth)).Replace(".gz", ".xz");
                else
                    outDir = Path.Combine(outDir, sha1 + ".xz");

                // Make sure the output folder is created
                Directory.CreateDirectory(Path.GetDirectoryName(outDir));

                // Now copy the file over
                try
                {
                    File.Copy(file, outDir);
                    return true;
                }
                catch
                {
                    return false;
                }
            }

            return false;
        }

        /// <summary>
        /// Get the Stream related to a file
        /// </summary>
        /// <param name="datItem">Information for the current file to rebuild from</param>
        /// <param name="file">Name of the file to process</param>
        /// <param name="isZip">Non-null if the input file is an archive</param>
        /// <param name="stream">Output stream representing the opened file</param>
        /// <returns>True if the stream opening succeeded, false otherwise</returns>
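        /// <example>
        /// A minimal sketch (path assumed): open an archive entry as a seekable stream.
        /// <code>
        /// if (GetFileStream(datItem, "roms/game.zip", isZip: true, out Stream stream))
        /// {
        ///     // ... hash or rebuild from the stream, then clean up
        ///     stream.Dispose();
        /// }
        /// </code>
        /// </example>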
        private bool GetFileStream(DatItem datItem, string file, bool? isZip, out Stream stream)
        {
            // Get a generic stream for the file
            stream = null;

            // If we have a zipfile, extract the stream to memory
            if (isZip != null)
            {
                BaseArchive archive = BaseArchive.Create(file);
                if (archive != null)
                    (stream, _) = archive.CopyToStream(datItem.GetName() ?? datItem.ItemType.ToString());
            }
            // Otherwise, just open the filestream
            else
            {
                stream = FileExtensions.TryOpenRead(file);
            }

            // If the stream is null, then continue
            if (stream == null)
                return false;

            // Seek to the beginning of the stream
            if (stream.CanSeek)
                stream.Seek(0, SeekOrigin.Begin);

            return true;
        }

        /// <summary>
        /// Get preconfigured Folder for rebuilding
        /// </summary>
        /// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
        /// <param name="outputFormat">Output format that files should be written to</param>
        /// <returns>Folder configured with proper flags</returns>
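        /// <example>
        /// A minimal sketch: get a TorrentGzip writer with dates and depot depth applied.
        /// <code>
        /// Folder output = GetPreconfiguredFolder(true, OutputFormat.TorrentGzip);
        /// // output is a GZipArchive with UseDates and Depth already set
        /// </code>
        /// </example>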
        private Folder GetPreconfiguredFolder(bool date, OutputFormat outputFormat)
        {
            Folder outputArchive = Folder.Create(outputFormat);
            if (outputArchive is BaseArchive baseArchive && date)
                baseArchive.UseDates = date;

            // Set the depth fields where appropriate
            if (outputArchive is GZipArchive gzipArchive)
                gzipArchive.Depth = Header.OutputDepot.Depth;
            else if (outputArchive is XZArchive xzArchive)
                xzArchive.Depth = Header.OutputDepot.Depth;

            return outputArchive;
        }

        /// <summary>
        /// Verify a DatFile against a set of depots, leaving only missing files
        /// </summary>
        /// <param name="inputs">List of input directories to compare against</param>
        /// <returns>True if verification was a success, false otherwise</returns>
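        /// <example>
        /// A minimal sketch (depot paths assumed): verify against two depots, then
        /// write the remaining missing items as a fixdat.
        /// <code>
        /// if (datFile.VerifyDepot(new List&lt;string&gt; { "depot1", "depot2" }))
        ///     datFile.Write("fixdats");
        /// </code>
        /// </example>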
        public bool VerifyDepot(List<string> inputs)
        {
            bool success = true;

            InternalStopwatch watch = new InternalStopwatch("Verifying all from supplied depots");

            // Now loop through and get only directories from the input paths
            List<string> directories = new List<string>();
            foreach (string input in inputs)
            {
                // Add to the list if the input is a directory
                if (Directory.Exists(input))
                {
                    Globals.Logger.Verbose($"Adding depot: {input}");
                    directories.Add(input);
                }
            }

            // If we don't have any directories, we want to exit
            if (directories.Count == 0)
                return success;

            // Now that we have a list of depots, we want to bucket the input DAT by SHA-1
            Items.BucketBy(Field.DatItem_SHA1, DedupeType.None);

            // Then we want to loop through each of the hashes and see if we can rebuild
            var keys = Items.SortedKeys.ToList();
            foreach (string hash in keys)
            {
                // Pre-empt any issues that could arise from string length
                if (hash.Length != Constants.SHA1Length)
                    continue;

                Globals.Logger.User($"Checking hash '{hash}'");

                // Get the extension path for the hash
                string subpath = PathExtensions.GetDepotPath(hash, Header.InputDepot.Depth);

                // Find the first depot that includes the hash
                string foundpath = null;
                foreach (string directory in directories)
                {
                    if (File.Exists(Path.Combine(directory, subpath)))
                    {
                        foundpath = Path.Combine(directory, subpath);
                        break;
                    }
                }

                // If we didn't find a path, then we continue
                if (foundpath == null)
                    continue;

                // If we have a path, we want to try to get the rom information
                GZipArchive tgz = new GZipArchive(foundpath);
                BaseFile fileinfo = tgz.GetTorrentGZFileInfo();

                // If the file information is null, then we continue
                if (fileinfo == null)
                    continue;

                // Now we want to remove all duplicates from the DAT
                Items.GetDuplicates(new Rom(fileinfo))
                    .AddRange(Items.GetDuplicates(new Disk(fileinfo)));
            }

            watch.Stop();

            // Set fixdat headers in case of writing out
            Header.FileName = $"fixDAT_{Header.FileName}";
            Header.Name = $"fixDAT_{Header.Name}";
            Header.Description = $"fixDAT_{Header.Description}";

            Items.ClearMarked();

            return success;
        }

        /// <summary>
        /// Verify a DatFile against a set of inputs, leaving only missing files
        /// </summary>
        /// <param name="hashOnly">True if only hashes should be checked, false for full file information</param>
        /// <returns>True if verification was a success, false otherwise</returns>
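        /// <example>
        /// A minimal sketch: mark everything matched by the scanned inputs, then
        /// write what is left (the missing items) as a fixdat.
        /// <code>
        /// if (datFile.VerifyGeneric(hashOnly: true))
        ///     datFile.Write("fixdats");
        /// </code>
        /// </example>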
        public bool VerifyGeneric(bool hashOnly)
        {
            bool success = true;

            // Force bucketing according to the flags
            Items.SetBucketedBy(Field.NULL);
            if (hashOnly)
                Items.BucketBy(Field.DatItem_CRC, DedupeType.Full);
            else
                Items.BucketBy(Field.Machine_Name, DedupeType.Full);

            // Then mark items for removal
            var keys = Items.SortedKeys.ToList();
            foreach (string key in keys)
            {
                List<DatItem> items = Items[key];
                for (int i = 0; i < items.Count; i++)
                {
                    // Unmatched items will have a source ID of int.MaxValue, remove all others
                    if (items[i].Source.Index != int.MaxValue)
                        items[i].Remove = true;
                }

                // Set the list back, just in case
                Items[key] = items;
            }

            // Set fixdat headers in case of writing out
            Header.FileName = $"fixDAT_{Header.FileName}";
            Header.Name = $"fixDAT_{Header.Name}";
            Header.Description = $"fixDAT_{Header.Description}";

            Items.ClearMarked();

            return success;
        }

        #endregion

        // TODO: Implement Level split
        #region Splitting

        /// <summary>
        /// Split a DAT by input extensions
        /// </summary>
        /// <param name="extA">List of extensions to split on (first DAT)</param>
        /// <param name="extB">List of extensions to split on (second DAT)</param>
        /// <returns>Extension Set A and Extension Set B DatFiles</returns>
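        /// <example>
        /// A minimal sketch: split a DAT into (.bin) and (.cue) halves.
        /// <code>
        /// (DatFile bins, DatFile cues) = datFile.SplitByExtension(
        ///     new List&lt;string&gt; { "bin" },
        ///     new List&lt;string&gt; { "cue" });
        /// </code>
        /// </example>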
        public (DatFile extADat, DatFile extBDat) SplitByExtension(List<string> extA, List<string> extB)
        {
            // If the DAT is empty, return null DatFiles
            if (Items.TotalCount == 0)
                return (null, null);

            // Make sure all of the extensions don't have a dot at the beginning
            var newExtA = extA.Select(s => s.TrimStart('.').ToLowerInvariant());
            string newExtAString = string.Join(",", newExtA);

            var newExtB = extB.Select(s => s.TrimStart('.').ToLowerInvariant());
            string newExtBString = string.Join(",", newExtB);

            // Set all of the appropriate outputs for each of the subsets
            DatFile extADat = Create(Header.CloneStandard());
            extADat.Header.FileName += $" ({newExtAString})";
            extADat.Header.Name += $" ({newExtAString})";
            extADat.Header.Description += $" ({newExtAString})";

            DatFile extBDat = Create(Header.CloneStandard());
            extBDat.Header.FileName += $" ({newExtBString})";
            extBDat.Header.Name += $" ({newExtBString})";
            extBDat.Header.Description += $" ({newExtBString})";

            // Now separate the roms accordingly
            Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
            {
                List<DatItem> items = Items[key];
                foreach (DatItem item in items)
                {
                    if (newExtA.Contains(PathExtensions.GetNormalizedExtension(item.GetName() ?? string.Empty)))
                    {
                        extADat.Items.Add(key, item);
                    }
                    else if (newExtB.Contains(PathExtensions.GetNormalizedExtension(item.GetName() ?? string.Empty)))
                    {
                        extBDat.Items.Add(key, item);
                    }
                    else
                    {
                        extADat.Items.Add(key, item);
                        extBDat.Items.Add(key, item);
                    }
                }
            });

            // Then return both DatFiles
            return (extADat, extBDat);
        }

        /// <summary>
        /// Split a DAT by best available hashes
        /// </summary>
        /// <returns>Dictionary of Field to DatFile mappings</returns>
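        /// <example>
        /// A minimal sketch: split by best hash and write each non-empty piece out.
        /// <code>
        /// foreach (var kvp in datFile.SplitByHash())
        /// {
        ///     if (kvp.Value.Items.TotalCount &gt; 0)
        ///         kvp.Value.Write("split");
        /// }
        /// </code>
        /// </example>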
        public Dictionary<Field, DatFile> SplitByHash()
        {
            // Create each of the respective output DATs
            Globals.Logger.User("Creating and populating new DATs");

            // Create the set of field-to-dat mappings
            Dictionary<Field, DatFile> fieldDats = new Dictionary<Field, DatFile>();

            // TODO: Can this be made into a loop?
            fieldDats[Field.DatItem_Status] = Create(Header.CloneStandard());
            fieldDats[Field.DatItem_Status].Header.FileName += " (Nodump)";
            fieldDats[Field.DatItem_Status].Header.Name += " (Nodump)";
            fieldDats[Field.DatItem_Status].Header.Description += " (Nodump)";

            fieldDats[Field.DatItem_SHA512] = Create(Header.CloneStandard());
            fieldDats[Field.DatItem_SHA512].Header.FileName += " (SHA-512)";
            fieldDats[Field.DatItem_SHA512].Header.Name += " (SHA-512)";
            fieldDats[Field.DatItem_SHA512].Header.Description += " (SHA-512)";

            fieldDats[Field.DatItem_SHA384] = Create(Header.CloneStandard());
            fieldDats[Field.DatItem_SHA384].Header.FileName += " (SHA-384)";
            fieldDats[Field.DatItem_SHA384].Header.Name += " (SHA-384)";
            fieldDats[Field.DatItem_SHA384].Header.Description += " (SHA-384)";

            fieldDats[Field.DatItem_SHA256] = Create(Header.CloneStandard());
            fieldDats[Field.DatItem_SHA256].Header.FileName += " (SHA-256)";
            fieldDats[Field.DatItem_SHA256].Header.Name += " (SHA-256)";
            fieldDats[Field.DatItem_SHA256].Header.Description += " (SHA-256)";

            fieldDats[Field.DatItem_SHA1] = Create(Header.CloneStandard());
            fieldDats[Field.DatItem_SHA1].Header.FileName += " (SHA-1)";
            fieldDats[Field.DatItem_SHA1].Header.Name += " (SHA-1)";
            fieldDats[Field.DatItem_SHA1].Header.Description += " (SHA-1)";

#if NET_FRAMEWORK
            fieldDats[Field.DatItem_RIPEMD160] = Create(Header.CloneStandard());
            fieldDats[Field.DatItem_RIPEMD160].Header.FileName += " (RIPEMD160)";
            fieldDats[Field.DatItem_RIPEMD160].Header.Name += " (RIPEMD160)";
            fieldDats[Field.DatItem_RIPEMD160].Header.Description += " (RIPEMD160)";
#endif

            fieldDats[Field.DatItem_MD5] = Create(Header.CloneStandard());
            fieldDats[Field.DatItem_MD5].Header.FileName += " (MD5)";
            fieldDats[Field.DatItem_MD5].Header.Name += " (MD5)";
            fieldDats[Field.DatItem_MD5].Header.Description += " (MD5)";

            fieldDats[Field.DatItem_CRC] = Create(Header.CloneStandard());
            fieldDats[Field.DatItem_CRC].Header.FileName += " (CRC)";
            fieldDats[Field.DatItem_CRC].Header.Name += " (CRC)";
            fieldDats[Field.DatItem_CRC].Header.Description += " (CRC)";

            fieldDats[Field.NULL] = Create(Header.CloneStandard());
            fieldDats[Field.NULL].Header.FileName += " (Other)";
            fieldDats[Field.NULL].Header.Name += " (Other)";
            fieldDats[Field.NULL].Header.Description += " (Other)";

            // Now populate each of the DAT objects in turn
            Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
            {
                List<DatItem> items = Items[key];
                foreach (DatItem item in items)
                {
                    // If the file is not a Disk, Media, or Rom, continue
                    if (item.ItemType != ItemType.Disk && item.ItemType != ItemType.Media && item.ItemType != ItemType.Rom)
                        return;

                    // If the file is a nodump
                    if ((item.ItemType == ItemType.Rom && (item as Rom).ItemStatus == ItemStatus.Nodump)
                        || (item.ItemType == ItemType.Disk && (item as Disk).ItemStatus == ItemStatus.Nodump))
                    {
                        fieldDats[Field.DatItem_Status].Items.Add(key, item);
                    }

                    // If the file has a SHA-512
                    else if ((item.ItemType == ItemType.Rom && !string.IsNullOrWhiteSpace((item as Rom).SHA512)))
                    {
                        fieldDats[Field.DatItem_SHA512].Items.Add(key, item);
                    }

                    // If the file has a SHA-384
                    else if ((item.ItemType == ItemType.Rom && !string.IsNullOrWhiteSpace((item as Rom).SHA384)))
                    {
                        fieldDats[Field.DatItem_SHA384].Items.Add(key, item);
                    }

                    // If the file has a SHA-256
                    else if ((item.ItemType == ItemType.Media && !string.IsNullOrWhiteSpace((item as Media).SHA256))
                        || (item.ItemType == ItemType.Rom && !string.IsNullOrWhiteSpace((item as Rom).SHA256)))
                    {
                        fieldDats[Field.DatItem_SHA256].Items.Add(key, item);
                    }

                    // If the file has a SHA-1
                    else if ((item.ItemType == ItemType.Disk && !string.IsNullOrWhiteSpace((item as Disk).SHA1))
                        || (item.ItemType == ItemType.Media && !string.IsNullOrWhiteSpace((item as Media).SHA1))
                        || (item.ItemType == ItemType.Rom && !string.IsNullOrWhiteSpace((item as Rom).SHA1)))
                    {
                        fieldDats[Field.DatItem_SHA1].Items.Add(key, item);
                    }

#if NET_FRAMEWORK
                    // If the file has a RIPEMD160
                    else if ((item.ItemType == ItemType.Rom && !string.IsNullOrWhiteSpace((item as Rom).RIPEMD160)))
                    {
                        fieldDats[Field.DatItem_RIPEMD160].Items.Add(key, item);
                    }
#endif

                    // If the file has an MD5
                    else if ((item.ItemType == ItemType.Disk && !string.IsNullOrWhiteSpace((item as Disk).MD5))
                        || (item.ItemType == ItemType.Media && !string.IsNullOrWhiteSpace((item as Media).MD5))
                        || (item.ItemType == ItemType.Rom && !string.IsNullOrWhiteSpace((item as Rom).MD5)))
                    {
                        fieldDats[Field.DatItem_MD5].Items.Add(key, item);
                    }

                    // If the file has a CRC
                    else if ((item.ItemType == ItemType.Rom && !string.IsNullOrWhiteSpace((item as Rom).CRC)))
                    {
                        fieldDats[Field.DatItem_CRC].Items.Add(key, item);
                    }

                    else
                    {
                        fieldDats[Field.NULL].Items.Add(key, item);
                    }
                }
            });

            return fieldDats;
        }

        /// <summary>
        /// Split a SuperDAT by lowest available directory level
        /// </summary>
        /// <param name="outDir">Name of the directory to write the DATs out to</param>
        /// <param name="shortname">True if short names should be used, false otherwise</param>
        /// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise</param>
        /// <returns>True if split succeeded, false otherwise</returns>
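        /// <example>
        /// A minimal sketch (variable name assumed): split a SuperDAT into
        /// per-directory DATs under "split".
        /// <code>
        /// superDat.SplitByLevel("split", shortname: true, basedat: false);
        /// </code>
        /// </example>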
        public bool SplitByLevel(string outDir, bool shortname, bool basedat)
        {
            // First, bucket by games so that we can do the right thing
            Items.BucketBy(Field.Machine_Name, DedupeType.None, lower: false, norename: true);

            // Create a temporary DAT to add things to
            DatFile tempDat = Create(Header);
            tempDat.Header.Name = null;

            // Sort the input keys
            List<string> keys = Items.Keys.ToList();
            keys.Sort(SplitByLevelSort);

            // Then, we loop over the games
            Parallel.ForEach(keys, Globals.ParallelOptions, key =>
            {
                // Here, the key is the name of the game to be used for comparison
                if (tempDat.Header.Name != null && tempDat.Header.Name != Path.GetDirectoryName(key))
                {
                    // Process and write out the current DAT before moving on
                    SplitByLevelHelper(tempDat, outDir, shortname, basedat);

                    // Reset the DAT for the next items
                    tempDat = Create(Header);
                    tempDat.Header.Name = null;
                }

                // Clean the input list and set all games to be pathless
                List<DatItem> items = Items[key];
                items.ForEach(item => item.Machine.Name = Path.GetFileName(item.Machine.Name));
                items.ForEach(item => item.Machine.Description = Path.GetFileName(item.Machine.Description));

                // Now add the game to the output DAT
                tempDat.Items.AddRange(key, items);

                // Then set the DAT name to be the parent directory name
                tempDat.Header.Name = Path.GetDirectoryName(key);
            });

            return true;
        }

        /// <summary>
        /// Helper function for SplitByLevel to sort the input game names
        /// </summary>
        /// <param name="a">First string to compare</param>
        /// <param name="b">Second string to compare</param>
        /// <returns>-1 for a coming before b, 0 for a == b, 1 for a coming after b</returns>
        private int SplitByLevelSort(string a, string b)
        {
            NaturalComparer nc = new NaturalComparer();
            int adeep = a.Count(c => c == '/' || c == '\\');
            int bdeep = b.Count(c => c == '/' || c == '\\');

            if (adeep == bdeep)
                return nc.Compare(a, b);

            return adeep - bdeep;
        }

        /// <summary>
        /// Helper function for SplitByLevel to clean and write out a DAT
        /// </summary>
        /// <param name="newDatFile">DAT to clean and write out</param>
        /// <param name="outDir">Directory to write out to</param>
        /// <param name="shortname">True if short naming scheme should be used, false otherwise</param>
        /// <param name="restore">True if original filenames should be used as the base for output filename, false otherwise</param>
        private void SplitByLevelHelper(DatFile newDatFile, string outDir, bool shortname, bool restore)
        {
            // Get the name from the DAT to use separately
            string name = newDatFile.Header.Name;
            string expName = name.Replace("/", " - ").Replace("\\", " - ");

            // Now set the new output values
            newDatFile.Header.FileName = WebUtility.HtmlDecode(string.IsNullOrWhiteSpace(name)
                ? Header.FileName
                : (shortname
                    ? Path.GetFileName(name)
                    : expName
                    )
                );
            newDatFile.Header.FileName = (restore ? $"{Header.FileName} ({newDatFile.Header.FileName})" : newDatFile.Header.FileName);
            newDatFile.Header.Name = $"{Header.Name} ({expName})";
            newDatFile.Header.Description = (string.IsNullOrWhiteSpace(Header.Description) ? newDatFile.Header.Name : $"{Header.Description} ({expName})");
            newDatFile.Header.Type = null;

            // Write out the temporary DAT to the proper directory
            newDatFile.Write(outDir);
        }

        /// <summary>
        /// Split a DAT by size of Rom
        /// </summary>
        /// <param name="radix">Long value representing the split point</param>
        /// <returns>Less Than and Greater Than DatFiles</returns>
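        /// <example>
        /// A minimal sketch: split a DAT at 1 MiB.
        /// <code>
        /// (DatFile small, DatFile large) = datFile.SplitBySize(1024 * 1024);
        /// </code>
        /// </example>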
        public (DatFile lessThan, DatFile greaterThan) SplitBySize(long radix)
        {
            // Create each of the respective output DATs
            Globals.Logger.User("Creating and populating new DATs");

            DatFile lessThan = Create(Header.CloneStandard());
            lessThan.Header.FileName += $" (less than {radix})";
            lessThan.Header.Name += $" (less than {radix})";
            lessThan.Header.Description += $" (less than {radix})";

            DatFile greaterThan = Create(Header.CloneStandard());
            greaterThan.Header.FileName += $" (equal-greater than {radix})";
            greaterThan.Header.Name += $" (equal-greater than {radix})";
            greaterThan.Header.Description += $" (equal-greater than {radix})";

            // Now populate each of the DAT objects in turn
            Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
            {
                List<DatItem> items = Items[key];
                foreach (DatItem item in items)
                {
                    // If the file is not a Rom, it automatically goes in the "lesser" dat
                    if (item.ItemType != ItemType.Rom)
                        lessThan.Items.Add(key, item);

                    // If the file is a Rom and has no size, put it in the "lesser" dat
                    else if (item.ItemType == ItemType.Rom && (item as Rom).Size == null)
                        lessThan.Items.Add(key, item);

                    // If the file is a Rom and less than the radix, put it in the "lesser" dat
                    else if (item.ItemType == ItemType.Rom && (item as Rom).Size < radix)
                        lessThan.Items.Add(key, item);

                    // If the file is a Rom and greater than or equal to the radix, put it in the "greater" dat
                    else if (item.ItemType == ItemType.Rom && (item as Rom).Size >= radix)
                        greaterThan.Items.Add(key, item);
                }
            });

            // Then return both DatFiles
            return (lessThan, greaterThan);
        }

        /// <summary>
        /// Split a DAT by type of DatItem
        /// </summary>
        /// <returns>Dictionary of ItemType to DatFile mappings</returns>
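        /// <example>
        /// A minimal sketch: write each per-type DAT to its own file.
        /// <code>
        /// foreach (var kvp in datFile.SplitByType())
        ///     kvp.Value.Write("split");
        /// </code>
        /// </example>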
        public Dictionary<ItemType, DatFile> SplitByType()
        {
            // Create each of the respective output DATs
            Globals.Logger.User("Creating and populating new DATs");

            // Create the set of type-to-dat mappings
            Dictionary<ItemType, DatFile> typeDats = new Dictionary<ItemType, DatFile>();

            // We only care about a subset of types
            List<ItemType> outputTypes = new List<ItemType>
            {
                ItemType.Disk,
                ItemType.Media,
                ItemType.Rom,
                ItemType.Sample,
            };

            // Setup all of the DatFiles
            foreach (ItemType itemType in outputTypes)
            {
                typeDats[itemType] = Create(Header.CloneStandard());
                typeDats[itemType].Header.FileName += $" ({itemType})";
                typeDats[itemType].Header.Name += $" ({itemType})";
                typeDats[itemType].Header.Description += $" ({itemType})";
            }

            // Now populate each of the DAT objects in turn
            Parallel.ForEach(outputTypes, Globals.ParallelOptions, itemType =>
            {
                FillWithItemType(typeDats[itemType], itemType);
            });

            return typeDats;
        }

        #endregion

        #region Writing

        /// <summary>
        /// Create and open an output file for writing direct from a dictionary
        /// </summary>
        /// <param name="outDir">Set the output directory (current directory on null)</param>
        /// <param name="overwrite">True if files should be overwritten (default), false if they should be renamed instead</param>
        /// <param name="ignoreblanks">True if blank roms should be skipped on output, false otherwise (default)</param>
        /// <param name="quotes">True if quotes are assumed in supported types (default), false otherwise</param>
        /// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
        /// <returns>True if the DAT was written correctly, false otherwise</returns>
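        /// <example>
        /// A minimal sketch: write the DAT to a directory with default options.
        /// <code>
        /// if (!datFile.Write("output"))
        ///     Globals.Logger.User("Write failed");
        /// </code>
        /// </example>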
        public bool Write(
            string outDir,
            bool overwrite = true,
            bool ignoreblanks = false,
            bool quotes = true,
            bool throwOnError = false)
        {
            // If we have nothing writable, abort
            if (!HasWritable())
            {
                Globals.Logger.User("There were no items to write out!");
                return false;
            }

            // Ensure the output directory is set and created
            outDir = DirectoryExtensions.Ensure(outDir, create: true);

            // If the DAT has no output format, default to XML
            if (Header.DatFormat == 0)
            {
                Globals.Logger.Verbose("No DAT format defined, defaulting to XML");
                Header.DatFormat = DatFormat.Logiqx;
            }

            // Make sure that the three essential fields are filled in
            EnsureHeaderFields();

            // Bucket roms by game name, if not already
            Items.BucketBy(Field.Machine_Name, DedupeType.None);

            // Output the number of items we're going to be writing
            Globals.Logger.User($"A total of {Items.TotalCount - Items.RemovedCount} items will be written out to '{Header.FileName}'");

            // Get the outfile names
            Dictionary<DatFormat, string> outfiles = Header.CreateOutFileNames(outDir, overwrite);

            try
            {
                // Write out all required formats
                Parallel.ForEach(outfiles.Keys, Globals.ParallelOptions, datFormat =>
                {
                    string outfile = outfiles[datFormat];
                    try
                    {
                        Create(datFormat, this, quotes)?.WriteToFile(outfile, ignoreblanks, throwOnError);
                    }
                    catch (Exception ex)
                    {
                        Globals.Logger.Error(ex, $"Datfile {outfile} could not be written out");
                        if (throwOnError) throw ex;
                    }
                });
            }
            catch (Exception ex)
            {
                Globals.Logger.Error(ex);
                if (throwOnError) throw ex;
                return false;
            }

            return true;
        }

        /// <summary>
        /// Write the stats out to console for the current DatFile
        /// </summary>
        public void WriteStatsToConsole()
        {
            if (Items.RomCount + Items.DiskCount == 0)
                Items.RecalculateStats();

            Items.BucketBy(Field.Machine_Name, DedupeType.None, norename: true);

            var consoleOutput = BaseReport.Create(StatReportFormat.None, null, true, true);
            consoleOutput.ReplaceStatistics(Header.FileName, Items.Keys.Count(), Items);
        }

        /// <summary>
        /// Create and open an output file for writing direct from a dictionary
        /// </summary>
        /// <param name="outfile">Name of the file to write to</param>
        /// <param name="ignoreblanks">True if blank roms should be skipped on output, false otherwise (default)</param>
        /// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
        /// <returns>True if the DAT was written correctly, false otherwise</returns>
        public abstract bool WriteToFile(string outfile, bool ignoreblanks = false, bool throwOnError = false);

        /// <summary>
        /// Create a prefix or postfix from inputs
        /// </summary>
        /// <param name="item">DatItem to create a prefix/postfix for</param>
        /// <param name="prefix">True for prefix, false for postfix</param>
        /// <returns>Sanitized string representing the postfix or prefix</returns>
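        /// <example>
        /// A sketch of the token expansion (values illustrative): with Header.Prefix
        /// set to "%crc% " and Header.Quotes enabled, a Rom whose CRC is "deadbeef"
        /// produces the prefix:
        /// <code>
        /// deadbeef "
        /// </code>
        /// </example>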
        protected string CreatePrefixPostfix(DatItem item, bool prefix)
        {
            // Initialize strings
            string fix = string.Empty,
                game = item.Machine.Name,
                name = item.GetName() ?? item.ItemType.ToString(),
                crc = string.Empty,
                md5 = string.Empty,
                ripemd160 = string.Empty,
                sha1 = string.Empty,
                sha256 = string.Empty,
                sha384 = string.Empty,
                sha512 = string.Empty,
                size = string.Empty,
                spamsum = string.Empty;

            // If we have a prefix
            if (prefix)
                fix = Header.Prefix + (Header.Quotes ? "\"" : string.Empty);

            // If we have a postfix
            else
                fix = (Header.Quotes ? "\"" : string.Empty) + Header.Postfix;

            // Ensure we have the proper values for replacement
            if (item.ItemType == ItemType.Disk)
            {
                md5 = (item as Disk).MD5 ?? string.Empty;
                sha1 = (item as Disk).SHA1 ?? string.Empty;
            }
            else if (item.ItemType == ItemType.Media)
            {
                md5 = (item as Media).MD5 ?? string.Empty;
                sha1 = (item as Media).SHA1 ?? string.Empty;
                sha256 = (item as Media).SHA256 ?? string.Empty;
                spamsum = (item as Media).SpamSum ?? string.Empty;
            }
            else if (item.ItemType == ItemType.Rom)
            {
                crc = (item as Rom).CRC ?? string.Empty;
                md5 = (item as Rom).MD5 ?? string.Empty;
#if NET_FRAMEWORK
                ripemd160 = (item as Rom).RIPEMD160 ?? string.Empty;
#endif
                sha1 = (item as Rom).SHA1 ?? string.Empty;
                sha256 = (item as Rom).SHA256 ?? string.Empty;
                sha384 = (item as Rom).SHA384 ?? string.Empty;
                sha512 = (item as Rom).SHA512 ?? string.Empty;
                size = (item as Rom).Size?.ToString() ?? string.Empty;
                spamsum = (item as Rom).SpamSum ?? string.Empty;
            }

            // Now do bulk replacement where possible
            fix = fix
                .Replace("%game%", game)
                .Replace("%machine%", game)
                .Replace("%name%", name)
                .Replace("%manufacturer%", item.Machine.Manufacturer ?? string.Empty)
                .Replace("%publisher%", item.Machine.Publisher ?? string.Empty)
                .Replace("%category%", item.Machine.Category ?? string.Empty)
                .Replace("%crc%", crc)
                .Replace("%md5%", md5)
                .Replace("%ripemd160%", ripemd160)
                .Replace("%sha1%", sha1)
                .Replace("%sha256%", sha256)
                .Replace("%sha384%", sha384)
                .Replace("%sha512%", sha512)
                .Replace("%size%", size)
                .Replace("%spamsum%", spamsum);

            // TODO: Add GameName logic here too?
            // TODO: Figure out what I meant by the above ^
            return fix;
        }

        /// <summary>
        /// Process an item and correctly set the item name
        /// </summary>
        /// <param name="item">DatItem to update</param>
        /// <param name="forceRemoveQuotes">True if the Quotes flag should be ignored, false otherwise</param>
        /// <param name="forceRomName">True if the UseRomName should be always on (default), false otherwise</param>
        protected void ProcessItemName(DatItem item, bool forceRemoveQuotes, bool forceRomName = true)
        {
            string name = item.GetName() ?? string.Empty;

            // Backup relevant values and set new ones accordingly
            bool quotesBackup = Header.Quotes;
            bool useRomNameBackup = Header.UseRomName;
            if (forceRemoveQuotes)
                Header.Quotes = false;

            if (forceRomName)
                Header.UseRomName = true;

            // Create the proper Prefix and Postfix
            string pre = CreatePrefixPostfix(item, true);
            string post = CreatePrefixPostfix(item, false);

            // If we're in Depot mode, take care of that instead
            if (Header.OutputDepot?.IsActive == true)
            {
                if (item.ItemType == ItemType.Disk)
                {
                    Disk disk = item as Disk;

                    // We can only write out if there's a SHA-1
                    if (!string.IsNullOrWhiteSpace(disk.SHA1))
                    {
                        name = PathExtensions.GetDepotPath(disk.SHA1, Header.OutputDepot.Depth).Replace('\\', '/');
                        item.SetFields(new Dictionary<Field, string> { [Field.DatItem_Name] = $"{pre}{name}{post}" });
                    }
                }
                else if (item.ItemType == ItemType.Media)
                {
                    Media media = item as Media;

                    // We can only write out if there's a SHA-1
                    if (!string.IsNullOrWhiteSpace(media.SHA1))
                    {
                        name = PathExtensions.GetDepotPath(media.SHA1, Header.OutputDepot.Depth).Replace('\\', '/');
                        item.SetFields(new Dictionary<Field, string> { [Field.DatItem_Name] = $"{pre}{name}{post}" });
                    }
                }
                else if (item.ItemType == ItemType.Rom)
                {
                    Rom rom = item as Rom;

                    // We can only write out if there's a SHA-1
                    if (!string.IsNullOrWhiteSpace(rom.SHA1))
                    {
                        name = PathExtensions.GetDepotPath(rom.SHA1, Header.OutputDepot.Depth).Replace('\\', '/');
                        item.SetFields(new Dictionary<Field, string> { [Field.DatItem_Name] = $"{pre}{name}{post}" });
                    }
                }

                return;
            }

            if (!string.IsNullOrWhiteSpace(Header.ReplaceExtension) || Header.RemoveExtension)
            {
                if (Header.RemoveExtension)
                    Header.ReplaceExtension = string.Empty;

                string dir = Path.GetDirectoryName(name);
                dir = dir.TrimStart(Path.DirectorySeparatorChar);
                name = Path.Combine(dir, Path.GetFileNameWithoutExtension(name) + Header.ReplaceExtension);
            }

            if (!string.IsNullOrWhiteSpace(Header.AddExtension))
                name += Header.AddExtension;

            if (Header.UseRomName && Header.GameName)
                name = Path.Combine(item.Machine.Name, name);

            // Now assign back the item name
            item.SetFields(new Dictionary<Field, string> { [Field.DatItem_Name] = pre + name + post });

            // Restore all relevant values
            if (forceRemoveQuotes)
                Header.Quotes = quotesBackup;

            if (forceRomName)
                Header.UseRomName = useRomNameBackup;
        }

        /// <summary>
        /// Process any DatItems that are "null", usually created from directory population
        /// </summary>
        /// <param name="datItem">DatItem to check for "null" status</param>
        /// <returns>Cleaned DatItem</returns>
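        /// <example>
        /// A sketch of the transformation: a blank placeholder Rom with a null size
        /// and a literal "null" CRC comes back with a zero size and the well-known
        /// zero-byte CRC, while hashes that were truly absent stay null.
        /// <code>
        /// // before: Size = null, CRC = "null", MD5 = null
        /// // after:  Size = 0,    CRC = Constants.CRCZero, MD5 = null
        /// </code>
        /// </example>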
        protected DatItem ProcessNullifiedItem(DatItem datItem)
        {
            // If we don't have a Rom, we can ignore it
            if (datItem.ItemType != ItemType.Rom)
                return datItem;

            // Cast for easier parsing
            Rom rom = datItem as Rom;

            // If the Rom has "null" characteristics, ensure all fields
            if (rom.Size == null && rom.CRC == "null")
            {
                Globals.Logger.Verbose($"Empty folder found: {datItem.Machine.Name}");

                rom.Name = (rom.Name == "null" ? "-" : rom.Name);
                rom.Size = Constants.SizeZero;
                rom.CRC = rom.CRC == "null" ? Constants.CRCZero : null;
                rom.MD5 = rom.MD5 == "null" ? Constants.MD5Zero : null;
#if NET_FRAMEWORK
                rom.RIPEMD160 = rom.RIPEMD160 == "null" ? Constants.RIPEMD160Zero : null;
#endif
                rom.SHA1 = rom.SHA1 == "null" ? Constants.SHA1Zero : null;
                rom.SHA256 = rom.SHA256 == "null" ? Constants.SHA256Zero : null;
                rom.SHA384 = rom.SHA384 == "null" ? Constants.SHA384Zero : null;
                rom.SHA512 = rom.SHA512 == "null" ? Constants.SHA512Zero : null;
                rom.SpamSum = rom.SpamSum == "null" ? Constants.SpamSumZero : null;
            }

            return rom;
        }

        /// <summary>
        /// Get supported types for write
        /// </summary>
        /// <returns>List of supported types for writing</returns>
        protected virtual ItemType[] GetSupportedTypes()
        {
            return Enum.GetValues(typeof(ItemType)) as ItemType[];
        }

        /// <summary>
        /// Get if a machine contains any writable items
        /// </summary>
        /// <param name="datItems">DatItems to check</param>
        /// <returns>True if the machine contains at least one writable item, false otherwise</returns>
        /// <remarks>Empty machines are kept with this</remarks>
        protected bool ContainsWritable(List<DatItem> datItems)
        {
            // Empty machines are considered writable
            if (datItems == null || datItems.Count == 0)
                return true;

            foreach (DatItem datItem in datItems)
            {
                if (GetSupportedTypes().Contains(datItem.ItemType))
                    return true;
            }

            return false;
        }

        /// <summary>
        /// Get if an item should be ignored on write
        /// </summary>
        /// <param name="datItem">DatItem to check</param>
        /// <param name="ignoreBlanks">True if blank roms should be skipped on output, false otherwise</param>
        /// <returns>True if the item should be skipped on write, false otherwise</returns>
        protected bool ShouldIgnore(DatItem datItem, bool ignoreBlanks)
        {
            // If the item is supposed to be removed, we ignore
            if (datItem.Remove)
                return true;

            // If we have the Blank dat item, we ignore
            if (datItem.ItemType == ItemType.Blank)
                return true;

            // If we're ignoring blanks and we have a Rom
            if (ignoreBlanks && datItem.ItemType == ItemType.Rom)
            {
                Rom rom = datItem as Rom;

                // If we have a 0-size or blank rom, then we ignore
                if (rom.Size == 0 || rom.Size == null)
                    return true;
            }

            // If we have an item type not in the list of supported values
            if (!GetSupportedTypes().Contains(datItem.ItemType))
                return true;

            return false;
        }

        /// <summary>
        /// Ensure that FileName, Name, and Description are filled with some value
        /// </summary>
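        /// <example>
        /// A sketch of the fallback behavior (values illustrative):
        /// <code>
        /// // FileName = null, Name = "MAME", Description = null
        /// //   => FileName = "MAME", Description = "MAME"
        /// // FileName = null, Name = null, Description = null
        /// //   => all three become "Default"
        /// </code>
        /// </example>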
        private void EnsureHeaderFields()
        {
            // Empty FileName
            if (string.IsNullOrWhiteSpace(Header.FileName))
            {
                if (string.IsNullOrWhiteSpace(Header.Name) && string.IsNullOrWhiteSpace(Header.Description))
                    Header.FileName = Header.Name = Header.Description = "Default";
                else if (string.IsNullOrWhiteSpace(Header.Name) && !string.IsNullOrWhiteSpace(Header.Description))
                    Header.FileName = Header.Name = Header.Description;
                else if (!string.IsNullOrWhiteSpace(Header.Name) && string.IsNullOrWhiteSpace(Header.Description))
                    Header.FileName = Header.Description = Header.Name;
                else if (!string.IsNullOrWhiteSpace(Header.Name) && !string.IsNullOrWhiteSpace(Header.Description))
                    Header.FileName = Header.Description;
            }

            // Filled FileName
            else
            {
                if (string.IsNullOrWhiteSpace(Header.Name) && string.IsNullOrWhiteSpace(Header.Description))
                    Header.Name = Header.Description = Header.FileName;
                else if (string.IsNullOrWhiteSpace(Header.Name) && !string.IsNullOrWhiteSpace(Header.Description))
                    Header.Name = Header.Description;
                else if (!string.IsNullOrWhiteSpace(Header.Name) && string.IsNullOrWhiteSpace(Header.Description))
                    Header.Description = Header.Name;
            }
        }

        /// <summary>
        /// Get if the DatFile has any writable items
        /// </summary>
        /// <returns>True if there are any writable items, false otherwise</returns>
        private bool HasWritable()
        {
            // Force a statistics recheck, just in case
            Items.RecalculateStats();

            // If there's nothing there, abort
            if (Items.TotalCount == 0)
                return false;

            // If every item is removed, abort
            if (Items.TotalCount == Items.RemovedCount)
                return false;

            return true;
        }

        #endregion
    }
}