2017-10-09 18:04:49 -07:00
|
|
|
|
using System;
|
2020-08-17 23:28:12 -07:00
|
|
|
|
using System.Collections.Concurrent;
|
2017-10-09 18:04:49 -07:00
|
|
|
|
using System.Collections.Generic;
|
2020-06-10 22:37:19 -07:00
|
|
|
|
using System.IO;
|
2017-09-25 12:21:52 -07:00
|
|
|
|
using System.Linq;
|
2019-03-29 00:15:40 -07:00
|
|
|
|
using System.Net;
|
2020-08-17 23:28:12 -07:00
|
|
|
|
using System.Text.RegularExpressions;
|
2017-10-09 18:04:49 -07:00
|
|
|
|
using System.Threading.Tasks;
|
2020-06-10 22:37:19 -07:00
|
|
|
|
|
2017-05-04 02:41:11 -07:00
|
|
|
|
using SabreTools.Library.Data;
|
2017-11-02 15:44:15 -07:00
|
|
|
|
using SabreTools.Library.DatItems;
|
2020-07-15 09:41:59 -07:00
|
|
|
|
using SabreTools.Library.FileTypes;
|
2020-07-30 22:51:33 -07:00
|
|
|
|
using SabreTools.Library.Filtering;
|
2020-08-01 23:04:11 -07:00
|
|
|
|
using SabreTools.Library.IO;
|
2017-11-07 13:56:15 -08:00
|
|
|
|
using SabreTools.Library.Reports;
|
2017-10-09 20:38:15 -07:00
|
|
|
|
using SabreTools.Library.Skippers;
|
2017-10-09 18:04:49 -07:00
|
|
|
|
using SabreTools.Library.Tools;
|
2017-10-09 20:38:15 -07:00
|
|
|
|
using NaturalSort;
|
2020-08-24 01:06:52 -07:00
|
|
|
|
using Newtonsoft.Json;
|
2016-10-26 22:10:47 -07:00
|
|
|
|
|
2017-10-06 20:46:43 -07:00
|
|
|
|
namespace SabreTools.Library.DatFiles
|
2016-04-19 01:11:23 -07:00
|
|
|
|
{
|
2019-01-08 11:49:31 -08:00
|
|
|
|
/// <summary>
|
|
|
|
|
|
/// Represents a format-agnostic DAT
|
|
|
|
|
|
/// </summary>
|
2020-08-24 01:06:52 -07:00
|
|
|
|
[JsonObject("datfile")]
|
2020-07-15 09:41:59 -07:00
|
|
|
|
public abstract class DatFile
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-07-31 14:04:10 -07:00
|
|
|
|
#region Fields

/// <summary>
/// Header values for the DAT (name, description, file name, output format, etc.)
/// </summary>
[JsonProperty("header")]
public DatHeader Header { get; set; } = new DatHeader();

/// <summary>
/// DatItems contained in this DAT, bucketed by key, and related statistics
/// </summary>
[JsonProperty("items")]
public ItemDictionary Items { get; set; } = new ItemDictionary();

#endregion
|
|
|
|
|
|
|
2020-07-15 09:41:59 -07:00
|
|
|
|
#region Constructors
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Create a new DatFile from an existing one
/// </summary>
/// <param name="datFile">DatFile to get the values from; may be null, in which case the defaults are kept</param>
public DatFile(DatFile datFile)
{
    // Nothing to copy from a null source; keep the default Header/Items
    if (datFile == null)
        return;

    // Note: this shares the references with the source, it does not clone them
    Header = datFile.Header;
    Items = datFile.Items;
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Create a specific type of DatFile to be used based on a format and a base DAT
/// </summary>
/// <param name="datFormat">Format of the DAT to be created; null maps to the default (new-style Logiqx)</param>
/// <param name="baseDat">DatFile containing the information to use in specific operations</param>
/// <returns>DatFile of the specific internal type that corresponds to the inputs</returns>
public static DatFile Create(DatFormat? datFormat = null, DatFile baseDat = null)
{
    switch (datFormat)
    {
        case DatFormat.AttractMode:
            return new AttractMode(baseDat);

        case DatFormat.ClrMamePro:
            return new ClrMamePro(baseDat);

        // Comma-separated values
        case DatFormat.CSV:
            return new SeparatedValue(baseDat, ',');

        case DatFormat.DOSCenter:
            return new DosCenter(baseDat);

        case DatFormat.EverdriveSMDB:
            return new EverdriveSMDB(baseDat);

        case DatFormat.Json:
            return new Json(baseDat);

        case DatFormat.Listrom:
            return new Listrom(baseDat);

        case DatFormat.Listxml:
            return new Listxml(baseDat);

        // Second argument selects deprecated (true) vs. current (false) Logiqx output
        case DatFormat.Logiqx:
            return new Logiqx(baseDat, false);

        case DatFormat.LogiqxDeprecated:
            return new Logiqx(baseDat, true);

        case DatFormat.MissFile:
            return new Missfile(baseDat);

        case DatFormat.OfflineList:
            return new OfflineList(baseDat);

        case DatFormat.OpenMSX:
            return new OpenMSX(baseDat);

        // Hashfile variants: one hash type per output format
        case DatFormat.RedumpMD5:
            return new Hashfile(baseDat, Hash.MD5);

#if NET_FRAMEWORK
        // RIPEMD160 is only available when building against .NET Framework
        case DatFormat.RedumpRIPEMD160:
            return new Hashfile(baseDat, Hash.RIPEMD160);
#endif

        case DatFormat.RedumpSFV:
            return new Hashfile(baseDat, Hash.CRC);

        case DatFormat.RedumpSHA1:
            return new Hashfile(baseDat, Hash.SHA1);

        case DatFormat.RedumpSHA256:
            return new Hashfile(baseDat, Hash.SHA256);

        case DatFormat.RedumpSHA384:
            return new Hashfile(baseDat, Hash.SHA384);

        case DatFormat.RedumpSHA512:
            return new Hashfile(baseDat, Hash.SHA512);

        case DatFormat.RomCenter:
            return new RomCenter(baseDat);

        case DatFormat.SabreDat:
            return new SabreDat(baseDat);

        case DatFormat.SoftwareList:
            return new SoftwareList(baseDat);

        // Semicolon-separated values
        case DatFormat.SSV:
            return new SeparatedValue(baseDat, ';');

        // Tab-separated values
        case DatFormat.TSV:
            return new SeparatedValue(baseDat, '\t');

        // We use new-style Logiqx as a backup for generic DatFile
        case null:
        default:
            return new Logiqx(baseDat, false);
    }
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Create a new DatFile from an existing DatHeader
/// </summary>
/// <param name="datHeader">DatHeader to get the values from</param>
/// <returns>Format-specific DatFile seeded with a clone of the supplied header</returns>
public static DatFile Create(DatHeader datHeader)
{
    // Build the format-specific DatFile, then give it its own copy of the header
    var created = Create(datHeader.DatFormat);
    created.Header = (DatHeader)datHeader.Clone();
    return created;
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Add items from another DatFile to the existing DatFile
/// </summary>
/// <param name="datFile">DatFile to add from</param>
/// <param name="delete">If items should be deleted from the source DatFile</param>
public void AddFromExisting(DatFile datFile, bool delete = false)
{
    // Snapshot the keys so the source dictionary can be mutated while iterating
    List<string> sourceKeys = datFile.Items.Keys.ToList();

    foreach (string sourceKey in sourceKeys)
    {
        // Copy everything under this key into the internal DAT
        Items.AddRange(sourceKey, datFile.Items[sourceKey]);

        // Optionally strip the key from the source as we go
        if (delete)
            datFile.Items.Remove(sourceKey);
    }

    // When deleting, drop the source's dictionary entirely to release it
    if (delete)
        datFile.Items = null;
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Apply a DatHeader to an existing DatFile
/// </summary>
/// <param name="datHeader">DatHeader to get the values from</param>
public void ApplyDatHeader(DatHeader datHeader) => Header.ConditionalCopy(datHeader);
|
|
|
|
|
|
|
2020-08-27 20:56:50 -07:00
|
|
|
|
/// <summary>
/// Fill the header values based on existing Header and path
/// </summary>
/// <param name="path">Path used for creating a name, if necessary</param>
/// <param name="bare">True if the date should be omitted from name and description, false otherwise</param>
public void FillHeaderFromPath(string path, bool bare)
{
    bool hasName = !string.IsNullOrWhiteSpace(Header.Name);
    bool hasDescription = !string.IsNullOrWhiteSpace(Header.Description);

    // Both values are already set: nothing to derive
    if (hasName && hasDescription)
        return;

    if (!hasName && hasDescription)
    {
        // Description only: mirror it into the name
        Header.Name = Header.Description;
    }
    else if (hasName && !hasDescription)
    {
        // Name only: derive the description, optionally tagged with the date
        Header.Description = Header.Name + (bare ? string.Empty : $" ({Header.Date})");
    }
    else
    {
        // Neither is set: use the last path segment as the name
        string[] segments = path.TrimEnd(Path.DirectorySeparatorChar).Split(Path.DirectorySeparatorChar);
        Header.Name = segments.Last();
        Header.Description = Header.Name + (bare ? string.Empty : $" ({Header.Date})");
    }
}
|
|
|
|
|
|
|
2020-07-15 09:41:59 -07:00
|
|
|
|
#endregion
|
|
|
|
|
|
|
2020-07-27 12:41:36 -07:00
|
|
|
|
#region Converting and Updating
|
|
|
|
|
|
|
2019-01-08 11:49:31 -08:00
|
|
|
|
/// <summary>
/// Replace item values from the base set represented by the current DAT
/// </summary>
/// <param name="intDat">DatFile to replace the values in</param>
/// <param name="updateFields">List of Fields representing what should be updated</param>
/// <param name="onlySame">True if descriptions should only be replaced if the game name is the same, false otherwise</param>
public void BaseReplace(DatFile intDat, List<Field> updateFields, bool onlySame)
{
    Globals.Logger.User($"Replacing items in '{intDat.Header.FileName}' from the base DAT");

    // If we are matching based on DatItem fields of any sort
    if (updateFields.Intersect(DatItem.DatItemFields).Any())
    {
        // For comparison's sake, we want to use CRC as the base bucketing
        Items.BucketBy(Field.DatItem_CRC, DedupeType.Full);
        intDat.Items.BucketBy(Field.DatItem_CRC, DedupeType.None);

        // Then we do a hashwise comparison against the base DAT
        Parallel.ForEach(intDat.Items.Keys, Globals.ParallelOptions, key =>
        {
            List<DatItem> datItems = intDat.Items[key];
            List<DatItem> newDatItems = new List<DatItem>();
            foreach (DatItem datItem in datItems)
            {
                // Find base-set duplicates of this item (sorted: true — the base was just bucketed)
                List<DatItem> dupes = Items.GetDuplicates(datItem, sorted: true);
                DatItem newDatItem = datItem.Clone() as DatItem;

                // Replace fields from the first duplicate, if we have one
                if (dupes.Count > 0)
                    newDatItem.ReplaceFields(dupes.First(), updateFields);

                newDatItems.Add(newDatItem);
            }

            // Now add the new list to the key, replacing the old contents
            intDat.Items.Remove(key);
            intDat.Items.AddRange(key, newDatItems);
        });
    }

    // If we are matching based on Machine fields of any sort
    if (updateFields.Intersect(DatItem.MachineFields).Any())
    {
        // For comparison's sake, we want to use Machine Name as the base bucketing
        Items.BucketBy(Field.Machine_Name, DedupeType.Full);
        intDat.Items.BucketBy(Field.Machine_Name, DedupeType.None);

        // Then we do a namewise comparison against the base DAT
        Parallel.ForEach(intDat.Items.Keys, Globals.ParallelOptions, key =>
        {
            List<DatItem> datItems = intDat.Items[key];
            List<DatItem> newDatItems = new List<DatItem>();
            foreach (DatItem datItem in datItems)
            {
                DatItem newDatItem = datItem.Clone() as DatItem;
                // Replace machine fields from the first base item under the same name, if any exist
                if (Items.ContainsKey(key) && Items[key].Count() > 0)
                    newDatItem.Machine.ReplaceFields(Items[key][0].Machine, updateFields, onlySame);

                newDatItems.Add(newDatItem);
            }

            // Now add the new list to the key, replacing the old contents
            intDat.Items.Remove(key);
            intDat.Items.AddRange(key, newDatItems);
        });
    }
}
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Output diffs against a base set represented by the current DAT
/// </summary>
/// <param name="intDat">DatFile to diff against the current base set; matched items are removed from it</param>
/// <param name="useGames">True to diff using games, false to use hashes</param>
public void DiffAgainst(DatFile intDat, bool useGames)
{
    // For comparison's sake, we want to use a base ordering
    if (useGames)
        Items.BucketBy(Field.Machine_Name, DedupeType.None);
    else
        Items.BucketBy(Field.DatItem_CRC, DedupeType.None);

    Globals.Logger.User($"Comparing '{intDat.Header.FileName}' to base DAT");

    // For comparison's sake, we want to use the same base bucketing on the input DAT
    if (useGames)
        intDat.Items.BucketBy(Field.Machine_Name, DedupeType.None);
    else
        intDat.Items.BucketBy(Field.DatItem_CRC, DedupeType.Full);

    // Then we compare against the base DAT
    List<string> keys = intDat.Items.Keys.ToList();
    Parallel.ForEach(keys, Globals.ParallelOptions, key =>
    {
        // Game Against uses game names
        if (useGames)
        {
            // If the base DAT doesn't contain the key, keep it
            if (!Items.ContainsKey(key))
                return;

            // If the number of items is different, then keep it
            if (Items[key].Count != intDat.Items[key].Count)
                return;

            // Otherwise, compare by name and hash the remaining files
            bool exactMatch = true;
            foreach (DatItem item in intDat.Items[key])
            {
                // TODO: Make this granular to name as well
                if (!Items[key].Contains(item))
                {
                    exactMatch = false;
                    break;
                }
            }

            // If we have an exact match, remove the game
            if (exactMatch)
                intDat.Items.Remove(key);
        }

        // Standard Against uses hashes
        else
        {
            List<DatItem> datItems = intDat.Items[key];
            List<DatItem> keepDatItems = new List<DatItem>();
            foreach (DatItem datItem in datItems)
            {
                // Keep only items with no duplicate in the base set
                if (!Items.HasDuplicates(datItem, true))
                    keepDatItems.Add(datItem);
            }

            // Now add the new list to the key, replacing the old contents
            intDat.Items.Remove(key);
            intDat.Items.AddRange(key, keepDatItems);
        }
    });
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Output cascading diffs
/// </summary>
/// <param name="datHeaders">Dat headers used optionally</param>
/// <returns>List of DatFiles representing the individually indexed items</returns>
public List<DatFile> DiffCascade(List<DatHeader> datHeaders)
{
    // Ensure the current DatFile is sorted optimally
    Items.BucketBy(Field.DatItem_CRC, DedupeType.None);

    // Loop through each of the inputs and get or create a new DatData object
    InternalStopwatch watch = new InternalStopwatch("Initializing and filling all output DATs");

    // Create the DatFiles from the set of headers, one per source index
    DatFile[] outDatsArray = new DatFile[datHeaders.Count];
    Parallel.For(0, datHeaders.Count, Globals.ParallelOptions, j =>
    {
        DatFile diffData = Create(datHeaders[j]);
        diffData.Items = new ItemDictionary();
        FillWithSourceIndex(diffData, j);
        outDatsArray[j] = diffData;
    });

    // Fix: materialize the array once at the end instead of pre-allocating a
    // throwaway List<DatFile> that was immediately overwritten
    List<DatFile> outDats = outDatsArray.ToList();
    watch.Stop();

    return outDats;
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Output duplicate item diff
/// </summary>
/// <param name="inputs">List of inputs to write out from</param>
/// <returns>DatFile containing the duplicate items</returns>
public DatFile DiffDuplicates(List<string> inputs)
    => DiffDuplicates(inputs.Select(i => new ParentablePath(i)).ToList());
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
2020-08-01 21:42:28 -07:00
|
|
|
|
/// <summary>
/// Output duplicate item diff
/// </summary>
/// <param name="inputs">List of inputs to write out from; indexed by each item's Source.Index</param>
/// <returns>DatFile containing only externally-duplicated items</returns>
public DatFile DiffDuplicates(List<ParentablePath> inputs)
{
    InternalStopwatch watch = new InternalStopwatch("Initializing duplicate DAT");

    // Fill in any information not in the base DAT
    if (string.IsNullOrWhiteSpace(Header.FileName))
        Header.FileName = "All DATs";

    if (string.IsNullOrWhiteSpace(Header.Name))
        Header.Name = "All DATs";

    if (string.IsNullOrWhiteSpace(Header.Description))
        Header.Description = "All DATs";

    // Create the output DAT, tagged as the duplicate set
    string post = " (Duplicates)";
    DatFile dupeData = Create(Header);
    dupeData.Header.FileName += post;
    dupeData.Header.Name += post;
    dupeData.Header.Description += post;
    dupeData.Items = new ItemDictionary();

    watch.Stop();

    // Now, loop through the dictionary and populate the correct DATs
    watch.Start("Populating duplicate DAT");

    Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
    {
        List<DatItem> items = DatItem.Merge(Items[key]);

        // If the rom list is empty or null, just skip it
        if (items == null || items.Count == 0)
            return;

        // Loop through and add the items correctly
        foreach (DatItem item in items)
        {
            // Only items flagged DupeType.External belong in the duplicate DAT
            if (item.DupeType.HasFlag(DupeType.External))
            {
                DatItem newrom = item.Clone() as DatItem;
                // Tag the machine name with the input file the item came from
                newrom.Machine.Name += $" ({Path.GetFileNameWithoutExtension(inputs[item.Source.Index].CurrentPath)})";

                dupeData.Items.Add(key, newrom);
            }
        }
    });

    watch.Stop();

    return dupeData;
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Output non-cascading diffs
/// </summary>
/// <param name="inputs">List of inputs to write out from</param>
/// <returns>List of DatFiles, one per input</returns>
public List<DatFile> DiffIndividuals(List<string> inputs)
    => DiffIndividuals(inputs.Select(i => new ParentablePath(i)).ToList());
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Output non-cascading diffs
/// </summary>
/// <param name="inputs">List of inputs to write out from; indexed by each item's Source.Index</param>
/// <returns>List of DatFiles, one per input, containing that input's unique items</returns>
public List<DatFile> DiffIndividuals(List<ParentablePath> inputs)
{
    InternalStopwatch watch = new InternalStopwatch("Initializing all individual DATs");

    // Fill in any information not in the base DAT
    if (string.IsNullOrWhiteSpace(Header.FileName))
        Header.FileName = "All DATs";

    if (string.IsNullOrWhiteSpace(Header.Name))
        Header.Name = "All DATs";

    if (string.IsNullOrWhiteSpace(Header.Description))
        Header.Description = "All DATs";

    // Loop through each of the inputs and get or create a new DatData object
    DatFile[] outDatsArray = new DatFile[inputs.Count];

    Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
    {
        string innerpost = $" ({j} - {inputs[j].GetNormalizedFileName(true)} Only)";
        DatFile diffData = Create(Header);
        diffData.Header.FileName += innerpost;
        diffData.Header.Name += innerpost;
        diffData.Header.Description += innerpost;
        diffData.Items = new ItemDictionary();
        outDatsArray[j] = diffData;
    });

    // Create a list of DatData objects representing individual output files
    List<DatFile> outDats = outDatsArray.ToList();

    watch.Stop();

    // Now, loop through the dictionary and populate the correct DATs
    watch.Start("Populating all individual DATs");

    Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
    {
        List<DatItem> items = DatItem.Merge(Items[key]);

        // If the rom list is empty or null, just skip it
        if (items == null || items.Count == 0)
            return;

        // Route each internal-dupe or unique item to the DAT for its source index
        foreach (DatItem item in items)
        {
            if (item.DupeType.HasFlag(DupeType.Internal) || item.DupeType == 0x00)
                outDats[item.Source.Index].Items.Add(key, item);
        }
    });

    watch.Stop();

    // Fix: outDats is already a freshly-materialized list; returning it directly
    // avoids the redundant second copy the original made with ToList()
    return outDats;
}
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
2020-08-01 21:42:28 -07:00
|
|
|
|
/// <summary>
/// Output non-duplicate item diff
/// </summary>
/// <param name="inputs">List of inputs to write out from</param>
/// <returns>DatFile containing the non-duplicate items</returns>
public DatFile DiffNoDuplicates(List<string> inputs)
    => DiffNoDuplicates(inputs.Select(i => new ParentablePath(i)).ToList());
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Output non-duplicate item diff
/// </summary>
/// <param name="inputs">List of inputs to write out from; indexed by each item's Source.Index</param>
/// <returns>DatFile containing only items that are not externally duplicated</returns>
public DatFile DiffNoDuplicates(List<ParentablePath> inputs)
{
    InternalStopwatch watch = new InternalStopwatch("Initializing no duplicate DAT");

    // Fill in any information not in the base DAT
    if (string.IsNullOrWhiteSpace(Header.FileName))
        Header.FileName = "All DATs";

    if (string.IsNullOrWhiteSpace(Header.Name))
        Header.Name = "All DATs";

    if (string.IsNullOrWhiteSpace(Header.Description))
        Header.Description = "All DATs";

    // Create the output DAT, tagged as the no-duplicate set
    string post = " (No Duplicates)";
    DatFile outerDiffData = Create(Header);
    outerDiffData.Header.FileName += post;
    outerDiffData.Header.Name += post;
    outerDiffData.Header.Description += post;
    outerDiffData.Items = new ItemDictionary();

    watch.Stop();

    // Now, loop through the dictionary and populate the correct DATs
    watch.Start("Populating no duplicate DAT");

    Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
    {
        List<DatItem> items = DatItem.Merge(Items[key]);

        // If the rom list is empty or null, just skip it
        if (items == null || items.Count == 0)
            return;

        // Loop through and add the items correctly
        foreach (DatItem item in items)
        {
            // Keep internal dupes and unique (0x00) items; skip external dupes
            if (item.DupeType.HasFlag(DupeType.Internal) || item.DupeType == 0x00)
            {
                DatItem newrom = item.Clone() as DatItem;
                // Tag the machine name with the input file the item came from
                newrom.Machine.Name += $" ({Path.GetFileNameWithoutExtension(inputs[item.Source.Index].CurrentPath)})";
                outerDiffData.Items.Add(key, newrom);
            }
        }
    });

    watch.Stop();

    return outerDiffData;
}
|
|
|
|
|
|
|
2020-08-28 10:32:17 -07:00
|
|
|
|
/// <summary>
/// Fill a DatFile with all items with a particular ItemType
/// </summary>
/// <param name="indexDat">DatFile to add found items to</param>
/// <param name="itemType">ItemType to retrieve items for</param>
public void FillWithItemType(DatFile indexDat, ItemType itemType)
{
    // Loop through each bucket and copy matching items to the output
    Parallel.ForEach(Items.Keys, Globals.ParallelOptions, bucket =>
    {
        List<DatItem> merged = DatItem.Merge(Items[bucket]);

        // An empty or null bucket contributes nothing
        if (merged == null || merged.Count == 0)
            return;

        foreach (DatItem match in merged.Where(i => i.ItemType == itemType))
            indexDat.Items.Add(bucket, match);
    });
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Fill a DatFile with all items with a particular source index ID
/// </summary>
/// <param name="indexDat">DatFile to add found items to</param>
/// <param name="index">Source index ID to retrieve items for</param>
public void FillWithSourceIndex(DatFile indexDat, int index)
{
    // Process every bucket in parallel, copying matching items to the output DAT
    Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
    {
        // Merge the bucket first so duplicate entries are collapsed
        List<DatItem> merged = DatItem.Merge(Items[key]);

        // Nothing to do for a null or empty bucket
        if (merged == null || merged.Count == 0)
            return;

        // Copy over only the items that came from the requested source
        foreach (DatItem datItem in merged.Where(i => i.Source.Index == index))
        {
            indexDat.Items.Add(key, datItem);
        }
    });
}
|
|
|
|
|
|
|
2020-07-27 12:41:36 -07:00
|
|
|
|
/// <summary>
/// Populate from multiple paths while returning the individual headers
/// </summary>
/// <param name="inputs">Paths to DATs to parse</param>
/// <returns>List of DatHeader objects representing headers</returns>
public List<DatHeader> PopulateUserData(List<string> inputs)
{
    // Wrap each raw path and defer to the ParentablePath overload
    var wrappedPaths = new List<ParentablePath>();
    foreach (string input in inputs)
    {
        wrappedPaths.Add(new ParentablePath(input));
    }

    return PopulateUserData(wrappedPaths);
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Populate from multiple paths while returning the individual headers
/// </summary>
/// <param name="inputs">Paths to DATs to parse</param>
/// <returns>List of DatHeader objects representing headers, one per input in order</returns>
public List<DatHeader> PopulateUserData(List<ParentablePath> inputs)
{
    // One intermediate DatFile per input; each slot is written by exactly one task below
    DatFile[] datFiles = new DatFile[inputs.Count];
    InternalStopwatch watch = new InternalStopwatch("Processing individual DATs");

    // Parse all of the DATs into their own DatFiles in the array
    Parallel.For(0, inputs.Count, Globals.ParallelOptions, i =>
    {
        var input = inputs[i];
        Globals.Logger.User($"Adding DAT: {input.CurrentPath}");
        // Each parsed DAT inherits this DatFile's filtering-related header values
        datFiles[i] = Create(Header.CloneFiltering());
        // The loop index doubles as the source index so items can be traced to their input
        datFiles[i].Parse(input, i, keep: true);
    });

    watch.Stop();

    // Merge sequentially so items are added in input order
    watch.Start("Populating internal DAT");
    for (int i = 0; i < inputs.Count; i++)
    {
        AddFromExisting(datFiles[i], true);
    }

    watch.Stop();

    return datFiles.Select(d => d.Header).ToList();
}
|
|
|
|
|
|
|
2020-07-15 09:41:59 -07:00
|
|
|
|
#endregion
|
|
|
|
|
|
|
2020-08-17 23:28:12 -07:00
|
|
|
|
#region Filtering
|
|
|
|
|
|
|
2020-08-28 13:33:05 -07:00
|
|
|
|
/// <summary>
/// Apply cleaning methods to the DatFile
/// </summary>
/// <param name="cleaner">Cleaner to use; a null cleaner results in only the marked-item and blank cleanup steps</param>
/// <returns>True if cleaning was successful, false on error</returns>
public bool ApplyCleaning(Cleaner cleaner)
{
    try
    {
        // Perform item-level cleaning
        CleanDatItems(cleaner);

        // Process description to machine name
        if (cleaner?.DescriptionAsName == true)
            MachineDescriptionToName();

        // If we are removing scene dates, do that now
        if (cleaner?.SceneDateStrip == true)
            StripSceneDatesFromItems();

        // Run the one game per region logic, if required
        if (cleaner?.OneGamePerRegion == true)
            OneGamePerRegion(cleaner.RegionList);

        // Run the one rom per game logic, if required
        if (cleaner?.OneRomPerGame == true)
            OneRomPerGame();

        // If we are removing fields, do that now
        // BUGFIX: use the null-conditional like the checks above; the previous
        // direct access threw on a null cleaner and made the method return false
        if (cleaner?.ExcludeFields != null && cleaner.ExcludeFields.Any())
            RemoveFieldsFromItems(cleaner.ExcludeFields);

        // Remove all marked items
        Items.ClearMarked();

        // We remove any blanks, if we aren't supposed to have any
        if (cleaner?.KeepEmptyGames == false)
            Items.ClearEmpty();
    }
    catch (Exception ex)
    {
        Globals.Logger.Error(ex.ToString());
        return false;
    }

    return true;
}
|
|
|
|
|
|
|
2020-08-21 10:38:42 -07:00
|
|
|
|
/// <summary>
/// Apply a set of Extra INIs on the DatFile
/// </summary>
/// <param name="extras">ExtrasIni to use</param>
/// <returns>True if the extras were applied, false on error</returns>
public bool ApplyExtras(ExtraIni extras)
{
    try
    {
        // Bucket by game first so each key is a machine name
        Items.BucketBy(Field.Machine_Name, DedupeType.None);

        // Create a new set of mappings based on the items:
        // machine name -> (field -> value to assign)
        var map = new Dictionary<string, Dictionary<Field, string>>();

        // Loop through each of the extras
        foreach (ExtraIniItem item in extras.Items)
        {
            foreach (var mapping in item.Mappings)
            {
                string key = mapping.Key;
                List<string> machineNames = mapping.Value;

                // If we have the root folder, assume boolean
                if (string.Equals(key, "ROOT_FOLDER"))
                    key = "true";

                // Loop through the machines and add the new mappings;
                // a later extra overwrites an earlier one for the same field
                foreach (string machine in machineNames)
                {
                    if (!map.ContainsKey(machine))
                        map[machine] = new Dictionary<Field, string>();

                    map[machine][item.Field] = key;
                }
            }
        }

        // Now apply the new set of mappings to every item of each mapped machine
        foreach (string key in map.Keys)
        {
            // If the key doesn't exist in the DAT, continue
            if (!Items.ContainsKey(key))
                continue;

            List<DatItem> datItems = Items[key];
            var mappings = map[key];

            // Items are mutated in place; no reassignment needed
            foreach (var datItem in datItems)
            {
                datItem.SetFields(mappings);
            }
        }
    }
    catch (Exception ex)
    {
        Globals.Logger.Error(ex.ToString());
        return false;
    }

    return true;
}
|
|
|
|
|
|
|
2020-08-17 23:28:12 -07:00
|
|
|
|
/// <summary>
/// Apply a Filter on the DatFile
/// </summary>
/// <param name="filter">Filter to use</param>
/// <returns>True if the DatFile was filtered, false on error</returns>
public bool ApplyFilter(Filter filter)
{
    // A missing filter cannot be applied
    if (filter == null)
        return false;

    try
    {
        // Walk a snapshot of the keys so we can safely write back to the dictionary
        foreach (string bucketKey in Items.Keys.ToList())
        {
            List<DatItem> bucket = Items[bucketKey];

            // Flag every non-null item that fails the filter for later removal
            foreach (DatItem datItem in bucket)
            {
                if (datItem != null && !datItem.PassesFilter(filter))
                    datItem.Remove = true;
            }

            // Assign back for caution
            Items[bucketKey] = bucket;
        }
    }
    catch (Exception ex)
    {
        Globals.Logger.Error(ex.ToString());
        return false;
    }

    return true;
}
|
|
|
|
|
|
|
2020-08-28 13:45:01 -07:00
|
|
|
|
/// <summary>
/// Apply splitting on the DatFile
/// </summary>
/// <param name="splitType">Split type to try</param>
/// <param name="useTags">True if DatFile tags override splitting, false otherwise</param>
/// <returns>True if the DatFile was split, false on error</returns>
public bool ApplySplitting(MergingFlag splitType, bool useTags)
{
    try
    {
        // If we are using tags from the DAT, set the proper input for split type unless overridden
        if (useTags && splitType == MergingFlag.None)
            splitType = Header.ForceMerging;

        // Run internal splitting; each branch rebuckets and rewrites set contents in place
        switch (splitType)
        {
            case MergingFlag.None:
                // No-op
                break;
            case MergingFlag.Device:
                CreateDeviceNonMergedSets(DedupeType.None);
                break;
            case MergingFlag.Full:
                CreateFullyNonMergedSets(DedupeType.None);
                break;
            case MergingFlag.NonMerged:
                CreateNonMergedSets(DedupeType.None);
                break;
            case MergingFlag.Merged:
                CreateMergedSets(DedupeType.None);
                break;
            case MergingFlag.Split:
                CreateSplitSets(DedupeType.None);
                break;
        }
    }
    catch (Exception ex)
    {
        Globals.Logger.Error(ex.ToString());
        return false;
    }

    return true;
}
|
|
|
|
|
|
|
2020-08-27 21:40:08 -07:00
|
|
|
|
/// <summary>
/// Apply SuperDAT naming logic to a merged DatFile
/// </summary>
/// <param name="inputs">List of inputs to use for renaming</param>
public void ApplySuperDAT(List<ParentablePath> inputs)
{
    List<string> keys = Items.Keys.ToList();
    Parallel.ForEach(keys, Globals.ParallelOptions, key =>
    {
        List<DatItem> items = Items[key].ToList();
        List<DatItem> newItems = new List<DatItem>();
        foreach (DatItem item in items)
        {
            DatItem newItem = item;
            string filename = inputs[newItem.Source.Index].CurrentPath;

            // BUGFIX: coalesce a null parent path to empty so the Length
            // access below cannot throw a NullReferenceException
            string rootpath = inputs[newItem.Source.Index].ParentPath ?? string.Empty;

            // Make sure a non-empty root ends with a separator before it is stripped
            if (!string.IsNullOrWhiteSpace(rootpath))
                rootpath += Path.DirectorySeparatorChar.ToString();

            // Strip the root prefix, then prepend "<dir>/<file-stem>/" to the
            // machine name to create the SuperDAT hierarchy
            filename = filename.Remove(0, rootpath.Length);
            newItem.Machine.Name = Path.GetDirectoryName(filename) + Path.DirectorySeparatorChar
                + Path.GetFileNameWithoutExtension(filename) + Path.DirectorySeparatorChar
                + newItem.Machine.Name;

            newItems.Add(newItem);
        }

        // Replace the bucket with the renamed items
        Items.Remove(key);
        Items.AddRange(key, newItems);
    });
}
|
|
|
|
|
|
|
2020-08-28 13:33:05 -07:00
|
|
|
|
/// <summary>
/// Clean individual items based on the current filter
/// </summary>
/// <param name="cleaner">Cleaner to use; null skips every optional step</param>
public void CleanDatItems(Cleaner cleaner)
{
    List<string> keys = Items.Keys.ToList();
    foreach (string key in keys)
    {
        // For every item in the current key
        List<DatItem> items = Items[key];
        foreach (DatItem item in items)
        {
            // If we have a null item, we can't clean it
            if (item == null)
                continue;

            // If we're stripping unicode characters, do so from all relevant things
            if (cleaner?.RemoveUnicode == true)
            {
                item.Name = Sanitizer.RemoveUnicodeCharacters(item.Name);
                item.Machine.Name = Sanitizer.RemoveUnicodeCharacters(item.Machine.Name);
                item.Machine.Description = Sanitizer.RemoveUnicodeCharacters(item.Machine.Description);
            }

            // If we're in cleaning mode, do so from all relevant things
            if (cleaner?.Clean == true)
            {
                item.Machine.Name = Sanitizer.CleanGameName(item.Machine.Name);
                item.Machine.Description = Sanitizer.CleanGameName(item.Machine.Description);
            }

            // If we are in single game mode, rename all games
            if (cleaner?.Single == true)
                item.Machine.Name = "!";

            // If we are in NTFS trim mode, trim the game name
            if (cleaner?.Trim == true)
            {
                // Windows max name length is 260; budget for the machine name
                // and the optional root path, keeping the extension intact
                int usableLength = 260 - item.Machine.Name.Length - (cleaner.Root?.Length ?? 0);
                if (item.Name.Length > usableLength)
                {
                    string ext = Path.GetExtension(item.Name);
                    item.Name = item.Name.Substring(0, usableLength - ext.Length);
                    item.Name += ext;
                }
            }
        }

        // Assign back for caution
        Items[key] = items;
    }
}
|
|
|
|
|
|
|
2020-08-17 23:28:12 -07:00
|
|
|
|
/// <summary>
/// Use game descriptions as names in the DAT, updating cloneof/romof/sampleof
/// </summary>
public void MachineDescriptionToName()
{
    try
    {
        // First we want to get a mapping for all games to description
        ConcurrentDictionary<string, string> mapping = new ConcurrentDictionary<string, string>();
        Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
        {
            List<DatItem> items = Items[key];
            foreach (DatItem item in items)
            {
                // If the key mapping doesn't exist, add it; the description is
                // sanitized so it can act as a machine name
                // NOTE(review): assumes Machine.Description is non-null here —
                // a null would throw and be swallowed by the catch below; confirm
                mapping.TryAdd(item.Machine.Name, item.Machine.Description.Replace('/', '_').Replace("\"", "''").Replace(":", " -"));
            }
        });

        // Now we loop through every item and update accordingly
        Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
        {
            List<DatItem> items = Items[key];
            List<DatItem> newItems = new List<DatItem>();
            foreach (DatItem item in items)
            {
                // Update machine name
                if (!string.IsNullOrWhiteSpace(item.Machine.Name) && mapping.ContainsKey(item.Machine.Name))
                    item.Machine.Name = mapping[item.Machine.Name];

                // Update cloneof
                if (!string.IsNullOrWhiteSpace(item.Machine.CloneOf) && mapping.ContainsKey(item.Machine.CloneOf))
                    item.Machine.CloneOf = mapping[item.Machine.CloneOf];

                // Update romof
                if (!string.IsNullOrWhiteSpace(item.Machine.RomOf) && mapping.ContainsKey(item.Machine.RomOf))
                    item.Machine.RomOf = mapping[item.Machine.RomOf];

                // Update sampleof
                if (!string.IsNullOrWhiteSpace(item.Machine.SampleOf) && mapping.ContainsKey(item.Machine.SampleOf))
                    item.Machine.SampleOf = mapping[item.Machine.SampleOf];

                // Add the new item to the output list
                newItems.Add(item);
            }

            // Replace the old list of roms with the new one
            Items.Remove(key);
            Items.AddRange(key, newItems);
        });
    }
    catch (Exception ex)
    {
        // Best-effort operation: failures are logged, not propagated
        Globals.Logger.Warning(ex.ToString());
    }
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Filter a DAT using 1G1R logic given an ordered set of regions
/// </summary>
/// <param name="regions">Ordered list of regions to use, most preferred first</param>
/// <remarks>
/// In the most technical sense, the way that the region list is being used does not
/// confine its values to be just regions. Since it's essentially acting like a
/// specialized version of the machine name filter, anything that is usually encapsulated
/// in parenthesis would be matched on, including disc numbers, languages, editions,
/// and anything else commonly used. Please note that, unlike other existing 1G1R
/// solutions, this does not have the ability to contain custom mappings of parent
/// to clone sets based on name, nor does it have the ability to match on the
/// Release DatItem type.
/// </remarks>
public void OneGamePerRegion(List<string> regions)
{
    // If we have null region list, make it empty
    if (regions == null)
        regions = new List<string>();

    // For sake of ease, the first thing we want to do is bucket by game
    Items.BucketBy(Field.Machine_Name, DedupeType.None, norename: true);

    // Then we want to get a mapping of all machines to parents
    // (lower-cased parent name -> lower-cased child machine names)
    Dictionary<string, List<string>> parents = new Dictionary<string, List<string>>();
    foreach (string key in Items.Keys)
    {
        // The first item of the bucket is representative of the whole machine
        DatItem item = Items[key][0];

        // Match on CloneOf first
        if (!string.IsNullOrEmpty(item.Machine.CloneOf))
        {
            if (!parents.ContainsKey(item.Machine.CloneOf.ToLowerInvariant()))
                parents.Add(item.Machine.CloneOf.ToLowerInvariant(), new List<string>());

            parents[item.Machine.CloneOf.ToLowerInvariant()].Add(item.Machine.Name.ToLowerInvariant());
        }

        // Then by RomOf
        else if (!string.IsNullOrEmpty(item.Machine.RomOf))
        {
            if (!parents.ContainsKey(item.Machine.RomOf.ToLowerInvariant()))
                parents.Add(item.Machine.RomOf.ToLowerInvariant(), new List<string>());

            parents[item.Machine.RomOf.ToLowerInvariant()].Add(item.Machine.Name.ToLowerInvariant());
        }

        // Otherwise, treat it as a parent (mapped to itself)
        else
        {
            if (!parents.ContainsKey(item.Machine.Name.ToLowerInvariant()))
                parents.Add(item.Machine.Name.ToLowerInvariant(), new List<string>());

            parents[item.Machine.Name.ToLowerInvariant()].Add(item.Machine.Name.ToLowerInvariant());
        }
    }

    // Once we have the full list of mappings, filter out games to keep
    foreach (string key in parents.Keys)
    {
        // Find the first machine that matches the regions in order, if possible;
        // the region is matched inside a parenthesized group of the machine name
        string machine = default;
        foreach (string region in regions)
        {
            machine = parents[key].FirstOrDefault(m => Regex.IsMatch(m, @"\(.*" + region + @".*\)", RegexOptions.IgnoreCase));
            if (machine != default)
                break;
        }

        // If we didn't get a match, use the parent
        if (machine == default)
            machine = key;

        // Remove the kept machine from the list
        parents[key].Remove(machine);

        // Remove the rest of the machines for this parent from the DAT
        parents[key].ForEach(k => Items.Remove(k));
    }

    // Finally, strip out the parent tags
    RemoveTagsFromChild();
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Ensure that all roms are in their own game (or at least try to ensure)
/// </summary>
public void OneRomPerGame()
{
    // Because this introduces subfolders, we need to set the SuperDAT type
    Header.Type = "SuperDAT";

    // For each rom, we want to update the game to be "<game name>/<rom name>"
    Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
    {
        List<DatItem> items = Items[key];
        for (int i = 0; i < items.Count; i++)
        {
            // Append the item name minus its extension to the machine name;
            // a name with no '.' is used as-is (Take of the single segment)
            string[] splitname = items[i].Name.Split('.');
            items[i].Machine.Name += $"/{string.Join(".", splitname.Take(splitname.Length > 1 ? splitname.Length - 1 : 1))}";

            // The item itself keeps only the file-name portion
            items[i].Name = Path.GetFileName(items[i].Name);
        }
    });
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Remove fields as per the header
/// </summary>
/// <param name="fields">List of fields to use</param>
public void RemoveFieldsFromItems(List<Field> fields)
{
    // Normalize a null field list to an empty one
    List<Field> safeFields = fields ?? new List<Field>();

    // Output the logging statement
    Globals.Logger.User("Removing filtered fields");

    // Now process all of the roms, stripping the fields in place
    Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
    {
        List<DatItem> bucket = Items[key];
        foreach (DatItem datItem in bucket)
        {
            datItem.RemoveFields(safeFields);
        }

        // Rewrite the bucket with the updated items
        Items.Remove(key);
        Items.AddRange(key, bucket);
    });
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Strip the dates from the beginning of scene-style set names
/// </summary>
public void StripSceneDatesFromItems()
{
    // Output the logging statement
    Globals.Logger.User("Stripping scene-style dates");

    // Set the regex pattern to use
    string pattern = @"([0-9]{2}\.[0-9]{2}\.[0-9]{2}-)(.*?-.*?)";

    // Now process all of the roms, rewriting names and descriptions in place
    Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
    {
        List<DatItem> bucket = Items[key];
        foreach (DatItem datItem in bucket)
        {
            // Keep only the post-date portion of a scene-style machine name
            if (Regex.IsMatch(datItem.Machine.Name, pattern))
                datItem.Machine.Name = Regex.Replace(datItem.Machine.Name, pattern, "$2");

            // Same treatment for the description
            if (Regex.IsMatch(datItem.Machine.Description, pattern))
                datItem.Machine.Description = Regex.Replace(datItem.Machine.Description, pattern, "$2");
        }

        // Rewrite the bucket with the updated items
        Items.Remove(key);
        Items.AddRange(key, bucket);
    });
}
|
|
|
|
|
|
|
|
|
|
|
|
#endregion
|
|
|
|
|
|
|
2020-08-27 22:53:21 -07:00
|
|
|
|
// TODO: Should any of these create a new DatFile in the process?
|
|
|
|
|
|
// The reason this comes up is that doing any of the splits or merges
|
|
|
|
|
|
// is an inherently destructive process. Making it output a new DatFile
|
|
|
|
|
|
// might make it easier to deal with multiple internal steps. On the other
|
|
|
|
|
|
// hand, this will increase memory usage significantly and would force the
|
|
|
|
|
|
// existing paths to behave entirely differently
|
2020-08-17 23:28:12 -07:00
|
|
|
|
#region Internal Splitting/Merging
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Use device_ref tags to get full non-merged sets and remove parenting tags
/// </summary>
/// <param name="mergeroms">Dedupe type to be used</param>
private void CreateDeviceNonMergedSets(DedupeType mergeroms)
{
    Globals.Logger.User("Creating device non-merged sets from the DAT");

    // For sake of ease, the first thing we want to do is bucket by game
    Items.BucketBy(Field.Machine_Name, mergeroms, norename: true);

    // Now we want to loop through all of the games and set the correct information;
    // each call is repeated until it reports no new roms were added (fixed point)
    while (AddRomsFromDevices(false, false)) ;
    while (AddRomsFromDevices(true, false)) ;

    // Then, remove the romof and cloneof tags so it's not picked up by the manager
    RemoveTagsFromChild();
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Use cloneof tags to create non-merged sets and remove the tags plus using the device_ref tags to get full sets
/// </summary>
/// <param name="mergeroms">Dedupe type to be used</param>
private void CreateFullyNonMergedSets(DedupeType mergeroms)
{
    Globals.Logger.User("Creating fully non-merged sets from the DAT");

    // For sake of ease, the first thing we want to do is bucket by game
    Items.BucketBy(Field.Machine_Name, mergeroms, norename: true);

    // Now we want to loop through all of the games and set the correct information;
    // device additions repeat until no new roms are found (fixed point)
    while (AddRomsFromDevices(true, true)) ;
    AddRomsFromDevices(false, true);
    AddRomsFromParent();

    // Now that we have looped through the cloneof tags, we loop through the romof tags
    AddRomsFromBios();

    // Then, remove the romof and cloneof tags so it's not picked up by the manager
    RemoveTagsFromChild();
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Use cloneof tags to create merged sets and remove the tags
/// </summary>
/// <param name="mergeroms">Dedupe type to be used</param>
private void CreateMergedSets(DedupeType mergeroms)
{
    Globals.Logger.User("Creating merged sets from the DAT");

    // For sake of ease, the first thing we want to do is bucket by game
    Items.BucketBy(Field.Machine_Name, mergeroms, norename: true);

    // Now we want to loop through all of the games and set the correct information
    AddRomsFromChildren();

    // Now that we have looped through the cloneof tags, we loop through the romof tags
    RemoveBiosRomsFromChild(false);
    RemoveBiosRomsFromChild(true);

    // Finally, remove the romof and cloneof tags so it's not picked up by the manager
    RemoveTagsFromChild();
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Use cloneof tags to create non-merged sets and remove the tags
/// </summary>
/// <param name="mergeroms">Dedupe type to be used</param>
private void CreateNonMergedSets(DedupeType mergeroms)
{
    Globals.Logger.User("Creating non-merged sets from the DAT");

    // For sake of ease, the first thing we want to do is bucket by game
    Items.BucketBy(Field.Machine_Name, mergeroms, norename: true);

    // Now we want to loop through all of the games and set the correct information
    AddRomsFromParent();

    // Now that we have looped through the cloneof tags, we loop through the romof tags
    RemoveBiosRomsFromChild(false);
    RemoveBiosRomsFromChild(true);

    // Finally, remove the romof and cloneof tags so it's not picked up by the manager
    RemoveTagsFromChild();
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Use cloneof and romof tags to create split sets and remove the tags
/// </summary>
/// <param name="mergeroms">Dedupe type to be used</param>
private void CreateSplitSets(DedupeType mergeroms)
{
    Globals.Logger.User("Creating split sets from the DAT");

    // For sake of ease, the first thing we want to do is bucket by game
    Items.BucketBy(Field.Machine_Name, mergeroms, norename: true);

    // Now we want to loop through all of the games and set the correct information
    RemoveRomsFromChild();

    // Now that we have looped through the cloneof tags, we loop through the romof tags
    RemoveBiosRomsFromChild(false);
    RemoveBiosRomsFromChild(true);

    // Finally, remove the romof and cloneof tags so it's not picked up by the manager
    RemoveTagsFromChild();
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Use romof tags to add roms to the children
/// </summary>
private void AddRomsFromBios()
{
    // Process games in a stable, sorted order
    List<string> games = Items.Keys.OrderBy(g => g).ToList();
    foreach (string game in games)
    {
        // If the game has no items in it, we want to continue
        if (Items[game].Count == 0)
            continue;

        // Determine if the game has a parent or not
        string parent = null;
        if (!string.IsNullOrWhiteSpace(Items[game][0].Machine.RomOf))
            parent = Items[game][0].Machine.RomOf;

        // If the parent doesn't exist, we want to continue
        if (string.IsNullOrWhiteSpace(parent))
            continue;

        // If the parent doesn't have any items, we want to continue
        if (Items[parent].Count == 0)
            continue;

        // If the parent exists and has items, we copy the items from the parent to the current game
        DatItem copyFrom = Items[game][0];
        List<DatItem> parentItems = Items[parent];
        foreach (DatItem item in parentItems)
        {
            DatItem datItem = (DatItem)item.Clone();
            datItem.CopyMachineInformation(copyFrom);

            // Only add the copy if no item with the same name (or an equal item)
            // already exists; !Any(...) replaces the old Where(...).Count() == 0,
            // which enumerated the whole bucket on every check
            if (!Items[game].Any(i => i.Name == datItem.Name) && !Items[game].Contains(datItem))
                Items.Add(game, datItem);
        }
    }
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Use device_ref and optionally slotoption tags to add roms to the children
/// </summary>
/// <param name="dev">True if only child device sets are touched, false for non-device sets (default)</param>
/// <param name="slotoptions">True if slotoptions tags are used as well, false otherwise</param>
/// <returns>True if any new item was added to any game, false otherwise</returns>
private bool AddRomsFromDevices(bool dev = false, bool slotoptions = false)
{
    bool foundnew = false;
    List<string> games = Items.Keys.OrderBy(g => g).ToList();
    foreach (string game in games)
    {
        // If the game doesn't have items, we continue
        if (Items[game] == null || Items[game].Count == 0)
            continue;

        // If the game (is/is not) a device, we want to continue
        // (XOR: dev == true keeps only device sets, dev == false keeps only non-device sets)
        if (dev ^ (Items[game][0].Machine.MachineType.HasFlag(MachineType.Device)))
            continue;

        // If the game has no devices (or, when requested, no slots), we continue
        if (Items[game][0].Machine.DeviceReferences == null
            || Items[game][0].Machine.DeviceReferences.Count == 0
            || (slotoptions && Items[game][0].Machine.Slots == null)
            || (slotoptions && Items[game][0].Machine.Slots.Count == 0))
        {
            continue;
        }

        // Loop over the referenced devices, copying their items into the current game
        List<ListXmlDeviceReference> deviceReferences = Items[game][0].Machine.DeviceReferences;
        List<ListXmlDeviceReference> newdevs = new List<ListXmlDeviceReference>();
        foreach (ListXmlDeviceReference deviceReference in deviceReferences)
        {
            // If the device doesn't exist then we continue
            if (Items[deviceReference.Name].Count == 0)
                continue;

            // Otherwise, copy the items from the device to the current game
            DatItem copyFrom = Items[game][0];
            List<DatItem> devItems = Items[deviceReference.Name];
            foreach (DatItem item in devItems)
            {
                DatItem datItem = (DatItem)item.Clone();

                // Collect the transitive device references so they can be merged in below
                newdevs.AddRange(datItem.Machine.DeviceReferences ?? new List<ListXmlDeviceReference>());
                datItem.CopyMachineInformation(copyFrom);
                if (!Items[game].Any(i => i.Name.ToLowerInvariant() == datItem.Name.ToLowerInvariant()))
                {
                    foundnew = true;
                    Items.Add(game, datItem);
                }
            }
        }

        // Now that every device is accounted for, add the new list of devices, if they don't already exist
        foreach (ListXmlDeviceReference device in newdevs)
        {
            if (!Items[game][0].Machine.DeviceReferences.Select(d => d.Name).Contains(device.Name))
                Items[game][0].Machine.DeviceReferences.Add(new ListXmlDeviceReference() { Name = device.Name });
        }

        // If we're checking slotoptions too
        if (slotoptions)
        {
            // Loop over the slots, copying each slotoption's items into the current game
            List<ListXmlSlot> slots = Items[game][0].Machine.Slots;
            List<ListXmlSlot> newSlots = new List<ListXmlSlot>();
            foreach (ListXmlSlot slot in slots)
            {
                foreach (ListXmlSlotOption slotOption in slot.SlotOptions)
                {
                    // If the slotoption doesn't exist then we continue
                    if (Items[slotOption.Name].Count == 0)
                        continue;

                    // Otherwise, copy the items from the slotoption to the current game
                    DatItem copyFrom = Items[game][0];
                    List<DatItem> slotItems = Items[slotOption.Name];
                    foreach (DatItem item in slotItems)
                    {
                        DatItem datItem = (DatItem)item.Clone();
                        newSlots.AddRange(datItem.Machine.Slots ?? new List<ListXmlSlot>());
                        datItem.CopyMachineInformation(copyFrom);
                        if (!Items[game].Any(i => i.Name.ToLowerInvariant() == datItem.Name.ToLowerInvariant()))
                        {
                            foundnew = true;
                            Items.Add(game, datItem);
                        }
                    }
                }
            }

            // Now that every slotoption is accounted for, add the new list of slots, if they don't already exist.
            // BUGFIX: the original added every slot unconditionally, contradicting this comment and
            // diverging from the device-reference handling above; mirror that dedupe-by-name check here.
            foreach (ListXmlSlot slot in newSlots)
            {
                if (!Items[game][0].Machine.Slots.Select(s => s.Name).Contains(slot.Name))
                    Items[game][0].Machine.Slots.Add(slot);
            }
        }
    }

    return foundnew;
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Use cloneof tags to add roms to the children, setting the new romof tag in the process
/// </summary>
private void AddRomsFromParent()
{
    List<string> games = Items.Keys.OrderBy(g => g).ToList();
    foreach (string game in games)
    {
        // If the game has no items in it, we want to continue
        if (Items[game].Count == 0)
            continue;

        // Determine if the game has a parent or not
        string parent = null;
        if (!string.IsNullOrWhiteSpace(Items[game][0].Machine.CloneOf))
            parent = Items[game][0].Machine.CloneOf;

        // If the parent doesn't exist, we want to continue
        if (string.IsNullOrWhiteSpace(parent))
            continue;

        // If the parent doesn't have any items, we want to continue
        if (Items[parent].Count == 0)
            continue;

        // If the parent exists and has items, we copy the items from the parent to the current game
        DatItem copyFrom = Items[game][0];
        List<DatItem> parentItems = Items[parent];
        foreach (DatItem item in parentItems)
        {
            DatItem datItem = (DatItem)item.Clone();
            datItem.CopyMachineInformation(copyFrom);

            // !Any() replaces the non-idiomatic Where().Count() == 0; same semantics
            if (!Items[game].Any(i => i.Name.ToLowerInvariant() == datItem.Name.ToLowerInvariant())
                && !Items[game].Contains(datItem))
            {
                Items.Add(game, datItem);
            }
        }

        // Now we want to get the parent romof tag and put it in each of the items
        List<DatItem> items = Items[game];
        string romof = Items[parent][0].Machine.RomOf;
        foreach (DatItem item in items)
        {
            item.Machine.RomOf = romof;
        }
    }
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Use cloneof tags to add roms to the parents, removing the child sets in the process
/// </summary>
/// <param name="subfolder">True to add DatItems to subfolder of parent (not including Disk), false otherwise</param>
/// <remarks>
/// Items are moved (not copied): each child item is re-tagged with the parent's machine
/// information and, where applicable, renamed into a "machine\name" subfolder path, then
/// the whole child game is removed from the DAT. Merge tags suppress duplicates.
/// NOTE(review): item.Name is mutated in place before the add; iteration order matters here.
/// </remarks>
private void AddRomsFromChildren(bool subfolder = true)
{
    List<string> games = Items.Keys.OrderBy(g => g).ToList();
    foreach (string game in games)
    {
        // If the game has no items in it, we want to continue
        if (Items[game].Count == 0)
            continue;

        // Determine if the game has a parent or not
        string parent = null;
        if (!string.IsNullOrWhiteSpace(Items[game][0].Machine.CloneOf))
            parent = Items[game][0].Machine.CloneOf;

        // If there is no parent, then we continue
        if (string.IsNullOrWhiteSpace(parent))
            continue;

        // Otherwise, move the items from the current game to a subfolder of the parent game.
        // If the parent set is empty, synthesize a stand-in machine named after the parent
        // so the moved items still get sensible machine information.
        DatItem copyFrom;
        if (Items[parent].Count == 0)
        {
            copyFrom = new Rom();
            copyFrom.Machine.Name = parent;
            copyFrom.Machine.Description = parent;
        }
        else
        {
            copyFrom = Items[parent][0];
        }

        List<DatItem> items = Items[game];
        foreach (DatItem item in items)
        {
            // Special disk handling (disks never get the subfolder rename)
            if (item.ItemType == ItemType.Disk)
            {
                Disk disk = item as Disk;

                // If the merge tag exists and the parent already contains it, skip
                if (disk.MergeTag != null && Items[parent].Select(i => i.Name).Contains(disk.MergeTag))
                {
                    continue;
                }

                // If the merge tag exists but the parent doesn't contain it, add to parent
                else if (disk.MergeTag != null && !Items[parent].Select(i => i.Name).Contains(disk.MergeTag))
                {
                    item.CopyMachineInformation(copyFrom);
                    Items.Add(parent, item);
                }

                // If there is no merge tag, add to parent
                else if (disk.MergeTag == null)
                {
                    item.CopyMachineInformation(copyFrom);
                    Items.Add(parent, item);
                }
            }

            // Special rom handling
            else if (item.ItemType == ItemType.Rom)
            {
                Rom rom = item as Rom;

                // If the merge tag exists and the parent already contains it, skip
                if (rom.MergeTag != null && Items[parent].Select(i => i.Name).Contains(rom.MergeTag))
                {
                    continue;
                }

                // If the merge tag exists but the parent doesn't contain it, add to subfolder of parent
                else if (rom.MergeTag != null && !Items[parent].Select(i => i.Name).Contains(rom.MergeTag))
                {
                    if (subfolder)
                        item.Name = $"{item.Machine.Name}\\{item.Name}";

                    item.CopyMachineInformation(copyFrom);
                    Items.Add(parent, item);
                }

                // If the parent doesn't already contain this item, add to subfolder of parent
                else if (!Items[parent].Contains(item))
                {
                    if (subfolder)
                        item.Name = $"{item.Machine.Name}\\{item.Name}";

                    item.CopyMachineInformation(copyFrom);
                    Items.Add(parent, item);
                }
            }

            // All other that would be missing to subfolder of parent
            else if (!Items[parent].Contains(item))
            {
                if (subfolder)
                    item.Name = $"{item.Machine.Name}\\{item.Name}";

                item.CopyMachineInformation(copyFrom);
                Items.Add(parent, item);
            }
        }

        // Then, remove the old game so it's not picked up by the writer
        Items.Remove(game);
    }
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Remove all BIOS and device sets
/// </summary>
private void RemoveBiosAndDeviceSets()
{
    foreach (string key in Items.Keys.OrderBy(k => k).ToList())
    {
        // Empty sets carry no machine information to inspect, so leave them alone
        if (Items[key].Count == 0)
            continue;

        // Drop the set outright when its machine is flagged as a BIOS or a device
        MachineType machineType = Items[key][0].Machine.MachineType;
        if (machineType.HasFlag(MachineType.Bios) || machineType.HasFlag(MachineType.Device))
            Items.Remove(key);
    }
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Use romof tags to remove bios roms from children
/// </summary>
/// <param name="bios">True if only child Bios sets are touched, false for non-bios sets (default)</param>
private void RemoveBiosRomsFromChild(bool bios = false)
{
    // Loop through the romof tags
    foreach (string gameName in Items.Keys.OrderBy(k => k).ToList())
    {
        // Skip empty sets
        if (Items[gameName].Count == 0)
            continue;

        // XOR filter: bios == true keeps only BIOS sets, bios == false keeps only non-BIOS sets
        if (bios ^ Items[gameName][0].Machine.MachineType.HasFlag(MachineType.Bios))
            continue;

        // Resolve the romof parent; nothing to do without one
        string parentName = Items[gameName][0].Machine.RomOf;
        if (string.IsNullOrWhiteSpace(parentName))
            continue;

        // A parent with no items contributes nothing to remove
        if (Items[parentName].Count == 0)
            continue;

        // Strip every occurrence of each parent item from the child set
        List<DatItem> parentItems = Items[parentName];
        foreach (DatItem parentItem in parentItems)
        {
            DatItem candidate = (DatItem)parentItem.Clone();
            while (Items[gameName].Contains(candidate))
            {
                Items.Remove(gameName, candidate);
            }
        }
    }
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Use cloneof tags to remove roms from the children
/// </summary>
private void RemoveRomsFromChild()
{
    foreach (string gameName in Items.Keys.OrderBy(k => k).ToList())
    {
        // Skip empty sets
        if (Items[gameName].Count == 0)
            continue;

        // Resolve the cloneof parent; nothing to do without one
        string parentName = Items[gameName][0].Machine.CloneOf;
        if (string.IsNullOrWhiteSpace(parentName))
            continue;

        // A parent with no items contributes nothing to remove
        if (Items[parentName].Count == 0)
            continue;

        // Strip every occurrence of each parent item from the child set
        List<DatItem> parentItems = Items[parentName];
        foreach (DatItem parentItem in parentItems)
        {
            DatItem candidate = (DatItem)parentItem.Clone();
            while (Items[gameName].Contains(candidate))
            {
                Items.Remove(gameName, candidate);
            }
        }

        // Propagate the parent's romof value onto every remaining child item
        string inheritedRomOf = Items[parentName][0].Machine.RomOf;
        foreach (DatItem remaining in Items[gameName])
        {
            remaining.Machine.RomOf = inheritedRomOf;
        }
    }
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Remove all romof and cloneof tags from all games
/// </summary>
private void RemoveTagsFromChild()
{
    foreach (string gameName in Items.Keys.OrderBy(k => k).ToList())
    {
        // Clear every parent-relationship tag on each item's machine
        foreach (DatItem datItem in Items[gameName])
        {
            datItem.Machine.CloneOf = null;
            datItem.Machine.RomOf = null;
            datItem.Machine.SampleOf = null;
        }
    }
}
|
|
|
|
|
|
|
|
|
|
|
|
#endregion
|
|
|
|
|
|
|
2019-01-08 11:49:31 -08:00
|
|
|
|
#region Parsing
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Create a DatFile and parse a file into it
/// </summary>
/// <param name="filename">Name of the file to be parsed</param>
/// <returns>A newly created DatFile populated from the given file</returns>
public static DatFile CreateAndParse(string filename)
{
    DatFile created = Create();
    created.Parse(new ParentablePath(filename));
    return created;
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Parse a DAT and return all found games and roms within
/// </summary>
/// <param name="filename">Name of the file to be parsed</param>
/// <param name="indexId">Index ID for the DAT</param>
/// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param>
/// <param name="keepext">True if original extension should be kept, false otherwise (default)</param>
public void Parse(string filename, int indexId = 0, bool keep = false, bool keepext = false)
{
    // Strip any surrounding quotes before wrapping, then defer to the path-based overload
    string unquoted = filename.Trim('"');
    Parse(new ParentablePath(unquoted), indexId, keep, keepext);
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Parse a DAT and return all found games and roms within
/// </summary>
/// <param name="filename">Name of the file to be parsed</param>
/// <param name="indexId">Index ID for the DAT</param>
/// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param>
/// <param name="keepext">True if original extension should be kept, false otherwise (default)</param>
public void Parse(ParentablePath filename, int indexId = 0, bool keep = false, bool keepext = false)
{
    // Get the current path from the filename
    string currentPath = filename.CurrentPath;

    // Check the file extension first as a safeguard
    if (!PathExtensions.HasValidDatExtension(currentPath))
        return;

    // If the output filename isn't set already, get the internal filename
    Header.FileName = (string.IsNullOrWhiteSpace(Header.FileName) ? (keepext ? Path.GetFileName(currentPath) : Path.GetFileNameWithoutExtension(currentPath)) : Header.FileName);

    // If the output type isn't set already, get the internal output type
    Header.DatFormat = (Header.DatFormat == 0 ? currentPath.GetDatFormat() : Header.DatFormat);
    Items.SetBucketedBy(Field.DatItem_CRC); // Setting this because it can reduce issues later

    // Now parse the correct type of DAT
    try
    {
        Create(currentPath.GetDatFormat(), this)?.ParseFile(currentPath, indexId, keep);
    }
    catch (Exception ex)
    {
        // BUGFIX: include the actual file path in the error message instead of a
        // hard-coded placeholder, so failures can be traced to the offending file
        Globals.Logger.Error($"Error with file '{currentPath}': {ex}");
    }
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Add a rom to the Dat after checking
/// </summary>
/// <param name="item">Item data to check against</param>
/// <returns>The key for the item, or string.Empty if the item was skipped</returns>
protected string ParseAddHelper(DatItem item)
{
    string key = string.Empty;

    // If there's no name in the rom, we log and skip it
    if (item.Name == null)
    {
        Globals.Logger.Warning($"{Header.FileName}: Rom with no name found! Skipping...");
        return key;
    }

    // If we have a Disk, Media, or Rom, clean the hash data
    if (item.ItemType == ItemType.Disk)
    {
        Disk disk = item as Disk;

        // If the file has absolutely no hashes, mark as nodump and log
        if (disk.ItemStatus != ItemStatus.Nodump
            && string.IsNullOrWhiteSpace(disk.MD5)
            && string.IsNullOrWhiteSpace(disk.SHA1))
        {
            Globals.Logger.Verbose($"Incomplete entry for '{disk.Name}' will be output as nodump");
            disk.ItemStatus = ItemStatus.Nodump;
        }

        item = disk;
    }
    else if (item.ItemType == ItemType.Rom)
    {
        Rom rom = item as Rom;

        // If we have the case where there is SHA-1 and nothing else, we don't fill in any other part of the data
        if (rom.Size == -1
            && string.IsNullOrWhiteSpace(rom.CRC)
            && string.IsNullOrWhiteSpace(rom.MD5)
#if NET_FRAMEWORK
            && string.IsNullOrWhiteSpace(rom.RIPEMD160)
#endif
            && !string.IsNullOrWhiteSpace(rom.SHA1)
            && string.IsNullOrWhiteSpace(rom.SHA256)
            && string.IsNullOrWhiteSpace(rom.SHA384)
            && string.IsNullOrWhiteSpace(rom.SHA512))
        {
            // No-op, just catch it so it doesn't go further
            Globals.Logger.Verbose($"{Header.FileName}: Entry with only SHA-1 found - '{rom.Name}'");
        }

        // If we have a rom and it's missing size AND the hashes match a 0-byte file, fill in the rest of the info
        else if ((rom.Size == 0 || rom.Size == -1)
            && ((rom.CRC == Constants.CRCZero || string.IsNullOrWhiteSpace(rom.CRC))
                || rom.MD5 == Constants.MD5Zero
#if NET_FRAMEWORK
                || rom.RIPEMD160 == Constants.RIPEMD160Zero
#endif
                || rom.SHA1 == Constants.SHA1Zero
                || rom.SHA256 == Constants.SHA256Zero
                || rom.SHA384 == Constants.SHA384Zero
                || rom.SHA512 == Constants.SHA512Zero))
        {
            // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
            // Normalize to the canonical zero-byte values; deep hashes are deliberately
            // nulled out rather than set to their zero constants
            rom.Size = Constants.SizeZero;
            rom.CRC = Constants.CRCZero;
            rom.MD5 = Constants.MD5Zero;
#if NET_FRAMEWORK
            rom.RIPEMD160 = null; // Constants.RIPEMD160Zero;
#endif
            rom.SHA1 = Constants.SHA1Zero;
            rom.SHA256 = null; // Constants.SHA256Zero;
            rom.SHA384 = null; // Constants.SHA384Zero;
            rom.SHA512 = null; // Constants.SHA512Zero;
        }

        // If the file has no size and it's not the above case, mark as nodump and log
        else if (rom.ItemStatus != ItemStatus.Nodump && (rom.Size == 0 || rom.Size == -1))
        {
            Globals.Logger.Verbose($"{Header.FileName}: Incomplete entry for '{rom.Name}' will be output as nodump");
            rom.ItemStatus = ItemStatus.Nodump;
        }

        // If the file has a size but absolutely no hashes, mark as nodump and log
        else if (rom.ItemStatus != ItemStatus.Nodump
            && rom.Size > 0
            && string.IsNullOrWhiteSpace(rom.CRC)
            && string.IsNullOrWhiteSpace(rom.MD5)
#if NET_FRAMEWORK
            && string.IsNullOrWhiteSpace(rom.RIPEMD160)
#endif
            && string.IsNullOrWhiteSpace(rom.SHA1)
            && string.IsNullOrWhiteSpace(rom.SHA256)
            && string.IsNullOrWhiteSpace(rom.SHA384)
            && string.IsNullOrWhiteSpace(rom.SHA512))
        {
            Globals.Logger.Verbose($"{Header.FileName}: Incomplete entry for '{rom.Name}' will be output as nodump");
            rom.ItemStatus = ItemStatus.Nodump;
        }

        item = rom;
    }

    // Get the key and add the file
    key = item.GetKey(Field.DatItem_CRC);
    Items.Add(key, item);

    return key;
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Parse DatFile and return all found games and roms within
/// </summary>
/// <param name="filename">Name of the file to be parsed</param>
/// <param name="indexId">Index ID for the DAT</param>
/// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param>
/// <remarks>
/// Implemented by each format-specific subclass; invoked by <see cref="Parse(ParentablePath, int, bool, bool)"/>
/// after extension and format checks have passed.
/// </remarks>
protected abstract void ParseFile(string filename, int indexId, bool keep);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
|
|
|
|
|
#endregion
|
|
|
|
|
|
|
2020-08-28 10:32:17 -07:00
|
|
|
|
// TODO: See if any of the helper methods can be broken up a bit more neatly
|
2019-01-08 11:49:31 -08:00
|
|
|
|
#region Populate DAT from Directory
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Create a new Dat from a directory
/// </summary>
/// <param name="basePath">Base folder (or single file) to be used in creating the DAT</param>
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
/// <param name="asFiles">TreatAsFiles representing CHD and Archive scanning</param>
/// <param name="skipFileType">Type of files that should be skipped</param>
/// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
/// <returns>Always true</returns>
/// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
/// TODO: Look into removing "copyFiles". I don't think it's useful anymore
public bool PopulateFromDir(
    string basePath,
    Hash omitFromScan = Hash.DeepHashes,
    TreatAsFiles asFiles = 0x00,
    SkipFileType skipFileType = SkipFileType.None,
    bool addBlanks = false,
    bool addDate = false,
    bool copyFiles = false)
{
    // Clean the temp directory path
    Globals.TempDir = DirectoryExtensions.Ensure(Globals.TempDir, temp: true);

    // Process the input
    if (Directory.Exists(basePath))
    {
        Globals.Logger.Verbose($"Folder found: {basePath}");

        // Process the files in the main folder or any subfolder, in parallel
        List<string> files = Directory.EnumerateFiles(basePath, "*", SearchOption.AllDirectories).ToList();
        Parallel.ForEach(files, Globals.ParallelOptions, item =>
        {
            CheckFileForHashes(item, basePath, omitFromScan, asFiles, skipFileType, addBlanks, addDate, copyFiles);
        });

        // Now find all folders that are empty, if we are supposed to
        if (Header.OutputDepot?.IsActive != true && addBlanks)
        {
            List<string> empties = DirectoryExtensions.ListEmpty(basePath);
            Parallel.ForEach(empties, Globals.ParallelOptions, dir =>
            {
                // Get the full path for the directory
                string fulldir = Path.GetFullPath(dir);

                // Set the temporary variables
                string gamename = string.Empty;
                string romname = string.Empty;

                // If we have a SuperDAT, we want anything that's not the base path as the game, and the file as the rom
                if (Header.Type == "SuperDAT")
                {
                    gamename = fulldir.Remove(0, basePath.Length + 1);
                    romname = "_";
                }

                // Otherwise, we want just the top level folder as the game, and the file as everything else
                else
                {
                    gamename = fulldir.Remove(0, basePath.Length + 1).Split(Path.DirectorySeparatorChar)[0];
                    romname = Path.Combine(fulldir.Remove(0, basePath.Length + 1 + gamename.Length), "_");
                }

                // Sanitize the names
                gamename = gamename.Trim(Path.DirectorySeparatorChar);
                romname = romname.Trim(Path.DirectorySeparatorChar);

                Globals.Logger.Verbose($"Adding blank empty folder: {gamename}");
                // "null" is the catch-all bucket key used for blank placeholder entries
                Items["null"].Add(new Rom(romname, gamename));
            });
        }
    }
    else if (File.Exists(basePath))
    {
        // NOTE(review): the double GetDirectoryName makes the grandparent folder the base
        // path for a single-file scan — confirm this is intentional against callers
        CheckFileForHashes(basePath, Path.GetDirectoryName(Path.GetDirectoryName(basePath)), omitFromScan, asFiles,
            skipFileType, addBlanks, addDate, copyFiles);
    }

    // Now that we're done, delete the temp folder (if it's not the default)
    Globals.Logger.User("Cleaning temp folder");
    if (Globals.TempDir != Path.GetTempPath())
        DirectoryExtensions.TryDelete(Globals.TempDir);

    return true;
}
|
|
|
|
|
|
|
|
|
|
|
|
        /// <summary>
        /// Check a given file for hashes, based on current settings
        /// </summary>
        /// <param name="item">Filename of the item to be checked</param>
        /// <param name="basePath">Base folder to be used in creating the DAT</param>
        /// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
        /// <param name="asFiles">TreatAsFiles representing CHD and Archive scanning</param>
        /// <param name="skipFileType">Type of files that should be skipped</param>
        /// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
        /// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
        /// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
        private void CheckFileForHashes(
            string item,
            string basePath,
            Hash omitFromScan,
            TreatAsFiles asFiles,
            SkipFileType skipFileType,
            bool addBlanks,
            bool addDate,
            bool copyFiles)
        {
            // Special case for if we are in Depot mode (all names are supposed to be SHA-1 hashes)
            if (Header.OutputDepot?.IsActive == true)
            {
                // In depot mode the file itself is expected to be a TorrentGZ archive
                GZipArchive gzarc = new GZipArchive(item);
                BaseFile baseFile = gzarc.GetTorrentGZFileInfo();

                // If the rom is valid, write it out
                if (baseFile != null && baseFile.Filename != null)
                {
                    // Add the list if it doesn't exist already
                    Rom rom = new Rom(baseFile);
                    Items.Add(rom.GetKey(Field.DatItem_CRC), rom);
                    Globals.Logger.User($"File added: {Path.GetFileNameWithoutExtension(item)}{Environment.NewLine}");
                }
                else
                {
                    Globals.Logger.User($"File not added: {Path.GetFileNameWithoutExtension(item)}{Environment.NewLine}");
                    return;
                }

                // Depot items never go through the generic archive/file scanning below
                return;
            }

            // If we're copying files, copy it first and get the new filename
            // (the copy lives under a unique GUID folder inside the temp directory)
            string newItem = item;
            string newBasePath = basePath;
            if (copyFiles)
            {
                newBasePath = Path.Combine(Globals.TempDir, Guid.NewGuid().ToString());
                newItem = Path.GetFullPath(Path.Combine(newBasePath, Path.GetFullPath(item).Remove(0, basePath.Length + 1)));
                DirectoryExtensions.TryCreateDirectory(Path.GetDirectoryName(newItem));
                File.Copy(item, newItem, true);
            }

            // Initialize possible archive variables
            BaseArchive archive = BaseArchive.Create(newItem);
            List<BaseFile> extracted = null;

            // If we have an archive and we're supposed to scan it
            if (archive != null && !asFiles.HasFlag(TreatAsFiles.Archives))
                extracted = archive.GetChildren(omitFromScan: omitFromScan, date: addDate);

            // If the file should be skipped based on type, do so now
            // (extracted != null implies the item was treated as an archive)
            if ((extracted != null && skipFileType == SkipFileType.Archive)
                || (extracted == null && skipFileType == SkipFileType.File))
            {
                return;
            }

            // If the extracted list is null, just scan the item itself
            if (extracted == null)
            {
                ProcessFile(newItem, newBasePath, omitFromScan, addDate, asFiles);
            }

            // Otherwise, add all of the found items
            else
            {
                // First take care of the found items
                // NOTE(review): Items.Add is called concurrently from these parallel
                // loops — assumed safe for the project's item dictionary; confirm
                Parallel.ForEach(extracted, Globals.ParallelOptions, rom =>
                {
                    DatItem datItem = DatItem.Create(rom);
                    ProcessFileHelper(newItem,
                        datItem,
                        basePath,
                        (Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item));
                });

                // Then, if we're looking for blanks, get all of the blank folders and add them
                if (addBlanks)
                {
                    List<string> empties = new List<string>();

                    // Now get all blank folders from the archive
                    if (archive != null)
                        empties = archive.GetEmptyFolders();

                    // Add add all of the found empties to the DAT as "_" placeholder roms
                    Parallel.ForEach(empties, Globals.ParallelOptions, empty =>
                    {
                        Rom emptyRom = new Rom(Path.Combine(empty, "_"), newItem);
                        ProcessFileHelper(newItem,
                            emptyRom,
                            basePath,
                            (Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item));
                    });
                }
            }

            // Cue to delete the file if it's a copy
            if (copyFiles && item != newItem)
                DirectoryExtensions.TryDelete(newBasePath);
        }
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
|
|
|
|
|
|
/// Process a single file as a file
|
|
|
|
|
|
/// </summary>
|
|
|
|
|
|
/// <param name="item">File to be added</param>
|
|
|
|
|
|
/// <param name="basePath">Path the represents the parent directory</param>
|
|
|
|
|
|
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
|
|
|
|
|
|
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
|
2020-08-02 13:23:47 -07:00
|
|
|
|
/// <param name="asFiles">TreatAsFiles representing CHD and Archive scanning</param>
|
2020-08-02 13:38:24 -07:00
|
|
|
|
private void ProcessFile(string item, string basePath, Hash omitFromScan, bool addDate, TreatAsFiles asFiles)
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-06-10 22:37:19 -07:00
|
|
|
|
Globals.Logger.Verbose($"'{Path.GetFileName(item)}' treated like a file");
|
2020-08-27 16:57:22 -07:00
|
|
|
|
BaseFile baseFile = FileExtensions.GetInfo(
|
|
|
|
|
|
item,
|
|
|
|
|
|
omitFromScan: omitFromScan,
|
|
|
|
|
|
date: addDate,
|
|
|
|
|
|
header: Header.HeaderSkipper,
|
2020-08-27 20:56:50 -07:00
|
|
|
|
asFiles: asFiles);
|
2020-08-02 13:38:24 -07:00
|
|
|
|
ProcessFileHelper(item, DatItem.Create(baseFile), basePath, string.Empty);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
|
|
|
|
|
|
/// Process a single file as a file (with found Rom data)
|
|
|
|
|
|
/// </summary>
|
|
|
|
|
|
/// <param name="item">File to be added</param>
|
|
|
|
|
|
/// <param name="item">Rom data to be used to write to file</param>
|
|
|
|
|
|
/// <param name="basepath">Path the represents the parent directory</param>
|
|
|
|
|
|
/// <param name="parent">Parent game to be used</param>
|
|
|
|
|
|
private void ProcessFileHelper(string item, DatItem datItem, string basepath, string parent)
|
|
|
|
|
|
{
|
2020-08-27 16:57:22 -07:00
|
|
|
|
// If we didn't get an accepted parsed type somehow, cancel out
|
|
|
|
|
|
List<ItemType> parsed = new List<ItemType> { ItemType.Disk, ItemType.Media, ItemType.Rom };
|
|
|
|
|
|
if (!parsed.Contains(datItem.ItemType))
|
2019-01-08 11:49:31 -08:00
|
|
|
|
return;
|
|
|
|
|
|
|
|
|
|
|
|
try
|
|
|
|
|
|
{
|
2020-06-10 22:37:19 -07:00
|
|
|
|
// If the basepath doesn't end with a directory separator, add it
|
2019-01-08 11:49:31 -08:00
|
|
|
|
if (!basepath.EndsWith(Path.DirectorySeparatorChar.ToString()))
|
|
|
|
|
|
basepath += Path.DirectorySeparatorChar.ToString();
|
|
|
|
|
|
|
|
|
|
|
|
// Make sure we have the full item path
|
|
|
|
|
|
item = Path.GetFullPath(item);
|
|
|
|
|
|
|
|
|
|
|
|
// Process the item to sanitize names based on input
|
|
|
|
|
|
SetDatItemInfo(datItem, item, parent, basepath);
|
|
|
|
|
|
|
|
|
|
|
|
// Add the file information to the DAT
|
2020-08-25 11:20:50 -07:00
|
|
|
|
string key = datItem.GetKey(Field.DatItem_CRC);
|
2020-07-26 22:34:45 -07:00
|
|
|
|
Items.Add(key, datItem);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
2020-06-10 22:37:19 -07:00
|
|
|
|
Globals.Logger.User($"File added: {datItem.Name}{Environment.NewLine}");
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
|
|
|
|
|
catch (IOException ex)
|
|
|
|
|
|
{
|
|
|
|
|
|
Globals.Logger.Error(ex.ToString());
|
|
|
|
|
|
return;
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
|
|
|
|
|
|
/// Set proper Game and Rom names from user inputs
|
|
|
|
|
|
/// </summary>
|
|
|
|
|
|
/// <param name="datItem">DatItem representing the input file</param>
|
|
|
|
|
|
/// <param name="item">Item name to use</param>
|
|
|
|
|
|
/// <param name="parent">Parent name to use</param>
|
|
|
|
|
|
/// <param name="basepath">Base path to use</param>
|
|
|
|
|
|
private void SetDatItemInfo(DatItem datItem, string item, string parent, string basepath)
|
|
|
|
|
|
{
|
|
|
|
|
|
// Get the data to be added as game and item names
|
2020-08-27 16:57:22 -07:00
|
|
|
|
string machineName, itemName;
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
|
|
|
|
|
// If the parent is blank, then we have a non-archive file
|
2020-06-10 22:37:19 -07:00
|
|
|
|
if (string.IsNullOrWhiteSpace(parent))
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
|
|
|
|
|
// If we have a SuperDAT, we want anything that's not the base path as the game, and the file as the rom
|
2020-07-27 10:26:08 -07:00
|
|
|
|
if (Header.Type == "SuperDAT")
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-08-27 16:57:22 -07:00
|
|
|
|
machineName = Path.GetDirectoryName(item.Remove(0, basepath.Length));
|
|
|
|
|
|
itemName = Path.GetFileName(item);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Otherwise, we want just the top level folder as the game, and the file as everything else
|
|
|
|
|
|
else
|
|
|
|
|
|
{
|
2020-08-27 16:57:22 -07:00
|
|
|
|
machineName = item.Remove(0, basepath.Length).Split(Path.DirectorySeparatorChar)[0];
|
|
|
|
|
|
itemName = item.Remove(0, (Path.Combine(basepath, machineName).Length));
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Otherwise, we assume that we have an archive
|
|
|
|
|
|
else
|
|
|
|
|
|
{
|
|
|
|
|
|
// If we have a SuperDAT, we want the archive name as the game, and the file as everything else (?)
|
2020-07-27 10:26:08 -07:00
|
|
|
|
if (Header.Type == "SuperDAT")
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-08-27 16:57:22 -07:00
|
|
|
|
machineName = parent;
|
|
|
|
|
|
itemName = datItem.Name;
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Otherwise, we want the archive name as the game, and the file as everything else
|
|
|
|
|
|
else
|
|
|
|
|
|
{
|
2020-08-27 16:57:22 -07:00
|
|
|
|
machineName = parent;
|
|
|
|
|
|
itemName = datItem.Name;
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Sanitize the names
|
2020-08-27 16:57:22 -07:00
|
|
|
|
machineName = machineName.Trim(Path.DirectorySeparatorChar);
|
|
|
|
|
|
itemName = itemName?.Trim(Path.DirectorySeparatorChar) ?? string.Empty;
|
2020-06-10 22:37:19 -07:00
|
|
|
|
|
2020-08-27 16:57:22 -07:00
|
|
|
|
if (!string.IsNullOrWhiteSpace(machineName) && string.IsNullOrWhiteSpace(itemName))
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-08-27 16:57:22 -07:00
|
|
|
|
itemName = machineName;
|
|
|
|
|
|
machineName = "Default";
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
|
|
|
|
|
|
2020-08-27 16:57:22 -07:00
|
|
|
|
// Update machine information
|
|
|
|
|
|
datItem.Machine.Name = machineName;
|
|
|
|
|
|
datItem.Machine.Description = machineName;
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
|
|
|
|
|
// If we have a Disk, then the ".chd" extension needs to be removed
|
2020-08-27 16:57:22 -07:00
|
|
|
|
if (datItem.ItemType == ItemType.Disk && datItem.Name.EndsWith(".chd"))
|
|
|
|
|
|
{
|
|
|
|
|
|
datItem.Name = itemName.Substring(0, itemName.Length - 4);
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// If we have a Media, then the extension needs to be removed
|
|
|
|
|
|
else if (datItem.ItemType == ItemType.Media)
|
|
|
|
|
|
{
|
|
|
|
|
|
if (datItem.Name.EndsWith(".dicf"))
|
|
|
|
|
|
datItem.Name = itemName.Substring(0, itemName.Length - 5);
|
|
|
|
|
|
else if (datItem.Name.EndsWith(".aaru"))
|
|
|
|
|
|
datItem.Name = itemName.Substring(0, itemName.Length - 5);
|
|
|
|
|
|
else if (datItem.Name.EndsWith(".aaruformat"))
|
|
|
|
|
|
datItem.Name = itemName.Substring(0, itemName.Length - 11);
|
|
|
|
|
|
else if (datItem.Name.EndsWith(".aaruf"))
|
|
|
|
|
|
datItem.Name = itemName.Substring(0, itemName.Length - 6);
|
|
|
|
|
|
else if (datItem.Name.EndsWith(".aif"))
|
|
|
|
|
|
datItem.Name = itemName.Substring(0, itemName.Length - 3);
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// All others leave the extension alone
|
|
|
|
|
|
else
|
|
|
|
|
|
{
|
|
|
|
|
|
datItem.Name = itemName;
|
|
|
|
|
|
}
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
#endregion
|
|
|
|
|
|
|
2020-08-28 10:32:17 -07:00
|
|
|
|
// TODO: See if any of the helper methods can be broken up a bit more neatly
|
2019-01-08 11:49:31 -08:00
|
|
|
|
#region Rebuilding and Verifying
|
|
|
|
|
|
|
|
|
|
|
|
        /// <summary>
        /// Process the DAT and find all matches in input files and folders assuming they're a depot
        /// </summary>
        /// <param name="inputs">List of input files/folders to check</param>
        /// <param name="outDir">Output directory to use to build to</param>
        /// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
        /// <param name="delete">True if input files should be deleted, false otherwise</param>
        /// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
        /// <param name="outputFormat">Output format that files should be written to</param>
        /// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
        /// <returns>True if rebuilding was a success, false otherwise</returns>
        public bool RebuildDepot(
            List<string> inputs,
            string outDir = null,
            bool date = false,
            bool delete = false,
            bool inverse = false,
            OutputFormat outputFormat = OutputFormat.Folder,
            bool updateDat = true)
        {
            #region Perform setup

            // If the DAT is not populated and inverse is not set, inform the user and quit
            if (Items.TotalCount == 0 && !inverse)
            {
                Globals.Logger.User("No entries were found to rebuild, exiting...");
                return false;
            }

            // Check that the output directory exists
            outDir = DirectoryExtensions.Ensure(outDir, create: true);

            // Now we want to get forcepack flag if it's not overridden
            // (only applies when no explicit packaging was requested)
            if (outputFormat == OutputFormat.Folder && Header.ForcePacking != PackingFlag.None)
            {
                switch (Header.ForcePacking)
                {
                    case PackingFlag.Zip:
                        outputFormat = OutputFormat.TorrentZip;
                        break;
                    case PackingFlag.Unzip:
                        outputFormat = OutputFormat.Folder;
                        break;
                }
            }

            // Preload the Skipper list
            Transform.Init();

            #endregion

            bool success = true;

            #region Rebuild from depots in order

            // Human-readable name of the output format, used only for logging
            string format = string.Empty;
            switch (outputFormat)
            {
                case OutputFormat.Folder:
                    format = "directory";
                    break;
                case OutputFormat.TapeArchive:
                    format = "TAR";
                    break;
                case OutputFormat.Torrent7Zip:
                    format = "Torrent7Z";
                    break;
                case OutputFormat.TorrentGzip:
                case OutputFormat.TorrentGzipRomba:
                    format = "TorrentGZ";
                    break;
                case OutputFormat.TorrentLRZip:
                    format = "TorrentLRZ";
                    break;
                case OutputFormat.TorrentRar:
                    format = "TorrentRAR";
                    break;
                case OutputFormat.TorrentXZ:
                case OutputFormat.TorrentXZRomba:
                    format = "TorrentXZ";
                    break;
                case OutputFormat.TorrentZip:
                    format = "TorrentZip";
                    break;
            }

            InternalStopwatch watch = new InternalStopwatch($"Rebuilding all files to {format}");

            // Now loop through and get only directories from the input paths
            List<string> directories = new List<string>();
            Parallel.ForEach(inputs, Globals.ParallelOptions, input =>
            {
                // Add to the list if the input is a directory
                if (Directory.Exists(input))
                {
                    Globals.Logger.Verbose($"Adding depot: {input}");
                    lock (directories)
                    {
                        directories.Add(input);
                    }
                }
            });

            // If we don't have any directories, we want to exit
            if (directories.Count == 0)
                return success;

            // Now that we have a list of depots, we want to bucket the input DAT by SHA-1
            Items.BucketBy(Field.DatItem_SHA1, DedupeType.None);

            // Then we want to loop through each of the hashes and see if we can rebuild
            var keys = Items.SortedKeys.ToList();
            foreach (string hash in keys)
            {
                // Pre-empt any issues that could arise from string length
                // (only full-length SHA-1 keys map to depot paths)
                if (hash.Length != Constants.SHA1Length)
                    continue;

                Globals.Logger.User($"Checking hash '{hash}'");

                // Get the extension path for the hash
                string subpath = PathExtensions.GetDepotPath(hash, Header.InputDepot.Depth);

                // Find the first depot that includes the hash
                string foundpath = null;
                foreach (string directory in directories)
                {
                    if (File.Exists(Path.Combine(directory, subpath)))
                    {
                        foundpath = Path.Combine(directory, subpath);
                        break;
                    }
                }

                // If we didn't find a path, then we continue
                if (foundpath == null)
                    continue;

                // If we have a path, we want to try to get the rom information
                // (depot files are expected to be TorrentGZ archives)
                GZipArchive archive = new GZipArchive(foundpath);
                BaseFile fileinfo = archive.GetTorrentGZFileInfo();

                // If the file information is null, then we continue
                if (fileinfo == null)
                    continue;

                // If there are no items in the hash, we continue
                if (Items[hash] == null || Items[hash].Count == 0)
                    continue;

                // Otherwise, we rebuild that file to all locations that we need to;
                // the first item under the hash decides how the file info is typed
                bool usedInternally;
                if (Items[hash][0].ItemType == ItemType.Disk)
                    usedInternally = RebuildIndividualFile(new Disk(fileinfo), foundpath, outDir, date, inverse, outputFormat, updateDat, false /* isZip */);
                else if (Items[hash][0].ItemType == ItemType.Media)
                    usedInternally = RebuildIndividualFile(new Media(fileinfo), foundpath, outDir, date, inverse, outputFormat, updateDat, false /* isZip */);
                else
                    usedInternally = RebuildIndividualFile(new Rom(fileinfo), foundpath, outDir, date, inverse, outputFormat, updateDat, false /* isZip */);

                // If we are supposed to delete the depot file, do so
                if (delete && usedInternally)
                    FileExtensions.TryDelete(foundpath);
            }

            watch.Stop();

            #endregion

            // If we're updating the DAT, output to the rebuild directory
            if (updateDat)
            {
                Header.FileName = $"fixDAT_{Header.FileName}";
                Header.Name = $"fixDAT_{Header.Name}";
                Header.Description = $"fixDAT_{Header.Description}";
                Items.ClearMarked();
                Write(outDir);
            }

            return success;
        }
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
|
|
|
|
|
|
/// Process the DAT and find all matches in input files and folders
|
|
|
|
|
|
/// </summary>
|
|
|
|
|
|
/// <param name="inputs">List of input files/folders to check</param>
|
|
|
|
|
|
/// <param name="outDir">Output directory to use to build to</param>
|
|
|
|
|
|
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
|
|
|
|
|
|
/// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
|
|
|
|
|
|
/// <param name="delete">True if input files should be deleted, false otherwise</param>
|
|
|
|
|
|
/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
|
|
|
|
|
|
/// <param name="outputFormat">Output format that files should be written to</param>
|
|
|
|
|
|
/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
|
2020-08-27 20:56:50 -07:00
|
|
|
|
/// <param name="asFiles">TreatAsFiles representing special format scanning</param>
|
2019-01-08 11:49:31 -08:00
|
|
|
|
/// <returns>True if rebuilding was a success, false otherwise</returns>
|
2020-06-10 22:37:19 -07:00
|
|
|
|
public bool RebuildGeneric(
|
|
|
|
|
|
List<string> inputs,
|
2020-08-02 13:44:45 -07:00
|
|
|
|
string outDir = null,
|
|
|
|
|
|
bool quickScan = false,
|
|
|
|
|
|
bool date = false,
|
|
|
|
|
|
bool delete = false,
|
|
|
|
|
|
bool inverse = false,
|
|
|
|
|
|
OutputFormat outputFormat = OutputFormat.Folder,
|
|
|
|
|
|
bool updateDat = true,
|
|
|
|
|
|
TreatAsFiles asFiles = 0x00)
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
|
|
|
|
|
#region Perform setup
|
|
|
|
|
|
|
|
|
|
|
|
// If the DAT is not populated and inverse is not set, inform the user and quit
|
2020-07-27 01:39:32 -07:00
|
|
|
|
if (Items.TotalCount == 0 && !inverse)
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
|
|
|
|
|
Globals.Logger.User("No entries were found to rebuild, exiting...");
|
|
|
|
|
|
return false;
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Check that the output directory exists
|
|
|
|
|
|
if (!Directory.Exists(outDir))
|
|
|
|
|
|
{
|
|
|
|
|
|
Directory.CreateDirectory(outDir);
|
|
|
|
|
|
outDir = Path.GetFullPath(outDir);
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Now we want to get forcepack flag if it's not overridden
|
2020-08-20 20:38:29 -07:00
|
|
|
|
if (outputFormat == OutputFormat.Folder && Header.ForcePacking != PackingFlag.None)
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-07-27 10:26:08 -07:00
|
|
|
|
switch (Header.ForcePacking)
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-08-20 20:38:29 -07:00
|
|
|
|
case PackingFlag.Zip:
|
2019-01-08 11:49:31 -08:00
|
|
|
|
outputFormat = OutputFormat.TorrentZip;
|
|
|
|
|
|
break;
|
2020-08-20 20:38:29 -07:00
|
|
|
|
case PackingFlag.Unzip:
|
2019-01-08 11:49:31 -08:00
|
|
|
|
outputFormat = OutputFormat.Folder;
|
|
|
|
|
|
break;
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Preload the Skipper list
|
2020-07-30 21:07:25 -07:00
|
|
|
|
Transform.Init();
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
|
|
|
|
|
#endregion
|
|
|
|
|
|
|
|
|
|
|
|
bool success = true;
|
|
|
|
|
|
|
|
|
|
|
|
#region Rebuild from sources in order
|
|
|
|
|
|
|
2020-06-10 22:37:19 -07:00
|
|
|
|
string format = string.Empty;
|
2019-01-08 11:49:31 -08:00
|
|
|
|
switch (outputFormat)
|
|
|
|
|
|
{
|
|
|
|
|
|
case OutputFormat.Folder:
|
|
|
|
|
|
format = "directory";
|
|
|
|
|
|
break;
|
|
|
|
|
|
case OutputFormat.TapeArchive:
|
|
|
|
|
|
format = "TAR";
|
|
|
|
|
|
break;
|
|
|
|
|
|
case OutputFormat.Torrent7Zip:
|
|
|
|
|
|
format = "Torrent7Z";
|
|
|
|
|
|
break;
|
|
|
|
|
|
case OutputFormat.TorrentGzip:
|
2020-06-10 22:37:19 -07:00
|
|
|
|
case OutputFormat.TorrentGzipRomba:
|
2019-01-08 11:49:31 -08:00
|
|
|
|
format = "TorrentGZ";
|
|
|
|
|
|
break;
|
|
|
|
|
|
case OutputFormat.TorrentLRZip:
|
|
|
|
|
|
format = "TorrentLRZ";
|
|
|
|
|
|
break;
|
|
|
|
|
|
case OutputFormat.TorrentRar:
|
|
|
|
|
|
format = "TorrentRAR";
|
|
|
|
|
|
break;
|
|
|
|
|
|
case OutputFormat.TorrentXZ:
|
2020-07-15 09:41:59 -07:00
|
|
|
|
case OutputFormat.TorrentXZRomba:
|
2019-01-08 11:49:31 -08:00
|
|
|
|
format = "TorrentXZ";
|
|
|
|
|
|
break;
|
|
|
|
|
|
case OutputFormat.TorrentZip:
|
|
|
|
|
|
format = "TorrentZip";
|
|
|
|
|
|
break;
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2020-06-10 22:37:19 -07:00
|
|
|
|
InternalStopwatch watch = new InternalStopwatch($"Rebuilding all files to {format}");
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
|
|
|
|
|
// Now loop through all of the files in all of the inputs
|
|
|
|
|
|
foreach (string input in inputs)
|
|
|
|
|
|
{
|
|
|
|
|
|
// If the input is a file
|
|
|
|
|
|
if (File.Exists(input))
|
|
|
|
|
|
{
|
2020-06-10 22:37:19 -07:00
|
|
|
|
Globals.Logger.User($"Checking file: {input}");
|
2020-08-20 11:23:48 -07:00
|
|
|
|
RebuildGenericHelper(input, outDir, quickScan, date, delete, inverse, outputFormat, updateDat, asFiles);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// If the input is a directory
|
|
|
|
|
|
else if (Directory.Exists(input))
|
|
|
|
|
|
{
|
2020-06-10 22:37:19 -07:00
|
|
|
|
Globals.Logger.Verbose($"Checking directory: {input}");
|
2019-01-08 11:49:31 -08:00
|
|
|
|
foreach (string file in Directory.EnumerateFiles(input, "*", SearchOption.AllDirectories))
|
|
|
|
|
|
{
|
2020-06-10 22:37:19 -07:00
|
|
|
|
Globals.Logger.User($"Checking file: {file}");
|
2020-08-20 11:23:48 -07:00
|
|
|
|
RebuildGenericHelper(file, outDir, quickScan, date, delete, inverse, outputFormat, updateDat, asFiles);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
watch.Stop();
|
|
|
|
|
|
|
|
|
|
|
|
#endregion
|
|
|
|
|
|
|
|
|
|
|
|
// If we're updating the DAT, output to the rebuild directory
|
|
|
|
|
|
if (updateDat)
|
|
|
|
|
|
{
|
2020-07-27 10:26:08 -07:00
|
|
|
|
Header.FileName = $"fixDAT_{Header.FileName}";
|
|
|
|
|
|
Header.Name = $"fixDAT_{Header.Name}";
|
|
|
|
|
|
Header.Description = $"fixDAT_{Header.Description}";
|
2020-07-26 22:34:45 -07:00
|
|
|
|
Items.ClearMarked();
|
2019-01-08 11:49:31 -08:00
|
|
|
|
Write(outDir);
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
return success;
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Attempt to add a file to the output if it matches
/// </summary>
/// <param name="file">Name of the file to process</param>
/// <param name="outDir">Output directory to use to build to</param>
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
/// <param name="delete">True if input files should be deleted, false otherwise</param>
/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
/// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
/// <param name="asFiles">TreatAsFiles representing special format scanning</param>
private void RebuildGenericHelper(
    string file,
    string outDir,
    bool quickScan,
    bool date,
    bool delete,
    bool inverse,
    OutputFormat outputFormat,
    bool updateDat,
    TreatAsFiles asFiles)
{
    // If we somehow have a null filename, return
    if (file == null)
        return;

    // Set the deletion variables
    // (Declared separately: the previous combined declaration only initialized
    // the second variable, which misleadingly read as if both started false.
    // usedExternally was always assigned before use, so behavior is unchanged.)
    bool usedExternally = false;
    bool usedInternally = false;

    // Scan the file externally
    // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
    BaseFile externalFileInfo = FileExtensions.GetInfo(
        file,
        omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes),
        header: Header.HeaderSkipper,
        asFiles: asFiles);

    // NOTE(review): externalDatItem stays null for any FileType other than
    // AaruFormat/CHD/None and is passed to RebuildIndividualFile as-is --
    // confirm downstream handling of a null item is intended
    DatItem externalDatItem = null;
    if (externalFileInfo.Type == FileType.AaruFormat)
        externalDatItem = new Media(externalFileInfo);
    else if (externalFileInfo.Type == FileType.CHD)
        externalDatItem = new Disk(externalFileInfo);
    else if (externalFileInfo.Type == FileType.None)
        externalDatItem = new Rom(externalFileInfo);

    usedExternally = RebuildIndividualFile(externalDatItem, file, outDir, date, inverse, outputFormat, updateDat, null /* isZip */);

    // Scan the file internally

    // Create an empty list of BaseFile for archive entries
    List<BaseFile> entries = null;

    // Get the TGZ status for later
    GZipArchive tgz = new GZipArchive(file);
    bool isTorrentGzip = tgz.IsTorrent();

    // Get the base archive first
    BaseArchive archive = BaseArchive.Create(file);

    // Now get all extracted items from the archive
    if (archive != null)
    {
        // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
        entries = archive.GetChildren(omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes), date: date);
    }

    // If the entries list is null, we encountered an error and should scan externally
    if (entries == null && File.Exists(file))
    {
        // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
        BaseFile internalFileInfo = FileExtensions.GetInfo(
            file,
            omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes),
            asFiles: asFiles);

        DatItem internalDatItem = null;
        if (internalFileInfo.Type == FileType.AaruFormat)
            internalDatItem = new Media(internalFileInfo);
        else if (internalFileInfo.Type == FileType.CHD)
            internalDatItem = new Disk(internalFileInfo);
        else if (internalFileInfo.Type == FileType.None)
            internalDatItem = new Rom(internalFileInfo);

        usedExternally = RebuildIndividualFile(internalDatItem, file, outDir, date, inverse, outputFormat, updateDat, null /* isZip */);
    }

    // Otherwise, loop through the entries and try to match
    else
    {
        foreach (BaseFile entry in entries)
        {
            DatItem internalDatItem = DatItem.Create(entry);
            usedInternally |= RebuildIndividualFile(internalDatItem, file, outDir, date, inverse, outputFormat, updateDat, !isTorrentGzip /* isZip */);
        }
    }

    // If we are supposed to delete the file, do so
    if (delete && (usedExternally || usedInternally))
        FileExtensions.TryDelete(file);
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Find duplicates and rebuild individual files to output
/// </summary>
/// <param name="datItem">Information for the current file to rebuild from</param>
/// <param name="file">Name of the file to process</param>
/// <param name="outDir">Output directory to use to build to</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
/// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
/// <param name="isZip">True if the input file is an archive, false if the file is TGZ, null otherwise</param>
/// <returns>True if the file was able to be rebuilt, false otherwise</returns>
private bool RebuildIndividualFile(
    DatItem datItem,
    string file,
    string outDir,
    bool date,
    bool inverse,
    OutputFormat outputFormat,
    bool updateDat,
    bool? isZip)
{
    // Set the initial output value
    bool rebuilt = false;

    // If the DatItem is a Disk or Media, force rebuilding to a folder except if TGZ or TXZ
    if ((datItem.ItemType == ItemType.Disk || datItem.ItemType == ItemType.Media)
        && !(outputFormat == OutputFormat.TorrentGzip || outputFormat == OutputFormat.TorrentGzipRomba)
        && !(outputFormat == OutputFormat.TorrentXZ || outputFormat == OutputFormat.TorrentXZRomba))
    {
        outputFormat = OutputFormat.Folder;
    }

    // If we have a Disk or Media, change it into a Rom for later use
    if (datItem.ItemType == ItemType.Disk)
        datItem = (datItem as Disk).ConvertToRom();
    if (datItem.ItemType == ItemType.Media)
        datItem = (datItem as Media).ConvertToRom();

    // Prepopulate a few key strings (empty, not null, so they are safe to interpolate)
    string crc = (datItem as Rom).CRC ?? string.Empty;
    string sha1 = (datItem as Rom).SHA1 ?? string.Empty;

    // Find if the file has duplicates in the DAT
    List<DatItem> dupes = Items.GetDuplicates(datItem, remove: updateDat);
    bool hasDuplicates = dupes.Count > 0;

    // If either we have duplicates or we're filtering
    // (XOR: rebuild on a match normally, rebuild on a NON-match when inverse)
    if (hasDuplicates ^ inverse)
    {
        // If we have a very specific TGZ->TGZ case, just copy it accordingly
        GZipArchive tgz = new GZipArchive(file);
        BaseFile tgzRom = tgz.GetTorrentGZFileInfo();
        if (isZip == false && tgzRom != null && (outputFormat == OutputFormat.TorrentGzip || outputFormat == OutputFormat.TorrentGzipRomba))
        {
            Globals.Logger.User($"Matches found for '{Path.GetFileName(datItem.Name)}', rebuilding accordingly...");

            // Get the proper output path (depot layout for Romba, flat sha1.gz otherwise)
            if (outputFormat == OutputFormat.TorrentGzipRomba)
                outDir = Path.Combine(outDir, PathExtensions.GetDepotPath(sha1, Header.OutputDepot.Depth));
            else
                outDir = Path.Combine(outDir, sha1 + ".gz");

            // Make sure the output folder is created
            Directory.CreateDirectory(Path.GetDirectoryName(outDir));

            // Now copy the file over; success/failure short-circuits the whole rebuild
            try
            {
                File.Copy(file, outDir);
                return true;
            }
            catch
            {
                return false;
            }
        }

        // If we have a very specific TXZ->TXZ case, just copy it accordingly
        XZArchive txz = new XZArchive(file);
        BaseFile txzRom = txz.GetTorrentXZFileInfo();
        if (isZip == false && txzRom != null && (outputFormat == OutputFormat.TorrentXZ || outputFormat == OutputFormat.TorrentXZRomba))
        {
            Globals.Logger.User($"Matches found for '{Path.GetFileName(datItem.Name)}', rebuilding accordingly...");

            // Get the proper output path (depot layout for Romba, flat sha1.xz otherwise)
            if (outputFormat == OutputFormat.TorrentXZRomba)
                outDir = Path.Combine(outDir, PathExtensions.GetDepotPath(sha1, Header.OutputDepot.Depth));
            else
                outDir = Path.Combine(outDir, sha1 + ".xz");

            // Make sure the output folder is created
            Directory.CreateDirectory(Path.GetDirectoryName(outDir));

            // Now copy the file over; success/failure short-circuits the whole rebuild
            try
            {
                File.Copy(file, outDir);
                return true;
            }
            catch
            {
                return false;
            }
        }

        // Get a generic stream for the file
        Stream fileStream = new MemoryStream();

        // If we have a zipfile, extract the stream to memory
        if (isZip != null)
        {
            BaseArchive archive = BaseArchive.Create(file);
            if (archive != null)
                (fileStream, _) = archive.CopyToStream(datItem.Name);
        }
        // Otherwise, just open the filestream
        else
        {
            fileStream = FileExtensions.TryOpenRead(file);
        }

        // If the stream is null, then continue
        if (fileStream == null)
            return false;

        // Seek to the beginning of the stream
        if (fileStream.CanSeek)
            fileStream.Seek(0, SeekOrigin.Begin);

        // If we are inverse, create an output to rebuild to
        if (inverse)
        {
            // NOTE(review): machinename is never reassigned before the null check
            // below, so the archive-name branch looks unreachable here -- confirm
            string machinename = null;

            // Get the item from the current file
            Rom item = new Rom(fileStream.GetInfo(keepReadOpen: true));
            item.Machine.Name = Path.GetFileNameWithoutExtension(item.Name);
            item.Machine.Description = Path.GetFileNameWithoutExtension(item.Name);

            // If we are coming from an archive, set the correct machine name
            if (machinename != null)
            {
                item.Machine.Name = machinename;
                item.Machine.Description = machinename;
            }

            dupes.Add(item);
        }

        Globals.Logger.User($"{(inverse ? "No matches" : "Matches")} found for '{Path.GetFileName(datItem.Name)}', rebuilding accordingly...");
        rebuilt = true;

        // Now loop through the list and rebuild accordingly
        foreach (DatItem item in dupes)
        {
            // Get the output archive, if possible
            Folder outputArchive = Folder.Create(outputFormat);

            // Now rebuild to the output file
            outputArchive.Write(fileStream, outDir, item as Rom, date: date, depth: Header.OutputDepot.Depth);
        }

        // Close the input stream
        fileStream?.Dispose();
    }

    // Now we want to take care of headers, if applicable
    if (Header.HeaderSkipper != null)
    {
        // Get a generic stream for the file
        Stream fileStream = new MemoryStream();

        // If we have a zipfile, extract the stream to memory
        if (isZip != null)
        {
            BaseArchive archive = BaseArchive.Create(file);
            if (archive != null)
                (fileStream, _) = archive.CopyToStream(datItem.Name);
        }
        // Otherwise, just open the filestream
        else
        {
            fileStream = FileExtensions.TryOpenRead(file);
        }

        // If the stream is null, then continue
        if (fileStream == null)
            return false;

        // Check to see if we have a matching header first
        SkipperRule rule = Transform.GetMatchingRule(fileStream, Path.GetFileNameWithoutExtension(Header.HeaderSkipper));

        // If there's a match, create the new file to write
        if (rule.Tests != null && rule.Tests.Count != 0)
        {
            // If the file could be transformed correctly
            MemoryStream transformStream = new MemoryStream();
            if (rule.TransformStream(fileStream, transformStream, keepReadOpen: true, keepWriteOpen: true))
            {
                // Get the file information that we will be using (headerless hashes)
                Rom headerless = new Rom(transformStream.GetInfo(keepReadOpen: true));

                // Find if the headerless file has duplicates in the DAT
                dupes = Items.GetDuplicates(headerless, remove: updateDat);
                hasDuplicates = dupes.Count > 0;

                // If it has duplicates and we're not filtering, rebuild it
                if (hasDuplicates && !inverse)
                {
                    Globals.Logger.User($"Headerless matches found for '{Path.GetFileName(datItem.Name)}', rebuilding accordingly...");
                    rebuilt = true;

                    // Now loop through the list and rebuild accordingly
                    foreach (DatItem item in dupes)
                    {
                        // Create a headered item to use as well (CRC suffix disambiguates it)
                        datItem.CopyMachineInformation(item);
                        datItem.Name += $"_{crc}";

                        // If either copy succeeds, then we want to set rebuilt to true
                        bool eitherSuccess = false;

                        // Get the output archive, if possible
                        Folder outputArchive = Folder.Create(outputFormat);

                        // Now rebuild both the headerless and headered versions to the output
                        eitherSuccess |= outputArchive.Write(transformStream, outDir, item as Rom, date: date, depth: Header.OutputDepot.Depth);
                        eitherSuccess |= outputArchive.Write(fileStream, outDir, datItem as Rom, date: date, depth: Header.OutputDepot.Depth);

                        // Now add the success of either rebuild
                        // NOTE(review): &= means one failing dupe clears any earlier
                        // success -- confirm the intent was AND rather than OR here
                        rebuilt &= eitherSuccess;
                    }
                }
            }

            // Dispose of the transformed stream
            transformStream?.Dispose();
        }

        // Dispose of the stream
        fileStream?.Dispose();
    }

    return rebuilt;
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Verify a DatFile against a set of depots, leaving only missing files
/// </summary>
/// <param name="inputs">List of input directories to compare against</param>
/// <returns>True if verification was a success, false otherwise</returns>
public bool VerifyDepot(List<string> inputs)
{
    bool success = true;

    InternalStopwatch watch = new InternalStopwatch("Verifying all from supplied depots");

    // Keep only the inputs that are actual directories
    List<string> depots = new List<string>();
    foreach (string input in inputs)
    {
        if (!Directory.Exists(input))
            continue;

        Globals.Logger.Verbose($"Adding depot: {input}");
        depots.Add(input);
    }

    // With no depots to check against, there is nothing to do
    if (depots.Count == 0)
        return success;

    // Bucket the input DAT by SHA-1 so each hash maps directly onto a depot path
    Items.BucketBy(Field.DatItem_SHA1, DedupeType.None);

    // Walk every hash and strip out the entries found in a depot
    foreach (string hash in Items.SortedKeys.ToList())
    {
        // Pre-empt any issues that could arise from string length
        if (hash.Length != Constants.SHA1Length)
            continue;

        Globals.Logger.User($"Checking hash '{hash}'");

        // Derive the depot-relative path for this hash
        string subpath = PathExtensions.GetDepotPath(hash, Header.InputDepot.Depth);

        // Locate the first depot that contains the hash, if any
        string foundpath = depots
            .Select(depot => Path.Combine(depot, subpath))
            .FirstOrDefault(File.Exists);
        if (foundpath == null)
            continue;

        // Try to pull the rom information out of the found TGZ
        GZipArchive tgz = new GZipArchive(foundpath);
        BaseFile fileinfo = tgz.GetTorrentGZFileInfo();
        if (fileinfo == null)
            continue;

        // Remove all matching Roms and Disks from the DAT
        List<DatItem> matches = Items.GetDuplicates(new Rom(fileinfo), remove: true);
        matches.AddRange(Items.GetDuplicates(new Disk(fileinfo), remove: true));
    }

    watch.Stop();

    // Set fixdat headers in case of writing out
    Header.FileName = $"fixDAT_{Header.FileName}";
    Header.Name = $"fixDAT_{Header.Name}";
    Header.Description = $"fixDAT_{Header.Description}";
    Items.ClearMarked();

    return success;
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Verify a DatFile against a set of inputs, leaving only missing files
/// </summary>
/// <param name="inputs">List of input directories to compare against</param>
/// <param name="hashOnly">True if only hashes should be checked, false for full file information</param>
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
/// <param name="asFiles">TreatAsFiles representing CHD and Archive scanning</param>
/// <returns>True if verification was a success, false otherwise</returns>
public bool VerifyGeneric(List<string> inputs, bool hashOnly, bool quickScan, TreatAsFiles asFiles = 0x00)
{
    bool success = true;

    // Pull every input into the current DAT so matches can be detected
    Globals.Logger.User("Processing files:\n");
    foreach (string input in inputs)
    {
        // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
        PopulateFromDir(input, quickScan ? Hash.SecureHashes : Hash.DeepHashes, asFiles: asFiles);
    }

    // Force bucketing according to the flags
    Items.SetBucketedBy(Field.NULL);
    Field bucketField = hashOnly ? Field.DatItem_CRC : Field.Machine_Name;
    Items.BucketBy(bucketField, DedupeType.Full);

    // Then mark items for removal
    foreach (string bucket in Items.SortedKeys.ToList())
    {
        List<DatItem> bucketItems = Items[bucket];
        foreach (DatItem bucketItem in bucketItems)
        {
            // Unmatched items will have a source ID of 99, remove all others
            if (bucketItem.Source.Index != 99)
                bucketItem.Remove = true;
        }

        // Set the list back, just in case
        Items[bucket] = bucketItems;
    }

    // Set fixdat headers in case of writing out
    Header.FileName = $"fixDAT_{Header.FileName}";
    Header.Name = $"fixDAT_{Header.Name}";
    Header.Description = $"fixDAT_{Header.Description}";
    Items.ClearMarked();

    return success;
}
|
|
|
|
|
|
|
2019-01-08 11:49:31 -08:00
|
|
|
|
#endregion
|
|
|
|
|
|
|
2020-07-15 09:41:59 -07:00
|
|
|
|
// TODO: Implement Level split
|
2019-01-08 11:49:31 -08:00
|
|
|
|
#region Splitting
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
/// Split a DAT by input extensions
/// </summary>
/// <param name="extA">List of extensions to split on (first DAT)</param>
/// <param name="extB">List of extensions to split on (second DAT)</param>
/// <returns>Extension Set A and Extension Set B DatFiles; (null, null) if the DAT is empty</returns>
public (DatFile extADat, DatFile extBDat) SplitByExtension(List<string> extA, List<string> extB)
{
    // If there are no items, there is nothing to split
    if (Items.TotalCount == 0)
        return (null, null);

    // Make sure all of the extensions don't have a dot at the beginning.
    // Materialize the queries once: the previous deferred IEnumerables were
    // re-enumerated for every item inside the parallel loop below, and
    // Contains on them was O(n) per lookup. Joining from the List (not the
    // HashSet) preserves the original order/duplicates in the header strings.
    List<string> normalizedExtA = extA.Select(s => s.TrimStart('.').ToLowerInvariant()).ToList();
    string newExtAString = string.Join(",", normalizedExtA);
    HashSet<string> newExtA = new HashSet<string>(normalizedExtA);

    List<string> normalizedExtB = extB.Select(s => s.TrimStart('.').ToLowerInvariant()).ToList();
    string newExtBString = string.Join(",", normalizedExtB);
    HashSet<string> newExtB = new HashSet<string>(normalizedExtB);

    // Set all of the appropriate outputs for each of the subsets
    DatFile extADat = Create(Header.CloneStandard());
    extADat.Header.FileName += $" ({newExtAString})";
    extADat.Header.Name += $" ({newExtAString})";
    extADat.Header.Description += $" ({newExtAString})";

    DatFile extBDat = Create(Header.CloneStandard());
    extBDat.Header.FileName += $" ({newExtBString})";
    extBDat.Header.Name += $" ({newExtBString})";
    extBDat.Header.Description += $" ({newExtBString})";

    // Now separate the roms accordingly
    Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
    {
        List<DatItem> items = Items[key];
        foreach (DatItem item in items)
        {
            // Set A wins when an extension appears in both lists
            if (newExtA.Contains(PathExtensions.GetNormalizedExtension(item.Name)))
            {
                extADat.Items.Add(key, item);
            }
            else if (newExtB.Contains(PathExtensions.GetNormalizedExtension(item.Name)))
            {
                extBDat.Items.Add(key, item);
            }
            // Items matching neither list go to both outputs
            else
            {
                extADat.Items.Add(key, item);
                extBDat.Items.Add(key, item);
            }
        }
    });

    // Then return both DatFiles
    return (extADat, extBDat);
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
|
|
|
|
|
|
/// Split a DAT by best available hashes
|
|
|
|
|
|
/// </summary>
|
|
|
|
|
|
/// <param name="outDir">Name of the directory to write the DATs out to</param>
|
|
|
|
|
|
/// <returns>True if split succeeded, false otherwise</returns>
|
2020-08-28 10:32:17 -07:00
|
|
|
|
/// TODO: Can this follow the same pattern as type split?
|
2020-07-27 11:21:32 -07:00
|
|
|
|
public bool SplitByHash(string outDir)
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
|
|
|
|
|
// Create each of the respective output DATs
|
|
|
|
|
|
Globals.Logger.User("Creating and populating new DATs");
|
2020-07-15 09:41:59 -07:00
|
|
|
|
|
2020-07-27 10:26:08 -07:00
|
|
|
|
DatFile nodump = Create(Header.CloneStandard());
|
|
|
|
|
|
nodump.Header.FileName += " (Nodump)";
|
|
|
|
|
|
nodump.Header.Name += " (Nodump)";
|
|
|
|
|
|
nodump.Header.Description += " (Nodump)";
|
2020-07-15 09:41:59 -07:00
|
|
|
|
|
2020-07-27 10:26:08 -07:00
|
|
|
|
DatFile sha512 = Create(Header.CloneStandard());
|
|
|
|
|
|
sha512.Header.FileName += " (SHA-512)";
|
|
|
|
|
|
sha512.Header.Name += " (SHA-512)";
|
|
|
|
|
|
sha512.Header.Description += " (SHA-512)";
|
2020-07-15 09:41:59 -07:00
|
|
|
|
|
2020-07-27 10:26:08 -07:00
|
|
|
|
DatFile sha384 = Create(Header.CloneStandard());
|
|
|
|
|
|
sha384.Header.FileName += " (SHA-384)";
|
|
|
|
|
|
sha384.Header.Name += " (SHA-384)";
|
|
|
|
|
|
sha384.Header.Description += " (SHA-384)";
|
2020-07-15 09:41:59 -07:00
|
|
|
|
|
2020-07-27 10:26:08 -07:00
|
|
|
|
DatFile sha256 = Create(Header.CloneStandard());
|
|
|
|
|
|
sha256.Header.FileName += " (SHA-256)";
|
|
|
|
|
|
sha256.Header.Name += " (SHA-256)";
|
|
|
|
|
|
sha256.Header.Description += " (SHA-256)";
|
2020-07-15 09:41:59 -07:00
|
|
|
|
|
2020-07-27 10:26:08 -07:00
|
|
|
|
DatFile sha1 = Create(Header.CloneStandard());
|
|
|
|
|
|
sha1.Header.FileName += " (SHA-1)";
|
|
|
|
|
|
sha1.Header.Name += " (SHA-1)";
|
|
|
|
|
|
sha1.Header.Description += " (SHA-1)";
|
2020-07-15 09:41:59 -07:00
|
|
|
|
|
|
|
|
|
|
#if NET_FRAMEWORK
|
2020-07-27 10:26:08 -07:00
|
|
|
|
DatFile ripemd160 = Create(Header.CloneStandard());
|
|
|
|
|
|
ripemd160.Header.FileName += " (RIPEMD160)";
|
|
|
|
|
|
ripemd160.Header.Name += " (RIPEMD160)";
|
|
|
|
|
|
ripemd160.Header.Description += " (RIPEMD160)";
|
2020-07-15 09:41:59 -07:00
|
|
|
|
#endif
|
|
|
|
|
|
|
2020-07-27 10:26:08 -07:00
|
|
|
|
DatFile md5 = Create(Header.CloneStandard());
|
|
|
|
|
|
md5.Header.FileName += " (MD5)";
|
|
|
|
|
|
md5.Header.Name += " (MD5)";
|
|
|
|
|
|
md5.Header.Description += " (MD5)";
|
2020-07-15 09:41:59 -07:00
|
|
|
|
|
2020-07-27 10:26:08 -07:00
|
|
|
|
DatFile crc = Create(Header.CloneStandard());
|
|
|
|
|
|
crc.Header.FileName += " (CRC)";
|
|
|
|
|
|
crc.Header.Name += " (CRC)";
|
|
|
|
|
|
crc.Header.Description += " (CRC)";
|
2020-07-15 09:41:59 -07:00
|
|
|
|
|
2020-07-27 10:26:08 -07:00
|
|
|
|
DatFile other = Create(Header.CloneStandard());
|
|
|
|
|
|
other.Header.FileName += " (Other)";
|
|
|
|
|
|
other.Header.Name += " (Other)";
|
|
|
|
|
|
other.Header.Description += " (Other)";
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
|
|
|
|
|
// Now populate each of the DAT objects in turn
|
2020-07-26 22:34:45 -07:00
|
|
|
|
Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-07-26 22:34:45 -07:00
|
|
|
|
List<DatItem> items = Items[key];
|
2019-01-08 11:49:31 -08:00
|
|
|
|
foreach (DatItem item in items)
|
|
|
|
|
|
{
|
2020-08-27 16:57:22 -07:00
|
|
|
|
// If the file is not a Disk, Media, or Rom, continue
|
|
|
|
|
|
if (item.ItemType != ItemType.Disk && item.ItemType != ItemType.Media && item.ItemType != ItemType.Rom)
|
2019-01-08 11:49:31 -08:00
|
|
|
|
return;
|
|
|
|
|
|
|
|
|
|
|
|
// If the file is a nodump
|
2020-08-27 16:57:22 -07:00
|
|
|
|
if ((item.ItemType == ItemType.Rom && (item as Rom).ItemStatus == ItemStatus.Nodump)
|
|
|
|
|
|
|| (item.ItemType == ItemType.Disk && (item as Disk).ItemStatus == ItemStatus.Nodump))
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-07-26 22:34:45 -07:00
|
|
|
|
nodump.Items.Add(key, item);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
2020-08-27 20:56:50 -07:00
|
|
|
|
|
2019-01-08 11:49:31 -08:00
|
|
|
|
// If the file has a SHA-512
|
2020-08-27 16:57:22 -07:00
|
|
|
|
else if ((item.ItemType == ItemType.Rom && !string.IsNullOrWhiteSpace((item as Rom).SHA512)))
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-07-26 22:34:45 -07:00
|
|
|
|
sha512.Items.Add(key, item);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
2020-08-27 20:56:50 -07:00
|
|
|
|
|
2019-01-08 11:49:31 -08:00
|
|
|
|
// If the file has a SHA-384
|
2020-08-27 16:57:22 -07:00
|
|
|
|
else if ((item.ItemType == ItemType.Rom && !string.IsNullOrWhiteSpace((item as Rom).SHA384)))
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-07-26 22:34:45 -07:00
|
|
|
|
sha384.Items.Add(key, item);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
2020-08-27 20:56:50 -07:00
|
|
|
|
|
2019-01-08 11:49:31 -08:00
|
|
|
|
// If the file has a SHA-256
|
2020-08-27 16:57:22 -07:00
|
|
|
|
else if ((item.ItemType == ItemType.Media && !string.IsNullOrWhiteSpace((item as Media).SHA256))
|
|
|
|
|
|
|| (item.ItemType == ItemType.Rom && !string.IsNullOrWhiteSpace((item as Rom).SHA256)))
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-07-26 22:34:45 -07:00
|
|
|
|
sha256.Items.Add(key, item);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
2020-08-27 20:56:50 -07:00
|
|
|
|
|
2019-01-08 11:49:31 -08:00
|
|
|
|
// If the file has a SHA-1
|
2020-08-27 16:57:22 -07:00
|
|
|
|
else if ((item.ItemType == ItemType.Disk && !string.IsNullOrWhiteSpace((item as Disk).SHA1))
|
|
|
|
|
|
|| (item.ItemType == ItemType.Media && !string.IsNullOrWhiteSpace((item as Media).SHA1))
|
|
|
|
|
|
|| (item.ItemType == ItemType.Rom && !string.IsNullOrWhiteSpace((item as Rom).SHA1)))
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-07-26 22:34:45 -07:00
|
|
|
|
sha1.Items.Add(key, item);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
2020-08-27 20:56:50 -07:00
|
|
|
|
|
2020-07-15 09:41:59 -07:00
|
|
|
|
#if NET_FRAMEWORK
|
|
|
|
|
|
// If the file has a RIPEMD160
|
2020-08-27 16:57:22 -07:00
|
|
|
|
else if ((item.ItemType == ItemType.Rom && !string.IsNullOrWhiteSpace((item as Rom).RIPEMD160)))
|
2020-06-05 22:26:44 -07:00
|
|
|
|
{
|
2020-07-26 22:34:45 -07:00
|
|
|
|
ripemd160.Items.Add(key, item);
|
2020-06-05 22:26:44 -07:00
|
|
|
|
}
|
2020-07-15 09:41:59 -07:00
|
|
|
|
#endif
|
2020-08-27 20:56:50 -07:00
|
|
|
|
|
2020-06-05 22:26:44 -07:00
|
|
|
|
// If the file has an MD5
|
2020-08-27 16:57:22 -07:00
|
|
|
|
else if ((item.ItemType == ItemType.Disk && !string.IsNullOrWhiteSpace((item as Disk).MD5))
|
2020-08-27 20:56:50 -07:00
|
|
|
|
|| (item.ItemType == ItemType.Media && !string.IsNullOrWhiteSpace((item as Media).MD5))
|
2020-08-27 16:57:22 -07:00
|
|
|
|
|| (item.ItemType == ItemType.Rom && !string.IsNullOrWhiteSpace((item as Rom).MD5)))
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-07-26 22:34:45 -07:00
|
|
|
|
md5.Items.Add(key, item);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
2020-08-27 16:57:22 -07:00
|
|
|
|
|
2020-06-05 22:26:44 -07:00
|
|
|
|
// If the file has a CRC
|
2020-08-27 16:57:22 -07:00
|
|
|
|
else if ((item.ItemType == ItemType.Rom && !string.IsNullOrWhiteSpace((item as Rom).CRC)))
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-07-26 22:34:45 -07:00
|
|
|
|
crc.Items.Add(key, item);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
2020-08-27 16:57:22 -07:00
|
|
|
|
|
2019-01-08 11:49:31 -08:00
|
|
|
|
else
|
|
|
|
|
|
{
|
2020-07-26 22:34:45 -07:00
|
|
|
|
other.Items.Add(key, item);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
|
|
// Now, output all of the files to the output directory
|
|
|
|
|
|
Globals.Logger.User("DAT information created, outputting new files");
|
|
|
|
|
|
bool success = true;
|
|
|
|
|
|
success &= nodump.Write(outDir);
|
|
|
|
|
|
success &= sha512.Write(outDir);
|
|
|
|
|
|
success &= sha384.Write(outDir);
|
|
|
|
|
|
success &= sha256.Write(outDir);
|
|
|
|
|
|
success &= sha1.Write(outDir);
|
2020-07-15 09:41:59 -07:00
|
|
|
|
#if NET_FRAMEWORK
|
2020-06-05 22:26:44 -07:00
|
|
|
|
success &= ripemd160.Write(outDir);
|
2020-07-15 09:41:59 -07:00
|
|
|
|
#endif
|
2019-01-08 11:49:31 -08:00
|
|
|
|
success &= md5.Write(outDir);
|
|
|
|
|
|
success &= crc.Write(outDir);
|
|
|
|
|
|
|
|
|
|
|
|
return success;
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
|
|
|
|
|
|
/// Split a SuperDAT by lowest available directory level
|
|
|
|
|
|
/// </summary>
|
|
|
|
|
|
/// <param name="outDir">Name of the directory to write the DATs out to</param>
|
|
|
|
|
|
/// <param name="shortname">True if short names should be used, false otherwise</param>
|
|
|
|
|
|
/// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise</param>
|
|
|
|
|
|
/// <returns>True if split succeeded, false otherwise</returns>
|
2020-07-27 11:21:32 -07:00
|
|
|
|
public bool SplitByLevel(string outDir, bool shortname, bool basedat)
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-07-15 10:47:13 -07:00
|
|
|
|
// First, bucket by games so that we can do the right thing
|
2020-08-24 22:25:47 -07:00
|
|
|
|
Items.BucketBy(Field.Machine_Name, DedupeType.None, lower: false, norename: true);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
|
|
|
|
|
// Create a temporary DAT to add things to
|
2020-07-27 10:26:08 -07:00
|
|
|
|
DatFile tempDat = Create(Header);
|
|
|
|
|
|
tempDat.Header.Name = null;
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
|
|
|
|
|
// Sort the input keys
|
2020-07-26 22:34:45 -07:00
|
|
|
|
List<string> keys = Items.Keys.ToList();
|
2019-01-08 11:49:31 -08:00
|
|
|
|
keys.Sort(SplitByLevelSort);
|
|
|
|
|
|
|
|
|
|
|
|
// Then, we loop over the games
|
|
|
|
|
|
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
|
|
|
|
|
|
{
|
|
|
|
|
|
// Here, the key is the name of the game to be used for comparison
|
2020-07-27 10:26:08 -07:00
|
|
|
|
if (tempDat.Header.Name != null && tempDat.Header.Name != Path.GetDirectoryName(key))
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
|
|
|
|
|
// Reset the DAT for the next items
|
2020-07-27 10:26:08 -07:00
|
|
|
|
tempDat = Create(Header);
|
|
|
|
|
|
tempDat.Header.Name = null;
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Clean the input list and set all games to be pathless
|
2020-07-26 22:34:45 -07:00
|
|
|
|
List<DatItem> items = Items[key];
|
2020-08-20 13:17:14 -07:00
|
|
|
|
items.ForEach(item => item.Machine.Name = Path.GetFileName(item.Machine.Name));
|
|
|
|
|
|
items.ForEach(item => item.Machine.Description = Path.GetFileName(item.Machine.Description));
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
|
|
|
|
|
// Now add the game to the output DAT
|
2020-07-26 22:34:45 -07:00
|
|
|
|
tempDat.Items.AddRange(key, items);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
|
|
|
|
|
// Then set the DAT name to be the parent directory name
|
2020-07-27 10:26:08 -07:00
|
|
|
|
tempDat.Header.Name = Path.GetDirectoryName(key);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
|
|
return true;
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
|
|
|
|
|
|
/// Helper function for SplitByLevel to sort the input game names
|
|
|
|
|
|
/// </summary>
|
|
|
|
|
|
/// <param name="a">First string to compare</param>
|
|
|
|
|
|
/// <param name="b">Second string to compare</param>
|
|
|
|
|
|
/// <returns>-1 for a coming before b, 0 for a == b, 1 for a coming after b</returns>
|
|
|
|
|
|
private int SplitByLevelSort(string a, string b)
|
|
|
|
|
|
{
|
|
|
|
|
|
NaturalComparer nc = new NaturalComparer();
|
|
|
|
|
|
int adeep = a.Count(c => c == '/' || c == '\\');
|
|
|
|
|
|
int bdeep = b.Count(c => c == '/' || c == '\\');
|
|
|
|
|
|
|
|
|
|
|
|
if (adeep == bdeep)
|
|
|
|
|
|
return nc.Compare(a, b);
|
2020-06-10 22:37:19 -07:00
|
|
|
|
|
2019-01-08 11:49:31 -08:00
|
|
|
|
return adeep - bdeep;
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
|
|
|
|
|
|
/// Helper function for SplitByLevel to clean and write out a DAT
|
|
|
|
|
|
/// </summary>
|
2020-07-27 11:21:32 -07:00
|
|
|
|
/// <param name="newDatFile">DAT to clean and write out</param>
|
2019-01-08 11:49:31 -08:00
|
|
|
|
/// <param name="outDir">Directory to write out to</param>
|
|
|
|
|
|
/// <param name="shortname">True if short naming scheme should be used, false otherwise</param>
|
|
|
|
|
|
/// <param name="restore">True if original filenames should be used as the base for output filename, false otherwise</param>
|
2020-07-27 11:21:32 -07:00
|
|
|
|
private void SplitByLevelHelper(DatFile newDatFile, string outDir, bool shortname, bool restore)
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
|
|
|
|
|
// Get the name from the DAT to use separately
|
2020-07-27 11:21:32 -07:00
|
|
|
|
string name = newDatFile.Header.Name;
|
2019-01-08 11:49:31 -08:00
|
|
|
|
string expName = name.Replace("/", " - ").Replace("\\", " - ");
|
|
|
|
|
|
|
|
|
|
|
|
// Now set the new output values
|
2020-07-27 11:21:32 -07:00
|
|
|
|
newDatFile.Header.FileName = WebUtility.HtmlDecode(string.IsNullOrWhiteSpace(name)
|
2020-07-27 10:26:08 -07:00
|
|
|
|
? Header.FileName
|
2019-01-08 11:49:31 -08:00
|
|
|
|
: (shortname
|
|
|
|
|
|
? Path.GetFileName(name)
|
|
|
|
|
|
: expName
|
|
|
|
|
|
)
|
|
|
|
|
|
);
|
2020-07-27 11:21:32 -07:00
|
|
|
|
newDatFile.Header.FileName = (restore ? $"{Header.FileName} ({newDatFile.Header.FileName})" : newDatFile.Header.FileName);
|
|
|
|
|
|
newDatFile.Header.Name = $"{Header.Name} ({expName})";
|
|
|
|
|
|
newDatFile.Header.Description = (string.IsNullOrWhiteSpace(Header.Description) ? newDatFile.Header.Name : $"{Header.Description} ({expName})");
|
|
|
|
|
|
newDatFile.Header.Type = null;
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
|
|
|
|
|
// Write out the temporary DAT to the proper directory
|
2020-07-27 11:21:32 -07:00
|
|
|
|
newDatFile.Write(outDir);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
|
|
|
|
|
|
/// Split a DAT by size of Rom
|
|
|
|
|
|
/// </summary>
|
|
|
|
|
|
/// <param name="radix">Long value representing the split point</param>
|
2020-08-28 10:32:17 -07:00
|
|
|
|
/// <returns>Less Than and Greater Than DatFiles</returns>
|
|
|
|
|
|
public (DatFile lessThan, DatFile greaterThan) SplitBySize(long radix)
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
|
|
|
|
|
// Create each of the respective output DATs
|
|
|
|
|
|
Globals.Logger.User("Creating and populating new DATs");
|
2020-07-15 09:41:59 -07:00
|
|
|
|
|
2020-08-28 10:32:17 -07:00
|
|
|
|
DatFile lessThan = Create(Header.CloneStandard());
|
|
|
|
|
|
lessThan.Header.FileName += $" (less than {radix})";
|
|
|
|
|
|
lessThan.Header.Name += $" (less than {radix})";
|
|
|
|
|
|
lessThan.Header.Description += $" (less than {radix})";
|
2020-07-15 09:41:59 -07:00
|
|
|
|
|
2020-08-28 10:32:17 -07:00
|
|
|
|
DatFile greaterThan = Create(Header.CloneStandard());
|
|
|
|
|
|
greaterThan.Header.FileName += $" (equal-greater than {radix})";
|
|
|
|
|
|
greaterThan.Header.Name += $" (equal-greater than {radix})";
|
|
|
|
|
|
greaterThan.Header.Description += $" (equal-greater than {radix})";
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
|
|
|
|
|
// Now populate each of the DAT objects in turn
|
2020-07-26 22:34:45 -07:00
|
|
|
|
Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-07-26 22:34:45 -07:00
|
|
|
|
List<DatItem> items = Items[key];
|
2019-01-08 11:49:31 -08:00
|
|
|
|
foreach (DatItem item in items)
|
|
|
|
|
|
{
|
|
|
|
|
|
// If the file is not a Rom, it automatically goes in the "lesser" dat
|
2019-01-08 12:11:55 -08:00
|
|
|
|
if (item.ItemType != ItemType.Rom)
|
2020-08-28 10:32:17 -07:00
|
|
|
|
lessThan.Items.Add(key, item);
|
2020-06-10 22:37:19 -07:00
|
|
|
|
|
2019-01-08 11:49:31 -08:00
|
|
|
|
// If the file is a Rom and less than the radix, put it in the "lesser" dat
|
2020-08-27 16:57:22 -07:00
|
|
|
|
else if (item.ItemType == ItemType.Rom && (item as Rom).Size < radix)
|
2020-08-28 10:32:17 -07:00
|
|
|
|
lessThan.Items.Add(key, item);
|
2020-06-10 22:37:19 -07:00
|
|
|
|
|
2019-01-08 11:49:31 -08:00
|
|
|
|
// If the file is a Rom and greater than or equal to the radix, put it in the "greater" dat
|
2020-08-27 16:57:22 -07:00
|
|
|
|
else if (item.ItemType == ItemType.Rom && (item as Rom).Size >= radix)
|
2020-08-28 10:32:17 -07:00
|
|
|
|
greaterThan.Items.Add(key, item);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
|
|
|
|
|
});
|
|
|
|
|
|
|
2020-08-28 10:32:17 -07:00
|
|
|
|
// Then return both DatFiles
|
|
|
|
|
|
return (lessThan, greaterThan);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
|
|
|
|
|
|
/// Split a DAT by type of DatItem
|
|
|
|
|
|
/// </summary>
|
2020-08-28 10:32:17 -07:00
|
|
|
|
/// <returns>Dictionary of ItemType to DatFile mappings</returns>
|
|
|
|
|
|
public Dictionary<ItemType, DatFile> SplitByType()
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
|
|
|
|
|
// Create each of the respective output DATs
|
|
|
|
|
|
Globals.Logger.User("Creating and populating new DATs");
|
2020-07-15 09:41:59 -07:00
|
|
|
|
|
2020-08-28 10:32:17 -07:00
|
|
|
|
// Create the set of type-to-dat mappings
|
|
|
|
|
|
Dictionary<ItemType, DatFile> typeDats = new Dictionary<ItemType, DatFile>();
|
2020-07-15 09:41:59 -07:00
|
|
|
|
|
2020-08-28 10:32:17 -07:00
|
|
|
|
// We only care about a subset of types
|
|
|
|
|
|
List<ItemType> outputTypes = new List<ItemType>
|
|
|
|
|
|
{
|
|
|
|
|
|
ItemType.Disk,
|
|
|
|
|
|
ItemType.Media,
|
|
|
|
|
|
ItemType.Rom,
|
|
|
|
|
|
ItemType.Sample,
|
|
|
|
|
|
};
|
2020-08-27 16:57:22 -07:00
|
|
|
|
|
2020-08-28 10:32:17 -07:00
|
|
|
|
// Setup all of the DatFiles
|
|
|
|
|
|
foreach (ItemType itemType in outputTypes)
|
|
|
|
|
|
{
|
|
|
|
|
|
typeDats[itemType] = Create(Header.CloneStandard());
|
|
|
|
|
|
typeDats[itemType].Header.FileName += $" ({itemType})";
|
|
|
|
|
|
typeDats[itemType].Header.Name += $" ({itemType})";
|
|
|
|
|
|
typeDats[itemType].Header.Description += $" ({itemType})";
|
|
|
|
|
|
}
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
|
|
|
|
|
// Now populate each of the DAT objects in turn
|
2020-08-28 10:32:17 -07:00
|
|
|
|
Parallel.ForEach(outputTypes, Globals.ParallelOptions, itemType =>
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-08-28 10:32:17 -07:00
|
|
|
|
FillWithItemType(typeDats[itemType], itemType);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
});
|
|
|
|
|
|
|
2020-08-28 10:32:17 -07:00
|
|
|
|
return typeDats;
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
#endregion
|
|
|
|
|
|
|
|
|
|
|
|
#region Writing
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
|
|
|
|
|
|
/// Create and open an output file for writing direct from a dictionary
|
|
|
|
|
|
/// </summary>
|
2020-07-31 14:04:10 -07:00
|
|
|
|
/// <param name="outDir">Set the output directory (current directory on null)</param>
|
2019-01-08 11:49:31 -08:00
|
|
|
|
/// <param name="norename">True if games should only be compared on game and file name (default), false if system and source are counted</param>
|
|
|
|
|
|
/// <param name="stats">True if DAT statistics should be output on write, false otherwise (default)</param>
|
|
|
|
|
|
/// <param name="ignoreblanks">True if blank roms should be skipped on output, false otherwise (default)</param>
|
|
|
|
|
|
/// <param name="overwrite">True if files should be overwritten (default), false if they should be renamed instead</param>
|
|
|
|
|
|
/// <returns>True if the DAT was written correctly, false otherwise</returns>
|
2020-07-31 14:04:10 -07:00
|
|
|
|
public bool Write(string outDir, bool norename = true, bool stats = false, bool ignoreblanks = false, bool overwrite = true)
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
|
|
|
|
|
// If there's nothing there, abort
|
2020-07-27 01:39:32 -07:00
|
|
|
|
if (Items.TotalCount == 0)
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
|
|
|
|
|
Globals.Logger.User("There were no items to write out!");
|
|
|
|
|
|
return false;
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Ensure the output directory is set and created
|
2020-07-15 09:41:59 -07:00
|
|
|
|
outDir = DirectoryExtensions.Ensure(outDir, create: true);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
|
|
|
|
|
// If the DAT has no output format, default to XML
|
2020-07-27 10:26:08 -07:00
|
|
|
|
if (Header.DatFormat == 0)
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
|
|
|
|
|
Globals.Logger.Verbose("No DAT format defined, defaulting to XML");
|
2020-07-27 10:26:08 -07:00
|
|
|
|
Header.DatFormat = DatFormat.Logiqx;
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Make sure that the three essential fields are filled in
|
2020-07-27 10:26:08 -07:00
|
|
|
|
if (string.IsNullOrWhiteSpace(Header.FileName) && string.IsNullOrWhiteSpace(Header.Name) && string.IsNullOrWhiteSpace(Header.Description))
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-07-27 10:26:08 -07:00
|
|
|
|
Header.FileName = Header.Name = Header.Description = "Default";
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
2020-07-27 10:26:08 -07:00
|
|
|
|
else if (string.IsNullOrWhiteSpace(Header.FileName) && string.IsNullOrWhiteSpace(Header.Name) && !string.IsNullOrWhiteSpace(Header.Description))
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-07-27 10:26:08 -07:00
|
|
|
|
Header.FileName = Header.Name = Header.Description;
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
2020-07-27 10:26:08 -07:00
|
|
|
|
else if (string.IsNullOrWhiteSpace(Header.FileName) && !string.IsNullOrWhiteSpace(Header.Name) && string.IsNullOrWhiteSpace(Header.Description))
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-07-27 10:26:08 -07:00
|
|
|
|
Header.FileName = Header.Description = Header.Name;
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
2020-07-27 10:26:08 -07:00
|
|
|
|
else if (string.IsNullOrWhiteSpace(Header.FileName) && !string.IsNullOrWhiteSpace(Header.Name) && !string.IsNullOrWhiteSpace(Header.Description))
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-07-27 10:26:08 -07:00
|
|
|
|
Header.FileName = Header.Description;
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
2020-07-27 10:26:08 -07:00
|
|
|
|
else if (!string.IsNullOrWhiteSpace(Header.FileName) && string.IsNullOrWhiteSpace(Header.Name) && string.IsNullOrWhiteSpace(Header.Description))
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-07-27 10:26:08 -07:00
|
|
|
|
Header.Name = Header.Description = Header.FileName;
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
2020-07-27 10:26:08 -07:00
|
|
|
|
else if (!string.IsNullOrWhiteSpace(Header.FileName) && string.IsNullOrWhiteSpace(Header.Name) && !string.IsNullOrWhiteSpace(Header.Description))
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-07-27 10:26:08 -07:00
|
|
|
|
Header.Name = Header.Description;
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
2020-07-27 10:26:08 -07:00
|
|
|
|
else if (!string.IsNullOrWhiteSpace(Header.FileName) && !string.IsNullOrWhiteSpace(Header.Name) && string.IsNullOrWhiteSpace(Header.Description))
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-07-27 10:26:08 -07:00
|
|
|
|
Header.Description = Header.Name;
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
2020-07-27 10:26:08 -07:00
|
|
|
|
else if (!string.IsNullOrWhiteSpace(Header.FileName) && !string.IsNullOrWhiteSpace(Header.Name) && !string.IsNullOrWhiteSpace(Header.Description))
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
|
|
|
|
|
// Nothing is needed
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Output initial statistics, for kicks
|
|
|
|
|
|
if (stats)
|
2020-07-15 09:41:59 -07:00
|
|
|
|
{
|
2020-07-27 01:39:32 -07:00
|
|
|
|
if (Items.RomCount + Items.DiskCount == 0)
|
2020-07-26 22:34:45 -07:00
|
|
|
|
Items.RecalculateStats();
|
2020-07-15 09:41:59 -07:00
|
|
|
|
|
2020-08-24 22:25:47 -07:00
|
|
|
|
Items.BucketBy(Field.Machine_Name, DedupeType.None, norename: true);
|
2020-07-15 09:41:59 -07:00
|
|
|
|
|
|
|
|
|
|
var consoleOutput = BaseReport.Create(StatReportFormat.None, null, true, true);
|
2020-07-27 10:26:08 -07:00
|
|
|
|
consoleOutput.ReplaceStatistics(Header.FileName, Items.Keys.Count(), Items);
|
2020-07-15 09:41:59 -07:00
|
|
|
|
}
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
|
|
|
|
|
// Bucket and dedupe according to the flag
|
2020-07-27 10:26:08 -07:00
|
|
|
|
if (Header.DedupeRoms == DedupeType.Full)
|
2020-08-25 11:20:50 -07:00
|
|
|
|
Items.BucketBy(Field.DatItem_CRC, Header.DedupeRoms, norename: norename);
|
2020-07-27 10:26:08 -07:00
|
|
|
|
else if (Header.DedupeRoms == DedupeType.Game)
|
2020-08-24 22:25:47 -07:00
|
|
|
|
Items.BucketBy(Field.Machine_Name, Header.DedupeRoms, norename: norename);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
|
|
|
|
|
// Bucket roms by game name, if not already
|
2020-08-24 22:25:47 -07:00
|
|
|
|
Items.BucketBy(Field.Machine_Name, DedupeType.None, norename: norename);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
|
|
|
|
|
// Output the number of items we're going to be writing
|
2020-07-27 10:26:08 -07:00
|
|
|
|
Globals.Logger.User($"A total of {Items.TotalCount} items will be written out to '{Header.FileName}'");
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
|
|
|
|
|
// Get the outfile names
|
2020-07-27 10:26:08 -07:00
|
|
|
|
Dictionary<DatFormat, string> outfiles = Header.CreateOutFileNames(outDir, overwrite);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
|
|
|
|
|
try
|
|
|
|
|
|
{
|
|
|
|
|
|
// Write out all required formats
|
|
|
|
|
|
Parallel.ForEach(outfiles.Keys, Globals.ParallelOptions, datFormat =>
|
|
|
|
|
|
{
|
|
|
|
|
|
string outfile = outfiles[datFormat];
|
|
|
|
|
|
try
|
|
|
|
|
|
{
|
2020-07-15 09:41:59 -07:00
|
|
|
|
Create(datFormat, this)?.WriteToFile(outfile, ignoreblanks);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
|
|
|
|
|
catch (Exception ex)
|
|
|
|
|
|
{
|
2020-06-10 22:37:19 -07:00
|
|
|
|
Globals.Logger.Error($"Datfile {outfile} could not be written out: {ex}");
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
2019-09-20 10:30:30 -07:00
|
|
|
|
|
2019-01-08 11:49:31 -08:00
|
|
|
|
});
|
|
|
|
|
|
}
|
|
|
|
|
|
catch (Exception ex)
|
|
|
|
|
|
{
|
|
|
|
|
|
Globals.Logger.Error(ex.ToString());
|
|
|
|
|
|
return false;
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
return true;
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
        /// <summary>
        /// Create and open an output file for writing direct from a dictionary
        /// </summary>
        /// <param name="outfile">Name of the file to write to</param>
        /// <param name="ignoreblanks">True if blank roms should be skipped on output, false otherwise (default)</param>
        /// <returns>True if the DAT was written correctly, false otherwise</returns>
        public abstract bool WriteToFile(string outfile, bool ignoreblanks = false);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
|
|
|
|
|
/// <summary>
|
|
|
|
|
|
/// Process an item and correctly set the item name
|
|
|
|
|
|
/// </summary>
|
|
|
|
|
|
/// <param name="item">DatItem to update</param>
|
|
|
|
|
|
/// <param name="forceRemoveQuotes">True if the Quotes flag should be ignored, false otherwise</param>
|
|
|
|
|
|
/// <param name="forceRomName">True if the UseRomName should be always on (default), false otherwise</param>
|
|
|
|
|
|
protected void ProcessItemName(DatItem item, bool forceRemoveQuotes, bool forceRomName = true)
|
|
|
|
|
|
{
|
|
|
|
|
|
string name = item.Name;
|
|
|
|
|
|
|
|
|
|
|
|
// Backup relevant values and set new ones accordingly
|
2020-07-27 10:26:08 -07:00
|
|
|
|
bool quotesBackup = Header.Quotes;
|
|
|
|
|
|
bool useRomNameBackup = Header.UseRomName;
|
2019-01-08 11:49:31 -08:00
|
|
|
|
if (forceRemoveQuotes)
|
2020-07-27 10:26:08 -07:00
|
|
|
|
Header.Quotes = false;
|
2020-06-10 22:37:19 -07:00
|
|
|
|
|
2019-01-08 11:49:31 -08:00
|
|
|
|
if (forceRomName)
|
2020-07-27 10:26:08 -07:00
|
|
|
|
Header.UseRomName = true;
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
|
|
|
|
|
// Create the proper Prefix and Postfix
|
|
|
|
|
|
string pre = CreatePrefixPostfix(item, true);
|
|
|
|
|
|
string post = CreatePrefixPostfix(item, false);
|
|
|
|
|
|
|
2020-08-20 11:23:48 -07:00
|
|
|
|
// If we're in Depot mode, take care of that instead
|
2020-08-28 01:13:55 -07:00
|
|
|
|
if (Header.OutputDepot?.IsActive == true)
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-08-27 16:57:22 -07:00
|
|
|
|
if (item.ItemType == ItemType.Disk)
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-08-27 16:57:22 -07:00
|
|
|
|
Disk disk = item as Disk;
|
2020-06-10 22:37:19 -07:00
|
|
|
|
|
2019-01-08 11:49:31 -08:00
|
|
|
|
// We can only write out if there's a SHA-1
|
2020-08-27 16:57:22 -07:00
|
|
|
|
if (!string.IsNullOrWhiteSpace(disk.SHA1))
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-08-27 16:57:22 -07:00
|
|
|
|
name = PathExtensions.GetDepotPath(disk.SHA1, Header.OutputDepot.Depth).Replace('\\', '/');
|
|
|
|
|
|
item.Name = pre + name + post;
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
|
|
|
|
|
}
|
2020-08-27 16:57:22 -07:00
|
|
|
|
else if (item.ItemType == ItemType.Media)
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-08-27 16:57:22 -07:00
|
|
|
|
Media media = item as Media;
|
2020-06-10 22:37:19 -07:00
|
|
|
|
|
2019-01-08 11:49:31 -08:00
|
|
|
|
// We can only write out if there's a SHA-1
|
2020-08-27 16:57:22 -07:00
|
|
|
|
if (!string.IsNullOrWhiteSpace(media.SHA1))
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-08-27 16:57:22 -07:00
|
|
|
|
name = PathExtensions.GetDepotPath(media.SHA1, Header.OutputDepot.Depth).Replace('\\', '/');
|
2019-01-08 11:49:31 -08:00
|
|
|
|
item.Name = pre + name + post;
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
2020-08-27 16:57:22 -07:00
|
|
|
|
else if (item.ItemType == ItemType.Rom)
|
|
|
|
|
|
{
|
|
|
|
|
|
Rom rom = item as Rom;
|
|
|
|
|
|
|
|
|
|
|
|
// We can only write out if there's a SHA-1
|
|
|
|
|
|
if (!string.IsNullOrWhiteSpace(rom.SHA1))
|
|
|
|
|
|
{
|
|
|
|
|
|
name = PathExtensions.GetDepotPath(rom.SHA1, Header.OutputDepot.Depth).Replace('\\', '/');
|
|
|
|
|
|
item.Name = $"{pre}{name}{post}";
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2019-01-08 11:49:31 -08:00
|
|
|
|
return;
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2020-07-27 10:26:08 -07:00
|
|
|
|
if (!string.IsNullOrWhiteSpace(Header.ReplaceExtension) || Header.RemoveExtension)
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-07-27 10:26:08 -07:00
|
|
|
|
if (Header.RemoveExtension)
|
|
|
|
|
|
Header.ReplaceExtension = string.Empty;
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
|
|
|
|
|
string dir = Path.GetDirectoryName(name);
|
2020-06-10 22:37:19 -07:00
|
|
|
|
dir = dir.TrimStart(Path.DirectorySeparatorChar);
|
2020-07-27 10:26:08 -07:00
|
|
|
|
name = Path.Combine(dir, Path.GetFileNameWithoutExtension(name) + Header.ReplaceExtension);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
2020-06-10 22:37:19 -07:00
|
|
|
|
|
2020-07-27 10:26:08 -07:00
|
|
|
|
if (!string.IsNullOrWhiteSpace(Header.AddExtension))
|
|
|
|
|
|
name += Header.AddExtension;
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
2020-07-27 10:26:08 -07:00
|
|
|
|
if (Header.UseRomName && Header.GameName)
|
2020-08-20 13:17:14 -07:00
|
|
|
|
name = Path.Combine(item.Machine.Name, name);
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
|
|
|
|
|
// Now assign back the item name
|
|
|
|
|
|
item.Name = pre + name + post;
|
|
|
|
|
|
|
|
|
|
|
|
// Restore all relevant values
|
|
|
|
|
|
if (forceRemoveQuotes)
|
2020-07-27 10:26:08 -07:00
|
|
|
|
Header.Quotes = quotesBackup;
|
2020-06-10 22:37:19 -07:00
|
|
|
|
|
2019-01-08 11:49:31 -08:00
|
|
|
|
if (forceRomName)
|
2020-07-27 10:26:08 -07:00
|
|
|
|
Header.UseRomName = useRomNameBackup;
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
/// <summary>
|
|
|
|
|
|
/// Create a prefix or postfix from inputs
|
|
|
|
|
|
/// </summary>
|
|
|
|
|
|
/// <param name="item">DatItem to create a prefix/postfix for</param>
|
|
|
|
|
|
/// <param name="prefix">True for prefix, false for postfix</param>
|
|
|
|
|
|
/// <returns>Sanitized string representing the postfix or prefix</returns>
|
|
|
|
|
|
protected string CreatePrefixPostfix(DatItem item, bool prefix)
|
|
|
|
|
|
{
|
|
|
|
|
|
// Initialize strings
|
2020-06-10 22:37:19 -07:00
|
|
|
|
string fix = string.Empty,
|
2020-08-20 13:17:14 -07:00
|
|
|
|
game = item.Machine.Name,
|
2019-01-08 11:49:31 -08:00
|
|
|
|
name = item.Name,
|
|
|
|
|
|
crc = string.Empty,
|
|
|
|
|
|
md5 = string.Empty,
|
2020-06-05 22:26:44 -07:00
|
|
|
|
ripemd160 = string.Empty,
|
2019-01-08 11:49:31 -08:00
|
|
|
|
sha1 = string.Empty,
|
|
|
|
|
|
sha256 = string.Empty,
|
|
|
|
|
|
sha384 = string.Empty,
|
|
|
|
|
|
sha512 = string.Empty,
|
|
|
|
|
|
size = string.Empty;
|
|
|
|
|
|
|
|
|
|
|
|
// If we have a prefix
|
|
|
|
|
|
if (prefix)
|
2020-07-27 10:26:08 -07:00
|
|
|
|
fix = Header.Prefix + (Header.Quotes ? "\"" : string.Empty);
|
2020-06-10 22:37:19 -07:00
|
|
|
|
|
2019-01-08 11:49:31 -08:00
|
|
|
|
// If we have a postfix
|
|
|
|
|
|
else
|
2020-07-27 10:26:08 -07:00
|
|
|
|
fix = (Header.Quotes ? "\"" : string.Empty) + Header.Postfix;
|
2019-01-08 11:49:31 -08:00
|
|
|
|
|
|
|
|
|
|
// Ensure we have the proper values for replacement
|
2020-08-27 16:57:22 -07:00
|
|
|
|
if (item.ItemType == ItemType.Disk)
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-08-27 16:57:22 -07:00
|
|
|
|
md5 = (item as Disk).MD5 ?? string.Empty;
|
|
|
|
|
|
sha1 = (item as Disk).SHA1 ?? string.Empty;
|
|
|
|
|
|
}
|
|
|
|
|
|
else if (item.ItemType == ItemType.Media)
|
|
|
|
|
|
{
|
|
|
|
|
|
md5 = (item as Media).MD5 ?? string.Empty;
|
|
|
|
|
|
sha1 = (item as Media).SHA1 ?? string.Empty;
|
|
|
|
|
|
sha256 = (item as Media).SHA256 ?? string.Empty;
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
2020-08-27 16:57:22 -07:00
|
|
|
|
else if (item.ItemType == ItemType.Rom)
|
2019-01-08 11:49:31 -08:00
|
|
|
|
{
|
2020-08-27 16:57:22 -07:00
|
|
|
|
crc = (item as Rom).CRC ?? string.Empty;
|
|
|
|
|
|
md5 = (item as Rom).MD5 ?? string.Empty;
|
2020-07-15 09:41:59 -07:00
|
|
|
|
#if NET_FRAMEWORK
|
2020-08-27 16:57:22 -07:00
|
|
|
|
ripemd160 = (item as Rom).RIPEMD160 ?? string.Empty;
|
2020-07-15 09:41:59 -07:00
|
|
|
|
#endif
|
2020-08-27 16:57:22 -07:00
|
|
|
|
sha1 = (item as Rom).SHA1 ?? string.Empty;
|
|
|
|
|
|
sha256 = (item as Rom).SHA256 ?? string.Empty;
|
|
|
|
|
|
sha384 = (item as Rom).SHA384 ?? string.Empty;
|
|
|
|
|
|
sha512 = (item as Rom).SHA512 ?? string.Empty;
|
|
|
|
|
|
size = (item as Rom).Size.ToString();
|
2019-01-08 11:49:31 -08:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Now do bulk replacement where possible
|
|
|
|
|
|
fix = fix
|
|
|
|
|
|
.Replace("%game%", game)
|
|
|
|
|
|
.Replace("%machine%", game)
|
|
|
|
|
|
.Replace("%name%", name)
|
2020-08-24 11:56:49 -07:00
|
|
|
|
.Replace("%manufacturer%", item.Machine.Manufacturer ?? string.Empty)
|
|
|
|
|
|
.Replace("%publisher%", item.Machine.Publisher ?? string.Empty)
|
|
|
|
|
|
.Replace("%category%", item.Machine.Category ?? string.Empty)
|
2019-01-08 11:49:31 -08:00
|
|
|
|
.Replace("%crc%", crc)
|
|
|
|
|
|
.Replace("%md5%", md5)
|
2020-06-05 22:26:44 -07:00
|
|
|
|
.Replace("%ripemd160%", ripemd160)
|
2019-01-08 11:49:31 -08:00
|
|
|
|
.Replace("%sha1%", sha1)
|
|
|
|
|
|
.Replace("%sha256%", sha256)
|
|
|
|
|
|
.Replace("%sha384%", sha384)
|
|
|
|
|
|
.Replace("%sha512%", sha512)
|
|
|
|
|
|
.Replace("%size%", size);
|
|
|
|
|
|
|
|
|
|
|
|
// TODO: Add GameName logic here too?
|
|
|
|
|
|
|
|
|
|
|
|
return fix;
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
#endregion
|
|
|
|
|
|
}
|
2016-04-19 01:11:23 -07:00
|
|
|
|
}
|