using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Threading.Tasks;

using SabreTools.Library.Data;
using SabreTools.Library.DatItems;
using SabreTools.Library.FileTypes;
using SabreTools.Library.Filtering;
using SabreTools.Library.Reports;
using SabreTools.Library.Skippers;
using SabreTools.Library.Tools;
using NaturalSort;

namespace SabreTools.Library.DatFiles
{
    /// <summary>
    /// Represents a format-agnostic DAT
    /// </summary>
    public abstract class DatFile
    {
        #region Fields

        /// <summary>
        /// Header values
        /// </summary>
        public DatHeader Header { get; set; } = new DatHeader();

        /// <summary>
        /// DatItems and related statistics
        /// </summary>
        public ItemDictionary Items { get; set; } = new ItemDictionary();

        #endregion

        #region Instance Methods

        #region Constructors

        /// <summary>
        /// Create a new DatFile from an existing one
        /// </summary>
        /// <param name="datFile">DatFile to get the values from</param>
        public DatFile(DatFile datFile)
        {
            if (datFile != null)
            {
                Header = datFile.Header;
                Items = datFile.Items;
            }
        }

        /// <summary>
        /// Create a specific type of DatFile to be used based on a format and a base DAT
        /// </summary>
        /// <param name="datFormat">Format of the DAT to be created</param>
        /// <param name="baseDat">DatFile containing the information to use in specific operations</param>
        /// <returns>DatFile of the specific internal type that corresponds to the inputs</returns>
        public static DatFile Create(DatFormat? datFormat = null, DatFile baseDat = null)
        {
            switch (datFormat)
            {
                case DatFormat.AttractMode:
                    return new AttractMode(baseDat);

                case DatFormat.ClrMamePro:
                    return new ClrMamePro(baseDat);

                case DatFormat.CSV:
                    return new SeparatedValue(baseDat, ',');

                case DatFormat.DOSCenter:
                    return new DosCenter(baseDat);

                case DatFormat.EverdriveSMDB:
                    return new EverdriveSMDB(baseDat);

                case DatFormat.Json:
                    return new Json(baseDat);

                case DatFormat.Listrom:
                    return new Listrom(baseDat);

                case DatFormat.Listxml:
                    return new Listxml(baseDat);

                case DatFormat.Logiqx:
                    return new Logiqx(baseDat, false);

                case DatFormat.LogiqxDeprecated:
                    return new Logiqx(baseDat, true);

                case DatFormat.MissFile:
                    return new Missfile(baseDat);

                case DatFormat.OfflineList:
                    return new OfflineList(baseDat);

                case DatFormat.OpenMSX:
                    return new OpenMSX(baseDat);

                case DatFormat.RedumpMD5:
                    return new Hashfile(baseDat, Hash.MD5);

#if NET_FRAMEWORK
                case DatFormat.RedumpRIPEMD160:
                    return new Hashfile(baseDat, Hash.RIPEMD160);
#endif

                case DatFormat.RedumpSFV:
                    return new Hashfile(baseDat, Hash.CRC);

                case DatFormat.RedumpSHA1:
                    return new Hashfile(baseDat, Hash.SHA1);

                case DatFormat.RedumpSHA256:
                    return new Hashfile(baseDat, Hash.SHA256);

                case DatFormat.RedumpSHA384:
                    return new Hashfile(baseDat, Hash.SHA384);

                case DatFormat.RedumpSHA512:
                    return new Hashfile(baseDat, Hash.SHA512);

                case DatFormat.RomCenter:
                    return new RomCenter(baseDat);

                case DatFormat.SabreDat:
                    return new SabreDat(baseDat);

                case DatFormat.SoftwareList:
                    return new SoftwareList(baseDat);

                case DatFormat.SSV:
                    return new SeparatedValue(baseDat, ';');

                case DatFormat.TSV:
                    return new SeparatedValue(baseDat, '\t');

                // We use new-style Logiqx as a backup for generic DatFile
                case null:
                default:
                    return new Logiqx(baseDat, false);
            }
        }
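
        // Illustrative sketch of the factory above (not part of the original file):
        // the format value picks the concrete subtype, and the optional base DAT
        // seeds it with existing header and item data. "cmp" is a made-up name.
        //
        //     DatFile cmp = DatFile.Create(DatFormat.ClrMamePro);
        //     DatFile logiqx = DatFile.Create(DatFormat.Logiqx, baseDat: cmp);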

        /// <summary>
        /// Create a new DatFile from an existing DatHeader
        /// </summary>
        /// <param name="datHeader">DatHeader to get the values from</param>
        public static DatFile Create(DatHeader datHeader)
        {
            DatFile datFile = Create(datHeader.DatFormat);
            datFile.Header = (DatHeader)datHeader.Clone();
            return datFile;
        }

        /// <summary>
        /// Add items from another DatFile to the existing DatFile
        /// </summary>
        /// <param name="datFile">DatFile to add from</param>
        /// <param name="delete">If items should be deleted from the source DatFile</param>
        public void AddFromExisting(DatFile datFile, bool delete = false)
        {
            // Get the list of keys from the DAT
            foreach (string key in datFile.Items.Keys)
            {
                // Add everything from the key to the internal DAT
                Items.AddRange(key, datFile.Items[key]);

                // Now remove the key from the source DAT
                if (delete)
                    datFile.Items.Remove(key);
            }

            // Now remove the file dictionary from the source DAT
            if (delete)
                datFile.Items = null;
        }
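
        // Hypothetical usage sketch for AddFromExisting (file names are assumptions):
        // with delete set, the source DAT's items are moved rather than copied,
        // leaving its item dictionary null afterwards.
        //
        //     DatFile target = DatFile.Create(DatFormat.Logiqx);
        //     DatFile source = DatFile.CreateAndParse("source.dat");
        //     target.AddFromExisting(source, delete: true);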

        /// <summary>
        /// Apply a DatHeader to an existing DatFile
        /// </summary>
        /// <param name="datHeader">DatHeader to get the values from</param>
        public void ApplyDatHeader(DatHeader datHeader)
        {
            Header.ConditionalCopy(datHeader);
        }

        #endregion

        #region Determination Helpers

        /// <summary>
        /// Split a set of input DATs based on the given information
        /// </summary>
        /// <param name="inputs">List of inputs to be used</param>
        /// <param name="outDir">Output directory for the split files</param>
        /// <param name="inplace">True if files should be written to the source folders, false otherwise</param>
        /// <param name="splittingMode">Type of split to perform, if any</param>
        /// <param name="exta">First extension to split on (Extension Split only)</param>
        /// <param name="extb">Second extension to split on (Extension Split only)</param>
        /// <param name="shortname">True if short filenames should be used, false otherwise (Level Split only)</param>
        /// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise (Level Split only)</param>
        /// <param name="radix">Long value representing the split point (Size Split only)</param>
        public void DetermineSplitType(
            List<string> inputs,
            string outDir,
            bool inplace,
            SplittingMode splittingMode,
            List<string> exta,
            List<string> extb,
            bool shortname,
            bool basedat,
            long radix)
        {
            // If we somehow have the "none" split type, return
            if (splittingMode == SplittingMode.None)
                return;

            // Get only files from the inputs
            List<ParentablePath> files = DirectoryExtensions.GetFilesOnly(inputs, appendparent: true);

            // Loop over the input files
            foreach (ParentablePath file in files)
            {
                // Create and fill the new DAT
                DatFile internalDat = Create(Header);
                Parse(file);

                // Get the output directory
                outDir = file.GetOutputPath(outDir, inplace);

                // Split and write the DAT
                if (splittingMode.HasFlag(SplittingMode.Extension))
                    internalDat.SplitByExtension(outDir, exta, extb);

                if (splittingMode.HasFlag(SplittingMode.Hash))
                    internalDat.SplitByHash(outDir);

                if (splittingMode.HasFlag(SplittingMode.Level))
                    internalDat.SplitByLevel(outDir, shortname, basedat);

                if (splittingMode.HasFlag(SplittingMode.Size))
                    internalDat.SplitBySize(outDir, radix);

                if (splittingMode.HasFlag(SplittingMode.Type))
                    internalDat.SplitByType(outDir);
            }
        }
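
        // A minimal sketch of calling the splitter (paths and extensions are made
        // up): splitting by extension routes items ending in .bin and .cue into
        // two separate output DATs.
        //
        //     datFile.DetermineSplitType(
        //         new List<string> { "Example.dat" }, "out", inplace: false,
        //         SplittingMode.Extension,
        //         new List<string> { "bin" }, new List<string> { "cue" },
        //         shortname: false, basedat: false, radix: 0);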

        /// <summary>
        /// Determine if input files should be merged, diffed, or processed individually
        /// </summary>
        /// <param name="inputPaths">Names of the input files and/or folders</param>
        /// <param name="basePaths">Names of base files and/or folders</param>
        /// <param name="outDir">Optional param for output directory</param>
        /// <param name="updateMode">Non-zero flag for diffing mode, zero otherwise</param>
        /// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
        /// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
        /// <param name="filter">Filter object to be passed to the DatItem level</param>
        /// <param name="updateFields">List of Fields representing what should be updated [only for base replacement]</param>
        /// <param name="onlySame">True if descriptions should only be replaced if the game name is the same, false otherwise [only for base replacement]</param>
        public void DetermineUpdateType(
            List<string> inputPaths,
            List<string> basePaths,
            string outDir,
            UpdateMode updateMode,
            bool inplace,
            bool skip,
            Filter filter,
            List<Field> updateFields,
            bool onlySame)
        {
            // Ensure we only have files in the inputs
            List<ParentablePath> inputFileNames = DirectoryExtensions.GetFilesOnly(inputPaths, appendparent: true);
            List<ParentablePath> baseFileNames = DirectoryExtensions.GetFilesOnly(basePaths);

            // If we're in standard update mode, run through all of the inputs
            if (updateMode == UpdateMode.None)
            {
                Update(inputFileNames, outDir, inplace, filter);
                return;
            }

            // Reverse inputs if we're in a required mode
            if (updateMode.HasFlag(UpdateMode.DiffReverseCascade))
                inputFileNames.Reverse();
            if (updateMode.HasFlag(UpdateMode.ReverseBaseReplace))
                baseFileNames.Reverse();

            // If we're in merging mode
            if (updateMode.HasFlag(UpdateMode.Merge))
            {
                // Populate the combined data and get the headers
                PopulateUserData(inputFileNames, filter);
                MergeNoDiff(inputFileNames, outDir);
            }

            // If we have one of the standard diffing modes
            else if (updateMode.HasFlag(UpdateMode.DiffDupesOnly)
                || updateMode.HasFlag(UpdateMode.DiffNoDupesOnly)
                || updateMode.HasFlag(UpdateMode.DiffIndividualsOnly))
            {
                // Populate the combined data
                PopulateUserData(inputFileNames, filter);
                DiffNoCascade(inputFileNames, outDir, updateMode);
            }

            // If we have one of the cascaded diffing modes
            else if (updateMode.HasFlag(UpdateMode.DiffCascade)
                || updateMode.HasFlag(UpdateMode.DiffReverseCascade))
            {
                // Populate the combined data and get the headers
                List<DatHeader> datHeaders = PopulateUserData(inputFileNames, filter);
                DiffCascade(inputFileNames, datHeaders, outDir, inplace, skip);
            }

            // If we have diff against mode
            else if (updateMode.HasFlag(UpdateMode.DiffAgainst))
            {
                // Populate the combined data
                PopulateUserData(baseFileNames, filter);
                DiffAgainst(inputFileNames, outDir, inplace);
            }

            // If we have one of the base replacement modes
            else if (updateMode.HasFlag(UpdateMode.BaseReplace)
                || updateMode.HasFlag(UpdateMode.ReverseBaseReplace))
            {
                // Populate the combined data
                PopulateUserData(baseFileNames, filter);
                BaseReplace(inputFileNames, outDir, inplace, filter, updateFields, onlySame);
            }

            return;
        }
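
        // Hedged example of the dispatch above (input names are placeholders): a
        // DiffAgainst run parses base.dat into this DatFile, then writes what
        // remains of new.dat after removing everything the base already has.
        //
        //     datFile.DetermineUpdateType(
        //         new List<string> { "new.dat" }, new List<string> { "base.dat" },
        //         "out", UpdateMode.DiffAgainst, inplace: false, skip: false,
        //         filter, new List<Field>(), onlySame: false);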

        #endregion

        #region Converting and Updating

        /// <summary>
        /// Replace item values from the base set represented by the current DAT
        /// </summary>
        /// <param name="inputs">Names of the input files</param>
        /// <param name="outDir">Optional param for output directory</param>
        /// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
        /// <param name="filter">Filter object to be passed to the DatItem level</param>
        /// <param name="updateFields">List of Fields representing what should be updated [only for base replacement]</param>
        /// <param name="onlySame">True if descriptions should only be replaced if the game name is the same, false otherwise</param>
        public void BaseReplace(
            List<string> inputs,
            string outDir,
            bool inplace,
            Filter filter,
            List<Field> updateFields,
            bool onlySame)
        {
            List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
            BaseReplace(paths, outDir, inplace, filter, updateFields, onlySame);
        }

        /// <summary>
        /// Output diffs against a base set represented by the current DAT
        /// </summary>
        /// <param name="inputs">Names of the input files</param>
        /// <param name="outDir">Optional param for output directory</param>
        /// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
        public void DiffAgainst(List<string> inputs, string outDir, bool inplace)
        {
            List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
            DiffAgainst(paths, outDir, inplace);
        }

        /// <summary>
        /// Output cascading diffs
        /// </summary>
        /// <param name="inputs">List of inputs to write out from</param>
        /// <param name="datHeaders">Dat headers used optionally</param>
        /// <param name="outDir">Output directory to write the DATs to</param>
        /// <param name="inplace">True if cascaded diffs are output in-place, false otherwise</param>
        /// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
        public void DiffCascade(
            List<string> inputs,
            List<DatHeader> datHeaders,
            string outDir,
            bool inplace,
            bool skip)
        {
            List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
            DiffCascade(paths, datHeaders, outDir, inplace, skip);
        }

        /// <summary>
        /// Output non-cascading diffs
        /// </summary>
        /// <param name="inputs">List of inputs to write out from</param>
        /// <param name="outDir">Output directory to write the DATs to</param>
        /// <param name="diff">Non-zero flag for diffing mode, zero otherwise</param>
        public void DiffNoCascade(List<string> inputs, string outDir, UpdateMode diff)
        {
            List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
            DiffNoCascade(paths, outDir, diff);
        }

        /// <summary>
        /// Output user defined merge
        /// </summary>
        /// <param name="inputs">List of inputs to write out from</param>
        /// <param name="outDir">Output directory to write the DATs to</param>
        public void MergeNoDiff(List<string> inputs, string outDir)
        {
            List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
            MergeNoDiff(paths, outDir);
        }

        /// <summary>
        /// Populate the user DatData object from the input files
        /// </summary>
        /// <param name="inputs">Paths to DATs to parse</param>
        /// <param name="filter">Filter object to be passed to the DatItem level</param>
        /// <returns>List of DatData objects representing headers</returns>
        public List<DatHeader> PopulateUserData(List<string> inputs, Filter filter)
        {
            List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
            return PopulateUserData(paths, filter);
        }

        /// <summary>
        /// Convert, update, and filter a DAT file or set of files
        /// </summary>
        /// <param name="inputs">Names of the input files and/or folders</param>
        /// <param name="outDir">Optional param for output directory</param>
        /// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
        /// <param name="filter">Filter object to be passed to the DatItem level</param>
        public void Update(List<string> inputs, string outDir, bool inplace, Filter filter)
        {
            List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
            Update(paths, outDir, inplace, filter);
        }
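
        // Sketch (file names assumed): the string-based wrappers above simply lift
        // plain paths into ParentablePath before delegating to the internal overloads.
        //
        //     datFile.Update(new List<string> { "a.dat", "b.dat" }, "out",
        //         inplace: false, filter);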

        /// <summary>
        /// Replace item values from the base set represented by the current DAT
        /// </summary>
        /// <param name="inputs">Names of the input files</param>
        /// <param name="outDir">Optional param for output directory</param>
        /// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
        /// <param name="filter">Filter object to be passed to the DatItem level</param>
        /// <param name="updateFields">List of Fields representing what should be updated [only for base replacement]</param>
        /// <param name="onlySame">True if descriptions should only be replaced if the game name is the same, false otherwise</param>
        internal void BaseReplace(
            List<ParentablePath> inputs,
            string outDir,
            bool inplace,
            Filter filter,
            List<Field> updateFields,
            bool onlySame)
        {
            // Fields unique to a DatItem
            List<Field> datItemFields = new List<Field>()
            {
                Field.Name,
                Field.PartName,
                Field.PartInterface,
                Field.Features,
                Field.AreaName,
                Field.AreaSize,
                Field.BiosDescription,
                Field.Default,
                Field.Language,
                Field.Date,
                Field.Bios,
                Field.Size,
                Field.Offset,
                Field.Merge,
                Field.Region,
                Field.Index,
                Field.Writable,
                Field.Optional,
                Field.Status,
                Field.Inverted,

                Field.CRC,
                Field.MD5,
#if NET_FRAMEWORK
                Field.RIPEMD160,
#endif
                Field.SHA1,
                Field.SHA256,
                Field.SHA384,
                Field.SHA512,
            };

            // Fields unique to a Machine
            List<Field> machineFields = new List<Field>()
            {
                Field.MachineName,
                Field.Comment,
                Field.Description,
                Field.Year,
                Field.Manufacturer,
                Field.Publisher,
                Field.RomOf,
                Field.CloneOf,
                Field.SampleOf,
                Field.Supported,
                Field.SourceFile,
                Field.Runnable,
                Field.Board,
                Field.RebuildTo,
                Field.Devices,
                Field.SlotOptions,
                Field.Infos,
                Field.MachineType,
            };

            // We want to try to replace each item in each input DAT from the base
            foreach (ParentablePath path in inputs)
            {
                Globals.Logger.User($"Replacing items in '{path.CurrentPath}' from the base DAT");

                // First we parse in the DAT internally
                DatFile intDat = Create(Header.CloneFiltering());
                intDat.Parse(path, 1, keep: true);
                filter.FilterDatFile(intDat, false /* useTags */);

                // If we are matching based on DatItem fields of any sort
                if (updateFields.Intersect(datItemFields).Any())
                {
                    // For comparison's sake, we want to use CRC as the base bucketing
                    Items.BucketBy(BucketedBy.CRC, DedupeType.Full);
                    intDat.Items.BucketBy(BucketedBy.CRC, DedupeType.None);

                    // Then we do a hashwise comparison against the base DAT
                    Parallel.ForEach(intDat.Items.Keys, Globals.ParallelOptions, key =>
                    {
                        List<DatItem> datItems = intDat.Items[key];
                        List<DatItem> newDatItems = new List<DatItem>();
                        foreach (DatItem datItem in datItems)
                        {
                            List<DatItem> dupes = Items.GetDuplicates(datItem, sorted: true);
                            DatItem newDatItem = datItem.Clone() as DatItem;

                            // Cast versions of the new DatItem for use below
                            var archive = newDatItem as Archive;
                            var biosSet = newDatItem as BiosSet;
                            var blank = newDatItem as Blank;
                            var disk = newDatItem as Disk;
                            var release = newDatItem as Release;
                            var rom = newDatItem as Rom;
                            var sample = newDatItem as Sample;

                            if (dupes.Count > 0)
                            {
                                // Get the first duplicate for grabbing information from
                                var firstDupe = dupes.First();
                                var archiveDupe = firstDupe as Archive;
                                var biosSetDupe = firstDupe as BiosSet;
                                var blankDupe = firstDupe as Blank;
                                var diskDupe = firstDupe as Disk;
                                var releaseDupe = firstDupe as Release;
                                var romDupe = firstDupe as Rom;
                                var sampleDupe = firstDupe as Sample;

                                #region Non-hash fields

                                if (updateFields.Contains(Field.Name))
                                    newDatItem.Name = firstDupe.Name;

                                if (updateFields.Contains(Field.PartName))
                                    newDatItem.PartName = firstDupe.PartName;

                                if (updateFields.Contains(Field.PartInterface))
                                    newDatItem.PartInterface = firstDupe.PartInterface;

                                if (updateFields.Contains(Field.Features))
                                    newDatItem.Features = firstDupe.Features;

                                if (updateFields.Contains(Field.AreaName))
                                    newDatItem.AreaName = firstDupe.AreaName;

                                if (updateFields.Contains(Field.AreaSize))
                                    newDatItem.AreaSize = firstDupe.AreaSize;

                                if (updateFields.Contains(Field.BiosDescription))
                                {
                                    if (newDatItem.ItemType == ItemType.BiosSet)
                                        biosSet.Description = biosSetDupe.Description;
                                }

                                if (updateFields.Contains(Field.Default))
                                {
                                    if (newDatItem.ItemType == ItemType.BiosSet)
                                        biosSet.Default = biosSetDupe.Default;

                                    else if (newDatItem.ItemType == ItemType.Release)
                                        release.Default = releaseDupe.Default;
                                }

                                if (updateFields.Contains(Field.Language))
                                {
                                    if (newDatItem.ItemType == ItemType.Release)
                                        release.Language = releaseDupe.Language;
                                }

                                if (updateFields.Contains(Field.Date))
                                {
                                    if (newDatItem.ItemType == ItemType.Release)
                                        release.Date = releaseDupe.Date;

                                    else if (newDatItem.ItemType == ItemType.Rom)
                                        rom.Date = romDupe.Date;
                                }

                                if (updateFields.Contains(Field.Bios))
                                {
                                    if (newDatItem.ItemType == ItemType.Rom)
                                        rom.Bios = romDupe.Bios;
                                }

                                if (updateFields.Contains(Field.Size))
                                {
                                    if (newDatItem.ItemType == ItemType.Rom)
                                        rom.Size = romDupe.Size;
                                }

                                if (updateFields.Contains(Field.Offset))
                                {
                                    if (newDatItem.ItemType == ItemType.Rom)
                                        rom.Offset = romDupe.Offset;
                                }

                                if (updateFields.Contains(Field.Merge))
                                {
                                    if (newDatItem.ItemType == ItemType.Disk)
                                        disk.MergeTag = diskDupe.MergeTag;

                                    else if (newDatItem.ItemType == ItemType.Rom)
                                        rom.MergeTag = romDupe.MergeTag;
                                }

                                if (updateFields.Contains(Field.Region))
                                {
                                    if (newDatItem.ItemType == ItemType.Disk)
                                        disk.Region = diskDupe.Region;

                                    else if (newDatItem.ItemType == ItemType.Release)
                                        release.Region = releaseDupe.Region;

                                    else if (newDatItem.ItemType == ItemType.Rom)
                                        rom.Region = romDupe.Region;
                                }

                                if (updateFields.Contains(Field.Index))
                                {
                                    if (newDatItem.ItemType == ItemType.Disk)
                                        disk.Index = diskDupe.Index;
                                }

                                if (updateFields.Contains(Field.Writable))
                                {
                                    if (newDatItem.ItemType == ItemType.Disk)
                                        disk.Writable = diskDupe.Writable;
                                }

                                if (updateFields.Contains(Field.Optional))
                                {
                                    if (newDatItem.ItemType == ItemType.Disk)
                                        disk.Optional = diskDupe.Optional;

                                    else if (newDatItem.ItemType == ItemType.Rom)
                                        rom.Optional = romDupe.Optional;
                                }

                                if (updateFields.Contains(Field.Status))
                                {
                                    if (newDatItem.ItemType == ItemType.Disk)
                                        disk.ItemStatus = diskDupe.ItemStatus;

                                    else if (newDatItem.ItemType == ItemType.Rom)
                                        rom.ItemStatus = romDupe.ItemStatus;
                                }

                                if (updateFields.Contains(Field.Inverted))
                                {
                                    if (newDatItem.ItemType == ItemType.Rom)
                                        rom.Inverted = romDupe.Inverted;
                                }

                                #endregion

                                #region Hash fields

                                if (updateFields.Contains(Field.CRC))
                                {
                                    if (newDatItem.ItemType == ItemType.Rom)
                                    {
                                        if (string.IsNullOrEmpty(rom.CRC) && !string.IsNullOrEmpty(romDupe.CRC))
                                            rom.CRC = romDupe.CRC;
                                    }
                                }

                                if (updateFields.Contains(Field.MD5))
                                {
                                    if (newDatItem.ItemType == ItemType.Disk)
                                    {
                                        if (string.IsNullOrEmpty(disk.MD5) && !string.IsNullOrEmpty(diskDupe.MD5))
                                            disk.MD5 = diskDupe.MD5;
                                    }

                                    if (newDatItem.ItemType == ItemType.Rom)
                                    {
                                        if (string.IsNullOrEmpty(rom.MD5) && !string.IsNullOrEmpty(romDupe.MD5))
                                            rom.MD5 = romDupe.MD5;
                                    }
                                }

#if NET_FRAMEWORK
                                if (updateFields.Contains(Field.RIPEMD160))
                                {
                                    if (newDatItem.ItemType == ItemType.Disk)
                                    {
                                        if (string.IsNullOrEmpty(disk.RIPEMD160) && !string.IsNullOrEmpty(diskDupe.RIPEMD160))
                                            disk.RIPEMD160 = diskDupe.RIPEMD160;
                                    }

                                    if (newDatItem.ItemType == ItemType.Rom)
                                    {
                                        if (string.IsNullOrEmpty(rom.RIPEMD160) && !string.IsNullOrEmpty(romDupe.RIPEMD160))
                                            rom.RIPEMD160 = romDupe.RIPEMD160;
                                    }
                                }
#endif

                                if (updateFields.Contains(Field.SHA1))
                                {
                                    if (newDatItem.ItemType == ItemType.Disk)
                                    {
                                        if (string.IsNullOrEmpty(disk.SHA1) && !string.IsNullOrEmpty(diskDupe.SHA1))
                                            disk.SHA1 = diskDupe.SHA1;
                                    }

                                    if (newDatItem.ItemType == ItemType.Rom)
                                    {
                                        if (string.IsNullOrEmpty(rom.SHA1) && !string.IsNullOrEmpty(romDupe.SHA1))
                                            rom.SHA1 = romDupe.SHA1;
                                    }
                                }

                                if (updateFields.Contains(Field.SHA256))
                                {
                                    if (newDatItem.ItemType == ItemType.Disk)
                                    {
                                        if (string.IsNullOrEmpty(disk.SHA256) && !string.IsNullOrEmpty(diskDupe.SHA256))
                                            disk.SHA256 = diskDupe.SHA256;
                                    }

                                    if (newDatItem.ItemType == ItemType.Rom)
                                    {
                                        if (string.IsNullOrEmpty(rom.SHA256) && !string.IsNullOrEmpty(romDupe.SHA256))
                                            rom.SHA256 = romDupe.SHA256;
                                    }
                                }

                                if (updateFields.Contains(Field.SHA384))
                                {
                                    if (newDatItem.ItemType == ItemType.Disk)
                                    {
                                        if (string.IsNullOrEmpty(disk.SHA384) && !string.IsNullOrEmpty(diskDupe.SHA384))
                                            disk.SHA384 = diskDupe.SHA384;
                                    }

                                    if (newDatItem.ItemType == ItemType.Rom)
                                    {
                                        if (string.IsNullOrEmpty(rom.SHA384) && !string.IsNullOrEmpty(romDupe.SHA384))
                                            rom.SHA384 = romDupe.SHA384;
                                    }
                                }

                                if (updateFields.Contains(Field.SHA512))
                                {
                                    if (newDatItem.ItemType == ItemType.Disk)
                                    {
                                        if (string.IsNullOrEmpty(disk.SHA512) && !string.IsNullOrEmpty(diskDupe.SHA512))
                                            disk.SHA512 = diskDupe.SHA512;
                                    }

                                    if (newDatItem.ItemType == ItemType.Rom)
                                    {
                                        if (string.IsNullOrEmpty(rom.SHA512) && !string.IsNullOrEmpty(romDupe.SHA512))
                                            rom.SHA512 = romDupe.SHA512;
                                    }
                                }

                                #endregion

                                // Now assign the updated cast item back to the original
                                switch (newDatItem.ItemType)
                                {
                                    case ItemType.Archive:
                                        newDatItem = archive.Clone() as Archive;
                                        break;

                                    case ItemType.BiosSet:
                                        newDatItem = biosSet.Clone() as BiosSet;
                                        break;

                                    case ItemType.Blank:
                                        newDatItem = blank.Clone() as Blank;
                                        break;

                                    case ItemType.Disk:
                                        newDatItem = disk.Clone() as Disk;
                                        break;

                                    case ItemType.Release:
                                        newDatItem = release.Clone() as Release;
                                        break;

                                    case ItemType.Rom:
                                        newDatItem = rom.Clone() as Rom;
                                        break;

                                    case ItemType.Sample:
                                        newDatItem = sample.Clone() as Sample;
                                        break;
                                }
                            }

                            newDatItems.Add(newDatItem);
                        }

                        // Now add the new list to the key
                        intDat.Items.Remove(key);
                        intDat.Items.AddRange(key, newDatItems);
                    });
                }

                // If we are matching based on Machine fields of any sort
                if (updateFields.Intersect(machineFields).Any())
                {
                    // For comparison's sake, we want to use Machine Name as the base bucketing
                    Items.BucketBy(BucketedBy.Game, DedupeType.Full);
                    intDat.Items.BucketBy(BucketedBy.Game, DedupeType.None);

                    // Then we do a namewise comparison against the base DAT
                    Parallel.ForEach(intDat.Items.Keys, Globals.ParallelOptions, key =>
                    {
                        List<DatItem> datItems = intDat.Items[key];
                        List<DatItem> newDatItems = new List<DatItem>();
                        foreach (DatItem datItem in datItems)
                        {
                            DatItem newDatItem = datItem.Clone() as DatItem;
                            if (Items.ContainsKey(key) && Items[key].Count() > 0)
                            {
                                var firstDupe = Items[key][0];

                                if (updateFields.Contains(Field.MachineName))
                                    newDatItem.MachineName = firstDupe.MachineName;

                                if (updateFields.Contains(Field.Comment))
                                    newDatItem.Comment = firstDupe.Comment;

                                if (updateFields.Contains(Field.Description))
                                {
                                    if (!onlySame || (onlySame && newDatItem.MachineName == newDatItem.MachineDescription))
                                        newDatItem.MachineDescription = firstDupe.MachineDescription;
                                }

                                if (updateFields.Contains(Field.Year))
                                    newDatItem.Year = firstDupe.Year;

                                if (updateFields.Contains(Field.Manufacturer))
                                    newDatItem.Manufacturer = firstDupe.Manufacturer;

                                if (updateFields.Contains(Field.Publisher))
                                    newDatItem.Publisher = firstDupe.Publisher;

                                if (updateFields.Contains(Field.Category))
                                    newDatItem.Category = firstDupe.Category;

                                if (updateFields.Contains(Field.RomOf))
                                    newDatItem.RomOf = firstDupe.RomOf;

                                if (updateFields.Contains(Field.CloneOf))
                                    newDatItem.CloneOf = firstDupe.CloneOf;

                                if (updateFields.Contains(Field.SampleOf))
                                    newDatItem.SampleOf = firstDupe.SampleOf;

                                if (updateFields.Contains(Field.Supported))
                                    newDatItem.Supported = firstDupe.Supported;

                                if (updateFields.Contains(Field.SourceFile))
                                    newDatItem.SourceFile = firstDupe.SourceFile;

                                if (updateFields.Contains(Field.Runnable))
                                    newDatItem.Runnable = firstDupe.Runnable;

                                if (updateFields.Contains(Field.Board))
                                    newDatItem.Board = firstDupe.Board;

                                if (updateFields.Contains(Field.RebuildTo))
                                    newDatItem.RebuildTo = firstDupe.RebuildTo;

                                if (updateFields.Contains(Field.Devices))
                                    newDatItem.Devices = firstDupe.Devices;

                                if (updateFields.Contains(Field.SlotOptions))
                                    newDatItem.SlotOptions = firstDupe.SlotOptions;

                                if (updateFields.Contains(Field.Infos))
                                    newDatItem.Infos = firstDupe.Infos;

                                if (updateFields.Contains(Field.MachineType))
                                    newDatItem.MachineType = firstDupe.MachineType;
                            }

                            newDatItems.Add(newDatItem);
                        }

                        // Now add the new list to the key
                        intDat.Items.Remove(key);
                        intDat.Items.AddRange(key, newDatItems);
                    });
                }

                // Determine the output path for the DAT
                string interOutDir = path.GetOutputPath(outDir, inplace);

                // Once we're done, try writing out
                intDat.Write(interOutDir, overwrite: inplace);

                // Due to possible memory requirements, we force a garbage collection
                GC.Collect();
            }
        }
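
        // Illustrative call (the field choice is an example, not a recommendation):
        // with Field.MD5 and Field.SHA256 selected, items in each input that match
        // the base by hash have only their missing checksums filled in.
        //
        //     baseDat.BaseReplace(paths, "out", inplace: false, filter,
        //         new List<Field> { Field.MD5, Field.SHA256 }, onlySame: false);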

        /// <summary>
        /// Output diffs against a base set represented by the current DAT
        /// </summary>
        /// <param name="inputs">Names of the input files</param>
        /// <param name="outDir">Optional param for output directory</param>
        /// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
        internal void DiffAgainst(List<ParentablePath> inputs, string outDir, bool inplace)
        {
            // For comparison's sake, we want to use CRC as the base ordering
            Items.BucketBy(BucketedBy.CRC, DedupeType.Full);

            // Now we want to compare each input DAT against the base
            foreach (ParentablePath path in inputs)
            {
                Globals.Logger.User($"Comparing '{path.CurrentPath}' to base DAT");

                // First we parse in the DAT internally
                DatFile intDat = Create();
                intDat.Parse(path, 1, keep: true);

                // For comparison's sake, we want to use CRC as the base bucketing
                intDat.Items.BucketBy(BucketedBy.CRC, DedupeType.Full);

                // Then we do a hashwise comparison against the base DAT
                Parallel.ForEach(intDat.Items.Keys, Globals.ParallelOptions, key =>
                {
                    List<DatItem> datItems = intDat.Items[key];
                    List<DatItem> keepDatItems = new List<DatItem>();
                    foreach (DatItem datItem in datItems)
                    {
                        if (!Items.HasDuplicates(datItem, true))
                            keepDatItems.Add(datItem);
                    }

                    // Now add the new list to the key
                    intDat.Items.Remove(key);
                    intDat.Items.AddRange(key, keepDatItems);
                });

                // Determine the output path for the DAT
                string interOutDir = path.GetOutputPath(outDir, inplace);

                // Once we're done, try writing out
                intDat.Write(interOutDir, overwrite: inplace);

                // Due to possible memory requirements, we force a garbage collection
                GC.Collect();
            }
        }
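
        // Usage sketch (paths assumed): after this call each input is rewritten to
        // contain only the items that had no hash match in the base DAT.
        //
        //     baseDat.DiffAgainst(paths, "out", inplace: false);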

        /// <summary>
        /// Output cascading diffs
        /// </summary>
        /// <param name="inputs">List of inputs to write out from</param>
        /// <param name="datHeaders">Dat headers used optionally</param>
        /// <param name="outDir">Output directory to write the DATs to</param>
        /// <param name="inplace">True if cascaded diffs are output in-place, false otherwise</param>
        /// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
        internal void DiffCascade(
            List<ParentablePath> inputs,
            List<DatHeader> datHeaders,
            string outDir,
            bool inplace,
            bool skip)
        {
            // Create a list of DatData objects representing output files
            List<DatFile> outDats = new List<DatFile>();

            // Loop through each of the inputs and get or create a new DatData object
            InternalStopwatch watch = new InternalStopwatch("Initializing all output DATs");

            DatFile[] outDatsArray = new DatFile[inputs.Count];
            Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
            {
                string innerpost = $" ({j} - {inputs[j].GetNormalizedFileName(true)} Only)";
                DatFile diffData;

                // If we're in inplace mode or the output directory is set, take the appropriate DatData object already stored
                if (inplace || outDir != Environment.CurrentDirectory)
                {
                    diffData = Create(datHeaders[j]);
                }
                else
                {
                    diffData = Create(Header);
                    diffData.Header.FileName += innerpost;
                    diffData.Header.Name += innerpost;
                    diffData.Header.Description += innerpost;
                }

                diffData.Items = new ItemDictionary();
                outDatsArray[j] = diffData;
            });

            outDats = outDatsArray.ToList();
            watch.Stop();

            // Then, ensure that the internal dat can be bucketed in the best possible way
            Items.BucketBy(BucketedBy.CRC, DedupeType.None);

            // Now, loop through the dictionary and populate the correct DATs
            watch.Start("Populating all output DATs");

            Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
            {
                List<DatItem> items = DatItem.Merge(Items[key]);

                // If the rom list is empty or null, just skip it
                if (items == null || items.Count == 0)
                    return;

                foreach (DatItem item in items)
                {
                    // There are odd cases where there are items with System ID < 0. Skip them for now
                    if (item.IndexId < 0)
                    {
                        Globals.Logger.Warning($"Item found with a <0 SystemID: {item.Name}");
                        continue;
                    }

                    outDats[item.IndexId].Items.Add(key, item);
                }
            });

            watch.Stop();

            // Finally, loop through and output each of the DATs
            watch.Start("Outputting all created DATs");

            Parallel.For((skip ? 1 : 0), inputs.Count, Globals.ParallelOptions, j =>
            {
                string path = inputs[j].GetOutputPath(outDir, inplace);

                // Try to output the file
                outDats[j].Write(path, overwrite: inplace);
            });

            watch.Stop();
        }
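
        // Sketch of the cascade semantics (inputs assumed already merged into this
        // DatFile via PopulateUserData): output slot j receives the items first
        // contributed by input j, so each written DAT holds that input's "new" entries.
        //
        //     List<DatHeader> headers = datFile.PopulateUserData(paths, filter);
        //     datFile.DiffCascade(paths, headers, "out", inplace: false, skip: true);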

        /// <summary>
        /// Output non-cascading diffs
        /// </summary>
        /// <param name="inputs">List of inputs to write out from</param>
        /// <param name="outDir">Output directory to write the DATs to</param>
        /// <param name="diff">Non-zero flag for diffing mode, zero otherwise</param>
        internal void DiffNoCascade(List<ParentablePath> inputs, string outDir, UpdateMode diff)
        {
            InternalStopwatch watch = new InternalStopwatch("Initializing all output DATs");

            // Default vars for use
            string post = string.Empty;
            DatFile outerDiffData = Create();
            DatFile dupeData = Create();

            // Fill in any information not in the base DAT
            if (string.IsNullOrWhiteSpace(Header.FileName))
                Header.FileName = "All DATs";

            if (string.IsNullOrWhiteSpace(Header.Name))
                Header.Name = "All DATs";

            if (string.IsNullOrWhiteSpace(Header.Description))
                Header.Description = "All DATs";

            // Don't have External dupes
            if (diff.HasFlag(UpdateMode.DiffNoDupesOnly))
            {
                post = " (No Duplicates)";
                outerDiffData = Create(Header);
                outerDiffData.Header.FileName += post;
                outerDiffData.Header.Name += post;
                outerDiffData.Header.Description += post;
                outerDiffData.Items = new ItemDictionary();
            }

            // Have External dupes
            if (diff.HasFlag(UpdateMode.DiffDupesOnly))
            {
                post = " (Duplicates)";
                dupeData = Create(Header);
                dupeData.Header.FileName += post;
                dupeData.Header.Name += post;
                dupeData.Header.Description += post;
                dupeData.Items = new ItemDictionary();
            }

            // Create a list of DatData objects representing individual output files
            List<DatFile> outDats = new List<DatFile>();

            // Loop through each of the inputs and get or create a new DatData object
            if (diff.HasFlag(UpdateMode.DiffIndividualsOnly))
            {
                DatFile[] outDatsArray = new DatFile[inputs.Count];

                Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
                {
                    string innerpost = $" ({j} - {inputs[j].GetNormalizedFileName(true)} Only)";
                    DatFile diffData = Create(Header);
                    diffData.Header.FileName += innerpost;
                    diffData.Header.Name += innerpost;
                    diffData.Header.Description += innerpost;
                    diffData.Items = new ItemDictionary();
                    outDatsArray[j] = diffData;
                });

                outDats = outDatsArray.ToList();
            }

            watch.Stop();

            // Now, loop through the dictionary and populate the correct DATs
            watch.Start("Populating all output DATs");

            Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
            {
                List<DatItem> items = DatItem.Merge(Items[key]);

                // If the rom list is empty or null, just skip it
                if (items == null || items.Count == 0)
                    return;

                // Loop through and add the items correctly
                foreach (DatItem item in items)
                {
                    // No duplicates
                    if (diff.HasFlag(UpdateMode.DiffNoDupesOnly) || diff.HasFlag(UpdateMode.DiffIndividualsOnly))
                    {
                        if (item.DupeType.HasFlag(DupeType.Internal) || item.DupeType == 0x00)
                        {
                            // Individual DATs that are output
                            if (diff.HasFlag(UpdateMode.DiffIndividualsOnly))
                                outDats[item.IndexId].Items.Add(key, item);

                            // Merged no-duplicates DAT
                            if (diff.HasFlag(UpdateMode.DiffNoDupesOnly))
                            {
                                DatItem newrom = item.Clone() as DatItem;
                                newrom.MachineName += $" ({Path.GetFileNameWithoutExtension(inputs[item.IndexId].CurrentPath)})";

                                outerDiffData.Items.Add(key, newrom);
                            }
                        }
                    }

                    // Duplicates only
                    if (diff.HasFlag(UpdateMode.DiffDupesOnly))
                    {
                        if (item.DupeType.HasFlag(DupeType.External))
                        {
                            DatItem newrom = item.Clone() as DatItem;
                            newrom.MachineName += $" ({Path.GetFileNameWithoutExtension(inputs[item.IndexId].CurrentPath)})";

                            dupeData.Items.Add(key, newrom);
                        }
                    }
                }
            });

            watch.Stop();

            // Finally, loop through and output each of the DATs
            watch.Start("Outputting all created DATs");

            // Output the difflist (a-b)+(b-a) diff
            if (diff.HasFlag(UpdateMode.DiffNoDupesOnly))
                outerDiffData.Write(outDir, overwrite: false);

            // Output the (ab) diff
            if (diff.HasFlag(UpdateMode.DiffDupesOnly))
                dupeData.Write(outDir, overwrite: false);

            // Output the individual (a-b) DATs
            if (diff.HasFlag(UpdateMode.DiffIndividualsOnly))
            {
                Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
                {
                    string path = inputs[j].GetOutputPath(outDir, false /* inplace */);

                    // Try to output the file
                    outDats[j].Write(path, overwrite: false);
                });
            }

            watch.Stop();
        }
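
        // Hedged example (combining flags is an assumption based on the HasFlag
        // checks above): the modes can be OR'd to produce the (a-b)+(b-a) DAT, the
        // duplicates DAT, and the per-input DATs in a single pass over the items.
        //
        //     datFile.DiffNoCascade(paths, "out",
        //         UpdateMode.DiffNoDupesOnly | UpdateMode.DiffDupesOnly | UpdateMode.DiffIndividualsOnly);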

        /// <summary>
        /// Output user defined merge
        /// </summary>
        /// <param name="inputs">List of inputs to write out from</param>
        /// <param name="outDir">Output directory to write the DATs to</param>
        internal void MergeNoDiff(List<ParentablePath> inputs, string outDir)
        {
            // If we're in SuperDAT mode, prefix all games with their respective DATs
            if (Header.Type == "SuperDAT")
            {
                Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
                {
                    List<DatItem> items = Items[key].ToList();
                    List<DatItem> newItems = new List<DatItem>();
                    foreach (DatItem item in items)
                    {
                        DatItem newItem = item;
                        string filename = inputs[newItem.IndexId].CurrentPath;
                        string rootpath = inputs[newItem.IndexId].ParentPath;

                        rootpath += (string.IsNullOrWhiteSpace(rootpath) ? string.Empty : Path.DirectorySeparatorChar.ToString());
                        filename = filename.Remove(0, rootpath.Length);
                        newItem.MachineName = Path.GetDirectoryName(filename) + Path.DirectorySeparatorChar
                            + Path.GetFileNameWithoutExtension(filename) + Path.DirectorySeparatorChar
                            + newItem.MachineName;

                        newItems.Add(newItem);
                    }

                    Items.Remove(key);
                    Items.AddRange(key, newItems);
                });
            }

            // Try to output the file
            Write(outDir, overwrite: false);
        }
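
        // Worked example of the SuperDAT prefix above (paths invented): an item
        // from machine "game" parsed out of "C:\dats\set\file.dat" with parent
        // "C:\dats" would be renamed to "set\file\game" before the merged DAT
        // is written.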

        /// <summary>
        /// Populate the user DatData object from the input files
        /// </summary>
        /// <param name="inputs">Paths to DATs to parse</param>
        /// <param name="filter">Filter object to be passed to the DatItem level</param>
        /// <returns>List of DatData objects representing headers</returns>
        internal List<DatHeader> PopulateUserData(List<ParentablePath> inputs, Filter filter)
        {
            DatFile[] datFiles = new DatFile[inputs.Count];
            InternalStopwatch watch = new InternalStopwatch("Processing individual DATs");

            // Parse all of the DATs into their own DatFiles in the array
            Parallel.For(0, inputs.Count, Globals.ParallelOptions, i =>
            {
                var input = inputs[i];
                Globals.Logger.User($"Adding DAT: {input.CurrentPath}");
                datFiles[i] = Create(Header.CloneFiltering());
                datFiles[i].Parse(input, i, keep: true);
            });

            watch.Stop();

            watch.Start("Populating internal DAT");
            for (int i = 0; i < inputs.Count; i++)
            {
                AddFromExisting(datFiles[i], true);
            }

            // Now that we have a merged DAT, filter it
            filter.FilterDatFile(this, false /* useTags */);

            watch.Stop();

            return datFiles.Select(d => d.Header).ToList();
        }

        /// <summary>
        /// Convert, update, and filter a DAT file or set of files
        /// </summary>
        /// <param name="inputs">Names of the input files and/or folders</param>
        /// <param name="outDir">Optional param for output directory</param>
        /// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
        /// <param name="filter">Filter object to be passed to the DatItem level</param>
        internal void Update(List<ParentablePath> inputs, string outDir, bool inplace, Filter filter)
        {
            // Iterate over the files
            foreach (ParentablePath file in inputs)
            {
                DatFile innerDatdata = Create(Header);
                Globals.Logger.User($"Processing '{Path.GetFileName(file.CurrentPath)}'");
                innerDatdata.Parse(file, keep: true,
                    keepext: innerDatdata.Header.DatFormat.HasFlag(DatFormat.TSV)
                        || innerDatdata.Header.DatFormat.HasFlag(DatFormat.CSV)
                        || innerDatdata.Header.DatFormat.HasFlag(DatFormat.SSV));
                filter.FilterDatFile(innerDatdata, false /* useTags */);

                // Get the correct output path
                string realOutDir = file.GetOutputPath(outDir, inplace);

                // Try to output the file, overwriting only if it's not in the current directory
                innerDatdata.Write(realOutDir, overwrite: inplace);
            }
        }

        #endregion

        #region Parsing

        /// <summary>
        /// Create a DatFile and parse a file into it
        /// </summary>
        /// <param name="filename">Name of the file to be parsed</param>
        public static DatFile CreateAndParse(string filename)
        {
            DatFile datFile = Create();
            datFile.Parse(new ParentablePath(filename));
            return datFile;
        }
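
        // Minimal sketch (file name assumed): format detection happens inside
        // Parse, so the caller never names a DatFormat explicitly.
        //
        //     DatFile parsed = DatFile.CreateAndParse("Example.dat");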

        /// <summary>
        /// Parse a DAT and return all found games and roms within
        /// </summary>
        /// <param name="filename">Name of the file to be parsed</param>
        /// <param name="indexId">Index ID for the DAT</param>
        /// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param>
        /// <param name="keepext">True if original extension should be kept, false otherwise (default)</param>
        public void Parse(string filename, int indexId = 0, bool keep = false, bool keepext = false)
        {
            ParentablePath path = new ParentablePath(filename);
            Parse(path, indexId, keep, keepext);
        }

        /// <summary>
        /// Parse a DAT and return all found games and roms within
        /// </summary>
        /// <param name="filename">Name of the file to be parsed</param>
        /// <param name="indexId">Index ID for the DAT</param>
        /// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param>
        /// <param name="keepext">True if original extension should be kept, false otherwise (default)</param>
        public void Parse(ParentablePath filename, int indexId = 0, bool keep = false, bool keepext = false)
        {
            // Get the current path from the filename
            string currentPath = filename.CurrentPath;

            // Check the file extension first as a safeguard
            if (!PathExtensions.HasValidDatExtension(currentPath))
                return;

            // If the output filename isn't set already, get the internal filename
            Header.FileName = (string.IsNullOrWhiteSpace(Header.FileName) ? (keepext ? Path.GetFileName(currentPath) : Path.GetFileNameWithoutExtension(currentPath)) : Header.FileName);

            // If the output type isn't set already, get the internal output type
            Header.DatFormat = (Header.DatFormat == 0 ? currentPath.GetDatFormat() : Header.DatFormat);
            Items.SetBucketedBy(BucketedBy.CRC); // Setting this because it can reduce issues later

            // Now parse the correct type of DAT
            try
            {
                Create(currentPath.GetDatFormat(), this)?.ParseFile(currentPath, indexId, keep);
            }
            catch (Exception ex)
            {
                Globals.Logger.Error($"Error with file '{filename}': {ex}");
            }
        }

        /// <summary>
        /// Add a rom to the Dat after checking
        /// </summary>
        /// <param name="item">Item data to check against</param>
        /// <returns>The key for the item</returns>
        protected string ParseAddHelper(DatItem item)
        {
            string key = string.Empty;

            // If there's no name in the rom, we log and skip it
            if (item.Name == null)
            {
                Globals.Logger.Warning($"{Header.FileName}: Rom with no name found! Skipping...");
                return key;
            }

            // If we have a Rom or a Disk, clean the hash data
            if (item.ItemType == ItemType.Rom)
            {
                Rom itemRom = (Rom)item;

                // If we have the case where there is SHA-1 and nothing else, we don't fill in any other part of the data
                if (itemRom.Size == -1
                    && string.IsNullOrWhiteSpace(itemRom.CRC)
                    && string.IsNullOrWhiteSpace(itemRom.MD5)
#if NET_FRAMEWORK
                    && string.IsNullOrWhiteSpace(itemRom.RIPEMD160)
#endif
                    && !string.IsNullOrWhiteSpace(itemRom.SHA1)
                    && string.IsNullOrWhiteSpace(itemRom.SHA256)
                    && string.IsNullOrWhiteSpace(itemRom.SHA384)
                    && string.IsNullOrWhiteSpace(itemRom.SHA512))
                {
                    // No-op, just catch it so it doesn't go further
                    Globals.Logger.Verbose($"{Header.FileName}: Entry with only SHA-1 found - '{itemRom.Name}'");
                }

                // If we have a rom and it's missing size AND the hashes match a 0-byte file, fill in the rest of the info
                else if ((itemRom.Size == 0 || itemRom.Size == -1)
                    && ((itemRom.CRC == Constants.CRCZero || string.IsNullOrWhiteSpace(itemRom.CRC))
                        || itemRom.MD5 == Constants.MD5Zero
#if NET_FRAMEWORK
                        || itemRom.RIPEMD160 == Constants.RIPEMD160Zero
#endif
                        || itemRom.SHA1 == Constants.SHA1Zero
                        || itemRom.SHA256 == Constants.SHA256Zero
                        || itemRom.SHA384 == Constants.SHA384Zero
                        || itemRom.SHA512 == Constants.SHA512Zero))
                {
                    // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
                    itemRom.Size = Constants.SizeZero;
                    itemRom.CRC = Constants.CRCZero;
                    itemRom.MD5 = Constants.MD5Zero;
#if NET_FRAMEWORK
                    itemRom.RIPEMD160 = null;
                    //itemRom.RIPEMD160 = Constants.RIPEMD160Zero;
#endif
                    itemRom.SHA1 = Constants.SHA1Zero;
                    itemRom.SHA256 = null;
                    //itemRom.SHA256 = Constants.SHA256Zero;
                    itemRom.SHA384 = null;
                    //itemRom.SHA384 = Constants.SHA384Zero;
                    itemRom.SHA512 = null;
                    //itemRom.SHA512 = Constants.SHA512Zero;
                }

                // If the file has no size and it's not the above case, log it and mark it as a nodump
                else if (itemRom.ItemStatus != ItemStatus.Nodump && (itemRom.Size == 0 || itemRom.Size == -1))
                {
                    Globals.Logger.Verbose($"{Header.FileName}: Incomplete entry for '{itemRom.Name}' will be output as nodump");
                    itemRom.ItemStatus = ItemStatus.Nodump;
                }

                // If the file has a size but absolutely no hashes, log it and mark it as a nodump
                else if (itemRom.ItemStatus != ItemStatus.Nodump
                    && itemRom.Size > 0
                    && string.IsNullOrWhiteSpace(itemRom.CRC)
                    && string.IsNullOrWhiteSpace(itemRom.MD5)
#if NET_FRAMEWORK
                    && string.IsNullOrWhiteSpace(itemRom.RIPEMD160)
#endif
                    && string.IsNullOrWhiteSpace(itemRom.SHA1)
                    && string.IsNullOrWhiteSpace(itemRom.SHA256)
                    && string.IsNullOrWhiteSpace(itemRom.SHA384)
                    && string.IsNullOrWhiteSpace(itemRom.SHA512))
                {
                    Globals.Logger.Verbose($"{Header.FileName}: Incomplete entry for '{itemRom.Name}' will be output as nodump");
                    itemRom.ItemStatus = ItemStatus.Nodump;
                }

                item = itemRom;
            }
            else if (item.ItemType == ItemType.Disk)
            {
                Disk itemDisk = (Disk)item;

                // If the file has absolutely no hashes, log it and mark it as a nodump
                if (itemDisk.ItemStatus != ItemStatus.Nodump
                    && string.IsNullOrWhiteSpace(itemDisk.MD5)
#if NET_FRAMEWORK
                    && string.IsNullOrWhiteSpace(itemDisk.RIPEMD160)
#endif
                    && string.IsNullOrWhiteSpace(itemDisk.SHA1)
                    && string.IsNullOrWhiteSpace(itemDisk.SHA256)
                    && string.IsNullOrWhiteSpace(itemDisk.SHA384)
                    && string.IsNullOrWhiteSpace(itemDisk.SHA512))
                {
                    Globals.Logger.Verbose($"Incomplete entry for '{itemDisk.Name}' will be output as nodump");
                    itemDisk.ItemStatus = ItemStatus.Nodump;
                }

                item = itemDisk;
            }

            // Get the key and add the file
            key = item.GetKey(BucketedBy.CRC);
            Items.Add(key, item);

            return key;
        }
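
        // Worked example of the zero-byte normalization above (values come from the
        // Constants class): a parsed rom with Size == 0 and CRC == Constants.CRCZero
        // leaves this helper with the zero-byte size, CRC, MD5, and SHA-1 filled in,
        // and the deeper hashes nulled until deep-hash support lands (see TODO).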

        /// <summary>
        /// Parse DatFile and return all found games and roms within
        /// </summary>
        /// <param name="filename">Name of the file to be parsed</param>
        /// <param name="indexId">Index ID for the DAT</param>
        /// <param name="keep">True if full pathnames are to be kept, false otherwise (default)</param>
        protected abstract void ParseFile(string filename, int indexId, bool keep);

        #endregion

        #region Populate DAT from Directory

        /// <summary>
        /// Create a new Dat from a directory
        /// </summary>
        /// <param name="basePath">Base folder to be used in creating the DAT</param>
        /// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
        /// <param name="bare">True if the date should be omitted from the DAT, false otherwise</param>
        /// <param name="archivesAsFiles">True if archives should be treated as files, false otherwise</param>
        /// <param name="skipFileType">Type of files that should be skipped</param>
        /// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
        /// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
        /// <param name="tempDir">Name of the directory to create a temp folder in (blank is current directory)</param>
        /// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
        /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
        /// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
        /// <param name="filter">Filter object to be passed to the DatItem level</param>
        /// <param name="useTags">True if DatFile tags override splitting, false otherwise</param>
        public bool PopulateFromDir(
            string basePath,
            Hash omitFromScan,
            bool bare,
            bool archivesAsFiles,
            SkipFileType skipFileType,
            bool addBlanks,
            bool addDate,
            string tempDir,
            bool copyFiles,
            string headerToCheckAgainst,
            bool chdsAsFiles,
            Filter filter,
            bool useTags = false)
        {
            // If the description is defined but not the name, set the name from the description
            if (string.IsNullOrWhiteSpace(Header.Name) && !string.IsNullOrWhiteSpace(Header.Description))
            {
                Header.Name = Header.Description;
            }

            // If the name is defined but not the description, set the description from the name
            else if (!string.IsNullOrWhiteSpace(Header.Name) && string.IsNullOrWhiteSpace(Header.Description))
            {
                Header.Description = Header.Name + (bare ? string.Empty : $" ({Header.Date})");
            }

            // If neither the name nor the description are defined, set them from the automatic values
            else if (string.IsNullOrWhiteSpace(Header.Name) && string.IsNullOrWhiteSpace(Header.Description))
            {
                string[] splitpath = basePath.TrimEnd(Path.DirectorySeparatorChar).Split(Path.DirectorySeparatorChar);
                Header.Name = splitpath.Last();
                Header.Description = Header.Name + (bare ? string.Empty : $" ({Header.Date})");
            }

            // Clean the temp directory path
            tempDir = DirectoryExtensions.Ensure(tempDir, temp: true);

            // Process the input
            if (Directory.Exists(basePath))
            {
                Globals.Logger.Verbose($"Folder found: {basePath}");

                // Process the files in the main folder or any subfolder
                List<string> files = Directory.EnumerateFiles(basePath, "*", SearchOption.AllDirectories).ToList();
                Parallel.ForEach(files, Globals.ParallelOptions, item =>
                {
                    CheckFileForHashes(item, basePath, omitFromScan, archivesAsFiles, skipFileType,
                        addBlanks, addDate, tempDir, copyFiles, headerToCheckAgainst, chdsAsFiles);
                });

                // Now find all folders that are empty, if we are supposed to
                if (!Header.Romba && addBlanks)
                {
                    List<string> empties = DirectoryExtensions.ListEmpty(basePath);
                    Parallel.ForEach(empties, Globals.ParallelOptions, dir =>
                    {
                        // Get the full path for the directory
                        string fulldir = Path.GetFullPath(dir);

                        // Set the temporary variables
                        string gamename = string.Empty;
                        string romname = string.Empty;

                        // If we have a SuperDAT, we want anything that's not the base path as the game, and the file as the rom
                        if (Header.Type == "SuperDAT")
                        {
                            gamename = fulldir.Remove(0, basePath.Length + 1);
                            romname = "_";
                        }

                        // Otherwise, we want just the top level folder as the game, and the file as everything else
                        else
                        {
                            gamename = fulldir.Remove(0, basePath.Length + 1).Split(Path.DirectorySeparatorChar)[0];
                            romname = Path.Combine(fulldir.Remove(0, basePath.Length + 1 + gamename.Length), "_");
                        }

                        // Sanitize the names
                        gamename = gamename.Trim(Path.DirectorySeparatorChar);
                        romname = romname.Trim(Path.DirectorySeparatorChar);

                        Globals.Logger.Verbose($"Adding blank empty folder: {gamename}");
                        Items["null"].Add(new Rom(romname, gamename));
                    });
                }
            }
            else if (File.Exists(basePath))
            {
                CheckFileForHashes(basePath, Path.GetDirectoryName(Path.GetDirectoryName(basePath)), omitFromScan, archivesAsFiles,
                    skipFileType, addBlanks, addDate, tempDir, copyFiles, headerToCheckAgainst, chdsAsFiles);
            }

            // Now that we're done, delete the temp folder (if it's not the default)
            Globals.Logger.User("Cleaning temp folder");
            if (tempDir != Path.GetTempPath())
                DirectoryExtensions.TryDelete(tempDir);

            // If we have a valid filter, perform the filtering now
            if (filter != null && filter != default(Filter))
                filter.FilterDatFile(this, useTags);

            return true;
        }
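
        // Usage sketch (directory and option values are placeholders; SkipFileType.None
        // is an assumed enum member): scan a folder, compute the standard hashes
        // (deep hashes omitted), descend into archives, and add blanks for empty dirs.
        //
        //     datFile.PopulateFromDir(@"C:\roms", Hash.DeepHashes, bare: false,
        //         archivesAsFiles: false, SkipFileType.None, addBlanks: true,
        //         addDate: false, tempDir: string.Empty, copyFiles: false,
        //         headerToCheckAgainst: null, chdsAsFiles: true, filter);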
|
|
|
|
/// <summary>
|
|
/// Check a given file for hashes, based on current settings
|
|
/// </summary>
|
|
/// <param name="item">Filename of the item to be checked</param>
|
|
/// <param name="basePath">Base folder to be used in creating the DAT</param>
|
|
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
|
|
/// <param name="archivesAsFiles">True if archives should be treated as files, false otherwise</param>
|
|
/// <param name="skipFileType">Type of files that should be skipped</param>
|
|
/// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
|
|
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
|
|
/// <param name="tempDir">Name of the directory to create a temp folder in (blank is current directory)</param>
|
|
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
|
|
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
|
|
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
|
|
private void CheckFileForHashes(
|
|
string item,
|
|
string basePath,
|
|
Hash omitFromScan,
|
|
bool archivesAsFiles,
|
|
SkipFileType skipFileType,
|
|
bool addBlanks,
|
|
bool addDate,
|
|
string tempDir,
|
|
bool copyFiles,
|
|
string headerToCheckAgainst,
|
|
bool chdsAsFiles)
|
|
{
|
|
// Special case for if we are in Romba mode (all names are supposed to be SHA-1 hashes)
|
|
if (Header.Romba)
|
|
{
|
|
GZipArchive gzarc = new GZipArchive(item);
|
|
BaseFile baseFile = gzarc.GetTorrentGZFileInfo();
|
|
|
|
// If the rom is valid, write it out
|
|
if (baseFile != null && baseFile.Filename != null)
|
|
{
|
|
// Add the list if it doesn't exist already
|
|
Rom rom = new Rom(baseFile);
|
|
Items.Add(rom.GetKey(BucketedBy.CRC), rom);
|
|
Globals.Logger.User($"File added: {Path.GetFileNameWithoutExtension(item)}{Environment.NewLine}");
|
|
}
|
|
else
|
|
{
|
|
Globals.Logger.User($"File not added: {Path.GetFileNameWithoutExtension(item)}{Environment.NewLine}");
|
|
return;
|
|
}
|
|
|
|
return;
|
|
}
|
|
|
|
// If we're copying files, copy it first and get the new filename
|
|
string newItem = item;
|
|
string newBasePath = basePath;
|
|
if (copyFiles)
|
|
{
|
|
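                // A GUID-named subfolder should keep concurrent copies from
                // colliding inside the shared temp directory (an inference from
                // the code, not documented behavior)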
                newBasePath = Path.Combine(tempDir, Guid.NewGuid().ToString());
                newItem = Path.GetFullPath(Path.Combine(newBasePath, Path.GetFullPath(item).Remove(0, basePath.Length + 1)));
                DirectoryExtensions.TryCreateDirectory(Path.GetDirectoryName(newItem));
                File.Copy(item, newItem, true);
            }

            // Initialize possible archive variables
            BaseArchive archive = BaseArchive.Create(newItem);
            List<BaseFile> extracted = null;

            // If we have an archive and we're supposed to scan it
            if (archive != null && !archivesAsFiles)
                extracted = archive.GetChildren(omitFromScan: omitFromScan, date: addDate);
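            // From here on, a null extracted list means the item is being treated
            // as a single file; a non-null list means it was scanned as an archive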
            // If the file should be skipped based on type, do so now
            if ((extracted != null && skipFileType == SkipFileType.Archive)
                || (extracted == null && skipFileType == SkipFileType.File))
            {
                return;
            }

            // If the extracted list is null, just scan the item itself
            if (extracted == null)
            {
                ProcessFile(newItem, string.Empty, newBasePath, omitFromScan, addDate, headerToCheckAgainst, chdsAsFiles);
            }
            // Otherwise, add all of the found items
            else
            {
                // First take care of the found items
                Parallel.ForEach(extracted, Globals.ParallelOptions, rom =>
                {
                    DatItem datItem = DatItem.Create(rom);
                    ProcessFileHelper(newItem,
                        datItem,
                        basePath,
                        (Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item));
                });

                // Then, if we're looking for blanks, get all of the blank folders and add them
                if (addBlanks)
                {
                    List<string> empties = new List<string>();

                    // Now get all blank folders from the archive
                    if (archive != null)
                        empties = archive.GetEmptyFolders();

                    // Add all of the found empties to the DAT
                    Parallel.ForEach(empties, Globals.ParallelOptions, empty =>
                    {
                        Rom emptyRom = new Rom(Path.Combine(empty, "_"), newItem);
                        ProcessFileHelper(newItem,
                            emptyRom,
                            basePath,
                            (Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item));
                    });
                }
            }

            // Cue to delete the file if it's a copy
            if (copyFiles && item != newItem)
                DirectoryExtensions.TryDelete(newBasePath);
        }

        /// <summary>
        /// Process a single file as a file
        /// </summary>
        /// <param name="item">File to be added</param>
        /// <param name="parent">Parent game to be used</param>
        /// <param name="basePath">Path that represents the parent directory</param>
        /// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
        /// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
        /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
        /// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
        private void ProcessFile(
            string item,
            string parent,
            string basePath,
            Hash omitFromScan,
            bool addDate,
            string headerToCheckAgainst,
            bool chdsAsFiles)
        {
            Globals.Logger.Verbose($"'{Path.GetFileName(item)}' treated like a file");
            BaseFile baseFile = FileExtensions.GetInfo(item, omitFromScan: omitFromScan, date: addDate, header: headerToCheckAgainst, chdsAsFiles: chdsAsFiles);
            ProcessFileHelper(item, DatItem.Create(baseFile), basePath, parent);
        }

        /// <summary>
        /// Process a single file as a file (with found Rom data)
        /// </summary>
        /// <param name="item">File to be added</param>
        /// <param name="datItem">Rom data to be used to write to file</param>
        /// <param name="basepath">Path that represents the parent directory</param>
        /// <param name="parent">Parent game to be used</param>
        private void ProcessFileHelper(string item, DatItem datItem, string basepath, string parent)
        {
            // If we somehow got something other than a Rom or Disk, cancel out
            if (datItem.ItemType != ItemType.Rom && datItem.ItemType != ItemType.Disk)
                return;

            try
            {
                // If the basepath doesn't end with a directory separator, add it
                if (!basepath.EndsWith(Path.DirectorySeparatorChar.ToString()))
                    basepath += Path.DirectorySeparatorChar.ToString();

                // Make sure we have the full item path
                item = Path.GetFullPath(item);

                // Process the item to sanitize names based on input
                SetDatItemInfo(datItem, item, parent, basepath);

                // Add the file information to the DAT
                string key = datItem.GetKey(BucketedBy.CRC);
                Items.Add(key, datItem);

                Globals.Logger.User($"File added: {datItem.Name}{Environment.NewLine}");
            }
            catch (IOException ex)
            {
                Globals.Logger.Error(ex.ToString());
                return;
            }
        }

        /// <summary>
        /// Set proper Game and Rom names from user inputs
        /// </summary>
        /// <param name="datItem">DatItem representing the input file</param>
        /// <param name="item">Item name to use</param>
        /// <param name="parent">Parent name to use</param>
        /// <param name="basepath">Base path to use</param>
        private void SetDatItemInfo(DatItem datItem, string item, string parent, string basepath)
        {
            // Get the data to be added as game and item names
            string gamename, romname;
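            // For example, with basepath "C:\roms\", item "C:\roms\a\b\file.bin",
            // and no parent: a normal DAT yields gamename "a" and romname
            // "b\file.bin", while a SuperDAT yields gamename "a\b" and romname
            // "file.bin" (derived from the branches below)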
            // If the parent is blank, then we have a non-archive file
            if (string.IsNullOrWhiteSpace(parent))
            {
                // If we have a SuperDAT, we want anything that's not the base path as the game, and the file as the rom
                if (Header.Type == "SuperDAT")
                {
                    gamename = Path.GetDirectoryName(item.Remove(0, basepath.Length));
                    romname = Path.GetFileName(item);
                }

                // Otherwise, we want just the top level folder as the game, and the file as everything else
                else
                {
                    gamename = item.Remove(0, basepath.Length).Split(Path.DirectorySeparatorChar)[0];
                    romname = item.Remove(0, Path.Combine(basepath, gamename).Length);
                }
            }

            // Otherwise, we assume that we have an archive: the archive name is
            // the game and the entry name is the rom, for SuperDATs and normal
            // DATs alike
            else
            {
                gamename = parent;
                romname = datItem.Name;
            }

            // Sanitize the names
            gamename = gamename.Trim(Path.DirectorySeparatorChar);
            romname = romname?.Trim(Path.DirectorySeparatorChar) ?? string.Empty;

            if (!string.IsNullOrWhiteSpace(gamename) && string.IsNullOrWhiteSpace(romname))
            {
                romname = gamename;
                gamename = "Default";
            }

            // Update rom information
            datItem.Name = romname;
            datItem.MachineName = gamename;
            datItem.MachineDescription = gamename;

            // If we have a Disk, then the ".chd" extension needs to be removed
            if (datItem.ItemType == ItemType.Disk)
                datItem.Name = datItem.Name.Replace(".chd", string.Empty);
        }

        #endregion

        #region Rebuilding and Verifying

        /// <summary>
        /// Process the DAT and find all matches in input files and folders assuming they're a depot
        /// </summary>
        /// <param name="inputs">List of input files/folders to check</param>
        /// <param name="outDir">Output directory to use to build to</param>
        /// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
        /// <param name="delete">True if input files should be deleted, false otherwise</param>
        /// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
        /// <param name="outputFormat">Output format that files should be written to</param>
        /// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
        /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
        /// <returns>True if rebuilding was a success, false otherwise</returns>
        public bool RebuildDepot(
            List<string> inputs,
            string outDir,
            bool date,
            bool delete,
            bool inverse,
            OutputFormat outputFormat,
            bool updateDat,
            string headerToCheckAgainst)
        {
            #region Perform setup

            // If the DAT is not populated and inverse is not set, inform the user and quit
            if (Items.TotalCount == 0 && !inverse)
            {
                Globals.Logger.User("No entries were found to rebuild, exiting...");
                return false;
            }

            // Check that the output directory exists
            outDir = DirectoryExtensions.Ensure(outDir, create: true);

            // Now we want to get forcepack flag if it's not overridden
            if (outputFormat == OutputFormat.Folder && Header.ForcePacking != ForcePacking.None)
            {
                switch (Header.ForcePacking)
                {
                    case ForcePacking.Zip:
                        outputFormat = OutputFormat.TorrentZip;
                        break;
                    case ForcePacking.Unzip:
                        outputFormat = OutputFormat.Folder;
                        break;
                }
            }

            // Preload the Skipper list
            Transform.Init();

            #endregion

            bool success = true;

            #region Rebuild from depots in order

            string format = string.Empty;
            switch (outputFormat)
            {
                case OutputFormat.Folder:
                    format = "directory";
                    break;
                case OutputFormat.TapeArchive:
                    format = "TAR";
                    break;
                case OutputFormat.Torrent7Zip:
                    format = "Torrent7Z";
                    break;
                case OutputFormat.TorrentGzip:
                case OutputFormat.TorrentGzipRomba:
                    format = "TorrentGZ";
                    break;
                case OutputFormat.TorrentLRZip:
                    format = "TorrentLRZ";
                    break;
                case OutputFormat.TorrentRar:
                    format = "TorrentRAR";
                    break;
                case OutputFormat.TorrentXZ:
                case OutputFormat.TorrentXZRomba:
                    format = "TorrentXZ";
                    break;
                case OutputFormat.TorrentZip:
                    format = "TorrentZip";
                    break;
            }

            InternalStopwatch watch = new InternalStopwatch($"Rebuilding all files to {format}");

            // Now loop through and get only directories from the input paths
            List<string> directories = new List<string>();
            Parallel.ForEach(inputs, Globals.ParallelOptions, input =>
            {
                // Add to the list if the input is a directory
                if (Directory.Exists(input))
                {
                    Globals.Logger.Verbose($"Adding depot: {input}");
                    lock (directories)
                    {
                        directories.Add(input);
                    }
                }
            });

            // If we don't have any directories, we want to exit
            if (directories.Count == 0)
                return success;

            // Now that we have a list of depots, we want to bucket the input DAT by SHA-1
            Items.BucketBy(BucketedBy.SHA1, DedupeType.None);

            // Then we want to loop through each of the hashes and see if we can rebuild
            foreach (string hash in Items.Keys)
            {
                // Pre-empt any issues that could arise from string length
                if (hash.Length != Constants.SHA1Length)
                    continue;

                Globals.Logger.User($"Checking hash '{hash}'");

                // Get the extension path for the hash
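                // e.g. a hash starting "abcd..." is expected at "ab/cd/<hash>.gz"
                // beneath each depot root (the usual Romba layout, inferred from
                // how GetRombaPath is used here)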
                string subpath = PathExtensions.GetRombaPath(hash);

                // Find the first depot that includes the hash
                string foundpath = null;
                foreach (string directory in directories)
                {
                    if (File.Exists(Path.Combine(directory, subpath)))
                    {
                        foundpath = Path.Combine(directory, subpath);
                        break;
                    }
                }

                // If we didn't find a path, then we continue
                if (foundpath == null)
                    continue;

                // If we have a path, we want to try to get the rom information
                GZipArchive archive = new GZipArchive(foundpath);
                BaseFile fileinfo = archive.GetTorrentGZFileInfo();

                // If the file information is null, then we continue
                if (fileinfo == null)
                    continue;

                // Otherwise, we rebuild that file to all locations that we need to
                bool usedInternally;
                if (Items[hash][0].ItemType == ItemType.Disk)
                    usedInternally = RebuildIndividualFile(new Disk(fileinfo), foundpath, outDir, date, inverse, outputFormat, updateDat, false /* isZip */, headerToCheckAgainst);
                else
                    usedInternally = RebuildIndividualFile(new Rom(fileinfo), foundpath, outDir, date, inverse, outputFormat, updateDat, false /* isZip */, headerToCheckAgainst);

                // If we are supposed to delete the depot file, do so
                if (delete && usedInternally)
                    FileExtensions.TryDelete(foundpath);
            }

            watch.Stop();

            #endregion

            // If we're updating the DAT, output to the rebuild directory
            if (updateDat)
            {
                Header.FileName = $"fixDAT_{Header.FileName}";
                Header.Name = $"fixDAT_{Header.Name}";
                Header.Description = $"fixDAT_{Header.Description}";
                Items.ClearMarked();
                Write(outDir);
            }

            return success;
        }

        /// <summary>
        /// Process the DAT and find all matches in input files and folders
        /// </summary>
        /// <param name="inputs">List of input files/folders to check</param>
        /// <param name="outDir">Output directory to use to build to</param>
        /// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
        /// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
        /// <param name="delete">True if input files should be deleted, false otherwise</param>
        /// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
        /// <param name="outputFormat">Output format that files should be written to</param>
        /// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
        /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
        /// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
        /// <returns>True if rebuilding was a success, false otherwise</returns>
        public bool RebuildGeneric(
            List<string> inputs,
            string outDir,
            bool quickScan,
            bool date,
            bool delete,
            bool inverse,
            OutputFormat outputFormat,
            bool updateDat,
            string headerToCheckAgainst,
            bool chdsAsFiles)
        {
            #region Perform setup

            // If the DAT is not populated and inverse is not set, inform the user and quit
            if (Items.TotalCount == 0 && !inverse)
            {
                Globals.Logger.User("No entries were found to rebuild, exiting...");
                return false;
            }

            // Check that the output directory exists
            if (!Directory.Exists(outDir))
            {
                Directory.CreateDirectory(outDir);
                outDir = Path.GetFullPath(outDir);
            }

            // Now we want to get forcepack flag if it's not overridden
            if (outputFormat == OutputFormat.Folder && Header.ForcePacking != ForcePacking.None)
            {
                switch (Header.ForcePacking)
                {
                    case ForcePacking.Zip:
                        outputFormat = OutputFormat.TorrentZip;
                        break;
                    case ForcePacking.Unzip:
                        outputFormat = OutputFormat.Folder;
                        break;
                }
            }

            // Preload the Skipper list
            Transform.Init();

            #endregion

            bool success = true;

            #region Rebuild from sources in order

            string format = string.Empty;
            switch (outputFormat)
            {
                case OutputFormat.Folder:
                    format = "directory";
                    break;
                case OutputFormat.TapeArchive:
                    format = "TAR";
                    break;
                case OutputFormat.Torrent7Zip:
                    format = "Torrent7Z";
                    break;
                case OutputFormat.TorrentGzip:
                case OutputFormat.TorrentGzipRomba:
                    format = "TorrentGZ";
                    break;
                case OutputFormat.TorrentLRZip:
                    format = "TorrentLRZ";
                    break;
                case OutputFormat.TorrentRar:
                    format = "TorrentRAR";
                    break;
                case OutputFormat.TorrentXZ:
                case OutputFormat.TorrentXZRomba:
                    format = "TorrentXZ";
                    break;
                case OutputFormat.TorrentZip:
                    format = "TorrentZip";
                    break;
            }

            InternalStopwatch watch = new InternalStopwatch($"Rebuilding all files to {format}");

            // Now loop through all of the files in all of the inputs
            foreach (string input in inputs)
            {
                // If the input is a file
                if (File.Exists(input))
                {
                    Globals.Logger.User($"Checking file: {input}");
                    RebuildGenericHelper(input, outDir, quickScan, date, delete, inverse, outputFormat, updateDat, headerToCheckAgainst, chdsAsFiles);
                }

                // If the input is a directory
                else if (Directory.Exists(input))
                {
                    Globals.Logger.Verbose($"Checking directory: {input}");
                    foreach (string file in Directory.EnumerateFiles(input, "*", SearchOption.AllDirectories))
                    {
                        Globals.Logger.User($"Checking file: {file}");
                        RebuildGenericHelper(file, outDir, quickScan, date, delete, inverse, outputFormat, updateDat, headerToCheckAgainst, chdsAsFiles);
                    }
                }
            }

            watch.Stop();

            #endregion

            // If we're updating the DAT, output to the rebuild directory
            if (updateDat)
            {
                Header.FileName = $"fixDAT_{Header.FileName}";
                Header.Name = $"fixDAT_{Header.Name}";
                Header.Description = $"fixDAT_{Header.Description}";
                Items.ClearMarked();
                Write(outDir);
            }

            return success;
        }

        /// <summary>
        /// Attempt to add a file to the output if it matches
        /// </summary>
        /// <param name="file">Name of the file to process</param>
        /// <param name="outDir">Output directory to use to build to</param>
        /// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
        /// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
        /// <param name="delete">True if input files should be deleted, false otherwise</param>
        /// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
        /// <param name="outputFormat">Output format that files should be written to</param>
        /// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
        /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
        /// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
        private void RebuildGenericHelper(
            string file,
            string outDir,
            bool quickScan,
            bool date,
            bool delete,
            bool inverse,
            OutputFormat outputFormat,
            bool updateDat,
            string headerToCheckAgainst,
            bool chdsAsFiles)
        {
            // If we somehow have a null filename, return
            if (file == null)
                return;

            // Set the deletion variables
            bool usedExternally, usedInternally = false;

            // Scan the file externally

            // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
            BaseFile externalFileInfo = FileExtensions.GetInfo(file, omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes),
                header: headerToCheckAgainst, chdsAsFiles: chdsAsFiles);

            DatItem externalDatItem = null;
            if (externalFileInfo.Type == FileType.CHD)
                externalDatItem = new Disk(externalFileInfo);
            else if (externalFileInfo.Type == FileType.None)
                externalDatItem = new Rom(externalFileInfo);

            usedExternally = RebuildIndividualFile(externalDatItem, file, outDir, date, inverse, outputFormat,
                updateDat, null /* isZip */, headerToCheckAgainst);

            // Scan the file internally

            // Create an empty list of BaseFile for archive entries
            List<BaseFile> entries = null;

            // Get the TGZ status for later
            GZipArchive tgz = new GZipArchive(file);
            bool isTorrentGzip = tgz.IsTorrent();
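            // TorrentGZip inputs are flagged here so their entries are rebuilt
            // with isZip == false below, which enables the fast TGZ->TGZ copy
            // path in RebuildIndividualFile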
            // Get the base archive first
            BaseArchive archive = BaseArchive.Create(file);

            // Now get all extracted items from the archive
            if (archive != null)
            {
                // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
                entries = archive.GetChildren(omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes), date: date);
            }

            // If the entries list is null, we encountered an error and should scan externally
            if (entries == null && File.Exists(file))
            {
                // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
                BaseFile internalFileInfo = FileExtensions.GetInfo(file, omitFromScan: (quickScan ? Hash.SecureHashes : Hash.DeepHashes), chdsAsFiles: chdsAsFiles);

                DatItem internalDatItem = null;
                if (internalFileInfo.Type == FileType.CHD)
                    internalDatItem = new Disk(internalFileInfo);
                else if (internalFileInfo.Type == FileType.None)
                    internalDatItem = new Rom(internalFileInfo);

                usedExternally = RebuildIndividualFile(internalDatItem, file, outDir, date, inverse, outputFormat, updateDat, null /* isZip */, headerToCheckAgainst);
            }
            // Otherwise, loop through the entries and try to match
            else
            {
                foreach (BaseFile entry in entries)
                {
                    DatItem internalDatItem = DatItem.Create(entry);
                    usedInternally |= RebuildIndividualFile(internalDatItem, file, outDir, date, inverse, outputFormat, updateDat, !isTorrentGzip /* isZip */, headerToCheckAgainst);
                }
            }

            // If we are supposed to delete the file, do so
            if (delete && (usedExternally || usedInternally))
                FileExtensions.TryDelete(file);
        }

        /// <summary>
        /// Find duplicates and rebuild individual files to output
        /// </summary>
        /// <param name="datItem">Information for the current file to rebuild from</param>
        /// <param name="file">Name of the file to process</param>
        /// <param name="outDir">Output directory to use to build to</param>
        /// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
        /// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
        /// <param name="outputFormat">Output format that files should be written to</param>
        /// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
        /// <param name="isZip">True if the input file is an archive, false if the file is TGZ, null otherwise</param>
        /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
        /// <returns>True if the file was able to be rebuilt, false otherwise</returns>
        private bool RebuildIndividualFile(
            DatItem datItem,
            string file,
            string outDir,
            bool date,
            bool inverse,
            OutputFormat outputFormat,
            bool updateDat,
            bool? isZip,
            string headerToCheckAgainst)
        {
            // Set the initial output value
            bool rebuilt = false;

            // If the DatItem is a Disk, force rebuilding to a folder except if TGZ or TXZ
            if (datItem.ItemType == ItemType.Disk
                && !(outputFormat == OutputFormat.TorrentGzip || outputFormat == OutputFormat.TorrentGzipRomba)
                && !(outputFormat == OutputFormat.TorrentXZ || outputFormat == OutputFormat.TorrentXZRomba))
            {
                outputFormat = OutputFormat.Folder;
            }

            // If we have a disk, change it into a Rom for later use
            if (datItem.ItemType == ItemType.Disk)
                datItem = ((Disk)datItem).ConvertToRom();

            // Prepopulate a few key strings
            string crc = ((Rom)datItem).CRC ?? string.Empty;
            string sha1 = ((Rom)datItem).SHA1 ?? string.Empty;

            // Find if the file has duplicates in the DAT
            List<DatItem> dupes = Items.GetDuplicates(datItem, remove: updateDat);
            bool hasDuplicates = dupes.Count > 0;

            // Rebuild when we have a match (normal mode) or when we don't (inverse mode)
            if (hasDuplicates ^ inverse)
            {
                // If we have a very specific TGZ->TGZ case, just copy it accordingly
                GZipArchive tgz = new GZipArchive(file);
                BaseFile tgzRom = tgz.GetTorrentGZFileInfo();
                if (isZip == false && tgzRom != null && (outputFormat == OutputFormat.TorrentGzip || outputFormat == OutputFormat.TorrentGzipRomba))
                {
                    Globals.Logger.User($"Matches found for '{Path.GetFileName(datItem.Name)}', rebuilding accordingly...");

                    // Get the proper output path
                    if (outputFormat == OutputFormat.TorrentGzipRomba)
                        outDir = Path.Combine(outDir, PathExtensions.GetRombaPath(sha1));
                    else
                        outDir = Path.Combine(outDir, sha1 + ".gz");

                    // Make sure the output folder is created
                    Directory.CreateDirectory(Path.GetDirectoryName(outDir));

                    // Now copy the file over
                    try
                    {
                        File.Copy(file, outDir);
                        return true;
                    }
                    catch
                    {
                        return false;
                    }
                }

                // If we have a very specific TXZ->TXZ case, just copy it accordingly
                XZArchive txz = new XZArchive(file);
                BaseFile txzRom = txz.GetTorrentXZFileInfo();
                if (isZip == false && txzRom != null && (outputFormat == OutputFormat.TorrentXZ || outputFormat == OutputFormat.TorrentXZRomba))
                {
                    Globals.Logger.User($"Matches found for '{Path.GetFileName(datItem.Name)}', rebuilding accordingly...");

                    // Get the proper output path
                    if (outputFormat == OutputFormat.TorrentXZRomba)
                        outDir = Path.Combine(outDir, PathExtensions.GetRombaPath(sha1));
                    else
                        outDir = Path.Combine(outDir, sha1 + ".xz");

                    // Make sure the output folder is created
                    Directory.CreateDirectory(Path.GetDirectoryName(outDir));

                    // Now copy the file over
                    try
                    {
                        File.Copy(file, outDir);
                        return true;
                    }
                    catch
                    {
                        return false;
                    }
                }

                // Get a generic stream for the file
                Stream fileStream = new MemoryStream();

                // If we have a zipfile, extract the stream to memory
                if (isZip != null)
                {
                    BaseArchive archive = BaseArchive.Create(file);
                    if (archive != null)
                        (fileStream, _) = archive.CopyToStream(datItem.Name);
                }
                // Otherwise, just open the filestream
                else
                {
                    fileStream = FileExtensions.TryOpenRead(file);
                }

                // If the stream is null, then continue
                if (fileStream == null)
                    return false;

                // Seek to the beginning of the stream
                if (fileStream.CanSeek)
                    fileStream.Seek(0, SeekOrigin.Begin);

                // If we are inverse, create an output to rebuild to
                if (inverse)
                {
                    string machinename = null;

                    // Get the item from the current file
                    Rom item = new Rom(fileStream.GetInfo(keepReadOpen: true));
                    item.MachineName = Path.GetFileNameWithoutExtension(item.Name);
                    item.MachineDescription = Path.GetFileNameWithoutExtension(item.Name);

                    // If we are coming from an archive, set the correct machine name
                    if (machinename != null)
                    {
                        item.MachineName = machinename;
                        item.MachineDescription = machinename;
                    }

                    dupes.Add(item);
                }

                Globals.Logger.User($"{(inverse ? "No matches" : "Matches")} found for '{Path.GetFileName(datItem.Name)}', rebuilding accordingly...");
                rebuilt = true;

                // Now loop through the list and rebuild accordingly
                foreach (DatItem item in dupes)
                {
                    // Get the output archive, if possible
                    Folder outputArchive = Folder.Create(outputFormat);

                    // Now rebuild to the output file
                    outputArchive.Write(fileStream, outDir, (Rom)item, date: date, romba: outputFormat == OutputFormat.TorrentGzipRomba || outputFormat == OutputFormat.TorrentXZRomba);
                }

                // Close the input stream
                fileStream?.Dispose();
            }
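            // Header handling: if a skipper rule matches, the header is stripped
            // and the headerless copy is checked against the DAT; on a match the
            // file is written out both without the header (under the matched name)
            // and with it (under a "name_<crc>" alias)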
            // Now we want to take care of headers, if applicable
            if (headerToCheckAgainst != null)
            {
                // Get a generic stream for the file
                Stream fileStream = new MemoryStream();

                // If we have a zipfile, extract the stream to memory
                if (isZip != null)
                {
                    BaseArchive archive = BaseArchive.Create(file);
                    if (archive != null)
                        (fileStream, _) = archive.CopyToStream(datItem.Name);
                }
                // Otherwise, just open the filestream
                else
                {
                    fileStream = FileExtensions.TryOpenRead(file);
                }

                // If the stream is null, then continue
                if (fileStream == null)
                    return false;

                // Check to see if we have a matching header first
                SkipperRule rule = Transform.GetMatchingRule(fileStream, Path.GetFileNameWithoutExtension(headerToCheckAgainst));

                // If there's a match, create the new file to write
                if (rule.Tests != null && rule.Tests.Count != 0)
                {
                    // If the file could be transformed correctly
                    MemoryStream transformStream = new MemoryStream();
                    if (rule.TransformStream(fileStream, transformStream, keepReadOpen: true, keepWriteOpen: true))
                    {
                        // Get the file information that we will be using
                        Rom headerless = new Rom(transformStream.GetInfo(keepReadOpen: true));

                        // Find if the file has duplicates in the DAT
                        dupes = Items.GetDuplicates(headerless, remove: updateDat);
                        hasDuplicates = dupes.Count > 0;

                        // If it has duplicates and we're not filtering, rebuild it
                        if (hasDuplicates && !inverse)
                        {
                            Globals.Logger.User($"Headerless matches found for '{Path.GetFileName(datItem.Name)}', rebuilding accordingly...");
                            rebuilt = true;

                            // Now loop through the list and rebuild accordingly
                            foreach (DatItem item in dupes)
                            {
                                // Create a headered item to use as well
                                datItem.CopyMachineInformation(item);
                                datItem.Name += $"_{crc}";

                                // If either copy succeeds, then we want to set rebuilt to true
                                bool eitherSuccess = false;

                                // Get the output archive, if possible
                                Folder outputArchive = Folder.Create(outputFormat);

                                // Now rebuild to the output file
                                eitherSuccess |= outputArchive.Write(transformStream, outDir, (Rom)item, date: date, romba: outputFormat == OutputFormat.TorrentGzipRomba || outputFormat == OutputFormat.TorrentXZRomba);
                                eitherSuccess |= outputArchive.Write(fileStream, outDir, (Rom)datItem, date: date, romba: outputFormat == OutputFormat.TorrentGzipRomba || outputFormat == OutputFormat.TorrentXZRomba);

                                // Now add the success of either rebuild
                                rebuilt &= eitherSuccess;
                            }
                        }
                    }

                    // Dispose of the stream
                    transformStream?.Dispose();
                }

                // Dispose of the stream
                fileStream?.Dispose();
            }

            return rebuilt;
        }

        /// <summary>
        /// Process the DAT and verify from the depots
        /// </summary>
        /// <param name="inputs">List of input directories to compare against</param>
        /// <param name="outDir">Optional param for output directory</param>
        /// <returns>True if verification was a success, false otherwise</returns>
        public bool VerifyDepot(List<string> inputs, string outDir)
        {
            bool success = true;

            InternalStopwatch watch = new InternalStopwatch("Verifying all from supplied depots");

            // Now loop through and get only directories from the input paths
            List<string> directories = new List<string>();
            foreach (string input in inputs)
            {
                // Add to the list if the input is a directory
                if (Directory.Exists(input))
                {
                    Globals.Logger.Verbose($"Adding depot: {input}");
                    directories.Add(input);
                }
            }

            // If we don't have any directories, we want to exit
            if (directories.Count == 0)
                return success;

            // Now that we have a list of depots, we want to bucket the input DAT by SHA-1
            Items.BucketBy(BucketedBy.SHA1, DedupeType.None);

            // Then we want to loop through each of the hashes and see if we can rebuild
            foreach (string hash in Items.Keys)
            {
                // Pre-empt any issues that could arise from string length
                if (hash.Length != Constants.SHA1Length)
                    continue;

                Globals.Logger.User($"Checking hash '{hash}'");

                // Get the extension path for the hash
                string subpath = PathExtensions.GetRombaPath(hash);

                // Find the first depot that includes the hash
                string foundpath = null;
                foreach (string directory in directories)
                {
                    if (File.Exists(Path.Combine(directory, subpath)))
                    {
                        foundpath = Path.Combine(directory, subpath);
                        break;
                    }
                }

                // If we didn't find a path, then we continue
                if (foundpath == null)
                    continue;

                // If we have a path, we want to try to get the rom information
                GZipArchive tgz = new GZipArchive(foundpath);
                BaseFile fileinfo = tgz.GetTorrentGZFileInfo();

                // If the file information is null, then we continue
                if (fileinfo == null)
                    continue;

                // Now we want to remove all duplicates from the DAT
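                // GetDuplicates with remove: true strips the matches from the DAT
                // as a side effect; the returned lists themselves are unused here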
                Items.GetDuplicates(new Rom(fileinfo), remove: true)
                    .AddRange(Items.GetDuplicates(new Disk(fileinfo), remove: true));
            }

            watch.Stop();

            // Output the fixdat of any remaining entries to the output directory
            Header.FileName = $"fixDAT_{Header.FileName}";
            Header.Name = $"fixDAT_{Header.Name}";
            Header.Description = $"fixDAT_{Header.Description}";
            Items.ClearMarked();
            Write(outDir, stats: true);

            return success;
        }

        /// <summary>
        /// Process the DAT and verify the output directory
        /// </summary>
        /// <param name="inputs">List of input directories to compare against</param>
        /// <param name="outDir">Optional param for output directory</param>
        /// <param name="hashOnly">True if only hashes should be checked, false for full file information</param>
        /// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
        /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
        /// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
        /// <param name="filter">Filter object to be passed to the DatItem level</param>
        /// <returns>True if verification was a success, false otherwise</returns>
        public bool VerifyGeneric(List<string> inputs, string outDir, bool hashOnly, bool quickScan, string headerToCheckAgainst, bool chdsAsFiles, Filter filter)
        {
            // TODO: We want the cross section of what's the folder and what's in the DAT. Right now, it just has what's in the DAT that's not in the folder
            bool success = true;

            // Then, loop through and check each of the inputs
            Globals.Logger.User("Processing files:\n");
            foreach (string input in inputs)
            {
                // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
                PopulateFromDir(input, (quickScan ? Hash.SecureHashes : Hash.DeepHashes) /* omitFromScan */, true /* bare */, false /* archivesAsFiles */,
                    SkipFileType.None, false /* addBlanks */, false /* addDate */, string.Empty /* tempDir */, false /* copyFiles */, headerToCheckAgainst, chdsAsFiles, filter);
            }

            // Setup the fixdat
            DatFile matched = Create(Header);
            matched.Items = new ItemDictionary();
            matched.Header.FileName = $"fixDat_{matched.Header.FileName}";
            matched.Header.Name = $"fixDat_{matched.Header.Name}";
            matched.Header.Description = $"fixDat_{matched.Header.Description}";
            matched.Header.DatFormat = DatFormat.Logiqx;

            // If we are checking hashes only, essentially diff the inputs
            if (hashOnly)
            {
                // First we need to bucket and dedupe by hash to get duplicates
                Items.BucketBy(BucketedBy.CRC, DedupeType.Full);

                // Then follow the same tactics as before
                foreach (string key in Items.Keys)
                {
                    List<DatItem> roms = Items[key];
                    foreach (DatItem rom in roms)
                    {
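                        // IndexId 99 appears to mark items that came from the
                        // scanned inputs rather than from the DAT itself (an
                        // assumption based on its use in this method)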
                        if (rom.IndexId == 99)
                        {
                            if (rom.ItemType == ItemType.Disk)
                                matched.Items.Add(((Disk)rom).SHA1, rom);
                            else if (rom.ItemType == ItemType.Rom)
                                matched.Items.Add(((Rom)rom).SHA1, rom);
                        }
                    }
                }
            }
            // If we are checking full names, get only files found in directory
            else
            {
                foreach (string key in Items.Keys)
                {
                    List<DatItem> roms = Items[key];
                    List<DatItem> newroms = DatItem.Merge(roms);
                    foreach (Rom rom in newroms)
                    {
                        if (rom.IndexId == 99)
                            matched.Items.Add($"{rom.Size}-{rom.CRC}", rom);
                    }
                }
            }

            // Now output the fixdat to the main folder
            Items.ClearMarked();
            success &= matched.Write(outDir, stats: true);

            return success;
        }

        #endregion

        // TODO: Implement Level split
        #region Splitting

        /// <summary>
        /// Split a DAT by input extensions
        /// </summary>
        /// <param name="outDir">Name of the directory to write the DATs out to</param>
        /// <param name="extA">List of extensions to split on (first DAT)</param>
        /// <param name="extB">List of extensions to split on (second DAT)</param>
        /// <returns>True if split succeeded, false otherwise</returns>
        public bool SplitByExtension(string outDir, List<string> extA, List<string> extB)
        {
            // If roms is empty, return false
            if (Items.TotalCount == 0)
                return false;

            // Make sure all of the extensions don't have a dot at the beginning
            var newExtA = extA.Select(s => s.TrimStart('.').ToLowerInvariant());
            string newExtAString = string.Join(",", newExtA);

            var newExtB = extB.Select(s => s.TrimStart('.').ToLowerInvariant());
            string newExtBString = string.Join(",", newExtB);

            // Set all of the appropriate outputs for each of the subsets
            DatFile datdataA = Create(Header.CloneStandard());
            datdataA.Header.FileName += $" ({newExtAString})";
            datdataA.Header.Name += $" ({newExtAString})";
            datdataA.Header.Description += $" ({newExtAString})";

            DatFile datdataB = Create(Header.CloneStandard());
            datdataB.Header.FileName += $" ({newExtBString})";
            datdataB.Header.Name += $" ({newExtBString})";
            datdataB.Header.Description += $" ({newExtBString})";

            // Now separate the roms accordingly
            Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
            {
                List<DatItem> items = Items[key];
                foreach (DatItem item in items)
                {
                    if (newExtA.Contains(PathExtensions.GetNormalizedExtension(item.Name)))
                    {
                        datdataA.Items.Add(key, item);
                    }
                    else if (newExtB.Contains(PathExtensions.GetNormalizedExtension(item.Name)))
                    {
                        datdataB.Items.Add(key, item);
                    }
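                    // Items whose extension matches neither list go to both outputs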
                    else
                    {
                        datdataA.Items.Add(key, item);
                        datdataB.Items.Add(key, item);
                    }
                }
            });

            // Then write out both files
            bool success = datdataA.Write(outDir);
            success &= datdataB.Write(outDir);

            return success;
        }

        /// <summary>
        /// Split a DAT by best available hashes
        /// </summary>
        /// <param name="outDir">Name of the directory to write the DATs out to</param>
        /// <returns>True if split succeeded, false otherwise</returns>
        public bool SplitByHash(string outDir)
        {
            // Create each of the respective output DATs
            Globals.Logger.User("Creating and populating new DATs");

            DatFile nodump = Create(Header.CloneStandard());
            nodump.Header.FileName += " (Nodump)";
            nodump.Header.Name += " (Nodump)";
            nodump.Header.Description += " (Nodump)";

            DatFile sha512 = Create(Header.CloneStandard());
            sha512.Header.FileName += " (SHA-512)";
            sha512.Header.Name += " (SHA-512)";
            sha512.Header.Description += " (SHA-512)";

            DatFile sha384 = Create(Header.CloneStandard());
            sha384.Header.FileName += " (SHA-384)";
            sha384.Header.Name += " (SHA-384)";
            sha384.Header.Description += " (SHA-384)";

            DatFile sha256 = Create(Header.CloneStandard());
            sha256.Header.FileName += " (SHA-256)";
            sha256.Header.Name += " (SHA-256)";
            sha256.Header.Description += " (SHA-256)";

            DatFile sha1 = Create(Header.CloneStandard());
            sha1.Header.FileName += " (SHA-1)";
            sha1.Header.Name += " (SHA-1)";
            sha1.Header.Description += " (SHA-1)";

#if NET_FRAMEWORK
            DatFile ripemd160 = Create(Header.CloneStandard());
            ripemd160.Header.FileName += " (RIPEMD160)";
            ripemd160.Header.Name += " (RIPEMD160)";
            ripemd160.Header.Description += " (RIPEMD160)";
#endif

            DatFile md5 = Create(Header.CloneStandard());
            md5.Header.FileName += " (MD5)";
            md5.Header.Name += " (MD5)";
            md5.Header.Description += " (MD5)";

            DatFile crc = Create(Header.CloneStandard());
            crc.Header.FileName += " (CRC)";
            crc.Header.Name += " (CRC)";
            crc.Header.Description += " (CRC)";

            DatFile other = Create(Header.CloneStandard());
            other.Header.FileName += " (Other)";
            other.Header.Name += " (Other)";
            other.Header.Description += " (Other)";

            // Now populate each of the DAT objects in turn
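            // Each item lands in exactly one DAT, keyed by the strongest hash it
            // carries (SHA-512 down through CRC), with nodumps separated out first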
            Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
            {
                List<DatItem> items = Items[key];
                foreach (DatItem item in items)
                {
                    // If the file is not a Rom or Disk, continue
                    if (item.ItemType != ItemType.Disk && item.ItemType != ItemType.Rom)
                        continue;

                    // If the file is a nodump
                    if ((item.ItemType == ItemType.Rom && ((Rom)item).ItemStatus == ItemStatus.Nodump)
                        || (item.ItemType == ItemType.Disk && ((Disk)item).ItemStatus == ItemStatus.Nodump))
                    {
                        nodump.Items.Add(key, item);
                    }
                    // If the file has a SHA-512
                    else if ((item.ItemType == ItemType.Rom && !string.IsNullOrWhiteSpace(((Rom)item).SHA512))
                        || (item.ItemType == ItemType.Disk && !string.IsNullOrWhiteSpace(((Disk)item).SHA512)))
                    {
                        sha512.Items.Add(key, item);
                    }
                    // If the file has a SHA-384
                    else if ((item.ItemType == ItemType.Rom && !string.IsNullOrWhiteSpace(((Rom)item).SHA384))
                        || (item.ItemType == ItemType.Disk && !string.IsNullOrWhiteSpace(((Disk)item).SHA384)))
                    {
                        sha384.Items.Add(key, item);
                    }
                    // If the file has a SHA-256
                    else if ((item.ItemType == ItemType.Rom && !string.IsNullOrWhiteSpace(((Rom)item).SHA256))
                        || (item.ItemType == ItemType.Disk && !string.IsNullOrWhiteSpace(((Disk)item).SHA256)))
                    {
                        sha256.Items.Add(key, item);
                    }
                    // If the file has a SHA-1
                    else if ((item.ItemType == ItemType.Rom && !string.IsNullOrWhiteSpace(((Rom)item).SHA1))
                        || (item.ItemType == ItemType.Disk && !string.IsNullOrWhiteSpace(((Disk)item).SHA1)))
                    {
                        sha1.Items.Add(key, item);
                    }
#if NET_FRAMEWORK
                    // If the file has a RIPEMD160
                    else if ((item.ItemType == ItemType.Rom && !string.IsNullOrWhiteSpace(((Rom)item).RIPEMD160))
                        || (item.ItemType == ItemType.Disk && !string.IsNullOrWhiteSpace(((Disk)item).RIPEMD160)))
                    {
                        ripemd160.Items.Add(key, item);
                    }
#endif
                    // If the file has an MD5
                    else if ((item.ItemType == ItemType.Rom && !string.IsNullOrWhiteSpace(((Rom)item).MD5))
                        || (item.ItemType == ItemType.Disk && !string.IsNullOrWhiteSpace(((Disk)item).MD5)))
                    {
                        md5.Items.Add(key, item);
                    }
                    // If the file has a CRC
                    else if (item.ItemType == ItemType.Rom && !string.IsNullOrWhiteSpace(((Rom)item).CRC))
                    {
                        crc.Items.Add(key, item);
                    }
                    else
                    {
                        other.Items.Add(key, item);
                    }
                }
            });

            // Now, output all of the files to the output directory
            Globals.Logger.User("DAT information created, outputting new files");
            bool success = true;
            success &= nodump.Write(outDir);
            success &= sha512.Write(outDir);
            success &= sha384.Write(outDir);
            success &= sha256.Write(outDir);
            success &= sha1.Write(outDir);
#if NET_FRAMEWORK
            success &= ripemd160.Write(outDir);
#endif
            success &= md5.Write(outDir);
            success &= crc.Write(outDir);
            success &= other.Write(outDir);

            return success;
        }

        /// <summary>
        /// Split a SuperDAT by lowest available directory level
        /// </summary>
        /// <param name="outDir">Name of the directory to write the DATs out to</param>
        /// <param name="shortname">True if short names should be used, false otherwise</param>
        /// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise</param>
        /// <returns>True if split succeeded, false otherwise</returns>
        public bool SplitByLevel(string outDir, bool shortname, bool basedat)
        {
            // First, bucket by games so that we can do the right thing
            Items.BucketBy(BucketedBy.Game, DedupeType.None, lower: false, norename: true);

            // Create a temporary DAT to add things to
            DatFile tempDat = Create(Header);
            tempDat.Header.Name = null;

            // Sort the input keys
            List<string> keys = Items.Keys.ToList();
            keys.Sort(SplitByLevelSort);

            // Then, we loop over the games
            Parallel.ForEach(keys, Globals.ParallelOptions, key =>
            {
                // Here, the key is the name of the game to be used for comparison
                if (tempDat.Header.Name != null && tempDat.Header.Name != Path.GetDirectoryName(key))
                {
                    // Process and write out the current DAT
                    SplitByLevelHelper(tempDat, outDir, shortname, basedat);

                    // Reset the DAT for the next items
                    tempDat = Create(Header);
                    tempDat.Header.Name = null;
                }

                // Clean the input list and set all games to be pathless
                List<DatItem> items = Items[key];
                items.ForEach(item => item.MachineName = Path.GetFileName(item.MachineName));
                items.ForEach(item => item.MachineDescription = Path.GetFileName(item.MachineDescription));

                // Now add the game to the output DAT
                tempDat.Items.AddRange(key, items);

                // Then set the DAT name to be the parent directory name
                tempDat.Header.Name = Path.GetDirectoryName(key);
            });

            // Then write out the final DAT, since it would otherwise be skipped
            SplitByLevelHelper(tempDat, outDir, shortname, basedat);

            return true;
        }

        /// <summary>
        /// Helper function for SplitByLevel to sort the input game names
        /// </summary>
        /// <param name="a">First string to compare</param>
        /// <param name="b">Second string to compare</param>
        /// <returns>-1 for a coming before b, 0 for a == b, 1 for a coming after b</returns>
        private int SplitByLevelSort(string a, string b)
        {
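            // Shallower paths sort before deeper ones; ties fall back to natural
            // ordering, so e.g. "dir2" < "dir10" and both precede "dir1\sub"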
            NaturalComparer nc = new NaturalComparer();
            int adeep = a.Count(c => c == '/' || c == '\\');
            int bdeep = b.Count(c => c == '/' || c == '\\');

            if (adeep == bdeep)
                return nc.Compare(a, b);

            return adeep - bdeep;
        }

        /// <summary>
        /// Helper function for SplitByLevel to clean and write out a DAT
        /// </summary>
        /// <param name="newDatFile">DAT to clean and write out</param>
        /// <param name="outDir">Directory to write out to</param>
        /// <param name="shortname">True if short naming scheme should be used, false otherwise</param>
        /// <param name="restore">True if original filenames should be used as the base for output filename, false otherwise</param>
        private void SplitByLevelHelper(DatFile newDatFile, string outDir, bool shortname, bool restore)
        {
            // Get the name from the DAT to use separately
            string name = newDatFile.Header.Name;
            string expName = name.Replace("/", " - ").Replace("\\", " - ");

            // Now set the new output values
            newDatFile.Header.FileName = WebUtility.HtmlDecode(string.IsNullOrWhiteSpace(name)
                ? Header.FileName
                : (shortname
                    ? Path.GetFileName(name)
                    : expName
                    )
                );
            newDatFile.Header.FileName = (restore ? $"{Header.FileName} ({newDatFile.Header.FileName})" : newDatFile.Header.FileName);
            newDatFile.Header.Name = $"{Header.Name} ({expName})";
            newDatFile.Header.Description = (string.IsNullOrWhiteSpace(Header.Description) ? newDatFile.Header.Name : $"{Header.Description} ({expName})");
            newDatFile.Header.Type = null;

            // Write out the temporary DAT to the proper directory
            newDatFile.Write(outDir);
        }

        /// <summary>
        /// Split a DAT by size of Rom
        /// </summary>
        /// <param name="outDir">Name of the directory to write the DATs out to</param>
        /// <param name="radix">Long value representing the split point</param>
        /// <returns>True if split succeeded, false otherwise</returns>
        public bool SplitBySize(string outDir, long radix)
        {
            // Create each of the respective output DATs
            Globals.Logger.User("Creating and populating new DATs");

            DatFile lessDat = Create(Header.CloneStandard());
            lessDat.Header.FileName += $" (less than {radix})";
            lessDat.Header.Name += $" (less than {radix})";
            lessDat.Header.Description += $" (less than {radix})";

            DatFile greaterEqualDat = Create(Header.CloneStandard());
            greaterEqualDat.Header.FileName += $" (equal-greater than {radix})";
            greaterEqualDat.Header.Name += $" (equal-greater than {radix})";
            greaterEqualDat.Header.Description += $" (equal-greater than {radix})";

            // Now populate each of the DAT objects in turn
            Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
            {
                List<DatItem> items = Items[key];
                foreach (DatItem item in items)
                {
                    // If the file is not a Rom, it automatically goes in the "lesser" dat
                    if (item.ItemType != ItemType.Rom)
                        lessDat.Items.Add(key, item);

                    // If the file is a Rom and less than the radix, put it in the "lesser" dat
                    else if (item.ItemType == ItemType.Rom && ((Rom)item).Size < radix)
                        lessDat.Items.Add(key, item);

                    // If the file is a Rom and greater than or equal to the radix, put it in the "greater" dat
                    else if (item.ItemType == ItemType.Rom && ((Rom)item).Size >= radix)
                        greaterEqualDat.Items.Add(key, item);
                }
            });

            // Now, output all of the files to the output directory
            Globals.Logger.User("DAT information created, outputting new files");
            bool success = true;
            success &= lessDat.Write(outDir);
            success &= greaterEqualDat.Write(outDir);

            return success;
        }

        /// <summary>
        /// Split a DAT by type of DatItem
        /// </summary>
        /// <param name="outDir">Name of the directory to write the DATs out to</param>
        /// <returns>True if split succeeded, false otherwise</returns>
        public bool SplitByType(string outDir)
        {
            // Create each of the respective output DATs
            Globals.Logger.User("Creating and populating new DATs");

            DatFile romdat = Create(Header.CloneStandard());
            romdat.Header.FileName += " (ROM)";
            romdat.Header.Name += " (ROM)";
            romdat.Header.Description += " (ROM)";

            DatFile diskdat = Create(Header.CloneStandard());
            diskdat.Header.FileName += " (Disk)";
            diskdat.Header.Name += " (Disk)";
            diskdat.Header.Description += " (Disk)";

            DatFile sampledat = Create(Header.CloneStandard());
            sampledat.Header.FileName += " (Sample)";
            sampledat.Header.Name += " (Sample)";
            sampledat.Header.Description += " (Sample)";

            // Now populate each of the DAT objects in turn
            Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
            {
                List<DatItem> items = Items[key];
                foreach (DatItem item in items)
                {
                    // If the file is a Rom
                    if (item.ItemType == ItemType.Rom)
                        romdat.Items.Add(key, item);

                    // If the file is a Disk
                    else if (item.ItemType == ItemType.Disk)
                        diskdat.Items.Add(key, item);

                    // If the file is a Sample
                    else if (item.ItemType == ItemType.Sample)
                        sampledat.Items.Add(key, item);
                }
            });

            // Now, output all of the files to the output directory
            Globals.Logger.User("DAT information created, outputting new files");
            bool success = true;
            success &= romdat.Write(outDir);
            success &= diskdat.Write(outDir);
            success &= sampledat.Write(outDir);

            return success;
        }

        #endregion

        #region Writing

        /// <summary>
        /// Create and open an output file for writing direct from a dictionary
        /// </summary>
        /// <param name="outDir">Set the output directory (current directory on null)</param>
        /// <param name="norename">True if games should only be compared on game and file name (default), false if system and source are counted</param>
        /// <param name="stats">True if DAT statistics should be output on write, false otherwise (default)</param>
        /// <param name="ignoreblanks">True if blank roms should be skipped on output, false otherwise (default)</param>
        /// <param name="overwrite">True if files should be overwritten (default), false if they should be renamed instead</param>
        /// <returns>True if the DAT was written correctly, false otherwise</returns>
        public bool Write(string outDir, bool norename = true, bool stats = false, bool ignoreblanks = false, bool overwrite = true)
        {
            // If there's nothing there, abort
            if (Items.TotalCount == 0)
            {
                Globals.Logger.User("There were no items to write out!");
                return false;
            }

            // Ensure the output directory is set and created
            outDir = DirectoryExtensions.Ensure(outDir, create: true);

            // If the DAT has no output format, default to XML
            if (Header.DatFormat == 0)
            {
                Globals.Logger.Verbose("No DAT format defined, defaulting to XML");
                Header.DatFormat = DatFormat.Logiqx;
            }

            // Make sure that the three essential fields are filled in
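            // Any missing field is filled from the first available of Description,
            // then Name, then FileName; if all three are empty, everything falls
            // back to "Default"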
if (string.IsNullOrWhiteSpace(Header.FileName) && string.IsNullOrWhiteSpace(Header.Name) && string.IsNullOrWhiteSpace(Header.Description))
|
|
{
|
|
Header.FileName = Header.Name = Header.Description = "Default";
|
|
}
|
|
else if (string.IsNullOrWhiteSpace(Header.FileName) && string.IsNullOrWhiteSpace(Header.Name) && !string.IsNullOrWhiteSpace(Header.Description))
|
|
{
|
|
Header.FileName = Header.Name = Header.Description;
|
|
}
|
|
else if (string.IsNullOrWhiteSpace(Header.FileName) && !string.IsNullOrWhiteSpace(Header.Name) && string.IsNullOrWhiteSpace(Header.Description))
|
|
{
|
|
Header.FileName = Header.Description = Header.Name;
|
|
}
|
|
else if (string.IsNullOrWhiteSpace(Header.FileName) && !string.IsNullOrWhiteSpace(Header.Name) && !string.IsNullOrWhiteSpace(Header.Description))
|
|
{
|
|
Header.FileName = Header.Description;
|
|
}
|
|
else if (!string.IsNullOrWhiteSpace(Header.FileName) && string.IsNullOrWhiteSpace(Header.Name) && string.IsNullOrWhiteSpace(Header.Description))
|
|
{
|
|
Header.Name = Header.Description = Header.FileName;
|
|
}
|
|
else if (!string.IsNullOrWhiteSpace(Header.FileName) && string.IsNullOrWhiteSpace(Header.Name) && !string.IsNullOrWhiteSpace(Header.Description))
|
|
{
|
|
Header.Name = Header.Description;
|
|
}
|
|
else if (!string.IsNullOrWhiteSpace(Header.FileName) && !string.IsNullOrWhiteSpace(Header.Name) && string.IsNullOrWhiteSpace(Header.Description))
|
|
{
|
|
Header.Description = Header.Name;
|
|
}
|
|
else if (!string.IsNullOrWhiteSpace(Header.FileName) && !string.IsNullOrWhiteSpace(Header.Name) && !string.IsNullOrWhiteSpace(Header.Description))
|
|
{
|
|
// Nothing is needed
|
|
}
|
|
|
|
// Output initial statistics, for kicks
|
|
if (stats)
|
|
{
|
|
if (Items.RomCount + Items.DiskCount == 0)
|
|
Items.RecalculateStats();
|
|
|
|
Items.BucketBy(BucketedBy.Game, DedupeType.None, norename: true);
|
|
|
|
var consoleOutput = BaseReport.Create(StatReportFormat.None, null, true, true);
|
|
consoleOutput.ReplaceStatistics(Header.FileName, Items.Keys.Count(), Items);
|
|
}

            // Bucket and dedupe according to the flag
            if (Header.DedupeRoms == DedupeType.Full)
                Items.BucketBy(BucketedBy.CRC, Header.DedupeRoms, norename: norename);
            else if (Header.DedupeRoms == DedupeType.Game)
                Items.BucketBy(BucketedBy.Game, Header.DedupeRoms, norename: norename);

            // Bucket roms by game name, if not already
            Items.BucketBy(BucketedBy.Game, DedupeType.None, norename: norename);

            // Output the number of items we're going to be writing
            Globals.Logger.User($"A total of {Items.TotalCount} items will be written out to '{Header.FileName}'");

            // Get the outfile names
            Dictionary<DatFormat, string> outfiles = Header.CreateOutFileNames(outDir, overwrite);
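
            // Header.DatFormat appears to be a flags value, so multiple formats
            // can be requested at once; CreateOutFileNames presumably yields one
            // output path per flagged format, renaming instead of clobbering
            // when overwrite is false.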

            try
            {
                // Write out all required formats
                Parallel.ForEach(outfiles.Keys, Globals.ParallelOptions, datFormat =>
                {
                    string outfile = outfiles[datFormat];
                    try
                    {
                        Create(datFormat, this)?.WriteToFile(outfile, ignoreblanks);
                    }
                    catch (Exception ex)
                    {
                        Globals.Logger.Error($"Datfile {outfile} could not be written out: {ex}");
                    }
                });
            }
            catch (Exception ex)
            {
                Globals.Logger.Error(ex.ToString());
                return false;
            }

            return true;
        }
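
        // Illustrative usage (a minimal sketch; assumes a DatFile that has been
        // populated elsewhere, e.g. via the parsing routines):
        //
        //     DatFile datFile = DatFile.Create(DatFormat.Logiqx);
        //     /* ... fill datFile.Items ... */
        //     bool written = datFile.Write("out", stats: true);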

        /// <summary>
        /// Create and open an output file for writing direct from a dictionary
        /// </summary>
        /// <param name="outfile">Name of the file to write to</param>
        /// <param name="ignoreblanks">True if blank roms should be skipped on output, false otherwise (default)</param>
        /// <returns>True if the DAT was written correctly, false otherwise</returns>
        public abstract bool WriteToFile(string outfile, bool ignoreblanks = false);
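
        // Each concrete format class (Logiqx, ClrMamePro, Hashfile, and so on)
        // provides its own WriteToFile implementation; Write above fans out to
        // them through the Create factory.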

        /// <summary>
        /// Process an item and correctly set the item name
        /// </summary>
        /// <param name="item">DatItem to update</param>
        /// <param name="forceRemoveQuotes">True if the Quotes flag should be ignored, false otherwise</param>
        /// <param name="forceRomName">True if the UseRomName should be always on (default), false otherwise</param>
        protected void ProcessItemName(DatItem item, bool forceRemoveQuotes, bool forceRomName = true)
        {
            string name = item.Name;

            // Backup relevant values and set new ones accordingly
            bool quotesBackup = Header.Quotes;
            bool useRomNameBackup = Header.UseRomName;
            if (forceRemoveQuotes)
                Header.Quotes = false;

            if (forceRomName)
                Header.UseRomName = true;

            // Create the proper Prefix and Postfix
            string pre = CreatePrefixPostfix(item, true);
            string post = CreatePrefixPostfix(item, false);

            // If we're in Romba mode, take care of that instead
            if (Header.Romba)
            {
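                // Romba depots store files in a nested tree keyed by SHA-1;
                // GetRombaPath presumably maps the hash to that layout
                // (e.g. aa/bb/cc/dd/<sha1>.gz), and the separator swap below
                // keeps the path forward-slashed.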
                if (item.ItemType == ItemType.Rom)
                {
                    Rom romItem = item as Rom;

                    // We can only write out if there's a SHA-1
                    if (!string.IsNullOrWhiteSpace(romItem.SHA1))
                    {
                        name = PathExtensions.GetRombaPath(romItem.SHA1).Replace('\\', '/');
                        item.Name = $"{pre}{name}{post}";
                    }
                }
                else if (item.ItemType == ItemType.Disk)
                {
                    Disk diskItem = item as Disk;

                    // We can only write out if there's a SHA-1
                    if (!string.IsNullOrWhiteSpace(diskItem.SHA1))
                    {
                        name = PathExtensions.GetRombaPath(diskItem.SHA1).Replace('\\', '/');
                        item.Name = $"{pre}{name}{post}";
                    }
                }

                return;
            }

            if (!string.IsNullOrWhiteSpace(Header.ReplaceExtension) || Header.RemoveExtension)
            {
                if (Header.RemoveExtension)
                    Header.ReplaceExtension = string.Empty;

                string dir = Path.GetDirectoryName(name);
                dir = dir.TrimStart(Path.DirectorySeparatorChar);
                name = Path.Combine(dir, Path.GetFileNameWithoutExtension(name) + Header.ReplaceExtension);
            }

            if (!string.IsNullOrWhiteSpace(Header.AddExtension))
                name += Header.AddExtension;

            if (Header.UseRomName && Header.GameName)
                name = Path.Combine(item.MachineName, name);

            // Now assign back the item name
            item.Name = pre + name + post;

            // Restore all relevant values
            if (forceRemoveQuotes)
                Header.Quotes = quotesBackup;

            if (forceRomName)
                Header.UseRomName = useRomNameBackup;
        }
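
        // Note that the backup/restore pattern above temporarily mutates shared
        // Header state (Quotes, UseRomName), so concurrent calls against the
        // same Header instance are not safe.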

        /// <summary>
        /// Create a prefix or postfix from inputs
        /// </summary>
        /// <param name="item">DatItem to create a prefix/postfix for</param>
        /// <param name="prefix">True for prefix, false for postfix</param>
        /// <returns>Sanitized string representing the postfix or prefix</returns>
        protected string CreatePrefixPostfix(DatItem item, bool prefix)
        {
            // Initialize strings
            string fix = string.Empty,
                game = item.MachineName,
                name = item.Name,
                manufacturer = item.Manufacturer,
                publisher = item.Publisher,
                category = item.Category,
                crc = string.Empty,
                md5 = string.Empty,
                ripemd160 = string.Empty,
                sha1 = string.Empty,
                sha256 = string.Empty,
                sha384 = string.Empty,
                sha512 = string.Empty,
                size = string.Empty;

            // If we have a prefix
            if (prefix)
                fix = Header.Prefix + (Header.Quotes ? "\"" : string.Empty);

            // If we have a postfix
            else
                fix = (Header.Quotes ? "\"" : string.Empty) + Header.Postfix;

            // Ensure we have the proper values for replacement
            if (item.ItemType == ItemType.Rom)
            {
                crc = ((Rom)item).CRC;
                md5 = ((Rom)item).MD5;
#if NET_FRAMEWORK
                ripemd160 = ((Rom)item).RIPEMD160;
#endif
                sha1 = ((Rom)item).SHA1;
                sha256 = ((Rom)item).SHA256;
                sha384 = ((Rom)item).SHA384;
                sha512 = ((Rom)item).SHA512;
                size = ((Rom)item).Size.ToString();
            }
            else if (item.ItemType == ItemType.Disk)
            {
                md5 = ((Disk)item).MD5;
#if NET_FRAMEWORK
                ripemd160 = ((Disk)item).RIPEMD160;
#endif
                sha1 = ((Disk)item).SHA1;
                sha256 = ((Disk)item).SHA256;
                sha384 = ((Disk)item).SHA384;
                sha512 = ((Disk)item).SHA512;
            }

            // Now do bulk replacement where possible
            fix = fix
                .Replace("%game%", game)
                .Replace("%machine%", game)
                .Replace("%name%", name)
                .Replace("%manufacturer%", manufacturer)
                .Replace("%publisher%", publisher)
                .Replace("%category%", category)
                .Replace("%crc%", crc)
                .Replace("%md5%", md5)
                .Replace("%ripemd160%", ripemd160)
                .Replace("%sha1%", sha1)
                .Replace("%sha256%", sha256)
                .Replace("%sha384%", sha384)
                .Replace("%sha512%", sha512)
                .Replace("%size%", size);

            // TODO: Add GameName logic here too?

            return fix;
        }
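
        // Illustrative example (hypothetical values): with Header.Prefix set to
        // "%crc% " and Header.Quotes enabled, a Rom whose CRC is "deadbeef"
        // produces the prefix 'deadbeef "' after the replacements above: the
        // token is substituted first, then the opening quote is appended.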

        #endregion

        #endregion // Instance Methods
    }
}