using System;
using System.Collections.Generic;
using SabreTools.Library.Data;
using SabreTools.Library.DatFiles;
using SabreTools.Library.Tools;
#if MONO
using System.IO;
#else
using Alphaleonis.Win32.Filesystem;
using SearchOption = System.IO.SearchOption;
#endif
namespace SabreTools
{
public partial class SabreTools
{
#region Init Methods
/// <summary>
/// Wrap creating a DAT file from files or a directory in parallel
/// </summary>
/// <param name="inputs">List of input filenames</param>
/// <param name="filename">New filename</param>
/// <param name="name">New name</param>
/// <param name="description">New description</param>
/// <param name="category">New category</param>
/// <param name="version">New version</param>
/// <param name="author">New author</param>
/// <param name="email">New email</param>
/// <param name="homepage">New homepage</param>
/// <param name="url">New URL</param>
/// <param name="comment">New comment</param>
/// <param name="forcepack">String representing the forcepacking flag</param>
/// <param name="excludeOf">True if cloneof, romof, and sampleof fields should be omitted from output, false otherwise</param>
/// <param name="sceneDateStrip">True if scene-named sets have the date stripped from the beginning, false otherwise</param>
/// <param name="datFormat">DatFormat to be used for outputting the DAT</param>
/// <param name="romba">True to enable reading a directory like a Romba depot, false otherwise</param>
/// <param name="superdat">True to enable SuperDAT-style reading, false otherwise</param>
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
/// <param name="removeDateFromAutomaticName">True if the date should be omitted from the DAT, false otherwise</param>
/// <param name="archivesAsFiles">True if archives should be treated as files, false otherwise</param>
/// <param name="skipFileType">Type of files that should be skipped on scan</param>
/// <param name="addBlankFilesForEmptyFolder">True if blank items should be created for empty folders, false otherwise</param>
/// <param name="addFileDates">True if dates should be archived for all files, false otherwise</param>
/// <param name="tempDir">Name of the directory to create a temp folder in (blank is default temp directory)</param>
/// <param name="outDir">Name of the directory to output the DAT to (blank is the current directory)</param>
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
private static void InitDatFromDir(List<string> inputs,
    /* Normal DAT header info */
    string filename,
    string name,
    string description,
    string category,
    string version,
    string author,
    string email,
    string homepage,
    string url,
    string comment,
    string forcepack,
    bool excludeOf,
    bool sceneDateStrip,
    DatFormat datFormat,

    /* Standard DFD info */
    bool romba,
    bool superdat,
    Hash omitFromScan,
    bool removeDateFromAutomaticName,
    bool archivesAsFiles,
    SkipFileType skipFileType,
    bool addBlankFilesForEmptyFolder,
    bool addFileDates,

    /* Output DAT info */
    string tempDir,
    string outDir,
    bool copyFiles,
    string headerToCheckAgainst,
    bool chdsAsFiles)
{
    // Map the user-supplied forcepacking string onto the enum; anything unrecognized falls back to None
    ForcePacking fp = ForcePacking.None;
    switch (forcepack?.ToLowerInvariant())
    {
        case "zip":
            fp = ForcePacking.Zip;
            break;
        case "unzip":
            fp = ForcePacking.Unzip;
            break;
        case "none":
        default:
            fp = ForcePacking.None;
            break;
    }

    // Create a new DATFromDir object and process the inputs
    DatFile basedat = new DatFile
    {
        FileName = filename,
        Name = name,
        Description = description,
        Category = category,
        Version = version,
        Date = DateTime.Now.ToString("yyyy-MM-dd"),
        Author = author,
        Email = email,
        Homepage = homepage,
        Url = url,
        Comment = comment,
        ForcePacking = fp,
        // Zero means "no format requested", so default to Logiqx XML
        DatFormat = (datFormat == 0 ? DatFormat.Logiqx : datFormat),
        Romba = romba,
        ExcludeOf = excludeOf,
        SceneDateStrip = sceneDateStrip,
        Type = (superdat ? "SuperDAT" : ""),
    };

    // Clean the temp directory, falling back to the system temp path when blank
    tempDir = (String.IsNullOrWhiteSpace(tempDir) ? Path.GetTempPath() : tempDir);

    // For each input directory, create a DAT
    foreach (string path in inputs)
    {
        if (Directory.Exists(path) || File.Exists(path))
        {
            // Clone the base Dat for information
            DatFile datdata = new DatFile(basedat);
            string basePath = Path.GetFullPath(path);

            bool success = datdata.PopulateFromDir(basePath, omitFromScan, removeDateFromAutomaticName, archivesAsFiles,
                skipFileType, addBlankFilesForEmptyFolder, addFileDates, tempDir, copyFiles, headerToCheckAgainst, chdsAsFiles);

            // If it was a success, write the DAT out
            if (success)
            {
                datdata.WriteToFile(outDir);
            }
            // Otherwise, show the help
            else
            {
                Console.WriteLine();
                _help.OutputIndividualFeature("DATFromDir");
            }
        }
    }
}
/// <summary>
/// Wrap extracting headers
/// </summary>
/// <param name="inputs">Input file or folder names</param>
/// <param name="outDir">Output directory to write new files to, blank defaults to rom folder</param>
/// <param name="nostore">True if headers should not be stored in the database, false otherwise</param>
private static void InitExtractRemoveHeader(List<string> inputs, string outDir, bool nostore)
{
    // Get only files from the inputs
    List<string> files = Utilities.GetOnlyFilesFromInputs(inputs);

    // Detect the correct skipper for each file and strip its header
    foreach (string file in files)
    {
        Utilities.DetectSkipperAndTransform(file, outDir, nostore);
    }
}
/// <summary>
/// Wrap replacing headers
/// </summary>
/// <param name="inputs">Input file or folder names</param>
/// <param name="outDir">Output directory to write new files to, blank defaults to rom folder</param>
private static void InitReplaceHeader(List<string> inputs, string outDir)
{
    // Get only files from the inputs
    List<string> files = Utilities.GetOnlyFilesFromInputs(inputs);

    // Restore the previously-stored header onto each file
    foreach (string file in files)
    {
        Utilities.RestoreHeader(file, outDir);
    }
}
/// <summary>
/// Wrap sorting files using an input DAT
/// </summary>
/// <param name="datfiles">Names of the DATs to compare against</param>
/// <param name="inputs">List of input files/folders to check</param>
/// <param name="outDir">Output directory to use to build to</param>
/// <param name="depot">True if the input directories are treated as romba depots, false otherwise</param>
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
/// <param name="delete">True if input files should be deleted, false otherwise</param>
/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
/// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param>
/// <param name="sevenzip">Integer representing the archive handling level for 7z</param>
/// <param name="gz">Integer representing the archive handling level for GZip</param>
/// <param name="rar">Integer representing the archive handling level for RAR</param>
/// <param name="zip">Integer representing the archive handling level for Zip</param>
/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
private static void InitSort(List<string> datfiles, List<string> inputs, string outDir, bool depot, bool quickScan, bool date, bool delete,
    bool inverse, OutputFormat outputFormat, bool romba, int sevenzip, int gz, int rar, int zip, bool updateDat, string headerToCheckAgainst,
    SplitType splitType, bool chdsAsFiles)
{
    // Get the archive scanning level
    ArchiveScanLevel asl = Utilities.GetArchiveScanLevelFromNumbers(sevenzip, gz, rar, zip);

    // Get a list of files from the input datfiles
    datfiles = Utilities.GetOnlyFilesFromInputs(datfiles);

    InternalStopwatch watch = new InternalStopwatch("Populating internal DAT");

    // Add all of the input DATs into one huge internal DAT
    DatFile datdata = new DatFile();
    foreach (string datfile in datfiles)
    {
        datdata.Parse(datfile, 99, 99, splitType, keep: true, useTags: true);
    }

    watch.Stop();

    // If we have the depot flag, respect it
    if (depot)
    {
        datdata.RebuildDepot(inputs, outDir, date, delete, inverse, outputFormat, romba,
            updateDat, headerToCheckAgainst);
    }
    else
    {
        datdata.RebuildGeneric(inputs, outDir, quickScan, date, delete, inverse, outputFormat, romba, asl,
            updateDat, headerToCheckAgainst, chdsAsFiles);
    }
}
/// <summary>
/// Wrap splitting a DAT by any known type
/// </summary>
/// <param name="inputs">List of inputs to be used</param>
/// <param name="outDir">Output directory for the split files</param>
/// <param name="inplace">True if files should be written to the source folders, false otherwise</param>
/// <param name="splitType">Type of split to perform, if any</param>
/// <param name="exta">First extension to split on (Extension Split only)</param>
/// <param name="extb">Second extension to split on (Extension Split only)</param>
/// <param name="shortname">True if short filenames should be used, false otherwise (Level Split only)</param>
/// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise (Level Split only)</param>
private static void InitSplit(List<string> inputs, string outDir, bool inplace, ExternalSplitType splitType,
    List<string> exta, List<string> extb, bool shortname, bool basedat)
{
    // If we somehow have the "none" split type, return
    if (splitType == ExternalSplitType.None)
    {
        return;
    }

    // Get only files from the inputs
    List<string> files = Utilities.GetOnlyFilesFromInputs(inputs, appendparent: true);

    // Loop over the input files
    foreach (string file in files)
    {
        // Split the input filename on the '¬' marker added by appendparent
        string[] splitpath = file.Split('¬');

        // Create and fill the new DAT
        DatFile datFile = new DatFile();
        datFile.Parse(splitpath[0], 0, 0);

        // Get the output directory
        // NOTE(review): outDir is reassigned here, so later iterations see the
        // path computed for earlier files — confirm GetOutputPath is idempotent
        outDir = Utilities.GetOutputPath(outDir, file, inplace, splitpath: true);

        // Split and write the DAT
        switch (splitType)
        {
            case ExternalSplitType.Extension:
                datFile.SplitByExtension(outDir, inplace, exta, extb);
                break;
            case ExternalSplitType.Hash:
                datFile.SplitByHash(outDir, inplace);
                break;
            case ExternalSplitType.Level:
                datFile.SplitByLevel(outDir, inplace, shortname, basedat);
                break;
            case ExternalSplitType.Type:
                datFile.SplitByType(outDir, inplace);
                break;
        }
    }
}
/// <summary>
/// Wrap getting statistics on a DAT or folder of DATs
/// </summary>
/// <param name="inputs">List of inputs to be used</param>
/// <param name="filename">Name of the file to output to, blank for default</param>
/// <param name="outDir">Output directory for the report files</param>
/// <param name="single">True to show individual DAT statistics, false otherwise</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
/// <param name="statDatFormat">Set the statistics output format to use</param>
private static void InitStats(List<string> inputs, string filename, string outDir, bool single, bool baddumpCol, bool nodumpCol,
    StatReportFormat statDatFormat)
{
    // Delegate directly to the DatFile statistics writer
    DatFile.OutputStats(inputs, filename, outDir, single, baddumpCol, nodumpCol, statDatFormat);
}
/// <summary>
/// Wrap converting and updating DAT file from any format to any format
/// </summary>
/// <param name="inputPaths">List of input filenames</param>
/// <param name="basePaths">List of base filenames</param>
/// <param name="filename">New filename</param>
/// <param name="name">New name</param>
/// <param name="description">New description</param>
/// <param name="rootdir">New rootdir</param>
/// <param name="category">New category</param>
/// <param name="version">New version</param>
/// <param name="date">New date</param>
/// <param name="author">New author</param>
/// <param name="email">New email</param>
/// <param name="homepage">New homepage</param>
/// <param name="url">New URL</param>
/// <param name="comment">New comment</param>
/// <param name="header">New header</param>
/// <param name="superdat">True to set SuperDAT type, false otherwise</param>
/// <param name="forcemerge">None, Split, Full</param>
/// <param name="forcend">None, Obsolete, Required, Ignore</param>
/// <param name="forcepack">None, Zip, Unzip</param>
/// <param name="excludeOf">True if cloneof, romof, and sampleof fields should be omitted from output, false otherwise</param>
/// <param name="sceneDateStrip">True if scene-named sets have the date stripped from the beginning, false otherwise</param>
/// <param name="datFormat">Non-zero flag for output format, zero otherwise for default</param>
/// <param name="usegame">True if games are to be used in output, false if roms are</param>
/// <param name="prefix">Generic prefix to be added to each line</param>
/// <param name="postfix">Generic postfix to be added to each line</param>
/// <param name="quotes">Add quotes to each item</param>
/// <param name="repext">Replace all extensions with another</param>
/// <param name="addext">Add an extension to all items</param>
/// <param name="remext">Remove all extensions</param>
/// <param name="datprefix">Add the dat name as a directory prefix</param>
/// <param name="romba">Output files in romba format</param>
/// <param name="merge">True if input files should be merged into a single file, false otherwise</param>
/// <param name="diffMode">Non-zero flag for diffing mode, zero otherwise</param>
/// <param name="inplace">True if the cascade-diffed files should overwrite their inputs, false otherwise</param>
/// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
/// <param name="bare">True if the date should not be appended to the default name, false otherwise [OBSOLETE]</param>
/// <param name="filter">Pre-populated filter object for DAT filtering</param>
/// <param name="oneGameOneRegion">True if the outputs should be created in 1G1R mode, false otherwise</param>
/// <param name="regions">List of regions in the order they should be used, blank for default</param>
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
/// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param>
/// <param name="outDir">Optional param for output directory</param>
/// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
/// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
/// <param name="descAsName">True if descriptions should be used as names, false otherwise (default)</param>
/// <param name="dedup">Dedupe type to use for DAT processing</param>
/// <param name="stripHash">StripHash that represents the hash(es) that you want to remove from the output</param>
private static void InitUpdate(
    List<string> inputPaths,
    List<string> basePaths,

    /* Normal DAT header info */
    string filename,
    string name,
    string description,
    string rootdir,
    string category,
    string version,
    string date,
    string author,
    string email,
    string homepage,
    string url,
    string comment,
    string header,
    bool superdat,
    string forcemerge,
    string forcend,
    string forcepack,
    bool excludeOf,
    bool sceneDateStrip,
    DatFormat datFormat,

    /* Missfile-specific DAT info */
    bool usegame,
    string prefix,
    string postfix,
    bool quotes,
    string repext,
    string addext,
    bool remext,
    bool datprefix,
    bool romba,

    /* Merging and Diffing info */
    bool merge,
    UpdateMode diffMode,
    bool inplace,
    bool skip,
    bool bare,

    /* Filtering info */
    Filter filter,
    bool oneGameOneRegion,
    List<string> regions,

    /* Trimming info */
    SplitType splitType,
    bool trim,
    bool single,
    string root,

    /* Output DAT info */
    string outDir,
    bool clean,
    bool remUnicode,
    bool descAsName,
    DedupeType dedup,
    Hash stripHash)
{
    // Set the special flags, warning on any unrecognized user input
    ForceMerging fm = ForceMerging.None;
    if (!String.IsNullOrWhiteSpace(forcemerge))
    {
        switch (forcemerge.ToLowerInvariant())
        {
            case "none":
                fm = ForceMerging.None;
                break;
            case "split":
                fm = ForceMerging.Split;
                break;
            case "full":
                fm = ForceMerging.Full;
                break;
            default:
                Globals.Logger.Warning("{0} is not a valid merge flag", forcemerge);
                break;
        }
    }

    ForceNodump fn = ForceNodump.None;
    if (!String.IsNullOrWhiteSpace(forcend))
    {
        switch (forcend.ToLowerInvariant())
        {
            case "none":
                fn = ForceNodump.None;
                break;
            case "obsolete":
                fn = ForceNodump.Obsolete;
                break;
            case "required":
                fn = ForceNodump.Required;
                break;
            case "ignore":
                fn = ForceNodump.Ignore;
                break;
            default:
                Globals.Logger.Warning("{0} is not a valid nodump flag", forcend);
                break;
        }
    }

    ForcePacking fp = ForcePacking.None;
    if (!String.IsNullOrWhiteSpace(forcepack))
    {
        switch (forcepack.ToLowerInvariant())
        {
            case "none":
                fp = ForcePacking.None;
                break;
            case "zip":
                fp = ForcePacking.Zip;
                break;
            case "unzip":
                fp = ForcePacking.Unzip;
                break;
            default:
                Globals.Logger.Warning("{0} is not a valid packing flag", forcepack);
                break;
        }
    }

    // Set the 1G1R regions alphabetically if not already set
    if (regions == null || regions.Count == 0)
    {
        regions = new List<string>()
        {
            "australia",
            "canada",
            "china",
            "denmark",
            "europe",
            "finland",
            "france",
            "germany",
            "greece",
            "italy",
            "japan",
            "korea",
            "netherlands",
            "norway",
            "russia",
            "spain",
            "sweden",
            "usa",
            "usa, australia",
            "usa, europe",
            "world",
        };
    }

    // Normalize the extensions to always carry a leading dot
    // (null-safe: a null extension is left untouched instead of throwing)
    addext = (String.IsNullOrWhiteSpace(addext) || addext.StartsWith(".") ? addext : "." + addext);
    repext = (String.IsNullOrWhiteSpace(repext) || repext.StartsWith(".") ? repext : "." + repext);

    // If we're in merge or diff mode and the names aren't set, set defaults
    if (merge || diffMode != 0)
    {
        // Get the values that will be used
        if (String.IsNullOrWhiteSpace(date))
        {
            date = DateTime.Now.ToString("yyyy-MM-dd");
        }
        if (String.IsNullOrWhiteSpace(name))
        {
            name = (diffMode != 0 ? "DiffDAT" : "MergeDAT") + (superdat ? "-SuperDAT" : "") + (dedup != DedupeType.None ? "-deduped" : "");
        }
        if (String.IsNullOrWhiteSpace(description))
        {
            description = (diffMode != 0 ? "DiffDAT" : "MergeDAT") + (superdat ? "-SuperDAT" : "") + (dedup != DedupeType.None ? " - deduped" : "");
            if (!bare)
            {
                description += " (" + date + ")";
            }
        }
        if (String.IsNullOrWhiteSpace(category) && diffMode != 0)
        {
            category = "DiffDAT";
        }
        if (String.IsNullOrWhiteSpace(author))
        {
            author = "SabreTools";
        }
    }

    // Populate the DatData object
    DatFile userInputDat = new DatFile
    {
        FileName = filename,
        Name = name,
        Description = description,
        RootDir = rootdir,
        Category = category,
        Version = version,
        Date = date,
        Author = author,
        Email = email,
        Homepage = homepage,
        Url = url,
        Comment = comment,
        Header = header,
        Type = (superdat ? "SuperDAT" : null),
        ForceMerging = fm,
        ForceNodump = fn,
        ForcePacking = fp,
        DedupeRoms = dedup,
        ExcludeOf = excludeOf,
        SceneDateStrip = sceneDateStrip,
        DatFormat = datFormat,
        StripHash = stripHash,
        OneGameOneRegion = oneGameOneRegion,
        Regions = regions,

        UseGame = usegame,
        Prefix = prefix,
        Postfix = postfix,
        Quotes = quotes,
        RepExt = repext,
        AddExt = addext,
        RemExt = remext,
        GameName = datprefix,
        Romba = romba,
    };

    // Hand off to the DatFile logic to figure out which update path to take
    userInputDat.DetermineUpdateType(inputPaths, basePaths, outDir, merge, diffMode, inplace, skip, bare, clean,
        remUnicode, descAsName, filter, splitType, trim, single, root);
}
/// <summary>
/// Wrap verifying files using an input DAT
/// </summary>
/// <param name="datfiles">Names of the DATs to compare against</param>
/// <param name="inputs">Input directories to compare against</param>
/// <param name="depot">True if the input directories are treated as romba depots, false otherwise</param>
/// <param name="hashOnly">True if only hashes should be checked, false for full file information</param>
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
private static void InitVerify(List<string> datfiles, List<string> inputs, bool depot, bool hashOnly, bool quickScan,
    string headerToCheckAgainst, SplitType splitType, bool chdsAsFiles)
{
    // Get the archive scanning level (all archive types enabled for verification)
    ArchiveScanLevel asl = Utilities.GetArchiveScanLevelFromNumbers(1, 1, 1, 1);

    // Get a list of files from the input datfiles
    datfiles = Utilities.GetOnlyFilesFromInputs(datfiles);

    InternalStopwatch watch = new InternalStopwatch("Populating internal DAT");

    // Add all of the input DATs into one huge internal DAT
    DatFile datdata = new DatFile();
    foreach (string datfile in datfiles)
    {
        datdata.Parse(datfile, 99, 99, splitType, keep: true, useTags: true);
    }

    watch.Stop();

    // If we have the depot flag, respect it
    if (depot)
    {
        datdata.VerifyDepot(inputs, headerToCheckAgainst);
    }
    else
    {
        datdata.VerifyGeneric(inputs, hashOnly, quickScan, headerToCheckAgainst, chdsAsFiles);
    }
}
#endregion
}
}