[DatFile] Add and fix sort from depot
@@ -174,7 +174,7 @@ namespace SabreTools
 
 // Create the sorting object to use and rebuild the needed files
 ArchiveScanLevel asl = ArchiveTools.GetArchiveScanLevelFromNumbers((onlyNeeded ? 0 : 1), (onlyNeeded ? 0 : 1), (onlyNeeded ? 0 : 1), (onlyNeeded ? 0 : 1));
-need.RebuildToOutput(onlyDirs, _depots.Keys.ToList()[0], _tmpdir, false /*quickScan*/, false /*date*/,
+need.RebuildFromInputs(onlyDirs, _depots.Keys.ToList()[0], _tmpdir, false /*quickScan*/, false /*date*/,
 false /*delete*/, false /*inverse*/, OutputFormat.TorrentGzip, true /*romba*/, asl, false /*updateDat*/,
 null /*headerToCheckAgainst*/, 4 /*maxDegreeOfParallelism*/, _logger);
 }
@@ -214,8 +214,8 @@ namespace SabreTools
 
 // Now scan all of those depots and rebuild
 ArchiveScanLevel asl = ArchiveTools.GetArchiveScanLevelFromNumbers(1, 1, 1, 1);
-datFile.RebuildToOutput(onlineDepots, outputFolder, _tmpdir, true /*quickscan*/, false /*date*/,
-false /*delete*/, false /*inverse*/, (copy ? OutputFormat.TorrentGzip : OutputFormat.TorrentZip), copy, asl,
+datFile.RebuildFromDepot(onlineDepots, outputFolder, _tmpdir, false /*date*/,
+false /*delete*/, false /*inverse*/, (copy ? OutputFormat.TorrentGzip : OutputFormat.TorrentZip), copy,
 false /*updateDat*/, null /*headerToCheckAgainst*/, 4 /*maxDegreeOfParallelism*/, _logger);
 }
 }
@@ -238,6 +238,26 @@ namespace SabreTools.Helper.Data
 helptext.Add(" -ud, --update-dat Output updated DAT to output directory");
 helptext.Add(" -mt={4} Amount of threads to use (-1 unlimited)");
 
+// Sort Depot
+helptext.Add(" -ssd, --sort-depot Sort input depots by a set of DATs");
+helptext.Add(" -dat= Input DAT to rebuild against");
+helptext.Add(" -out= Output directory");
+helptext.Add(" -t=, --temp= Set the temporary directory to use");
+helptext.Add(" -del, --delete Delete fully rebuilt input files");
+helptext.Add(" -in, --inverse Rebuild only files not in DAT");
+helptext.Add(" -ad, --add-date Add original dates from DAT, if possible");
+//helptext.Add(" -t7z Enable Torrent7z output");
+helptext.Add(" -tar Enable TAR output");
+helptext.Add(" -tgz Enable TorrentGZ output");
+helptext.Add(" -r, --romba Enable Romba depot dir output");
+//helptext.Add(" -tlrz Enable TorrentLRZ output");
+//helptext.Add(" -trar Enable TorrentRAR output");
+//helptext.Add(" -txz Enable TorrentXZ output");
+helptext.Add(" -tzip Enable TorrentZip output");
+helptext.Add(" -h=, --header= Set a header skipper to use, blank means all");
+helptext.Add(" -ud, --update-dat Output updated DAT to output directory");
+helptext.Add(" -mt={4} Amount of threads to use (-1 unlimited)");
+
 // Stats
 helptext.Add(" -st, --stats Get statistics on all input DATs");
 helptext.Add(" -bc, --baddump-col Add baddump stats to output");
@@ -248,10 +248,15 @@ namespace SabreTools.Helper.Dats
 
 // Get the correct dictionary based on what is available
 string key = "";
-if (_itemType == ItemType.Rom && ((Rom)this).CRC != null)
+if (_itemType == ItemType.Rom && ((Rom)this).SHA1 != null)
 {
-key = ((Rom)this).CRC;
-datdata.BucketByCRC(false, logger, false);
+key = ((Rom)this).SHA1;
+datdata.BucketBySHA1(false, logger, false);
+}
+else if (_itemType == ItemType.Disk && ((Disk)this).SHA1 != null)
+{
+key = ((Disk)this).SHA1;
+datdata.BucketBySHA1(false, logger, false);
 }
 else if (_itemType == ItemType.Rom && ((Rom)this).MD5 != null)
 {
@@ -263,15 +268,10 @@ namespace SabreTools.Helper.Dats
 key = ((Disk)this).MD5;
 datdata.BucketByMD5(false, logger, false);
 }
-else if (_itemType == ItemType.Rom && ((Rom)this).SHA1 != null)
+else if (_itemType == ItemType.Rom && ((Rom)this).CRC != null)
 {
-key = ((Rom)this).SHA1;
-datdata.BucketBySHA1(false, logger, false);
-}
-else if (_itemType == ItemType.Disk && ((Disk)this).SHA1 != null)
-{
-key = ((Disk)this).SHA1;
-datdata.BucketBySHA1(false, logger, false);
+key = ((Rom)this).CRC;
+datdata.BucketByCRC(false, logger, false);
 }
 else if (_itemType == ItemType.Rom)
 {
@@ -323,10 +323,15 @@ namespace SabreTools.Helper.Dats
 
 // Get the correct dictionary based on what is available
 string key = "";
-if (_itemType == ItemType.Rom && ((Rom)this).CRC != null)
+if (_itemType == ItemType.Rom && ((Rom)this).SHA1 != null)
 {
-key = ((Rom)this).CRC;
-datdata.BucketByCRC(false, logger, false);
+key = ((Rom)this).SHA1;
+datdata.BucketBySHA1(false, logger, false);
+}
+else if (_itemType == ItemType.Disk && ((Disk)this).SHA1 != null)
+{
+key = ((Disk)this).SHA1;
+datdata.BucketBySHA1(false, logger, false);
 }
 else if (_itemType == ItemType.Rom && ((Rom)this).MD5 != null)
 {
@@ -338,15 +343,10 @@ namespace SabreTools.Helper.Dats
 key = ((Disk)this).MD5;
 datdata.BucketByMD5(false, logger, false);
 }
-else if (_itemType == ItemType.Rom && ((Rom)this).SHA1 != null)
+else if (_itemType == ItemType.Rom && ((Rom)this).CRC != null)
 {
-key = ((Rom)this).SHA1;
-datdata.BucketBySHA1(false, logger, false);
-}
-else if (_itemType == ItemType.Disk && ((Disk)this).SHA1 != null)
-{
-key = ((Disk)this).SHA1;
-datdata.BucketBySHA1(false, logger, false);
+key = ((Rom)this).CRC;
+datdata.BucketByCRC(false, logger, false);
 }
 else if (_itemType == ItemType.Rom)
 {
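Taken together, the four hunks above invert the hash priority used when picking a bucketing key for an item: SHA-1 is now tried first, then MD5, with CRC demoted to a last resort. That lines up with the SHA-1-keyed depot layout walked by the new RebuildFromDepot method in the next hunk. A condensed, self-contained sketch of the new selection order follows; the HashedItem type and PickBucketKey helper are simplified stand-ins for illustration, not the project's actual Rom/Disk/DatFile classes.

using System;

// Simplified stand-in for an item that carries hashes; illustration only.
class HashedItem
{
    public string SHA1;
    public string MD5;
    public string CRC;
}

static class BucketKeyDemo
{
    // New priority after this commit: SHA-1, then MD5, then CRC.
    static string PickBucketKey(HashedItem item)
    {
        if (item.SHA1 != null)
            return item.SHA1; // preferred: matches the SHA-1-named depot files
        if (item.MD5 != null)
            return item.MD5;
        if (item.CRC != null)
            return item.CRC;  // CRC used to be the first choice; now it is the fallback
        return "";
    }

    static void Main()
    {
        var rom = new HashedItem
        {
            CRC = "abcd1234",
            SHA1 = "da39a3ee5e6b4b0d3255bfef95601890afd80709"
        };
        Console.WriteLine(PickBucketKey(rom)); // prints the SHA-1, not the CRC
    }
}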
@@ -20,6 +20,178 @@ namespace SabreTools.Helper.Dats
 {
 #region Rebuilding and Verifying [MODULAR DONE, FOR NOW]
 
+/// <summary>
+/// Process the DAT and find all matches in input files and folders assuming they're a depot
+/// </summary>
+/// <param name="inputs">List of input files/folders to check</param>
+/// <param name="outDir">Output directory to use to build to</param>
+/// <param name="tempDir">Temporary directory for archive extraction</param>
+/// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
+/// <param name="delete">True if input files should be deleted, false otherwise</param>
+/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
+/// <param name="outputFormat">Output format that files should be written to</param>
+/// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param>
+/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
+/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
+/// <param name="logger">Logger object for file and console output</param>
+/// <returns>True if rebuilding was a success, false otherwise</returns>
+public bool RebuildFromDepot(List<string> inputs, string outDir, string tempDir, bool date, bool delete,
+bool inverse, OutputFormat outputFormat, bool romba, bool updateDat, string headerToCheckAgainst,
+int maxDegreeOfParallelism, Logger logger)
+{
+#region Perform setup
+
+// If the DAT is not populated and inverse is not set, inform the user and quit
+if (Count == 0 && !inverse)
+{
+logger.User("No entries were found to rebuild, exiting...");
+return false;
+}
+
+// Check that the output directory exists
+if (!Directory.Exists(outDir))
+{
+Directory.CreateDirectory(outDir);
+outDir = Path.GetFullPath(outDir);
+}
+
+// Check the temp directory
+if (String.IsNullOrEmpty(tempDir))
+{
+tempDir = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName());
+}
+
+// Then create or clean the temp directory
+if (!Directory.Exists(tempDir))
+{
+Directory.CreateDirectory(tempDir);
+}
+else
+{
+FileTools.CleanDirectory(tempDir);
+}
+
+// Preload the Skipper list
+int listcount = Skipper.List.Count;
+
+#endregion
+
+bool success = true;
+
+#region Rebuild from depots in order
+
+switch (outputFormat)
+{
+case OutputFormat.Folder:
+logger.User("Rebuilding all files to directory");
+break;
+case OutputFormat.TapeArchive:
+logger.User("Rebuilding all files to TAR");
+break;
+case OutputFormat.Torrent7Zip:
+logger.User("Rebuilding all files to Torrent7Z");
+break;
+case OutputFormat.TorrentGzip:
+logger.User("Rebuilding all files to TorrentGZ");
+break;
+case OutputFormat.TorrentLrzip:
+logger.User("Rebuilding all files to TorrentLRZ");
+break;
+case OutputFormat.TorrentRar:
+logger.User("Rebuilding all files to TorrentRAR");
+break;
+case OutputFormat.TorrentXZ:
+logger.User("Rebuilding all files to TorrentXZ");
+break;
+case OutputFormat.TorrentZip:
+logger.User("Rebuilding all files to TorrentZip");
+break;
+}
+DateTime start = DateTime.Now;
+
+// Now loop through and get only directories from the input paths
+List<string> directories = new List<string>();
+foreach (string input in inputs)
+{
+// Add to the list if the input is a directory
+if (Directory.Exists(input))
+{
+logger.Verbose("Adding depot: '" + input + "'");
+directories.Add(input);
+}
+}
+
+// If we don't have any directories, we want to exit
+if (directories.Count == 0)
+{
+return success;
+}
+
+// Now that we have a list of depots, we want to sort the input DAT by SHA-1
+BucketBySHA1(false, logger, output: false);
+
+// Then we want to loop through each of the hashes and see if we can rebuild
+List<string> hashes = Keys.ToList();
+foreach (string hash in hashes)
+{
+// Pre-empt any issues that could arise from string length
+if (hash.Length != Constants.SHA1Length)
+{
+continue;
+}
+
+logger.User("Checking hash '" + hash + "'");
+
+// Get the extension path for the hash
+string subpath = Path.Combine(hash.Substring(0, 2), hash.Substring(2, 2), hash.Substring(4, 2), hash.Substring(6, 2), hash + ".gz");
+
+// Find the first depot that includes the hash
+string foundpath = null;
+foreach (string directory in directories)
+{
+if (File.Exists(Path.Combine(directory, subpath)))
+{
+foundpath = Path.Combine(directory, subpath);
+break;
+}
+}
+
+// If we didn't find a path, then we continue
+if (foundpath == null)
+{
+continue;
+}
+
+// If we have a path, we want to try to get the rom information
+Rom fileinfo = ArchiveTools.GetTorrentGZFileInfo(foundpath, logger);
+
+// If the file information is null, then we continue
+if (fileinfo == null)
+{
+continue;
+}
+
+// Otherwise, we rebuild that file to all locations that we need to
+
+RebuildIndividualFile(fileinfo, foundpath, outDir, tempDir, date, inverse, outputFormat, romba, updateDat, true /*isZip*/, headerToCheckAgainst, logger);
+}
+
+logger.User("Rebuilding complete in: " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
+
+#endregion
+
+// If we're updating the DAT, output to the rebuild directory
+if (updateDat)
+{
+_fileName = "fixDAT_" + _fileName;
+_name = "fixDAT_" + _name;
+_description = "fixDAT_" + _description;
+WriteToFile(outDir, logger);
+}
+
+return success;
+}
+
 /// <summary>
 /// Process the DAT and find all matches in input files and folders
 /// </summary>
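The heart of RebuildFromDepot is the lookup in the loop above: each SHA-1 key from the bucketed DAT is mapped to a TorrentGZ file nested under the first four byte pairs of the hash, and the first depot directory containing that path wins. Below is a minimal standalone sketch of that path convention; the DepotPathDemo/GetDepotSubpath names and the sample hash are illustrative, and only the layout mirrors the subpath expression in the diff.

using System;
using System.IO;

class DepotPathDemo
{
    // Mirrors the shape of the `subpath` expression above: the first four
    // byte pairs of the SHA-1 become nested directories, and the file itself
    // is named "<sha1>.gz".
    static string GetDepotSubpath(string sha1)
    {
        return Path.Combine(
            sha1.Substring(0, 2),
            sha1.Substring(2, 2),
            sha1.Substring(4, 2),
            sha1.Substring(6, 2),
            sha1 + ".gz");
    }

    static void Main()
    {
        string sha1 = "da39a3ee5e6b4b0d3255bfef95601890afd80709";
        // Prints da/39/a3/ee/da39a3ee5e6b4b0d3255bfef95601890afd80709.gz
        // (with the platform's separator); RebuildFromDepot resolves this
        // against each depot root in turn and takes the first hit.
        Console.WriteLine(GetDepotSubpath(sha1));
    }
}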
@@ -37,7 +209,7 @@ namespace SabreTools.Helper.Dats
 /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
 /// <param name="logger">Logger object for file and console output</param>
 /// <returns>True if rebuilding was a success, false otherwise</returns>
-public bool RebuildToOutput(List<string> inputs, string outDir, string tempDir, bool quickScan, bool date,
+public bool RebuildFromInputs(List<string> inputs, string outDir, string tempDir, bool quickScan, bool date,
 bool delete, bool inverse, OutputFormat outputFormat, bool romba, ArchiveScanLevel archiveScanLevel, bool updateDat,
 string headerToCheckAgainst, int maxDegreeOfParallelism, Logger logger)
 {
@@ -117,8 +289,8 @@ namespace SabreTools.Helper.Dats
 // If the input is a file
 if (File.Exists(input))
 {
-logger.Verbose("Checking file: '" + input + "'");
-RebuildToOutputHelper(input, outDir, tempDir, quickScan, date, delete, inverse,
+logger.User("Checking file: '" + input + "'");
+RebuildFromInputsHelper(input, outDir, tempDir, quickScan, date, delete, inverse,
 outputFormat, romba, archiveScanLevel, updateDat, headerToCheckAgainst, maxDegreeOfParallelism, logger);
 }
 
@@ -128,8 +300,8 @@ namespace SabreTools.Helper.Dats
 logger.Verbose("Checking directory: '" + input + "'");
 foreach (string file in Directory.EnumerateFiles(input, "*", SearchOption.AllDirectories))
 {
-logger.Verbose("Checking file: '" + file + "'");
-RebuildToOutputHelper(file, outDir, tempDir, quickScan, date, delete, inverse,
+logger.User("Checking file: '" + file + "'");
+RebuildFromInputsHelper(file, outDir, tempDir, quickScan, date, delete, inverse,
 outputFormat, romba, archiveScanLevel, updateDat, headerToCheckAgainst, maxDegreeOfParallelism, logger);
 }
 }
@@ -167,7 +339,7 @@ namespace SabreTools.Helper.Dats
 /// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
 /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
 /// <param name="logger">Logger object for file and console output</param>
-private void RebuildToOutputHelper(string file, string outDir, string tempDir, bool quickScan, bool date,
+private void RebuildFromInputsHelper(string file, string outDir, string tempDir, bool quickScan, bool date,
 bool delete, bool inverse, OutputFormat outputFormat, bool romba, ArchiveScanLevel archiveScanLevel, bool updateDat,
 string headerToCheckAgainst, int maxDegreeOfParallelism, Logger logger)
 {
@@ -193,7 +365,7 @@ namespace SabreTools.Helper.Dats
 if (shouldExternalProcess)
 {
 Rom rom = FileTools.GetFileInfo(file, logger, noMD5: quickScan, noSHA1: quickScan, header: headerToCheckAgainst);
-usedExternally = RebuildToOutputIndividual(rom, file, outDir, tempSubDir, date, inverse, outputFormat,
+usedExternally = RebuildIndividualFile(rom, file, outDir, tempSubDir, date, inverse, outputFormat,
 romba, updateDat, false /* isZip */, headerToCheckAgainst, logger);
 }
 
@@ -208,7 +380,7 @@ namespace SabreTools.Helper.Dats
 
 foreach (Rom rom in extracted)
 {
-usedInternally &= RebuildToOutputIndividual(rom, file, outDir, tempSubDir, date, inverse, outputFormat,
+usedInternally &= RebuildIndividualFile(rom, file, outDir, tempSubDir, date, inverse, outputFormat,
 romba, updateDat, true /* isZip */, headerToCheckAgainst, logger);
 }
 }
@@ -227,7 +399,7 @@ namespace SabreTools.Helper.Dats
 foreach (string entry in extracted)
 {
 Rom rom = FileTools.GetFileInfo(entry, logger, noMD5: quickScan, noSHA1: quickScan);
-usedInternally &= RebuildToOutputIndividual(rom, entry, outDir, tempSubDir, date, inverse, outputFormat,
+usedInternally &= RebuildIndividualFile(rom, entry, outDir, tempSubDir, date, inverse, outputFormat,
 romba, updateDat, false /* isZip */, headerToCheckAgainst, logger);
 }
 }
@@ -235,7 +407,7 @@ namespace SabreTools.Helper.Dats
 else if (File.Exists(file))
 {
 Rom rom = FileTools.GetFileInfo(file, logger, noMD5: quickScan, noSHA1: quickScan);
-usedExternally = RebuildToOutputIndividual(rom, file, outDir, tempSubDir, date, inverse, outputFormat,
+usedExternally = RebuildIndividualFile(rom, file, outDir, tempSubDir, date, inverse, outputFormat,
 romba, updateDat, false /* isZip */, headerToCheckAgainst, logger);
 }
 }
@@ -280,7 +452,7 @@ namespace SabreTools.Helper.Dats
 /// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
 /// <param name="logger">Logger object for file and console output</param>
 /// <returns>True if the file was able to be rebuilt, false otherwise</returns>
-private bool RebuildToOutputIndividual(Rom rom, string file, string outDir, string tempDir, bool date,
+private bool RebuildIndividualFile(Rom rom, string file, string outDir, string tempDir, bool date,
 bool inverse, OutputFormat outputFormat, bool romba, bool updateDat, bool isZip, string headerToCheckAgainst, Logger logger)
 {
 // Set the output value
@@ -488,6 +488,105 @@ Options:
 Once the files that were able to be rebuilt are taken care of, a DAT of the files
 that could not be matched will be output to the output directory.
 
+-ssd, --sort-depot Sort input depots by a set of DATs
+This feature allows the user to quickly rebuild based on a supplied DAT file(s). By
+default all files will be rebuilt to uncompressed folders in the output directory.
+This is different than the generic sort above as it requires that all inputs are
+Romba-compatible depots.
+
+-dat= Name of the DAT to be used for the various options
+The user-supplied DAT used to check which files need to be rebuilt. Multiple
+occurrences of this flag are allowed.
+
+-out= Set the name of the output directory
+This sets an output folder to be used when the files are created. If a path
+is not defined, the application directory is used instead.
+
+-t=, --temp= Set the name of the temporary directory
+Optionally, a temp folder can be supplied in the case the default temp directory
+(inside the running folder) is not preferred. This is used for any operations that
+require an archive to be extracted.
+
+-d, --delete Delete fully rebuilt input files
+Optionally, the input files, once processed and fully matched, can be deleted. This
+can be useful when the original file structure is no longer needed or if there is
+limited space on the source drive.
+
+-in, --inverse Match files not in the DAT
+Instead of the normal behavior of rebuilding using a DAT, this flag allows the user
+to use the DAT as a filter instead. All files that are found in the DAT will be
+skipped and everything else will be output in the selected format.
+
+-ad, --add-date Write dates for each file parsed, if available
+If this flag is set, the date in the DAT will be used for the output file
+instead of the standard date and time for TorrentZip. This will technically
+invalidate the output files as proper TorrentZip files because the date will not
+match the standard.
+
+-t7z Enable Torrent 7zip output [NOT IMPLEMENTED]
+Instead of outputting the files to folder, files will be rebuilt to Torrent7Zip (T7Z)
+files. This format is based on the LZMA container format 7zip, but with custom header
+information. This is currently unused by any major application.
+
+-tar Enable Tape ARchive output
+Instead of outputting the files to folder, files will be rebuilt to Tape ARchive (TAR)
+files. This format is a standardized storage archive without any compression, usually
+used with other compression formats around it. It is widely used in backup applications
+and source code archives.
+
+-tgz Enable Torrent GZ output
+Instead of outputting the files to folder, files will be rebuilt to TorrentGZ (TGZ)
+files. This format is based on the GZip archive format, but with custom header
+information and a file name replaced by the SHA-1 of the file inside. This is
+primarily used by external tool Romba (https://github.com/uwedeportivo/romba), but
+may be used more widely in the future.
+
+-r, --romba Enable Romba depot directory output
+As an extension of the parent flag, this outputs the TGZ files into directories
+based on the structure used by Romba. This uses nested folders using the first
+4 bytes of the SHA-1, 1 byte for each layer of the directory name. It also
+includes two auxiliary files, .romba_size and .romba_size.backup, that have the
+compressed size of the folder inside for use with Romba.
+
+-tlrz Enable Torrent Long-Range Zip output [NOT IMPLEMENTED]
+Instead of outputting the files to folder, files will be rebuilt to Torrent Long-Range
+Zip (TLRZ) files. This format is based on the LRZip file format as defined at
+https://github.com/ckolivas/lrzip but with custom header information. This is currently
+unused by any major application.
+
+-trar Enable Torrent RAR output [NOT IMPLEMENTED]
+Instead of outputting files to folder, files will be rebuilt to Torrent RAR (TRAR)
+files. This format is based on the RAR proprietary format but with custom header
+information. This is currently unused by any major application.
+
+-txz Enable Torrent XZ output [NOT IMPLEMENTED]
+Instead of outputting files to folder, files will be rebuilt to Torrent XZ (TXZ) files.
+This format is based on the LZMA container format XZ, but with custom header
+information. This is currently unused by any major application.
+
+-tzip Enable Torrent Zip output
+Instead of outputting files to folder, files will be rebuilt to TorrentZip (TZ) files.
+This format is based on the ZIP archive format, but with custom header information.
+This is primarily used by external tool RomVault (http://www.romvault.com/) and is
+already widely used.
+
+-h=, --header= Remove headers from hash calculations
+If this is set, then all files that have copier headers that are detected will
+have them removed from the hash calculation. This will allow for a headered collection
+to be hashed without possibly variant information. If a particular header skipper is
+defined, and that skipper exists, then it will be used instead of trying to find one
+that matches.
+
+-mt={4} Amount of threads to use
+Optionally, set the number of threads to use for the multithreaded operations.
+The default is 4 threads; -1 means unlimited threads created. If the user specifies
+that only 1 thread is to be used, it defaults to the original, serial implementation
+of the DFD code.
+
+-upd, --update-dat Output updated DAT
+Once the files that were able to be rebuilt are taken care of, a DAT of the files
+that could not be matched will be output to the output directory.
+
 -st, --stats Get statistics on all input DATs
 This will output by default the combined statistics for all input DAT files. The stats
 that are outputted are as follows:
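The -mt={4} entry above surfaces in code as the maxDegreeOfParallelism parameter threaded through the rebuild calls in the other hunks. As a purely illustrative aside, not taken from the SabreTools sources, the usual way such a value maps onto .NET's Task Parallel Library is via ParallelOptions, where -1 likewise means "no limit":

using System;
using System.Threading.Tasks;

// Illustrative only: shows the semantics described above (-1 = unlimited,
// 4 = default-style cap), not how SabreTools applies the value internally.
class ThreadCapDemo
{
    static void Main()
    {
        int maxDegreeOfParallelism = 4; // e.g. parsed from -mt={4}

        ParallelOptions options = new ParallelOptions
        {
            // In the TPL, -1 also means "no limit on concurrency".
            MaxDegreeOfParallelism = maxDegreeOfParallelism
        };

        Parallel.For(0, 8, options, i =>
        {
            Console.WriteLine("Rebuilding chunk " + i + " on thread " +
                Environment.CurrentManagedThreadId);
        });
    }
}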
@@ -332,7 +332,40 @@ namespace SabreTools
 }
 _logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
 
-datdata.RebuildToOutput(inputs, outDir, tempDir, quickScan, date, delete, inverse, outputFormat, romba, asl,
+datdata.RebuildFromInputs(inputs, outDir, tempDir, quickScan, date, delete, inverse, outputFormat, romba, asl,
+updateDat, headerToCheckAgainst, maxDegreeOfParallelism, _logger);
+}
+
+/// <summary>
+/// Wrap sorting files from a depot using an input DAT
+/// </summary>
+/// <param name="datfiles">Names of the DATs to compare against</param>
+/// <param name="inputs">List of input files/folders to check</param>
+/// <param name="outDir">Output directory to use to build to</param>
+/// <param name="tempDir">Temporary directory for archive extraction</param>
+/// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
+/// <param name="delete">True if input files should be deleted, false otherwise</param>
+/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
+/// <param name="outputFormat">Output format that files should be written to</param>
+/// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param>
+/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
+/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
+/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
+private static void InitSortDepot(List<string> datfiles, List<string> inputs, string outDir, string tempDir, bool date, bool delete,
+bool inverse, OutputFormat outputFormat, bool romba, bool updateDat, string headerToCheckAgainst, int maxDegreeOfParallelism)
+{
+DateTime start = DateTime.Now;
+_logger.User("Populating internal DAT...");
+
+// Add all of the input DATs into one huge internal DAT
+DatFile datdata = new DatFile();
+foreach (string datfile in datfiles)
+{
+datdata.Parse(datfile, 99, 99, _logger, keep: true, softlist: true);
+}
+_logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
+
+datdata.RebuildFromDepot(inputs, outDir, tempDir, date, delete, inverse, outputFormat, romba,
 updateDat, headerToCheckAgainst, maxDegreeOfParallelism, _logger);
 }
 
@@ -58,14 +58,15 @@ namespace SabreTools
 datFromDir = false,
 extract = false,
 restore = false,
-sort = false, // SimpleSort
+sort = false,
+sortDepot = false,
 splitByExt = false,
 splitByHash = false,
 splitByLevel = false,
 splitByType = false,
 stats = false,
 update = false,
-verify = false; // SimpleSort
+verify = false;
 
 // User flags
 bool addBlankFilesForEmptyFolder = false,
@@ -75,17 +76,17 @@ namespace SabreTools
 copyFiles = false,
 datPrefix = false,
 dedup = false,
-delete = false, // SimpleSort
+delete = false,
 enableGzip = false,
 excludeOf = false,
 hashOnly = false,
 inplace = false,
-inverse = false, // SimpleSort
+inverse = false,
 merge = false,
 noMD5 = false,
 noSHA1 = false,
 parseArchivesAsFiles = false,
-quickScan = false, // SimpleSort
+quickScan = false,
 quotes = false,
 remext = false,
 removeDateFromAutomaticName = false,
@@ -98,7 +99,7 @@ namespace SabreTools
 superdat = false,
 trim = false,
 skip = false,
-updateDat = false, // SimpleSort
+updateDat = false,
 usegame = true;
 bool? runnable = null;
 DatFormat datFormat = 0x0;
@@ -108,11 +109,11 @@ namespace SabreTools
 StatDatFormat statDatFormat = 0x0;
 
 // User inputs
-int gz = 2, // SimpleSort
+int gz = 2,
 maxParallelism = 4,
-rar = 2, // SimpleSort
-sevenzip = 1, // SimpleSort
-zip = 1; // SimpleSort
+rar = 2,
+sevenzip = 1,
+zip = 1;
 long sgt = -1,
 slt = -1,
 seq = -1;
@@ -142,7 +143,7 @@ namespace SabreTools
 url = null,
 version = null;
 List<string> crc = new List<string>();
-List<string> datfiles = new List<string>(); // SimpleSort
+List<string> datfiles = new List<string>();
 //List<string> exta = new List<string>();
 //List<string> extb = new List<string>();
 List<string> gamename = new List<string>();
@@ -205,6 +206,10 @@ namespace SabreTools
 case "--sort":
 sort = true;
 break;
+case "-ssd":
+case "--sort-depot":
+sortDepot = true;
+break;
 case "-st":
 case "--stats":
 stats = true;
@@ -991,7 +996,7 @@ namespace SabreTools
 }
 
 // If none of the feature flags is enabled, show the help screen
-if (!(datFromDir | extract | restore | sort | splitByExt | splitByHash | splitByLevel | splitByType | stats | update | verify))
+if (!(datFromDir | extract | restore | sort | sortDepot | splitByExt | splitByHash | splitByLevel | splitByType | stats | update | verify))
 {
 _logger.Error("At least one feature switch must be enabled");
 _logger.Close();
@@ -999,7 +1004,7 @@ namespace SabreTools
 }
 
 // If more than one switch is enabled, show the help screen
-if (!(datFromDir ^ extract ^ restore ^ sort ^ splitByExt ^ splitByHash ^ splitByLevel ^ splitByType ^ stats ^ update ^ verify))
+if (!(datFromDir ^ extract ^ restore ^ sort ^ sortDepot ^ splitByExt ^ splitByHash ^ splitByLevel ^ splitByType ^ stats ^ update ^ verify))
 {
 _logger.Error("Only one feature switch is allowed at a time");
 _logger.Close();
@@ -1065,6 +1070,13 @@ namespace SabreTools
 outputFormat, romba, sevenzip, gz, rar, zip, updateDat, header, maxParallelism);
 }
 
+// If we're using the sorter from depot
+else if (sortDepot)
+{
+InitSortDepot(datfiles, inputs, outDir, tempDir, addFileDates, delete, inverse,
+outputFormat, romba, updateDat, header, maxParallelism);
+}
+
 // Split a DAT by extension
 else if (splitByExt)
 {