using System;
using System.Collections.Generic;
using System.Linq;
using SabreTools.Helper.Data;
using SabreTools.Helper.Skippers;
using SabreTools.Helper.Tools;
#if MONO
using System.IO;
#else
using Alphaleonis.Win32.Filesystem;
using SearchOption = System.IO.SearchOption;
#endif
namespace SabreTools.Helper.Dats
{
	/*
	 * TODO: Delete flags - Remove files from archive if only some are used (rebuild to TZip)
	 */
public partial class DatFile
{
#region Rebuilding and Verifying [ MODULAR DONE , FOR NOW ]
2017-01-30 12:59:04 -08:00
/// <summary>
/// Process the DAT and find all matches in input files and folders assuming they're a depot
/// </summary>
/// <param name="inputs">List of input files/folders to check</param>
/// <param name="outDir">Output directory to use to build to</param>
/// <param name="tempDir">Temporary directory for archive extraction</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
/// <param name="delete">True if input files should be deleted, false otherwise</param>
/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
/// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param>
/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <returns>True if rebuilding was a success, false otherwise</returns>
2017-01-31 23:18:41 -08:00
public bool RebuildDepot ( List < string > inputs , string outDir , string tempDir , bool date , bool delete ,
2017-03-01 21:26:27 -08:00
bool inverse , OutputFormat outputFormat , bool romba , bool updateDat , string headerToCheckAgainst )
2017-01-30 12:59:04 -08:00
{
#region Perform setup
// If the DAT is not populated and inverse is not set, inform the user and quit
if ( Count = = 0 & & ! inverse )
{
2017-03-01 21:26:27 -08:00
Globals . Logger . User ( "No entries were found to rebuild, exiting..." ) ;
2017-01-30 12:59:04 -08:00
return false ;
}
// Check that the output directory exists
if ( ! Directory . Exists ( outDir ) )
{
Directory . CreateDirectory ( outDir ) ;
outDir = Path . GetFullPath ( outDir ) ;
}
// Check the temp directory
if ( String . IsNullOrEmpty ( tempDir ) )
{
tempDir = Path . Combine ( Path . GetTempPath ( ) , Path . GetRandomFileName ( ) ) ;
}
// Then create or clean the temp directory
if ( ! Directory . Exists ( tempDir ) )
{
Directory . CreateDirectory ( tempDir ) ;
}
else
{
FileTools . CleanDirectory ( tempDir ) ;
}
2017-02-23 20:34:43 -08:00
// Now we want to get forcepack flag if it's not overridden
if ( outputFormat = = OutputFormat . Folder & & ForcePacking ! = ForcePacking . None )
{
switch ( ForcePacking )
{
case ForcePacking . Zip :
outputFormat = OutputFormat . TorrentZip ;
break ;
2017-02-23 21:34:40 -08:00
case ForcePacking . Unzip :
2017-02-23 20:34:43 -08:00
outputFormat = OutputFormat . Folder ;
break ;
}
}
2017-01-30 12:59:04 -08:00
// Preload the Skipper list
int listcount = Skipper . List . Count ;
#endregion
bool success = true ;
#region Rebuild from depots in order
switch ( outputFormat )
{
case OutputFormat . Folder :
2017-03-01 21:26:27 -08:00
Globals . Logger . User ( "Rebuilding all files to directory" ) ;
2017-01-30 12:59:04 -08:00
break ;
case OutputFormat . TapeArchive :
2017-03-01 21:26:27 -08:00
Globals . Logger . User ( "Rebuilding all files to TAR" ) ;
2017-01-30 12:59:04 -08:00
break ;
case OutputFormat . Torrent7Zip :
2017-03-01 21:26:27 -08:00
Globals . Logger . User ( "Rebuilding all files to Torrent7Z" ) ;
2017-01-30 12:59:04 -08:00
break ;
case OutputFormat . TorrentGzip :
2017-03-01 21:26:27 -08:00
Globals . Logger . User ( "Rebuilding all files to TorrentGZ" ) ;
2017-01-30 12:59:04 -08:00
break ;
case OutputFormat . TorrentLrzip :
2017-03-01 21:26:27 -08:00
Globals . Logger . User ( "Rebuilding all files to TorrentLRZ" ) ;
2017-01-30 12:59:04 -08:00
break ;
case OutputFormat . TorrentRar :
2017-03-01 21:26:27 -08:00
Globals . Logger . User ( "Rebuilding all files to TorrentRAR" ) ;
2017-01-30 12:59:04 -08:00
break ;
case OutputFormat . TorrentXZ :
2017-03-01 21:26:27 -08:00
Globals . Logger . User ( "Rebuilding all files to TorrentXZ" ) ;
2017-01-30 12:59:04 -08:00
break ;
case OutputFormat . TorrentZip :
2017-03-01 21:26:27 -08:00
Globals . Logger . User ( "Rebuilding all files to TorrentZip" ) ;
2017-01-30 12:59:04 -08:00
break ;
}
DateTime start = DateTime . Now ;
// Now loop through and get only directories from the input paths
List < string > directories = new List < string > ( ) ;
foreach ( string input in inputs )
{
// Add to the list if the input is a directory
if ( Directory . Exists ( input ) )
{
2017-03-01 21:26:27 -08:00
Globals . Logger . Verbose ( "Adding depot: '" + input + "'" ) ;
2017-01-30 12:59:04 -08:00
directories . Add ( input ) ;
}
}
// If we don't have any directories, we want to exit
if ( directories . Count = = 0 )
{
return success ;
}
// Now that we have a list of depots, we want to sort the input DAT by SHA-1
2017-03-01 21:26:27 -08:00
BucketBy ( SortedBy . SHA1 , false /* mergeroms */ ) ;
2017-01-30 12:59:04 -08:00
// Then we want to loop through each of the hashes and see if we can rebuild
List < string > hashes = Keys . ToList ( ) ;
foreach ( string hash in hashes )
{
// Pre-empt any issues that could arise from string length
if ( hash . Length ! = Constants . SHA1Length )
{
continue ;
}
2017-03-01 21:26:27 -08:00
Globals . Logger . User ( "Checking hash '" + hash + "'" ) ;
2017-01-30 12:59:04 -08:00
// Get the extension path for the hash
2017-01-30 23:16:05 -08:00
string subpath = Style . GetRombaPath ( hash ) ;
2017-01-30 12:59:04 -08:00
// Find the first depot that includes the hash
string foundpath = null ;
foreach ( string directory in directories )
{
if ( File . Exists ( Path . Combine ( directory , subpath ) ) )
{
foundpath = Path . Combine ( directory , subpath ) ;
break ;
}
}
// If we didn't find a path, then we continue
if ( foundpath = = null )
{
continue ;
}
// If we have a path, we want to try to get the rom information
2017-03-01 21:26:27 -08:00
Rom fileinfo = ArchiveTools . GetTorrentGZFileInfo ( foundpath ) ;
2017-01-30 12:59:04 -08:00
// If the file information is null, then we continue
if ( fileinfo = = null )
{
continue ;
}
// Otherwise, we rebuild that file to all locations that we need to
2017-03-01 20:28:32 -08:00
RebuildIndividualFile ( fileinfo , foundpath , outDir , tempDir , date , inverse , outputFormat , romba ,
2017-03-01 21:26:27 -08:00
updateDat , true /*isZip*/ , headerToCheckAgainst ) ;
2017-01-30 12:59:04 -08:00
}
2017-03-01 21:26:27 -08:00
Globals . Logger . User ( "Rebuilding complete in: " + DateTime . Now . Subtract ( start ) . ToString ( @"hh\:mm\:ss\.fffff" ) ) ;
2017-01-30 12:59:04 -08:00
#endregion
// If we're updating the DAT, output to the rebuild directory
if ( updateDat )
{
_fileName = "fixDAT_" + _fileName ;
_name = "fixDAT_" + _name ;
_description = "fixDAT_" + _description ;
2017-03-01 21:26:27 -08:00
WriteToFile ( outDir ) ;
2017-01-30 12:59:04 -08:00
}
return success ;
}
2016-11-04 11:57:32 -07:00
/// <summary>
/// Process the DAT and find all matches in input files and folders
/// </summary>
/// <param name="inputs">List of input files/folders to check</param>
/// <param name="outDir">Output directory to use to build to</param>
/// <param name="tempDir">Temporary directory for archive extraction</param>
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
/// <param name="delete">True if input files should be deleted, false otherwise</param>
/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
/// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param>
/// <param name="archiveScanLevel">ArchiveScanLevel representing the archive handling levels</param>
/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <returns>True if rebuilding was a success, false otherwise</returns>
2017-01-31 23:18:41 -08:00
public bool RebuildGeneric ( List < string > inputs , string outDir , string tempDir , bool quickScan , bool date ,
2016-11-04 11:57:32 -07:00
bool delete , bool inverse , OutputFormat outputFormat , bool romba , ArchiveScanLevel archiveScanLevel , bool updateDat ,
2017-03-01 21:26:27 -08:00
string headerToCheckAgainst )
2016-11-04 11:57:32 -07:00
{
#region Perform setup
// If the DAT is not populated and inverse is not set, inform the user and quit
2016-11-08 15:50:27 -08:00
if ( Count = = 0 & & ! inverse )
2016-11-04 11:57:32 -07:00
{
2017-03-01 21:26:27 -08:00
Globals . Logger . User ( "No entries were found to rebuild, exiting..." ) ;
2016-11-04 11:57:32 -07:00
return false ;
}
// Check that the output directory exists
if ( ! Directory . Exists ( outDir ) )
{
Directory . CreateDirectory ( outDir ) ;
outDir = Path . GetFullPath ( outDir ) ;
}
// Check the temp directory
if ( String . IsNullOrEmpty ( tempDir ) )
{
tempDir = Path . Combine ( Path . GetTempPath ( ) , Path . GetRandomFileName ( ) ) ;
}
// Then create or clean the temp directory
if ( ! Directory . Exists ( tempDir ) )
{
Directory . CreateDirectory ( tempDir ) ;
}
else
{
FileTools . CleanDirectory ( tempDir ) ;
}
2017-02-23 20:34:43 -08:00
// Now we want to get forcepack flag if it's not overridden
if ( outputFormat = = OutputFormat . Folder & & ForcePacking ! = ForcePacking . None )
{
switch ( ForcePacking )
{
case ForcePacking . Zip :
outputFormat = OutputFormat . TorrentZip ;
break ;
2017-02-23 21:34:40 -08:00
case ForcePacking . Unzip :
2017-02-23 20:34:43 -08:00
outputFormat = OutputFormat . Folder ;
break ;
}
}
2016-11-04 11:57:32 -07:00
// Preload the Skipper list
int listcount = Skipper . List . Count ;
#endregion
2017-01-18 10:20:00 -08:00
bool success = true ;
#region Rebuild from sources in order
switch ( outputFormat )
{
case OutputFormat . Folder :
2017-03-01 21:26:27 -08:00
Globals . Logger . User ( "Rebuilding all files to directory" ) ;
2017-01-18 10:20:00 -08:00
break ;
case OutputFormat . TapeArchive :
2017-03-01 21:26:27 -08:00
Globals . Logger . User ( "Rebuilding all files to TAR" ) ;
2017-01-18 10:20:00 -08:00
break ;
case OutputFormat . Torrent7Zip :
2017-03-01 21:26:27 -08:00
Globals . Logger . User ( "Rebuilding all files to Torrent7Z" ) ;
2017-01-18 10:20:00 -08:00
break ;
case OutputFormat . TorrentGzip :
2017-03-01 21:26:27 -08:00
Globals . Logger . User ( "Rebuilding all files to TorrentGZ" ) ;
2017-01-18 10:20:00 -08:00
break ;
case OutputFormat . TorrentLrzip :
2017-03-01 21:26:27 -08:00
Globals . Logger . User ( "Rebuilding all files to TorrentLRZ" ) ;
2017-01-18 10:20:00 -08:00
break ;
case OutputFormat . TorrentRar :
2017-03-01 21:26:27 -08:00
Globals . Logger . User ( "Rebuilding all files to TorrentRAR" ) ;
2017-01-18 10:20:00 -08:00
break ;
case OutputFormat . TorrentXZ :
2017-03-01 21:26:27 -08:00
Globals . Logger . User ( "Rebuilding all files to TorrentXZ" ) ;
2017-01-18 10:20:00 -08:00
break ;
case OutputFormat . TorrentZip :
2017-03-01 21:26:27 -08:00
Globals . Logger . User ( "Rebuilding all files to TorrentZip" ) ;
2017-01-18 10:20:00 -08:00
break ;
}
DateTime start = DateTime . Now ;
// Now loop through all of the files in all of the inputs
foreach ( string input in inputs )
{
// If the input is a file
if ( File . Exists ( input ) )
{
2017-03-01 21:26:27 -08:00
Globals . Logger . User ( "Checking file: '" + input + "'" ) ;
2017-01-31 23:18:41 -08:00
RebuildGenericHelper ( input , outDir , tempDir , quickScan , date , delete , inverse ,
2017-03-01 21:26:27 -08:00
outputFormat , romba , archiveScanLevel , updateDat , headerToCheckAgainst ) ;
2017-01-18 10:20:00 -08:00
}
// If the input is a directory
else if ( Directory . Exists ( input ) )
{
2017-03-01 21:26:27 -08:00
Globals . Logger . Verbose ( "Checking directory: '" + input + "'" ) ;
2017-01-18 11:11:57 -08:00
foreach ( string file in Directory . EnumerateFiles ( input , "*" , SearchOption . AllDirectories ) )
2017-01-18 10:20:00 -08:00
{
2017-03-01 21:26:27 -08:00
Globals . Logger . User ( "Checking file: '" + file + "'" ) ;
2017-01-31 23:18:41 -08:00
RebuildGenericHelper ( file , outDir , tempDir , quickScan , date , delete , inverse ,
2017-03-01 21:26:27 -08:00
outputFormat , romba , archiveScanLevel , updateDat , headerToCheckAgainst ) ;
2017-01-18 10:20:00 -08:00
}
}
}
2017-03-01 21:26:27 -08:00
Globals . Logger . User ( "Rebuilding complete in: " + DateTime . Now . Subtract ( start ) . ToString ( @"hh\:mm\:ss\.fffff" ) ) ;
2017-01-18 10:20:00 -08:00
#endregion
2017-01-27 17:33:08 -08:00
// If we're updating the DAT, output to the rebuild directory
if ( updateDat )
{
_fileName = "fixDAT_" + _fileName ;
_name = "fixDAT_" + _name ;
_description = "fixDAT_" + _description ;
2017-03-01 21:26:27 -08:00
WriteToFile ( outDir ) ;
2017-01-27 17:33:08 -08:00
}
2017-01-18 10:20:00 -08:00
return success ;
}
/// <summary>
/// Attempt to add a file to the output if it matches
/// </summary>
/// <param name="file">Name of the file to process</param>
/// <param name="outDir">Output directory to use to build to</param>
/// <param name="tempDir">Temporary directory for archive extraction</param>
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
/// <param name="delete">True if input files should be deleted, false otherwise</param>
/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
/// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param>
/// <param name="archiveScanLevel">ArchiveScanLevel representing the archive handling levels</param>
/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
2017-01-31 23:18:41 -08:00
private void RebuildGenericHelper ( string file , string outDir , string tempDir , bool quickScan , bool date ,
2017-01-18 10:20:00 -08:00
bool delete , bool inverse , OutputFormat outputFormat , bool romba , ArchiveScanLevel archiveScanLevel , bool updateDat ,
2017-03-01 21:26:27 -08:00
string headerToCheckAgainst )
2017-01-18 10:20:00 -08:00
{
// If we somehow have a null filename, return
if ( file = = null )
{
return ;
}
// Define the temporary directory
string tempSubDir = Path . GetFullPath ( Path . Combine ( tempDir , Path . GetRandomFileName ( ) ) ) + Path . DirectorySeparatorChar ;
2017-01-26 15:49:46 -08:00
// Set the deletion variables
bool usedExternally = false ;
bool usedInternally = false ;
2017-02-24 12:38:40 -08:00
// Get the required scanning level for the file
2017-03-01 21:26:27 -08:00
ArchiveTools . GetInternalExternalProcess ( file , archiveScanLevel , out bool shouldExternalProcess , out bool shouldInternalProcess ) ;
2017-01-18 10:20:00 -08:00
// If we're supposed to scan the file externally
if ( shouldExternalProcess )
{
2017-02-27 22:38:47 -08:00
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
2017-03-01 21:26:27 -08:00
Rom rom = FileTools . GetFileInfo ( file , omitFromScan : ( quickScan ? Hash . SecureHashes : Hash . DeepHashes ) , header : headerToCheckAgainst ) ;
2017-01-30 12:59:04 -08:00
usedExternally = RebuildIndividualFile ( rom , file , outDir , tempSubDir , date , inverse , outputFormat ,
2017-03-01 21:26:27 -08:00
romba , updateDat , false /* isZip */ , headerToCheckAgainst ) ;
2017-01-18 10:20:00 -08:00
}
// If we're supposed to scan the file internally
if ( shouldInternalProcess )
{
// If quickscan is set, do so
if ( quickScan )
{
2017-03-01 21:26:27 -08:00
List < Rom > extracted = ArchiveTools . GetArchiveFileInfo ( file ) ;
2017-01-26 15:49:46 -08:00
usedInternally = true ;
2017-01-18 10:20:00 -08:00
foreach ( Rom rom in extracted )
{
2017-01-30 12:59:04 -08:00
usedInternally & = RebuildIndividualFile ( rom , file , outDir , tempSubDir , date , inverse , outputFormat ,
2017-03-01 21:26:27 -08:00
romba , updateDat , true /* isZip */ , headerToCheckAgainst ) ;
2017-01-18 10:20:00 -08:00
}
}
// Otherwise, attempt to extract the files to the temporary directory
else
{
2017-03-01 21:26:27 -08:00
bool encounteredErrors = ArchiveTools . ExtractArchive ( file , tempSubDir , archiveScanLevel ) ;
2017-01-18 10:20:00 -08:00
// If the file was an archive and was extracted successfully, check it
if ( ! encounteredErrors )
{
2017-01-26 17:06:47 -08:00
usedInternally = true ;
2017-03-01 21:26:27 -08:00
Globals . Logger . Verbose ( Path . GetFileName ( file ) + " treated like an archive" ) ;
2017-01-18 10:20:00 -08:00
List < string > extracted = Directory . EnumerateFiles ( tempSubDir , "*" , SearchOption . AllDirectories ) . ToList ( ) ;
foreach ( string entry in extracted )
{
2017-02-27 22:38:47 -08:00
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
2017-03-01 21:26:27 -08:00
Rom rom = FileTools . GetFileInfo ( entry , omitFromScan : ( quickScan ? Hash . SecureHashes : Hash . DeepHashes ) ) ;
2017-01-30 12:59:04 -08:00
usedInternally & = RebuildIndividualFile ( rom , entry , outDir , tempSubDir , date , inverse , outputFormat ,
2017-03-01 21:26:27 -08:00
romba , updateDat , false /* isZip */ , headerToCheckAgainst ) ;
2017-01-18 10:20:00 -08:00
}
}
// Otherwise, just get the info on the file itself
else if ( File . Exists ( file ) )
{
2017-02-27 22:38:47 -08:00
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
2017-03-01 21:26:27 -08:00
Rom rom = FileTools . GetFileInfo ( file , omitFromScan : ( quickScan ? Hash . SecureHashes : Hash . DeepHashes ) ) ;
2017-01-30 12:59:04 -08:00
usedExternally = RebuildIndividualFile ( rom , file , outDir , tempSubDir , date , inverse , outputFormat ,
2017-03-01 21:26:27 -08:00
romba , updateDat , false /* isZip */ , headerToCheckAgainst ) ;
2017-01-18 10:20:00 -08:00
}
}
}
2017-01-26 15:49:46 -08:00
// If we are supposed to delete the file, do so
if ( delete & & ( usedExternally | | usedInternally ) )
{
try
{
2017-03-01 21:26:27 -08:00
Globals . Logger . Verbose ( "Attempting to delete input file '" + file + "'" ) ;
2017-03-15 14:44:44 -07:00
FileTools . SafeTryDeleteFile ( file , true ) ;
2017-03-01 21:26:27 -08:00
Globals . Logger . Verbose ( "File '" + file + "' deleted" ) ;
2017-01-26 17:06:47 -08:00
}
catch ( Exception ex )
{
2017-03-01 21:26:27 -08:00
Globals . Logger . Error ( "An error occurred while trying to delete '" + file + "' " + ex . ToString ( ) ) ;
2017-01-26 15:49:46 -08:00
}
}
2017-01-18 10:20:00 -08:00
// Now delete the temp directory
2017-03-15 14:44:44 -07:00
FileTools . SafeTryDeleteDirectory ( tempSubDir ) ;
2017-01-18 10:20:00 -08:00
}
/// <summary>
/// Find duplicates and rebuild individual files to output
/// </summary>
/// <param name="rom">Information for the current file to rebuild from</param>
/// <param name="file">Name of the file to process</param>
/// <param name="outDir">Output directory to use to build to</param>
/// <param name="tempDir">Temporary directory for archive extraction</param>
/// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
/// <param name="outputFormat">Output format that files should be written to</param>
/// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param>
/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
/// <param name="isZip">True if the input file is an archive, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
2017-01-26 15:49:46 -08:00
/// <returns>True if the file was able to be rebuilt, false otherwise</returns>
2017-02-27 23:00:57 -08:00
/// <remarks>
/// TODO: If going from a TGZ file to a TGZ file, don't extract, just copy
/// </remarks>
2017-01-30 12:59:04 -08:00
private bool RebuildIndividualFile ( Rom rom , string file , string outDir , string tempDir , bool date ,
2017-03-01 21:26:27 -08:00
bool inverse , OutputFormat outputFormat , bool romba , bool updateDat , bool isZip , string headerToCheckAgainst )
2017-01-18 10:20:00 -08:00
{
2017-01-26 15:49:46 -08:00
// Set the output value
bool rebuilt = false ;
2017-01-18 10:20:00 -08:00
// Find if the file has duplicates in the DAT
2017-03-01 21:26:27 -08:00
bool hasDuplicates = rom . HasDuplicates ( this ) ;
2017-01-18 10:20:00 -08:00
2017-01-26 16:04:30 -08:00
// If it has duplicates and we're not filtering, rebuild it
if ( hasDuplicates & & ! inverse )
2017-01-18 10:20:00 -08:00
{
// Get the list of duplicates to rebuild to
2017-03-01 21:26:27 -08:00
List < DatItem > dupes = rom . GetDuplicates ( this , remove : updateDat ) ;
2017-01-18 10:20:00 -08:00
// If we don't have any duplicates, continue
if ( dupes . Count = = 0 )
{
2017-01-26 15:49:46 -08:00
return rebuilt ;
2017-01-18 10:20:00 -08:00
}
// If we have an archive input, get the real name of the file to use
if ( isZip )
{
// Otherwise, extract the file to the temp folder
2017-03-01 21:26:27 -08:00
file = ArchiveTools . ExtractItem ( file , rom . Name , tempDir ) ;
2017-01-18 10:20:00 -08:00
}
// If we couldn't extract the file, then continue,
if ( String . IsNullOrEmpty ( file ) )
{
2017-01-26 15:49:46 -08:00
return rebuilt ;
2017-01-18 10:20:00 -08:00
}
2017-03-01 21:26:27 -08:00
Globals . Logger . User ( "Matches found for '" + Style . GetFileName ( file ) + "', rebuilding accordingly..." ) ;
2017-01-27 10:46:00 -08:00
rebuilt = true ;
2017-01-26 17:06:47 -08:00
2017-01-18 10:20:00 -08:00
// Now loop through the list and rebuild accordingly
foreach ( Rom item in dupes )
{
switch ( outputFormat )
{
case OutputFormat . Folder :
string outfile = Path . Combine ( outDir , Style . RemovePathUnsafeCharacters ( item . Machine . Name ) , item . Name ) ;
// Make sure the output folder is created
Directory . CreateDirectory ( Path . GetDirectoryName ( outfile ) ) ;
// Now copy the file over
try
{
File . Copy ( file , outfile ) ;
if ( date & & ! String . IsNullOrEmpty ( item . Date ) )
{
File . SetCreationTime ( outfile , DateTime . Parse ( item . Date ) ) ;
}
2017-01-26 15:49:46 -08:00
rebuilt & = true ;
}
catch
{
2017-01-26 17:06:47 -08:00
rebuilt = false ;
2017-01-18 10:20:00 -08:00
}
break ;
case OutputFormat . TapeArchive :
2017-03-01 21:26:27 -08:00
rebuilt & = ArchiveTools . WriteTAR ( file , outDir , item , date : date ) ;
2017-01-18 10:20:00 -08:00
break ;
case OutputFormat . Torrent7Zip :
2017-03-01 21:26:27 -08:00
rebuilt & = ArchiveTools . WriteTorrent7Zip ( file , outDir , item , date : date ) ;
2017-01-18 10:20:00 -08:00
break ;
case OutputFormat . TorrentGzip :
2017-03-01 21:26:27 -08:00
rebuilt & = ArchiveTools . WriteTorrentGZ ( file , outDir , romba ) ;
2017-01-18 10:20:00 -08:00
break ;
case OutputFormat . TorrentLrzip :
break ;
case OutputFormat . TorrentRar :
break ;
case OutputFormat . TorrentXZ :
2017-03-05 21:41:28 -08:00
rebuilt & = ArchiveTools . WriteTorrentXZ ( file , outDir , item , date : date ) ;
2017-01-18 10:20:00 -08:00
break ;
case OutputFormat . TorrentZip :
2017-03-01 21:26:27 -08:00
rebuilt & = ArchiveTools . WriteTorrentZip ( file , outDir , item , date : date ) ;
2017-01-18 10:20:00 -08:00
break ;
}
2017-01-26 17:06:47 -08:00
}
2017-01-18 10:20:00 -08:00
}
2017-01-26 15:49:46 -08:00
2017-01-26 16:04:30 -08:00
// If we have no duplicates and we're filtering, rebuild it
else if ( ! hasDuplicates & & inverse )
{
2017-01-26 17:06:47 -08:00
string machinename = null ;
2017-01-26 16:04:30 -08:00
// If we have an archive input, get the real name of the file to use
if ( isZip )
{
// Otherwise, extract the file to the temp folder
2017-01-26 17:06:47 -08:00
machinename = Style . GetFileNameWithoutExtension ( file ) ;
2017-03-01 21:26:27 -08:00
file = ArchiveTools . ExtractItem ( file , rom . Name , tempDir ) ;
2017-01-26 16:04:30 -08:00
}
// If we couldn't extract the file, then continue,
if ( String . IsNullOrEmpty ( file ) )
{
return rebuilt ;
}
// Get the item from the current file
2017-03-01 21:26:27 -08:00
Rom item = FileTools . GetFileInfo ( file ) ;
2017-01-26 17:06:47 -08:00
item . Machine = new Machine ( )
{
Name = Style . GetFileNameWithoutExtension ( item . Name ) ,
Description = Style . GetFileNameWithoutExtension ( item . Name ) ,
} ;
// If we are coming from an archive, set the correct machine name
if ( machinename ! = null )
{
item . Machine . Name = machinename ;
item . Machine . Description = machinename ;
}
2017-03-01 21:26:27 -08:00
Globals . Logger . User ( "No matches found for '" + Style . GetFileName ( file ) + "', rebuilding accordingly from inverse flag..." ) ;
2017-01-26 16:04:30 -08:00
// Now rebuild to the output file
switch ( outputFormat )
{
case OutputFormat . Folder :
string outfile = Path . Combine ( outDir , Style . RemovePathUnsafeCharacters ( item . Machine . Name ) , item . Name ) ;
// Make sure the output folder is created
Directory . CreateDirectory ( Path . GetDirectoryName ( outfile ) ) ;
// Now copy the file over
try
{
File . Copy ( file , outfile ) ;
if ( date & & ! String . IsNullOrEmpty ( item . Date ) )
{
File . SetCreationTime ( outfile , DateTime . Parse ( item . Date ) ) ;
}
rebuilt & = true ;
}
catch
{
rebuilt & = false ;
}
break ;
case OutputFormat . TapeArchive :
2017-03-01 21:26:27 -08:00
rebuilt & = ArchiveTools . WriteTAR ( file , outDir , item , date : date ) ;
2017-01-26 16:04:30 -08:00
break ;
case OutputFormat . Torrent7Zip :
2017-03-01 21:26:27 -08:00
rebuilt & = ArchiveTools . WriteTorrent7Zip ( file , outDir , item , date : date ) ;
2017-01-26 16:04:30 -08:00
break ;
case OutputFormat . TorrentGzip :
2017-03-01 21:26:27 -08:00
rebuilt & = ArchiveTools . WriteTorrentGZ ( file , outDir , romba ) ;
2017-01-26 16:04:30 -08:00
break ;
case OutputFormat . TorrentLrzip :
break ;
case OutputFormat . TorrentRar :
break ;
case OutputFormat . TorrentXZ :
2017-03-05 21:41:28 -08:00
rebuilt & = ArchiveTools . WriteTorrentXZ ( file , outDir , item , date : date ) ;
2017-01-26 16:04:30 -08:00
break ;
case OutputFormat . TorrentZip :
2017-03-01 21:26:27 -08:00
rebuilt & = ArchiveTools . WriteTorrentZip ( file , outDir , item , date : date ) ;
2017-01-26 16:04:30 -08:00
break ;
}
2017-01-27 11:13:12 -08:00
}
2017-01-26 16:04:30 -08:00
2017-01-27 11:13:12 -08:00
// Now we want to take care of headers, if applicable
if ( headerToCheckAgainst ! = null )
{
// Check to see if we have a matching header first
2017-03-01 21:26:27 -08:00
SkipperRule rule = Skipper . GetMatchingRule ( file , Path . GetFileNameWithoutExtension ( headerToCheckAgainst ) ) ;
2017-01-27 11:13:12 -08:00
// If there's a match, create the new file to write
if ( rule . Tests ! = null & & rule . Tests . Count ! = 0 )
2017-01-26 16:04:30 -08:00
{
2017-01-27 11:13:12 -08:00
// If the file could be transformed correctly
2017-03-01 21:26:27 -08:00
if ( rule . TransformFile ( file , file + ".new" ) )
2017-01-26 17:06:47 -08:00
{
2017-01-27 11:13:12 -08:00
// Get the file informations that we will be using
2017-03-01 21:26:27 -08:00
Rom headerless = FileTools . GetFileInfo ( file + ".new" ) ;
2017-01-27 11:13:12 -08:00
// Find if the file has duplicates in the DAT
2017-03-01 21:26:27 -08:00
hasDuplicates = headerless . HasDuplicates ( this ) ;
2017-01-27 11:13:12 -08:00
// If it has duplicates and we're not filtering, rebuild it
if ( hasDuplicates & & ! inverse )
{
// Get the list of duplicates to rebuild to
2017-03-01 21:26:27 -08:00
List < DatItem > dupes = headerless . GetDuplicates ( this , remove : updateDat ) ;
2017-01-27 11:13:12 -08:00
// If we don't have any duplicates, continue
if ( dupes . Count = = 0 )
{
return rebuilt ;
}
2017-03-01 21:26:27 -08:00
Globals . Logger . User ( "Headerless matches found for '" + Style . GetFileName ( file ) + "', rebuilding accordingly..." ) ;
2017-01-27 11:13:12 -08:00
rebuilt = true ;
// Now loop through the list and rebuild accordingly
foreach ( Rom item in dupes )
{
// Create a headered item to use as well
2017-01-27 11:53:20 -08:00
rom . Machine = item . Machine ;
rom . Name + = "_" + rom . CRC ;
2017-01-27 11:13:12 -08:00
switch ( outputFormat )
{
case OutputFormat . Folder :
string outfile = Path . Combine ( outDir , Style . RemovePathUnsafeCharacters ( item . Machine . Name ) , item . Name ) ;
2017-01-27 11:53:20 -08:00
string headeredOutfile = Path . Combine ( outDir , Style . RemovePathUnsafeCharacters ( rom . Machine . Name ) , rom . Name ) ;
2017-01-27 11:13:12 -08:00
// Make sure the output folder is created
Directory . CreateDirectory ( Path . GetDirectoryName ( outfile ) ) ;
2017-01-27 11:53:20 -08:00
// If either copy succeeds, then we want to set rebuilt to true
bool eitherSuccess = false ;
2017-01-27 11:13:12 -08:00
// Now copy the files over
try
{
File . Copy ( file + ".new" , outfile ) ;
if ( date & & ! String . IsNullOrEmpty ( item . Date ) )
{
File . SetCreationTime ( outfile , DateTime . Parse ( item . Date ) ) ;
}
2017-01-27 11:53:20 -08:00
eitherSuccess | = true ;
}
catch { }
try
{
2017-01-27 11:13:12 -08:00
File . Copy ( file , headeredOutfile ) ;
2017-01-27 11:53:20 -08:00
if ( date & & ! String . IsNullOrEmpty ( rom . Date ) )
2017-01-27 11:13:12 -08:00
{
2017-01-27 11:53:20 -08:00
File . SetCreationTime ( outfile , DateTime . Parse ( rom . Date ) ) ;
2017-01-27 11:13:12 -08:00
}
2017-01-27 11:53:20 -08:00
eitherSuccess | = true ;
2017-01-27 11:13:12 -08:00
}
2017-01-27 11:53:20 -08:00
catch { }
// Now add the success of either rebuild
rebuilt & = eitherSuccess ;
2017-01-27 11:13:12 -08:00
break ;
case OutputFormat . TapeArchive :
2017-03-01 21:26:27 -08:00
rebuilt & = ArchiveTools . WriteTAR ( file + ".new" , outDir , item , date : date ) ;
rebuilt & = ArchiveTools . WriteTAR ( file , outDir , rom , date : date ) ;
2017-01-27 11:13:12 -08:00
break ;
case OutputFormat . Torrent7Zip :
2017-03-01 21:26:27 -08:00
rebuilt & = ArchiveTools . WriteTorrent7Zip ( file + ".new" , outDir , item , date : date ) ;
rebuilt & = ArchiveTools . WriteTorrent7Zip ( file , outDir , rom , date : date ) ;
2017-01-27 11:13:12 -08:00
break ;
case OutputFormat . TorrentGzip :
2017-03-01 21:26:27 -08:00
rebuilt & = ArchiveTools . WriteTorrentGZ ( file + ".new" , outDir , romba ) ;
rebuilt & = ArchiveTools . WriteTorrentGZ ( file , outDir , romba ) ;
2017-01-27 11:13:12 -08:00
break ;
case OutputFormat . TorrentLrzip :
break ;
case OutputFormat . TorrentRar :
break ;
case OutputFormat . TorrentXZ :
2017-03-05 21:41:28 -08:00
rebuilt & = ArchiveTools . WriteTorrentXZ ( file + ".new" , outDir , item , date : date ) ;
rebuilt & = ArchiveTools . WriteTorrentXZ ( file , outDir , rom , date : date ) ;
2017-01-27 11:13:12 -08:00
break ;
case OutputFormat . TorrentZip :
2017-03-01 21:26:27 -08:00
rebuilt & = ArchiveTools . WriteTorrentZip ( file + ".new" , outDir , item , date : date ) ;
rebuilt & = ArchiveTools . WriteTorrentZip ( file , outDir , rom , date : date ) ;
2017-01-27 11:13:12 -08:00
break ;
}
}
}
2017-01-26 17:06:47 -08:00
}
2017-01-26 16:04:30 -08:00
}
}
2017-01-27 11:13:12 -08:00
// And now clear the temp folder to get rid of any transient files if we unzipped
if ( isZip )
{
2017-03-15 14:44:44 -07:00
FileTools . SafeTryDeleteDirectory ( tempDir ) ;
2017-01-27 11:13:12 -08:00
}
2017-01-26 15:49:46 -08:00
return rebuilt ;
2017-01-18 10:20:00 -08:00
}
2017-01-31 23:18:41 -08:00
/// <summary>
/// Process the DAT and verify from the depots
/// </summary>
/// <param name="inputs">List of input depot directories to compare against</param>
/// <param name="tempDir">Temporary directory for archive extraction</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <returns>True if verification was a success, false otherwise</returns>
public bool VerifyDepot(List<string> inputs, string tempDir, string headerToCheckAgainst)
{
	// Check the temp directory; fall back to a random system temp path if none was supplied
	if (String.IsNullOrEmpty(tempDir))
	{
		tempDir = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName());
	}

	// Then create or clean the temp directory
	if (!Directory.Exists(tempDir))
	{
		Directory.CreateDirectory(tempDir);
	}
	else
	{
		FileTools.CleanDirectory(tempDir);
	}

	bool success = true;

	Globals.Logger.User("Verifying all from supplied depots");
	DateTime start = DateTime.Now;

	// Now loop through and get only directories from the input paths
	List<string> directories = new List<string>();
	foreach (string input in inputs)
	{
		// Add to the list if the input is a directory
		if (Directory.Exists(input))
		{
			Globals.Logger.Verbose("Adding depot: '" + input + "'");
			directories.Add(input);
		}
	}

	// If we don't have any directories, we want to exit
	if (directories.Count == 0)
	{
		return success;
	}

	// Now that we have a list of depots, we want to sort the input DAT by SHA-1
	BucketBy(SortedBy.SHA1, false /* mergeroms */);

	// Then we want to loop through each of the hashes and see if we can rebuild
	List<string> hashes = Keys.ToList();
	foreach (string hash in hashes)
	{
		// Pre-empt any issues that could arise from string length
		if (hash.Length != Constants.SHA1Length)
		{
			continue;
		}

		Globals.Logger.User("Checking hash '" + hash + "'");

		// Get the extension path for the hash
		string subpath = Style.GetRombaPath(hash);

		// Find the first depot that includes the hash
		string foundpath = null;
		foreach (string directory in directories)
		{
			// Compute the candidate path once per depot instead of twice
			string candidate = Path.Combine(directory, subpath);
			if (File.Exists(candidate))
			{
				foundpath = candidate;
				break;
			}
		}

		// If we didn't find a path, then we continue
		if (foundpath == null)
		{
			continue;
		}

		// If we have a path, we want to try to get the rom information
		Rom fileinfo = ArchiveTools.GetTorrentGZFileInfo(foundpath);

		// If the file information is null, then we continue
		if (fileinfo == null)
		{
			continue;
		}

		// Now we want to remove all duplicates from the DAT
		fileinfo.GetDuplicates(this, remove: true);
	}

	Globals.Logger.User("Verifying complete in: " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));

	// Anything still left in the DAT was not found in the depots; output it as a fixdat
	_fileName = "fixDAT_" + _fileName;
	_name = "fixDAT_" + _name;
	_description = "fixDAT_" + _description;

	// Fold the write result into the overall success so callers see a failed output
	success &= WriteToFile(null);

	return success;
}
2016-11-04 11:57:32 -07:00
/// <summary>
/// Process the DAT and verify the output directory
/// </summary>
/// <param name="inputs">List of input directories to compare against</param>
/// <param name="tempDir">Temporary directory for archive extraction</param>
/// <param name="hashOnly">True if only hashes should be checked, false for full file information</param>
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <returns>True if verification was a success, false otherwise</returns>
public bool VerifyGeneric(List<string> inputs, string tempDir, bool hashOnly, bool quickScan,
	string headerToCheckAgainst)
{
	// Check the temp directory exists; fall back to a random system temp path if none was supplied
	if (String.IsNullOrEmpty(tempDir))
	{
		tempDir = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName());
	}

	// Then create or clean the temp directory
	if (!Directory.Exists(tempDir))
	{
		Directory.CreateDirectory(tempDir);
	}
	else
	{
		FileTools.CleanDirectory(tempDir);
	}

	// TODO: We want the cross section of what's the folder and what's in the DAT. Right now, it just has what's in the DAT that's not in the folder
	bool success = true;

	// Then, loop through and check each of the inputs
	Globals.Logger.User("Processing files:\n");
	foreach (string input in inputs)
	{
		// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
		PopulateFromDir(input, (quickScan ? Hash.SecureHashes : Hash.DeepHashes) /* omitFromScan */, true /* bare */, false /* archivesAsFiles */,
			true /* enableGzip */, false /* addBlanks */, false /* addDate */, tempDir /* tempDir */, false /* copyFiles */,
			headerToCheckAgainst);
	}

	// Setup the fixdat as a clean copy of this DAT's header information
	DatFile matched = new DatFile(this);
	matched.Reset();
	matched.FileName = "fixDat_" + matched.FileName;
	matched.Name = "fixDat_" + matched.Name;
	matched.Description = "fixDat_" + matched.Description;
	matched.DatFormat = DatFormat.Logiqx;

	// If we are checking hashes only, essentially diff the inputs
	if (hashOnly)
	{
		// First we need to sort by hash to get duplicates
		BucketBy(SortedBy.SHA1, false /* mergeroms */);

		// Then follow the same tactics as before
		foreach (string key in Keys)
		{
			List<DatItem> roms = this[key];
			foreach (DatItem rom in roms)
			{
				if (rom.SourceID == 99)
				{
					// BUGFIX: the guard admits both Disks and Roms, so cast according to the
					// actual item type instead of unconditionally casting to Disk (which threw
					// InvalidCastException for any matched Rom)
					if (rom.Type == ItemType.Disk)
					{
						matched.Add(((Disk)rom).SHA1, rom);
					}
					else if (rom.Type == ItemType.Rom)
					{
						matched.Add(((Rom)rom).SHA1, rom);
					}
				}
			}
		}
	}

	// If we are checking full names, get only files found in directory
	else
	{
		foreach (string key in Keys)
		{
			List<DatItem> roms = this[key];
			List<DatItem> newroms = DatItem.Merge(roms);
			foreach (Rom rom in newroms)
			{
				if (rom.SourceID == 99)
				{
					matched.Add(rom.Size + "-" + rom.CRC, rom);
				}
			}
		}
	}

	// Now output the fixdat to the main folder
	success &= matched.WriteToFile("", stats: true);

	return success;
}
#endregion
}
}