2016-09-22 20:42:34 -07:00
using Mono.Data.Sqlite ;
using OCRC ;
2016-06-13 23:54:13 -07:00
using System ;
2016-09-26 17:52:20 -07:00
using System.Collections.Generic ;
2016-06-13 20:57:49 -07:00
using System.IO ;
2016-06-13 22:12:00 -07:00
using System.Linq ;
2016-08-29 16:55:55 -07:00
using System.Security.Cryptography ;
2016-09-22 17:14:23 -07:00
using System.Xml ;
2016-09-28 12:27:39 -07:00
using System.Xml.Schema ;
2016-06-13 20:57:49 -07:00
namespace SabreTools.Helper
{
2016-08-29 16:52:55 -07:00
public class FileTools
2016-06-13 20:57:49 -07:00
{
2016-09-22 21:00:18 -07:00
#region File Information
2016-06-15 14:55:06 -07:00
2016-09-26 17:52:20 -07:00
/// <summary>
/// Retrieve a list of files from a directory recursively in proper order
/// </summary>
/// <param name="directory">Directory to parse</param>
/// <param name="infiles">List representing existing files</param>
/// <returns>List with all new files</returns>
public static List<string> RetrieveFiles(string directory, List<string> infiles)
{
    // Gather and naturally sort the files directly inside this directory
    List<string> files = Directory.EnumerateFiles(directory, "*", SearchOption.TopDirectoryOnly).ToList();
    files.Sort(new NaturalComparer());
    infiles.AddRange(files);

    // Gather and naturally sort the immediate subdirectories, then recurse into each
    List<string> subdirs = Directory.EnumerateDirectories(directory, "*", SearchOption.TopDirectoryOnly).ToList();
    subdirs.Sort(new NaturalComparer());
    foreach (string subdir in subdirs)
    {
        infiles = RetrieveFiles(subdir, infiles);
    }

    return infiles;
}
2016-06-13 22:12:00 -07:00
/// <summary>
/// Get what type of DAT the input file is
/// </summary>
/// <param name="filename">Name of the file to be parsed</param>
/// <param name="logger">Logger object for console and file output</param>
/// <returns>The OutputFormat corresponding to the DAT, 0 on error</returns>
/// <remarks>There is currently no differentiation between XML and SabreDAT here</remarks>
public static OutputFormat GetOutputFormat(string filename, Logger logger)
{
    // Limit the output formats based on extension
    string ext = Path.GetExtension(filename).ToLowerInvariant();
    if (ext.StartsWith("."))
    {
        ext = ext.Substring(1);
    }
    if (ext != "dat" && ext != "md5" && ext != "sfv" && ext != "sha1" && ext != "txt" && ext != "xml")
    {
        return 0;
    }

    // Read the input file, if possible
    logger.Verbose("Attempting to read file: \"" + filename + "\"");

    // Check if file exists
    if (!File.Exists(filename))
    {
        logger.Warning("File '" + filename + "' could not read from!");
        return 0;
    }

    // Some formats only require the extension to know
    if (ext == "md5")
    {
        return OutputFormat.RedumpMD5;
    }
    if (ext == "sfv")
    {
        return OutputFormat.RedumpSFV;
    }
    if (ext == "sha1")
    {
        return OutputFormat.RedumpSHA1;
    }

    // For everything else, we need to read the first two lines
    try
    {
        string first, second;
        using (StreamReader sr = File.OpenText(filename))
        {
            first = sr.ReadLine();
            second = sr.ReadLine();
        }

        // An empty file cannot be identified
        if (first == null)
        {
            return 0;
        }

        // Guard against single-line files: previously a null second line threw a
        // NullReferenceException here and the format was never detected
        first = first.ToLowerInvariant();
        second = (second ?? string.Empty).ToLowerInvariant();

        // If we have an XML-based DAT
        if (first.Contains("<?xml") && first.Contains("?>"))
        {
            if (second.StartsWith("<!doctype datafile"))
            {
                return OutputFormat.Logiqx;
            }
            else if (second.StartsWith("<!doctype softwarelist"))
            {
                return OutputFormat.SoftwareList;
            }
            else if (second.StartsWith("<!doctype sabredat"))
            {
                return OutputFormat.SabreDat;
            }
            else if (second.StartsWith("<dat") && !second.StartsWith("<datafile"))
            {
                return OutputFormat.OfflineList;
            }
            // Older and non-compliant DATs
            else
            {
                return OutputFormat.Logiqx;
            }
        }
        // If we have an INI-based DAT
        else if (first.Contains("[") && first.Contains("]"))
        {
            return OutputFormat.RomCenter;
        }
        // If we have a CMP-based DAT
        else if (first.Contains("clrmamepro"))
        {
            return OutputFormat.ClrMamePro;
        }
        else if (first.Contains("romvault"))
        {
            return OutputFormat.ClrMamePro;
        }
        else if (first.Contains("doscenter"))
        {
            return OutputFormat.DOSCenter;
        }
        // Default to the CMP-style reader for anything unrecognized
        else
        {
            return OutputFormat.ClrMamePro;
        }
    }
    catch (Exception)
    {
        return 0;
    }
}
2016-09-22 16:16:48 -07:00
/// <summary>
/// Retrieve file information for a single file
/// </summary>
/// <param name="input">Filename to get information from</param>
/// <param name="logger">Logger object for console and file output</param>
/// <param name="noMD5">True if MD5 hashes should not be calculated, false otherwise (default)</param>
/// <param name="noSHA1">True if SHA-1 hashes should not be calculated, false otherwise (default)</param>
/// <param name="offset">Set a >0 number for getting hash for part of the file, 0 otherwise (default)</param>
/// <param name="date">True if the file Date should be included, false otherwise (default)</param>
/// <param name="header">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <returns>Populated RomData object if success, empty one on error</returns>
public static Rom GetFileInfo(string input, Logger logger, bool noMD5 = false, bool noSHA1 = false, long offset = 0, bool date = false, string header = null)
{
    // Add safeguard if file doesn't exist
    if (!File.Exists(input))
    {
        return new Rom();
    }

    // Get the information from the file stream
    Rom rom = new Rom();
    if (header != null)
    {
        // See if a copier-header skipper rule applies to this file
        SkipperRule rule = Skipper.GetMatchingRule(input, Path.GetFileNameWithoutExtension(header), logger);

        // If there's a match, get the new information from the header-stripped stream
        if (rule.Tests != null && rule.Tests.Count != 0)
        {
            // Create the input and output streams
            MemoryStream outputStream = new MemoryStream();
            FileStream inputStream = File.OpenRead(input);

            // Transform the stream and get the information from it
            // NOTE(review): noMD5/noSHA1/offset are not forwarded on this path — the
            // transformed stream is always fully hashed; confirm that is intended
            rule.TransformStream(inputStream, outputStream, logger, keepReadOpen: false, keepWriteOpen: true);
            rom = GetStreamInfo(outputStream, outputStream.Length);

            // Dispose of the streams
            outputStream.Dispose();
            inputStream.Dispose();
        }
        // Otherwise, just get the info from the raw file
        // (GetStreamInfo disposes the opened stream since keepReadOpen is false)
        else
        {
            rom = GetStreamInfo(File.OpenRead(input), new FileInfo(input).Length, noMD5, noSHA1, offset, false);
        }
    }
    else
    {
        rom = GetStreamInfo(File.OpenRead(input), new FileInfo(input).Length, noMD5, noSHA1, offset, false);
    }

    // Add unique data from the file: its name and (optionally) last-write timestamp
    rom.Name = Path.GetFileName(input);
    rom.Date = (date ? new FileInfo(input).LastWriteTime.ToString("yyyy/MM/dd HH:mm:ss") : "");

    return rom;
}
#endregion
2016-09-01 20:38:41 -07:00
#region File Manipulation
2016-09-22 17:14:23 -07:00
/// <summary>
/// Get the XmlTextReader associated with a file, if possible
/// </summary>
/// <param name="filename">Name of the file to be parsed</param>
/// <param name="logger">Logger object for console and file output</param>
/// <returns>The XmlReader representing the (possibly converted) file, null otherwise</returns>
public static XmlReader GetXmlTextReader(string filename, Logger logger)
{
    logger.Verbose("Attempting to read file: \"" + filename + "\"");

    // Bail out early when the file is missing
    if (!File.Exists(filename))
    {
        logger.Warning("File '" + filename + "' could not read from!");
        return null;
    }

    // Build a forgiving reader: no character checks, no DTD processing, no validation
    XmlReaderSettings settings = new XmlReaderSettings
    {
        CheckCharacters = false,
        DtdProcessing = DtdProcessing.Ignore,
        IgnoreComments = true,
        IgnoreWhitespace = true,
        ValidationFlags = XmlSchemaValidationFlags.None,
        ValidationType = ValidationType.None,
    };
    return XmlReader.Create(filename, settings);
}
2016-09-01 20:38:41 -07:00
/// <summary>
/// Remove an arbitrary number of bytes from the inputted file
/// </summary>
/// <param name="input">File to be cropped</param>
/// <param name="output">Outputted file</param>
/// <param name="bytesToRemoveFromHead">Bytes to be removed from head of file</param>
/// <param name="bytesToRemoveFromTail">Bytes to be removed from tail of file</param>
public static void RemoveBytesFromFile(string input, string output, long bytesToRemoveFromHead, long bytesToRemoveFromTail)
{
    // If any of the inputs are invalid, skip
    if (!File.Exists(input) || new FileInfo(input).Length <= (bytesToRemoveFromHead + bytesToRemoveFromTail))
    {
        return;
    }

    // Wrap the streams in using blocks so they are disposed even if the copy throws
    using (FileStream fsr = File.OpenRead(input))
    using (FileStream fsw = File.OpenWrite(output))
    {
        RemoveBytesFromStream(fsr, fsw, bytesToRemoveFromHead, bytesToRemoveFromTail);
    }
}
/// <summary>
/// Add an arbitrary number of bytes to the inputted file
/// </summary>
/// <param name="input">File to be appended to</param>
/// <param name="output">Outputted file</param>
/// <param name="bytesToAddToHead">Hex string representing bytes to be added to head of file</param>
/// <param name="bytesToAddToTail">Hex string representing bytes to be added to tail of file</param>
public static void AppendBytesToFile(string input, string output, string bytesToAddToHead, string bytesToAddToTail)
{
    // Hex-string-to-byte-array conversion adapted from:
    // http://stackoverflow.com/questions/311165/how-do-you-convert-byte-array-to-hexadecimal-string-and-vice-versa
    byte[] headBytes = new byte[bytesToAddToHead.Length / 2];
    for (int pos = 0; pos < bytesToAddToHead.Length; pos += 2)
    {
        headBytes[pos / 2] = Convert.ToByte(bytesToAddToHead.Substring(pos, 2), 16);
    }

    byte[] tailBytes = new byte[bytesToAddToTail.Length / 2];
    for (int pos = 0; pos < bytesToAddToTail.Length; pos += 2)
    {
        tailBytes[pos / 2] = Convert.ToByte(bytesToAddToTail.Substring(pos, 2), 16);
    }

    // Delegate to the byte-array overload
    AppendBytesToFile(input, output, headBytes, tailBytes);
}
/// <summary>
/// Add an arbitrary number of bytes to the inputted file
/// </summary>
/// <param name="input">File to be appended to</param>
/// <param name="output">Outputted file</param>
/// <param name="bytesToAddToHead">Bytes to be added to head of file</param>
/// <param name="bytesToAddToTail">Bytes to be added to tail of file</param>
public static void AppendBytesToFile(string input, string output, byte[] bytesToAddToHead, byte[] bytesToAddToTail)
{
    // If any of the inputs are invalid, skip
    if (!File.Exists(input))
    {
        return;
    }

    // Wrap the streams in using blocks so they are disposed even if the copy throws
    using (FileStream fsr = File.OpenRead(input))
    using (FileStream fsw = File.OpenWrite(output))
    {
        AppendBytesToStream(fsr, fsw, bytesToAddToHead, bytesToAddToTail);
    }
}
2016-09-22 20:42:34 -07:00
/// <summary>
/// Detect header skipper compliance and create an output file
/// </summary>
/// <param name="file">Name of the file to be parsed</param>
/// <param name="outDir">Output directory to write the file to, empty means the same directory as the input file</param>
/// <param name="logger">Logger object for console and file output</param>
/// <returns>True if the output file was created, false otherwise</returns>
public static bool DetectSkipperAndTransform(string file, string outDir, Logger logger)
{
    // Create the output directory if it doesn't exist
    if (outDir != "" && !Directory.Exists(outDir))
    {
        Directory.CreateDirectory(outDir);
    }

    logger.User("\nGetting skipper information for '" + file + "'");

    // Get the skipper rule that matches the file, if any
    SkipperRule rule = Skipper.GetMatchingRule(file, "", logger);

    // If we have an empty rule, return false
    // NOTE(review): this also rejects any rule whose Operation is not None — confirm
    // that only plain header-skip rules are meant to be handled here
    if (rule.Tests == null || rule.Tests.Count == 0 || rule.Operation != HeaderSkipOperation.None)
    {
        return false;
    }

    logger.User("File has a valid copier header");

    // Extract the header bytes as a hex string for the database; the reader is wrapped
    // in a using block so the file handle is released even if reading throws
    string hstr;
    using (BinaryReader br = new BinaryReader(File.OpenRead(file)))
    {
        byte[] hbin = br.ReadBytes((int)rule.StartOffset);

        // BitConverter yields "AA-BB-..."; stripping the dashes gives the same value the
        // old per-byte concatenation loop produced, without the O(n^2) string appends
        hstr = BitConverter.ToString(hbin).Replace("-", "");
    }

    // Apply the rule to the file
    string newfile = (outDir == "" ? Path.GetFullPath(file) + ".new" : Path.Combine(outDir, Path.GetFileName(file)));
    rule.TransformFile(file, newfile, logger);

    // If the output file doesn't exist, return false
    if (!File.Exists(newfile))
    {
        return false;
    }

    // Now add the information to the database if it's not already there
    Rom rom = GetFileInfo(newfile, logger);
    DatabaseTools.AddHeaderToDatabase(hstr, rom.SHA1, rule.SourceFile, logger);

    return true;
}
/// <summary>
/// Detect and replace header(s) to the given file
/// </summary>
/// <param name="file">Name of the file to be parsed</param>
/// <param name="outDir">Output directory to write the file to, empty means the same directory as the input file</param>
/// <param name="logger">Logger object for console and file output</param>
/// <returns>True if a header was found and appended, false otherwise</returns>
public static bool RestoreHeader(string file, string outDir, Logger logger)
{
    // Create the output directory if it doesn't exist
    if (outDir != "" && !Directory.Exists(outDir))
    {
        Directory.CreateDirectory(outDir);
    }

    bool success = true;

    // First, get the SHA-1 hash of the file
    Rom rom = GetFileInfo(file, logger);

    // Then try to pull the corresponding headers from the database
    string header = "";

    // Open the database connection; using blocks guarantee disposal even on exceptions,
    // and the hash is passed as a parameter instead of being concatenated into the SQL
    using (SqliteConnection dbc = new SqliteConnection(Constants.HeadererConnectionString))
    {
        dbc.Open();
        using (SqliteCommand slc = new SqliteCommand("SELECT header, type FROM data WHERE sha1=@sha1", dbc))
        {
            slc.Parameters.Add(new SqliteParameter("@sha1", rom.SHA1));
            using (SqliteDataReader sldr = slc.ExecuteReader())
            {
                if (sldr.HasRows)
                {
                    // A file may have multiple known headers; write one output file per match
                    int sub = 0;
                    while (sldr.Read())
                    {
                        logger.Verbose("Found match with rom type " + sldr.GetString(1));
                        header = sldr.GetString(0);

                        logger.User("Creating reheadered file: " +
                            (outDir == "" ? Path.GetFullPath(file) + ".new" : Path.Combine(outDir, Path.GetFileName(file))) + sub);
                        AppendBytesToFile(file,
                            (outDir == "" ? Path.GetFullPath(file) + ".new" : Path.Combine(outDir, Path.GetFileName(file))) + sub, header, string.Empty);
                        logger.User("Reheadered file created!");

                        // BUGFIX: the counter was never incremented, so multiple matching
                        // headers all overwrote the same "<name>0" output file
                        sub++;
                    }
                }
                else
                {
                    logger.Warning("No matching header could be found!");
                    success = false;
                }
            }
        }
    }

    return success;
}
2016-09-01 20:38:41 -07:00
/// <summary>
/// Cleans out the temporary directory
/// </summary>
/// <param name="dirname">Name of the directory to clean out</param>
public static void CleanDirectory(string dirname)
{
    // Best-effort delete of every top-level file; failures are ignored
    foreach (string path in Directory.EnumerateFiles(dirname, "*", SearchOption.TopDirectoryOnly))
    {
        try { File.Delete(path); }
        catch { }
    }

    // Best-effort recursive delete of every top-level subdirectory; failures are ignored
    foreach (string path in Directory.EnumerateDirectories(dirname, "*", SearchOption.TopDirectoryOnly))
    {
        try { Directory.Delete(path, true); }
        catch { }
    }
}
#endregion
2016-09-22 16:16:48 -07:00
2016-10-20 17:24:44 -07:00
#region Rebuilding and Verifying
2016-10-20 17:33:25 -07:00
/// <summary>
/// Process inputs and convert to TorrentZip or TorrentGZ, optionally converting to Romba format
/// </summary>
/// <param name="datFile">DatFile to use as a filter in conversion, null otherwise</param>
/// <param name="inputs">List of inputs to convert over to TorrentZip or TorrentGZ</param>
/// <param name="outDir">Output folder to rebuild to, blank is the current directory</param>
/// <param name="tempDir">Temporary directory to use in file extraction</param>
/// <param name="tgz">True if files should be output in TorrentGZ format, false for TorrentZip</param>
/// <param name="romba">True if TorrentGZ files should be output in romba depot format, false otherwise</param>
/// <param name="delete">True if input files should be deleted, false otherwise</param>
/// <param name="archiveScanLevel">ArchiveScanLevel representing how files should be treated</param>
/// <param name="logger">Logger object for file and console output</param>
/// <returns>True if processing was a success, false otherwise</returns>
public static bool ConvertFiles(DatFile datFile, List<string> inputs, string outDir, string tempDir, bool tgz,
    bool romba, bool delete, ArchiveScanLevel archiveScanLevel, Logger logger)
{
    bool success = true;

    // First, check that the output directory exists
    if (!Directory.Exists(outDir))
    {
        Directory.CreateDirectory(outDir);
        outDir = Path.GetFullPath(outDir);
    }

    // Then create or clean the temp directory
    if (!Directory.Exists(tempDir))
    {
        Directory.CreateDirectory(tempDir);
    }
    else
    {
        CleanDirectory(tempDir);
    }

    // Now process all of the inputs
    foreach (string input in inputs)
    {
        logger.User("Examining file " + input);

        // Get if the file should be scanned internally and externally
        bool shouldExternalProcess, shouldInternalProcess;
        ArchiveTools.GetInternalExternalProcess(input, archiveScanLevel, logger, out shouldExternalProcess, out shouldInternalProcess);

        // Do an external scan of the file, if necessary
        if (shouldExternalProcess)
        {
            // If a DAT is defined, we want to make sure that this file is not in there
            Rom rom = GetFileInfo(input, logger);
            if (datFile != null && datFile.Files.Count > 0)
            {
                if (rom.HasDuplicates(datFile, logger))
                {
                    logger.User("File '" + input + "' existed in the DAT, skipping...");
                    continue;
                }
            }

            logger.User("Processing file " + input);
            if (tgz)
            {
                success &= ArchiveTools.WriteTorrentGZ(input, outDir, romba, logger);
            }
            else
            {
                success &= ArchiveTools.WriteToArchive(input, outDir, rom, logger);
            }
        }

        // Process the file as an archive, if necessary
        if (shouldInternalProcess)
        {
            // Now, if the file is a supported archive type, also run on all files within
            bool encounteredErrors = ArchiveTools.ExtractArchive(input, tempDir, archiveScanLevel, logger);

            // If no errors were encountered, we loop through the temp directory
            if (!encounteredErrors)
            {
                logger.Verbose("Archive found! Successfully extracted");
                foreach (string file in Directory.EnumerateFiles(tempDir, "*", SearchOption.AllDirectories))
                {
                    // If a DAT is defined, we want to make sure that this file is not in there
                    Rom rom = GetFileInfo(file, logger);
                    if (datFile != null && datFile.Files.Count > 0)
                    {
                        if (rom.HasDuplicates(datFile, logger))
                        {
                            logger.User("File '" + file + "' existed in the DAT, skipping...");
                            continue;
                        }
                    }

                    // BUGFIX: log the extracted file being processed, not the parent archive
                    logger.User("Processing file " + file);
                    if (tgz)
                    {
                        success &= ArchiveTools.WriteTorrentGZ(file, outDir, romba, logger);
                    }
                    else
                    {
                        success &= ArchiveTools.WriteToArchive(file, outDir, rom, logger);
                    }
                }

                CleanDirectory(tempDir);
            }
        }

        // Delete the source file if we're supposed to
        if (delete)
        {
            try
            {
                logger.User("Attempting to delete " + input);
                File.Delete(input);
            }
            catch (Exception ex)
            {
                logger.Error(ex.ToString());
                success = false;
            }
        }
    }

    // Now one final delete of the temp directory
    // NOTE(review): this retries forever if the directory can never be deleted —
    // confirm whether a retry limit is needed
    while (Directory.Exists(tempDir))
    {
        try
        {
            Directory.Delete(tempDir, true);
        }
        catch
        {
            continue;
        }
    }

    // If we're in romba mode and the size file doesn't exist, create it
    if (romba && !File.Exists(Path.Combine(outDir, ".romba_size")))
    {
        // Get the size of all of the files in the output folder
        long size = 0;
        foreach (string file in Directory.EnumerateFiles(outDir, "*", SearchOption.AllDirectories))
        {
            size += new FileInfo(file).Length;
        }

        // Write out the value to each of the romba depot files; using blocks
        // guarantee the writers are flushed and disposed
        using (StreamWriter tw = new StreamWriter(File.Open(Path.Combine(outDir, ".romba_size"), FileMode.Create, FileAccess.Write)))
        using (StreamWriter twb = new StreamWriter(File.Open(Path.Combine(outDir, ".romba_size.backup"), FileMode.Create, FileAccess.Write)))
        {
            tw.Write(size);
            twb.Write(size);
        }
    }

    return success;
}
2016-10-20 17:24:44 -07:00
/// <summary>
/// Process the DAT and verify the output directory
/// </summary>
/// <param name="datFile">DAT to use to verify the directory</param>
/// <param name="inputs">List of input directories to compare against</param>
/// <param name="tempDir">Temporary directory for archive extraction</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="logger">Logger object for file and console output</param>
/// <returns>True if verification was a success, false otherwise</returns>
public static bool VerifyDirectory(DatFile datFile, List<string> inputs, string tempDir, string headerToCheckAgainst, Logger logger)
{
    // First create or clean the temp directory
    if (!Directory.Exists(tempDir))
    {
        Directory.CreateDirectory(tempDir);
    }
    else
    {
        CleanDirectory(tempDir);
    }

    bool success = true;

    /*
    We want the cross section of what's in the folder and what's in the DAT.
    Right now, it just has what's in the DAT that's not in the folder.
    */

    // Then, loop through and check each of the inputs, scanning the directory contents into the DAT
    logger.User("Processing files:\n");
    foreach (string input in inputs)
    {
        datFile.PopulateDatFromDir(input, false /* noMD5 */, false /* noSHA1 */, true /* bare */, false /* archivesAsFiles */,
            true /* enableGzip */, false /* addBlanks */, false /* addDate */, tempDir /* tempDir */, false /* copyFiles */,
            headerToCheckAgainst, 4 /* maxDegreeOfParallelism */, logger);
    }

    // Setup the fixdat: clone the header and prefix the identifying fields with "fixDat_"
    DatFile matched = (DatFile)datFile.CloneHeader();
    matched.Files = new SortedDictionary<string, List<DatItem>>();
    matched.FileName = "fixDat_" + matched.FileName;
    matched.Name = "fixDat_" + matched.Name;
    matched.Description = "fixDat_" + matched.Description;
    matched.OutputFormat = OutputFormat.Logiqx;

    // Now that all files are parsed, get only files found in directory
    bool found = false;
    foreach (List<DatItem> roms in datFile.Files.Values)
    {
        List<DatItem> newroms = DatItem.Merge(roms, logger);
        foreach (Rom rom in newroms)
        {
            // SourceID 99 appears to mark items found on disk — TODO confirm against PopulateDatFromDir
            if (rom.SourceID == 99)
            {
                found = true;

                // Key the fixdat entries on size + CRC
                string key = rom.Size + "-" + rom.CRC;
                if (matched.Files.ContainsKey(key))
                {
                    matched.Files[key].Add(rom);
                }
                else
                {
                    List<DatItem> temp = new List<DatItem>();
                    temp.Add(rom);
                    matched.Files.Add(key, temp);
                }
            }
        }
    }

    // Now output the fixdat to the main folder, if anything was matched
    if (found)
    {
        matched.WriteToFile("", logger, stats: true);
    }
    else
    {
        logger.User("No fixDat needed");
    }

    return success;
}
#endregion
2016-09-22 21:00:18 -07:00
#region Stream Information
/// <summary>
/// Retrieve file information for a single stream
/// </summary>
/// <param name="input">Stream to get information from</param>
/// <param name="size">Size of the input stream</param>
/// <param name="noMD5">True if MD5 hashes should not be calculated, false otherwise (default)</param>
/// <param name="noSHA1">True if SHA-1 hashes should not be calculated, false otherwise (default)</param>
/// <param name="offset">Set a >0 number for getting hash for part of the stream, 0 otherwise (default); negative values seek from the end</param>
/// <param name="keepReadOpen">True if the underlying read stream should be kept open, false otherwise</param>
/// <returns>Populated RomData object if success, empty one on error</returns>
public static Rom GetStreamInfo(Stream input, long size, bool noMD5 = false, bool noSHA1 = false, long offset = 0, bool keepReadOpen = false)
{
    Rom rom = new Rom
    {
        Type = ItemType.Rom,
        Size = size,
        CRC = string.Empty,
        MD5 = string.Empty,
        SHA1 = string.Empty,
    };

    try
    {
        // Initialize the hashers inside using blocks so they are released even when
        // an IOException aborts the read (previously they leaked on that path)
        using (OptimizedCRC crc = new OptimizedCRC())
        using (MD5 md5 = MD5.Create())
        using (SHA1 sha1 = SHA1.Create())
        {
            // Seek to the starting position, if one is set
            if (offset < 0)
            {
                input.Seek(offset, SeekOrigin.End);
            }
            else
            {
                input.Seek(offset, SeekOrigin.Begin);
            }

            // Hash the stream in 8 KiB chunks
            byte[] buffer = new byte[8 * 1024];
            int read;
            while ((read = input.Read(buffer, 0, buffer.Length)) > 0)
            {
                crc.Update(buffer, 0, read);
                if (!noMD5)
                {
                    md5.TransformBlock(buffer, 0, read, buffer, 0);
                }
                if (!noSHA1)
                {
                    sha1.TransformBlock(buffer, 0, read, buffer, 0);
                }
            }

            // Finalize each hash and store the lowercase hex forms
            crc.Update(buffer, 0, 0);
            rom.CRC = crc.Value.ToString("X8").ToLowerInvariant();
            if (!noMD5)
            {
                md5.TransformFinalBlock(buffer, 0, 0);
                rom.MD5 = BitConverter.ToString(md5.Hash).Replace("-", "").ToLowerInvariant();
            }
            if (!noSHA1)
            {
                sha1.TransformFinalBlock(buffer, 0, 0);
                rom.SHA1 = BitConverter.ToString(sha1.Hash).Replace("-", "").ToLowerInvariant();
            }
        }
    }
    catch (IOException)
    {
        return new Rom();
    }
    finally
    {
        if (!keepReadOpen)
        {
            input.Dispose();
        }
    }

    return rom;
}
#endregion
2016-09-22 16:16:48 -07:00
#region Stream Manipulation
/// <summary>
/// Remove an arbitrary number of bytes from the inputted stream
/// </summary>
/// <param name="input">Stream to be cropped</param>
/// <param name="output">Stream to output to</param>
/// <param name="bytesToRemoveFromHead">Bytes to be removed from head of stream</param>
/// <param name="bytesToRemoveFromTail">Bytes to be removed from tail of stream</param>
public static void RemoveBytesFromStream(Stream input, Stream output, long bytesToRemoveFromHead, long bytesToRemoveFromTail)
{
    const int chunkSize = 1024;

    BinaryReader reader = new BinaryReader(input);
    BinaryWriter writer = new BinaryWriter(output);

    // Everything before this point in the stream is kept
    long keepUntil = reader.BaseStream.Length - bytesToRemoveFromTail;

    // Skip over the head bytes (a negative count means start at the beginning)
    reader.BaseStream.Seek((bytesToRemoveFromHead < 0 ? 0 : bytesToRemoveFromHead), SeekOrigin.Begin);

    // Copy full chunks while at least one whole chunk remains before the cutoff
    while (reader.BaseStream.Position <= (keepUntil - chunkSize))
    {
        writer.Write(reader.ReadBytes(chunkSize));
    }

    // Copy whatever partial chunk is left before the cutoff
    int remaining = (int)(keepUntil - reader.BaseStream.Position);
    writer.Write(reader.ReadBytes(remaining));
}
/// <summary>
/// Add an arbitrary number of bytes to the inputted stream
/// </summary>
/// <param name="input">Stream to be appended to</param>
/// <param name="output">Outputted stream</param>
/// <param name="bytesToAddToHead">Hex string representing bytes to be added to head of stream</param>
/// <param name="bytesToAddToTail">Hex string representing bytes to be added to tail of stream</param>
public static void AppendBytesToStream(Stream input, Stream output, string bytesToAddToHead, string bytesToAddToTail)
{
    // Hex-string-to-byte-array conversion adapted from:
    // http://stackoverflow.com/questions/311165/how-do-you-convert-byte-array-to-hexadecimal-string-and-vice-versa
    byte[] headBytes = new byte[bytesToAddToHead.Length / 2];
    for (int pos = 0; pos < bytesToAddToHead.Length; pos += 2)
    {
        headBytes[pos / 2] = Convert.ToByte(bytesToAddToHead.Substring(pos, 2), 16);
    }

    byte[] tailBytes = new byte[bytesToAddToTail.Length / 2];
    for (int pos = 0; pos < bytesToAddToTail.Length; pos += 2)
    {
        tailBytes[pos / 2] = Convert.ToByte(bytesToAddToTail.Substring(pos, 2), 16);
    }

    // Delegate to the byte-array overload
    AppendBytesToStream(input, output, headBytes, tailBytes);
}
/// <summary>
/// Add an arbitrary number of bytes to the inputted stream
/// </summary>
/// <param name="input">Stream to be appended to</param>
/// <param name="output">Outputted stream</param>
/// <param name="bytesToAddToHead">Bytes to be added to head of stream</param>
/// <param name="bytesToAddToTail">Bytes to be added to tail of stream</param>
public static void AppendBytesToStream(Stream input, Stream output, byte[] bytesToAddToHead, byte[] bytesToAddToTail)
{
    BinaryReader br = new BinaryReader(input);
    BinaryWriter bw = new BinaryWriter(output);

    // Write the head bytes first, if any (use Length instead of LINQ Count() on arrays)
    if (bytesToAddToHead.Length > 0)
    {
        bw.Write(bytesToAddToHead);
    }

    int bufferSize = 1024;

    // Now read the stream in chunks and write out
    byte[] buffer = new byte[bufferSize];
    while (br.BaseStream.Position <= (br.BaseStream.Length - bufferSize))
    {
        buffer = br.ReadBytes(bufferSize);
        bw.Write(buffer);
    }

    // For the final chunk, if any, write out only that number of bytes
    int length = (int)(br.BaseStream.Length - br.BaseStream.Position);
    buffer = br.ReadBytes(length);
    bw.Write(buffer);

    // Then append the tail bytes, if any
    if (bytesToAddToTail.Length > 0)
    {
        bw.Write(bytesToAddToTail);
    }
}
#endregion
2016-06-13 20:57:49 -07:00
}
}