using System;
using System.Collections.Generic;
using System.Linq;

using Mono.Data.Sqlite;

using SabreTools.Library.Data;
using SabreTools.Library.DatFiles;
using SabreTools.Library.DatItems;
using SabreTools.Library.Tools;

#if MONO
using System.IO;
#else
using Alphaleonis.Win32.Filesystem;

using StreamWriter = System.IO.StreamWriter;
#endif

namespace RombaSharp
{
    public partial class RombaSharp
    {
        #region Init Methods

        /// <summary>
        /// Wrap adding files to the depots
        /// </summary>
        /// <param name="inputs">List of input folders to use</param>
        /// <param name="onlyNeeded">True to add only files that are in the database but do not yet exist in the depots, false otherwise</param>
        /// <param name="resume">Resume a previously interrupted operation from the specified path</param>
        /// <param name="includeZips">Flag value == 1 means add Zip files themselves into the depot in addition to their contents; flag value > 1 means add the Zip files themselves but not their contents</param>
        /// <param name="workers">How many workers to launch for the job, default from config</param>
        /// <param name="includeGZips">Flag value == 1 means add GZip files themselves into the depot in addition to their contents; flag value > 1 means add the GZip files themselves but not their contents</param>
        /// <param name="include7Zips">Flag value == 1 means add 7Zip files themselves into the depot in addition to their contents; flag value > 1 means add the 7Zip files themselves but not their contents</param>
        /// <param name="skipInitialScan">True to skip the initial scan of the files to determine the amount of work, false otherwise</param>
        /// <param name="useGolangZip">True to use the Go zip implementation instead of zlib, false otherwise</param>
        /// <param name="noDb">True to archive into the depot without touching the DB index (ignoring the only-needed flag), false otherwise</param>
        /// TODO: Verify implementation
        private static void InitArchive(
            List<string> inputs,
            bool onlyNeeded,
            string resume,
            int includeZips,
            int workers,
            int includeGZips,
            int include7Zips,
            bool skipInitialScan,
            bool useGolangZip,
            bool noDb)
        {
            // First we want to get just all directories from the inputs
            List<string> onlyDirs = new List<string>();
            foreach (string input in inputs)
            {
                if (Directory.Exists(input))
                {
                    onlyDirs.Add(Path.GetFullPath(input));
                }
            }

            // Then process all of the input directories into an internal DAT
            DatFile df = new DatFile();
            foreach (string dir in onlyDirs)
            {
                // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
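                // Note: the directory appears to be scanned twice on purpose: once descending
                // into archives (archivesAsFiles == false) and once treating archives as plain
                // files (archivesAsFiles == true), so both containers and contents are indexed.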
                df.PopulateFromDir(dir, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true);
                df.PopulateFromDir(dir, Hash.DeepHashes, false, true, SkipFileType.None, false, false, _tmpdir, false, null, true);
            }

            // Create an empty Dat for files that need to be rebuilt
            DatFile need = new DatFile();

            // Open the database connection
            SqliteConnection dbc = new SqliteConnection(_connectionString);
            dbc.Open();

            // Now that we have the Dats, add the files to the database
            string crcquery = "INSERT OR IGNORE INTO crc (crc) VALUES";
            string md5query = "INSERT OR IGNORE INTO md5 (md5) VALUES";
            string sha1query = "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES";
            string crcsha1query = "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES";
            string md5sha1query = "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES";
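
            // Each hash table gets a single batched, multi-row INSERT OR IGNORE statement;
            // value tuples are appended below and the trailing comma is trimmed before execution.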

            foreach (string key in df.Keys)
            {
                List<DatItem> datItems = df[key];
                foreach (Rom rom in datItems)
                {
                    // If we care about whether the file exists, check the database first
                    if (onlyNeeded)
                    {
                        string query = "SELECT * FROM crcsha1 JOIN md5sha1 ON crcsha1.sha1=md5sha1.sha1"
                            + " WHERE crcsha1.crc=\"" + rom.CRC + "\""
                            + " OR md5sha1.md5=\"" + rom.MD5 + "\""
                            + " OR md5sha1.sha1=\"" + rom.SHA1 + "\"";
                        SqliteCommand slc = new SqliteCommand(query, dbc);
                        SqliteDataReader sldr = slc.ExecuteReader();
                        if (sldr.HasRows)
                        {
                            // Add to the queries
                            if (!String.IsNullOrWhiteSpace(rom.CRC))
                            {
                                crcquery += " (\"" + rom.CRC + "\"),";
                            }
                            if (!String.IsNullOrWhiteSpace(rom.MD5))
                            {
                                md5query += " (\"" + rom.MD5 + "\"),";
                            }
                            if (!String.IsNullOrWhiteSpace(rom.SHA1))
                            {
                                sha1query += " (\"" + rom.SHA1 + "\", \"" + _depots.Keys.ToList()[0] + "\"),";

                                if (!String.IsNullOrWhiteSpace(rom.CRC))
                                {
                                    crcsha1query += " (\"" + rom.CRC + "\", \"" + rom.SHA1 + "\"),";
                                }
                                if (!String.IsNullOrWhiteSpace(rom.MD5))
                                {
                                    md5sha1query += " (\"" + rom.MD5 + "\", \"" + rom.SHA1 + "\"),";
                                }
                            }

                            // Add to the Dat
                            need.Add(key, rom);
                        }
                    }
                    // Otherwise, just add the file to the list
                    else
                    {
                        // Add to the queries
                        if (!String.IsNullOrWhiteSpace(rom.CRC))
                        {
                            crcquery += " (\"" + rom.CRC + "\"),";
                        }
                        if (!String.IsNullOrWhiteSpace(rom.MD5))
                        {
                            md5query += " (\"" + rom.MD5 + "\"),";
                        }
                        if (!String.IsNullOrWhiteSpace(rom.SHA1))
                        {
                            sha1query += " (\"" + rom.SHA1 + "\", \"" + _depots.Keys.ToList()[0] + "\"),";

                            if (!String.IsNullOrWhiteSpace(rom.CRC))
                            {
                                crcsha1query += " (\"" + rom.CRC + "\", \"" + rom.SHA1 + "\"),";
                            }
                            if (!String.IsNullOrWhiteSpace(rom.MD5))
                            {
                                md5sha1query += " (\"" + rom.MD5 + "\", \"" + rom.SHA1 + "\"),";
                            }
                        }

                        // Add to the Dat
                        need.Add(key, rom);
                    }
                }
            }

            // Now run the queries, if they're populated
            if (crcquery != "INSERT OR IGNORE INTO crc (crc) VALUES")
            {
                SqliteCommand slc = new SqliteCommand(crcquery.TrimEnd(','), dbc);
                slc.ExecuteNonQuery();
                slc.Dispose();
            }
            if (md5query != "INSERT OR IGNORE INTO md5 (md5) VALUES")
            {
                SqliteCommand slc = new SqliteCommand(md5query.TrimEnd(','), dbc);
                slc.ExecuteNonQuery();
                slc.Dispose();
            }
            if (sha1query != "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES")
            {
                SqliteCommand slc = new SqliteCommand(sha1query.TrimEnd(','), dbc);
                slc.ExecuteNonQuery();
                slc.Dispose();
            }
            if (crcsha1query != "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES")
            {
                SqliteCommand slc = new SqliteCommand(crcsha1query.TrimEnd(','), dbc);
                slc.ExecuteNonQuery();
                slc.Dispose();
            }
            if (md5sha1query != "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES")
            {
                SqliteCommand slc = new SqliteCommand(md5sha1query.TrimEnd(','), dbc);
                slc.ExecuteNonQuery();
                slc.Dispose();
            }

            // Create the sorting object to use and rebuild the needed files
            ArchiveScanLevel asl = Utilities.GetArchiveScanLevelFromNumbers(2, 2, 2, 2);
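            // OutputFormat.TorrentGzip with the romba flag set is assumed to write the rebuilt
            // files into the depot's SHA-1 based .gz directory layout.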
            need.RebuildGeneric(onlyDirs, _depots.Keys.ToList()[0], false /*quickScan*/, false /*date*/,
                false /*delete*/, false /*inverse*/, OutputFormat.TorrentGzip, true /*romba*/, asl, false /*updateDat*/,
                null /*headerToCheckAgainst*/, true /*chdsAsFiles*/);
        }

        /// <summary>
        /// Wrap building all files from a set of DATs
        /// </summary>
        /// <param name="inputs">List of input DATs to rebuild from</param>
        /// <param name="outdat">Output file</param>
        /// <param name="fixdatOnly">True to only fix DATs and not generate torrentzips, false otherwise</param>
        /// <param name="copy">True if files should be copied to output, false for rebuild</param>
        /// <param name="workers">How many workers to launch for the job, default from config</param>
        /// <param name="subworkers">How many subworkers to launch for each worker, default from config</param>
        /// TODO: Verify implementation
        private static void InitBuild(
            List<string> inputs,
            string outdat,
            bool fixdatOnly,
            bool copy,
            int workers,
            int subworkers)
        {
            // Verify the filenames
            Dictionary<string, string> foundDats = GetValidDats(inputs);

            // Create a base output folder
            if (!Directory.Exists("out"))
            {
                Directory.CreateDirectory("out");
            }

            // Now that we have the dictionary, we can loop through and output to a new folder for each
            foreach (string key in foundDats.Keys)
            {
                // Get the DAT file associated with the key
                DatFile datFile = new DatFile();
                datFile.Parse(Path.Combine(_dats, foundDats[key]), 0, 0);

                // Create the new output directory if it doesn't exist
                string outputFolder = Path.Combine("out", Path.GetFileNameWithoutExtension(foundDats[key]));
                if (!Directory.Exists(outputFolder))
                {
                    Directory.CreateDirectory(outputFolder);
                }
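
                // _depots is assumed to map each depot path to a tuple whose second item is an
                // online flag; only depots currently marked online are scanned for the rebuild.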

                // Get all online depots
                List<string> onlineDepots = _depots.Where(d => d.Value.Item2).Select(d => d.Key).ToList();

                // Now scan all of those depots and rebuild
                ArchiveScanLevel asl = Utilities.GetArchiveScanLevelFromNumbers(1, 1, 1, 1);
                datFile.RebuildDepot(onlineDepots, outputFolder, false /*date*/,
                    false /*delete*/, false /*inverse*/, (copy ? OutputFormat.TorrentGzip : OutputFormat.TorrentZip), copy,
                    false /*updateDat*/, null /*headerToCheckAgainst*/);
            }
        }

        /// <summary>
        /// Wrap cancelling a long-running job
        /// </summary>
        private static void InitCancel()
        {
            Globals.Logger.User("This feature is obsolete: cancel");
        }

        /// <summary>
        /// Wrap printing dat stats
        /// </summary>
        /// <param name="inputs">List of input DATs to get stats from</param>
        private static void InitDatStats(List<string> inputs)
        {
            // If we have no inputs listed, we want to use datroot
            if (inputs == null || inputs.Count == 0)
            {
                inputs = new List<string>();
                inputs.Add(Path.GetFullPath(_dats));
            }

            // Now output the stats for all inputs
            DatFile.OutputStats(inputs, "rombasharp-datstats", null /* outDir */, true /* single */, true /* baddumpCol */, true /* nodumpCol */, StatReportFormat.Textfile);
        }

        /// <summary>
        /// Wrap printing db stats
        /// </summary>
        private static void InitDbStats()
        {
            SqliteConnection dbc = new SqliteConnection(_connectionString);
            dbc.Open();

            // Total number of CRCs
            string query = "SELECT COUNT(*) FROM crc";
            SqliteCommand slc = new SqliteCommand(query, dbc);
            Globals.Logger.User("Total CRCs: {0}", (long)slc.ExecuteScalar());

            // Total number of MD5s
            query = "SELECT COUNT(*) FROM md5";
            slc = new SqliteCommand(query, dbc);
            Globals.Logger.User("Total MD5s: {0}", (long)slc.ExecuteScalar());

            // Total number of SHA1s
            query = "SELECT COUNT(*) FROM sha1";
            slc = new SqliteCommand(query, dbc);
            Globals.Logger.User("Total SHA1s: {0}", (long)slc.ExecuteScalar());

            // Total number of DATs
            query = "SELECT COUNT(*) FROM dat";
            slc = new SqliteCommand(query, dbc);
            Globals.Logger.User("Total DATs: {0}", (long)slc.ExecuteScalar());

            slc.Dispose();
            dbc.Dispose();
        }

        /// <summary>
        /// Wrap creating a diffdat for a given old and new dat
        /// </summary>
        /// <param name="outdat">Output file</param>
        /// <param name="old">Old DAT file</param>
        /// <param name="newdat">New DAT file</param>
        /// <param name="name">Name value in DAT header</param>
        /// <param name="description">Description value in DAT header</param>
        private static void InitDiffDat(
            string outdat,
            string old,
            string newdat,
            string name,
            string description)
        {
            // Ensure the output directory
            Utilities.EnsureOutputDirectory(outdat, create: true);

            // Check that all required files exist
            if (!File.Exists(old))
            {
                Globals.Logger.Error("File '{0}' does not exist!", old);
                return;
            }
            if (!File.Exists(newdat))
            {
                Globals.Logger.Error("File '{0}' does not exist!", newdat);
                return;
            }

            // Create the encapsulating datfile
            DatFile datfile = new DatFile()
            {
                Name = name,
                Description = description,
            };

            // Create the inputs
            List<string> dats = new List<string>();
            dats.Add(newdat);
            List<string> basedats = new List<string>();
            basedats.Add(old);
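
            // UpdateMode.DiffAgainst is assumed to keep only entries present in the new DAT
            // but missing from the base (old) DAT, which is what a diffdat represents.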

            // Now run the diff on the inputs
            datfile.DetermineUpdateType(dats, basedats, outdat, UpdateMode.DiffAgainst, false /* inplace */, false /* skip */,
                true /* bare */, false /* clean */, false /* remUnicode */, false /* descAsName */, new Filter(), SplitType.None,
                ReplaceMode.None, false /* onlySame */);
        }

        /// <summary>
        /// Wrap creating a dir2dat from a given source
        /// </summary>
        /// <param name="outdat">Output file</param>
        /// <param name="source">Source directory</param>
        /// <param name="name">Name value in DAT header</param>
        /// <param name="description">Description value in DAT header</param>
        private static void InitDir2Dat(
            string outdat,
            string source,
            string name,
            string description)
        {
            // Ensure the output directory
            Utilities.EnsureOutputDirectory(outdat, create: true);

            // Check that all required directories exist
            if (!Directory.Exists(source))
            {
                Globals.Logger.Error("Directory '{0}' does not exist!", source);
                return;
            }

            // Create the encapsulating datfile
            DatFile datfile = new DatFile()
            {
                Name = (String.IsNullOrWhiteSpace(name) ? "untitled" : name),
                Description = description,
            };

            // Now run the D2D on the input and write out
            // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
            datfile.PopulateFromDir(source, Hash.DeepHashes, true /* bare */, false /* archivesAsFiles */, SkipFileType.None, false /* addBlanks */,
                false /* addDate */, _tmpdir, false /* copyFiles */, null /* headerToCheckAgainst */, true /* chdsAsFiles */);
            datfile.Write(outDir: outdat);
        }

        /// <summary>
        /// Wrap creating a diffdat for a given old and new dat
        /// </summary>
        /// <param name="outdat">Output file</param>
        /// <param name="old">Old DAT file</param>
        /// <param name="newdat">New DAT file</param>
        private static void InitEDiffDat(
            string outdat,
            string old,
            string newdat)
        {
            // Ensure the output directory
            Utilities.EnsureOutputDirectory(outdat, create: true);

            // Check that all required files exist
            if (!File.Exists(old))
            {
                Globals.Logger.Error("File '{0}' does not exist!", old);
                return;
            }
            if (!File.Exists(newdat))
            {
                Globals.Logger.Error("File '{0}' does not exist!", newdat);
                return;
            }

            // Create the encapsulating datfile
            DatFile datfile = new DatFile();

            // Create the inputs
            List<string> dats = new List<string>();
            dats.Add(newdat);
            List<string> basedats = new List<string>();
            basedats.Add(old);

            // Now run the diff on the inputs
            datfile.DetermineUpdateType(dats, basedats, outdat, UpdateMode.DiffAgainst, false /* inplace */, false /* skip */,
                true /* bare */, false /* clean */, false /* remUnicode */, false /* descAsName */, new Filter(), SplitType.None,
                ReplaceMode.None, false /* onlySame */);
        }

        /// <summary>
        /// Wrap exporting the database to CSV
        /// </summary>
        /// TODO: Verify implementation
        private static void InitExport()
        {
            SqliteConnection dbc = new SqliteConnection(_connectionString);
            dbc.Open();

            StreamWriter sw = new StreamWriter(Utilities.TryCreate("export.csv"));
            sw.WriteLine("\"ID\",\"Size\",\"CRC\",\"MD5\",\"SHA-1\",\"In Depot\",\"DAT Hash\"");
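
            // The column order of the SELECT below lines up one-to-one with the CSV header
            // written above; each value is quoted and comma-joined by hand.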
            string query = "SELECT dats.id, size, crc, md5, sha1, indepot, hash FROM data JOIN dats ON data.id=dats.id";
            SqliteCommand slc = new SqliteCommand(query, dbc);
            SqliteDataReader sldr = slc.ExecuteReader();
            if (sldr.HasRows)
            {
                while (sldr.Read())
                {
                    string line = "\"" + sldr.GetInt32(0) + "\","
                        + "\"" + sldr.GetInt64(1) + "\","
                        + "\"" + sldr.GetString(2) + "\","
                        + "\"" + sldr.GetString(3) + "\","
                        + "\"" + sldr.GetString(4) + "\","
                        + "\"" + sldr.GetInt32(5) + "\","
                        + "\"" + sldr.GetString(6) + "\"";
                    sw.WriteLine(line);
                }
            }

            sldr.Dispose();
            slc.Dispose();
            sw.Dispose();
            dbc.Dispose();
        }

        /// <summary>
        /// Wrap creating a fixdat for each Dat
        /// </summary>
        /// <param name="inputs">List of input DATs to get fixdats for</param>
        /// <param name="outdat">Output directory</param>
        /// <param name="fixdatOnly">True to only fix DATs and not generate torrentzips, false otherwise</param>
        /// <param name="workers">How many workers to launch for the job, default from config</param>
        /// <param name="subworkers">How many subworkers to launch for each worker, default from config</param>
        /// TODO: Implement
        private static void InitFixdat(
            List<string> inputs,
            string outdat,
            bool fixdatOnly,
            int workers,
            int subworkers)
        {
            Globals.Logger.Error("This feature is not yet implemented: fixdat");
        }

        /// <summary>
        /// Wrap importing CSVs into the database
        /// </summary>
        /// <param name="inputs">List of input CSV files to import information from</param>
        /// TODO: Implement
        private static void InitImport(List<string> inputs)
        {
            Globals.Logger.Error("This feature is not yet implemented: import");
        }

        /// <summary>
        /// Wrap looking up if hashes exist in the database
        /// </summary>
        /// <param name="inputs">List of input strings representing hashes to check for</param>
        /// <param name="size">Size to limit hash by, -1 otherwise</param>
        /// <param name="outdat">Output directory</param>
        /// TODO: Verify implementation
        private static void InitLookup(
            List<string> inputs,
            long size,
            string outdat)
        {
            Globals.Logger.Error("This feature is not yet implemented: lookup");

            // First, try to figure out what type of hash each is by length and clean it
            List<string> crc = new List<string>();
            List<string> md5 = new List<string>();
            List<string> sha1 = new List<string>();
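
            // Hash type is inferred from the hex string length (assumed: 8 characters for
            // CRC-32, 32 for MD5, 40 for SHA-1); inputs that fail cleaning are skipped.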
            foreach (string input in inputs)
            {
                string temp = "";
                if (input.Length == Constants.CRCLength)
                {
                    temp = Utilities.CleanHashData(input, Constants.CRCLength);
                    if (!String.IsNullOrWhiteSpace(temp))
                    {
                        crc.Add(temp);
                    }
                }
                else if (input.Length == Constants.MD5Length)
                {
                    temp = Utilities.CleanHashData(input, Constants.MD5Length);
                    if (!String.IsNullOrWhiteSpace(temp))
                    {
                        md5.Add(temp);
                    }
                }
                else if (input.Length == Constants.SHA1Length)
                {
                    temp = Utilities.CleanHashData(input, Constants.SHA1Length);
                    if (!String.IsNullOrWhiteSpace(temp))
                    {
                        sha1.Add(temp);
                    }
                }
            }

            SqliteConnection dbc = new SqliteConnection(_connectionString);
            dbc.Open();

            // Now, search for each of them and return true or false for each
            foreach (string input in crc)
            {
                string query = "SELECT * FROM crc WHERE crc=\"" + input + "\"";
                SqliteCommand slc = new SqliteCommand(query, dbc);
                SqliteDataReader sldr = slc.ExecuteReader();
                if (sldr.HasRows)
                {
                    Globals.Logger.User("For hash '{0}' there were {1} matches in the database", input, sldr.RecordsAffected);
                }
                else
                {
                    Globals.Logger.User("Hash '{0}' had no matches in the database", input);
                }

                sldr.Dispose();
                slc.Dispose();
            }
            foreach (string input in md5)
            {
                string query = "SELECT * FROM md5 WHERE md5=\"" + input + "\"";
                SqliteCommand slc = new SqliteCommand(query, dbc);
                SqliteDataReader sldr = slc.ExecuteReader();
                if (sldr.HasRows)
                {
                    Globals.Logger.User("For hash '{0}' there were {1} matches in the database", input, sldr.RecordsAffected);
                }
                else
                {
                    Globals.Logger.User("Hash '{0}' had no matches in the database", input);
                }

                sldr.Dispose();
                slc.Dispose();
            }
            foreach (string input in sha1)
            {
                string query = "SELECT * FROM sha1 WHERE sha1=\"" + input + "\"";
                SqliteCommand slc = new SqliteCommand(query, dbc);
                SqliteDataReader sldr = slc.ExecuteReader();
                if (sldr.HasRows)
                {
                    Globals.Logger.User("For hash '{0}' there were {1} matches in the database", input, sldr.RecordsAffected);
                }
                else
                {
                    Globals.Logger.User("Hash '{0}' had no matches in the database", input);
                }

                sldr.Dispose();
                slc.Dispose();
            }

            dbc.Dispose();
        }

        /// <summary>
        /// Wrap printing memory stats
        /// </summary>
        private static void InitMemstats()
        {
            Globals.Logger.User("This feature is obsolete: memstats");
        }

        /// <summary>
        /// Wrap merging an external depot into an existing one
        /// </summary>
        /// <param name="inputs">List of input depots to merge in</param>
        /// <param name="onlyNeeded">True to add only files that are in the database but do not yet exist in the depots, false otherwise</param>
        /// <param name="resume">Resume a previously interrupted operation from the specified path</param>
        /// <param name="workers">How many workers to launch for the job, default from config</param>
        /// <param name="skipInitialScan">True to skip the initial scan of the files to determine the amount of work, false otherwise</param>
        /// TODO: Add way of specifying "current depot" since that's what Romba relies on
        /// TODO: Implement
        private static void InitMerge(
            List<string> inputs,
            bool onlyNeeded,
            string resume,
            int workers,
            bool skipInitialScan)
        {
            Globals.Logger.Error("This feature is not yet implemented: merge");
        }

        /// <summary>
        /// Wrap creating a havefile and a missfile for each Dat
        /// </summary>
        /// <param name="inputs">List of DAT files to get a miss and have for, empty means all</param>
        /// TODO: Implement
        private static void InitMiss(List<string> inputs)
        {
            Globals.Logger.Error("This feature is not yet implemented: miss");
        }

        /// <summary>
        /// Wrap showing progress of currently running command
        /// </summary>
        private static void InitProgress()
        {
            Globals.Logger.User("This feature is obsolete: progress");
        }

        /// <summary>
        /// Wrap backing up of no longer needed files from the depots
        /// </summary>
        /// <param name="backup">Backup directory where backup files are moved to</param>
        /// <param name="workers">How many workers to launch for the job, default from config</param>
        /// <param name="depot">List of depots to scan files in, empty means all</param>
        /// <param name="dats">List of DATs to use as the basis of scanning, empty means all</param>
        /// <param name="logOnly">True if only the output of the operation is shown, false to actually run</param>
        /// TODO: Implement
        private static void InitPurgeBackup(
            string backup,
            int workers,
            List<string> depot,
            List<string> dats,
            bool logOnly)
        {
            Globals.Logger.Error("This feature is not yet implemented: purge-backup");
        }

        /// <summary>
        /// Wrap deleting of no longer needed files from the depots
        /// </summary>
        /// <param name="workers">How many workers to launch for the job, default from config</param>
        /// <param name="depot">List of depots to scan files in, empty means all</param>
        /// <param name="dats">List of DATs to use as the basis of scanning, empty means all</param>
        /// <param name="logOnly">True if only the output of the operation is shown, false to actually run</param>
        /// TODO: Implement
        private static void InitPurgeDelete(
            int workers,
            List<string> depot,
            List<string> dats,
            bool logOnly)
        {
            Globals.Logger.Error("This feature is not yet implemented: purge-delete");
        }

        /// <summary>
        /// Wrap refreshing the database with potentially new dats
        /// </summary>
        /// <param name="workers">How many workers to launch for the job, default from config</param>
        /// <param name="missingSha1s">Write paths of dats with missing sha1s into this file</param>
        /// TODO: Verify implementation
        private static void InitRefreshDats(
            int workers,
            string missingSha1s)
        {
            // Make sure the db is set
            if (String.IsNullOrWhiteSpace(_db))
            {
                _db = "db.sqlite";
                _connectionString = "Data Source=" + _db + ";Version = 3;";
            }

            // Make sure the file exists
            if (!File.Exists(_db))
            {
                DatabaseTools.EnsureDatabase(_dbSchema, _db, _connectionString);
            }

            // Make sure the dats dir is set
            if (String.IsNullOrWhiteSpace(_dats))
            {
                _dats = "dats";
            }

            _dats = Path.Combine(Globals.ExeDir, _dats);

            // Make sure the folder exists
            if (!Directory.Exists(_dats))
            {
                Directory.CreateDirectory(_dats);
            }

            // First get a list of SHA-1's from the input DATs
            DatFile datroot = new DatFile { Type = "SuperDAT" };
            // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
            datroot.PopulateFromDir(_dats, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true);
            datroot.BucketBy(SortedBy.SHA1, DedupeType.None);
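
            // Bucketing the scanned DAT files by SHA-1 lets each hash from the database be
            // matched against the on-disk DATs directly by key below.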

            // Create a List of dat hashes in the database (SHA-1)
            List<string> databaseDats = new List<string>();
            List<string> unneeded = new List<string>();

            SqliteConnection dbc = new SqliteConnection(_connectionString);
            dbc.Open();

            // Populate the List from the database
            InternalStopwatch watch = new InternalStopwatch("Populating the list of existing DATs");

            string query = "SELECT DISTINCT hash FROM dat";
            SqliteCommand slc = new SqliteCommand(query, dbc);
            SqliteDataReader sldr = slc.ExecuteReader();
            if (sldr.HasRows)
            {
                while (sldr.Read())
                {
                    string hash = sldr.GetString(0);
                    if (datroot.Contains(hash))
                    {
                        datroot.Remove(hash);
                        databaseDats.Add(hash);
                    }
                    else if (!databaseDats.Contains(hash))
                    {
                        unneeded.Add(hash);
                    }
                }
            }

            datroot.BucketBy(SortedBy.Game, DedupeType.None, norename: true);

            watch.Stop();

            slc.Dispose();
            sldr.Dispose();

            // Loop through the Dictionary and add all data
            watch.Start("Adding new DAT information");
            foreach (string key in datroot.Keys)
            {
                foreach (Rom value in datroot[key])
                {
                    AddDatToDatabase(value, dbc);
                }
            }

            watch.Stop();

            // Now loop through and remove all references to old Dats
            watch.Start("Removing unmatched DAT information");
            foreach (string dathash in unneeded)
            {
                query = "DELETE FROM dat WHERE hash=\"" + dathash + "\"";
                slc = new SqliteCommand(query, dbc);
                slc.ExecuteNonQuery();
                slc.Dispose();
            }

            watch.Stop();

            dbc.Dispose();
        }

        /// <summary>
        /// Wrap rescanning depots
        /// </summary>
        /// <param name="inputs">List of depots to rescan, empty means all</param>
        /// TODO: Verify implementation
        private static void InitRescanDepots(List<string> inputs)
        {
            Globals.Logger.Error("This feature is not yet implemented: rescan-depots");

            foreach (string depotname in inputs)
            {
                // Check that it's a valid depot first
                if (!_depots.ContainsKey(depotname))
                {
                    Globals.Logger.User("'{0}' is not a recognized depot. Please add it to your configuration file and try again", depotname);
                    return;
                }

                // Then check that the depot is online
                if (!Directory.Exists(depotname))
                {
                    Globals.Logger.User("'{0}' does not appear to be online. Please check its status and try again", depotname);
                    return;
                }

                // Open the database connection
                SqliteConnection dbc = new SqliteConnection(_connectionString);
                dbc.Open();

                // If we have it, then check for all hashes that are in that depot
                List<string> hashes = new List<string>();
                string query = "SELECT sha1 FROM sha1 WHERE depot=\"" + depotname + "\"";
                SqliteCommand slc = new SqliteCommand(query, dbc);
                SqliteDataReader sldr = slc.ExecuteReader();
                if (sldr.HasRows)
                {
                    while (sldr.Read())
                    {
                        hashes.Add(sldr.GetString(0));
                    }
                }

                // Now rescan the depot itself
                DatFile depot = new DatFile();
                // TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
                depot.PopulateFromDir(depotname, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, null, true);
                depot.BucketBy(SortedBy.SHA1, DedupeType.None);

                // Set the base queries to use
                string crcquery = "INSERT OR IGNORE INTO crc (crc) VALUES";
                string md5query = "INSERT OR IGNORE INTO md5 (md5) VALUES";
                string sha1query = "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES";
                string crcsha1query = "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES";
                string md5sha1query = "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES";
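
                // After the walk below, "hashes" holds database entries with no matching file
                // on disk, while the query strings accumulate new on-disk files to insert.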

                // Once we have both, check for any new files
                List<string> dupehashes = new List<string>();
                List<string> keys = depot.Keys;
                foreach (string key in keys)
                {
                    List<DatItem> roms = depot[key];
                    foreach (Rom rom in roms)
                    {
                        if (hashes.Contains(rom.SHA1))
                        {
                            dupehashes.Add(rom.SHA1);
                            hashes.Remove(rom.SHA1);
                        }
                        else if (!dupehashes.Contains(rom.SHA1))
                        {
                            if (!String.IsNullOrWhiteSpace(rom.CRC))
                            {
                                crcquery += " (\"" + rom.CRC + "\"),";
                            }
                            if (!String.IsNullOrWhiteSpace(rom.MD5))
                            {
                                md5query += " (\"" + rom.MD5 + "\"),";
                            }
                            if (!String.IsNullOrWhiteSpace(rom.SHA1))
                            {
                                sha1query += " (\"" + rom.SHA1 + "\", \"" + depotname + "\"),";

                                if (!String.IsNullOrWhiteSpace(rom.CRC))
                                {
                                    crcsha1query += " (\"" + rom.CRC + "\", \"" + rom.SHA1 + "\"),";
                                }
                                if (!String.IsNullOrWhiteSpace(rom.MD5))
                                {
                                    md5sha1query += " (\"" + rom.MD5 + "\", \"" + rom.SHA1 + "\"),";
                                }
                            }
                        }
                    }
                }

                // Now run the queries after fixing them
                if (crcquery != "INSERT OR IGNORE INTO crc (crc) VALUES")
                {
                    slc = new SqliteCommand(crcquery.TrimEnd(','), dbc);
                    slc.ExecuteNonQuery();
                }
                if (md5query != "INSERT OR IGNORE INTO md5 (md5) VALUES")
                {
                    slc = new SqliteCommand(md5query.TrimEnd(','), dbc);
                    slc.ExecuteNonQuery();
                }
                if (sha1query != "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES")
                {
                    slc = new SqliteCommand(sha1query.TrimEnd(','), dbc);
                    slc.ExecuteNonQuery();
                }
                if (crcsha1query != "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES")
                {
                    slc = new SqliteCommand(crcsha1query.TrimEnd(','), dbc);
                    slc.ExecuteNonQuery();
                }
                if (md5sha1query != "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES")
                {
                    slc = new SqliteCommand(md5sha1query.TrimEnd(','), dbc);
                    slc.ExecuteNonQuery();
                }
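
                // TODO: SQLite's DELETE statement does not support JOIN clauses, so the cleanup
                // query below likely needs to be rewritten with subqueries before it will run.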

                // Now that we've added the information, we get to remove all of the hashes that we want to
                query = @"DELETE FROM sha1
                    JOIN crcsha1 ON sha1.sha1=crcsha1.sha1
                    JOIN md5sha1 ON sha1.sha1=md5sha1.sha1
                    JOIN crc ON crcsha1.crc=crc.crc
                    JOIN md5 ON md5sha1.md5=md5.md5
                    WHERE sha1.sha1 IN (""" + String.Join("\",\"", hashes) + "\")";
                slc = new SqliteCommand(query, dbc);
                slc.ExecuteNonQuery();

                // Dispose of the database connection
                slc.Dispose();
                dbc.Dispose();
            }
        }

        /// <summary>
        /// Wrap gracefully shutting down the server
        /// </summary>
        private static void InitShutdown()
        {
            Globals.Logger.User("This feature is obsolete: shutdown");
        }

        /// <summary>
        /// Wrap printing the version
        /// </summary>
        private static void InitVersion()
        {
            Globals.Logger.User("RombaSharp version: {0}", Constants.Version);
        }

        #endregion
    }
}