2016-11-04 11:57:32 -07:00
using System ;
using System.Collections.Generic ;
using System.Linq ;
2017-03-18 21:26:50 -07:00
using System.Threading.Tasks ;
2016-11-04 11:57:32 -07:00
using System.Web ;
2017-05-04 02:41:11 -07:00
using SabreTools.Library.Data ;
using SabreTools.Library.Tools ;
2016-11-04 11:57:32 -07:00
#if MONO
using System.IO ;
#else
using Alphaleonis.Win32.Filesystem ;
using SearchOption = System . IO . SearchOption ;
using StreamWriter = System . IO . StreamWriter ;
#endif
2017-05-04 02:41:11 -07:00
namespace SabreTools.Library.Dats
2016-11-04 11:57:32 -07:00
{
/*
 * TODO: Make output standard width (HTML, without making the entire thing a table)
 * TODO: Multithreading? Either StringBuilder or locking
 */
2016-11-04 11:57:32 -07:00
public partial class DatFile
{
#region Instance Methods
2017-03-18 00:04:59 -07:00
#region Statistics
2016-11-04 11:57:32 -07:00
/// <summary>
/// Add to the internal statistics given a DatItem
/// </summary>
/// <param name="item">Item to add info from</param>
private void AddItemStatistics(DatItem item)
{
    // No matter what the item is, we increment the total count
    lock (_statslock)
    {
        _count += 1;

        // Now we do different things for each item type
        switch (item.Type)
        {
            case ItemType.Archive:
                break;
            case ItemType.BiosSet:
                break;
            case ItemType.Disk:
                // Cast once instead of repeating ((Disk)item) on every line
                Disk disk = (Disk)item;
                _diskCount += 1;
                if (disk.ItemStatus != ItemStatus.Nodump)
                {
                    // Only count hashes for dumps that actually exist
                    _md5Count += (String.IsNullOrEmpty(disk.MD5) ? 0 : 1);
                    _sha1Count += (String.IsNullOrEmpty(disk.SHA1) ? 0 : 1);
                    _sha256Count += (String.IsNullOrEmpty(disk.SHA256) ? 0 : 1);
                    _sha384Count += (String.IsNullOrEmpty(disk.SHA384) ? 0 : 1);
                    _sha512Count += (String.IsNullOrEmpty(disk.SHA512) ? 0 : 1);
                }
                _baddumpCount += (disk.ItemStatus == ItemStatus.BadDump ? 1 : 0);
                _nodumpCount += (disk.ItemStatus == ItemStatus.Nodump ? 1 : 0);
                break;
            case ItemType.Release:
                break;
            case ItemType.Rom:
                Rom rom = (Rom)item;
                _romCount += 1;
                if (rom.ItemStatus != ItemStatus.Nodump)
                {
                    _totalSize += rom.Size;
                    _crcCount += (String.IsNullOrEmpty(rom.CRC) ? 0 : 1);
                    _md5Count += (String.IsNullOrEmpty(rom.MD5) ? 0 : 1);
                    _sha1Count += (String.IsNullOrEmpty(rom.SHA1) ? 0 : 1);
                    _sha256Count += (String.IsNullOrEmpty(rom.SHA256) ? 0 : 1);
                    _sha384Count += (String.IsNullOrEmpty(rom.SHA384) ? 0 : 1);
                    _sha512Count += (String.IsNullOrEmpty(rom.SHA512) ? 0 : 1);
                }
                _baddumpCount += (rom.ItemStatus == ItemStatus.BadDump ? 1 : 0);
                _nodumpCount += (rom.ItemStatus == ItemStatus.Nodump ? 1 : 0);
                break;
            case ItemType.Sample:
                break;
        }
    }
}
/// <summary>
/// Remove from the internal statistics given a DatItem
/// </summary>
/// <param name="item">Item to remove info for</param>
private void RemoveItemStatistics(DatItem item)
{
    // No matter what the item is, we decrement the total count
    lock (_statslock)
    {
        _count -= 1;

        // Now we do different things for each item type
        switch (item.Type)
        {
            case ItemType.Archive:
                break;
            case ItemType.BiosSet:
                break;
            case ItemType.Disk:
                // Cast once instead of repeating ((Disk)item) on every line
                Disk disk = (Disk)item;
                _diskCount -= 1;
                if (disk.ItemStatus != ItemStatus.Nodump)
                {
                    // Mirror of AddItemStatistics: only counted hashes get removed
                    _md5Count -= (String.IsNullOrEmpty(disk.MD5) ? 0 : 1);
                    _sha1Count -= (String.IsNullOrEmpty(disk.SHA1) ? 0 : 1);
                    _sha256Count -= (String.IsNullOrEmpty(disk.SHA256) ? 0 : 1);
                    _sha384Count -= (String.IsNullOrEmpty(disk.SHA384) ? 0 : 1);
                    _sha512Count -= (String.IsNullOrEmpty(disk.SHA512) ? 0 : 1);
                }
                _baddumpCount -= (disk.ItemStatus == ItemStatus.BadDump ? 1 : 0);
                _nodumpCount -= (disk.ItemStatus == ItemStatus.Nodump ? 1 : 0);
                break;
            case ItemType.Release:
                break;
            case ItemType.Rom:
                Rom rom = (Rom)item;
                _romCount -= 1;
                if (rom.ItemStatus != ItemStatus.Nodump)
                {
                    _totalSize -= rom.Size;
                    _crcCount -= (String.IsNullOrEmpty(rom.CRC) ? 0 : 1);
                    _md5Count -= (String.IsNullOrEmpty(rom.MD5) ? 0 : 1);
                    _sha1Count -= (String.IsNullOrEmpty(rom.SHA1) ? 0 : 1);
                    _sha256Count -= (String.IsNullOrEmpty(rom.SHA256) ? 0 : 1);
                    _sha384Count -= (String.IsNullOrEmpty(rom.SHA384) ? 0 : 1);
                    _sha512Count -= (String.IsNullOrEmpty(rom.SHA512) ? 0 : 1);
                }
                _baddumpCount -= (rom.ItemStatus == ItemStatus.BadDump ? 1 : 0);
                _nodumpCount -= (rom.ItemStatus == ItemStatus.Nodump ? 1 : 0);
                break;
            case ItemType.Sample:
                break;
        }
    }
}
/// <summary>
/// Reset all internal statistics counters back to zero
/// </summary>
private void ResetStatistics()
{
    // Overall counts
    _count = 0;
    _romCount = 0;
    _diskCount = 0;
    _totalSize = 0;

    // Hash counts
    _crcCount = _md5Count = _sha1Count = 0;
    _sha256Count = _sha384Count = _sha512Count = 0;

    // Status counts
    _baddumpCount = 0;
    _nodumpCount = 0;
}
/// <summary>
/// Recalculate the statistics for the Dat
/// </summary>
public void RecalculateStats()
{
    // Wipe out any stats already there
    ResetStatistics();

    // If we have a blank Dat in any way, return
    // (BUGFIX: removed the dead `this == null` test; an instance method can
    // never observe a null `this` from a normal invocation)
    if (Count == 0)
    {
        return;
    }

    // Loop through and add; AddItemStatistics takes _statslock internally,
    // so the parallel iteration is safe
    List<string> keys = Keys.ToList();
    Parallel.ForEach(keys, key =>
    {
        List<DatItem> items = this[key];
        foreach (DatItem item in items)
        {
            AddItemStatistics(item);
        }
    });
}
/// <summary>
/// Output the stats for the Dat in a human-readable format
/// </summary>
/// <param name="outputs">Dictionary representing the outputs</param>
/// <param name="statDatFormat">Set the statistics output format to use</param>
/// <param name="recalculate">True if numbers should be recalculated for the DAT, false otherwise (default)</param>
/// <param name="game">Number of games to use, -1 means recalculate games (default)</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise (default)</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise (default)</param>
public void OutputStats(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat,
    bool recalculate = false, long game = -1, bool baddumpCol = false, bool nodumpCol = false)
{
    // If we're supposed to recalculate the statistics, do so
    if (recalculate)
    {
        RecalculateStats();
    }

    BucketBy(SortedBy.Game, false /* mergeroms */, norename: true);

    // If the accumulated size went negative, wrap it back into positive range
    if (_totalSize < 0)
    {
        _totalSize = Int64.MaxValue + _totalSize;
    }

    // Log the results to screen
    string results = @"For '" + _fileName + @"':
--------------------------------------------------
    Uncompressed size:       " + Style.GetBytesReadable(_totalSize) + @"
    Games found:             " + (game == -1 ? Keys.Count() : game) + @"
    Roms found:              " + _romCount + @"
    Disks found:             " + _diskCount + @"
    Roms with CRC:           " + _crcCount + @"
    Roms with MD5:           " + _md5Count + @"
    Roms with SHA-1:         " + _sha1Count + @"
    Roms with SHA-256:       " + _sha256Count + @"
    Roms with SHA-384:       " + _sha384Count + @"
    Roms with SHA-512:       " + _sha512Count + "\n";

    if (baddumpCol)
    {
        results += "    Roms with BadDump status: " + _baddumpCount + "\n";
    }
    if (nodumpCol)
    {
        results += "    Roms with Nodump status: " + _nodumpCount + "\n";
    }

    // For spacing between DATs
    results += "\n\n";

    Globals.Logger.User(results);

    // Now write it out to file as well
    string line = "";
    if (outputs.ContainsKey(StatDatFormat.None))
    {
        // BUGFIX: the "Roms with MD5" line was missing here even though the
        // console output above includes it; the two listings now match
        line = @"'" + _fileName + @"':
--------------------------------------------------
    Uncompressed size:       " + Style.GetBytesReadable(_totalSize) + @"
    Games found:             " + (game == -1 ? Keys.Count() : game) + @"
    Roms found:              " + _romCount + @"
    Disks found:             " + _diskCount + @"
    Roms with CRC:           " + _crcCount + @"
    Roms with MD5:           " + _md5Count + @"
    Roms with SHA-1:         " + _sha1Count + @"
    Roms with SHA-256:       " + _sha256Count + @"
    Roms with SHA-384:       " + _sha384Count + @"
    Roms with SHA-512:       " + _sha512Count + "\n";

        if (baddumpCol)
        {
            line += "    Roms with BadDump status: " + _baddumpCount + "\n";
        }
        if (nodumpCol)
        {
            line += "    Roms with Nodump status: " + _nodumpCount + "\n";
        }

        // For spacing between DATs
        line += "\n\n";
        outputs[StatDatFormat.None].Write(line);
    }
    if (outputs.ContainsKey(StatDatFormat.CSV))
    {
        line = "\"" + _fileName + "\","
            + "\"" + _totalSize + "\","
            + "\"" + (game == -1 ? Keys.Count() : game) + "\","
            + "\"" + _romCount + "\","
            + "\"" + _diskCount + "\","
            + "\"" + _crcCount + "\","
            + "\"" + _md5Count + "\","
            + "\"" + _sha1Count + "\","
            + "\"" + _sha256Count + "\","
            + "\"" + _sha384Count + "\","
            + "\"" + _sha512Count + "\"";

        if (baddumpCol)
        {
            line += ",\"" + _baddumpCount + "\"";
        }
        if (nodumpCol)
        {
            line += ",\"" + _nodumpCount + "\"";
        }

        line += "\n";
        outputs[StatDatFormat.CSV].Write(line);
    }
    if (outputs.ContainsKey(StatDatFormat.HTML))
    {
        // NOTE(review): unlike CSV/TSV, the HTML row stops at SHA-256 (no
        // SHA-384/512 cells) — confirm against the HTML header before adding them
        line = "\t\t\t<tr" + (_fileName.StartsWith("DIR: ")
                ? " class=\"dir\"><td>" + HttpUtility.HtmlEncode(_fileName.Remove(0, 5))
                : "><td>" + HttpUtility.HtmlEncode(_fileName)) + "</td>"
            + "<td align=\"right\">" + Style.GetBytesReadable(_totalSize) + "</td>"
            + "<td align=\"right\">" + (game == -1 ? Keys.Count() : game) + "</td>"
            + "<td align=\"right\">" + _romCount + "</td>"
            + "<td align=\"right\">" + _diskCount + "</td>"
            + "<td align=\"right\">" + _crcCount + "</td>"
            + "<td align=\"right\">" + _md5Count + "</td>"
            + "<td align=\"right\">" + _sha1Count + "</td>"
            + "<td align=\"right\">" + _sha256Count + "</td>";

        if (baddumpCol)
        {
            line += "<td align=\"right\">" + _baddumpCount + "</td>";
        }
        if (nodumpCol)
        {
            line += "<td align=\"right\">" + _nodumpCount + "</td>";
        }

        line += "</tr>\n";
        outputs[StatDatFormat.HTML].Write(line);
    }
    if (outputs.ContainsKey(StatDatFormat.TSV))
    {
        line = "\"" + _fileName + "\"\t"
            + "\"" + _totalSize + "\"\t"
            + "\"" + (game == -1 ? Keys.Count() : game) + "\"\t"
            + "\"" + _romCount + "\"\t"
            + "\"" + _diskCount + "\"\t"
            + "\"" + _crcCount + "\"\t"
            + "\"" + _md5Count + "\"\t"
            + "\"" + _sha1Count + "\"\t"
            + "\"" + _sha256Count + "\"\t"
            + "\"" + _sha384Count + "\"\t"
            + "\"" + _sha512Count + "\"";

        if (baddumpCol)
        {
            line += "\t\"" + _baddumpCount + "\"";
        }
        if (nodumpCol)
        {
            line += "\t\"" + _nodumpCount + "\"";
        }

        line += "\n";
        outputs[StatDatFormat.TSV].Write(line);
    }
}
#endregion

#endregion // Instance Methods

#region Static Methods

#region Statistics
/// <summary>
/// Output the stats for a list of input dats as files in a human-readable format
/// </summary>
/// <param name="inputs">List of input files and folders</param>
/// <param name="reportName">Name of the output file</param>
/// <param name="outDir">Output directory for the reports</param>
/// <param name="single">True if single DAT stats are output, false otherwise</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
/// <param name="statDatFormat">Set the statistics output format to use</param>
public static void OutputStats(List<string> inputs, string reportName, string outDir, bool single,
    bool baddumpCol, bool nodumpCol, StatDatFormat statDatFormat)
{
    // If there's no output format, set the default
    if (statDatFormat == 0x0)
    {
        statDatFormat = StatDatFormat.None;
    }

    // Get the proper output file name
    if (String.IsNullOrEmpty(reportName))
    {
        reportName = "report";
    }
    outDir = Path.GetFullPath(outDir);

    // Get the dictionary of desired outputs
    Dictionary<StatDatFormat, StreamWriter> outputs = OutputStatsGetOutputWriters(statDatFormat, reportName, outDir);

    // Make sure we have all files; each tuple is (item, basepath)
    List<Tuple<string, string>> newinputs = new List<Tuple<string, string>>();
    Parallel.ForEach(inputs, input =>
    {
        if (File.Exists(input))
        {
            lock (newinputs)
            {
                newinputs.Add(Tuple.Create(Path.GetFullPath(input), Path.GetDirectoryName(Path.GetFullPath(input))));
            }
        }
        if (Directory.Exists(input))
        {
            foreach (string file in Directory.GetFiles(input, "*", SearchOption.AllDirectories))
            {
                lock (newinputs)
                {
                    newinputs.Add(Tuple.Create(Path.GetFullPath(file), Path.GetFullPath(input)));
                }
            }
        }
    });

    // Sort deterministically since Parallel.ForEach adds in arbitrary order
    newinputs = newinputs
        .OrderBy(i => Path.GetDirectoryName(i.Item1))
        .ThenBy(i => Path.GetFileName(i.Item1))
        .ToList();

    // Write the header, if any
    OutputStatsWriteHeader(outputs, statDatFormat, baddumpCol, nodumpCol);

    // Init all total variables
    long totalSize = 0;
    long totalGame = 0;
    long totalRom = 0;
    long totalDisk = 0;
    long totalCRC = 0;
    long totalMD5 = 0;
    long totalSHA1 = 0;
    long totalSHA256 = 0;
    long totalSHA384 = 0;
    long totalSHA512 = 0;
    long totalBaddump = 0;
    long totalNodump = 0;

    // Init directory-level variables
    string lastdir = null;
    string basepath = null;
    long dirSize = 0;
    long dirGame = 0;
    long dirRom = 0;
    long dirDisk = 0;
    long dirCRC = 0;
    long dirMD5 = 0;
    long dirSHA1 = 0;
    long dirSHA256 = 0;
    long dirSHA384 = 0;
    long dirSHA512 = 0;
    long dirBaddump = 0;
    long dirNodump = 0;

    // Now process each of the input files
    foreach (Tuple<string, string> filename in newinputs)
    {
        // Get the directory for the current file
        string thisdir = Path.GetDirectoryName(filename.Item1);
        basepath = Path.GetDirectoryName(filename.Item2);

        // If we don't have the first file and the directory has changed, show the previous directory stats and reset
        if (lastdir != null && thisdir != lastdir)
        {
            // Output separator if needed
            OutputStatsWriteMidSeparator(outputs, statDatFormat, baddumpCol, nodumpCol);

            DatFile lastdirdat = new DatFile
            {
                _fileName = "DIR: " + HttpUtility.HtmlEncode(lastdir.Remove(0, basepath.Length + (basepath.Length == 0 ? 0 : 1))),
                _totalSize = dirSize,
                _romCount = dirRom,
                _diskCount = dirDisk,
                _crcCount = dirCRC,
                _md5Count = dirMD5,
                _sha1Count = dirSHA1,
                _sha256Count = dirSHA256,
                _sha384Count = dirSHA384,
                _sha512Count = dirSHA512,
                _baddumpCount = dirBaddump,
                _nodumpCount = dirNodump,
            };
            lastdirdat.OutputStats(outputs, statDatFormat,
                game: dirGame, baddumpCol: baddumpCol, nodumpCol: nodumpCol);

            // Write the mid-footer, if any
            OutputStatsWriteMidFooter(outputs, statDatFormat, baddumpCol, nodumpCol);

            // Write the header, if any
            OutputStatsWriteMidHeader(outputs, statDatFormat, baddumpCol, nodumpCol);

            // Reset the directory stats
            dirSize = 0;
            dirGame = 0;
            dirRom = 0;
            dirDisk = 0;
            dirCRC = 0;
            dirMD5 = 0;
            dirSHA1 = 0;
            dirSHA256 = 0;
            dirSHA384 = 0;
            dirSHA512 = 0;
            dirBaddump = 0;
            dirNodump = 0;
        }

        Globals.Logger.Verbose("Beginning stat collection for '" + filename.Item1 + "'", false);
        DatFile datdata = new DatFile();
        datdata.Parse(filename.Item1, 0, 0);
        datdata.BucketBy(SortedBy.Game, false /* mergeroms */, norename: true);

        // Output single DAT stats (if asked)
        Globals.Logger.User("Adding stats for file '" + filename.Item1 + "'\n", false);
        if (single)
        {
            datdata.OutputStats(outputs, statDatFormat,
                baddumpCol: baddumpCol, nodumpCol: nodumpCol);
        }

        // Add single DAT stats to dir
        dirSize += datdata.TotalSize;
        dirGame += datdata.Keys.Count();
        dirRom += datdata.RomCount;
        dirDisk += datdata.DiskCount;
        dirCRC += datdata.CRCCount;
        dirMD5 += datdata.MD5Count;
        dirSHA1 += datdata.SHA1Count;
        dirSHA256 += datdata.SHA256Count;
        dirSHA384 += datdata.SHA384Count;
        dirSHA512 += datdata.SHA512Count;
        dirBaddump += datdata.BaddumpCount;
        dirNodump += datdata.NodumpCount;

        // Add single DAT stats to totals
        totalSize += datdata.TotalSize;
        totalGame += datdata.Keys.Count();
        totalRom += datdata.RomCount;
        totalDisk += datdata.DiskCount;
        totalCRC += datdata.CRCCount;
        totalMD5 += datdata.MD5Count;
        totalSHA1 += datdata.SHA1Count;
        totalSHA256 += datdata.SHA256Count;
        totalSHA384 += datdata.SHA384Count;
        totalSHA512 += datdata.SHA512Count;
        totalBaddump += datdata.BaddumpCount;
        totalNodump += datdata.NodumpCount;

        // Make sure to assign the new directory
        lastdir = thisdir;
    }

    // Output the directory stats one last time
    OutputStatsWriteMidSeparator(outputs, statDatFormat, baddumpCol, nodumpCol);

    // BUGFIX: guard on lastdir so an empty input list no longer throws a
    // NullReferenceException on lastdir.Remove below
    if (single && lastdir != null)
    {
        DatFile dirdat = new DatFile
        {
            _fileName = "DIR: " + HttpUtility.HtmlEncode(lastdir.Remove(0, basepath.Length + (basepath.Length == 0 ? 0 : 1))),
            _totalSize = dirSize,
            _romCount = dirRom,
            _diskCount = dirDisk,
            _crcCount = dirCRC,
            _md5Count = dirMD5,
            _sha1Count = dirSHA1,
            _sha256Count = dirSHA256,
            _sha384Count = dirSHA384,
            _sha512Count = dirSHA512,
            _baddumpCount = dirBaddump,
            _nodumpCount = dirNodump,
        };
        dirdat.OutputStats(outputs, statDatFormat,
            game: dirGame, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
    }

    // Write the mid-footer, if any
    OutputStatsWriteMidFooter(outputs, statDatFormat, baddumpCol, nodumpCol);

    // Write the header, if any
    OutputStatsWriteMidHeader(outputs, statDatFormat, baddumpCol, nodumpCol);

    // Reset the directory stats
    dirSize = 0;
    dirGame = 0;
    dirRom = 0;
    dirDisk = 0;
    dirCRC = 0;
    dirMD5 = 0;
    dirSHA1 = 0;
    dirSHA256 = 0;
    dirSHA384 = 0;
    dirSHA512 = 0;
    dirBaddump = 0; // BUGFIX: this reset was missing, leaving a stale baddump count
    dirNodump = 0;

    // Output total DAT stats
    DatFile totaldata = new DatFile
    {
        _fileName = "DIR: All DATs",
        _totalSize = totalSize,
        _romCount = totalRom,
        _diskCount = totalDisk,
        _crcCount = totalCRC,
        _md5Count = totalMD5,
        _sha1Count = totalSHA1,
        _sha256Count = totalSHA256,
        _sha384Count = totalSHA384,
        _sha512Count = totalSHA512,
        _baddumpCount = totalBaddump,
        _nodumpCount = totalNodump,
    };
    totaldata.OutputStats(outputs, statDatFormat,
        game: totalGame, baddumpCol: baddumpCol, nodumpCol: nodumpCol);

    // Output footer if needed
    OutputStatsWriteFooter(outputs, statDatFormat);

    // Flush and dispose of the stream writers
    foreach (StatDatFormat format in outputs.Keys)
    {
        outputs[format].Flush();
        outputs[format].Dispose();
    }

    Globals.Logger.User(@"
Please check the log folder if the stats scrolled offscreen", false);
}
/// <summary>
/// Get the proper extension for the stat output format
/// </summary>
/// <param name="statDatFormat">StatDatFormat to get the extension for</param>
/// <param name="reportName">Name of the input file to use</param>
/// <param name="outDir">Output path to use</param>
/// <returns>Dictionary of file types to StreamWriters</returns>
private static Dictionary<StatDatFormat, StreamWriter> OutputStatsGetOutputWriters(StatDatFormat statDatFormat, string reportName, string outDir)
{
    Dictionary<StatDatFormat, StreamWriter> output = new Dictionary<StatDatFormat, StreamWriter>();

    // First try to create the output directory if we need to
    if (!Directory.Exists(outDir))
    {
        Directory.CreateDirectory(outDir);
    }

    // Derive the extensionless base name once instead of repeatedly re-deriving
    // it from the previous branch's fully-combined path
    string baseName = Style.GetFileNameWithoutExtension(reportName);

    // For each output format, get the appropriate stream writer
    if ((statDatFormat & StatDatFormat.None) != 0)
    {
        string fileName = Path.Combine(outDir, baseName + ".txt");
        output.Add(StatDatFormat.None, new StreamWriter(FileTools.TryCreate(fileName)));
    }
    if ((statDatFormat & StatDatFormat.CSV) != 0)
    {
        string fileName = Path.Combine(outDir, baseName + ".csv");
        output.Add(StatDatFormat.CSV, new StreamWriter(FileTools.TryCreate(fileName)));
    }
    if ((statDatFormat & StatDatFormat.HTML) != 0)
    {
        string fileName = Path.Combine(outDir, baseName + ".html");
        output.Add(StatDatFormat.HTML, new StreamWriter(FileTools.TryCreate(fileName)));
    }
    if ((statDatFormat & StatDatFormat.TSV) != 0)
    {
        // BUGFIX: was ".csv", which clobbered the CSV report when both
        // formats were requested at once
        string fileName = Path.Combine(outDir, baseName + ".tsv");
        output.Add(StatDatFormat.TSV, new StreamWriter(FileTools.TryCreate(fileName)));
    }

    return output;
}
/// <summary>
/// Write out the header to the stream, if any exists
/// </summary>
/// <param name="outputs">Dictionary representing the outputs</param>
/// <param name="statDatFormat">StatDatFormat representing output format</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
private static void OutputStatsWriteHeader(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat, bool baddumpCol, bool nodumpCol)
{
    if (outputs.ContainsKey(StatDatFormat.None))
    {
        // Nothing
    }
    if (outputs.ContainsKey(StatDatFormat.CSV))
    {
        // BUGFIX: added SHA-384/SHA-512 columns so the header matches the
        // 11 data fields each CSV row actually writes
        outputs[StatDatFormat.CSV].Write("\"File Name\",\"Total Size\",\"Games\",\"Roms\",\"Disks\",\"# with CRC\",\"# with MD5\",\"# with SHA-1\",\"# with SHA-256\",\"# with SHA-384\",\"# with SHA-512\""
            + (baddumpCol ? ",\"BadDumps\"" : "") + (nodumpCol ? ",\"Nodumps\"" : "") + "\n");
    }
    if (outputs.ContainsKey(StatDatFormat.HTML))
    {
        outputs[StatDatFormat.HTML].Write(@"<!DOCTYPE html>
<html>
<header>
    <title>DAT Statistics Report</title>
    <style>
        body {
            background-color: lightgray;
        }
        .dir {
            color: #0088FF;
        }
        .right {
            align: right;
        }
    </style>
</header>
<body>
<h2>DAT Statistics Report (" + DateTime.Now.ToShortDateString() + @")</h2>
<table border=""1"" cellpadding=""5"" cellspacing=""0"">
");
    }
    if (outputs.ContainsKey(StatDatFormat.TSV))
    {
        // BUGFIX: added SHA-384/SHA-512 columns so the header matches the
        // 11 data fields each TSV row actually writes
        outputs[StatDatFormat.TSV].Write("\"File Name\"\t\"Total Size\"\t\"Games\"\t\"Roms\"\t\"Disks\"\t\"# with CRC\"\t\"# with MD5\"\t\"# with SHA-1\"\t\"# with SHA-256\"\t\"# with SHA-384\"\t\"# with SHA-512\""
            + (baddumpCol ? "\t\"BadDumps\"" : "") + (nodumpCol ? "\t\"Nodumps\"" : "") + "\n");
    }

    // Now write the mid header for those who need it
    OutputStatsWriteMidHeader(outputs, statDatFormat, baddumpCol, nodumpCol);
}
/// <summary>
/// Write out the mid-header to the stream, if any exists
/// </summary>
/// <param name="outputs">Dictionary representing the outputs</param>
/// <param name="statDatFormat">StatDatFormat representing output format</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
private static void OutputStatsWriteMidHeader(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat, bool baddumpCol, bool nodumpCol)
{
    // Only the HTML report uses a repeating column header row; the other
    // formats have nothing to emit here
    if (outputs.ContainsKey(StatDatFormat.HTML))
    {
        string header = @"			<tr bgcolor=""gray""><th>File Name</th><th align=""right"">Total Size</th><th align=""right"">Games</th><th align=""right"">Roms</th>"
            + @"<th align=""right"">Disks</th><th align=""right""># with CRC</th><th align=""right""># with MD5</th><th align=""right""># with SHA-1</th><th align=""right""># with SHA-256</th>";
        if (baddumpCol)
        {
            header += "<th class=\".right\">Baddumps</th>";
        }
        if (nodumpCol)
        {
            header += "<th class=\".right\">Nodumps</th>";
        }
        header += "</tr>\n";
        outputs[StatDatFormat.HTML].Write(header);
    }
}
/// <summary>
/// Write out the separator to the stream, if any exists
/// </summary>
/// <param name="outputs">Dictionary representing the outputs</param>
/// <param name="statDatFormat">StatDatFormat representing output format</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
private static void OutputStatsWriteMidSeparator(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat, bool baddumpCol, bool nodumpCol)
{
    // Only the HTML report has a visible separator; the other formats
    // have nothing to emit here
    if (outputs.ContainsKey(StatDatFormat.HTML))
    {
        // Base span of 10 plus one per optional status column, matching the
        // original literal "10"/"11"/"12" values
        int colspan = 10 + (baddumpCol ? 1 : 0) + (nodumpCol ? 1 : 0);
        outputs[StatDatFormat.HTML].Write("<tr><td colspan=\"" + colspan + "\"></td></tr>\n");
    }
}
/// <summary>
/// Write out the footer-separator to the stream, if any exists
/// </summary>
/// <param name="outputs">Dictionary representing the outputs</param>
/// <param name="statDatFormat">StatDatFormat representing output format</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
private static void OutputStatsWriteMidFooter(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat, bool baddumpCol, bool nodumpCol)
{
    // Plain-text formats get a blank line between sections
    if (outputs.ContainsKey(StatDatFormat.None))
    {
        outputs[StatDatFormat.None].Write("\n");
    }
    if (outputs.ContainsKey(StatDatFormat.CSV))
    {
        outputs[StatDatFormat.CSV].Write("\n");
    }

    // HTML gets an empty spacer row spanning the full table
    if (outputs.ContainsKey(StatDatFormat.HTML))
    {
        // Base span of 10 plus one per optional status column, matching the
        // original literal "10"/"11"/"12" values
        int colspan = 10 + (baddumpCol ? 1 : 0) + (nodumpCol ? 1 : 0);
        outputs[StatDatFormat.HTML].Write("<tr border=\"0\"><td colspan=\"" + colspan + "\"></td></tr>\n");
    }

    if (outputs.ContainsKey(StatDatFormat.TSV))
    {
        outputs[StatDatFormat.TSV].Write("\n");
    }
}
/// <summary>
/// Write out the footer to the stream, if any exists
/// </summary>
/// <param name="outputs">Dictionary representing the outputs</param>
/// <param name="statDatFormat">StatDatFormat representing output format</param>
private static void OutputStatsWriteFooter(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat)
{
    // Only the HTML report needs closing tags; the other formats have
    // nothing to emit here
    if (outputs.ContainsKey(StatDatFormat.HTML))
    {
        outputs[StatDatFormat.HTML].Write(@"		</table>
</body>
</html>
");
    }
}
#endregion
#endregion // Static Methods
}
}