mirror of
https://github.com/claunia/SabreTools.git
synced 2025-12-16 19:14:27 +00:00
[DatFile] Create partial classes because the base was getting too big
This commit is contained in:
File diff suppressed because it is too large
Load Diff
494
SabreTools.Helper/Dats/Partials/DatFile.Bucketing.cs
Normal file
494
SabreTools.Helper/Dats/Partials/DatFile.Bucketing.cs
Normal file
@@ -0,0 +1,494 @@
|
|||||||
|
using System;
|
||||||
|
using System.Collections.Generic;
|
||||||
|
using System.Linq;
|
||||||
|
using System.Web;
|
||||||
|
|
||||||
|
using SabreTools.Helper.Data;
|
||||||
|
|
||||||
|
namespace SabreTools.Helper.Dats
|
||||||
|
{
|
||||||
|
public partial class DatFile
|
||||||
|
{
|
||||||
|
#region Instance Methods
|
||||||
|
|
||||||
|
#region Bucketing [MODULAR DONE]
|
||||||
|
|
||||||
|
/// <summary>
/// Take the arbitrarily sorted Files Dictionary and convert to one sorted by CRC
/// </summary>
/// <param name="mergeroms">True if roms should be deduped, false otherwise</param>
/// <param name="logger">Logger object for file and console output</param>
/// <param name="output">True if the number of hashes counted is to be output (default), false otherwise</param>
public void BucketByCRC(bool mergeroms, Logger logger, bool output = true)
{
    // If we already have the right sorting, trust it
    if (_sortedBy == SortedBy.CRC)
    {
        return;
    }

    // Set the sorted type
    _sortedBy = SortedBy.CRC;

    SortedDictionary<string, List<DatItem>> sortable = new SortedDictionary<string, List<DatItem>>();
    long count = 0;

    // If we have a null dict or an empty one, output a new dictionary
    if (Files == null || Files.Count == 0)
    {
        Files = sortable;
    }

    logger.User("Organizing " + (mergeroms ? "and merging " : "") + "roms by CRC");

    // Process all of the roms
    List<string> keys = Files.Keys.ToList();
    foreach (string key in keys)
    {
        List<DatItem> roms = Files[key];

        // If we're merging the roms, do so
        if (mergeroms)
        {
            roms = DatItem.Merge(roms, logger);
        }

        // Now add each of the roms to their respective CRC buckets
        foreach (DatItem rom in roms)
        {
            count++;

            // Only Rom items carry a CRC; everything else buckets under the zero-CRC key
            string newkey = (rom.Type == ItemType.Rom ? ((Rom)rom).CRC : Constants.CRCZero);

            // TryGetValue avoids the ContainsKey + indexer double lookup of the original
            List<DatItem> bucket;
            if (!sortable.TryGetValue(newkey, out bucket))
            {
                bucket = new List<DatItem>();
                sortable.Add(newkey, bucket);
            }
            bucket.Add(rom);
        }
    }

    // Now go through and sort all of the lists
    keys = sortable.Keys.ToList();
    foreach (string key in keys)
    {
        List<DatItem> sortedlist = sortable[key];
        DatItem.Sort(ref sortedlist, false);
        sortable[key] = sortedlist;
    }

    // Output the count if told to
    if (output)
    {
        logger.User("A total of " + count + " file hashes will be written out to file");
    }

    // Now assign the dictionary back
    Files = sortable;
}
|
||||||
|
|
||||||
|
/// <summary>
/// Take the arbitrarily sorted Files Dictionary and convert to one sorted by Game
/// </summary>
/// <param name="mergeroms">True if roms should be deduped, false otherwise</param>
/// <param name="norename">True if games should only be compared on game and file name, false if system and source are counted</param>
/// <param name="logger">Logger object for file and console output</param>
/// <param name="output">True if the number of hashes counted is to be output (default), false otherwise</param>
/// <param name="lower">True if the game should be lowercased (default), false otherwise</param>
public void BucketByGame(bool mergeroms, bool norename, Logger logger, bool output = true, bool lower = true)
{
    // If we already have the right sorting, trust it
    if (_sortedBy == SortedBy.Game)
    {
        return;
    }

    // Set the sorted type
    _sortedBy = SortedBy.Game;

    SortedDictionary<string, List<DatItem>> sortable = new SortedDictionary<string, List<DatItem>>();
    long count = 0;

    // If we have a null dict or an empty one, output a new dictionary
    if (Files == null || Files.Count == 0)
    {
        Files = sortable;
    }

    logger.User("Organizing " + (mergeroms ? "and merging " : "") + "roms by game");

    // Process all of the roms
    List<string> keys = Files.Keys.ToList();
    foreach (string key in keys)
    {
        List<DatItem> roms = Files[key];

        // If we're merging the roms, do so
        if (mergeroms)
        {
            roms = DatItem.Merge(roms, logger);
        }

        // Now add each of the roms to their respective games
        foreach (DatItem rom in roms)
        {
            count++;

            // Key is "<system>-<source>-<machine>" unless norename strips the ID prefix.
            // FIX: guard rom.Machine against null as well — the static BucketListByGame
            // already does this, and a null Machine here would throw an NRE.
            string newkey = (norename ? ""
                : rom.SystemID.ToString().PadLeft(10, '0')
                    + "-"
                    + rom.SourceID.ToString().PadLeft(10, '0') + "-")
                + (rom.Machine == null || String.IsNullOrEmpty(rom.Machine.Name)
                    ? "Default"
                    : rom.Machine.Name);
            if (lower)
            {
                newkey = newkey.ToLowerInvariant();
            }
            newkey = HttpUtility.HtmlEncode(newkey);

            // TryGetValue avoids the ContainsKey + indexer double lookup
            List<DatItem> bucket;
            if (!sortable.TryGetValue(newkey, out bucket))
            {
                bucket = new List<DatItem>();
                sortable.Add(newkey, bucket);
            }
            bucket.Add(rom);
        }
    }

    // Now go through and sort all of the lists
    keys = sortable.Keys.ToList();
    foreach (string key in keys)
    {
        List<DatItem> sortedlist = sortable[key];
        DatItem.Sort(ref sortedlist, norename);
        sortable[key] = sortedlist;
    }

    // Output the count if told to
    if (output)
    {
        logger.User("A total of " + count + " file hashes will be written out to file");
    }

    // Now assign the dictionary back
    Files = sortable;
}
|
||||||
|
|
||||||
|
/// <summary>
/// Rebucket the Files Dictionary so that entries are keyed by MD5 hash
/// </summary>
/// <param name="mergeroms">True if roms should be deduped, false otherwise</param>
/// <param name="logger">Logger object for file and console output</param>
/// <param name="output">True if the number of hashes counted is to be output (default), false otherwise</param>
public void BucketByMD5(bool mergeroms, Logger logger, bool output = true)
{
    // Nothing to do if the dictionary is already keyed this way
    if (_sortedBy == SortedBy.MD5)
    {
        return;
    }

    // Record the new sort order up front
    _sortedBy = SortedBy.MD5;

    SortedDictionary<string, List<DatItem>> buckets = new SortedDictionary<string, List<DatItem>>();
    long itemCount = 0;

    // A missing or empty dictionary is replaced by a fresh one
    if (Files == null || Files.Count == 0)
    {
        Files = buckets;
    }

    logger.User("Organizing " + (mergeroms ? "and merging " : "") + "roms by MD5");

    // Walk every existing bucket and redistribute its items
    List<string> existingKeys = Files.Keys.ToList();
    foreach (string existingKey in existingKeys)
    {
        List<DatItem> items = Files[existingKey];

        // Dedupe first when requested
        if (mergeroms)
        {
            items = DatItem.Merge(items, logger);
        }

        foreach (DatItem item in items)
        {
            itemCount++;

            // Roms and Disks carry their own MD5; anything else gets the zero hash
            string hashKey;
            if (item.Type == ItemType.Rom)
            {
                hashKey = ((Rom)item).MD5;
            }
            else if (item.Type == ItemType.Disk)
            {
                hashKey = ((Disk)item).MD5;
            }
            else
            {
                hashKey = Constants.MD5Zero;
            }

            List<DatItem> bucket;
            if (!buckets.TryGetValue(hashKey, out bucket))
            {
                bucket = new List<DatItem>();
                buckets.Add(hashKey, bucket);
            }
            bucket.Add(item);
        }
    }

    // Sort the contents of every new bucket
    foreach (string hashKey in buckets.Keys.ToList())
    {
        List<DatItem> bucketItems = buckets[hashKey];
        DatItem.Sort(ref bucketItems, false);
        buckets[hashKey] = bucketItems;
    }

    // Report the tally when asked
    if (output)
    {
        logger.User("A total of " + itemCount + " file hashes will be written out to file");
    }

    // Swap the rebucketed dictionary in
    Files = buckets;
}
|
||||||
|
|
||||||
|
/// <summary>
/// Take the arbitrarily sorted Files Dictionary and convert to one sorted by SHA1
/// </summary>
/// <param name="mergeroms">True if roms should be deduped, false otherwise</param>
/// <param name="logger">Logger object for file and console output</param>
/// <param name="output">True if the number of hashes counted is to be output (default), false otherwise</param>
public void BucketBySHA1(bool mergeroms, Logger logger, bool output = true)
{
    // If we already have the right sorting, trust it
    if (_sortedBy == SortedBy.SHA1)
    {
        return;
    }

    // Set the sorted type
    _sortedBy = SortedBy.SHA1;

    SortedDictionary<string, List<DatItem>> sortable = new SortedDictionary<string, List<DatItem>>();
    long count = 0;

    // If we have a null dict or an empty one, output a new dictionary
    if (Files == null || Files.Count == 0)
    {
        Files = sortable;
    }

    logger.User("Organizing " + (mergeroms ? "and merging " : "") + "roms by SHA-1");

    // Process all of the roms
    List<string> keys = Files.Keys.ToList();
    foreach (string key in keys)
    {
        List<DatItem> roms = Files[key];

        // If we're merging the roms, do so
        if (mergeroms)
        {
            roms = DatItem.Merge(roms, logger);
        }

        // Now add each of the roms to their respective SHA-1 buckets
        foreach (DatItem rom in roms)
        {
            count++;

            // FIX: the fallback constant was Constants.MD5Zero (a copy/paste remnant
            // from BucketByMD5); a SHA-1 bucketing method must fall back to the
            // zero SHA-1 so non-hashable items share a key of the correct length
            string newkey = (rom.Type == ItemType.Rom
                ? ((Rom)rom).SHA1
                : (rom.Type == ItemType.Disk
                    ? ((Disk)rom).SHA1
                    : Constants.SHA1Zero));

            // TryGetValue avoids the ContainsKey + indexer double lookup
            List<DatItem> bucket;
            if (!sortable.TryGetValue(newkey, out bucket))
            {
                bucket = new List<DatItem>();
                sortable.Add(newkey, bucket);
            }
            bucket.Add(rom);
        }
    }

    // Now go through and sort all of the lists
    keys = sortable.Keys.ToList();
    foreach (string key in keys)
    {
        List<DatItem> sortedlist = sortable[key];
        DatItem.Sort(ref sortedlist, false);
        sortable[key] = sortedlist;
    }

    // Output the count if told to
    if (output)
    {
        logger.User("A total of " + count + " file hashes will be written out to file");
    }

    // Now assign the dictionary back
    Files = sortable;
}
|
||||||
|
|
||||||
|
/// <summary>
/// Rebucket the Files Dictionary so that entries are keyed by item size
/// </summary>
/// <param name="mergeroms">True if roms should be deduped, false otherwise</param>
/// <param name="logger">Logger object for file and console output</param>
/// <param name="output">True if the number of hashes counted is to be output (default), false otherwise</param>
public void BucketBySize(bool mergeroms, Logger logger, bool output = true)
{
    // Nothing to do if the dictionary is already keyed this way
    if (_sortedBy == SortedBy.Size)
    {
        return;
    }

    // Record the new sort order up front
    _sortedBy = SortedBy.Size;

    SortedDictionary<string, List<DatItem>> buckets = new SortedDictionary<string, List<DatItem>>();
    long itemCount = 0;

    // A missing or empty dictionary is replaced by a fresh one
    if (Files == null || Files.Count == 0)
    {
        Files = buckets;
    }

    logger.User("Organizing " + (mergeroms ? "and merging " : "") + "roms by size");

    // Walk every existing bucket and redistribute its items
    List<string> existingKeys = Files.Keys.ToList();
    foreach (string existingKey in existingKeys)
    {
        List<DatItem> items = Files[existingKey];

        // Dedupe first when requested
        if (mergeroms)
        {
            items = DatItem.Merge(items, logger);
        }

        foreach (DatItem item in items)
        {
            itemCount++;

            // Only Rom items carry a size; everything else goes under "-1"
            string sizeKey;
            if (item.Type == ItemType.Rom)
            {
                sizeKey = ((Rom)item).Size.ToString();
            }
            else
            {
                sizeKey = "-1";
            }

            List<DatItem> bucket;
            if (!buckets.TryGetValue(sizeKey, out bucket))
            {
                bucket = new List<DatItem>();
                buckets.Add(sizeKey, bucket);
            }
            bucket.Add(item);
        }
    }

    // Sort the contents of every new bucket
    foreach (string sizeKey in buckets.Keys.ToList())
    {
        List<DatItem> bucketItems = buckets[sizeKey];
        DatItem.Sort(ref bucketItems, false);
        buckets[sizeKey] = bucketItems;
    }

    // Report the tally when asked
    if (output)
    {
        logger.User("A total of " + itemCount + " file hashes will be written out to file");
    }

    // Swap the rebucketed dictionary in
    Files = buckets;
}
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
|
||||||
|
#endregion // Instance Methods
|
||||||
|
|
||||||
|
#region Static Methods
|
||||||
|
|
||||||
|
#region Bucketing [MODULAR DONE]
|
||||||
|
|
||||||
|
/// <summary>
/// Take an arbitrarily ordered List and return a Dictionary sorted by Game
/// </summary>
/// <param name="list">Input unsorted list</param>
/// <param name="mergeroms">True if roms should be deduped, false otherwise</param>
/// <param name="norename">True if games should only be compared on game and file name, false if system and source are counted</param>
/// <param name="logger">Logger object for file and console output</param>
/// <param name="output">True if the number of hashes counted is to be output (default), false otherwise</param>
/// <returns>SortedDictionary bucketed by game name</returns>
public static SortedDictionary<string, List<DatItem>> BucketListByGame(List<DatItem> list, bool mergeroms, bool norename, Logger logger, bool output = true)
{
    logger.User("Organizing " + (mergeroms ? "and merging " : "") + "roms for output");

    SortedDictionary<string, List<DatItem>> sortable = new SortedDictionary<string, List<DatItem>>();
    long count = 0;

    // If we have a null list or an empty one, output a new dictionary
    if (list == null || list.Count == 0)
    {
        return sortable;
    }

    // If we're merging the roms, do so
    if (mergeroms)
    {
        list = DatItem.Merge(list, logger);
    }

    // Now add each of the roms to their respective games
    foreach (DatItem rom in list)
    {
        if (rom == null)
        {
            continue;
        }

        count++;

        // Key is "<system>-<source>-<machine>" unless norename strips the ID prefix;
        // a null or empty machine name falls back to "Default"
        string newkey = (norename ? ""
            : rom.SystemID.ToString().PadLeft(10, '0')
                + "-"
                + rom.SourceID.ToString().PadLeft(10, '0') + "-")
            + (rom.Machine == null || String.IsNullOrEmpty(rom.Machine.Name)
                ? "Default"
                : rom.Machine.Name.ToLowerInvariant());
        newkey = HttpUtility.HtmlEncode(newkey);

        // TryGetValue avoids the ContainsKey + indexer double lookup
        List<DatItem> bucket;
        if (!sortable.TryGetValue(newkey, out bucket))
        {
            bucket = new List<DatItem>();
            sortable.Add(newkey, bucket);
        }
        bucket.Add(rom);
    }

    // FIX: 'count' was tallied but never reported and the documented 'output'
    // parameter was ignored; honor it the same way the instance Bucket* methods do
    if (output)
    {
        logger.User("A total of " + count + " file hashes will be written out to file");
    }

    return sortable;
}
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
|
||||||
|
#endregion // Static Methods
|
||||||
|
}
|
||||||
|
}
|
||||||
553
SabreTools.Helper/Dats/Partials/DatFile.ConvertUpdate.cs
Normal file
553
SabreTools.Helper/Dats/Partials/DatFile.ConvertUpdate.cs
Normal file
@@ -0,0 +1,553 @@
|
|||||||
|
using System;
|
||||||
|
using System.Collections.Generic;
|
||||||
|
using System.Linq;
|
||||||
|
using System.Threading.Tasks;
|
||||||
|
|
||||||
|
using SabreTools.Helper.Data;
|
||||||
|
using SabreTools.Helper.Tools;
|
||||||
|
|
||||||
|
#if MONO
|
||||||
|
using System.IO;
|
||||||
|
#else
|
||||||
|
using Alphaleonis.Win32.Filesystem;
|
||||||
|
|
||||||
|
using SearchOption = System.IO.SearchOption;
|
||||||
|
#endif
|
||||||
|
using NaturalSort;
|
||||||
|
|
||||||
|
namespace SabreTools.Helper.Dats
|
||||||
|
{
|
||||||
|
public partial class DatFile
|
||||||
|
{
|
||||||
|
#region Converting and Updating [MODULAR DONE]
|
||||||
|
|
||||||
|
/// <summary>
/// Determine if input files should be merged, diffed, or processed individually
/// </summary>
/// <param name="inputPaths">Names of the input files and/or folders</param>
/// <param name="outDir">Optional param for output directory</param>
/// <param name="merge">True if input files should be merged into a single file, false otherwise</param>
/// <param name="diff">Non-zero flag for diffing mode, zero otherwise</param>
/// <param name="inplace">True if the cascade-diffed files should overwrite their inputs, false otherwise</param>
/// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
/// <param name="bare">True if the date should not be appended to the default name, false otherwise [OBSOLETE]</param>
/// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
/// <param name="softlist">True to allow SL DATs to have game names used instead of descriptions, false otherwise (default)</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
/// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logging object for console and file output</param>
public void DetermineUpdateType(List<string> inputPaths, string outDir, bool merge, DiffMode diff, bool inplace, bool skip,
    bool bare, bool clean, bool softlist, Filter filter, bool trim, bool single, string root, int maxDegreeOfParallelism, Logger logger)
{
    // Neither merging nor diffing: process every input on its own and bail out early
    if (!merge && diff == 0)
    {
        Update(inputPaths, outDir, clean, softlist, filter, trim, single, root, maxDegreeOfParallelism, logger);
        return;
    }

    // Merge/diff mode works on flat files only, so expand any folders in the inputs
    List<string> inputFileNames = FileTools.GetOnlyFilesFromInputs(inputPaths, maxDegreeOfParallelism, logger, appendparent: true);

    // An inverse cascade walks the inputs back to front
    if ((diff & DiffMode.ReverseCascade) != 0)
    {
        inputFileNames.Reverse();
    }

    // Pull every input DAT into the internal dictionary, keeping each header around
    List<DatFile> datHeaders = PopulateUserData(inputFileNames, inplace, clean, softlist,
        outDir, filter, trim, single, root, maxDegreeOfParallelism, logger);

    // Dispatch on the requested diff flavor
    if (diff != 0 && diff < DiffMode.Cascade)
    {
        // Non-cascading diff output
        DiffNoCascade(diff, outDir, inputFileNames, logger);
    }
    else if (diff != 0 && diff >= DiffMode.Cascade)
    {
        // Cascading diff output only
        DiffCascade(outDir, inplace, inputFileNames, datHeaders, skip, logger);
    }
    else
    {
        // Plain merge with no diffing
        MergeNoDiff(outDir, inputFileNames, datHeaders, logger);
    }
}
|
||||||
|
|
||||||
|
/// <summary>
/// Populate the user DatData object from the input files
/// </summary>
/// <param name="inputs">Input DAT paths, each encoded as "path¬parent"</param>
/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
/// <param name="clean">True to clean the game names to WoD standard, false otherwise</param>
/// <param name="softlist">True to allow SL DATs to have game names used instead of descriptions, false otherwise</param>
/// <param name="outDir">Optional output directory</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
/// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logging object for console and file output</param>
/// <returns>List of DatData objects representing headers</returns>
private List<DatFile> PopulateUserData(List<string> inputs, bool inplace, bool clean, bool softlist, string outDir,
    Filter filter, bool trim, bool single, string root, int maxDegreeOfParallelism, Logger logger)
{
    // NOTE(review): 'inplace' and 'outDir' are accepted but not read in this body —
    // confirm against callers whether they are vestigial or intended for future use
    DatFile[] datHeaders = new DatFile[inputs.Count];
    DateTime start = DateTime.Now;
    logger.User("Processing individual DATs");

    // Parse each input DAT in parallel into its own header object
    Parallel.For(0,
        inputs.Count,
        new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism },
        i =>
        {
            string input = inputs[i];

            // Inputs are "path¬parent" pairs; only the path half is parsed here.
            // Hoisted so Split is not run twice per input.
            string datPath = input.Split('¬')[0];
            logger.User("Adding DAT: " + datPath);
            datHeaders[i] = new DatFile
            {
                // was "(DatFormat != 0 ? DatFormat : 0)", a no-op ternary
                DatFormat = DatFormat,
                Files = new SortedDictionary<string, List<DatItem>>(),
                MergeRoms = MergeRoms,
            };

            datHeaders[i].Parse(datPath, i, 0, filter, trim, single, root, logger, true, clean, softlist);
        });

    logger.User("Processing complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));

    logger.User("Populating internal DAT");
    Files = new SortedDictionary<string, List<DatItem>>();
    for (int i = 0; i < inputs.Count; i++)
    {
        List<string> keys = datHeaders[i].Files.Keys.ToList();
        foreach (string key in keys)
        {
            // Merge this input's items into the combined dictionary;
            // TryGetValue avoids the ContainsKey + indexer double lookup
            List<DatItem> existing;
            if (Files.TryGetValue(key, out existing))
            {
                existing.AddRange(datHeaders[i].Files[key]);
            }
            else
            {
                Files.Add(key, datHeaders[i].Files[key]);
            }

            // Drop the per-input entry as soon as it is transferred to keep memory down
            datHeaders[i].Files.Remove(key);
        }
        datHeaders[i].Files = null;
    }

    logger.User("Processing and populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));

    return datHeaders.ToList();
}
|
||||||
|
|
||||||
|
/// <summary>
/// Output non-cascading diffs
/// </summary>
/// <param name="diff">Non-zero flag for diffing mode, zero otherwise</param>
/// <param name="outDir">Output directory to write the DATs to</param>
/// <param name="inputs">List of inputs to write out from, each encoded as "path¬parent"</param>
/// <param name="logger">Logging object for console and file output</param>
public void DiffNoCascade(DiffMode diff, string outDir, List<string> inputs, Logger logger)
{
    DateTime start = DateTime.Now;
    logger.User("Initializing all output DATs");

    // Default vars for use
    string post = "";
    DatFile outerDiffData = new DatFile();
    DatFile dupeData = new DatFile();

    // Don't have External dupes
    if ((diff & DiffMode.NoDupes) != 0)
    {
        post = " (No Duplicates)";
        outerDiffData = (DatFile)CloneHeader();
        outerDiffData.FileName += post;
        outerDiffData.Name += post;
        outerDiffData.Description += post;
        outerDiffData.Files = new SortedDictionary<string, List<DatItem>>();
    }

    // Have External dupes
    if ((diff & DiffMode.Dupes) != 0)
    {
        post = " (Duplicates)";
        dupeData = (DatFile)CloneHeader();
        dupeData.FileName += post;
        dupeData.Name += post;
        dupeData.Description += post;
        dupeData.Files = new SortedDictionary<string, List<DatItem>>();
    }

    // Create a list of DatData objects representing individual output files
    List<DatFile> outDats = new List<DatFile>();

    // Loop through each of the inputs and get or create a new DatData object
    if ((diff & DiffMode.Individuals) != 0)
    {
        DatFile[] outDatsArray = new DatFile[inputs.Count];

        Parallel.For(0, inputs.Count, j =>
        {
            string innerpost = " (" + Path.GetFileNameWithoutExtension(inputs[j].Split('¬')[0]) + " Only)";
            DatFile diffData = (DatFile)CloneHeader();
            diffData.FileName += innerpost;
            diffData.Name += innerpost;
            diffData.Description += innerpost;
            diffData.Files = new SortedDictionary<string, List<DatItem>>();
            outDatsArray[j] = diffData;
        });

        outDats = outDatsArray.ToList();
    }
    logger.User("Initializing complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));

    // Now, loop through the dictionary and populate the correct DATs
    start = DateTime.Now;
    logger.User("Populating all output DATs");
    List<string> keys = Files.Keys.ToList();
    foreach (string key in keys)
    {
        List<DatItem> roms = DatItem.Merge(Files[key], logger);

        if (roms != null && roms.Count > 0)
        {
            foreach (DatItem rom in roms)
            {
                // No duplicates
                if ((diff & DiffMode.NoDupes) != 0 || (diff & DiffMode.Individuals) != 0)
                {
                    if ((rom.Dupe & DupeType.Internal) != 0)
                    {
                        // Individual DATs that are output.
                        // NOTE(review): assumes rom.SystemID always indexes into outDats/inputs — confirm
                        if ((diff & DiffMode.Individuals) != 0)
                        {
                            AddItemToKey(outDats[rom.SystemID].Files, key, rom);
                        }

                        // Merged no-duplicates DAT
                        if ((diff & DiffMode.NoDupes) != 0)
                        {
                            // NOTE(review): 'newrom' is a reference alias, not a copy —
                            // the rename below mutates the shared DatItem
                            DatItem newrom = rom;
                            newrom.Machine.Name += " (" + Path.GetFileNameWithoutExtension(inputs[newrom.SystemID].Split('¬')[0]) + ")";

                            AddItemToKey(outerDiffData.Files, key, newrom);
                        }
                    }
                }

                // Duplicates only
                if ((diff & DiffMode.Dupes) != 0)
                {
                    if ((rom.Dupe & DupeType.External) != 0)
                    {
                        // Same reference-alias caveat as above
                        DatItem newrom = rom;
                        newrom.Machine.Name += " (" + Path.GetFileNameWithoutExtension(inputs[newrom.SystemID].Split('¬')[0]) + ")";

                        AddItemToKey(dupeData.Files, key, newrom);
                    }
                }
            }
        }
    }
    logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));

    // Finally, loop through and output each of the DATs
    start = DateTime.Now;
    logger.User("Outputting all created DATs");

    // Output the difflist (a-b)+(b-a) diff
    if ((diff & DiffMode.NoDupes) != 0)
    {
        outerDiffData.WriteToFile(outDir, logger);
    }

    // Output the (ab) diff
    if ((diff & DiffMode.Dupes) != 0)
    {
        dupeData.WriteToFile(outDir, logger);
    }

    // Output the individual (a-b) DATs
    if ((diff & DiffMode.Individuals) != 0)
    {
        for (int j = 0; j < inputs.Count; j++)
        {
            // If we have an output directory set, replace the path
            string[] split = inputs[j].Split('¬');
            string path = outDir + (split[0] == split[1]
                ? Path.GetFileName(split[0])
                : (Path.GetDirectoryName(split[0]).Remove(0, split[1].Length)));

            // If we have more than 0 roms, output
            if (outDats[j].Files.Count > 0)
            {
                outDats[j].WriteToFile(path, logger);
            }
        }
    }
    logger.User("Outputting complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
}

/// <summary>
/// Append an item to the list stored under a key, creating the list on first use.
/// Extracted from three identical ContainsKey/Add sequences in DiffNoCascade.
/// </summary>
/// <param name="dict">Dictionary of keyed item lists to add to</param>
/// <param name="key">Key the item should be stored under</param>
/// <param name="item">Item to append</param>
private static void AddItemToKey(SortedDictionary<string, List<DatItem>> dict, string key, DatItem item)
{
    List<DatItem> bucket;
    if (!dict.TryGetValue(key, out bucket))
    {
        bucket = new List<DatItem>();
        dict.Add(key, bucket);
    }
    bucket.Add(item);
}
|
||||||
|
|
||||||
|
/// <summary>
/// Output cascading diffs
/// </summary>
/// <param name="outDir">Output directory to write the DATs to</param>
/// <param name="inplace">True if cascaded diffs are outputted in-place, false otherwise</param>
/// <param name="inputs">List of inputs to write out from</param>
/// <param name="datHeaders">Dat headers used optionally</param>
/// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
/// <param name="logger">Logging object for console and file output</param>
public void DiffCascade(string outDir, bool inplace, List<string> inputs, List<DatFile> datHeaders, bool skip, Logger logger)
{
	// Create a list of DatData objects representing output files
	List<DatFile> outDats = new List<DatFile>();

	// Loop through each of the inputs and get or create a new DatData object
	DateTime start = DateTime.Now;
	logger.User("Initializing all output DATs");

	DatFile[] outDatsArray = new DatFile[inputs.Count];

	Parallel.For(0, inputs.Count, j =>
	{
		// Per-input suffix, e.g. " (filename Only)"
		string innerpost = " (" + Path.GetFileNameWithoutExtension(inputs[j].Split('¬')[0]) + " Only)";
		DatFile diffData;

		// If we're in inplace mode (or have an output directory), take the appropriate DatData object already stored
		if (inplace || !String.IsNullOrEmpty(outDir))
		{
			diffData = datHeaders[j];
		}
		else
		{
			diffData = (DatFile)CloneHeader();

			// BUGFIX: the suffix was previously appended from an always-empty
			// variable ("post"), so cloned headers never got the per-input
			// " (name Only)" suffix even though it was computed above
			diffData.FileName += innerpost;
			diffData.Name += innerpost;
			diffData.Description += innerpost;
		}
		diffData.Files = new SortedDictionary<string, List<DatItem>>();

		outDatsArray[j] = diffData;
	});

	outDats = outDatsArray.ToList();
	logger.User("Initializing complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));

	// Now, loop through the dictionary and populate the correct DATs
	start = DateTime.Now;
	logger.User("Populating all output DATs");
	List<string> keys = Files.Keys.ToList();

	foreach (string key in keys)
	{
		List<DatItem> roms = DatItem.Merge(Files[key], logger);

		if (roms != null && roms.Count > 0)
		{
			foreach (DatItem rom in roms)
			{
				// There's odd cases where there are items with System ID < 0. Skip them for now
				if (rom.SystemID < 0)
				{
					logger.Warning("Item found with a <0 SystemID: " + rom.Name);
					continue;
				}

				if (outDats[rom.SystemID].Files.ContainsKey(key))
				{
					outDats[rom.SystemID].Files[key].Add(rom);
				}
				else
				{
					List<DatItem> tl = new List<DatItem>();
					tl.Add(rom);
					outDats[rom.SystemID].Files.Add(key, tl);
				}
			}
		}
	}
	logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));

	// Finally, loop through and output each of the DATs
	start = DateTime.Now;
	logger.User("Outputting all created DATs");
	for (int j = (skip ? 1 : 0); j < inputs.Count; j++)
	{
		// If we have an output directory set, replace the path
		string path = "";
		if (inplace)
		{
			path = Path.GetDirectoryName(inputs[j].Split('¬')[0]);
		}
		else if (!String.IsNullOrEmpty(outDir))
		{
			string[] split = inputs[j].Split('¬');
			path = outDir + (split[0] == split[1]
				? Path.GetFileName(split[0])
				: (Path.GetDirectoryName(split[0]).Remove(0, split[1].Length)));
		}

		// If we have more than 0 roms, output
		if (outDats[j].Files.Count > 0)
		{
			outDats[j].WriteToFile(path, logger);
		}
	}
	logger.User("Outputting complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
}
|
||||||
|
|
||||||
|
/// <summary>
/// Output user defined merge
/// </summary>
/// <param name="outDir">Output directory to write the DATs to</param>
/// <param name="inputs">List of inputs to write out from</param>
/// <param name="datHeaders">Dat headers used optionally</param>
/// <param name="logger">Logging object for console and file output</param>
public void MergeNoDiff(string outDir, List<string> inputs, List<DatFile> datHeaders, Logger logger)
{
	// In SuperDAT mode, every game name gets prefixed with the relative path of the DAT it came from
	if (Type == "SuperDAT")
	{
		foreach (string key in Files.Keys.ToList())
		{
			List<DatItem> prefixed = new List<DatItem>();
			foreach (DatItem item in Files[key])
			{
				DatItem updated = item;
				string[] parts = inputs[updated.SystemID].Split('¬');
				string filename = parts[0];
				string rootpath = parts[1];

				// Strip the root path (plus trailing separator, when a root exists) off the filename
				if (rootpath != "")
				{
					rootpath += Path.DirectorySeparatorChar.ToString();
				}
				filename = filename.Remove(0, rootpath.Length);

				updated.Machine.Name = Path.GetDirectoryName(filename) + Path.DirectorySeparatorChar
					+ Path.GetFileNameWithoutExtension(filename) + Path.DirectorySeparatorChar
					+ updated.Machine.Name;
				prefixed.Add(updated);
			}
			Files[key] = prefixed;
		}
	}

	// Only write a DAT out when it actually contains items
	if (Files.Count != 0)
	{
		WriteToFile(outDir, logger);
	}
}
|
||||||
|
|
||||||
|
/// <summary>
/// Convert, update, and filter a DAT file or set of files
/// </summary>
/// <param name="inputFileNames">Names of the input files and/or folders</param>
/// <param name="outDir">Optional param for output directory</param>
/// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
/// <param name="softlist">True to allow SL DATs to have game names used instead of descriptions, false otherwise (default)</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
/// <param name="single">True if all games should be replaced by '!', false otherwise</param>
/// <param name="root">String representing root directory to compare against for length calculation</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logging object for console and file output</param>
/// <remarks>
/// DOCFIX: the previous XML docs described parameters (merge, diff, inplace, skip, bare, ...)
/// that are not part of this signature; they have been removed.
/// </remarks>
public void Update(List<string> inputFileNames, string outDir, bool clean, bool softlist, Filter filter,
	bool trim, bool single, string root, int maxDegreeOfParallelism, Logger logger)
{
	// Sort the input filenames
	inputFileNames.Sort(new NaturalComparer());

	Parallel.ForEach(inputFileNames,
		new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism },
		inputFileName =>
		{
			// Clean the input string
			if (inputFileName != "")
			{
				inputFileName = Path.GetFullPath(inputFileName);
			}

			if (File.Exists(inputFileName))
			{
				// Single file: parse it with a cloned header and write the result back out
				DatFile innerDatdata = (DatFile)CloneHeader();
				logger.User("Processing \"" + Path.GetFileName(inputFileName) + "\"");
				innerDatdata.Parse(inputFileName, 0, 0, filter, trim, single,
					root, logger, true, clean, softlist,
					keepext: ((innerDatdata.DatFormat & DatFormat.TSV) != 0 || (innerDatdata.DatFormat & DatFormat.CSV) != 0));

				// If we have roms, output them
				if (innerDatdata.Files.Count != 0)
				{
					innerDatdata.WriteToFile((outDir == "" ? Path.GetDirectoryName(inputFileName) : outDir), logger, overwrite: (outDir != ""));
				}
			}
			else if (Directory.Exists(inputFileName))
			{
				// Directory: process every contained file in parallel
				inputFileName = Path.GetFullPath(inputFileName) + Path.DirectorySeparatorChar;

				Parallel.ForEach(Directory.EnumerateFiles(inputFileName, "*", SearchOption.AllDirectories),
					new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism },
					file =>
					{
						logger.User("Processing \"" + Path.GetFullPath(file).Remove(0, inputFileName.Length) + "\"");
						DatFile innerDatdata = (DatFile)Clone();
						innerDatdata.Files = null;
						innerDatdata.Parse(file, 0, 0, filter,
							trim, single, root, logger, true, clean, softlist,
							keepext: ((innerDatdata.DatFormat & DatFormat.TSV) != 0 || (innerDatdata.DatFormat & DatFormat.CSV) != 0));

						// If we have roms, output them, mirroring the input tree under outDir
						if (innerDatdata.Files != null && innerDatdata.Files.Count != 0)
						{
							innerDatdata.WriteToFile((outDir == "" ? Path.GetDirectoryName(file) : outDir + Path.GetDirectoryName(file).Remove(0, inputFileName.Length - 1)), logger, overwrite: (outDir != ""));
						}
					});
			}
			else
			{
				logger.Error("I'm sorry but " + inputFileName + " doesn't exist!");
			}
		});
}
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
}
|
||||||
|
}
|
||||||
506
SabreTools.Helper/Dats/Partials/DatFile.DFD.cs
Normal file
506
SabreTools.Helper/Dats/Partials/DatFile.DFD.cs
Normal file
@@ -0,0 +1,506 @@
|
|||||||
|
using System;
|
||||||
|
using System.Collections.Generic;
|
||||||
|
using System.Linq;
|
||||||
|
using System.Threading.Tasks;
|
||||||
|
|
||||||
|
using SabreTools.Helper.Data;
|
||||||
|
using SabreTools.Helper.Tools;
|
||||||
|
|
||||||
|
#if MONO
|
||||||
|
using System.IO;
|
||||||
|
#else
|
||||||
|
using Alphaleonis.Win32.Filesystem;
|
||||||
|
|
||||||
|
using IOException = System.IO.IOException;
|
||||||
|
using SearchOption = System.IO.SearchOption;
|
||||||
|
#endif
|
||||||
|
using SharpCompress.Common;
|
||||||
|
|
||||||
|
namespace SabreTools.Helper.Dats
|
||||||
|
{
|
||||||
|
public partial class DatFile
|
||||||
|
{
|
||||||
|
#region Populate DAT from Directory [MODULAR DONE, FOR NOW]
|
||||||
|
|
||||||
|
/// <summary>
/// Create a new Dat from a directory
/// </summary>
/// <param name="basePath">Base folder to be used in creating the DAT</param>
/// <param name="noMD5">True if MD5 hashes should be skipped over, false otherwise</param>
/// <param name="noSHA1">True if SHA-1 hashes should be skipped over, false otherwise</param>
/// <param name="bare">True if the date should be omitted from the DAT, false otherwise</param>
/// <param name="archivesAsFiles">True if archives should be treated as files, false otherwise</param>
/// <param name="enableGzip">True if GZIP archives should be treated as files, false otherwise</param>
/// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
/// <param name="tempDir">Name of the directory to create a temp folder in (blank is current directory)</param>
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for console and file output</param>
/// <returns>True on completion (always)</returns>
public bool PopulateFromDir(string basePath, bool noMD5, bool noSHA1, bool bare, bool archivesAsFiles,
	bool enableGzip, bool addBlanks, bool addDate, string tempDir, bool copyFiles, string headerToCheckAgainst,
	int maxDegreeOfParallelism, Logger logger)
{
	// If the description is defined but not the name, set the name from the description
	if (String.IsNullOrEmpty(Name) && !String.IsNullOrEmpty(Description))
	{
		Name = Description;
	}

	// If the name is defined but not the description, set the description from the name
	else if (!String.IsNullOrEmpty(Name) && String.IsNullOrEmpty(Description))
	{
		Description = Name + (bare ? "" : " (" + Date + ")");
	}

	// If neither the name or description are defined, set them from the automatic values
	else if (String.IsNullOrEmpty(Name) && String.IsNullOrEmpty(Description))
	{
		Name = basePath.Split(Path.DirectorySeparatorChar).Last();
		Description = Name + (bare ? "" : " (" + Date + ")");
	}

	// Make sure the dictionary is defined
	if (Files == null || Files.Keys.Count == 0)
	{
		Files = new SortedDictionary<string, List<DatItem>>();
	}

	// Process the input
	if (Directory.Exists(basePath))
	{
		logger.Verbose("Folder found: " + basePath);

		// NOTE(review): files are enumerated three times below (top-level only,
		// per-subfolder recursive, then fully recursive), so most files appear to
		// be checked more than once — confirm whether this is intentional before
		// restructuring.

		// Process the files in the main folder
		List<string> files = Directory.EnumerateFiles(basePath, "*", SearchOption.TopDirectoryOnly).ToList();
		Parallel.ForEach(files,
			new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism },
			item =>
			{
				PopulateFromDirCheckFile(item, basePath, noMD5, noSHA1, bare, archivesAsFiles, enableGzip, addBlanks, addDate,
					tempDir, copyFiles, headerToCheckAgainst, maxDegreeOfParallelism, logger);
			});

		// Find all top-level subfolders
		files = Directory.EnumerateDirectories(basePath, "*", SearchOption.TopDirectoryOnly).ToList();
		Parallel.ForEach(files,
			new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism },
			item =>
			{
				List<string> subfiles = Directory.EnumerateFiles(item, "*", SearchOption.AllDirectories).ToList();
				Parallel.ForEach(subfiles,
					new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism },
					subitem =>
					{
						PopulateFromDirCheckFile(subitem, basePath, noMD5, noSHA1, bare, archivesAsFiles, enableGzip, addBlanks, addDate,
							tempDir, copyFiles, headerToCheckAgainst, maxDegreeOfParallelism, logger);
					});
			});

		// Process the files in all subfolders
		files = Directory.EnumerateFiles(basePath, "*", SearchOption.AllDirectories).ToList();
		Parallel.ForEach(files,
			new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism },
			item =>
			{
				PopulateFromDirCheckFile(item, basePath, noMD5, noSHA1, bare, archivesAsFiles, enableGzip, addBlanks, addDate,
					tempDir, copyFiles, headerToCheckAgainst, maxDegreeOfParallelism, logger);
			});

		// Now find all folders that are empty, if we are supposed to
		if (!Romba && addBlanks)
		{
			List<string> empties = Directory.EnumerateDirectories(basePath, "*", SearchOption.AllDirectories).ToList();
			Parallel.ForEach(empties,
				new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism },
				dir =>
				{
					if (Directory.EnumerateFiles(dir, "*", SearchOption.TopDirectoryOnly).Count() == 0)
					{
						// Get the full path for the directory
						string fulldir = Path.GetFullPath(dir);

						// Set the temporary variables
						string gamename = "";
						string romname = "";

						// If we have a SuperDAT, we want anything that's not the base path as the game, and the file as the rom
						if (Type == "SuperDAT")
						{
							gamename = fulldir.Remove(0, basePath.Length + 1);
							romname = "-";
						}

						// Otherwise, we want just the top level folder as the game, and the file as everything else
						else
						{
							gamename = fulldir.Remove(0, basePath.Length + 1).Split(Path.DirectorySeparatorChar)[0];
							romname = Path.Combine(fulldir.Remove(0, basePath.Length + 1 + gamename.Length), "-");
						}

						// Sanitize the names by stripping leading/trailing separators
						if (gamename.StartsWith(Path.DirectorySeparatorChar.ToString()))
						{
							gamename = gamename.Substring(1);
						}
						if (gamename.EndsWith(Path.DirectorySeparatorChar.ToString()))
						{
							gamename = gamename.Substring(0, gamename.Length - 1);
						}
						if (romname.StartsWith(Path.DirectorySeparatorChar.ToString()))
						{
							romname = romname.Substring(1);
						}
						if (romname.EndsWith(Path.DirectorySeparatorChar.ToString()))
						{
							romname = romname.Substring(0, romname.Length - 1);
						}

						logger.Verbose("Adding blank empty folder: " + gamename);

						// BUGFIX: previously indexed Files["null"] directly, which throws
						// KeyNotFoundException when the "null" bucket doesn't exist yet,
						// and was unsynchronized inside this Parallel.ForEach
						lock (Files)
						{
							if (!Files.ContainsKey("null"))
							{
								Files.Add("null", new List<DatItem>());
							}
							Files["null"].Add(new Rom(romname, gamename));
						}
					}
				});
		}
	}
	else if (File.Exists(basePath))
	{
		PopulateFromDirCheckFile(basePath, Path.GetDirectoryName(Path.GetDirectoryName(basePath)), noMD5, noSHA1, bare, archivesAsFiles, enableGzip, addBlanks, addDate,
			tempDir, copyFiles, headerToCheckAgainst, maxDegreeOfParallelism, logger);
	}

	// Now that we're done, delete the temp folder (if it's not the default)
	logger.User("Cleaning temp folder");
	try
	{
		if (tempDir != Path.GetTempPath())
		{
			Directory.Delete(tempDir, true);
		}
	}
	catch
	{
		// Best-effort cleanup; absorb the error for now
	}

	return true;
}
|
||||||
|
|
||||||
|
/// <summary>
/// Check a given file for hashes, based on current settings
/// </summary>
/// <param name="item">Filename of the item to be checked</param>
/// <param name="basePath">Base folder to be used in creating the DAT</param>
/// <param name="noMD5">True if MD5 hashes should be skipped over, false otherwise</param>
/// <param name="noSHA1">True if SHA-1 hashes should be skipped over, false otherwise</param>
/// <param name="bare">True if the date should be omitted from the DAT, false otherwise</param>
/// <param name="archivesAsFiles">True if archives should be treated as files, false otherwise</param>
/// <param name="enableGzip">True if GZIP archives should be treated as files, false otherwise</param>
/// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
/// <param name="tempDir">Name of the directory to create a temp folder in (blank is current directory)</param>
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="maxDegreeOfParallelism">Integer representing the maximum amount of parallelization to be used</param>
/// <param name="logger">Logger object for console and file output</param>
private void PopulateFromDirCheckFile(string item, string basePath, bool noMD5, bool noSHA1, bool bare, bool archivesAsFiles,
	bool enableGzip, bool addBlanks, bool addDate, string tempDir, bool copyFiles, string headerToCheckAgainst,
	int maxDegreeOfParallelism, Logger logger)
{
	// Each call gets its own randomly-named scratch directory under tempDir
	string scratchDir = Path.GetFullPath(Path.Combine(tempDir, Path.GetRandomFileName())) + Path.DirectorySeparatorChar;

	// Special case for Romba mode (all names are supposed to be SHA-1 hashes)
	if (Romba)
	{
		Rom tgzRom = ArchiveTools.GetTorrentGZFileInfo(item, logger);

		// Invalid TGZ entries are skipped outright
		if (tgzRom == null || tgzRom.Name == null)
		{
			logger.User("File not added: " + Path.GetFileNameWithoutExtension(item) + Environment.NewLine);
			return;
		}

		string key = tgzRom.Size + "-" + tgzRom.CRC;
		lock (Files)
		{
			if (!Files.ContainsKey(key))
			{
				Files.Add(key, new List<DatItem>());
			}
			Files[key].Add(tgzRom);
			logger.User("File added: " + Path.GetFileNameWithoutExtension(item) + Environment.NewLine);
		}
		return;
	}

	// When copying, hash a temp-directory copy instead of the original file
	string workItem = item;
	string workBasePath = basePath;
	if (copyFiles)
	{
		workBasePath = Path.Combine(tempDir, Path.GetRandomFileName());
		workItem = Path.GetFullPath(Path.Combine(workBasePath, Path.GetFullPath(item).Remove(0, basePath.Length + 1)));
		Directory.CreateDirectory(Path.GetDirectoryName(workItem));
		File.Copy(item, workItem, true);
	}

	// With both deep hashes disabled, do a quickscan (read archive headers only)
	if (noMD5 && noSHA1)
	{
		ArchiveType? detected = ArchiveTools.GetCurrentArchiveType(workItem, logger);

		if (detected != null && !archivesAsFiles)
		{
			// Archive: take the entry info straight from the archive directory
			List<Rom> entries = ArchiveTools.GetArchiveFileInfo(workItem, logger);

			foreach (Rom entry in entries)
			{
				PopulateFromDirProcessFileHelper(workItem,
					entry,
					basePath,
					(Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item),
					logger);
			}
		}
		else if (File.Exists(workItem))
		{
			// Plain file: hash it directly
			PopulateFromDirProcessFile(workItem, "", workBasePath, noMD5, noSHA1, addDate, headerToCheckAgainst, logger);
		}
	}
	// Otherwise, attempt to extract the files to the scratch directory
	else
	{
		ArchiveScanLevel scanLevel = (archivesAsFiles ? ArchiveScanLevel.SevenZipExternal : ArchiveScanLevel.SevenZipInternal)
			| (!archivesAsFiles && enableGzip ? ArchiveScanLevel.GZipInternal : ArchiveScanLevel.GZipExternal)
			| (archivesAsFiles ? ArchiveScanLevel.RarExternal : ArchiveScanLevel.RarInternal)
			| (archivesAsFiles ? ArchiveScanLevel.ZipExternal : ArchiveScanLevel.ZipInternal);

		bool extractionFailed = ArchiveTools.ExtractArchive(workItem, scratchDir, scanLevel, logger);

		if (!extractionFailed)
		{
			// Extracted successfully: process each extracted file in parallel
			logger.Verbose(Path.GetFileName(item) + " treated like an archive");
			List<string> extracted = Directory.EnumerateFiles(scratchDir, "*", SearchOption.AllDirectories).ToList();
			Parallel.ForEach(extracted,
				new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism },
				entry =>
				{
					PopulateFromDirProcessFile(entry,
						Path.Combine((Type == "SuperDAT"
								? (Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length)
								: ""),
							Path.GetFileNameWithoutExtension(item)),
						scratchDir,
						noMD5,
						noSHA1,
						addDate,
						headerToCheckAgainst,
						logger);
				});
		}
		else if (File.Exists(workItem))
		{
			// Not an archive (or failed to extract): hash the file itself
			PopulateFromDirProcessFile(workItem, "", workBasePath, noMD5, noSHA1, addDate, headerToCheckAgainst, logger);
		}
	}

	// Remove the temp copy, if one was made
	if (copyFiles && item != workItem)
	{
		try
		{
			Directory.Delete(workBasePath, true);
		}
		catch { }
	}

	// Remove the per-call scratch directory
	if (Directory.Exists(scratchDir))
	{
		Directory.Delete(scratchDir, true);
	}
}
|
||||||
|
|
||||||
|
/// <summary>
/// Process a single file as a file
/// </summary>
/// <param name="item">File to be added</param>
/// <param name="parent">Parent game to be used</param>
/// <param name="basePath">Path the represents the parent directory</param>
/// <param name="noMD5">True if MD5 hashes should be skipped over, false otherwise</param>
/// <param name="noSHA1">True if SHA-1 hashes should be skipped over, false otherwise</param>
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
/// <param name="logger">Logger object for console and file output</param>
private void PopulateFromDirProcessFile(string item, string parent, string basePath, bool noMD5, bool noSHA1, bool addDate, string headerToCheckAgainst, Logger logger)
{
	logger.Verbose(Path.GetFileName(item) + " treated like a file");

	// Hash the file, then hand the result off to the shared helper for naming and insertion
	Rom fileInfo = FileTools.GetFileInfo(item, logger, noMD5: noMD5, noSHA1: noSHA1, date: addDate, header: headerToCheckAgainst);
	PopulateFromDirProcessFileHelper(item, fileInfo, basePath, parent, logger);
}
|
||||||
|
|
||||||
|
/// <summary>
/// Process a single file as a file (with found Rom data)
/// </summary>
/// <param name="item">File to be added</param>
/// <param name="datItem">Rom data to be used to write to file</param>
/// <param name="basepath">Path the represents the parent directory</param>
/// <param name="parent">Parent game to be used; blank means a non-archive file</param>
/// <param name="logger">Logger object for console and file output</param>
private void PopulateFromDirProcessFileHelper(string item, DatItem datItem, string basepath, string parent, Logger logger)
{
	// If the datItem isn't a Rom or Disk, return
	if (datItem.Type != ItemType.Rom && datItem.Type != ItemType.Disk)
	{
		return;
	}

	// Roms are bucketed by "size-CRC", disks by MD5
	string key = "";
	if (datItem.Type == ItemType.Rom)
	{
		key = ((Rom)datItem).Size + "-" + ((Rom)datItem).CRC;
	}
	else
	{
		key = ((Disk)datItem).MD5;
	}

	try
	{
		// If the basepath doesn't end with a directory separator, append one
		// (DOCFIX: the old comment claimed the opposite of what the code does)
		if (!basepath.EndsWith(Path.DirectorySeparatorChar.ToString()))
		{
			basepath += Path.DirectorySeparatorChar.ToString();
		}

		// Make sure we have the full item path
		item = Path.GetFullPath(item);

		// Get the data to be added as game and item names
		string gamename = "";
		string romname = "";

		// If the parent is blank, then we have a non-archive file
		if (parent == "")
		{
			// If we have a SuperDAT, we want anything that's not the base path as the game, and the file as the rom
			if (Type == "SuperDAT")
			{
				gamename = Path.GetDirectoryName(item.Remove(0, basepath.Length));
				romname = Path.GetFileName(item);
			}

			// Otherwise, we want just the top level folder as the game, and the file as everything else
			else
			{
				gamename = item.Remove(0, basepath.Length).Split(Path.DirectorySeparatorChar)[0];
				romname = item.Remove(0, (Path.Combine(basepath, gamename).Length));
			}
		}

		// Otherwise, we assume that we have an archive: the archive name is the game,
		// the interior path is the rom. (The original had identical SuperDAT and
		// non-SuperDAT branches here; they are collapsed into one.)
		else
		{
			gamename = parent;
			romname = item.Remove(0, basepath.Length);
		}

		// Sanitize the names
		if (gamename.StartsWith(Path.DirectorySeparatorChar.ToString()))
		{
			gamename = gamename.Substring(1);
		}
		if (gamename.EndsWith(Path.DirectorySeparatorChar.ToString()))
		{
			gamename = gamename.Substring(0, gamename.Length - 1);
		}
		if (romname.StartsWith(Path.DirectorySeparatorChar.ToString()))
		{
			romname = romname.Substring(1);
		}
		if (romname.EndsWith(Path.DirectorySeparatorChar.ToString()))
		{
			romname = romname.Substring(0, romname.Length - 1);
		}
		if (!String.IsNullOrEmpty(gamename) && String.IsNullOrEmpty(romname))
		{
			romname = gamename;
			gamename = "Default";
		}

		// Update rom information
		datItem.Name = romname;
		if (datItem.Machine == null)
		{
			datItem.Machine = new Machine
			{
				Name = gamename,
				Description = gamename,
			};
		}
		else
		{
			datItem.Machine.Name = gamename;
			datItem.Machine.Description = gamename;
		}

		// Add the file information to the DAT. A single critical section replaces
		// the original's separate "ensure key" and "add item" locks, which could
		// leave an orphaned empty bucket behind if an IOException occurred between them.
		lock (Files)
		{
			if (Files.ContainsKey(key))
			{
				Files[key].Add(datItem);
			}
			else
			{
				List<DatItem> temp = new List<DatItem>();
				temp.Add(datItem);
				Files.Add(key, temp);
			}
		}

		logger.User("File added: " + romname + Environment.NewLine);
	}
	catch (IOException ex)
	{
		logger.Error(ex.ToString());
		return;
	}
}
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
}
|
||||||
|
}
|
||||||
2361
SabreTools.Helper/Dats/Partials/DatFile.Parsers.cs
Normal file
2361
SabreTools.Helper/Dats/Partials/DatFile.Parsers.cs
Normal file
File diff suppressed because it is too large
Load Diff
645
SabreTools.Helper/Dats/Partials/DatFile.Rebuild.cs
Normal file
645
SabreTools.Helper/Dats/Partials/DatFile.Rebuild.cs
Normal file
@@ -0,0 +1,645 @@
|
|||||||
|
using System;
|
||||||
|
using System.Collections.Generic;
|
||||||
|
using System.Linq;
|
||||||
|
using System.Threading.Tasks;
|
||||||
|
|
||||||
|
using SabreTools.Helper.Data;
|
||||||
|
using SabreTools.Helper.Skippers;
|
||||||
|
using SabreTools.Helper.Tools;
|
||||||
|
|
||||||
|
#if MONO
|
||||||
|
using System.IO;
|
||||||
|
#else
|
||||||
|
using Alphaleonis.Win32.Filesystem;
|
||||||
|
|
||||||
|
using SearchOption = System.IO.SearchOption;
|
||||||
|
#endif
|
||||||
|
|
||||||
|
namespace SabreTools.Helper.Dats
|
||||||
|
{
|
||||||
|
public partial class DatFile
|
||||||
|
{
|
||||||
|
#region Rebuilding and Verifying [MODULAR DONE, FOR NOW]
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Process the DAT and find all matches in input files and folders
|
||||||
|
/// </summary>
|
||||||
|
/// <param name="inputs">List of input files/folders to check</param>
|
||||||
|
/// <param name="outDir">Output directory to use to build to</param>
|
||||||
|
/// <param name="tempDir">Temporary directory for archive extraction</param>
|
||||||
|
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
|
||||||
|
/// <param name="date">True if the date from the DAT should be used if available, false otherwise</param>
|
||||||
|
/// <param name="delete">True if input files should be deleted, false otherwise</param>
|
||||||
|
/// <param name="inverse">True if the DAT should be used as a filter instead of a template, false otherwise</param>
|
||||||
|
/// <param name="outputFormat">Output format that files should be written to</param>
|
||||||
|
/// <param name="romba">True if files should be output in Romba depot folders, false otherwise</param>
|
||||||
|
/// <param name="archiveScanLevel">ArchiveScanLevel representing the archive handling levels</param>
|
||||||
|
/// <param name="updateDat">True if the updated DAT should be output, false otherwise</param>
|
||||||
|
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
|
||||||
|
/// <param name="logger">Logger object for file and console output</param>
|
||||||
|
/// <returns>True if rebuilding was a success, false otherwise</returns>
|
||||||
|
public bool RebuildToOutput(List<string> inputs, string outDir, string tempDir, bool quickScan, bool date,
|
||||||
|
bool delete, bool inverse, OutputFormat outputFormat, bool romba, ArchiveScanLevel archiveScanLevel, bool updateDat,
|
||||||
|
string headerToCheckAgainst, int maxDegreeOfParallelism, Logger logger)
|
||||||
|
{
|
||||||
|
#region Perform setup
|
||||||
|
|
||||||
|
// If the DAT is not populated and inverse is not set, inform the user and quit
|
||||||
|
if ((Files == null || Files.Count == 0) && !inverse)
|
||||||
|
{
|
||||||
|
logger.User("No entries were found to rebuild, exiting...");
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check that the output directory exists
|
||||||
|
if (!Directory.Exists(outDir))
|
||||||
|
{
|
||||||
|
Directory.CreateDirectory(outDir);
|
||||||
|
outDir = Path.GetFullPath(outDir);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check the temp directory
|
||||||
|
if (String.IsNullOrEmpty(tempDir))
|
||||||
|
{
|
||||||
|
tempDir = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Then create or clean the temp directory
|
||||||
|
if (!Directory.Exists(tempDir))
|
||||||
|
{
|
||||||
|
Directory.CreateDirectory(tempDir);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
FileTools.CleanDirectory(tempDir);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Preload the Skipper list
|
||||||
|
int listcount = Skipper.List.Count;
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
|
||||||
|
bool success = true;
|
||||||
|
DatFile matched = new DatFile();
|
||||||
|
List<string> files = new List<string>();
|
||||||
|
|
||||||
|
#region Retrieve a list of all files
|
||||||
|
|
||||||
|
logger.User("Retrieving list all files from input");
|
||||||
|
DateTime start = DateTime.Now;
|
||||||
|
|
||||||
|
// Create a list of just files from inputs
|
||||||
|
Parallel.ForEach(inputs,
|
||||||
|
new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism, },
|
||||||
|
input => {
|
||||||
|
if (File.Exists(input))
|
||||||
|
{
|
||||||
|
logger.Verbose("File found: '" + input + "'");
|
||||||
|
files.Add(Path.GetFullPath(input));
|
||||||
|
}
|
||||||
|
else if (Directory.Exists(input))
|
||||||
|
{
|
||||||
|
logger.Verbose("Directory found: '" + input + "'");
|
||||||
|
Parallel.ForEach(Directory.EnumerateFiles(input, "*", SearchOption.AllDirectories),
|
||||||
|
new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism, },
|
||||||
|
file =>
|
||||||
|
{
|
||||||
|
logger.Verbose("File found: '" + file + "'");
|
||||||
|
files.Add(Path.GetFullPath(file));
|
||||||
|
});
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
logger.Error("'" + input + "' is not a file or directory!");
|
||||||
|
}
|
||||||
|
});
|
||||||
|
logger.User("Retrieving complete in: " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
|
||||||
|
DatFile current = new DatFile();
|
||||||
|
Dictionary<string, SkipperRule> fileToSkipperRule = new Dictionary<string, SkipperRule>();
|
||||||
|
|
||||||
|
#region Create a dat from input files
|
||||||
|
|
||||||
|
logger.User("Getting hash information for all input files");
|
||||||
|
start = DateTime.Now;
|
||||||
|
|
||||||
|
// Now that we have a list of just files, we get a DAT from the input files
|
||||||
|
Parallel.ForEach(files,
|
||||||
|
new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism },
|
||||||
|
file =>
|
||||||
|
{
|
||||||
|
// If we somehow have a null filename, return
|
||||||
|
if (file == null)
|
||||||
|
{
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Define the temporary directory
|
||||||
|
string tempSubDir = Path.GetFullPath(Path.Combine(tempDir, Path.GetRandomFileName())) + Path.DirectorySeparatorChar;
|
||||||
|
|
||||||
|
// Get the required scanning level for the file
|
||||||
|
bool shouldExternalProcess = false;
|
||||||
|
bool shouldInternalProcess = false;
|
||||||
|
ArchiveTools.GetInternalExternalProcess(file, archiveScanLevel, logger, out shouldExternalProcess, out shouldInternalProcess);
|
||||||
|
|
||||||
|
// If we're supposed to scan the file externally
|
||||||
|
if (shouldExternalProcess)
|
||||||
|
{
|
||||||
|
Rom rom = FileTools.GetFileInfo(file, logger, noMD5: quickScan, noSHA1: quickScan, header: headerToCheckAgainst);
|
||||||
|
rom.Name = Path.GetFullPath(file);
|
||||||
|
|
||||||
|
lock (Files)
|
||||||
|
{
|
||||||
|
string key = rom.Size + "-" + rom.CRC;
|
||||||
|
if (current.Files.ContainsKey(key))
|
||||||
|
{
|
||||||
|
current.Files[key].Add(rom);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
List<DatItem> temp = new List<DatItem>();
|
||||||
|
temp.Add(rom);
|
||||||
|
current.Files.Add(key, temp);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If we had a header, we want the full file information too
|
||||||
|
if (headerToCheckAgainst != null)
|
||||||
|
{
|
||||||
|
rom = FileTools.GetFileInfo(file, logger, noMD5: quickScan, noSHA1: quickScan);
|
||||||
|
rom.Name = Path.GetFullPath(file);
|
||||||
|
|
||||||
|
lock (Files)
|
||||||
|
{
|
||||||
|
string key = rom.Size + "-" + rom.CRC;
|
||||||
|
if (current.Files.ContainsKey(key))
|
||||||
|
{
|
||||||
|
current.Files[key].Add(rom);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
List<DatItem> temp = new List<DatItem>();
|
||||||
|
temp.Add(rom);
|
||||||
|
current.Files.Add(key, temp);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If we're supposed to scan the file internally
|
||||||
|
if (shouldInternalProcess)
|
||||||
|
{
|
||||||
|
// If quickscan is set, do so
|
||||||
|
if (quickScan)
|
||||||
|
{
|
||||||
|
List<Rom> extracted = ArchiveTools.GetArchiveFileInfo(file, logger);
|
||||||
|
|
||||||
|
foreach (Rom rom in extracted)
|
||||||
|
{
|
||||||
|
Rom newrom = rom;
|
||||||
|
newrom.Machine = new Machine(Path.GetFullPath(file), "");
|
||||||
|
|
||||||
|
lock (Files)
|
||||||
|
{
|
||||||
|
string key = rom.Size + "-" + rom.CRC;
|
||||||
|
if (current.Files.ContainsKey(key))
|
||||||
|
{
|
||||||
|
current.Files[key].Add(newrom);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
List<DatItem> temp = new List<DatItem>();
|
||||||
|
temp.Add(newrom);
|
||||||
|
current.Files.Add(key, temp);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Otherwise, attempt to extract the files to the temporary directory
|
||||||
|
else
|
||||||
|
{
|
||||||
|
bool encounteredErrors = ArchiveTools.ExtractArchive(file, tempSubDir, archiveScanLevel, logger);
|
||||||
|
|
||||||
|
// If the file was an archive and was extracted successfully, check it
|
||||||
|
if (!encounteredErrors)
|
||||||
|
{
|
||||||
|
logger.Verbose(Path.GetFileName(file) + " treated like an archive");
|
||||||
|
List<string> extracted = Directory.EnumerateFiles(tempSubDir, "*", SearchOption.AllDirectories).ToList();
|
||||||
|
Parallel.ForEach(extracted,
|
||||||
|
new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism },
|
||||||
|
entry =>
|
||||||
|
{
|
||||||
|
Rom rom = FileTools.GetFileInfo(entry, logger, noMD5: quickScan, noSHA1: quickScan, header: headerToCheckAgainst);
|
||||||
|
rom.Machine = new Machine(Path.GetFullPath(file), "");
|
||||||
|
|
||||||
|
lock (Files)
|
||||||
|
{
|
||||||
|
string key = rom.Size + "-" + rom.CRC;
|
||||||
|
if (current.Files.ContainsKey(key))
|
||||||
|
{
|
||||||
|
current.Files[key].Add(rom);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
List<DatItem> temp = new List<DatItem>();
|
||||||
|
temp.Add(rom);
|
||||||
|
current.Files.Add(key, temp);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If we had a header, we want the full file information too
|
||||||
|
if (headerToCheckAgainst != null)
|
||||||
|
{
|
||||||
|
rom = FileTools.GetFileInfo(file, logger, noMD5: quickScan, noSHA1: quickScan);
|
||||||
|
rom.Machine = new Machine(Path.GetFullPath(file), "");
|
||||||
|
|
||||||
|
lock (Files)
|
||||||
|
{
|
||||||
|
string key = rom.Size + "-" + rom.CRC;
|
||||||
|
if (current.Files.ContainsKey(key))
|
||||||
|
{
|
||||||
|
current.Files[key].Add(rom);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
List<DatItem> temp = new List<DatItem>();
|
||||||
|
temp.Add(rom);
|
||||||
|
current.Files.Add(key, temp);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
// Otherwise, just get the info on the file itself
|
||||||
|
else if (File.Exists(file))
|
||||||
|
{
|
||||||
|
Rom rom = FileTools.GetFileInfo(file, logger, noMD5: quickScan, noSHA1: quickScan, header: headerToCheckAgainst);
|
||||||
|
rom.Name = Path.GetFullPath(file);
|
||||||
|
|
||||||
|
lock (Files)
|
||||||
|
{
|
||||||
|
string key = rom.Size + "-" + rom.CRC;
|
||||||
|
if (current.Files.ContainsKey(key))
|
||||||
|
{
|
||||||
|
current.Files[key].Add(rom);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
List<DatItem> temp = new List<DatItem>();
|
||||||
|
temp.Add(rom);
|
||||||
|
current.Files.Add(key, temp);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Now delete the temp directory
|
||||||
|
try
|
||||||
|
{
|
||||||
|
Directory.Delete(tempSubDir, true);
|
||||||
|
}
|
||||||
|
catch { }
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.User("Getting hash information complete in: " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
|
||||||
|
// Create a mapping from destination file to source file
|
||||||
|
Dictionary<DatItem, DatItem> toFromMap = new Dictionary<DatItem, DatItem>();
|
||||||
|
|
||||||
|
#region Find all required files for rebuild
|
||||||
|
|
||||||
|
logger.User("Determining files to rebuild");
|
||||||
|
start = DateTime.Now;
|
||||||
|
|
||||||
|
// Order the DATs by hash first to make things easier
|
||||||
|
logger.User("Sorting input DAT...");
|
||||||
|
BucketByCRC(false, logger, output: false);
|
||||||
|
logger.User("Sorting found files...");
|
||||||
|
current.BucketByCRC(false, logger, output: false);
|
||||||
|
|
||||||
|
// Now loop over and find all files that need to be rebuilt
|
||||||
|
List<string> keys = current.Files.Keys.ToList();
|
||||||
|
Parallel.ForEach(keys,
|
||||||
|
new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism },
|
||||||
|
key =>
|
||||||
|
{
|
||||||
|
// If we are using the DAT as a filter, treat the files one way
|
||||||
|
if (inverse)
|
||||||
|
{
|
||||||
|
// Check for duplicates
|
||||||
|
List<DatItem> datItems = current.Files[key];
|
||||||
|
foreach (Rom rom in datItems)
|
||||||
|
{
|
||||||
|
// If the rom has duplicates, we skip it
|
||||||
|
if (rom.HasDuplicates(this, logger))
|
||||||
|
{
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Otherwise, map the file to itself
|
||||||
|
try
|
||||||
|
{
|
||||||
|
Rom newrom = new Rom
|
||||||
|
{
|
||||||
|
Name = rom.Name.Remove(0, Path.GetDirectoryName(rom.Name).Length),
|
||||||
|
Size = rom.Size,
|
||||||
|
CRC = rom.CRC,
|
||||||
|
MD5 = rom.MD5,
|
||||||
|
SHA1 = rom.SHA1,
|
||||||
|
|
||||||
|
Machine = new Machine
|
||||||
|
{
|
||||||
|
Name = Path.GetFileNameWithoutExtension(rom.Machine.Name),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
newrom.Name = newrom.Name.Remove(0, (newrom.Name.StartsWith("\\") || newrom.Name.StartsWith("/") ? 1 : 0));
|
||||||
|
|
||||||
|
lock (toFromMap)
|
||||||
|
{
|
||||||
|
toFromMap.Add(newrom, rom);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch { }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Otherwise, treat it like a standard rebuild
|
||||||
|
else
|
||||||
|
{
|
||||||
|
// If the input DAT doesn't have the key, then nothing from the current DAT are there
|
||||||
|
if (!Files.ContainsKey(key))
|
||||||
|
{
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Otherwise, we try to find duplicates
|
||||||
|
List<DatItem> datItems = current.Files[key];
|
||||||
|
foreach (Rom rom in datItems)
|
||||||
|
{
|
||||||
|
List<DatItem> found = rom.GetDuplicates(this, logger, false);
|
||||||
|
|
||||||
|
// Now add all of the duplicates mapped to the current file
|
||||||
|
foreach (Rom mid in found)
|
||||||
|
{
|
||||||
|
try
|
||||||
|
{
|
||||||
|
lock (toFromMap)
|
||||||
|
{
|
||||||
|
toFromMap.Add(mid, rom);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch { }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.User("Determining complete in: " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
|
||||||
|
// Now bucket the list of keys by game so that we can rebuild properly
|
||||||
|
SortedDictionary<string, List<DatItem>> keysGroupedByGame = BucketListByGame(toFromMap.Keys.ToList(), false, true, logger, output: false);
|
||||||
|
|
||||||
|
#region Rebuild games in order
|
||||||
|
|
||||||
|
switch (outputFormat)
|
||||||
|
{
|
||||||
|
case OutputFormat.Folder:
|
||||||
|
logger.User("Rebuilding all files to directory");
|
||||||
|
break;
|
||||||
|
case OutputFormat.TapeArchive:
|
||||||
|
logger.User("Rebuilding all files to TAR");
|
||||||
|
break;
|
||||||
|
case OutputFormat.Torrent7Zip:
|
||||||
|
logger.User("Rebuilding all files to Torrent7Z");
|
||||||
|
break;
|
||||||
|
case OutputFormat.TorrentGzip:
|
||||||
|
logger.User("Rebuilding all files to TorrentGZ");
|
||||||
|
break;
|
||||||
|
case OutputFormat.TorrentLrzip:
|
||||||
|
logger.User("Rebuilding all files to TorrentLRZ");
|
||||||
|
break;
|
||||||
|
case OutputFormat.TorrentRar:
|
||||||
|
logger.User("Rebuilding all files to TorrentRAR");
|
||||||
|
break;
|
||||||
|
case OutputFormat.TorrentXZ:
|
||||||
|
logger.User("Rebuilding all files to TorrentXZ");
|
||||||
|
break;
|
||||||
|
case OutputFormat.TorrentZip:
|
||||||
|
logger.User("Rebuilding all files to TorrentZip");
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
start = DateTime.Now;
|
||||||
|
|
||||||
|
// Now loop through the keys and create the correct output items
|
||||||
|
List<string> games = keysGroupedByGame.Keys.ToList();
|
||||||
|
Parallel.ForEach(games,
|
||||||
|
new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism },
|
||||||
|
game =>
|
||||||
|
{
|
||||||
|
// Define the temporary directory
|
||||||
|
string tempSubDir = Path.GetFullPath(Path.Combine(tempDir, Path.GetRandomFileName())) + Path.DirectorySeparatorChar;
|
||||||
|
|
||||||
|
// Create an empty list for getting paths for rebuilding
|
||||||
|
List<string> pathsToFiles = new List<string>();
|
||||||
|
|
||||||
|
// Loop through all of the matched items in the game
|
||||||
|
List<DatItem> itemsInGame = keysGroupedByGame[game];
|
||||||
|
List<Rom> romsInGame = new List<Rom>();
|
||||||
|
foreach (Rom rom in itemsInGame)
|
||||||
|
{
|
||||||
|
// Get the rom that's mapped to this item
|
||||||
|
Rom source = (Rom)toFromMap[rom];
|
||||||
|
|
||||||
|
// If we have an empty rom or machine, there was an issue
|
||||||
|
if (source == null || source.Machine == null || source.Machine.Name == null)
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// If the file is in an archive, we need to treat it specially
|
||||||
|
string machinename = source.Machine.Name.ToLowerInvariant();
|
||||||
|
if (machinename.EndsWith(".7z")
|
||||||
|
|| machinename.EndsWith(".gz")
|
||||||
|
|| machinename.EndsWith(".rar")
|
||||||
|
|| machinename.EndsWith(".zip"))
|
||||||
|
{
|
||||||
|
string tempPath = ArchiveTools.ExtractItem(source.Machine.Name, Path.GetFileName(source.Name), tempSubDir, logger);
|
||||||
|
pathsToFiles.Add(tempPath);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Otherwise, we want to just add the full path
|
||||||
|
else
|
||||||
|
{
|
||||||
|
pathsToFiles.Add(source.Name);
|
||||||
|
}
|
||||||
|
|
||||||
|
// If the size doesn't match, then we add the CRC as a postfix to the file
|
||||||
|
Rom fi = FileTools.GetFileInfo(pathsToFiles.Last(), logger);
|
||||||
|
if (fi.Size != source.Size)
|
||||||
|
{
|
||||||
|
rom.Name = Path.GetDirectoryName(rom.Name)
|
||||||
|
+ (String.IsNullOrEmpty(Path.GetDirectoryName(rom.Name)) ? "" : Path.DirectorySeparatorChar.ToString())
|
||||||
|
+ Path.GetFileNameWithoutExtension(rom.Name)
|
||||||
|
+ " (" + fi.CRC + ")"
|
||||||
|
+ Path.GetExtension(rom.Name);
|
||||||
|
rom.CRC = fi.CRC;
|
||||||
|
rom.Size = fi.Size;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Now add the rom to the output list
|
||||||
|
romsInGame.Add(rom);
|
||||||
|
}
|
||||||
|
|
||||||
|
// And now rebuild accordingly
|
||||||
|
switch (outputFormat)
|
||||||
|
{
|
||||||
|
case OutputFormat.Folder:
|
||||||
|
for (int i = 0; i < romsInGame.Count; i++)
|
||||||
|
{
|
||||||
|
string infile = pathsToFiles[i];
|
||||||
|
Rom outrom = romsInGame[i];
|
||||||
|
string outfile = Path.Combine(outDir, Style.RemovePathUnsafeCharacters(outrom.Machine.Name), outrom.Name);
|
||||||
|
|
||||||
|
// Make sure the output folder is created
|
||||||
|
Directory.CreateDirectory(Path.GetDirectoryName(outfile));
|
||||||
|
|
||||||
|
// Now copy the file over
|
||||||
|
try
|
||||||
|
{
|
||||||
|
File.Copy(infile, outfile);
|
||||||
|
}
|
||||||
|
catch { }
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
case OutputFormat.TapeArchive:
|
||||||
|
ArchiveTools.WriteTAR(pathsToFiles, outDir, romsInGame, logger);
|
||||||
|
break;
|
||||||
|
case OutputFormat.Torrent7Zip:
|
||||||
|
break;
|
||||||
|
case OutputFormat.TorrentGzip:
|
||||||
|
for (int i = 0; i < itemsInGame.Count; i++)
|
||||||
|
{
|
||||||
|
string infile = pathsToFiles[i];
|
||||||
|
Rom outrom = romsInGame[i];
|
||||||
|
outrom.Machine.Name = Style.RemovePathUnsafeCharacters(outrom.Machine.Name);
|
||||||
|
ArchiveTools.WriteTorrentGZ(infile, outDir, romba, logger);
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
case OutputFormat.TorrentLrzip:
|
||||||
|
break;
|
||||||
|
case OutputFormat.TorrentRar:
|
||||||
|
break;
|
||||||
|
case OutputFormat.TorrentXZ:
|
||||||
|
break;
|
||||||
|
case OutputFormat.TorrentZip:
|
||||||
|
ArchiveTools.WriteTorrentZip(pathsToFiles, outDir, romsInGame, logger);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
// And now clear the temp folder to get rid of any transient files
|
||||||
|
try
|
||||||
|
{
|
||||||
|
Directory.Delete(tempSubDir, true);
|
||||||
|
}
|
||||||
|
catch { }
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.User("Rebuilding complete in: " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
|
||||||
|
return success;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Process the DAT and verify the output directory
|
||||||
|
/// </summary>
|
||||||
|
/// <param name="datFile">DAT to use to verify the directory</param>
|
||||||
|
/// <param name="inputs">List of input directories to compare against</param>
|
||||||
|
/// <param name="tempDir">Temporary directory for archive extraction</param>
|
||||||
|
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
|
||||||
|
/// <param name="logger">Logger object for file and console output</param>
|
||||||
|
/// <returns>True if verification was a success, false otherwise</returns>
|
||||||
|
public bool VerifyDirectory(List<string> inputs, string tempDir, string headerToCheckAgainst, Logger logger)
|
||||||
|
{
|
||||||
|
// First create or clean the temp directory
|
||||||
|
if (!Directory.Exists(tempDir))
|
||||||
|
{
|
||||||
|
Directory.CreateDirectory(tempDir);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
FileTools.CleanDirectory(tempDir);
|
||||||
|
}
|
||||||
|
|
||||||
|
bool success = true;
|
||||||
|
|
||||||
|
/*
|
||||||
|
We want the cross section of what's the folder and what's in the DAT. Right now, it just has what's in the DAT that's not in the folder
|
||||||
|
*/
|
||||||
|
|
||||||
|
// Then, loop through and check each of the inputs
|
||||||
|
logger.User("Processing files:\n");
|
||||||
|
foreach (string input in inputs)
|
||||||
|
{
|
||||||
|
PopulateFromDir(input, false /* noMD5 */, false /* noSHA1 */, true /* bare */, false /* archivesAsFiles */,
|
||||||
|
true /* enableGzip */, false /* addBlanks */, false /* addDate */, tempDir /* tempDir */, false /* copyFiles */,
|
||||||
|
headerToCheckAgainst, 4 /* maxDegreeOfParallelism */, logger);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Setup the fixdat
|
||||||
|
DatFile matched = (DatFile)CloneHeader();
|
||||||
|
matched.Files = new SortedDictionary<string, List<DatItem>>();
|
||||||
|
matched.FileName = "fixDat_" + matched.FileName;
|
||||||
|
matched.Name = "fixDat_" + matched.Name;
|
||||||
|
matched.Description = "fixDat_" + matched.Description;
|
||||||
|
matched.DatFormat = DatFormat.Logiqx;
|
||||||
|
|
||||||
|
// Now that all files are parsed, get only files found in directory
|
||||||
|
bool found = false;
|
||||||
|
foreach (List<DatItem> roms in Files.Values)
|
||||||
|
{
|
||||||
|
List<DatItem> newroms = DatItem.Merge(roms, logger);
|
||||||
|
foreach (Rom rom in newroms)
|
||||||
|
{
|
||||||
|
if (rom.SourceID == 99)
|
||||||
|
{
|
||||||
|
found = true;
|
||||||
|
string key = rom.Size + "-" + rom.CRC;
|
||||||
|
if (matched.Files.ContainsKey(key))
|
||||||
|
{
|
||||||
|
matched.Files[key].Add(rom);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
List<DatItem> temp = new List<DatItem>();
|
||||||
|
temp.Add(rom);
|
||||||
|
matched.Files.Add(key, temp);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Now output the fixdat to the main folder
|
||||||
|
if (found)
|
||||||
|
{
|
||||||
|
matched.WriteToFile("", logger, stats: true);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
logger.User("No fixDat needed");
|
||||||
|
}
|
||||||
|
|
||||||
|
return success;
|
||||||
|
}
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
}
|
||||||
|
}
|
||||||
694
SabreTools.Helper/Dats/Partials/DatFile.Splitters.cs
Normal file
694
SabreTools.Helper/Dats/Partials/DatFile.Splitters.cs
Normal file
@@ -0,0 +1,694 @@
|
|||||||
|
using System;
|
||||||
|
using System.Collections.Generic;
|
||||||
|
using System.Linq;
|
||||||
|
using System.Web;
|
||||||
|
|
||||||
|
using SabreTools.Helper.Data;
|
||||||
|
using SabreTools.Helper.Tools;
|
||||||
|
|
||||||
|
#if MONO
|
||||||
|
using System.IO;
|
||||||
|
#else
|
||||||
|
using Alphaleonis.Win32.Filesystem;
|
||||||
|
#endif
|
||||||
|
using NaturalSort;
|
||||||
|
|
||||||
|
namespace SabreTools.Helper.Dats
|
||||||
|
{
|
||||||
|
public partial class DatFile
|
||||||
|
{
|
||||||
|
#region Splitting [MODULAR DONE]
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Split a DAT by input extensions
|
||||||
|
/// </summary>
|
||||||
|
/// <param name="outDir">Name of the directory to write the DATs out to</param>
|
||||||
|
/// <param name="basepath">Parent path for replacement</param>
|
||||||
|
/// <param name="extA">List of extensions to split on (first DAT)</param>
|
||||||
|
/// <param name="extB">List of extensions to split on (second DAT)</param>
|
||||||
|
/// <param name="logger">Logger object for console and file writing</param>
|
||||||
|
/// <returns>True if split succeeded, false otherwise</returns>
|
||||||
|
public bool SplitByExt(string outDir, string basepath, List<string> extA, List<string> extB, Logger logger)
|
||||||
|
{
|
||||||
|
// Make sure all of the extensions have a dot at the beginning
|
||||||
|
List<string> newExtA = new List<string>();
|
||||||
|
foreach (string s in extA)
|
||||||
|
{
|
||||||
|
newExtA.Add((s.StartsWith(".") ? s : "." + s).ToUpperInvariant());
|
||||||
|
}
|
||||||
|
string newExtAString = string.Join(",", newExtA);
|
||||||
|
|
||||||
|
List<string> newExtB = new List<string>();
|
||||||
|
foreach (string s in extB)
|
||||||
|
{
|
||||||
|
newExtB.Add((s.StartsWith(".") ? s : "." + s).ToUpperInvariant());
|
||||||
|
}
|
||||||
|
string newExtBString = string.Join(",", newExtB);
|
||||||
|
|
||||||
|
// Set all of the appropriate outputs for each of the subsets
|
||||||
|
DatFile datdataA = new DatFile
|
||||||
|
{
|
||||||
|
FileName = this.FileName + " (" + newExtAString + ")",
|
||||||
|
Name = this.Name + " (" + newExtAString + ")",
|
||||||
|
Description = this.Description + " (" + newExtAString + ")",
|
||||||
|
Category = this.Category,
|
||||||
|
Version = this.Version,
|
||||||
|
Date = this.Date,
|
||||||
|
Author = this.Author,
|
||||||
|
Email = this.Email,
|
||||||
|
Homepage = this.Homepage,
|
||||||
|
Url = this.Url,
|
||||||
|
Comment = this.Comment,
|
||||||
|
Files = new SortedDictionary<string, List<DatItem>>(),
|
||||||
|
DatFormat = this.DatFormat,
|
||||||
|
};
|
||||||
|
DatFile datdataB = new DatFile
|
||||||
|
{
|
||||||
|
FileName = this.FileName + " (" + newExtBString + ")",
|
||||||
|
Name = this.Name + " (" + newExtBString + ")",
|
||||||
|
Description = this.Description + " (" + newExtBString + ")",
|
||||||
|
Category = this.Category,
|
||||||
|
Version = this.Version,
|
||||||
|
Date = this.Date,
|
||||||
|
Author = this.Author,
|
||||||
|
Email = this.Email,
|
||||||
|
Homepage = this.Homepage,
|
||||||
|
Url = this.Url,
|
||||||
|
Comment = this.Comment,
|
||||||
|
Files = new SortedDictionary<string, List<DatItem>>(),
|
||||||
|
DatFormat = this.DatFormat,
|
||||||
|
};
|
||||||
|
|
||||||
|
// If roms is empty, return false
|
||||||
|
if (this.Files.Count == 0)
|
||||||
|
{
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Now separate the roms accordingly
|
||||||
|
foreach (string key in this.Files.Keys)
|
||||||
|
{
|
||||||
|
foreach (DatItem rom in this.Files[key])
|
||||||
|
{
|
||||||
|
if (newExtA.Contains(Path.GetExtension(rom.Name.ToUpperInvariant())))
|
||||||
|
{
|
||||||
|
if (datdataA.Files.ContainsKey(key))
|
||||||
|
{
|
||||||
|
datdataA.Files[key].Add(rom);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
List<DatItem> temp = new List<DatItem>();
|
||||||
|
temp.Add(rom);
|
||||||
|
datdataA.Files.Add(key, temp);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else if (newExtB.Contains(Path.GetExtension(rom.Name.ToUpperInvariant())))
|
||||||
|
{
|
||||||
|
if (datdataB.Files.ContainsKey(key))
|
||||||
|
{
|
||||||
|
datdataB.Files[key].Add(rom);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
List<DatItem> temp = new List<DatItem>();
|
||||||
|
temp.Add(rom);
|
||||||
|
datdataB.Files.Add(key, temp);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
if (datdataA.Files.ContainsKey(key))
|
||||||
|
{
|
||||||
|
datdataA.Files[key].Add(rom);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
List<DatItem> temp = new List<DatItem>();
|
||||||
|
temp.Add(rom);
|
||||||
|
datdataA.Files.Add(key, temp);
|
||||||
|
}
|
||||||
|
if (datdataB.Files.ContainsKey(key))
|
||||||
|
{
|
||||||
|
datdataB.Files[key].Add(rom);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
List<DatItem> temp = new List<DatItem>();
|
||||||
|
temp.Add(rom);
|
||||||
|
datdataB.Files.Add(key, temp);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the output directory
|
||||||
|
if (outDir != "")
|
||||||
|
{
|
||||||
|
outDir = outDir + Path.GetDirectoryName(this.FileName).Remove(0, basepath.Length - 1);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
outDir = Path.GetDirectoryName(this.FileName);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Then write out both files
|
||||||
|
bool success = datdataA.WriteToFile(outDir, logger);
|
||||||
|
success &= datdataB.WriteToFile(outDir, logger);
|
||||||
|
|
||||||
|
return success;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
/// Split a DAT by best available hashes
/// </summary>
/// <param name="outDir">Name of the directory to write the DATs out to</param>
/// <param name="basepath">Parent path for replacement</param>
/// <param name="logger">Logger object for console and file writing</param>
/// <returns>True if split succeeded, false otherwise</returns>
/// <remarks>
/// Each item lands in exactly one output DAT, chosen by the best hash it
/// carries: Nodump status first, then SHA-1, then MD5, then CRC. Items with
/// no usable hash are collected in 'other', which is currently never written
/// out (see NOTE below).
/// </remarks>
public bool SplitByHash(string outDir, string basepath, Logger logger)
{
	// Sanitize the basepath to be more predictable
	basepath = (basepath.EndsWith(Path.DirectorySeparatorChar.ToString()) ? basepath : basepath + Path.DirectorySeparatorChar);

	// Create each of the respective output DATs
	logger.User("Creating and populating new DATs");
	DatFile itemStatus = new DatFile
	{
		FileName = this.FileName + " (Nodump)",
		Name = this.Name + " (Nodump)",
		Description = this.Description + " (Nodump)",
		Category = this.Category,
		Version = this.Version,
		Date = this.Date,
		Author = this.Author,
		Email = this.Email,
		Homepage = this.Homepage,
		Url = this.Url,
		Comment = this.Comment,
		Header = this.Header,
		Type = this.Type,
		ForceMerging = this.ForceMerging,
		ForceNodump = this.ForceNodump,
		ForcePacking = this.ForcePacking,
		DatFormat = this.DatFormat,
		MergeRoms = this.MergeRoms,
		Files = new SortedDictionary<string, List<DatItem>>(),
	};
	DatFile sha1 = new DatFile
	{
		FileName = this.FileName + " (SHA-1)",
		Name = this.Name + " (SHA-1)",
		Description = this.Description + " (SHA-1)",
		Category = this.Category,
		Version = this.Version,
		Date = this.Date,
		Author = this.Author,
		Email = this.Email,
		Homepage = this.Homepage,
		Url = this.Url,
		Comment = this.Comment,
		Header = this.Header,
		Type = this.Type,
		ForceMerging = this.ForceMerging,
		ForceNodump = this.ForceNodump,
		ForcePacking = this.ForcePacking,
		DatFormat = this.DatFormat,
		MergeRoms = this.MergeRoms,
		Files = new SortedDictionary<string, List<DatItem>>(),
	};
	DatFile md5 = new DatFile
	{
		FileName = this.FileName + " (MD5)",
		Name = this.Name + " (MD5)",
		Description = this.Description + " (MD5)",
		Category = this.Category,
		Version = this.Version,
		Date = this.Date,
		Author = this.Author,
		Email = this.Email,
		Homepage = this.Homepage,
		Url = this.Url,
		Comment = this.Comment,
		Header = this.Header,
		Type = this.Type,
		ForceMerging = this.ForceMerging,
		ForceNodump = this.ForceNodump,
		ForcePacking = this.ForcePacking,
		DatFormat = this.DatFormat,
		MergeRoms = this.MergeRoms,
		Files = new SortedDictionary<string, List<DatItem>>(),
	};
	DatFile crc = new DatFile
	{
		FileName = this.FileName + " (CRC)",
		Name = this.Name + " (CRC)",
		Description = this.Description + " (CRC)",
		Category = this.Category,
		Version = this.Version,
		Date = this.Date,
		Author = this.Author,
		Email = this.Email,
		Homepage = this.Homepage,
		Url = this.Url,
		Comment = this.Comment,
		Header = this.Header,
		Type = this.Type,
		ForceMerging = this.ForceMerging,
		ForceNodump = this.ForceNodump,
		ForcePacking = this.ForcePacking,
		DatFormat = this.DatFormat,
		MergeRoms = this.MergeRoms,
		Files = new SortedDictionary<string, List<DatItem>>(),
	};

	DatFile other = new DatFile
	{
		FileName = this.FileName + " (Other)",
		Name = this.Name + " (Other)",
		Description = this.Description + " (Other)",
		Category = this.Category,
		Version = this.Version,
		Date = this.Date,
		Author = this.Author,
		Email = this.Email,
		Homepage = this.Homepage,
		Url = this.Url,
		Comment = this.Comment,
		Header = this.Header,
		Type = this.Type,
		ForceMerging = this.ForceMerging,
		ForceNodump = this.ForceNodump,
		ForcePacking = this.ForcePacking,
		DatFormat = this.DatFormat,
		MergeRoms = this.MergeRoms,
		Files = new SortedDictionary<string, List<DatItem>>(),
	};

	// Now populate each of the DAT objects in turn
	List<string> keys = this.Files.Keys.ToList();
	foreach (string key in keys)
	{
		List<DatItem> roms = this.Files[key];
		foreach (DatItem rom in roms)
		{
			// If the file is not a Rom or Disk, continue
			if (rom.Type != ItemType.Disk && rom.Type != ItemType.Rom)
			{
				continue;
			}

			// If the file is marked Nodump, it goes to the nodump DAT regardless of hashes
			if ((rom.Type == ItemType.Rom && ((Rom)rom).ItemStatus == ItemStatus.Nodump)
				|| (rom.Type == ItemType.Disk && ((Disk)rom).ItemStatus == ItemStatus.Nodump))
			{
				if (itemStatus.Files.ContainsKey(key))
				{
					itemStatus.Files[key].Add(rom);
				}
				else
				{
					List<DatItem> temp = new List<DatItem>();
					temp.Add(rom);
					itemStatus.Files.Add(key, temp);
				}
			}
			// If the file has a SHA-1
			else if ((rom.Type == ItemType.Rom && !String.IsNullOrEmpty(((Rom)rom).SHA1))
				|| (rom.Type == ItemType.Disk && !String.IsNullOrEmpty(((Disk)rom).SHA1)))
			{
				if (sha1.Files.ContainsKey(key))
				{
					sha1.Files[key].Add(rom);
				}
				else
				{
					List<DatItem> temp = new List<DatItem>();
					temp.Add(rom);
					sha1.Files.Add(key, temp);
				}
			}
			// If the file has no SHA-1 but has an MD5
			else if ((rom.Type == ItemType.Rom && !String.IsNullOrEmpty(((Rom)rom).MD5))
				|| (rom.Type == ItemType.Disk && !String.IsNullOrEmpty(((Disk)rom).MD5)))
			{
				if (md5.Files.ContainsKey(key))
				{
					md5.Files[key].Add(rom);
				}
				else
				{
					List<DatItem> temp = new List<DatItem>();
					temp.Add(rom);
					md5.Files.Add(key, temp);
				}
			}
			// If the file has no MD5 but a CRC
			// BUGFIX: this branch previously re-tested SHA1 (a copy/paste of the
			// SHA-1 branch above). SHA1 is already known to be empty when control
			// reaches here, so the condition was always false: CRC-only roms fell
			// through to 'other' and the crc DAT was always empty. Only roms are
			// checked because disks carry MD5/SHA-1 only; a disk with neither has
			// no usable hash and belongs in 'other'.
			else if (rom.Type == ItemType.Rom && !String.IsNullOrEmpty(((Rom)rom).CRC))
			{
				if (crc.Files.ContainsKey(key))
				{
					crc.Files[key].Add(rom);
				}
				else
				{
					List<DatItem> temp = new List<DatItem>();
					temp.Add(rom);
					crc.Files.Add(key, temp);
				}
			}
			else
			{
				if (other.Files.ContainsKey(key))
				{
					other.Files[key].Add(rom);
				}
				else
				{
					List<DatItem> temp = new List<DatItem>();
					temp.Add(rom);
					other.Files.Add(key, temp);
				}
			}
		}
	}

	// Get the output directory
	if (outDir != "")
	{
		outDir = outDir + Path.GetDirectoryName(this.FileName).Remove(0, basepath.Length - 1);
	}
	else
	{
		outDir = Path.GetDirectoryName(this.FileName);
	}

	// Now, output all of the files to the output directory
	logger.User("DAT information created, outputting new files");
	bool success = true;
	if (itemStatus.Files.Count > 0)
	{
		success &= itemStatus.WriteToFile(outDir, logger);
	}
	if (sha1.Files.Count > 0)
	{
		success &= sha1.WriteToFile(outDir, logger);
	}
	if (md5.Files.Count > 0)
	{
		success &= md5.WriteToFile(outDir, logger);
	}
	if (crc.Files.Count > 0)
	{
		success &= crc.WriteToFile(outDir, logger);
	}
	// NOTE(review): 'other' (items with no usable hash) is populated but never
	// written out, so those items are silently dropped from every output DAT.
	// Preserved as-is since it may be intentional - confirm before changing.

	return success;
}
|
||||||
|
|
||||||
|
/// <summary>
/// Split a SuperDAT by lowest available directory level
/// </summary>
/// <param name="outDir">Name of the directory to write the DATs out to</param>
/// <param name="basepath">Parent path for replacement</param>
/// <param name="shortname">True if short names should be used, false otherwise</param>
/// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise</param>
/// <param name="logger">Logger object for console and file writing</param>
/// <returns>True if split succeeded, false otherwise</returns>
/// <remarks>
/// Keys are sorted shallowest-directory-first (see SplitByLevelSort); each run
/// of games sharing the same parent directory is accumulated into a temporary
/// DAT and flushed via SplitByLevelHelper whenever the parent directory changes.
/// </remarks>
public bool SplitByLevel(string outDir, string basepath, bool shortname, bool basedat, Logger logger)
{
	// Sanitize the basepath to be more predictable
	// NOTE(review): basepath is sanitized here but never read again in this
	// method (unlike SplitByHash/SplitByType, which use it to trim outDir) -
	// confirm whether that is intentional.
	basepath = (basepath.EndsWith(Path.DirectorySeparatorChar.ToString()) ? basepath : basepath + Path.DirectorySeparatorChar);

	// First, organize by games so that we can do the right thing
	BucketByGame(false, true, logger, output: false, lower: false);

	// Create a temporary DAT to add things to
	DatFile tempDat = (DatFile)CloneHeader();
	tempDat.Name = null;

	// Sort the input keys
	List<string> keys = Files.Keys.ToList();
	keys.Sort(SplitByLevelSort);

	// Then, we loop over the games
	foreach (string key in keys)
	{
		// Here, the key is the name of the game to be used for comparison
		// A non-null tempDat.Name that differs from this key's parent directory
		// means the previous directory group is complete and should be flushed.
		if (tempDat.Name != null && tempDat.Name != Style.GetDirectoryName(key))
		{
			// Process and output the DAT
			SplitByLevelHelper(tempDat, outDir, shortname, basedat, logger);

			// Reset the DAT for the next items
			tempDat = (DatFile)CloneHeader();
			tempDat.Name = null;
		}

		// Clean the input list and set all games to be pathless
		// NOTE: this mutates the shared DatItem instances in Files[key] in place.
		List<DatItem> items = Files[key];
		items.ForEach(item => item.Machine.Name = Style.GetFileName(item.Machine.Name));
		items.ForEach(item => item.Machine.Description = Style.GetFileName(item.Machine.Description));

		// Now add the game to the output DAT
		if (tempDat.Files.ContainsKey(key))
		{
			tempDat.Files[key].AddRange(items);
		}
		else
		{
			tempDat.Files.Add(key, items);
		}

		// Then set the DAT name to be the parent directory name
		tempDat.Name = Style.GetDirectoryName(key);
	}

	// Then we write the last DAT out since it would be skipped otherwise
	SplitByLevelHelper(tempDat, outDir, shortname, basedat, logger);

	return true;
}
|
||||||
|
|
||||||
|
/// <summary>
/// Helper function for SplitByLevel to sort the input game names
/// </summary>
/// <param name="a">First string to compare</param>
/// <param name="b">Second string to compare</param>
/// <returns>-1 for a coming before b, 0 for a == b, 1 for a coming after b</returns>
/// <remarks>
/// Orders primarily by directory depth (number of path separators, shallowest
/// first) and falls back to a natural string comparison at equal depth.
/// </remarks>
private int SplitByLevelSort(string a, string b)
{
	// Depth is just the count of either kind of path separator
	int depthA = a.Count(ch => ch == '/' || ch == '\\');
	int depthB = b.Count(ch => ch == '/' || ch == '\\');

	// Different depths win outright; shallower paths sort first
	if (depthA != depthB)
	{
		return depthA - depthB;
	}

	// Same depth: defer to natural-order comparison of the full names
	return new NaturalComparer().Compare(a, b);
}
|
||||||
|
|
||||||
|
/// <summary>
/// Helper function for SplitByLevel to clean and write out a DAT
/// </summary>
/// <param name="datFile">DAT to clean and write out</param>
/// <param name="outDir">Directory to write out to</param>
/// <param name="shortname">True if short naming scheme should be used, false otherwise</param>
/// <param name="restore">True if original filenames should be used as the base for output filename, false otherwise</param>
/// <param name="logger">Logger object for file and console output</param>
private void SplitByLevelHelper(DatFile datFile, string outDir, bool shortname, bool restore, Logger logger)
{
	// Get the name from the DAT to use separately
	string name = datFile.Name;
	string expName = name.Replace("/", " - ").Replace("\\", " - ");

	// Get the path that the file will be written out to
	string path;
	if (String.IsNullOrEmpty(name))
	{
		path = HttpUtility.HtmlDecode(outDir);
	}
	else
	{
		path = HttpUtility.HtmlDecode(Path.Combine(outDir, name));
	}

	// Choose the base output filename, then HTML-decode it
	string newFileName;
	if (String.IsNullOrEmpty(name))
	{
		newFileName = FileName;
	}
	else if (shortname)
	{
		newFileName = Style.GetFileName(name);
	}
	else
	{
		newFileName = expName;
	}
	newFileName = HttpUtility.HtmlDecode(newFileName);

	// Optionally prefix with the parent DAT's filename
	if (restore)
	{
		newFileName = FileName + " (" + newFileName + ")";
	}

	// Now set the new output values
	datFile.FileName = newFileName;
	datFile.Name = Name + " (" + expName + ")";
	datFile.Description = (String.IsNullOrEmpty(Description) ? datFile.Name : Description + " (" + expName + ")");
	datFile.Type = null;

	// Write out the temporary DAT to the proper directory
	datFile.WriteToFile(path, logger);
}
|
||||||
|
|
||||||
|
/// <summary>
/// Split a DAT by type of Rom
/// </summary>
/// <param name="outDir">Name of the directory to write the DATs out to</param>
/// <param name="basepath">Parent path for replacement</param>
/// <param name="logger">Logger object for console and file writing</param>
/// <returns>True if split succeeded, false otherwise</returns>
/// <remarks>
/// Produces up to three DATs - one each for Rom, Disk, and Sample items.
/// Items of any other type are not added to any output and are dropped.
/// </remarks>
public bool SplitByType(string outDir, string basepath, Logger logger)
{
	// Sanitize the basepath to be more predictable
	basepath = (basepath.EndsWith(Path.DirectorySeparatorChar.ToString()) ? basepath : basepath + Path.DirectorySeparatorChar);

	// Create each of the respective output DATs, copying this DAT's header fields
	logger.User("Creating and populating new DATs");
	DatFile romdat = new DatFile
	{
		FileName = this.FileName + " (ROM)",
		Name = this.Name + " (ROM)",
		Description = this.Description + " (ROM)",
		Category = this.Category,
		Version = this.Version,
		Date = this.Date,
		Author = this.Author,
		Email = this.Email,
		Homepage = this.Homepage,
		Url = this.Url,
		Comment = this.Comment,
		Header = this.Header,
		Type = this.Type,
		ForceMerging = this.ForceMerging,
		ForceNodump = this.ForceNodump,
		ForcePacking = this.ForcePacking,
		DatFormat = this.DatFormat,
		MergeRoms = this.MergeRoms,
		Files = new SortedDictionary<string, List<DatItem>>(),
	};
	DatFile diskdat = new DatFile
	{
		FileName = this.FileName + " (Disk)",
		Name = this.Name + " (Disk)",
		Description = this.Description + " (Disk)",
		Category = this.Category,
		Version = this.Version,
		Date = this.Date,
		Author = this.Author,
		Email = this.Email,
		Homepage = this.Homepage,
		Url = this.Url,
		Comment = this.Comment,
		Header = this.Header,
		Type = this.Type,
		ForceMerging = this.ForceMerging,
		ForceNodump = this.ForceNodump,
		ForcePacking = this.ForcePacking,
		DatFormat = this.DatFormat,
		MergeRoms = this.MergeRoms,
		Files = new SortedDictionary<string, List<DatItem>>(),
	};
	DatFile sampledat = new DatFile
	{
		FileName = this.FileName + " (Sample)",
		Name = this.Name + " (Sample)",
		Description = this.Description + " (Sample)",
		Category = this.Category,
		Version = this.Version,
		Date = this.Date,
		Author = this.Author,
		Email = this.Email,
		Homepage = this.Homepage,
		Url = this.Url,
		Comment = this.Comment,
		Header = this.Header,
		Type = this.Type,
		ForceMerging = this.ForceMerging,
		ForceNodump = this.ForceNodump,
		ForcePacking = this.ForcePacking,
		DatFormat = this.DatFormat,
		MergeRoms = this.MergeRoms,
		Files = new SortedDictionary<string, List<DatItem>>(),
	};

	// Now populate each of the DAT objects in turn, dispatching on item type
	List<string> keys = this.Files.Keys.ToList();
	foreach (string key in keys)
	{
		List<DatItem> roms = this.Files[key];
		foreach (DatItem rom in roms)
		{
			// If the file is a Rom
			if (rom.Type == ItemType.Rom)
			{
				if (romdat.Files.ContainsKey(key))
				{
					romdat.Files[key].Add(rom);
				}
				else
				{
					List<DatItem> temp = new List<DatItem>();
					temp.Add(rom);
					romdat.Files.Add(key, temp);
				}
			}
			// If the file is a Disk
			else if (rom.Type == ItemType.Disk)
			{
				if (diskdat.Files.ContainsKey(key))
				{
					diskdat.Files[key].Add(rom);
				}
				else
				{
					List<DatItem> temp = new List<DatItem>();
					temp.Add(rom);
					diskdat.Files.Add(key, temp);
				}
			}

			// If the file is a Sample
			else if (rom.Type == ItemType.Sample)
			{
				if (sampledat.Files.ContainsKey(key))
				{
					sampledat.Files[key].Add(rom);
				}
				else
				{
					List<DatItem> temp = new List<DatItem>();
					temp.Add(rom);
					sampledat.Files.Add(key, temp);
				}
			}
			// Any other item type falls through and is not written anywhere
		}
	}

	// Get the output directory: mirror this DAT's directory under outDir,
	// stripping the basepath prefix (or use the DAT's own directory)
	if (outDir != "")
	{
		outDir = outDir + Path.GetDirectoryName(this.FileName).Remove(0, basepath.Length - 1);
	}
	else
	{
		outDir = Path.GetDirectoryName(this.FileName);
	}

	// Now, output all of the files to the output directory
	logger.User("DAT information created, outputting new files");
	bool success = true;
	if (romdat.Files.Count > 0)
	{
		success &= romdat.WriteToFile(outDir, logger);
	}
	if (diskdat.Files.Count > 0)
	{
		success &= diskdat.WriteToFile(outDir, logger);
	}
	if (sampledat.Files.Count > 0)
	{
		success &= sampledat.WriteToFile(outDir, logger);
	}

	return success;
}
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
}
|
||||||
|
}
|
||||||
640
SabreTools.Helper/Dats/Partials/DatFile.Statistics.cs
Normal file
640
SabreTools.Helper/Dats/Partials/DatFile.Statistics.cs
Normal file
@@ -0,0 +1,640 @@
|
|||||||
|
using System;
|
||||||
|
using System.Collections.Generic;
|
||||||
|
using System.Linq;
|
||||||
|
using System.Web;
|
||||||
|
|
||||||
|
using SabreTools.Helper.Data;
|
||||||
|
using SabreTools.Helper.Tools;
|
||||||
|
|
||||||
|
#if MONO
|
||||||
|
using System.IO;
|
||||||
|
#else
|
||||||
|
using Alphaleonis.Win32.Filesystem;
|
||||||
|
|
||||||
|
using FileAccess = System.IO.FileAccess;
|
||||||
|
using FileMode = System.IO.FileMode;
|
||||||
|
using SearchOption = System.IO.SearchOption;
|
||||||
|
using StreamWriter = System.IO.StreamWriter;
|
||||||
|
#endif
|
||||||
|
|
||||||
|
namespace SabreTools.Helper.Dats
|
||||||
|
{
|
||||||
|
public partial class DatFile
|
||||||
|
{
|
||||||
|
#region Instance Methods
|
||||||
|
|
||||||
|
#region Statistics [MODULAR DONE]
|
||||||
|
|
||||||
|
/// <summary>
/// Recalculate the statistics for the Dat
/// </summary>
/// <remarks>
/// Resets all counters to zero, then walks every item list in Files and
/// re-accumulates counts and total size.
/// </remarks>
public void RecalculateStats()
{
	// Wipe out any stats already there
	RomCount = 0;
	DiskCount = 0;
	TotalSize = 0;
	CRCCount = 0;
	MD5Count = 0;
	SHA1Count = 0;
	BaddumpCount = 0;
	NodumpCount = 0;

	// If we have a blank Dat in any way, return
	// (the 'this == null' clause can never be true for a normal instance call)
	if (this == null || Files == null || Files.Count == 0)
	{
		return;
	}

	// Loop through and add
	foreach (List<DatItem> roms in Files.Values)
	{
		// NOTE(review): this loop variable is typed Rom, so every DatItem in the
		// list is runtime-cast to Rom here. The (Disk)rom casts below suggest
		// non-Rom items can be present - confirm against the DatItem/Rom/Disk
		// type hierarchy that this cannot throw an InvalidCastException.
		foreach (Rom rom in roms)
		{
			RomCount += (rom.Type == ItemType.Rom ? 1 : 0);
			DiskCount += (rom.Type == ItemType.Disk ? 1 : 0);
			// Nodump items contribute nothing to the total size
			TotalSize += (rom.ItemStatus == ItemStatus.Nodump ? 0 : rom.Size);
			CRCCount += (String.IsNullOrEmpty(rom.CRC) ? 0 : 1);
			MD5Count += (String.IsNullOrEmpty(rom.MD5) ? 0 : 1);
			SHA1Count += (String.IsNullOrEmpty(rom.SHA1) ? 0 : 1);
			// BadDump/Nodump counters only apply to Rom and Disk items
			BaddumpCount += (rom.Type == ItemType.Disk
				? (((Disk)rom).ItemStatus == ItemStatus.BadDump ? 1 : 0)
				: (rom.Type == ItemType.Rom
					? (((Rom)rom).ItemStatus == ItemStatus.BadDump ? 1 : 0)
					: 0)
				);
			NodumpCount += (rom.Type == ItemType.Disk
				? (((Disk)rom).ItemStatus == ItemStatus.Nodump ? 1 : 0)
				: (rom.Type == ItemType.Rom
					? (((Rom)rom).ItemStatus == ItemStatus.Nodump ? 1 : 0)
					: 0)
				);
		}
	}
}
|
||||||
|
|
||||||
|
/// <summary>
/// Output the stats for the Dat in a human-readable format
/// </summary>
/// <param name="sw">StreamWriter representing the output file or stream for the statistics</param>
/// <param name="statDatFormat">Set the statistics output format to use</param>
/// <param name="logger">Logger object for file and console writing</param>
/// <param name="recalculate">True if numbers should be recalculated for the DAT, false otherwise (default)</param>
/// <param name="game">Number of games to use, -1 means recalculate games (default)</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise (default)</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise (default)</param>
/// <remarks>
/// Always logs a plain-text summary, then writes one record to sw in the
/// selected format (CSV, TSV, HTML table row, or plain text).
/// </remarks>
public void OutputStats(StreamWriter sw, StatDatFormat statDatFormat, Logger logger, bool recalculate = false, long game = -1, bool baddumpCol = false, bool nodumpCol = false)
{
	// If we're supposed to recalculate the statistics, do so
	if (recalculate)
	{
		RecalculateStats();
	}

	BucketByGame(false, true, logger, false);
	// NOTE(review): this wraps a negative TotalSize back into positive range
	// (unchecked Int64 arithmetic), presumably compensating for overflow during
	// accumulation - confirm the intent before touching it.
	if (TotalSize < 0)
	{
		TotalSize = Int64.MaxValue + TotalSize;
	}

	// Log the results to screen
	string results = @"For '" + FileName + @"':
--------------------------------------------------
Uncompressed size: " + Style.GetBytesReadable(TotalSize) + @"
Games found: " + (game == -1 ? Files.Count : game) + @"
Roms found: " + RomCount + @"
Disks found: " + DiskCount + @"
Roms with CRC: " + CRCCount + @"
Roms with MD5: " + MD5Count + @"
Roms with SHA-1: " + SHA1Count + "\n";

	if (baddumpCol)
	{
		results += " Roms with BadDump status: " + BaddumpCount + "\n";
	}
	if (nodumpCol)
	{
		results += " Roms with Nodump status: " + NodumpCount + "\n";
	}

	logger.User(results);

	// Now write it out to file as well, formatted per statDatFormat
	string line = "";
	switch (statDatFormat)
	{
		case StatDatFormat.CSV:
			// Comma-separated, every field quoted
			line = "\"" + FileName + "\","
				+ "\"" + Style.GetBytesReadable(TotalSize) + "\","
				+ "\"" + (game == -1 ? Files.Count : game) + "\","
				+ "\"" + RomCount + "\","
				+ "\"" + DiskCount + "\","
				+ "\"" + CRCCount + "\","
				+ "\"" + MD5Count + "\","
				+ "\"" + SHA1Count + "\"";

			if (baddumpCol)
			{
				line += ",\"" + BaddumpCount + "\"";
			}
			if (nodumpCol)
			{
				line += ",\"" + NodumpCount + "\"";
			}

			line += "\n";
			break;
		case StatDatFormat.HTML:
			// One table row; a "DIR: " prefix marks a directory-summary row
			line = "\t\t\t<tr" + (FileName.StartsWith("DIR: ")
				? " class=\"dir\"><td>" + HttpUtility.HtmlEncode(FileName.Remove(0, 5))
				: "><td>" + HttpUtility.HtmlEncode(FileName)) + "</td>"
				+ "<td align=\"right\">" + Style.GetBytesReadable(TotalSize) + "</td>"
				+ "<td align=\"right\">" + (game == -1 ? Files.Count : game) + "</td>"
				+ "<td align=\"right\">" + RomCount + "</td>"
				+ "<td align=\"right\">" + DiskCount + "</td>"
				+ "<td align=\"right\">" + CRCCount + "</td>"
				+ "<td align=\"right\">" + MD5Count + "</td>"
				+ "<td align=\"right\">" + SHA1Count + "</td>";

			if (baddumpCol)
			{
				line += "<td align=\"right\">" + BaddumpCount + "</td>";
			}
			if (nodumpCol)
			{
				line += "<td align=\"right\">" + NodumpCount + "</td>";
			}

			line += "</tr>\n";
			break;
		case StatDatFormat.None:
		default:
			// Plain text, mirroring the logged summary above
			line = @"'" + FileName + @"':
--------------------------------------------------
Uncompressed size: " + Style.GetBytesReadable(TotalSize) + @"
Games found: " + (game == -1 ? Files.Count : game) + @"
Roms found: " + RomCount + @"
Disks found: " + DiskCount + @"
Roms with CRC: " + CRCCount + @"
Roms with MD5: " + MD5Count + @"
Roms with SHA-1: " + SHA1Count + "\n";

			if (baddumpCol)
			{
				line += " Roms with BadDump status: " + BaddumpCount + "\n";
			}
			if (nodumpCol)
			{
				line += " Roms with Nodump status: " + NodumpCount + "\n";
			}
			break;
		case StatDatFormat.TSV:
			// Tab-separated, every field quoted
			line = "\"" + FileName + "\"\t"
				+ "\"" + Style.GetBytesReadable(TotalSize) + "\"\t"
				+ "\"" + (game == -1 ? Files.Count : game) + "\"\t"
				+ "\"" + RomCount + "\"\t"
				+ "\"" + DiskCount + "\"\t"
				+ "\"" + CRCCount + "\"\t"
				+ "\"" + MD5Count + "\"\t"
				+ "\"" + SHA1Count + "\"";

			if (baddumpCol)
			{
				line += "\t\"" + BaddumpCount + "\"";
			}
			if (nodumpCol)
			{
				line += "\t\"" + NodumpCount + "\"";
			}

			line += "\n";
			break;
	}

	// Output the line to the streamwriter
	sw.Write(line);
}
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
|
||||||
|
#endregion // Instance Methods
|
||||||
|
|
||||||
|
#region Static Methods
|
||||||
|
|
||||||
|
#region Statistics [MODULAR DONE]
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Output the stats for a list of input dats as files in a human-readable format
|
||||||
|
/// </summary>
|
||||||
|
/// <param name="inputs">List of input files and folders</param>
|
||||||
|
/// <param name="reportName">Name of the output file</param>
|
||||||
|
/// <param name="single">True if single DAT stats are output, false otherwise</param>
|
||||||
|
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
|
||||||
|
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
|
||||||
|
/// <param name="statDatFormat" > Set the statistics output format to use</param>
|
||||||
|
/// <param name="logger">Logger object for file and console output</param>
|
||||||
|
public static void OutputStats(List<string> inputs, string reportName, bool single, bool baddumpCol,
|
||||||
|
bool nodumpCol, StatDatFormat statDatFormat, Logger logger)
|
||||||
|
{
|
||||||
|
reportName += OutputStatsGetExtension(statDatFormat);
|
||||||
|
StreamWriter sw = new StreamWriter(File.Open(reportName, FileMode.Create, FileAccess.Write));
|
||||||
|
|
||||||
|
// Make sure we have all files
|
||||||
|
List<Tuple<string, string>> newinputs = new List<Tuple<string, string>>(); // item, basepath
|
||||||
|
foreach (string input in inputs)
|
||||||
|
{
|
||||||
|
if (File.Exists(input))
|
||||||
|
{
|
||||||
|
newinputs.Add(Tuple.Create(Path.GetFullPath(input), Path.GetDirectoryName(Path.GetFullPath(input))));
|
||||||
|
}
|
||||||
|
if (Directory.Exists(input))
|
||||||
|
{
|
||||||
|
foreach (string file in Directory.GetFiles(input, "*", SearchOption.AllDirectories))
|
||||||
|
{
|
||||||
|
newinputs.Add(Tuple.Create(Path.GetFullPath(file), Path.GetFullPath(input)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
newinputs = newinputs
|
||||||
|
.OrderBy(i => Path.GetDirectoryName(i.Item1))
|
||||||
|
.ThenBy(i => Path.GetFileName(i.Item1))
|
||||||
|
.ToList();
|
||||||
|
|
||||||
|
// Write the header, if any
|
||||||
|
OutputStatsWriteHeader(sw, statDatFormat, baddumpCol, nodumpCol);
|
||||||
|
|
||||||
|
// Init all total variables
|
||||||
|
long totalSize = 0;
|
||||||
|
long totalGame = 0;
|
||||||
|
long totalRom = 0;
|
||||||
|
long totalDisk = 0;
|
||||||
|
long totalCRC = 0;
|
||||||
|
long totalMD5 = 0;
|
||||||
|
long totalSHA1 = 0;
|
||||||
|
long totalBaddump = 0;
|
||||||
|
long totalNodump = 0;
|
||||||
|
|
||||||
|
// Init directory-level variables
|
||||||
|
string lastdir = null;
|
||||||
|
string basepath = null;
|
||||||
|
long dirSize = 0;
|
||||||
|
long dirGame = 0;
|
||||||
|
long dirRom = 0;
|
||||||
|
long dirDisk = 0;
|
||||||
|
long dirCRC = 0;
|
||||||
|
long dirMD5 = 0;
|
||||||
|
long dirSHA1 = 0;
|
||||||
|
long dirBaddump = 0;
|
||||||
|
long dirNodump = 0;
|
||||||
|
|
||||||
|
// Now process each of the input files
|
||||||
|
foreach (Tuple<string, string> filename in newinputs)
|
||||||
|
{
|
||||||
|
// Get the directory for the current file
|
||||||
|
string thisdir = Path.GetDirectoryName(filename.Item1);
|
||||||
|
basepath = Path.GetDirectoryName(filename.Item2);
|
||||||
|
|
||||||
|
// If we don't have the first file and the directory has changed, show the previous directory stats and reset
|
||||||
|
if (lastdir != null && thisdir != lastdir)
|
||||||
|
{
|
||||||
|
// Output separator if needed
|
||||||
|
OutputStatsWriteMidSeparator(sw, statDatFormat, baddumpCol, nodumpCol);
|
||||||
|
|
||||||
|
DatFile lastdirdat = new DatFile
|
||||||
|
{
|
||||||
|
FileName = "DIR: " + HttpUtility.HtmlEncode(lastdir.Remove(0, basepath.Length + (basepath.Length == 0 ? 0 : 1))),
|
||||||
|
TotalSize = dirSize,
|
||||||
|
RomCount = dirRom,
|
||||||
|
DiskCount = dirDisk,
|
||||||
|
CRCCount = dirCRC,
|
||||||
|
MD5Count = dirMD5,
|
||||||
|
SHA1Count = dirSHA1,
|
||||||
|
BaddumpCount = dirBaddump,
|
||||||
|
NodumpCount = dirNodump,
|
||||||
|
};
|
||||||
|
lastdirdat.OutputStats(sw, statDatFormat, logger, game: dirGame, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
|
||||||
|
|
||||||
|
// Write the mid-footer, if any
|
||||||
|
OutputStatsWriteMidFooter(sw, statDatFormat, baddumpCol, nodumpCol);
|
||||||
|
|
||||||
|
// Write the header, if any
|
||||||
|
OutputStatsWriteMidHeader(sw, statDatFormat, baddumpCol, nodumpCol);
|
||||||
|
|
||||||
|
// Reset the directory stats
|
||||||
|
dirSize = 0;
|
||||||
|
dirGame = 0;
|
||||||
|
dirRom = 0;
|
||||||
|
dirDisk = 0;
|
||||||
|
dirCRC = 0;
|
||||||
|
dirMD5 = 0;
|
||||||
|
dirSHA1 = 0;
|
||||||
|
dirBaddump = 0;
|
||||||
|
dirNodump = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.Verbose("Beginning stat collection for '" + filename.Item1 + "'", false);
|
||||||
|
List<string> games = new List<string>();
|
||||||
|
DatFile datdata = new DatFile();
|
||||||
|
datdata.Parse(filename.Item1, 0, 0, logger);
|
||||||
|
datdata.BucketByGame(false, true, logger, false);
|
||||||
|
|
||||||
|
// Output single DAT stats (if asked)
|
||||||
|
logger.User("Adding stats for file '" + filename.Item1 + "'\n", false);
|
||||||
|
if (single)
|
||||||
|
{
|
||||||
|
datdata.OutputStats(sw, statDatFormat, logger, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add single DAT stats to dir
|
||||||
|
dirSize += datdata.TotalSize;
|
||||||
|
dirGame += datdata.Files.Count;
|
||||||
|
dirRom += datdata.RomCount;
|
||||||
|
dirDisk += datdata.DiskCount;
|
||||||
|
dirCRC += datdata.CRCCount;
|
||||||
|
dirMD5 += datdata.MD5Count;
|
||||||
|
dirSHA1 += datdata.SHA1Count;
|
||||||
|
dirBaddump += datdata.BaddumpCount;
|
||||||
|
dirNodump += datdata.NodumpCount;
|
||||||
|
|
||||||
|
// Add single DAT stats to totals
|
||||||
|
totalSize += datdata.TotalSize;
|
||||||
|
totalGame += datdata.Files.Count;
|
||||||
|
totalRom += datdata.RomCount;
|
||||||
|
totalDisk += datdata.DiskCount;
|
||||||
|
totalCRC += datdata.CRCCount;
|
||||||
|
totalMD5 += datdata.MD5Count;
|
||||||
|
totalSHA1 += datdata.SHA1Count;
|
||||||
|
totalBaddump += datdata.BaddumpCount;
|
||||||
|
totalNodump += datdata.NodumpCount;
|
||||||
|
|
||||||
|
// Make sure to assign the new directory
|
||||||
|
lastdir = thisdir;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Output the directory stats one last time
|
||||||
|
OutputStatsWriteMidSeparator(sw, statDatFormat, baddumpCol, nodumpCol);
|
||||||
|
|
||||||
|
if (single)
|
||||||
|
{
|
||||||
|
DatFile dirdat = new DatFile
|
||||||
|
{
|
||||||
|
FileName = "DIR: " + HttpUtility.HtmlEncode(lastdir.Remove(0, basepath.Length + (basepath.Length == 0 ? 0 : 1))),
|
||||||
|
TotalSize = dirSize,
|
||||||
|
RomCount = dirRom,
|
||||||
|
DiskCount = dirDisk,
|
||||||
|
CRCCount = dirCRC,
|
||||||
|
MD5Count = dirMD5,
|
||||||
|
SHA1Count = dirSHA1,
|
||||||
|
BaddumpCount = dirBaddump,
|
||||||
|
NodumpCount = dirNodump,
|
||||||
|
};
|
||||||
|
dirdat.OutputStats(sw, statDatFormat, logger, game: dirGame, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write the mid-footer, if any
|
||||||
|
OutputStatsWriteMidFooter(sw, statDatFormat, baddumpCol, nodumpCol);
|
||||||
|
|
||||||
|
// Write the header, if any
|
||||||
|
OutputStatsWriteMidHeader(sw, statDatFormat, baddumpCol, nodumpCol);
|
||||||
|
|
||||||
|
// Reset the directory stats
|
||||||
|
dirSize = 0;
|
||||||
|
dirGame = 0;
|
||||||
|
dirRom = 0;
|
||||||
|
dirDisk = 0;
|
||||||
|
dirCRC = 0;
|
||||||
|
dirMD5 = 0;
|
||||||
|
dirSHA1 = 0;
|
||||||
|
dirNodump = 0;
|
||||||
|
|
||||||
|
// Output total DAT stats
|
||||||
|
DatFile totaldata = new DatFile
|
||||||
|
{
|
||||||
|
FileName = "DIR: All DATs",
|
||||||
|
TotalSize = totalSize,
|
||||||
|
RomCount = totalRom,
|
||||||
|
DiskCount = totalDisk,
|
||||||
|
CRCCount = totalCRC,
|
||||||
|
MD5Count = totalMD5,
|
||||||
|
SHA1Count = totalSHA1,
|
||||||
|
BaddumpCount = totalBaddump,
|
||||||
|
NodumpCount = totalNodump,
|
||||||
|
};
|
||||||
|
totaldata.OutputStats(sw, statDatFormat, logger, game: totalGame, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
|
||||||
|
|
||||||
|
// Output footer if needed
|
||||||
|
OutputStatsWriteFooter(sw, statDatFormat);
|
||||||
|
|
||||||
|
sw.Flush();
|
||||||
|
sw.Dispose();
|
||||||
|
|
||||||
|
logger.User(@"
|
||||||
|
Please check the log folder if the stats scrolled offscreen", false);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
/// Get the proper extension for the stat output format
/// </summary>
/// <param name="statDatFormat">StatDatFormat to get the extension for</param>
/// <returns>File extension with leading period</returns>
private static string OutputStatsGetExtension(StatDatFormat statDatFormat)
{
	string reportExtension = "";
	switch (statDatFormat)
	{
		case StatDatFormat.CSV:
			reportExtension = ".csv";
			break;
		case StatDatFormat.HTML:
			reportExtension = ".html";
			break;
		case StatDatFormat.TSV:
			// BUGFIX: previously returned ".csv" for TSV output (copy-paste error)
			reportExtension = ".tsv";
			break;
		case StatDatFormat.None:
		default:
			// Plain-text report is the fallback for unknown formats
			reportExtension = ".txt";
			break;
	}
	return reportExtension;
}
|
||||||
|
|
||||||
|
/// <summary>
/// Write out the header to the stream, if any exists
/// </summary>
/// <param name="sw">StreamWriter representing the output</param>
/// <param name="statDatFormat">StatDatFormat representing output format</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
private static void OutputStatsWriteHeader(StreamWriter sw, StatDatFormat statDatFormat, bool baddumpCol, bool nodumpCol)
{
	string head = "";
	switch (statDatFormat)
	{
		case StatDatFormat.HTML:
			// BUGFIX: the document metadata element is <head>, not <header>
			// (the original emitted invalid HTML); also use the valid CSS
			// property "text-align" instead of the nonexistent "align".
			head = @"<!DOCTYPE html>
<html>
<head>
	<title>DAT Statistics Report</title>
	<style>
		body {
			background-color: lightgray;
		}
		.dir {
			color: #0088FF;
		}
		.right {
			text-align: right;
		}
	</style>
</head>
<body>
<h2>DAT Statistics Report (" + DateTime.Now.ToShortDateString() + @")</h2>
<table border=""1"" cellpadding=""5"" cellspacing=""0"">
";
			break;
		case StatDatFormat.CSV:
		case StatDatFormat.TSV:
		case StatDatFormat.None:
		default:
			// Text-based formats have no file-level header
			break;
	}
	sw.Write(head);

	// Now write the mid header for those who need it
	OutputStatsWriteMidHeader(sw, statDatFormat, baddumpCol, nodumpCol);
}
|
||||||
|
|
||||||
|
/// <summary>
/// Write out the mid-header (column titles) to the stream, if any exists
/// </summary>
/// <param name="sw">StreamWriter representing the output</param>
/// <param name="statDatFormat">StatDatFormat representing output format</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
private static void OutputStatsWriteMidHeader(StreamWriter sw, StatDatFormat statDatFormat, bool baddumpCol, bool nodumpCol)
{
	string head = "";
	switch (statDatFormat)
	{
		case StatDatFormat.CSV:
			head = "\"File Name\",\"Total Size\",\"Games\",\"Roms\",\"Disks\",\"# with CRC\",\"# with MD5\",\"# with SHA-1\""
				+ (baddumpCol ? ",\"BadDumps\"" : "") + (nodumpCol ? ",\"Nodumps\"" : "") + "\n";
			break;
		case StatDatFormat.HTML:
			// BUGFIX: class attribute values do not take the CSS "." selector
			// prefix; class=".right" never matched the .right style rule.
			head = @"	<tr bgcolor=""gray""><th>File Name</th><th align=""right"">Total Size</th><th align=""right"">Games</th><th align=""right"">Roms</th>"
				+ @"<th align=""right"">Disks</th><th align=""right""># with CRC</th><th align=""right""># with MD5</th><th align=""right""># with SHA-1</th>"
				+ (baddumpCol ? "<th class=\"right\">Baddumps</th>" : "") + (nodumpCol ? "<th class=\"right\">Nodumps</th>" : "") + "</tr>\n";
			break;
		case StatDatFormat.TSV:
			head = "\"File Name\"\t\"Total Size\"\t\"Games\"\t\"Roms\"\t\"Disks\"\t\"# with CRC\"\t\"# with MD5\"\t\"# with SHA-1\""
				+ (baddumpCol ? "\t\"BadDumps\"" : "") + (nodumpCol ? "\t\"Nodumps\"" : "") + "\n";
			break;
		case StatDatFormat.None:
		default:
			// Plain text has no column header row
			break;
	}
	sw.Write(head);
}
|
||||||
|
|
||||||
|
/// <summary>
/// Write out the separator row to the stream, if any exists for the format
/// </summary>
/// <param name="sw">StreamWriter representing the output</param>
/// <param name="statDatFormat">StatDatFormat representing output format</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
private static void OutputStatsWriteMidSeparator(StreamWriter sw, StatDatFormat statDatFormat, bool baddumpCol, bool nodumpCol)
{
	string mid = "";
	switch (statDatFormat)
	{
		case StatDatFormat.HTML:
			// 9 fixed columns, plus one each for the optional columns
			int colspan = 9;
			if (baddumpCol)
			{
				colspan++;
			}
			if (nodumpCol)
			{
				colspan++;
			}
			mid = "<tr><td colspan=\"" + colspan + "\"></td></tr>\n";
			break;
		case StatDatFormat.CSV:
		case StatDatFormat.None:
		default:
			// No separator for text-based formats
			break;
	}
	sw.Write(mid);
}
|
||||||
|
|
||||||
|
/// <summary>
/// Write out the footer-separator to the stream, if any exists for the format
/// </summary>
/// <param name="sw">StreamWriter representing the output</param>
/// <param name="statDatFormat">StatDatFormat representing output format</param>
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
private static void OutputStatsWriteMidFooter(StreamWriter sw, StatDatFormat statDatFormat, bool baddumpCol, bool nodumpCol)
{
	string end;
	if (statDatFormat == StatDatFormat.HTML)
	{
		// 9 fixed columns, plus one each for the optional columns
		int colspan = 9;
		if (baddumpCol)
		{
			colspan++;
		}
		if (nodumpCol)
		{
			colspan++;
		}
		end = "<tr border=\"0\"><td colspan=\"" + colspan + "\"></td></tr>\n";
	}
	else
	{
		// CSV, TSV, None, and anything else all get a blank line
		end = "\n";
	}
	sw.Write(end);
}
|
||||||
|
|
||||||
|
/// <summary>
/// Write out the footer to the stream, if any exists for the format
/// </summary>
/// <param name="sw">StreamWriter representing the output</param>
/// <param name="statDatFormat">StatDatFormat representing output format</param>
private static void OutputStatsWriteFooter(StreamWriter sw, StatDatFormat statDatFormat)
{
	string end = "";
	switch (statDatFormat)
	{
		case StatDatFormat.HTML:
			// Close the table and the document opened by the header
			end = @"	</table>
</body>
</html>
";
			break;
		case StatDatFormat.CSV:
		case StatDatFormat.TSV:
		case StatDatFormat.None:
		default:
			// Text-based formats have no file-level footer
			break;
	}
	sw.Write(end);
}
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
|
||||||
|
#endregion // Static Methods
|
||||||
|
}
|
||||||
|
}
|
||||||
1373
SabreTools.Helper/Dats/Partials/DatFile.Writers.cs
Normal file
1373
SabreTools.Helper/Dats/Partials/DatFile.Writers.cs
Normal file
File diff suppressed because it is too large
Load Diff
@@ -109,7 +109,15 @@
|
|||||||
<ItemGroup>
|
<ItemGroup>
|
||||||
<Compile Include="Data\Constants.cs" />
|
<Compile Include="Data\Constants.cs" />
|
||||||
<Compile Include="Data\Flags.cs" />
|
<Compile Include="Data\Flags.cs" />
|
||||||
|
<Compile Include="Dats\Partials\DatFile.Bucketing.cs" />
|
||||||
|
<Compile Include="Dats\Partials\DatFile.ConvertUpdate.cs" />
|
||||||
|
<Compile Include="Dats\Partials\DatFile.DFD.cs" />
|
||||||
|
<Compile Include="Dats\Partials\DatFile.Parsers.cs" />
|
||||||
<Compile Include="Dats\Filter.cs" />
|
<Compile Include="Dats\Filter.cs" />
|
||||||
|
<Compile Include="Dats\Partials\DatFile.Rebuild.cs" />
|
||||||
|
<Compile Include="Dats\Partials\DatFile.Splitters.cs" />
|
||||||
|
<Compile Include="Dats\Partials\DatFile.Statistics.cs" />
|
||||||
|
<Compile Include="Dats\Partials\DatFile.Writers.cs" />
|
||||||
<Compile Include="External\NaturalSort\NaturalComparer.cs" />
|
<Compile Include="External\NaturalSort\NaturalComparer.cs" />
|
||||||
<Compile Include="External\NaturalSort\NaturalReversedComparer.cs" />
|
<Compile Include="External\NaturalSort\NaturalReversedComparer.cs" />
|
||||||
<Compile Include="External\OptimizedCRC.cs" />
|
<Compile Include="External\OptimizedCRC.cs" />
|
||||||
|
|||||||
Reference in New Issue
Block a user