2016-10-09 22:30:19 -07:00
|
|
|
|
using Mono.Data.Sqlite;
|
|
|
|
|
|
using SabreTools.Helper;
|
2016-09-02 13:59:25 -07:00
|
|
|
|
using System.Collections.Generic;
|
2016-09-02 14:13:44 -07:00
|
|
|
|
using System.IO;
|
2016-10-10 10:51:19 -07:00
|
|
|
|
using System.Linq;
|
2016-09-02 13:59:25 -07:00
|
|
|
|
|
|
|
|
|
|
namespace SabreTools
|
|
|
|
|
|
{
|
|
|
|
|
|
public partial class RombaSharp
|
|
|
|
|
|
{
|
|
|
|
|
|
#region Init Methods
|
|
|
|
|
|
|
2016-09-02 14:08:34 -07:00
|
|
|
|
/// <summary>
/// Wrap adding files to the depots
/// </summary>
/// <param name="inputs">List of input folders to use</param>
/// <param name="onlyNeeded">True if only files in the database and don't exist are added, false otherwise</param>
private static void InitArchive(List<string> inputs, bool onlyNeeded)
{
	// First we want to get just all directories from the inputs
	List<string> onlyDirs = new List<string>();
	foreach (string input in inputs)
	{
		if (Directory.Exists(input))
		{
			onlyDirs.Add(Path.GetFullPath(input));
		}
	}

	// Then process all of the input directories into an internal DAT
	DatFile df = new DatFile();
	foreach (string dir in onlyDirs)
	{
		df.PopulateDatFromDir(dir, false, false, false, false, true, false, false, _tmpdir, false, null, 4, _logger);
	}

	// Create an empty Dat for files that need to be rebuilt
	DatFile need = new DatFile();
	need.Files = new SortedDictionary<string, List<DatItem>>();

	// Open the database; the using block guarantees the connection is closed
	// even if a query throws (previously the connection was never disposed)
	using (SqliteConnection dbc = new SqliteConnection(_connectionString))
	{
		dbc.Open();

		// Now that we have the Dats, add the files to the database
		foreach (string key in df.Files.Keys)
		{
			List<DatItem> datItems = df.Files[key];
			foreach (Rom rom in datItems)
			{
				// Look for a matching entry that is known but not yet in a depot;
				// "null" is the literal marker used for a missing hash value
				string query = "SELECT id FROM data WHERE size=" + rom.Size
					+ " AND (crc=\"" + rom.CRC + "\" OR crc=\"null\")"
					+ " AND (md5=\"" + rom.MD5 + "\" OR md5=\"null\")"
					+ " AND (sha1=\"" + rom.SHA1 + "\" OR sha1=\"null\")"
					+ " AND indepot=0";
				using (SqliteCommand slc = new SqliteCommand(query, dbc))
				using (SqliteDataReader sldr = slc.ExecuteReader())
				{
					// If a row is returned, add the file and change the existence
					if (sldr.HasRows)
					{
						sldr.Read();
						long id = sldr.GetInt64(0);

						string squery = "UPDATE data SET indepot=1 WHERE id=" + id;
						using (SqliteCommand sslc = new SqliteCommand(squery, dbc))
						{
							sslc.ExecuteNonQuery();
						}

						// Add the rom to the files that need to be rebuilt
						AddRomToRebuild(need, key, rom);
					}

					// If it doesn't exist, and we're not adding only needed files
					else if (!onlyNeeded)
					{
						string squery = "INSERT INTO data (size, crc, md5, sha1, indepot) VALUES ("
							+ rom.Size + ","
							+ "\"" + (rom.CRC == "" ? "null" : rom.CRC) + "\","
							+ "\"" + (rom.MD5 == "" ? "null" : rom.MD5) + "\","
							+ "\"" + (rom.SHA1 == "" ? "null" : rom.SHA1) + "\","
							+ "1)";
						using (SqliteCommand sslc = new SqliteCommand(squery, dbc))
						{
							sslc.ExecuteNonQuery();
						}

						// Add the rom to the files that need to be rebuilt
						AddRomToRebuild(need, key, rom);
					}
				}
			}
		}
	}

	// Create the sorting object to use and rebuild the needed files
	ArchiveScanLevel asl = ArchiveTools.GetArchiveScanLevelFromNumbers(0, 0, 0, 0);
	SimpleSort ss = new SimpleSort(need, onlyDirs, _depots.Keys.ToList()[0], _tmpdir, false, false, false, false, false, true, true, asl, false, _logger);
	ss.StartProcessing();
}

/// <summary>
/// Add a rom to a DatFile's file dictionary under the given key,
/// creating the bucket if it does not exist yet
/// </summary>
/// <param name="datFile">DatFile whose Files dictionary receives the rom</param>
/// <param name="key">Bucket key the rom belongs to</param>
/// <param name="rom">Rom to add</param>
private static void AddRomToRebuild(DatFile datFile, string key, DatItem rom)
{
	if (datFile.Files.ContainsKey(key))
	{
		datFile.Files[key].Add(rom);
	}
	else
	{
		List<DatItem> temp = new List<DatItem>();
		temp.Add(rom);
		datFile.Files.Add(key, temp);
	}
}
|
|
|
|
|
|
|
2016-09-02 14:08:34 -07:00
|
|
|
|
/// <summary>
/// Wrap building all files from a set of DATs
/// </summary>
/// <param name="inputs">List of input DATs to rebuild from</param>
/// <param name="copy">True if files should be copied to output, false for rebuild</param>
private static void InitBuild(List<string> inputs, bool copy)
{
	// Verify the filenames
	Dictionary<string, string> foundDats = GetValidDats(inputs);

	// Create a base output folder
	if (!Directory.Exists("out"))
	{
		Directory.CreateDirectory("out");
	}

	// Open the database; using guarantees disposal even if a DAT fails to process
	using (SqliteConnection dbc = new SqliteConnection(_connectionString))
	{
		dbc.Open();

		// Now that we have the dictionary, we can loop through and output to a new folder for each
		foreach (string key in foundDats.Keys)
		{
			// Get the DAT file associated with the key
			DatFile datFile = new DatFile();
			datFile.Parse(Path.Combine(_dats, foundDats[key]), 0, 0, _logger, softlist: true);
			ArchiveScanLevel asl = ArchiveTools.GetArchiveScanLevelFromNumbers(0, 0, 0, 0);

			// Create the new output directory if it doesn't exist
			string outputFolder = Path.Combine("out", Path.GetFileNameWithoutExtension(foundDats[key]));
			if (!Directory.Exists(outputFolder))
			{
				Directory.CreateDirectory(outputFolder);
			}

			// Then get all hashes associated with this DAT
			string query = "SELECT sha1 FROM dats JOIN data ON dats.id=data.id WHERE hash=\"" + key + "\"";
			using (SqliteCommand slc = new SqliteCommand(query, dbc))
			using (SqliteDataReader sldr = slc.ExecuteReader())
			{
				if (sldr.HasRows)
				{
					while (sldr.Read())
					{
						string sha1 = sldr.GetString(0);

						// Depot layout: aa/bb/cc/dd/<sha1>.gz, keyed on the hash's leading bytes
						string filename = Path.Combine(sha1.Substring(0, 2), sha1.Substring(2, 2), sha1.Substring(4, 2), sha1.Substring(6, 2), sha1 + ".gz");

						// Find the first depot that contains the folder
						foreach (string depot in _depots.Keys)
						{
							// If the depot is online, check it
							if (_depots[depot].Item2)
							{
								if (File.Exists(Path.Combine(depot, filename)))
								{
									if (copy)
									{
										if (!Directory.Exists(Path.Combine(outputFolder, Path.GetDirectoryName(filename))))
										{
											Directory.CreateDirectory(Path.Combine(outputFolder, Path.GetDirectoryName(filename)));
										}

										// Best-effort copy; failures are deliberately ignored
										try
										{
											File.Copy(Path.Combine(depot, filename), Path.Combine(outputFolder, filename), true);
										}
										catch { }
									}
									else
									{
										ArchiveTools.ExtractArchive(Path.Combine(depot, filename), _tmpdir, asl, _logger);
									}

									// Stop at the first depot containing the file, as the comment
									// above intends (the original 'continue' kept scanning depots
									// and could process the same file from multiple depots)
									break;
								}
							}
						}
					}
				}
			}

			// Now that we have extracted everything, we rebuild to the output folder
			if (!copy)
			{
				List<string> temp = new List<string>();
				temp.Add(_tmpdir);
				SimpleSort ss = new SimpleSort(datFile, temp, outputFolder, "", false, false, false, false, true, false, false, asl, false, _logger);
				ss.StartProcessing();
			}
		}
	}
}
|
|
|
|
|
|
|
2016-09-02 14:08:34 -07:00
|
|
|
|
/// <summary>
/// Wrap finding all files that are in both the database and a new Dat
/// </summary>
/// <param name="newdat">Path to the new DAT file to compare against the database</param>
private static void InitDiffDat(string newdat)
{
	_logger.User("This feature is not yet implemented: diffdat");

	// Planned behavior: read in the DAT, then for each listed file check whether
	// it exists in the database. Files that are present get added to an output
	// DAT, files that are not get skipped, and the output DAT is written at the end.
}
|
|
|
|
|
|
|
2016-09-02 14:08:34 -07:00
|
|
|
|
/// <summary>
/// Wrap creating a Dat from a directory
/// </summary>
/// <param name="inputs">List of input folders to scan into the DAT</param>
private static void InitDir2Dat(List<string> inputs)
{
	// Derive the DAT's identifying strings from the first input folder
	string dir2datName = Path.GetFileName(inputs[0]) + " Dir2Dat";

	// Create a simple Dat output
	DatFile dir2dat = new DatFile();
	dir2dat.FileName = dir2datName;
	dir2dat.Name = dir2datName;
	dir2dat.Description = dir2datName;
	dir2dat.OutputFormat = OutputFormat.Logiqx;
	dir2dat.Files = new SortedDictionary<string, List<DatItem>>();

	// Scan each input and write out the accumulated DAT after every folder
	Logger fileLogger = new Logger(false, "");
	foreach (string dir in inputs)
	{
		dir2dat.PopulateDatFromDir(dir, false /* noMD5 */, false /* noSHA1 */, true /* bare */, false /* archivesAsFiles */,
			true /* enableGzip */, false /* addBlanks */, false /* addDate */, _tmpdir /* tempDir */, false /* copyFiles */,
			null /* headerToCheckAgainst */, 4 /* maxDegreeOfParallelism */, _logger);
		dir2dat.WriteToFile("", fileLogger);
	}
	fileLogger.Close();
}
|
|
|
|
|
|
|
2016-09-02 14:08:34 -07:00
|
|
|
|
/// <summary>
/// Wrap creating a fixdat for each Dat
/// </summary>
/// <param name="inputs">List of input DAT filenames to create fixdats for</param>
private static void InitFixdat(List<string> inputs)
{
	_logger.User("This feature is not yet implemented: fixdat");

	// Verify the filenames
	Dictionary<string, string> validDats = GetValidDats(inputs);

	// Planned behavior: for each verified DAT, look up its associated hashes by
	// the DAT's own hash, then check every rom against the folder. Missing roms
	// go into a fixDAT that is written out before moving on to the next DAT.
	// NOTE: This might share code with InitMiss
}
|
|
|
|
|
|
|
2016-09-02 14:08:34 -07:00
|
|
|
|
/// <summary>
/// Wrap looking up if hashes exist in the database
/// </summary>
/// <param name="inputs">List of input strings representing hashes to check for</param>
private static void InitLookup(List<string> inputs)
{
	// First, try to figure out what type of hash each is by length and clean it
	List<string> crc = new List<string>();
	List<string> md5 = new List<string>();
	List<string> sha1 = new List<string>();
	foreach (string input in inputs)
	{
		string temp = "";
		if (input.Length == Constants.CRCLength)
		{
			temp = Style.CleanHashData(input, Constants.CRCLength);
			if (temp != "")
			{
				crc.Add(temp);
			}
		}
		else if (input.Length == Constants.MD5Length)
		{
			temp = Style.CleanHashData(input, Constants.MD5Length);
			if (temp != "")
			{
				md5.Add(temp);
			}
		}
		else if (input.Length == Constants.SHA1Length)
		{
			temp = Style.CleanHashData(input, Constants.SHA1Length);
			if (temp != "")
			{
				sha1.Add(temp);
			}
		}
	}

	// Now, search for each of them and report the matches; the connection is
	// disposed even if a query throws
	using (SqliteConnection dbc = new SqliteConnection(_connectionString))
	{
		dbc.Open();
		LookupHashesInColumn(dbc, "crc", crc);
		LookupHashesInColumn(dbc, "md5", md5);
		LookupHashesInColumn(dbc, "sha1", sha1);
	}
}

/// <summary>
/// Look up a set of cleaned hashes in one column of the data table and log the match count for each
/// </summary>
/// <param name="dbc">Open database connection to query against</param>
/// <param name="column">Name of the hash column to search ("crc", "md5", or "sha1")</param>
/// <param name="hashes">Cleaned hash strings to look up</param>
private static void LookupHashesInColumn(SqliteConnection dbc, string column, List<string> hashes)
{
	foreach (string hash in hashes)
	{
		string query = "SELECT * FROM data WHERE " + column + "=\"" + hash + "\"";
		using (SqliteCommand slc = new SqliteCommand(query, dbc))
		using (SqliteDataReader sldr = slc.ExecuteReader())
		{
			if (sldr.HasRows)
			{
				// Count matches by reading the rows; RecordsAffected only applies to
				// INSERT/UPDATE/DELETE and returns -1 for SELECT statements, so the
				// original code always reported a bogus count here
				int matches = 0;
				while (sldr.Read())
				{
					matches++;
				}
				_logger.User("For hash '" + hash + "' there were " + matches + " matches in the database");
			}
			else
			{
				_logger.User("Hash '" + hash + "' had no matches in the database");
			}
		}
	}
}
|
|
|
|
|
|
|
2016-09-02 14:08:34 -07:00
|
|
|
|
/// <summary>
/// Wrap creating a havefile and a missfile for each Dat
/// </summary>
/// <param name="inputs">List of input DAT filenames to create have/miss files for</param>
private static void InitMiss(List<string> inputs)
{
	_logger.User("This feature is not yet implemented: miss");

	// Verify the filenames
	Dictionary<string, string> validDats = GetValidDats(inputs);

	// Planned behavior: for each verified DAT, look up its associated hashes by
	// the DAT's own hash, then check every rom against the folder. Roms that
	// exist go into the have DAT, the rest into the miss DAT; both are written
	// out once the DAT finishes processing, before moving on to the next one.
	// NOTE: This might share code with InitFixdat
}
|
|
|
|
|
|
|
|
|
|
|
|
#endregion
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|