[RombaSharp/] Space, the final frontier

This commit is contained in:
Matt Nadareski
2019-02-08 20:31:07 -08:00
parent dde93eec67
commit eee15bbc32
4 changed files with 2338 additions and 2338 deletions

File diff suppressed because it is too large Load Diff

View File

@@ -21,362 +21,362 @@ using StreamWriter = System.IO.StreamWriter;
namespace RombaSharp
{
public partial class RombaSharp
{
#region Helper methods
public partial class RombaSharp
{
#region Helper methods
/// <summary>
/// Gets all valid DATs that match in the DAT root
/// </summary>
/// <param name="inputs">List of input strings to check for, presumably file names</param>
/// <returns>Dictionary of hash/full path for each of the valid DATs</returns>
private static Dictionary<string, string> GetValidDats(List<string> inputs)
{
// Get a dictionary of filenames that actually exist in the DATRoot, logging which ones are not
List<string> datRootDats = Directory.EnumerateFiles(_dats, "*", SearchOption.AllDirectories).ToList();
List<string> lowerCaseDats = datRootDats.ConvertAll(i => Path.GetFileName(i).ToLowerInvariant());
Dictionary<string, string> foundDats = new Dictionary<string, string>();
foreach (string input in inputs)
{
if (lowerCaseDats.Contains(input.ToLowerInvariant()))
{
string fullpath = Path.GetFullPath(datRootDats[lowerCaseDats.IndexOf(input.ToLowerInvariant())]);
string sha1 = Utilities.ByteArrayToString(Utilities.GetFileInfo(fullpath).SHA1);
foundDats.Add(sha1, fullpath);
}
else
{
Globals.Logger.Warning("The file '{0}' could not be found in the DAT root", input);
}
}
/// <summary>
/// Gets all valid DATs that match in the DAT root
/// </summary>
/// <param name="inputs">List of input strings to check for, presumably file names</param>
/// <returns>Dictionary of hash/full path for each of the valid DATs</returns>
private static Dictionary<string, string> GetValidDats(List<string> inputs)
{
    // Enumerate every file under the DAT root once, and pre-compute the
    // lower-cased file names so each candidate is normalized a single time
    List<string> datRootDats = Directory.EnumerateFiles(_dats, "*", SearchOption.AllDirectories).ToList();
    List<string> lowerCaseDats = datRootDats.ConvertAll(i => Path.GetFileName(i).ToLowerInvariant());

    Dictionary<string, string> foundDats = new Dictionary<string, string>();
    foreach (string input in inputs)
    {
        // Normalize once instead of three separate ToLowerInvariant() calls
        string lowerInput = input.ToLowerInvariant();
        int index = lowerCaseDats.IndexOf(lowerInput);
        if (index > -1)
        {
            string fullpath = Path.GetFullPath(datRootDats[index]);
            string sha1 = Utilities.ByteArrayToString(Utilities.GetFileInfo(fullpath).SHA1);

            // Guard against duplicate SHA-1 keys (e.g. the same DAT named twice
            // in the inputs), which previously threw from Dictionary.Add
            if (!foundDats.ContainsKey(sha1))
            {
                foundDats.Add(sha1, fullpath);
            }
        }
        else
        {
            Globals.Logger.Warning("The file '{0}' could not be found in the DAT root", input);
        }
    }

    return foundDats;
}
return foundDats;
}
/// <summary>
/// Initialize the Romba application from XML config
/// </summary>
private static void InitializeConfiguration()
{
// Get default values if they're not written
int workers = 4,
verbosity = 1,
cores = 4,
port = 4003;
string logdir = "logs",
tmpdir = "tmp",
webdir = "web",
baddir = "bad",
dats = "dats",
db = "db",
connectionString = "";
Dictionary<string, Tuple<long, bool>> depots = new Dictionary<string, Tuple<long, bool>>();
/// <summary>
/// Initialize the Romba application from XML config
/// </summary>
private static void InitializeConfiguration()
{
// Get default values if they're not written
int workers = 4,
verbosity = 1,
cores = 4,
port = 4003;
string logdir = "logs",
tmpdir = "tmp",
webdir = "web",
baddir = "bad",
dats = "dats",
db = "db",
connectionString = "";
Dictionary<string, Tuple<long, bool>> depots = new Dictionary<string, Tuple<long, bool>>();
// Get the XML text reader for the configuration file, if possible
XmlReader xtr = Utilities.GetXmlTextReader(_config);
// Get the XML text reader for the configuration file, if possible
XmlReader xtr = Utilities.GetXmlTextReader(_config);
// Now parse the XML file for settings
if (xtr != null)
{
xtr.MoveToContent();
while (!xtr.EOF)
{
// We only want elements
if (xtr.NodeType != XmlNodeType.Element)
{
xtr.Read();
continue;
}
// Now parse the XML file for settings
if (xtr != null)
{
xtr.MoveToContent();
while (!xtr.EOF)
{
// We only want elements
if (xtr.NodeType != XmlNodeType.Element)
{
xtr.Read();
continue;
}
switch (xtr.Name)
{
case "workers":
workers = xtr.ReadElementContentAsInt();
break;
case "logdir":
logdir = xtr.ReadElementContentAsString();
break;
case "tmpdir":
tmpdir = xtr.ReadElementContentAsString();
break;
case "webdir":
webdir = xtr.ReadElementContentAsString();
break;
case "baddir":
baddir = xtr.ReadElementContentAsString();
break;
case "verbosity":
verbosity = xtr.ReadElementContentAsInt();
break;
case "cores":
cores = xtr.ReadElementContentAsInt();
break;
case "dats":
dats = xtr.ReadElementContentAsString();
break;
case "db":
db = xtr.ReadElementContentAsString();
break;
case "depot":
XmlReader subreader = xtr.ReadSubtree();
if (subreader != null)
{
string root = "";
long maxsize = -1;
bool online = true;
switch (xtr.Name)
{
case "workers":
workers = xtr.ReadElementContentAsInt();
break;
case "logdir":
logdir = xtr.ReadElementContentAsString();
break;
case "tmpdir":
tmpdir = xtr.ReadElementContentAsString();
break;
case "webdir":
webdir = xtr.ReadElementContentAsString();
break;
case "baddir":
baddir = xtr.ReadElementContentAsString();
break;
case "verbosity":
verbosity = xtr.ReadElementContentAsInt();
break;
case "cores":
cores = xtr.ReadElementContentAsInt();
break;
case "dats":
dats = xtr.ReadElementContentAsString();
break;
case "db":
db = xtr.ReadElementContentAsString();
break;
case "depot":
XmlReader subreader = xtr.ReadSubtree();
if (subreader != null)
{
string root = "";
long maxsize = -1;
bool online = true;
while (!subreader.EOF)
{
// We only want elements
if (subreader.NodeType != XmlNodeType.Element)
{
subreader.Read();
continue;
}
while (!subreader.EOF)
{
// We only want elements
if (subreader.NodeType != XmlNodeType.Element)
{
subreader.Read();
continue;
}
switch (subreader.Name)
{
case "root":
root = subreader.ReadElementContentAsString();
break;
case "maxsize":
maxsize = subreader.ReadElementContentAsLong();
break;
case "online":
online = subreader.ReadElementContentAsBoolean();
break;
default:
subreader.Read();
break;
}
}
switch (subreader.Name)
{
case "root":
root = subreader.ReadElementContentAsString();
break;
case "maxsize":
maxsize = subreader.ReadElementContentAsLong();
break;
case "online":
online = subreader.ReadElementContentAsBoolean();
break;
default:
subreader.Read();
break;
}
}
try
{
depots.Add(root, new Tuple<long, bool>(maxsize, online));
}
catch
{
// Ignore add errors
}
}
try
{
depots.Add(root, new Tuple<long, bool>(maxsize, online));
}
catch
{
// Ignore add errors
}
}
xtr.Skip();
break;
case "port":
port = xtr.ReadElementContentAsInt();
break;
default:
xtr.Read();
break;
}
}
}
xtr.Skip();
break;
case "port":
port = xtr.ReadElementContentAsInt();
break;
default:
xtr.Read();
break;
}
}
}
// Now validate the values given
if (workers < 1)
{
workers = 1;
}
if (workers > 8)
{
workers = 8;
}
if (!Directory.Exists(logdir))
{
Directory.CreateDirectory(logdir);
}
if (!Directory.Exists(tmpdir))
{
Directory.CreateDirectory(tmpdir);
}
if (!Directory.Exists(webdir))
{
Directory.CreateDirectory(webdir);
}
if (!Directory.Exists(baddir))
{
Directory.CreateDirectory(baddir);
}
if (verbosity < 0)
{
verbosity = 0;
}
if (verbosity > 3)
{
verbosity = 3;
}
if (cores < 1)
{
cores = 1;
}
if (cores > 16)
{
cores = 16;
}
if (!Directory.Exists(dats))
{
Directory.CreateDirectory(dats);
}
db = Path.GetFileNameWithoutExtension(db) + ".sqlite";
connectionString = "Data Source=" + db + ";Version = 3;";
foreach (string key in depots.Keys)
{
if (!Directory.Exists(key))
{
Directory.CreateDirectory(key);
File.CreateText(Path.Combine(key, ".romba_size"));
File.CreateText(Path.Combine(key, ".romba_size.backup"));
}
else
{
if (!File.Exists(Path.Combine(key, ".romba_size")))
{
File.CreateText(Path.Combine(key, ".romba_size"));
}
if (!File.Exists(Path.Combine(key, ".romba_size.backup")))
{
File.CreateText(Path.Combine(key, ".romba_size.backup"));
}
}
}
if (port < 0)
{
port = 0;
}
if (port > 65535)
{
port = 65535;
}
// Now validate the values given
if (workers < 1)
{
workers = 1;
}
if (workers > 8)
{
workers = 8;
}
if (!Directory.Exists(logdir))
{
Directory.CreateDirectory(logdir);
}
if (!Directory.Exists(tmpdir))
{
Directory.CreateDirectory(tmpdir);
}
if (!Directory.Exists(webdir))
{
Directory.CreateDirectory(webdir);
}
if (!Directory.Exists(baddir))
{
Directory.CreateDirectory(baddir);
}
if (verbosity < 0)
{
verbosity = 0;
}
if (verbosity > 3)
{
verbosity = 3;
}
if (cores < 1)
{
cores = 1;
}
if (cores > 16)
{
cores = 16;
}
if (!Directory.Exists(dats))
{
Directory.CreateDirectory(dats);
}
db = Path.GetFileNameWithoutExtension(db) + ".sqlite";
connectionString = "Data Source=" + db + ";Version = 3;";
foreach (string key in depots.Keys)
{
if (!Directory.Exists(key))
{
Directory.CreateDirectory(key);
File.CreateText(Path.Combine(key, ".romba_size"));
File.CreateText(Path.Combine(key, ".romba_size.backup"));
}
else
{
if (!File.Exists(Path.Combine(key, ".romba_size")))
{
File.CreateText(Path.Combine(key, ".romba_size"));
}
if (!File.Exists(Path.Combine(key, ".romba_size.backup")))
{
File.CreateText(Path.Combine(key, ".romba_size.backup"));
}
}
}
if (port < 0)
{
port = 0;
}
if (port > 65535)
{
port = 65535;
}
// Finally set all of the fields
Globals.MaxThreads = workers;
_logdir = logdir;
_tmpdir = tmpdir;
_webdir = webdir;
_baddir = baddir;
_verbosity = verbosity;
_cores = cores;
_dats = dats;
_db = db;
_connectionString = connectionString;
_depots = depots;
_port = port;
}
// Finally set all of the fields
Globals.MaxThreads = workers;
_logdir = logdir;
_tmpdir = tmpdir;
_webdir = webdir;
_baddir = baddir;
_verbosity = verbosity;
_cores = cores;
_dats = dats;
_db = db;
_connectionString = connectionString;
_depots = depots;
_port = port;
}
/// <summary>
/// Add a new DAT to the database
/// </summary>
/// <param name="dat">DatFile hash information to add</param>
/// <param name="dbc">Database connection to use</param>
private static void AddDatToDatabase(Rom dat, SqliteConnection dbc)
{
// Get the dat full path
string fullpath = Path.Combine(_dats, (dat.MachineName == "dats" ? "" : dat.MachineName), dat.Name);
/// <summary>
/// Add a new DAT to the database
/// </summary>
/// <param name="dat">DatFile hash information to add</param>
/// <param name="dbc">Database connection to use</param>
private static void AddDatToDatabase(Rom dat, SqliteConnection dbc)
{
// Get the dat full path
string fullpath = Path.Combine(_dats, (dat.MachineName == "dats" ? "" : dat.MachineName), dat.Name);
// Parse the Dat if possible
Globals.Logger.User("Adding from '" + dat.Name + "'");
DatFile tempdat = new DatFile();
tempdat.Parse(fullpath, 0, 0);
// Parse the Dat if possible
Globals.Logger.User("Adding from '" + dat.Name + "'");
DatFile tempdat = new DatFile();
tempdat.Parse(fullpath, 0, 0);
// If the Dat wasn't empty, add the information
SqliteCommand slc = new SqliteCommand();
if (tempdat.Count != 0)
{
string crcquery = "INSERT OR IGNORE INTO crc (crc) VALUES";
string md5query = "INSERT OR IGNORE INTO md5 (md5) VALUES";
string sha1query = "INSERT OR IGNORE INTO sha1 (sha1) VALUES";
string crcsha1query = "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES";
string md5sha1query = "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES";
// If the Dat wasn't empty, add the information
SqliteCommand slc = new SqliteCommand();
if (tempdat.Count != 0)
{
string crcquery = "INSERT OR IGNORE INTO crc (crc) VALUES";
string md5query = "INSERT OR IGNORE INTO md5 (md5) VALUES";
string sha1query = "INSERT OR IGNORE INTO sha1 (sha1) VALUES";
string crcsha1query = "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES";
string md5sha1query = "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES";
// Loop through the parsed entries
foreach (string romkey in tempdat.Keys)
{
foreach (DatItem datItem in tempdat[romkey])
{
Globals.Logger.Verbose("Checking and adding file '{0}'", datItem.Name);
// Loop through the parsed entries
foreach (string romkey in tempdat.Keys)
{
foreach (DatItem datItem in tempdat[romkey])
{
Globals.Logger.Verbose("Checking and adding file '{0}'", datItem.Name);
if (datItem.ItemType == ItemType.Rom)
{
Rom rom = (Rom)datItem;
if (datItem.ItemType == ItemType.Rom)
{
Rom rom = (Rom)datItem;
if (!String.IsNullOrWhiteSpace(rom.CRC))
{
crcquery += " (\"" + rom.CRC + "\"),";
}
if (!String.IsNullOrWhiteSpace(rom.MD5))
{
md5query += " (\"" + rom.MD5 + "\"),";
}
if (!String.IsNullOrWhiteSpace(rom.SHA1))
{
sha1query += " (\"" + rom.SHA1 + "\"),";
if (!String.IsNullOrWhiteSpace(rom.CRC))
{
crcquery += " (\"" + rom.CRC + "\"),";
}
if (!String.IsNullOrWhiteSpace(rom.MD5))
{
md5query += " (\"" + rom.MD5 + "\"),";
}
if (!String.IsNullOrWhiteSpace(rom.SHA1))
{
sha1query += " (\"" + rom.SHA1 + "\"),";
if (!String.IsNullOrWhiteSpace(rom.CRC))
{
crcsha1query += " (\"" + rom.CRC + "\", \"" + rom.SHA1 + "\"),";
}
if (!String.IsNullOrWhiteSpace(rom.MD5))
{
md5sha1query += " (\"" + rom.MD5 + "\", \"" + rom.SHA1 + "\"),";
}
}
}
else if (datItem.ItemType == ItemType.Disk)
{
Disk disk = (Disk)datItem;
if (!String.IsNullOrWhiteSpace(rom.CRC))
{
crcsha1query += " (\"" + rom.CRC + "\", \"" + rom.SHA1 + "\"),";
}
if (!String.IsNullOrWhiteSpace(rom.MD5))
{
md5sha1query += " (\"" + rom.MD5 + "\", \"" + rom.SHA1 + "\"),";
}
}
}
else if (datItem.ItemType == ItemType.Disk)
{
Disk disk = (Disk)datItem;
if (!String.IsNullOrWhiteSpace(disk.MD5))
{
md5query += " (\"" + disk.MD5 + "\"),";
}
if (!String.IsNullOrWhiteSpace(disk.SHA1))
{
sha1query += " (\"" + disk.SHA1 + "\"),";
if (!String.IsNullOrWhiteSpace(disk.MD5))
{
md5query += " (\"" + disk.MD5 + "\"),";
}
if (!String.IsNullOrWhiteSpace(disk.SHA1))
{
sha1query += " (\"" + disk.SHA1 + "\"),";
if (!String.IsNullOrWhiteSpace(disk.MD5))
{
md5sha1query += " (\"" + disk.MD5 + "\", \"" + disk.SHA1 + "\"),";
}
}
}
}
}
if (!String.IsNullOrWhiteSpace(disk.MD5))
{
md5sha1query += " (\"" + disk.MD5 + "\", \"" + disk.SHA1 + "\"),";
}
}
}
}
}
// Now run the queries after fixing them
if (crcquery != "INSERT OR IGNORE INTO crc (crc) VALUES")
{
slc = new SqliteCommand(crcquery.TrimEnd(','), dbc);
slc.ExecuteNonQuery();
}
if (md5query != "INSERT OR IGNORE INTO md5 (md5) VALUES")
{
slc = new SqliteCommand(md5query.TrimEnd(','), dbc);
slc.ExecuteNonQuery();
}
if (sha1query != "INSERT OR IGNORE INTO sha1 (sha1) VALUES")
{
slc = new SqliteCommand(sha1query.TrimEnd(','), dbc);
slc.ExecuteNonQuery();
}
if (crcsha1query != "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES")
{
slc = new SqliteCommand(crcsha1query.TrimEnd(','), dbc);
slc.ExecuteNonQuery();
}
if (md5sha1query != "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES")
{
slc = new SqliteCommand(md5sha1query.TrimEnd(','), dbc);
slc.ExecuteNonQuery();
}
}
// Now run the queries after fixing them
if (crcquery != "INSERT OR IGNORE INTO crc (crc) VALUES")
{
slc = new SqliteCommand(crcquery.TrimEnd(','), dbc);
slc.ExecuteNonQuery();
}
if (md5query != "INSERT OR IGNORE INTO md5 (md5) VALUES")
{
slc = new SqliteCommand(md5query.TrimEnd(','), dbc);
slc.ExecuteNonQuery();
}
if (sha1query != "INSERT OR IGNORE INTO sha1 (sha1) VALUES")
{
slc = new SqliteCommand(sha1query.TrimEnd(','), dbc);
slc.ExecuteNonQuery();
}
if (crcsha1query != "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES")
{
slc = new SqliteCommand(crcsha1query.TrimEnd(','), dbc);
slc.ExecuteNonQuery();
}
if (md5sha1query != "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES")
{
slc = new SqliteCommand(md5sha1query.TrimEnd(','), dbc);
slc.ExecuteNonQuery();
}
}
string datquery = "INSERT OR IGNORE INTO dat (hash) VALUES (\"" + dat.SHA1 + "\")";
slc = new SqliteCommand(datquery, dbc);
slc.ExecuteNonQuery();
slc.Dispose();
}
string datquery = "INSERT OR IGNORE INTO dat (hash) VALUES (\"" + dat.SHA1 + "\")";
slc = new SqliteCommand(datquery, dbc);
slc.ExecuteNonQuery();
slc.Dispose();
}
#endregion
}
#endregion
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -14,389 +14,389 @@ using Alphaleonis.Win32.Filesystem;
namespace RombaSharp
{
/// <summary>
/// Entry class for the RombaSharp application
/// </summary>
/// <remarks>
/// In the database, we want to enable "offline mode". That is, when a user does an operation
/// that needs to read from the depot themselves, if the depot folder cannot be found, the
/// user is prompted to reconnect the depot OR skip that depot entirely.
/// </remarks>
public partial class RombaSharp
{
// General settings
private static string _logdir; // Log folder location
private static string _tmpdir; // Temp folder location
private static string _webdir; // Web frontend location
private static string _baddir; // Fail-to-unpack file folder location
private static int _verbosity; // Verbosity of the output
private static int _cores; // Forced CPU cores
/// <summary>
/// Entry class for the RombaSharp application
/// </summary>
/// <remarks>
/// In the database, we want to enable "offline mode". That is, when a user does an operation
/// that needs to read from the depot themselves, if the depot folder cannot be found, the
/// user is prompted to reconnect the depot OR skip that depot entirely.
/// </remarks>
public partial class RombaSharp
{
// General settings
private static string _logdir; // Log folder location
private static string _tmpdir; // Temp folder location
private static string _webdir; // Web frontend location
private static string _baddir; // Fail-to-unpack file folder location
private static int _verbosity; // Verbosity of the output
private static int _cores; // Forced CPU cores
// DatRoot settings
private static string _dats; // DatRoot folder location
private static string _db; // Database name
// DatRoot settings
private static string _dats; // DatRoot folder location
private static string _db; // Database name
// Depot settings
private static Dictionary<string, Tuple<long, bool>> _depots; // Folder location, Max size
// Depot settings
private static Dictionary<string, Tuple<long, bool>> _depots; // Folder location, Max size
// Server settings
private static int _port; // Web server port
// Server settings
private static int _port; // Web server port
// Other private variables
private static string _config = "config.xml";
private static string _dbSchema = "rombasharp";
private static string _connectionString;
private static Help _help;
// Other private variables
private static string _config = "config.xml";
private static string _dbSchema = "rombasharp";
private static string _connectionString;
private static Help _help;
/// <summary>
/// Entry class for the RombaSharp application
/// </summary>
public static void Main(string[] args)
{
// Perform initial setup and verification
Globals.Logger = new Logger(true, "romba.log");
/// <summary>
/// Entry class for the RombaSharp application
/// </summary>
public static void Main(string[] args)
{
// Perform initial setup and verification
Globals.Logger = new Logger(true, "romba.log");
InitializeConfiguration();
DatabaseTools.EnsureDatabase(_dbSchema, _db, _connectionString);
InitializeConfiguration();
DatabaseTools.EnsureDatabase(_dbSchema, _db, _connectionString);
// Create a new Help object for this program
_help = RombaSharp.RetrieveHelp();
// Create a new Help object for this program
_help = RombaSharp.RetrieveHelp();
// Get the location of the script tag, if it exists
int scriptLocation = (new List<string>(args)).IndexOf("--script");
// Get the location of the script tag, if it exists
int scriptLocation = (new List<string>(args)).IndexOf("--script");
// If output is being redirected or we are in script mode, don't allow clear screens
if (!Console.IsOutputRedirected && scriptLocation == -1)
{
Console.Clear();
Build.PrepareConsole("RombaSharp");
}
// If output is being redirected or we are in script mode, don't allow clear screens
if (!Console.IsOutputRedirected && scriptLocation == -1)
{
Console.Clear();
Build.PrepareConsole("RombaSharp");
}
// Now we remove the script tag because it messes things up
if (scriptLocation > -1)
{
List<string> newargs = new List<string>(args);
newargs.RemoveAt(scriptLocation);
args = newargs.ToArray();
}
// Now we remove the script tag because it messes things up
if (scriptLocation > -1)
{
List<string> newargs = new List<string>(args);
newargs.RemoveAt(scriptLocation);
args = newargs.ToArray();
}
// Credits take precedence over all
if ((new List<string>(args)).Contains("--credits"))
{
_help.OutputCredits();
Globals.Logger.Close();
return;
}
// Credits take precedence over all
if ((new List<string>(args)).Contains("--credits"))
{
_help.OutputCredits();
Globals.Logger.Close();
return;
}
// If there's no arguments, show help
if (args.Length == 0)
{
_help.OutputGenericHelp();
Globals.Logger.Close();
return;
}
// If there's no arguments, show help
if (args.Length == 0)
{
_help.OutputGenericHelp();
Globals.Logger.Close();
return;
}
// User flags
bool copy = false,
fixdatOnly = false,
logOnly = false,
noDb = false,
onlyNeeded = false,
skipInitialScan = false,
useGolangZip = false;
// User flags
bool copy = false,
fixdatOnly = false,
logOnly = false,
noDb = false,
onlyNeeded = false,
skipInitialScan = false,
useGolangZip = false;
// User inputs
string backup = "",
description = "",
missingSha1s = "",
name = "",
newdat = "",
old = "",
outdat = "",
resume = "",
source = "";
int include7Zips = 1,
includeGZips = 1,
includeZips = 1,
subworkers = 0,
workers = 0;
long size = -1;
List<string> dats = new List<string>();
List<string> depot = new List<string>();
List<string> inputs = new List<string>();
// User inputs
string backup = "",
description = "",
missingSha1s = "",
name = "",
newdat = "",
old = "",
outdat = "",
resume = "",
source = "";
int include7Zips = 1,
includeGZips = 1,
includeZips = 1,
subworkers = 0,
workers = 0;
long size = -1;
List<string> dats = new List<string>();
List<string> depot = new List<string>();
List<string> inputs = new List<string>();
// Get the first argument as a feature flag
string feature = args[0];
// Get the first argument as a feature flag
string feature = args[0];
// Verify that the flag is valid
if (!_help.TopLevelFlag(feature))
{
Globals.Logger.User("'{0}' is not valid feature flag", feature);
_help.OutputIndividualFeature(feature);
Globals.Logger.Close();
return;
}
// Verify that the flag is valid
if (!_help.TopLevelFlag(feature))
{
Globals.Logger.User("'{0}' is not valid feature flag", feature);
_help.OutputIndividualFeature(feature);
Globals.Logger.Close();
return;
}
// Now get the proper name for the feature
feature = _help.GetFeatureName(feature);
// Now get the proper name for the feature
feature = _help.GetFeatureName(feature);
// If we had the help feature first
if (feature == "Help")
{
// If we had something else after help
if (args.Length > 1)
{
_help.OutputIndividualFeature(args[1]);
Globals.Logger.Close();
return;
}
// Otherwise, show generic help
else
{
_help.OutputGenericHelp();
Globals.Logger.Close();
return;
}
}
// If we had the help feature first
if (feature == "Help")
{
// If we had something else after help
if (args.Length > 1)
{
_help.OutputIndividualFeature(args[1]);
Globals.Logger.Close();
return;
}
// Otherwise, show generic help
else
{
_help.OutputGenericHelp();
Globals.Logger.Close();
return;
}
}
// Now verify that all other flags are valid
for (int i = 1; i < args.Length; i++)
{
// Verify that the current flag is proper for the feature
if (!_help[feature].ValidateInput(args[i]))
{
// Everything else is treated as a generic input
inputs.Add(args[i]);
}
}
// Now verify that all other flags are valid
for (int i = 1; i < args.Length; i++)
{
// Verify that the current flag is proper for the feature
if (!_help[feature].ValidateInput(args[i]))
{
// Everything else is treated as a generic input
inputs.Add(args[i]);
}
}
// Now loop through all inputs
Dictionary<string, Feature> features = _help.GetEnabledFeatures();
foreach (KeyValuePair<string, Feature> feat in features)
{
// Check all of the flag names and translate to arguments
switch (feat.Key)
{
#region User Flags
// Now loop through all inputs
Dictionary<string, Feature> features = _help.GetEnabledFeatures();
foreach (KeyValuePair<string, Feature> feat in features)
{
// Check all of the flag names and translate to arguments
switch (feat.Key)
{
#region User Flags
case "copy":
copy = true;
break;
case "fixdatOnly":
fixdatOnly = true;
break;
case "log-only":
logOnly = true;
break;
case "no-db":
noDb = true;
break;
case "only-needed":
onlyNeeded = true;
break;
case "skip-initial-scan":
skipInitialScan = true;
break;
case "use-golang-zip":
useGolangZip = true;
break;
case "copy":
copy = true;
break;
case "fixdatOnly":
fixdatOnly = true;
break;
case "log-only":
logOnly = true;
break;
case "no-db":
noDb = true;
break;
case "only-needed":
onlyNeeded = true;
break;
case "skip-initial-scan":
skipInitialScan = true;
break;
case "use-golang-zip":
useGolangZip = true;
break;
#endregion
#endregion
#region User Int32 Inputs
#region User Int32 Inputs
case "include-7zips":
include7Zips = (int)feat.Value.GetValue() == Int32.MinValue ? (int)feat.Value.GetValue() : 0;
break;
case "include-gzips":
includeGZips = (int)feat.Value.GetValue() == Int32.MinValue ? (int)feat.Value.GetValue() : 0;
break;
case "include-zips":
includeZips = (int)feat.Value.GetValue() == Int32.MinValue ? (int)feat.Value.GetValue() : 0;
break;
case "subworkers":
subworkers = (int)feat.Value.GetValue() == Int32.MinValue ? (int)feat.Value.GetValue() : _cores;
break;
case "workers":
workers = (int)feat.Value.GetValue() == Int32.MinValue ? (int)feat.Value.GetValue() : _cores;
break;
case "include-7zips":
include7Zips = (int)feat.Value.GetValue() == Int32.MinValue ? (int)feat.Value.GetValue() : 0;
break;
case "include-gzips":
includeGZips = (int)feat.Value.GetValue() == Int32.MinValue ? (int)feat.Value.GetValue() : 0;
break;
case "include-zips":
includeZips = (int)feat.Value.GetValue() == Int32.MinValue ? (int)feat.Value.GetValue() : 0;
break;
case "subworkers":
subworkers = (int)feat.Value.GetValue() == Int32.MinValue ? (int)feat.Value.GetValue() : _cores;
break;
case "workers":
workers = (int)feat.Value.GetValue() == Int32.MinValue ? (int)feat.Value.GetValue() : _cores;
break;
#endregion
#endregion
#region User Int64 Inputs
#region User Int64 Inputs
case "size":
size = (long)feat.Value.GetValue() == Int64.MinValue ? (long)feat.Value.GetValue() : 0;
break;
case "size":
size = (long)feat.Value.GetValue() == Int64.MinValue ? (long)feat.Value.GetValue() : 0;
break;
#endregion
#endregion
#region User List<string> Inputs
#region User List<string> Inputs
case "dats":
dats.AddRange((List<string>)feat.Value.GetValue());
break;
case "depot":
depot.AddRange((List<string>)feat.Value.GetValue());
break;
case "dats":
dats.AddRange((List<string>)feat.Value.GetValue());
break;
case "depot":
depot.AddRange((List<string>)feat.Value.GetValue());
break;
#endregion
#endregion
#region User String Inputs
#region User String Inputs
case "backup":
backup = (string)feat.Value.GetValue();
break;
case "description":
description = (string)feat.Value.GetValue();
break;
case "missingSha1s":
missingSha1s = (string)feat.Value.GetValue();
break;
case "name":
name = (string)feat.Value.GetValue();
break;
case "new":
newdat = (string)feat.Value.GetValue();
break;
case "old":
old = (string)feat.Value.GetValue();
break;
case "out":
outdat = (string)feat.Value.GetValue();
break;
case "resume":
resume = (string)feat.Value.GetValue();
break;
case "source":
source = (string)feat.Value.GetValue();
break;
case "backup":
backup = (string)feat.Value.GetValue();
break;
case "description":
description = (string)feat.Value.GetValue();
break;
case "missingSha1s":
missingSha1s = (string)feat.Value.GetValue();
break;
case "name":
name = (string)feat.Value.GetValue();
break;
case "new":
newdat = (string)feat.Value.GetValue();
break;
case "old":
old = (string)feat.Value.GetValue();
break;
case "out":
outdat = (string)feat.Value.GetValue();
break;
case "resume":
resume = (string)feat.Value.GetValue();
break;
case "source":
source = (string)feat.Value.GetValue();
break;
#endregion
}
}
#endregion
}
}
// Now take care of each mode in succession
switch(feature)
{
case "Help":
// No-op as this should be caught
break;
// Adds ROM files from the specified directories to the ROM archive
case "Archive":
VerifyInputs(inputs, feature);
InitArchive(inputs, onlyNeeded, resume, includeZips, workers, includeGZips, include7Zips, skipInitialScan, useGolangZip, noDb);
break;
// For each specified DAT file it creates the torrentzip files
case "Build":
VerifyInputs(inputs, feature);
InitBuild(inputs, outdat, fixdatOnly, copy, workers, subworkers);
break;
// Cancels current long-running job
case "Cancel":
InitCancel();
break;
// Prints dat stats
case "DatStats":
VerifyInputs(inputs, feature);
InitDatStats(inputs);
break;
// Prints db stats
case "DbStats":
InitDbStats();
break;
// Creates a DAT file with those entries that are in -new DAT
case "Diffdat":
InitDiffDat(outdat, old, newdat, name, description);
break;
// Creates a DAT file for the specified input directory and saves it to the -out filename
case "Dir2Dat":
InitDir2Dat(outdat, source, name, description);
break;
// Creates a DAT file with those entries that are in -new DAT
case "EDiffdat":
InitEDiffDat(outdat, old, newdat);
break;
// Exports db to export.csv
case "Export":
InitExport();
break;
// For each specified DAT file it creates a fix DAT
case "Fixdat":
VerifyInputs(inputs, feature);
InitFixdat(inputs, outdat, fixdatOnly, workers, subworkers);
break;
// Import a database from a formatted CSV file
case "Import":
VerifyInputs(inputs, feature);
InitImport(inputs);
break;
// For each specified hash it looks up any available information
case "Lookup":
VerifyInputs(inputs, feature);
InitLookup(inputs, size, outdat);
break;
// Prints memory stats
case "Memstats":
InitMemstats();
break;
// Merges depot
case "Merge":
VerifyInputs(inputs, feature);
InitMerge(inputs, onlyNeeded, resume, workers, skipInitialScan);
break;
// Create miss and have file
case "Miss":
VerifyInputs(inputs, feature);
InitMiss(inputs);
break;
// Shows progress of the currently running command
case "Progress":
InitProgress();
break;
// Moves DAT index entries for orphaned DATs
case "Purge Backup":
InitPurgeBackup(backup, workers, depot, dats, logOnly);
break;
// Deletes DAT index entries for orphaned DATs
case "Purge Delete":
InitPurgeDelete(workers, depot, dats, logOnly);
break;
// Refreshes the DAT index from the files in the DAT master directory tree
case "Refresh DATs":
InitRefreshDats(workers, missingSha1s);
break;
// Rescan a specific depot
case "Rescan Depots":
VerifyInputs(inputs, feature);
InitRescanDepots(inputs);
break;
// Gracefully shuts down server
case "Shutdown":
InitShutdown();
break;
// Prints version
case "Version":
InitVersion();
break;
// If nothing is set, show the help
default:
_help.OutputGenericHelp();
break;
}
// Now take care of each mode in succession
switch(feature)
{
case "Help":
// No-op as this should be caught
break;
// Adds ROM files from the specified directories to the ROM archive
case "Archive":
VerifyInputs(inputs, feature);
InitArchive(inputs, onlyNeeded, resume, includeZips, workers, includeGZips, include7Zips, skipInitialScan, useGolangZip, noDb);
break;
// For each specified DAT file it creates the torrentzip files
case "Build":
VerifyInputs(inputs, feature);
InitBuild(inputs, outdat, fixdatOnly, copy, workers, subworkers);
break;
// Cancels current long-running job
case "Cancel":
InitCancel();
break;
// Prints dat stats
case "DatStats":
VerifyInputs(inputs, feature);
InitDatStats(inputs);
break;
// Prints db stats
case "DbStats":
InitDbStats();
break;
// Creates a DAT file with those entries that are in -new DAT
case "Diffdat":
InitDiffDat(outdat, old, newdat, name, description);
break;
// Creates a DAT file for the specified input directory and saves it to the -out filename
case "Dir2Dat":
InitDir2Dat(outdat, source, name, description);
break;
// Creates a DAT file with those entries that are in -new DAT
case "EDiffdat":
InitEDiffDat(outdat, old, newdat);
break;
// Exports db to export.csv
case "Export":
InitExport();
break;
// For each specified DAT file it creates a fix DAT
case "Fixdat":
VerifyInputs(inputs, feature);
InitFixdat(inputs, outdat, fixdatOnly, workers, subworkers);
break;
// Import a database from a formatted CSV file
case "Import":
VerifyInputs(inputs, feature);
InitImport(inputs);
break;
// For each specified hash it looks up any available information
case "Lookup":
VerifyInputs(inputs, feature);
InitLookup(inputs, size, outdat);
break;
// Prints memory stats
case "Memstats":
InitMemstats();
break;
// Merges depot
case "Merge":
VerifyInputs(inputs, feature);
InitMerge(inputs, onlyNeeded, resume, workers, skipInitialScan);
break;
// Create miss and have file
case "Miss":
VerifyInputs(inputs, feature);
InitMiss(inputs);
break;
// Shows progress of the currently running command
case "Progress":
InitProgress();
break;
// Moves DAT index entries for orphaned DATs
case "Purge Backup":
InitPurgeBackup(backup, workers, depot, dats, logOnly);
break;
// Deletes DAT index entries for orphaned DATs
case "Purge Delete":
InitPurgeDelete(workers, depot, dats, logOnly);
break;
// Refreshes the DAT index from the files in the DAT master directory tree
case "Refresh DATs":
InitRefreshDats(workers, missingSha1s);
break;
// Rescan a specific depot
case "Rescan Depots":
VerifyInputs(inputs, feature);
InitRescanDepots(inputs);
break;
// Gracefully shuts down server
case "Shutdown":
InitShutdown();
break;
// Prints version
case "Version":
InitVersion();
break;
// If nothing is set, show the help
default:
_help.OutputGenericHelp();
break;
}
Globals.Logger.Close();
return;
}
Globals.Logger.Close();
return;
}
/// <summary>
/// Ensure that at least one generic input was supplied for a feature
/// </summary>
/// <param name="inputs">List of generic inputs gathered from the command line</param>
/// <param name="feature">Proper name of the feature being run, used for help output</param>
private static void VerifyInputs(List<string> inputs, string feature)
{
    // Guard clause: anything to work with means verification passes
    if (inputs.Count > 0)
        return;

    Globals.Logger.Error("This feature requires at least one input");
    _help.OutputIndividualFeature(feature);
    Environment.Exit(0);
}
}
/// <summary>
/// Ensure that at least one generic input was supplied for a feature
/// </summary>
/// <param name="inputs">List of generic inputs gathered from the command line</param>
/// <param name="feature">Proper name of the feature being run, used for help output</param>
private static void VerifyInputs(List<string> inputs, string feature)
{
// No inputs means the feature cannot run; report, show that feature's help, and exit
if (inputs.Count == 0)
{
Globals.Logger.Error("This feature requires at least one input");
_help.OutputIndividualFeature(feature);
// NOTE(review): exits with code 0 even though this is an error path — confirm intended
Environment.Exit(0);
}
}
}
}