using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Xml;

using SabreTools.Library.Data;
using SabreTools.Library.DatFiles;
using SabreTools.Library.DatItems;
using SabreTools.Library.Help;
using SabreTools.Library.IO;
using SabreTools.Library.Tools;

using Microsoft.Data.Sqlite;
2020-08-01 13:25:32 -07:00
namespace RombaSharp.Features
2016-09-02 13:59:25 -07:00
{
2020-08-01 13:25:32 -07:00
internal class BaseFeature : TopLevel
2019-02-08 20:31:07 -08:00
{
#region Private Flag features

// Flag name constants pair with the Feature properties below; each property
// builds a fresh Feature describing one command-line switch.

internal const string CopyValue = "copy";

/// <summary>Copy files to output instead of rebuilding.</summary>
internal static Feature CopyFlag => new Feature(
    CopyValue,
    "-copy",
    "Copy files to output instead of rebuilding",
    FeatureType.Flag); // Unique to RombaSharp

internal const string FixdatOnlyValue = "fixdat-only";

/// <summary>Only fix DATs; skip torrentzip generation.</summary>
internal static Feature FixdatOnlyFlag => new Feature(
    FixdatOnlyValue,
    "-fixdatOnly",
    "only fix dats and don't generate torrentzips",
    FeatureType.Flag);

internal const string LogOnlyValue = "log-only";

/// <summary>Dry run: record intended actions to the log only.</summary>
internal static Feature LogOnlyFlag => new Feature(
    LogOnlyValue,
    "-log-only",
    "Only write out actions to log",
    FeatureType.Flag);

internal const string NoDbValue = "no-db";

/// <summary>Archive into the depot without touching the DB index.</summary>
internal static Feature NoDbFlag => new Feature(
    NoDbValue,
    "-no-db",
    "archive into depot but do not touch DB index and ignore only-needed flag",
    FeatureType.Flag);

internal const string OnlyNeededValue = "only-needed";

/// <summary>Archive only ROMs referenced by indexed DAT files.</summary>
internal static Feature OnlyNeededFlag => new Feature(
    OnlyNeededValue,
    "-only-needed",
    "only archive ROM files actually referenced by DAT files from the DAT index",
    FeatureType.Flag);

internal const string SkipInitialScanValue = "skip-initial-scan";

/// <summary>Skip the up-front file scan used to size the work queue.</summary>
internal static Feature SkipInitialScanFlag => new Feature(
    SkipInitialScanValue,
    "-skip-initial-scan",
    "skip the initial scan of the files to determine amount of work",
    FeatureType.Flag);

internal const string UseGolangZipValue = "use-golang-zip";

/// <summary>Select the Go zip implementation over zlib.</summary>
internal static Feature UseGolangZipFlag => new Feature(
    UseGolangZipValue,
    "-use-golang-zip",
    "use go zip implementation instead of zlib",
    FeatureType.Flag);

#endregion
#region Private Int32 features

internal const string Include7ZipsInt32Value = "include-7zips";

/// <summary>Controls whether 7zip archives and/or their contents are depot-added.</summary>
internal static Feature Include7ZipsInt32Input => new Feature(
    Include7ZipsInt32Value,
    "-include-7zips",
    "flag value == 0 means: add 7zip files themselves into the depot in addition to their contents, flag value == 2 means add 7zip files themselves but don't add content",
    FeatureType.Int32);

internal const string IncludeGZipsInt32Value = "include-gzips";

/// <summary>Controls whether gzip archives and/or their contents are depot-added.</summary>
internal static Feature IncludeGZipsInt32Input => new Feature(
    IncludeGZipsInt32Value,
    "-include-gzips",
    "flag value == 0 means: add gzip files themselves into the depot in addition to their contents, flag value == 2 means add gzip files themselves but don't add content",
    FeatureType.Int32);

internal const string IncludeZipsInt32Value = "include-zips";

/// <summary>Controls whether zip archives and/or their contents are depot-added.</summary>
internal static Feature IncludeZipsInt32Input => new Feature(
    IncludeZipsInt32Value,
    "-include-zips",
    "flag value == 0 means: add zip files themselves into the depot in addition to their contents, flag value == 2 means add zip files themselves but don't add content",
    FeatureType.Int32);

internal const string SubworkersInt32Value = "subworkers";

/// <summary>Subworker count per worker.</summary>
internal static Feature SubworkersInt32Input => new Feature(
    SubworkersInt32Value,
    "-subworkers",
    "how many subworkers to launch for each worker",
    FeatureType.Int32); // Defaults to Workers count in config

internal const string WorkersInt32Value = "workers";

/// <summary>Worker count for the job.</summary>
internal static Feature WorkersInt32Input => new Feature(
    WorkersInt32Value,
    "-workers",
    "how many workers to launch for the job",
    FeatureType.Int32); // Defaults to Workers count in config

#endregion
#region Private Int64 features

internal const string SizeInt64Value = "size";

/// <summary>Size (in bytes) of the ROM to look up.</summary>
internal static Feature SizeInt64Input => new Feature(
    SizeInt64Value,
    "-size",
    "size of the rom to lookup",
    FeatureType.Int64);

#endregion

#region Private List < String > features

internal const string DatsListStringValue = "dats";

/// <summary>Restrict the operation to ROMs declared in these DATs.</summary>
internal static Feature DatsListStringInput => new Feature(
    DatsListStringValue,
    "-dats",
    "purge only roms declared in these dats",
    FeatureType.List);

internal const string DepotListStringValue = "depot";

/// <summary>Restrict the operation to a specific depot path.</summary>
internal static Feature DepotListStringInput => new Feature(
    DepotListStringValue,
    "-depot",
    "work only on specified depot path",
    FeatureType.List);

#endregion
#region Private String features

internal const string BackupStringValue = "backup";

/// <summary>Directory that receives backed-up files.</summary>
internal static Feature BackupStringInput => new Feature(
    BackupStringValue,
    "-backup",
    "backup directory where backup files are moved to",
    FeatureType.String);

internal const string DescriptionStringValue = "description";

/// <summary>Description field for the generated DAT header.</summary>
internal static Feature DescriptionStringInput => new Feature(
    DescriptionStringValue,
    "-description",
    "description value in DAT header",
    FeatureType.String);

internal const string MissingSha1sStringValue = "missing-sha1s";

/// <summary>Output file listing DATs that lack SHA-1 values.</summary>
internal static Feature MissingSha1sStringInput => new Feature(
    MissingSha1sStringValue,
    "-missingSha1s",
    "write paths of dats with missing sha1s into this file",
    FeatureType.String);

internal const string NameStringValue = "name";

/// <summary>Name field for the generated DAT header.</summary>
internal static Feature NameStringInput => new Feature(
    NameStringValue,
    "-name",
    "name value in DAT header",
    FeatureType.String);

internal const string NewStringValue = "new";

/// <summary>Path to the new DAT file (for diffing).</summary>
internal static Feature NewStringInput => new Feature(
    NewStringValue,
    "-new",
    "new DAT file",
    FeatureType.String);

internal const string OldStringValue = "old";

/// <summary>Path to the old DAT file (for diffing).</summary>
internal static Feature OldStringInput => new Feature(
    OldStringValue,
    "-old",
    "old DAT file",
    FeatureType.String);

internal const string OutStringValue = "out";

/// <summary>Path to the output file.</summary>
internal static Feature OutStringInput => new Feature(
    OutStringValue,
    "-out",
    "output file",
    FeatureType.String);

internal const string ResumeStringValue = "resume";

/// <summary>Resume point for a previously interrupted operation.</summary>
internal static Feature ResumeStringInput => new Feature(
    ResumeStringValue,
    "-resume",
    "resume a previously interrupted operation from the specified path",
    FeatureType.String);

internal const string SourceStringValue = "source";

/// <summary>Source directory to operate on.</summary>
internal static Feature SourceStringInput => new Feature(
    SourceStringValue,
    "-source",
    "source directory",
    FeatureType.String);

#endregion
// General settings
internal static string _logdir;        // Log folder location
internal static string _webdir;        // Web frontend location
internal static string _baddir;        // Fail-to-unpack file folder location
internal static int _verbosity;        // Verbosity of the output
internal static int _cores;            // Forced CPU cores

// DatRoot settings
internal static string _dats;          // DatRoot folder location
internal static string _db;            // Database name

// Depot settings
internal static Dictionary<string, Tuple<long, bool>> _depots; // Folder location -> (max size, online)

// Server settings
internal static int _port;             // Web server port

// Other internal variables
internal const string _config = "config.xml"; // Config file read by InitializeConfiguration
internal const string _dbSchema = "rombasharp";
internal static string _connectionString;
/// <summary>
/// Common setup for every feature: load the XML configuration and make
/// sure the backing database exists before any feature-specific work runs.
/// </summary>
/// <param name="features">Parsed command-line features (unused here)</param>
public override void ProcessFeatures(Dictionary<string, Feature> features)
{
    InitializeConfiguration();
    DatabaseTools.EnsureDatabase(_dbSchema, _db, _connectionString);
}
#region Helper methods

/// <summary>
/// Gets all valid DATs that match in the DAT root
/// </summary>
/// <param name="inputs">List of input strings to check for, presumably file names</param>
/// <returns>Dictionary of hash/full path for each of the valid DATs</returns>
internal static Dictionary<string, string> GetValidDats(List<string> inputs)
{
    // Get a dictionary of filenames that actually exist in the DATRoot, logging which ones are not
    List<string> datRootDats = Directory.EnumerateFiles(_dats, "*", SearchOption.AllDirectories).ToList();
    List<string> lowerCaseDats = datRootDats.ConvertAll(i => Path.GetFileName(i).ToLowerInvariant());
    Dictionary<string, string> foundDats = new Dictionary<string, string>();

    foreach (string input in inputs)
    {
        // FIX: hoisted — the original recomputed ToLowerInvariant() up to three times per input
        string lowerInput = input.ToLowerInvariant();
        int index = lowerCaseDats.IndexOf(lowerInput);
        if (index > -1)
        {
            string fullpath = Path.GetFullPath(datRootDats[index]);
            string sha1 = Utilities.ByteArrayToString(FileExtensions.GetInfo(fullpath).SHA1);

            // FIX: two inputs resolving to identical content share a SHA-1;
            // Dictionary.Add would throw on the duplicate key, so skip repeats
            if (!foundDats.ContainsKey(sha1))
                foundDats.Add(sha1, fullpath);
        }
        else
        {
            Globals.Logger.Warning($"The file '{input}' could not be found in the DAT root");
        }
    }

    return foundDats;
}
/// <summary>
/// Initialize the Romba application from XML config
/// </summary>
private static void InitializeConfiguration()
{
    // Get default values if they're not written
    int workers = 4,
        verbosity = 1,
        cores = 4,
        port = 4003;
    string logdir = "logs",
        tmpdir = "tmp",
        webdir = "web",
        baddir = "bad",
        dats = "dats",
        db = "db";
    Dictionary<string, Tuple<long, bool>> depots = new Dictionary<string, Tuple<long, bool>>();

    // Get the XML text reader for the configuration file, if possible.
    // FIX: the reader was never disposed; the using block releases the file handle.
    using (XmlReader xtr = _config.GetXmlTextReader())
    {
        // Now parse the XML file for settings
        if (xtr != null)
        {
            xtr.MoveToContent();
            while (!xtr.EOF)
            {
                // We only want elements
                if (xtr.NodeType != XmlNodeType.Element)
                {
                    xtr.Read();
                    continue;
                }

                switch (xtr.Name)
                {
                    case "workers":
                        workers = xtr.ReadElementContentAsInt();
                        break;
                    case "logdir":
                        logdir = xtr.ReadElementContentAsString();
                        break;
                    case "tmpdir":
                        tmpdir = xtr.ReadElementContentAsString();
                        break;
                    case "webdir":
                        webdir = xtr.ReadElementContentAsString();
                        break;
                    case "baddir":
                        baddir = xtr.ReadElementContentAsString();
                        break;
                    case "verbosity":
                        verbosity = xtr.ReadElementContentAsInt();
                        break;
                    case "cores":
                        cores = xtr.ReadElementContentAsInt();
                        break;
                    case "dats":
                        dats = xtr.ReadElementContentAsString();
                        break;
                    case "db":
                        db = xtr.ReadElementContentAsString();
                        break;
                    case "depot":
                        // A depot element contains root/maxsize/online children
                        XmlReader subreader = xtr.ReadSubtree();
                        if (subreader != null)
                        {
                            string root = string.Empty;
                            long maxsize = -1;
                            bool online = true;

                            while (!subreader.EOF)
                            {
                                // We only want elements
                                if (subreader.NodeType != XmlNodeType.Element)
                                {
                                    subreader.Read();
                                    continue;
                                }

                                switch (subreader.Name)
                                {
                                    case "root":
                                        root = subreader.ReadElementContentAsString();
                                        break;
                                    case "maxsize":
                                        maxsize = subreader.ReadElementContentAsLong();
                                        break;
                                    case "online":
                                        online = subreader.ReadElementContentAsBoolean();
                                        break;
                                    default:
                                        subreader.Read();
                                        break;
                                }
                            }

                            try
                            {
                                depots.Add(root, new Tuple<long, bool>(maxsize, online));
                            }
                            catch
                            {
                                // Ignore add errors (duplicate depot roots)
                            }
                        }

                        xtr.Skip();
                        break;
                    case "port":
                        port = xtr.ReadElementContentAsInt();
                        break;
                    default:
                        xtr.Read();
                        break;
                }
            }
        }
    }

    // Now validate the values given
    if (workers < 1)
        workers = 1;
    if (workers > 8)
        workers = 8;

    if (!Directory.Exists(logdir))
        Directory.CreateDirectory(logdir);

    if (!Directory.Exists(tmpdir))
        Directory.CreateDirectory(tmpdir);

    if (!Directory.Exists(webdir))
        Directory.CreateDirectory(webdir);

    if (!Directory.Exists(baddir))
        Directory.CreateDirectory(baddir);

    if (verbosity < 0)
        verbosity = 0;

    if (verbosity > 3)
        verbosity = 3;

    if (cores < 1)
        cores = 1;

    if (cores > 16)
        cores = 16;

    if (!Directory.Exists(dats))
        Directory.CreateDirectory(dats);

    db = $"{Path.GetFileNameWithoutExtension(db)}.sqlite";

    // FIX: Microsoft.Data.Sqlite rejects the legacy "Version = 3" keyword
    // (that keyword belongs to System.Data.SQLite) and would throw when the
    // connection string is parsed, so only "Data Source" is emitted.
    string connectionString = $"Data Source={db};";

    foreach (string key in depots.Keys)
    {
        if (!Directory.Exists(key))
        {
            Directory.CreateDirectory(key);
            // FIX: File.CreateText returns an open StreamWriter; the original
            // never disposed it, leaking the file handle. Dispose immediately —
            // we only want the empty file to exist.
            File.CreateText(Path.Combine(key, ".romba_size")).Dispose();
            File.CreateText(Path.Combine(key, ".romba_size.backup")).Dispose();
        }
        else
        {
            if (!File.Exists(Path.Combine(key, ".romba_size")))
                File.CreateText(Path.Combine(key, ".romba_size")).Dispose();

            if (!File.Exists(Path.Combine(key, ".romba_size.backup")))
                File.CreateText(Path.Combine(key, ".romba_size.backup")).Dispose();
        }
    }

    if (port < 0)
        port = 0;

    if (port > 65535)
        port = 65535;

    // Finally set all of the fields
    Globals.MaxThreads = workers;
    _logdir = logdir;
    Globals.TempDir = tmpdir;
    _webdir = webdir;
    _baddir = baddir;
    _verbosity = verbosity;
    _cores = cores;
    _dats = dats;
    _db = db;
    _connectionString = connectionString;
    _depots = depots;
    _port = port;
}
/// <summary>
/// Add a new DAT to the database
/// </summary>
/// <param name="dat">DatFile hash information to add</param>
/// <param name="dbc">Database connection to use</param>
internal static void AddDatToDatabase(Rom dat, SqliteConnection dbc)
{
    // Get the dat full path (a machine literally named "dats" maps to the DatRoot itself)
    string fullpath = Path.Combine(_dats, (dat.Machine.Name == "dats" ? string.Empty : dat.Machine.Name), dat.Name);

    // Parse the Dat if possible
    Globals.Logger.User($"Adding from '{dat.Name}'");
    DatFile tempdat = DatFile.CreateAndParse(fullpath);

    // Multi-row INSERTs are accumulated per hash table; the prefix length tells
    // us later whether any rows were actually appended.
    // NOTE: hash values are embedded rather than parameterized; they are hex
    // strings produced by the hashing code, so they cannot contain quotes.
    const string crcPrefix = "INSERT OR IGNORE INTO crc (crc) VALUES";
    const string md5Prefix = "INSERT OR IGNORE INTO md5 (md5) VALUES";
    const string sha1Prefix = "INSERT OR IGNORE INTO sha1 (sha1) VALUES";
    const string crcSha1Prefix = "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES";
    const string md5Sha1Prefix = "INSERT OR IGNORE INTO md5sha1 (md5, sha1) VALUES";

    // FIX: the original used string += inside the item loop, which is O(n^2)
    // over large DATs; StringBuilder keeps the same final SQL in O(n).
    StringBuilder crcquery = new StringBuilder(crcPrefix);
    StringBuilder md5query = new StringBuilder(md5Prefix);
    StringBuilder sha1query = new StringBuilder(sha1Prefix);
    StringBuilder crcsha1query = new StringBuilder(crcSha1Prefix);
    StringBuilder md5sha1query = new StringBuilder(md5Sha1Prefix);

    // Loop through the parsed entries
    bool hasItems = false;
    foreach (string romkey in tempdat.Items.Keys)
    {
        foreach (DatItem datItem in tempdat.Items[romkey])
        {
            Globals.Logger.Verbose($"Checking and adding file '{datItem.Name}'");

            if (datItem.ItemType == ItemType.Disk)
            {
                Disk disk = (Disk)datItem;
                hasItems = true;

                if (!string.IsNullOrWhiteSpace(disk.MD5))
                    md5query.Append($" (\"{disk.MD5}\"),");

                if (!string.IsNullOrWhiteSpace(disk.SHA1))
                {
                    sha1query.Append($" (\"{disk.SHA1}\"),");

                    if (!string.IsNullOrWhiteSpace(disk.MD5))
                        md5sha1query.Append($" (\"{disk.MD5}\", \"{disk.SHA1}\"),");
                }
            }
            else if (datItem.ItemType == ItemType.Media)
            {
                Media media = (Media)datItem;
                hasItems = true;

                if (!string.IsNullOrWhiteSpace(media.MD5))
                    md5query.Append($" (\"{media.MD5}\"),");

                if (!string.IsNullOrWhiteSpace(media.SHA1))
                {
                    sha1query.Append($" (\"{media.SHA1}\"),");

                    if (!string.IsNullOrWhiteSpace(media.MD5))
                        md5sha1query.Append($" (\"{media.MD5}\", \"{media.SHA1}\"),");
                }
            }
            else if (datItem.ItemType == ItemType.Rom)
            {
                Rom rom = (Rom)datItem;
                hasItems = true;

                if (!string.IsNullOrWhiteSpace(rom.CRC))
                    crcquery.Append($" (\"{rom.CRC}\"),");

                if (!string.IsNullOrWhiteSpace(rom.MD5))
                    md5query.Append($" (\"{rom.MD5}\"),");

                if (!string.IsNullOrWhiteSpace(rom.SHA1))
                {
                    sha1query.Append($" (\"{rom.SHA1}\"),");

                    if (!string.IsNullOrWhiteSpace(rom.CRC))
                        crcsha1query.Append($" (\"{rom.CRC}\", \"{rom.SHA1}\"),");

                    if (!string.IsNullOrWhiteSpace(rom.MD5))
                        md5sha1query.Append($" (\"{rom.MD5}\", \"{rom.SHA1}\"),");
                }
            }
        }
    }

    // Now run the queries after fixing them
    ExecuteBatchQuery(crcquery, crcPrefix, dbc);
    ExecuteBatchQuery(md5query, md5Prefix, dbc);
    ExecuteBatchQuery(sha1query, sha1Prefix, dbc);
    ExecuteBatchQuery(crcsha1query, crcSha1Prefix, dbc);
    ExecuteBatchQuery(md5sha1query, md5Sha1Prefix, dbc);

    // Only add the DAT if it's non-empty
    if (hasItems)
    {
        string datquery = $"INSERT OR IGNORE INTO dat (hash) VALUES (\"{dat.SHA1}\")";
        using (SqliteCommand slc = new SqliteCommand(datquery, dbc))
        {
            slc.ExecuteNonQuery();
        }
    }
}

/// <summary>
/// Execute an accumulated multi-row INSERT if any rows were appended past the prefix
/// </summary>
/// <param name="query">Accumulated query text, ending in a trailing comma when non-empty</param>
/// <param name="prefix">The bare INSERT prefix; equal length means nothing to insert</param>
/// <param name="dbc">Database connection to use</param>
private static void ExecuteBatchQuery(StringBuilder query, string prefix, SqliteConnection dbc)
{
    if (query.Length == prefix.Length)
        return;

    // FIX: the original reused one SqliteCommand variable and only disposed the
    // last one assigned; each command is now disposed deterministically.
    using (SqliteCommand slc = new SqliteCommand(query.ToString().TrimEnd(','), dbc))
    {
        slc.ExecuteNonQuery();
    }
}

#endregion
}
2016-09-02 13:59:25 -07:00
}