Mirror of https://github.com/claunia/SabreTools.git
Reduce Linq usage across entire project
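The diff below applies the same handful of rewrites over and over: `new List<T>()` and `.ToList()`/`.ToArray()` calls become collection expressions, `.Any()` emptiness checks become `Count` comparisons, and `.First()`/`.Keys.ToList()[0]` become plain indexing. A minimal sketch of the before/after shapes (illustrative names only, not code from the repository):

    using System.Collections.Generic;

    static class LinqReductionSketch
    {
        // Before: if (names.Any()) value = names.First();
        // After:  no System.Linq needed for these shapes.
        static string? FirstOrNull(List<string> names)
            => names.Count > 0 ? names[0] : null;

        // Before: new List<string>() and someEnumerable.ToList()
        // After:  collection expressions (C# 12) build the same lists.
        static List<string> Materialize(IEnumerable<string> source)
        {
            List<string> empty = [];
            List<string> copy = [.. source];
            return copy.Count > 0 ? copy : empty;
        }
    }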
@@ -1,6 +1,5 @@
 using System.Collections.Generic;
 using System.IO;
-using System.Linq;
 using Microsoft.Data.Sqlite;
 using SabreTools.Core;
 using SabreTools.DatFiles;
@@ -61,7 +60,7 @@ have a current entry in the DAT index.";
 bool onlyNeeded = GetBoolean(features, OnlyNeededValue);

 // First we want to get just all directories from the inputs
-List<string> onlyDirs = new List<string>();
+List<string> onlyDirs = [];
 foreach (string input in Inputs)
 {
 if (Directory.Exists(input))
@@ -77,10 +76,15 @@ have a current entry in the DAT index.";
 }

 // Create an empty Dat for files that need to be rebuilt
-DatFile need = DatFile.Create();
+var need = DatFile.Create();

+// Get the first depot as output
+var depotKeyEnumerator = _depots.Keys.GetEnumerator();
+depotKeyEnumerator.MoveNext();
+string firstDepot = depotKeyEnumerator.Current;
+
 // Open the database connection
-SqliteConnection dbc = new SqliteConnection(_connectionString);
+var dbc = new SqliteConnection(_connectionString);
 dbc.Open();

 // Now that we have the Dats, add the files to the database
@@ -98,34 +102,38 @@ have a current entry in the DAT index.";

 foreach (Rom rom in datItems)
 {
+string? crc = rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.CRCKey);
+string? md5 = rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.MD5Key);
+string? sha1 = rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.SHA1Key);
+
 // If we care about if the file exists, check the databse first
 if (onlyNeeded && !noDb)
 {
 string query = "SELECT * FROM crcsha1 JOIN md5sha1 ON crcsha1.sha1=md5sha1.sha1"
-+ $" WHERE crcsha1.crc=\"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.CRCKey)}\""
++ $" WHERE crcsha1.crc=\"{crc}\""
-+ $" OR md5sha1.md5=\"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.MD5Key)}\""
++ $" OR md5sha1.md5=\"{md5}\""
-+ $" OR md5sha1.sha1=\"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.SHA1Key)}\"";
++ $" OR md5sha1.sha1=\"{sha1}\"";
-SqliteCommand slc = new SqliteCommand(query, dbc);
+var slc = new SqliteCommand(query, dbc);
 SqliteDataReader sldr = slc.ExecuteReader();

 if (sldr.HasRows)
 {
 // Add to the queries
-if (!string.IsNullOrWhiteSpace(rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.CRCKey)))
+if (!string.IsNullOrWhiteSpace(crc))
-crcquery += $" (\"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.CRCKey)}\"),";
+crcquery += $" (\"{crc}\"),";

-if (!string.IsNullOrWhiteSpace(rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.MD5Key)))
+if (!string.IsNullOrWhiteSpace(md5))
-md5query += $" (\"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.MD5Key)}\"),";
+md5query += $" (\"{md5}\"),";

-if (!string.IsNullOrWhiteSpace(rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.SHA1Key)))
+if (!string.IsNullOrWhiteSpace(sha1))
 {
-sha1query += $" (\"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.SHA1Key)}\", \"{_depots!.Keys.ToList()[0]}\"),";
+sha1query += $" (\"{sha1}\", \"{firstDepot}\"),";

-if (!string.IsNullOrWhiteSpace(rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.CRCKey)))
+if (!string.IsNullOrWhiteSpace(crc))
-crcsha1query += $" (\"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.CRCKey)}\", \"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.SHA1Key)}\"),";
+crcsha1query += $" (\"{crc}\", \"{sha1}\"),";

-if (!string.IsNullOrWhiteSpace(rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.MD5Key)))
+if (!string.IsNullOrWhiteSpace(md5))
-md5sha1query += $" (\"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.MD5Key)}\", \"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.SHA1Key)}\"),";
+md5sha1query += $" (\"{md5}\", \"{sha1}\"),";
 }

 // Add to the Dat
@@ -138,21 +146,21 @@ have a current entry in the DAT index.";
 // Add to the queries
 if (!noDb)
 {
-if (!string.IsNullOrWhiteSpace(rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.CRCKey)))
+if (!string.IsNullOrWhiteSpace(crc))
-crcquery += $" (\"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.CRCKey)}\"),";
+crcquery += $" (\"{crc}\"),";

-if (!string.IsNullOrWhiteSpace(rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.MD5Key)))
+if (!string.IsNullOrWhiteSpace(md5))
-md5query += $" (\"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.MD5Key)}\"),";
+md5query += $" (\"{md5}\"),";

-if (!string.IsNullOrWhiteSpace(rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.SHA1Key)))
+if (!string.IsNullOrWhiteSpace(sha1))
 {
-sha1query += $" (\"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.SHA1Key)}\", \"{_depots!.Keys.ToList()[0]}\"),";
+sha1query += $" (\"{sha1}\", \"{firstDepot}\"),";

-if (!string.IsNullOrWhiteSpace(rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.CRCKey)))
+if (!string.IsNullOrWhiteSpace(crc))
-crcsha1query += $" (\"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.CRCKey)}\", \"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.SHA1Key)}\"),";
+crcsha1query += $" (\"{crc}\", \"{sha1}\"),";

-if (!string.IsNullOrWhiteSpace(rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.MD5Key)))
+if (!string.IsNullOrWhiteSpace(md5))
-md5sha1query += $" (\"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.MD5Key)}\", \"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.SHA1Key)}\"),";
+md5sha1query += $" (\"{md5}\", \"{sha1}\"),";
 }
 }

@@ -165,28 +173,28 @@ have a current entry in the DAT index.";
 // Now run the queries, if they're populated
 if (crcquery != "INSERT OR IGNORE INTO crc (crc) VALUES")
 {
-SqliteCommand slc = new SqliteCommand(crcquery.TrimEnd(','), dbc);
+var slc = new SqliteCommand(crcquery.TrimEnd(','), dbc);
 slc.ExecuteNonQuery();
 slc.Dispose();
 }

 if (md5query != "INSERT OR IGNORE INTO md5 (md5) VALUES")
 {
-SqliteCommand slc = new SqliteCommand(md5query.TrimEnd(','), dbc);
+var slc = new SqliteCommand(md5query.TrimEnd(','), dbc);
 slc.ExecuteNonQuery();
 slc.Dispose();
 }

 if (sha1query != "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES")
 {
-SqliteCommand slc = new SqliteCommand(sha1query.TrimEnd(','), dbc);
+var slc = new SqliteCommand(sha1query.TrimEnd(','), dbc);
 slc.ExecuteNonQuery();
 slc.Dispose();
 }

 if (crcsha1query != "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES")
 {
-SqliteCommand slc = new SqliteCommand(crcsha1query.TrimEnd(','), dbc);
+var slc = new SqliteCommand(crcsha1query.TrimEnd(','), dbc);
 slc.ExecuteNonQuery();
 slc.Dispose();
 }
@@ -202,7 +210,7 @@ have a current entry in the DAT index.";
 Rebuilder.RebuildGeneric(
 need,
 onlyDirs,
-outDir: _depots!.Keys.ToList()[0],
+outDir: firstDepot,
 outputFormat: OutputFormat.TorrentGzipRomba,
 asFiles: TreatAsFile.NonArchive);

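The `firstDepot` lines added above replace the repeated `_depots!.Keys.ToList()[0]` lookups. A minimal sketch of that pattern in isolation (assuming, as the original code does, that the dictionary has at least one entry):

    using System;
    using System.Collections.Generic;

    static class FirstKeySketch
    {
        static string FirstKey(Dictionary<string, Tuple<long, bool>> depots)
        {
            // Walk the key collection directly instead of copying it into a list.
            var keyEnumerator = depots.Keys.GetEnumerator();
            keyEnumerator.MoveNext();      // positions Current on the first key
            return keyEnumerator.Current;
        }
    }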
@@ -1,7 +1,6 @@
 using System;
 using System.Collections.Generic;
 using System.IO;
-using System.Linq;
 using System.Xml;
 using System.Xml.Schema;
 using Microsoft.Data.Sqlite;
@@ -423,7 +422,7 @@ Possible values are: Verbose, User, Warning, Error");
 internal static string? _db; // Database name

 // Depot settings
-internal static Dictionary<string, Tuple<long, bool>>? _depots; // Folder location, Max size
+internal static Dictionary<string, Tuple<long, bool>> _depots = []; // Folder location, Max size

 // Server settings
 internal static int _port; // Web server port
@@ -552,9 +551,9 @@ CREATE TABLE IF NOT EXISTS dat (
 {
 // Get a dictionary of filenames that actually exist in the DATRoot, logging which ones are not
 #if NET20 || NET35
-List<string> datRootDats = Directory.GetFiles(_dats!, "*").ToList();
+List<string> datRootDats = [.. Directory.GetFiles(_dats!, "*")];
 #else
-List<string> datRootDats = Directory.EnumerateFiles(_dats!, "*", SearchOption.AllDirectories).ToList();
+List<string> datRootDats = [.. Directory.EnumerateFiles(_dats!, "*", SearchOption.AllDirectories)];
 #endif
 List<string> lowerCaseDats = datRootDats.ConvertAll(i => Path.GetFileName(i).ToLowerInvariant());
 Dictionary<string, string> foundDats = [];
@@ -579,7 +578,7 @@ CREATE TABLE IF NOT EXISTS dat (
 /// <summary>
 /// Initialize the Romba application from XML config
 /// </summary>
-private void InitializeConfiguration()
+private static void InitializeConfiguration()
 {
 // Get default values if they're not written
 int workers = 4,
@@ -592,7 +591,7 @@ CREATE TABLE IF NOT EXISTS dat (
 baddir = "bad",
 dats = "dats",
 db = "db";
-Dictionary<string, Tuple<long, bool>> depots = new Dictionary<string, Tuple<long, bool>>();
+Dictionary<string, Tuple<long, bool>> depots = [];

 // Get the XML text reader for the configuration file, if possible
 XmlReader xtr = XmlReader.Create(_config, new XmlReaderSettings

@@ -65,7 +65,10 @@ structure according to the original DAT master directory tree structure.";
 outputFolder.Ensure(create: true);

 // Get all online depots
-List<string> onlineDepots = _depots!.Where(d => d.Value.Item2).Select(d => d.Key).ToList();
+List<string> onlineDepots = _depots
+.Where(d => d.Value.Item2)
+.Select(d => d.Key)
+.ToList();

 // Now scan all of those depots and rebuild
 Rebuilder.RebuildDepot(

@@ -45,7 +45,7 @@ namespace RombaSharp.Features
 // Now, for each of these files, attempt to add the data found inside
 foreach (string input in Inputs)
 {
-StreamReader sr = new StreamReader(File.OpenRead(input));
+var sr = new StreamReader(File.OpenRead(input));

 // The first line should be the hash header
 string? line = sr.ReadLine();

@@ -51,9 +51,9 @@ namespace RombaSharp.Features
 foreach (string input in Inputs)
 {
 #if NET20 || NET35
-List<string> depotFiles = Directory.GetFiles(input, "*.gz").ToList();
+List<string> depotFiles = [.. Directory.GetFiles(input, "*.gz")];
 #else
-List<string> depotFiles = Directory.EnumerateFiles(input, "*.gz", SearchOption.AllDirectories).ToList();
+List<string> depotFiles = [.. Directory.EnumerateFiles(input, "*.gz", SearchOption.AllDirectories)];
 #endif

 // If we are copying all that is possible but we want to scan first

@@ -40,7 +40,7 @@ namespace RombaSharp.Features
 foreach (string depotname in Inputs)
 {
 // Check that it's a valid depot first
-if (!_depots!.ContainsKey(depotname))
+if (!_depots.ContainsKey(depotname))
 {
 logger.User($"'{depotname}' is not a recognized depot. Please add it to your configuration file and try again");
 return false;

@@ -1,6 +1,5 @@
 using System;
 using System.Collections.Generic;
-using System.Linq;
 using SabreTools.Core.Tools;
 using SabreTools.Models.Metadata;

@@ -59,7 +58,9 @@ namespace SabreTools.Core.Filter
 foreach (var filter in this.Filters)
 {
 // If the filter isn't for this object type, skip
-if (filter.Key[0] != itemName || (filter.Key[0] == "item" && TypeHelper.GetDatItemTypeNames().Contains(itemName)))
+if (filter.Key[0] != itemName)
+continue;
+else if (filter.Key[0] == "item" && Array.IndexOf(TypeHelper.GetDatItemTypeNames(), itemName) > -1)
 continue;

 // If we don't get a match, it's a failure

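The FilterRunner hunk above swaps the `Contains` extension for `Array.IndexOf`, which lives on `System.Array` rather than in LINQ. A small sketch of just that membership test (hypothetical method name):

    using System;

    static class ArrayMembershipSketch
    {
        static bool IsKnownTypeName(string[] typeNames, string itemName)
        {
            // Equivalent to typeNames.Contains(itemName) without System.Linq:
            // IndexOf returns -1 when the value is not present.
            return Array.IndexOf(typeNames, itemName) > -1;
        }
    }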
@@ -1,5 +1,4 @@
 using System;
-using System.Collections.Generic;
 using System.IO;
 using System.Linq;
 using System.Text.RegularExpressions;
@@ -123,7 +122,6 @@ namespace SabreTools.Core.Tools
 if (string.IsNullOrEmpty(input))
 return input;

-List<char> invalidPath = [.. Path.GetInvalidPathChars()];
 foreach (char invalid in Path.GetInvalidPathChars())
 {
 input = input!.Replace(invalid.ToString(), string.Empty);

@@ -20,7 +20,7 @@ namespace SabreTools.DatFiles
 public void ConvertMetadata(Models.Metadata.MetadataFile? item, string filename, int indexId, bool keep, bool statsOnly)
 {
 // If the metadata file is invalid, we can't do anything
-if (item == null || !item.Any())
+if (item == null || item.Count == 0)
 return;

 // Create an internal source and add to the dictionary
@@ -46,7 +46,7 @@ namespace SabreTools.DatFiles
 private void ConvertHeader(Models.Metadata.Header? item, bool keep)
 {
 // If the header is invalid, we can't do anything
-if (item == null || !item.Any())
+if (item == null || item.Count == 0)
 return;

 // Create an internal header
@@ -343,7 +343,7 @@ namespace SabreTools.DatFiles
 private void ConvertMachine(Models.Metadata.Machine? item, Source source, long sourceIndex, bool statsOnly)
 {
 // If the machine is invalid, we can't do anything
-if (item == null || !item.Any())
+if (item == null || item.Count == 0)
 return;

 // Create an internal machine

@@ -99,7 +99,7 @@ namespace SabreTools.DatFiles
 private void RemoveHeaderFields(List<string> headerFieldNames)
 {
 // If we have an invalid input, return
-if (Header == null || !headerFieldNames.Any())
+if (Header == null || headerFieldNames.Count == 0)
 return;

 foreach (var fieldName in headerFieldNames)
@@ -114,7 +114,7 @@ namespace SabreTools.DatFiles
 private static void RemoveFields(Machine? machine, List<string> machineFieldNames)
 {
 // If we have an invalid input, return
-if (machine == null || !machineFieldNames.Any())
+if (machine == null || machineFieldNames.Count == 0)
 return;

 foreach (var fieldName in machineFieldNames)
@@ -136,11 +136,11 @@ namespace SabreTools.DatFiles

 // Handle Machine fields
 var machine = datItem.GetFieldValue<Machine>(DatItem.MachineKey);
-if (machineFieldNames.Any() && machine != null)
+if (machineFieldNames.Count > 0 && machine != null)
 RemoveFields(machine, machineFieldNames);

 // If there are no field names, return
-if (itemFieldNames == null || !itemFieldNames.Any())
+if (itemFieldNames == null || itemFieldNames.Count == 0)
 return;

 // If there are no field names for this type or generic, return

@@ -504,7 +504,8 @@ namespace SabreTools.DatFiles
 if (dataAreaName != null)
 {
 // Get existing data areas as a list
-var dataAreas = partItems[partName].Read<Models.Metadata.DataArea[]>(Models.Metadata.Part.DataAreaKey)?.ToList() ?? [];
+var dataAreasArr = partItems[partName].Read<Models.Metadata.DataArea[]>(Models.Metadata.Part.DataAreaKey) ?? [];
+var dataAreas = new List<Models.Metadata.DataArea>(dataAreasArr);

 // Find the existing disk area to append to, otherwise create a new disk area
 int dataAreaIndex = dataAreas.FindIndex(da => da.ReadString(Models.Metadata.DataArea.NameKey) == dataAreaName);
@@ -526,7 +527,8 @@ namespace SabreTools.DatFiles
 ClearEmptyKeys(aggregateDataArea);

 // Get existing roms as a list
-var roms = aggregateDataArea.Read<Models.Metadata.Rom[]>(Models.Metadata.DataArea.RomKey)?.ToList() ?? [];
+var romsArr = aggregateDataArea.Read<Models.Metadata.Rom[]>(Models.Metadata.DataArea.RomKey) ?? [];
+var roms = new List<Models.Metadata.Rom>(romsArr);

 // Add the rom to the data area
 roms.Add(romItem);
@@ -558,8 +560,9 @@ namespace SabreTools.DatFiles
 string? diskAreaName = diskArea.ReadString(Models.Metadata.DiskArea.NameKey);
 if (diskAreaName != null)
 {
-// Get existing data areas as a list
+// Get existing disk areas as a list
-var diskAreas = partItems[partName].Read<Models.Metadata.DiskArea[]>(Models.Metadata.Part.DiskAreaKey)?.ToList() ?? [];
+var diskAreasArr = partItems[partName].Read<Models.Metadata.DiskArea[]>(Models.Metadata.Part.DiskAreaKey) ?? [];
+var diskAreas = new List<Models.Metadata.DiskArea>(diskAreasArr);

 // Find the existing disk area to append to, otherwise create a new disk area
 int diskAreaIndex = diskAreas.FindIndex(da => da.ReadString(Models.Metadata.DiskArea.NameKey) == diskAreaName);
@@ -578,7 +581,8 @@ namespace SabreTools.DatFiles
 ClearEmptyKeys(aggregateDiskArea);

 // Get existing disks as a list
-var disks = aggregateDiskArea.Read<Models.Metadata.Disk[]>(Models.Metadata.DiskArea.DiskKey)?.ToList() ?? [];
+var disksArr = aggregateDiskArea.Read<Models.Metadata.Disk[]>(Models.Metadata.DiskArea.DiskKey) ?? [];
+var disks = new List<Models.Metadata.Disk>(disksArr);

 // Add the disk to the data area
 disks.Add(diskItem);
@@ -601,7 +605,8 @@ namespace SabreTools.DatFiles
 if (datItem is Models.Metadata.DipSwitch dipSwitchItem)
 {
 // Get existing dipswitches as a list
-var dipSwitches = partItems[partName].Read<Models.Metadata.DipSwitch[]>(Models.Metadata.Part.DipSwitchKey)?.ToList() ?? [];
+var dipSwitchesArr = partItems[partName].Read<Models.Metadata.DipSwitch[]>(Models.Metadata.Part.DipSwitchKey) ?? [];
+var dipSwitches = new List<Models.Metadata.DipSwitch>(dipSwitchesArr);

 // Clear any empty fields
 ClearEmptyKeys(dipSwitchItem);
@@ -617,7 +622,8 @@ namespace SabreTools.DatFiles
 else if (datItem is Models.Metadata.Feature featureItem)
 {
 // Get existing features as a list
-var features = partItems[partName].Read<Models.Metadata.Feature[]>(Models.Metadata.Part.FeatureKey)?.ToList() ?? [];
+var featuresArr = partItems[partName].Read<Models.Metadata.Feature[]>(Models.Metadata.Part.FeatureKey) ?? [];
+var features = new List<Models.Metadata.Feature>(featuresArr);

 // Clear any empty fields
 ClearEmptyKeys(featureItem);
@@ -921,7 +927,8 @@ namespace SabreTools.DatFiles
 if (dataAreaName != null)
 {
 // Get existing data areas as a list
-var dataAreas = partItems[partName].Read<Models.Metadata.DataArea[]>(Models.Metadata.Part.DataAreaKey)?.ToList() ?? [];
+var dataAreasArr = partItems[partName].Read<Models.Metadata.DataArea[]>(Models.Metadata.Part.DataAreaKey) ?? [];
+var dataAreas = new List<Models.Metadata.DataArea>(dataAreasArr);

 // Find the existing disk area to append to, otherwise create a new disk area
 int dataAreaIndex = dataAreas.FindIndex(da => da.ReadString(Models.Metadata.DataArea.NameKey) == dataAreaName);
@@ -943,7 +950,8 @@ namespace SabreTools.DatFiles
 ClearEmptyKeys(aggregateDataArea);

 // Get existing roms as a list
-var roms = aggregateDataArea.Read<Models.Metadata.Rom[]>(Models.Metadata.DataArea.RomKey)?.ToList() ?? [];
+var romsArr = aggregateDataArea.Read<Models.Metadata.Rom[]>(Models.Metadata.DataArea.RomKey) ?? [];
+var roms = new List<Models.Metadata.Rom>(romsArr);

 // Add the rom to the data area
 roms.Add(romItem);
@@ -975,8 +983,9 @@ namespace SabreTools.DatFiles
 string? diskAreaName = diskArea.ReadString(Models.Metadata.DiskArea.NameKey);
 if (diskAreaName != null)
 {
-// Get existing data areas as a list
+// Get existing disk areas as a list
-var diskAreas = partItems[partName].Read<Models.Metadata.DiskArea[]>(Models.Metadata.Part.DiskAreaKey)?.ToList() ?? [];
+var diskAreasArr = partItems[partName].Read<Models.Metadata.DiskArea[]>(Models.Metadata.Part.DiskAreaKey) ?? [];
+var diskAreas = new List<Models.Metadata.DiskArea>(diskAreasArr);

 // Find the existing disk area to append to, otherwise create a new disk area
 int diskAreaIndex = diskAreas.FindIndex(da => da.ReadString(Models.Metadata.DiskArea.NameKey) == diskAreaName);
@@ -995,7 +1004,8 @@ namespace SabreTools.DatFiles
 ClearEmptyKeys(aggregateDiskArea);

 // Get existing disks as a list
-var disks = aggregateDiskArea.Read<Models.Metadata.Disk[]>(Models.Metadata.DiskArea.DiskKey)?.ToList() ?? [];
+var disksArr = aggregateDiskArea.Read<Models.Metadata.Disk[]>(Models.Metadata.DiskArea.DiskKey) ?? [];
+var disks = new List<Models.Metadata.Disk>(disksArr);

 // Add the disk to the data area
 disks.Add(diskItem);
@@ -1018,7 +1028,8 @@ namespace SabreTools.DatFiles
 if (datItem is Models.Metadata.DipSwitch dipSwitchItem)
 {
 // Get existing dipswitches as a list
-var dipSwitches = partItems[partName].Read<Models.Metadata.DipSwitch[]>(Models.Metadata.Part.DipSwitchKey)?.ToList() ?? [];
+var dipSwitchesArr = partItems[partName].Read<Models.Metadata.DipSwitch[]>(Models.Metadata.Part.DipSwitchKey) ?? [];
+var dipSwitches = new List<Models.Metadata.DipSwitch>(dipSwitchesArr);

 // Clear any empty fields
 ClearEmptyKeys(dipSwitchItem);
@@ -1034,7 +1045,8 @@ namespace SabreTools.DatFiles
 else if (datItem is Models.Metadata.Feature featureItem)
 {
 // Get existing features as a list
-var features = partItems[partName].Read<Models.Metadata.Feature[]>(Models.Metadata.Part.FeatureKey)?.ToList() ?? [];
+var featuresArr = partItems[partName].Read<Models.Metadata.Feature[]>(Models.Metadata.Part.FeatureKey) ?? [];
+var features = new List<Models.Metadata.Feature>(featuresArr);

 // Clear any empty fields
 ClearEmptyKeys(featureItem);
@@ -1421,7 +1433,7 @@ namespace SabreTools.DatFiles
 /// </summary>
 private static void ClearEmptyKeys(Models.Metadata.DictionaryBase obj)
 {
-string[] fieldNames = obj.Keys.ToArray();
+string[] fieldNames = [.. obj.Keys];
 foreach (string fieldName in fieldNames)
 {
 if (obj[fieldName] == null)

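The repeated DataArea/DiskArea/DipSwitch/Feature hunks above all follow one pattern: `Read<T[]>(...)` may return null, so the old code used `?.ToList() ?? []` and the new code falls back to an empty array and copies it through the `List<T>(IEnumerable<T>)` constructor. A generic sketch of that shape (illustrative helper, not repository code):

    using System.Collections.Generic;

    static class ArrayToListSketch
    {
        static List<T> AsList<T>(T[]? maybeNull)
        {
            // Fall back to an empty array, then copy into a mutable list.
            T[] arr = maybeNull ?? [];
            return new List<T>(arr);
        }
    }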
@@ -153,8 +153,13 @@ namespace SabreTools.DatFiles
 else if (string.IsNullOrEmpty(name) && string.IsNullOrEmpty(description))
 {
 string[] splitpath = path.TrimEnd(Path.DirectorySeparatorChar).Split(Path.DirectorySeparatorChar);
-Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, splitpath.Last());
-Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, splitpath.Last() + (bare ? string.Empty : $" ({date})"));
+#if NETFRAMEWORK
+Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, splitpath[splitpath.Length - 1]);
+Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, splitpath[splitpath.Length - 1] + (bare ? string.Empty : $" ({date})"));
+#else
+Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, splitpath[^1]);
+Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, splitpath[^1] + (bare ? string.Empty : $" ({date})"));
+#endif
 }
 }

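The hunk above drops `splitpath.Last()` in favour of direct indexing. The `^1` index-from-end form needs the `System.Index` support that .NET Framework targets lack, which is why the change is split on `NETFRAMEWORK`. A minimal sketch of the two branches (hypothetical helper):

    static class LastSegmentSketch
    {
        static string LastSegment(string[] splitpath)
        {
    #if NETFRAMEWORK
            // Classic indexing works on every target.
            return splitpath[splitpath.Length - 1];
    #else
            // Index-from-end operator, available on modern targets.
            return splitpath[^1];
    #endif
        }
    }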
@@ -1,7 +1,6 @@
 using System;
 using System.Collections.Generic;
 using System.IO;
-using System.Linq;
 using System.Text;
 using SabreTools.Core;
 using SabreTools.DatItems;
@@ -126,7 +125,7 @@ namespace SabreTools.DatFiles.Formats
 continue;

 // Resolve the names in the block
-items = DatItem.ResolveNamesDB(items.ToConcurrentList()).ToArray();
+items = [.. DatItem.ResolveNamesDB(items.ToConcurrentList())];

 for (int index = 0; index < items.Length; index++)
 {

@@ -113,11 +113,11 @@ namespace SabreTools.DatFiles.Formats

 // Read in the machine array
 jtr.Read();
-JsonSerializer js = new();
+var js = new JsonSerializer();
-JArray? machineArray = js.Deserialize<JArray>(jtr);
+JArray machineArray = js.Deserialize<JArray>(jtr) ?? [];

 // Loop through each machine object and process
-foreach (JObject machineObj in (machineArray ?? []).Cast<JObject>())
+foreach (JObject machineObj in machineArray)
 {
 ReadMachine(machineObj, statsOnly, source, sourceIndex);
 }
@@ -179,7 +179,7 @@ namespace SabreTools.DatFiles.Formats
 return;

 // Loop through each datitem object and process
-foreach (JObject itemObj in itemsArr.Cast<JObject>())
+foreach (JObject itemObj in itemsArr)
 {
 ReadItem(itemObj, statsOnly, source, sourceIndex, machine, machineIndex);
 }
@@ -480,7 +480,7 @@ namespace SabreTools.DatFiles.Formats
 continue;

 // Resolve the names in the block
-items = DatItem.ResolveNamesDB(items.ToConcurrentList()).ToArray();
+items = [.. DatItem.ResolveNamesDB(items.ToConcurrentList())];

 for (int index = 0; index < items.Length; index++)
 {

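In the JSON reader hunk above, the deserialized array gets an empty-`JArray` fallback and the `Cast<JObject>()` call disappears because the `foreach` variable can carry the cast itself. A rough sketch of that shape using Newtonsoft.Json types (assuming the reader variable is a `JsonTextReader`, as the surrounding code suggests):

    using Newtonsoft.Json;
    using Newtonsoft.Json.Linq;

    static class MachineArraySketch
    {
        static int CountMachines(JsonTextReader jtr)
        {
            var js = new JsonSerializer();
            JArray machineArray = js.Deserialize<JArray>(jtr) ?? [];

            int count = 0;
            foreach (JObject machineObj in machineArray)
            {
                // Each element is cast to JObject by the foreach itself;
                // a non-object element would throw, just as Cast<JObject>() did.
                count++;
            }

            return count;
        }
    }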
@@ -1,6 +1,5 @@
 using System;
 using System.IO;
-using System.Linq;
 using System.Text;
 using System.Xml;
 using System.Xml.Schema;
@@ -310,7 +309,7 @@ namespace SabreTools.DatFiles.Formats
 continue;

 // Resolve the names in the block
-items = DatItem.ResolveNamesDB(items.ToConcurrentList()).ToArray();
+items = [.. DatItem.ResolveNamesDB(items.ToConcurrentList())];

 for (int index = 0; index < items.Length; index++)
 {

@@ -122,9 +122,10 @@ namespace SabreTools.DatFiles.Formats
 missingFields.Add(Models.Metadata.DipSwitch.MaskKey);
 if (dipSwitch.ValuesSpecified)
 {
-if (dipSwitch.GetFieldValue<DipValue[]?>(Models.Metadata.DipSwitch.DipValueKey)!.Any(dv => string.IsNullOrEmpty(dv.GetName())))
+var dipValues = dipSwitch.GetFieldValue<DipValue[]?>(Models.Metadata.DipSwitch.DipValueKey);
+if (dipValues!.Any(dv => string.IsNullOrEmpty(dv.GetName())))
 missingFields.Add(Models.Metadata.DipValue.NameKey);
-if (dipSwitch.GetFieldValue<DipValue[]?>(Models.Metadata.DipSwitch.DipValueKey)!.Any(dv => string.IsNullOrEmpty(dv.GetStringFieldValue(Models.Metadata.DipValue.ValueKey))))
+if (dipValues!.Any(dv => string.IsNullOrEmpty(dv.GetStringFieldValue(Models.Metadata.DipValue.ValueKey))))
 missingFields.Add(Models.Metadata.DipValue.ValueKey);
 }

@@ -80,7 +80,7 @@ namespace SabreTools.DatFiles
 {
 get
 {
-var keys = items.Keys.ToList();
+List<string> keys = [.. items.Keys];
 keys.Sort(new NaturalComparer());
 return keys;
 }
@@ -315,7 +315,7 @@ namespace SabreTools.DatFiles
 /// </summary>
 public void ClearEmpty()
 {
-var keys = items.Keys.Where(k => k != null).ToList();
+List<string> keys = [.. items.Keys];
 foreach (string key in keys)
 {
 // If the key doesn't exist, skip
@@ -345,7 +345,7 @@ namespace SabreTools.DatFiles
 /// </summary>
 public void ClearMarked()
 {
-var keys = items.Keys.ToList();
+List<string> keys = [.. items.Keys];
 foreach (string key in keys)
 {
 ConcurrentList<DatItem>? oldItemList = items[key];
@@ -525,11 +525,7 @@ namespace SabreTools.DatFiles
 public void BucketBy(ItemKey bucketBy, DedupeType dedupeType, bool lower = true, bool norename = true)
 {
 // If we have a situation where there's no dictionary or no keys at all, we skip
-#if NET40_OR_GREATER || NETCOREAPP
-if (items == null || items.IsEmpty)
-#else
 if (items == null || items.Count == 0)
-#endif
 return;

 // If the sorted type isn't the same, we want to sort the dictionary accordingly
@@ -626,7 +622,10 @@ namespace SabreTools.DatFiles

 // Try to find duplicates
 ConcurrentList<DatItem>? roms = this[key];
-return roms?.Any(r => datItem.Equals(r)) == true;
+if (roms == null)
+return false;
+
+return roms.Any(r => datItem.Equals(r));
 }

 /// <summary>
@@ -739,32 +738,32 @@ namespace SabreTools.DatFiles
 #elif NET40_OR_GREATER
 Parallel.ForEach(keys, key =>
 #else
 foreach (var key in keys)
 #endif
 {
 // Get the possibly unsorted list
 ConcurrentList<DatItem>? sortedlist = this[key]?.ToConcurrentList();
 if (sortedlist == null)
 #if NET40_OR_GREATER || NETCOREAPP
 return;
 #else
 continue;
 #endif

 // Sort the list of items to be consistent
 DatItem.Sort(ref sortedlist, false);

 // If we're merging the roms, do so
 if (dedupeType == DedupeType.Full || (dedupeType == DedupeType.Game && bucketBy == ItemKey.Machine))
 sortedlist = DatItem.Merge(sortedlist);

 // Add the list back to the dictionary
 Reset(key);
 AddRange(key, sortedlist);
 #if NET40_OR_GREATER || NETCOREAPP
 });
 #else
 }
 #endif
 }

@@ -779,19 +778,19 @@ namespace SabreTools.DatFiles
 #elif NET40_OR_GREATER
 Parallel.ForEach(keys, key =>
 #else
 foreach (var key in keys)
 #endif
 {
 // Get the possibly unsorted list
 ConcurrentList<DatItem>? sortedlist = this[key];

 // Sort the list of items to be consistent
 if (sortedlist != null)
 DatItem.Sort(ref sortedlist, false);
 #if NET40_OR_GREATER || NETCOREAPP
 });
 #else
 }
 #endif
 }

@@ -1276,7 +1275,7 @@ namespace SabreTools.DatFiles
 .ToList();

 // If we're checking device references
-if (deviceReferences.Any())
+if (deviceReferences.Count > 0)
 {
 // Loop through all names and check the corresponding machines
 List<string> newDeviceReferences = [];
@@ -1326,7 +1325,7 @@ namespace SabreTools.DatFiles
 }

 // If we're checking slotoptions
-if (useSlotOptions && slotOptions.Any())
+if (useSlotOptions && slotOptions.Count > 0)
 {
 // Loop through all names and check the corresponding machines
 List<string> newSlotOptions = [];

@@ -6,7 +6,6 @@ using System.Collections.Generic;
 using System.IO;
 using System.Linq;
 using System.Text.RegularExpressions;
-
 #if NET40_OR_GREATER || NETCOREAPP
 using System.Threading.Tasks;
 #endif
@@ -150,7 +149,7 @@ namespace SabreTools.DatFiles
 {
 List<string> keys = [.. _buckets.Keys];
 keys.Sort(new NaturalComparer());
-return keys.ToArray();
+return [.. keys];
 }
 }

@@ -343,17 +342,17 @@ namespace SabreTools.DatFiles
 /// <summary>
 /// Get all item to machine mappings
 /// </summary>
-public (long, long)[] GetItemMachineMappings() => _itemToMachineMapping.Select(kvp => (kvp.Key, kvp.Value)).ToArray();
+public (long, long)[] GetItemMachineMappings() => [.. _itemToMachineMapping.Select(kvp => (kvp.Key, kvp.Value))];

 /// <summary>
 /// Get all item to source mappings
 /// </summary>
-public (long, long)[] GetItemSourceMappings() => _itemToSourceMapping.Select(kvp => (kvp.Key, kvp.Value)).ToArray();
+public (long, long)[] GetItemSourceMappings() => [.. _itemToSourceMapping.Select(kvp => (kvp.Key, kvp.Value))];

 /// <summary>
 /// Get all items and their indicies
 /// </summary>
-public (long, DatItem)[] GetItems() => _items.Select(kvp => (kvp.Key, kvp.Value)).ToArray();
+public (long, DatItem)[] GetItems() => [.. _items.Select(kvp => (kvp.Key, kvp.Value))];

 /// <summary>
 /// Get the indices and items associated with a bucket name
@@ -455,7 +454,7 @@ namespace SabreTools.DatFiles
 /// <summary>
 /// Get all machines and their indicies
 /// </summary>
-public (long, Machine)[] GetMachines() => _machines.Select(kvp => (kvp.Key, kvp.Value)).ToArray();
+public (long, Machine)[] GetMachines() => [.. _machines.Select(kvp => (kvp.Key, kvp.Value))];

 /// <summary>
 /// Get a source based on the index
@@ -486,7 +485,7 @@ namespace SabreTools.DatFiles
 /// <summary>
 /// Get all sources and their indicies
 /// </summary>
-public (long, Source)[] GetSources() => _sources.Select(kvp => (kvp.Key, kvp.Value)).ToArray();
+public (long, Source)[] GetSources() => [.. _sources.Select(kvp => (kvp.Key, kvp.Value))];

 /// <summary>
 /// Remove an item, returning if it could be removed
@@ -1718,7 +1717,7 @@ namespace SabreTools.DatFiles
 .ToList();

 // If we're checking device references
-if (deviceReferences.Any())
+if (deviceReferences.Count > 0)
 {
 // Loop through all names and check the corresponding machines
 List<string> newDeviceReferences = [];
@@ -1773,7 +1772,7 @@ namespace SabreTools.DatFiles
 }

 // If we're checking slotoptions
-if (useSlotOptions && slotOptions.Any())
+if (useSlotOptions && slotOptions.Count > 0)
 {
 // Loop through all names and check the corresponding machines
 List<string> newSlotOptions = [];

@@ -143,7 +143,7 @@ namespace SabreTools.DatFiles
 public void SetFields(DatHeader datHeader)
 {
 // If we have an invalid input, return
-if (datHeader == null || !HeaderFieldMappings.Any())
+if (datHeader == null || HeaderFieldMappings.Count == 0)
 return;

 foreach (var kvp in HeaderFieldMappings)
@@ -159,7 +159,7 @@ namespace SabreTools.DatFiles
 public void SetFields(Machine? machine)
 {
 // If we have an invalid input, return
-if (machine == null || !MachineFieldMappings.Any())
+if (machine == null || MachineFieldMappings.Count == 0)
 return;

 foreach (var kvp in MachineFieldMappings)
@@ -181,11 +181,11 @@ namespace SabreTools.DatFiles
 #region Common

 // Handle Machine fields
-if (MachineFieldMappings.Any() && datItem.GetFieldValue<Machine>(DatItem.MachineKey) != null)
+if (MachineFieldMappings.Count > 0 && datItem.GetFieldValue<Machine>(DatItem.MachineKey) != null)
 SetFields(datItem.GetFieldValue<Machine>(DatItem.MachineKey)!);

 // If there are no field names, return
-if (ItemFieldMappings == null || !ItemFieldMappings.Any())
+if (ItemFieldMappings == null || ItemFieldMappings.Count == 0)
 return;

 // If there are no field names for this type or generic, return

@@ -169,7 +169,7 @@ namespace SabreTools.DatTools
 InternalStopwatch watch = new($"Replacing items in '{intDat.Header.GetStringFieldValue(DatHeader.FileNameKey)}' from the base DAT");

 // If we are matching based on DatItem fields of any sort
-if (itemFieldNames.Any())
+if (itemFieldNames.Count > 0)
 {
 // For comparison's sake, we want to use CRC as the base bucketing
 datFile.Items.BucketBy(ItemKey.CRC, DedupeType.Full);
@@ -201,7 +201,7 @@ namespace SabreTools.DatTools

 // Replace fields from the first duplicate, if we have one
 if (dupes.Count > 0)
-Replacer.ReplaceFields(newDatItem, dupes.First(), itemFieldNames);
+Replacer.ReplaceFields(newDatItem, dupes[0], itemFieldNames);

 newDatItems.Add(newDatItem);
 }
@@ -217,7 +217,7 @@ namespace SabreTools.DatTools
 }

 // If we are matching based on Machine fields of any sort
-if (machineFieldNames.Any())
+if (machineFieldNames.Count > 0)
 {
 // For comparison's sake, we want to use Machine Name as the base bucketing
 datFile.Items.BucketBy(ItemKey.Machine, DedupeType.Full);
@@ -286,7 +286,7 @@ namespace SabreTools.DatTools
 InternalStopwatch watch = new($"Replacing items in '{intDat.Header.GetStringFieldValue(DatHeader.FileNameKey)}' from the base DAT");

 // If we are matching based on DatItem fields of any sort
-if (itemFieldNames.Any())
+if (itemFieldNames.Count > 0)
 {
 // For comparison's sake, we want to use CRC as the base bucketing
 datFile.ItemsDB.BucketBy(ItemKey.CRC, DedupeType.Full);
@@ -317,7 +317,7 @@ namespace SabreTools.DatTools

 // Replace fields from the first duplicate, if we have one
 if (dupes.Count > 0)
-Replacer.ReplaceFields(datItem.Item2, dupes.First().Item2, itemFieldNames);
+Replacer.ReplaceFields(datItem.Item2, dupes[0].Item2, itemFieldNames);
 }
 #if NET40_OR_GREATER || NETCOREAPP
 });
@@ -327,7 +327,7 @@ namespace SabreTools.DatTools
 }

 // If we are matching based on Machine fields of any sort
-if (machineFieldNames.Any())
+if (machineFieldNames.Count > 0)
 {
 // For comparison's sake, we want to use Machine Name as the base bucketing
 datFile.ItemsDB.BucketBy(ItemKey.Machine, DedupeType.Full);
@@ -1209,7 +1209,7 @@ namespace SabreTools.DatTools

 watch.Stop();

-return datFiles.Select(d => d.Header).ToList();
+return [.. datFiles.Select(d => d.Header)];
 }

 /// <summary>
@@ -1221,7 +1221,7 @@ namespace SabreTools.DatTools
 private static void AddFromExisting(DatFile addTo, DatFile addFrom, bool delete = false)
 {
 // Get the list of keys from the DAT
-var keys = addFrom.Items.Keys.ToList();
+List<string> keys = [.. addFrom.Items.Keys];
 foreach (string key in keys)
 {
 // Add everything from the key to the internal DAT

@@ -1,6 +1,5 @@
 using System.Collections.Generic;
 using System.IO;
-using System.Linq;
 using System.Threading;
 #if NET40_OR_GREATER || NETCOREAPP
 using System.Threading.Tasks;
@@ -68,9 +67,9 @@ namespace SabreTools.DatTools

 // Get a list of all files to process
 #if NET20 || NET35
-List<string> files = Directory.GetFiles(basePath, "*").ToList();
+List<string> files = [.. Directory.GetFiles(basePath, "*")];
 #else
-List<string> files = Directory.EnumerateFiles(basePath, "*", SearchOption.AllDirectories).ToList();
+List<string> files = [.. Directory.EnumerateFiles(basePath, "*", SearchOption.AllDirectories)];
 #endif

 // Loop through and add the file sizes

@@ -1,6 +1,5 @@
 using System.Collections.Generic;
 using System.IO;
-using System.Linq;
 #if NET40_OR_GREATER || NETCOREAPP
 using System.Threading.Tasks;
 #endif
@@ -111,7 +110,7 @@ namespace SabreTools.DatTools
 datFile.Items.BucketBy(ItemKey.SHA1, DedupeType.None);

 // Then we want to loop through each of the hashes and see if we can rebuild
-var keys = datFile.Items.SortedKeys.ToList();
+List<string> keys = [.. datFile.Items.SortedKeys];
 foreach (string hash in keys)
 {
 // Pre-empt any issues that could arise from string length
@@ -782,7 +782,7 @@ namespace SabreTools.DatTools
 {
 // Get the current machine
 var items = datFile.Items[machine];
-if (items == null || !items.Any())
+if (items == null || items.Count == 0)
 {
 logger.Error($"{machine} contains no items and will be skipped");
 continue;
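
Swapping !items.Any() for items.Count == 0 is the other recurring change: when the collection already exposes Count, the extension call adds nothing and, on the older target frameworks, typically allocates an enumerator. A small sketch of the guard with a stand-in list type:

using System.Collections.Generic;

static class EmptyCheckSketch
{
    // Mirrors the skip guard above: nothing to do for a null or empty bucket.
    public static bool ShouldSkip(List<string>? items)
    {
        return items == null || items.Count == 0;
    }
}
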
@@ -1,6 +1,5 @@
 using System.Collections.Generic;
 using System.IO;
-using System.Linq;
 using SabreTools.Core;
 using SabreTools.Core.Tools;
 using SabreTools.DatFiles;
@@ -59,7 +58,7 @@ namespace SabreTools.DatTools
 datFile.Items.BucketBy(ItemKey.SHA1, DedupeType.None);

 // Then we want to loop through each of the hashes and see if we can rebuild
-var keys = datFile.Items.SortedKeys.ToList();
+List<string> keys = [.. datFile.Items.SortedKeys];
 foreach (string hash in keys)
 {
 // Pre-empt any issues that could arise from string length
@@ -144,7 +143,7 @@ namespace SabreTools.DatTools
 datFile.ItemsDB.BucketBy(ItemKey.SHA1, DedupeType.None);

 // Then we want to loop through each of the hashes and see if we can rebuild
-var keys = datFile.ItemsDB.SortedKeys.ToList();
+List<string> keys = [.. datFile.ItemsDB.SortedKeys];
 foreach (string hash in keys)
 {
 // Pre-empt any issues that could arise from string length
@@ -217,7 +216,7 @@ namespace SabreTools.DatTools
 datFile.Items.BucketBy(ItemKey.Machine, DedupeType.Full);

 // Then mark items for removal
-var keys = datFile.Items.SortedKeys.ToList();
+List<string> keys = [.. datFile.Items.SortedKeys];
 foreach (string key in keys)
 {
 ConcurrentList<DatItem>? items = datFile.Items[key];
@@ -265,7 +264,7 @@ namespace SabreTools.DatTools
 datFile.ItemsDB.BucketBy(ItemKey.Machine, DedupeType.Full);

 // Then mark items for removal
-var keys = datFile.ItemsDB.SortedKeys.ToList();
+List<string> keys = [.. datFile.ItemsDB.SortedKeys];
 foreach (string key in keys)
 {
 var items = datFile.ItemsDB.GetItemsForBucket(key);
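
All four hunks above copy SortedKeys into a fresh list before looping. The usual reason for a snapshot like this is that the loop body may add or remove buckets, and enumerating a live key collection while it changes would throw. A hedged sketch of the shape, not the project's actual bucketing types:

using System.Collections.Generic;

static class KeySnapshotSketch
{
    public static void PruneEmptyBuckets(Dictionary<string, List<int>> buckets)
    {
        // Copy the keys first so the dictionary itself can be modified in the loop.
        List<string> keys = [.. buckets.Keys];
        foreach (string key in keys)
        {
            if (buckets[key].Count == 0)
                buckets.Remove(key);
        }
    }
}
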
@@ -1,8 +1,6 @@
 using System.IO;
 using System.Text;

-using SabreTools.Core;
-
 namespace SabreTools.FileTypes.Aaru
 {
 /// <summary>
@@ -484,7 +484,7 @@ namespace SabreTools.FileTypes.Archives
 ds.Dispose();

 // Now write the standard footer
-sw.Write(baseFile.CRC!.Reverse().ToArray());
+sw.Write([.. baseFile.CRC!.Reverse()]);
 sw.Write((uint)(baseFile.Size ?? 0));

 // Dispose of everything
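
The footer write above looks like a gzip-style trailer (CRC-32 followed by a 32-bit size); Reverse() flips the stored CRC bytes into the byte order the footer expects, and the spread merely replaces ToArray(). A hedged sketch with stand-in parameters:

using System.IO;
using System.Linq;

static class FooterSketch
{
    public static void WriteFooter(BinaryWriter sw, byte[] crc, long? size)
    {
        // Reverse the stored CRC bytes, then write the size truncated to 32 bits.
        byte[] reversedCrc = [.. crc.Reverse()];
        sw.Write(reversedCrc);
        sw.Write((uint)(size ?? 0));
    }
}
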
@@ -4,7 +4,6 @@ using System.IO;
 #if NET462_OR_GREATER || NETCOREAPP
 using System.Linq;
 using SabreTools.Hashing;
-using SabreTools.Matching;
 using SabreTools.Matching.Compare;
 using SharpCompress.Archives;
 using SharpCompress.Archives.Rar;
@@ -252,7 +251,7 @@ namespace SabreTools.FileTypes.Archives
 try
 {
 SharpCompress.Archives.Rar.RarArchive ra = SharpCompress.Archives.Rar.RarArchive.Open(this.Filename, new ReaderOptions { LeaveStreamOpen = false });
-List<RarArchiveEntry> rarEntries = ra.Entries.OrderBy(e => e.Key, new NaturalReversedComparer()).ToList();
+List<RarArchiveEntry> rarEntries = [.. ra.Entries.OrderBy(e => e.Key ?? string.Empty, new NaturalReversedComparer())];
 string? lastRarEntry = null;
 foreach (RarArchiveEntry entry in rarEntries)
 {
@@ -705,7 +705,7 @@ namespace SabreTools.FileTypes.Archives
 zipFile.ZipFileCreate(tempFile);

 // Get the order for the entries with the new file
-List<string> keys = inputIndexMap.Keys.ToList();
+List<string> keys = [.. inputIndexMap.Keys];
 keys.Sort(CompressUtils.TrrntZipStringCompare);

 // Copy over all files to the new archive
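
Besides dropping ToList(), the RAR hunk coalesces the nullable entry key to string.Empty before it reaches the comparer, so a null Key can no longer flow into the comparison. A sketch with a stand-in comparer (NaturalReversedComparer is the project's own; Comparer<string>.Default fills in here):

using System.Collections.Generic;
using System.Linq;

static class NullSafeOrderingSketch
{
    public static List<string?> SortKeys(IEnumerable<string?> keys, IComparer<string>? comparer = null)
    {
        comparer ??= Comparer<string>.Default;

        // Null keys are compared as empty strings instead of reaching the comparer.
        return [.. keys.OrderBy(k => k ?? string.Empty, comparer)];
    }
}
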
@@ -6,7 +6,6 @@ using System.Linq;
 using Compress;
 using SabreTools.Core.Tools;
 using SabreTools.Hashing;
-using SabreTools.Matching;
 using SabreTools.Matching.Compare;
 using SharpCompress.Archives;
 using SharpCompress.Archives.Tar;
@@ -241,7 +240,7 @@ namespace SabreTools.FileTypes.Archives
 try
 {
 TarArchive ta = TarArchive.Open(this.Filename!, new ReaderOptions { LeaveStreamOpen = false });
-List<TarArchiveEntry> tarEntries = ta.Entries.OrderBy(e => e.Key, new NaturalReversedComparer()).ToList();
+List<TarArchiveEntry> tarEntries = ta.Entries.OrderBy(e => e.Key ?? string.Empty, new NaturalReversedComparer()).ToList();
 string? lastTarEntry = null;
 foreach (TarArchiveEntry entry in tarEntries)
 {
@@ -340,7 +339,7 @@ namespace SabreTools.FileTypes.Archives
 oldTarFile = TarArchive.Open(archiveFileName);

 // Get a list of all current entries
-var entries = oldTarFile.Entries.Select(i => i.Key).ToList();
+List<string> entries = [.. oldTarFile.Entries.Select(i => i.Key)];

 // Map all inputs to index
 var inputIndexMap = new Dictionary<string, int>();
@@ -367,7 +366,7 @@ namespace SabreTools.FileTypes.Archives
 }

 // Get the order for the entries with the new file
-List<string> keys = inputIndexMap.Keys.ToList();
+List<string> keys = [.. inputIndexMap.Keys];
 keys.Sort(CompressUtils.TrrntZipStringCompare);

 // Copy over all files to the new archive
@@ -480,7 +479,7 @@ namespace SabreTools.FileTypes.Archives
 }

 // Sort the keys in TZIP order
-List<string> keys = inputIndexMap.Keys.ToList();
+List<string> keys = [.. inputIndexMap.Keys];
 keys.Sort(CompressUtils.TrrntZipStringCompare);

 // Now add all of the files in order
@@ -506,7 +505,7 @@ namespace SabreTools.FileTypes.Archives
 oldTarFile = TarArchive.Open(archiveFileName);

 // Get a list of all current entries
-var entries = oldTarFile.Entries.Select(i => i.Key).ToList();
+List<string> entries = [.. oldTarFile.Entries.Select(i => i.Key)];

 // Map all inputs to index
 var inputIndexMap = new Dictionary<string, int>();
@@ -539,7 +538,7 @@ namespace SabreTools.FileTypes.Archives
 }

 // Get the order for the entries with the new file
-List<string> keys = inputIndexMap.Keys.ToList();
+List<string> keys = [.. inputIndexMap.Keys];
 keys.Sort(CompressUtils.TrrntZipStringCompare);

 // Copy over all files to the new archive
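
Every rewrite path in these archive handlers keeps the same shape: map each input file to its index, sort the keys with the torrentzip comparer, then copy entries in that order so the rebuilt archive is deterministic. A hedged outline of that flow; the real code uses CompressUtils.TrrntZipStringCompare, and a plain ordinal comparison stands in for it here:

using System;
using System.Collections.Generic;

static class DeterministicRebuildSketch
{
    public static List<string> OrderForRebuild(IEnumerable<string> inputs, Comparison<string>? comparison = null)
    {
        comparison ??= (a, b) => string.CompareOrdinal(a, b);

        // Map all inputs to an index, mirroring inputIndexMap above.
        var inputIndexMap = new Dictionary<string, int>();
        int index = 0;
        foreach (string input in inputs)
            inputIndexMap[input] = index++;

        // Sort the keys; the caller then copies entries in this order.
        List<string> keys = [.. inputIndexMap.Keys];
        keys.Sort(comparison);
        return keys;
    }
}
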
@@ -68,7 +68,7 @@ namespace SabreTools.FileTypes.Archives
 #if NET20 || NET35 || NET40
 // Extract all files to the temp directory
 var zf = new Zip();
-ZipReturn zr = zf.ZipFileOpen(this.Filename!, -1, true);
+ZipReturn zr = zf.ZipFileOpen(Filename!, -1, true);
 if (zr != ZipReturn.ZipGood)
 throw new Exception(CompressUtils.ZipErrorMessageText(zr));

@@ -124,7 +124,7 @@ namespace SabreTools.FileTypes.Archives
 encounteredErrors = false;
 #else
 // Extract all files to the temp directory
-var zf = ZipFile.OpenRead(this.Filename);
+var zf = ZipFile.OpenRead(Filename!);
 if (zf == null)
 throw new Exception($"Could not open {Filename} as a zip file");

@@ -221,7 +221,7 @@ namespace SabreTools.FileTypes.Archives
 public override (Stream?, string?) GetEntryStream(string entryName)
 {
 // If we have an invalid file
-if (this.Filename == null)
+if (Filename == null)
 return (null, null);

 try
@@ -231,7 +231,7 @@ namespace SabreTools.FileTypes.Archives

 #if NET20 || NET35 || NET40
 var zf = new Zip();
-ZipReturn zr = zf.ZipFileOpen(this.Filename!, -1, true);
+ZipReturn zr = zf.ZipFileOpen(Filename!, -1, true);
 if (zr != ZipReturn.ZipGood)
 throw new Exception(CompressUtils.ZipErrorMessageText(zr));

@@ -261,7 +261,7 @@ namespace SabreTools.FileTypes.Archives
 zf.ZipFileClose();
 return (stream, realEntry);
 #else
-var zf = ZipFile.OpenRead(this.Filename);
+var zf = ZipFile.OpenRead(Filename);
 if (zf == null)
 throw new Exception($"Could not open {Filename} as a zip file");

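
The remaining churn in this file is cosmetic: the redundant this. qualifier is dropped when reading instance members such as Filename and AvailableHashTypes. Behaviour is unchanged; a before/after sketch:

internal class QualifierSketch
{
    public string? Filename { get; set; }

    public bool HasFile()
    {
        // return this.Filename != null;   // before
        return Filename != null;           // after: same member lookup, shorter form
    }
}
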
@@ -311,17 +311,17 @@ namespace SabreTools.FileTypes.Archives
 public override List<BaseFile>? GetChildren()
 {
 // If we have an invalid file
-if (this.Filename == null)
+if (Filename == null)
 return null;

 var found = new List<BaseFile>();
-string? gamename = Path.GetFileNameWithoutExtension(this.Filename);
+string? gamename = Path.GetFileNameWithoutExtension(Filename);

 try
 {
 #if NET20 || NET35 || NET40
 var zf = new Zip();
-ZipReturn zr = zf.ZipFileOpen(this.Filename!, -1, true);
+ZipReturn zr = zf.ZipFileOpen(Filename!, -1, true);
 if (zr != ZipReturn.ZipGood)
 throw new Exception(CompressUtils.ZipErrorMessageText(zr));

@@ -347,7 +347,7 @@ namespace SabreTools.FileTypes.Archives
 // If we get a read error, log it and continue
 if (zr != ZipReturn.ZipGood || readStream == null)
 {
-logger.Warning($"An error occurred while reading archive {this.Filename}: Zip Error - {zr}");
+logger.Warning($"An error occurred while reading archive {Filename}: Zip Error - {zr}");
 continue;
 }

@@ -355,7 +355,7 @@ namespace SabreTools.FileTypes.Archives
 var zipEntryRom = new BaseFile();

 // Perform a quickscan, if flagged to
-if (this.AvailableHashTypes.Length == 1 && this.AvailableHashTypes[0] == HashType.CRC32)
+if (AvailableHashTypes.Length == 1 && AvailableHashTypes[0] == HashType.CRC32)
 {
 zipEntryRom.Size = (long)localFile.UncompressedSize;
 zipEntryRom.CRC = localFile.CRC;
@@ -365,7 +365,7 @@ namespace SabreTools.FileTypes.Archives
 {
 zipEntryRom = GetInfo(readStream,
 size: (long)localFile.UncompressedSize,
-hashes: this.AvailableHashTypes,
+hashes: AvailableHashTypes,
 keepReadOpen: true);
 }

@@ -380,7 +380,7 @@ namespace SabreTools.FileTypes.Archives
 zr = zf.ZipFileCloseReadStream();
 zf.ZipFileClose();
 #else
-var zf = ZipFile.OpenRead(this.Filename);
+var zf = ZipFile.OpenRead(Filename);
 if (zf == null)
 throw new Exception($"Could not open {Filename} as a zip file");

@@ -408,7 +408,7 @@ namespace SabreTools.FileTypes.Archives
 var zipEntryRom = new BaseFile();

 // Perform a quickscan, if flagged to
-if (this.AvailableHashTypes.Length == 1 && this.AvailableHashTypes[0] == HashType.CRC32)
+if (AvailableHashTypes.Length == 1 && AvailableHashTypes[0] == HashType.CRC32)
 {
 zipEntryRom.Size = localFile.Length;
 #if NETCOREAPP
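
The quickscan branches deserve a note: when CRC-32 is the only hash requested, the scanner trusts the size and CRC already recorded in the zip entry's metadata instead of decompressing and hashing the stream; any other hash set falls through to a full read via GetInfo. A simplified, hedged sketch of that decision:

internal static class QuickScanSketch
{
    internal enum HashKind { CRC32, MD5, SHA1 }

    // Returns the (size, crc) pair straight from the archive metadata when a
    // quick scan is enough; otherwise signals that the stream must be hashed.
    internal static (long Size, byte[]? Crc, bool NeedsFullScan) Describe(
        long entrySize, byte[] entryCrc, HashKind[] requested)
    {
        if (requested.Length == 1 && requested[0] == HashKind.CRC32)
            return (entrySize, entryCrc, false);

        return (entrySize, null, true);
    }
}
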
@@ -422,7 +422,7 @@ namespace SabreTools.FileTypes.Archives
 {
 zipEntryRom = GetInfo(readStream,
 size: localFile.Length,
-hashes: this.AvailableHashTypes,
+hashes: AvailableHashTypes,
 keepReadOpen: true);
 }

@@ -450,7 +450,7 @@ namespace SabreTools.FileTypes.Archives
 public override List<string> GetEmptyFolders()
 {
 // If we have an invalid file
-if (this.Filename == null)
+if (Filename == null)
 return [];

 List<string> empties = [];
@@ -459,7 +459,7 @@ namespace SabreTools.FileTypes.Archives
 {
 #if NET20 || NET35 || NET40
 var zf = new Zip();
-ZipReturn zr = zf.ZipFileOpen(this.Filename!, -1, true);
+ZipReturn zr = zf.ZipFileOpen(Filename!, -1, true);
 if (zr != ZipReturn.ZipGood)
 throw new Exception(CompressUtils.ZipErrorMessageText(zr));

@@ -488,7 +488,7 @@ namespace SabreTools.FileTypes.Archives
 }
 }
 #else
-var zf = ZipFile.OpenRead(this.Filename);
+var zf = ZipFile.OpenRead(Filename);
 if (zf == null)
 throw new Exception($"Could not open {Filename} as a zip file");

@@ -544,7 +544,7 @@ namespace SabreTools.FileTypes.Archives
 public override bool IsTorrent()
 {
 Zip zf = new();
-ZipReturn zr = zf.ZipFileOpen(this.Filename!, -1, true);
+ZipReturn zr = zf.ZipFileOpen(Filename!, -1, true);
 if (zr != ZipReturn.ZipGood)
 throw new Exception(CompressUtils.ZipErrorMessageText(zr));

@@ -665,7 +665,7 @@ namespace SabreTools.FileTypes.Archives
 zipFile.ZipFileCreate(tempFile);

 // Get the order for the entries with the new file
-List<string> keys = inputIndexMap.Keys.ToList();
+List<string> keys = [.. inputIndexMap.Keys];
 keys.Sort(CompressUtils.TrrntZipStringCompare);

 // Copy over all files to the new archive
@@ -801,7 +801,7 @@ namespace SabreTools.FileTypes.Archives
 }

 // Sort the keys in TZIP order
-List<string> keys = inputIndexMap.Keys.ToList();
+List<string> keys = [.. inputIndexMap.Keys];
 keys.Sort(CompressUtils.TrrntZipStringCompare);

 // Now add all of the files in order
@@ -880,7 +880,7 @@ namespace SabreTools.FileTypes.Archives
 zipFile.ZipFileCreate(tempFile);

 // Get the order for the entries with the new file
-List<string> keys = inputIndexMap.Keys.ToList();
+List<string> keys = [.. inputIndexMap.Keys];
 keys.Sort(CompressUtils.TrrntZipStringCompare);

 // Copy over all files to the new archive
@@ -1,5 +1,5 @@
-using System.IO;
-using System.Linq;
+using System;
+using System.IO;
 using SabreTools.Core.Tools;
 using SabreTools.FileTypes.Aaru;
 using SabreTools.FileTypes.CHD;
@@ -328,13 +328,13 @@ namespace SabreTools.FileTypes
 var baseFile = new BaseFile()
 {
 Size = size,
-CRC = hashes.Contains(HashType.CRC32) ? TextHelper.StringToByteArray(hashDict[HashType.CRC32]) : null,
-MD5 = hashes.Contains(HashType.MD5) ? TextHelper.StringToByteArray(hashDict[HashType.MD5]) : null,
-SHA1 = hashes.Contains(HashType.SHA1) ? TextHelper.StringToByteArray(hashDict[HashType.SHA1]) : null,
-SHA256 = hashes.Contains(HashType.SHA256) ? TextHelper.StringToByteArray(hashDict[HashType.SHA256]) : null,
-SHA384 = hashes.Contains(HashType.SHA384) ? TextHelper.StringToByteArray(hashDict[HashType.SHA384]) : null,
-SHA512 = hashes.Contains(HashType.SHA512) ? TextHelper.StringToByteArray(hashDict[HashType.SHA512]) : null,
-SpamSum = hashes.Contains(HashType.SpamSum) ? TextHelper.StringToByteArray(hashDict[HashType.SpamSum]) : null,
+CRC = hashDict.ContainsKey(HashType.CRC32) ? TextHelper.StringToByteArray(hashDict[HashType.CRC32]) : null,
+MD5 = hashDict.ContainsKey(HashType.MD5) ? TextHelper.StringToByteArray(hashDict[HashType.MD5]) : null,
+SHA1 = hashDict.ContainsKey(HashType.SHA1) ? TextHelper.StringToByteArray(hashDict[HashType.SHA1]) : null,
+SHA256 = hashDict.ContainsKey(HashType.SHA256) ? TextHelper.StringToByteArray(hashDict[HashType.SHA256]) : null,
+SHA384 = hashDict.ContainsKey(HashType.SHA384) ? TextHelper.StringToByteArray(hashDict[HashType.SHA384]) : null,
+SHA512 = hashDict.ContainsKey(HashType.SHA512) ? TextHelper.StringToByteArray(hashDict[HashType.SHA512]) : null,
+SpamSum = hashDict.ContainsKey(HashType.SpamSum) ? TextHelper.StringToByteArray(hashDict[HashType.SpamSum]) : null,
 };

 // Deal with the input stream
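
The BaseFile hunk is the one place the commit changes which collection is consulted: the guard now asks the dictionary of computed hashes (hashDict) rather than the list of requested hash types, so the indexer that follows cannot throw for a hash that was never produced. TryGetValue expresses the same intent in a single lookup; a hedged sketch with simplified types (TextHelper.StringToByteArray is the project's converter, a plain hex parse stands in for it):

using System;
using System.Collections.Generic;

internal static class HashLookupSketch
{
    // Double lookup, mirroring the ContainsKey-then-index shape in the diff.
    public static byte[]? GetHashOrNull(Dictionary<string, string> hashDict, string key)
    {
        return hashDict.ContainsKey(key) ? ParseHex(hashDict[key]) : null;
    }

    // Single lookup with TryGetValue, equivalent and slightly cheaper.
    public static byte[]? GetHashOrNullOnce(Dictionary<string, string> hashDict, string key)
    {
        return hashDict.TryGetValue(key, out string? hex) ? ParseHex(hex) : null;
    }

    private static byte[] ParseHex(string hex)
    {
        byte[] bytes = new byte[hex.Length / 2];
        for (int i = 0; i < bytes.Length; i++)
            bytes[i] = Convert.ToByte(hex.Substring(i * 2, 2), 16);
        return bytes;
    }
}
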
@@ -1,6 +1,5 @@
 using System;
 using System.Collections.Generic;
-using System.Linq;
 using SabreTools.Core.Filter;
 using SabreTools.DatFiles;
 using SabreTools.DatItems;
@@ -49,7 +48,7 @@ namespace SabreTools.Filtering
 public void PopulateFromList(List<string> inputs)
 {
 // If there are no inputs, just skip
-if (inputs == null || !inputs.Any())
+if (inputs == null || inputs.Count == 0)
 return;

 InternalStopwatch watch = new("Populating extras from list");
@@ -59,7 +58,7 @@ namespace SabreTools.Filtering
 ExtraIniItem item = new();

 // If we don't even have a possible field and file combination
-if (!input.Contains(':'))
+if (!input.Contains(":"))
 {
 logger.Warning($"'{input}` is not a valid INI extras string. Valid INI extras strings are of the form 'key:value'. Please refer to README.1ST or the help feature for more details.");
 return;
@@ -90,7 +89,7 @@ namespace SabreTools.Filtering
 public bool ApplyExtras(DatFile datFile, bool throwOnError = false)
 {
 // If we have no extras, don't attempt to apply and just return true
-if (Items == null || !Items.Any())
+if (Items == null || Items.Count == 0)
 return true;

 var watch = new InternalStopwatch("Applying extra mappings to DAT");
@@ -153,7 +152,7 @@ namespace SabreTools.Filtering
 public bool ApplyExtrasDB(DatFile datFile, bool throwOnError = false)
 {
 // If we have no extras, don't attempt to apply and just return true
-if (Items == null || !Items.Any())
+if (Items == null || Items.Count == 0)
 return true;

 var watch = new InternalStopwatch("Applying extra mappings to DAT");
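
Two details in the filtering hunks: the empty-input guards now read Count directly, and Contains(':') becomes Contains(":"). The string overload exists on every framework this project targets, while string.Contains(char) is missing from the older ones, so the change most likely keeps the multi-targeted build compiling rather than altering behaviour. A short sketch of the combined guard:

using System.Collections.Generic;

internal static class ExtraIniGuardSketch
{
    // True when the input looks like a usable 'key:value' extras string.
    public static bool LooksLikeExtra(List<string>? inputs, string input)
    {
        if (inputs == null || inputs.Count == 0)
            return false;

        // String-based Contains compiles on net20/net35 as well as modern targets.
        return input.Contains(":");
    }
}
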
@@ -1,4 +1,3 @@
-using SabreTools.Core;
 using SabreTools.Core.Tools;
 using SabreTools.DatItems;
 using SabreTools.DatItems.Formats;
@@ -1,7 +1,5 @@
 using SabreTools.DatItems;
 using SabreTools.DatItems.Formats;
-using SabreTools.Filtering;
-using Xunit;

 namespace SabreTools.Test.Filtering
 {
@@ -1,6 +1,4 @@
 using System.Collections.Generic;
-
-using SabreTools.Core;
 using SabreTools.DatItems;
 using SabreTools.DatItems.Formats;
 using SabreTools.Filtering;
@@ -468,7 +468,7 @@ Reset the internal state: reset();";
 }

 // If we had any unmapped formats, return an issue
-if (unmappedFormats.Any())
+if (unmappedFormats.Count > 0)
 {
 string message = $"The following inputs were invalid formats: {string.Join(", ", unmappedFormats)}";
 return (false, message);