Reduce Linq usage across entire project

This commit is contained in:
Matt Nadareski
2024-10-19 21:41:08 -04:00
parent 1c079aab18
commit b87b05f828
36 changed files with 215 additions and 205 deletions

View File

@@ -1,6 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Microsoft.Data.Sqlite;
using SabreTools.Core;
using SabreTools.DatFiles;
@@ -61,7 +60,7 @@ have a current entry in the DAT index.";
bool onlyNeeded = GetBoolean(features, OnlyNeededValue);
// First we want to get just all directories from the inputs
List<string> onlyDirs = new List<string>();
List<string> onlyDirs = [];
foreach (string input in Inputs)
{
if (Directory.Exists(input))
@@ -77,10 +76,15 @@ have a current entry in the DAT index.";
}
// Create an empty Dat for files that need to be rebuilt
DatFile need = DatFile.Create();
var need = DatFile.Create();
// Get the first depot as output
var depotKeyEnumerator = _depots.Keys.GetEnumerator();
depotKeyEnumerator.MoveNext();
string firstDepot = depotKeyEnumerator.Current;
// Open the database connection
SqliteConnection dbc = new SqliteConnection(_connectionString);
var dbc = new SqliteConnection(_connectionString);
dbc.Open();
// Now that we have the Dats, add the files to the database
@@ -98,34 +102,38 @@ have a current entry in the DAT index.";
foreach (Rom rom in datItems)
{
string? crc = rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.CRCKey);
string? md5 = rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.MD5Key);
string? sha1 = rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.SHA1Key);
// If we care about whether the file exists, check the database first
if (onlyNeeded && !noDb)
{
string query = "SELECT * FROM crcsha1 JOIN md5sha1 ON crcsha1.sha1=md5sha1.sha1"
+ $" WHERE crcsha1.crc=\"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.CRCKey)}\""
+ $" OR md5sha1.md5=\"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.MD5Key)}\""
+ $" OR md5sha1.sha1=\"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.SHA1Key)}\"";
SqliteCommand slc = new SqliteCommand(query, dbc);
+ $" WHERE crcsha1.crc=\"{crc}\""
+ $" OR md5sha1.md5=\"{md5}\""
+ $" OR md5sha1.sha1=\"{sha1}\"";
var slc = new SqliteCommand(query, dbc);
SqliteDataReader sldr = slc.ExecuteReader();
if (sldr.HasRows)
{
// Add to the queries
if (!string.IsNullOrWhiteSpace(rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.CRCKey)))
crcquery += $" (\"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.CRCKey)}\"),";
if (!string.IsNullOrWhiteSpace(crc))
crcquery += $" (\"{crc}\"),";
if (!string.IsNullOrWhiteSpace(rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.MD5Key)))
md5query += $" (\"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.MD5Key)}\"),";
if (!string.IsNullOrWhiteSpace(md5))
md5query += $" (\"{md5}\"),";
if (!string.IsNullOrWhiteSpace(rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.SHA1Key)))
if (!string.IsNullOrWhiteSpace(sha1))
{
sha1query += $" (\"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.SHA1Key)}\", \"{_depots!.Keys.ToList()[0]}\"),";
sha1query += $" (\"{sha1}\", \"{firstDepot}\"),";
if (!string.IsNullOrWhiteSpace(rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.CRCKey)))
crcsha1query += $" (\"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.CRCKey)}\", \"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.SHA1Key)}\"),";
if (!string.IsNullOrWhiteSpace(crc))
crcsha1query += $" (\"{crc}\", \"{sha1}\"),";
if (!string.IsNullOrWhiteSpace(rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.MD5Key)))
md5sha1query += $" (\"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.MD5Key)}\", \"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.SHA1Key)}\"),";
if (!string.IsNullOrWhiteSpace(md5))
md5sha1query += $" (\"{md5}\", \"{sha1}\"),";
}
// Add to the Dat
@@ -138,21 +146,21 @@ have a current entry in the DAT index.";
// Add to the queries
if (!noDb)
{
if (!string.IsNullOrWhiteSpace(rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.CRCKey)))
crcquery += $" (\"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.CRCKey)}\"),";
if (!string.IsNullOrWhiteSpace(crc))
crcquery += $" (\"{crc}\"),";
if (!string.IsNullOrWhiteSpace(rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.MD5Key)))
md5query += $" (\"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.MD5Key)}\"),";
if (!string.IsNullOrWhiteSpace(md5))
md5query += $" (\"{md5}\"),";
if (!string.IsNullOrWhiteSpace(rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.SHA1Key)))
if (!string.IsNullOrWhiteSpace(sha1))
{
sha1query += $" (\"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.SHA1Key)}\", \"{_depots!.Keys.ToList()[0]}\"),";
sha1query += $" (\"{sha1}\", \"{firstDepot}\"),";
if (!string.IsNullOrWhiteSpace(rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.CRCKey)))
crcsha1query += $" (\"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.CRCKey)}\", \"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.SHA1Key)}\"),";
if (!string.IsNullOrWhiteSpace(crc))
crcsha1query += $" (\"{crc}\", \"{sha1}\"),";
if (!string.IsNullOrWhiteSpace(rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.MD5Key)))
md5sha1query += $" (\"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.MD5Key)}\", \"{rom.GetStringFieldValue(SabreTools.Models.Metadata.Rom.SHA1Key)}\"),";
if (!string.IsNullOrWhiteSpace(md5))
md5sha1query += $" (\"{md5}\", \"{sha1}\"),";
}
}
@@ -165,28 +173,28 @@ have a current entry in the DAT index.";
// Now run the queries, if they're populated
if (crcquery != "INSERT OR IGNORE INTO crc (crc) VALUES")
{
SqliteCommand slc = new SqliteCommand(crcquery.TrimEnd(','), dbc);
var slc = new SqliteCommand(crcquery.TrimEnd(','), dbc);
slc.ExecuteNonQuery();
slc.Dispose();
}
if (md5query != "INSERT OR IGNORE INTO md5 (md5) VALUES")
{
SqliteCommand slc = new SqliteCommand(md5query.TrimEnd(','), dbc);
var slc = new SqliteCommand(md5query.TrimEnd(','), dbc);
slc.ExecuteNonQuery();
slc.Dispose();
}
if (sha1query != "INSERT OR IGNORE INTO sha1 (sha1, depot) VALUES")
{
SqliteCommand slc = new SqliteCommand(sha1query.TrimEnd(','), dbc);
var slc = new SqliteCommand(sha1query.TrimEnd(','), dbc);
slc.ExecuteNonQuery();
slc.Dispose();
}
if (crcsha1query != "INSERT OR IGNORE INTO crcsha1 (crc, sha1) VALUES")
{
SqliteCommand slc = new SqliteCommand(crcsha1query.TrimEnd(','), dbc);
var slc = new SqliteCommand(crcsha1query.TrimEnd(','), dbc);
slc.ExecuteNonQuery();
slc.Dispose();
}
@@ -202,7 +210,7 @@ have a current entry in the DAT index.";
Rebuilder.RebuildGeneric(
need,
onlyDirs,
outDir: _depots!.Keys.ToList()[0],
outDir: firstDepot,
outputFormat: OutputFormat.TorrentGzipRomba,
asFiles: TreatAsFile.NonArchive);

View File

@@ -1,7 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Xml;
using System.Xml.Schema;
using Microsoft.Data.Sqlite;
@@ -423,7 +422,7 @@ Possible values are: Verbose, User, Warning, Error");
internal static string? _db; // Database name
// Depot settings
internal static Dictionary<string, Tuple<long, bool>>? _depots; // Folder location, Max size
internal static Dictionary<string, Tuple<long, bool>> _depots = []; // Folder location, Max size
// Server settings
internal static int _port; // Web server port
@@ -552,9 +551,9 @@ CREATE TABLE IF NOT EXISTS dat (
{
// Get a dictionary of filenames that actually exist in the DATRoot, logging which ones are not
#if NET20 || NET35
List<string> datRootDats = Directory.GetFiles(_dats!, "*").ToList();
List<string> datRootDats = [.. Directory.GetFiles(_dats!, "*")];
#else
List<string> datRootDats = Directory.EnumerateFiles(_dats!, "*", SearchOption.AllDirectories).ToList();
List<string> datRootDats = [.. Directory.EnumerateFiles(_dats!, "*", SearchOption.AllDirectories)];
#endif
List<string> lowerCaseDats = datRootDats.ConvertAll(i => Path.GetFileName(i).ToLowerInvariant());
Dictionary<string, string> foundDats = [];
@@ -579,7 +578,7 @@ CREATE TABLE IF NOT EXISTS dat (
/// <summary>
/// Initialize the Romba application from XML config
/// </summary>
private void InitializeConfiguration()
private static void InitializeConfiguration()
{
// Get default values if they're not written
int workers = 4,
@@ -592,7 +591,7 @@ CREATE TABLE IF NOT EXISTS dat (
baddir = "bad",
dats = "dats",
db = "db";
Dictionary<string, Tuple<long, bool>> depots = new Dictionary<string, Tuple<long, bool>>();
Dictionary<string, Tuple<long, bool>> depots = [];
// Get the XML text reader for the configuration file, if possible
XmlReader xtr = XmlReader.Create(_config, new XmlReaderSettings

View File

@@ -65,7 +65,10 @@ structure according to the original DAT master directory tree structure.";
outputFolder.Ensure(create: true);
// Get all online depots
List<string> onlineDepots = _depots!.Where(d => d.Value.Item2).Select(d => d.Key).ToList();
List<string> onlineDepots = _depots
.Where(d => d.Value.Item2)
.Select(d => d.Key)
.ToList();
// Now scan all of those depots and rebuild
Rebuilder.RebuildDepot(

View File

@@ -45,7 +45,7 @@ namespace RombaSharp.Features
// Now, for each of these files, attempt to add the data found inside
foreach (string input in Inputs)
{
StreamReader sr = new StreamReader(File.OpenRead(input));
var sr = new StreamReader(File.OpenRead(input));
// The first line should be the hash header
string? line = sr.ReadLine();

View File

@@ -51,9 +51,9 @@ namespace RombaSharp.Features
foreach (string input in Inputs)
{
#if NET20 || NET35
List<string> depotFiles = Directory.GetFiles(input, "*.gz").ToList();
List<string> depotFiles = [.. Directory.GetFiles(input, "*.gz")];
#else
List<string> depotFiles = Directory.EnumerateFiles(input, "*.gz", SearchOption.AllDirectories).ToList();
List<string> depotFiles = [.. Directory.EnumerateFiles(input, "*.gz", SearchOption.AllDirectories)];
#endif
// If we are copying all that is possible but we want to scan first

View File

@@ -40,7 +40,7 @@ namespace RombaSharp.Features
foreach (string depotname in Inputs)
{
// Check that it's a valid depot first
if (!_depots!.ContainsKey(depotname))
if (!_depots.ContainsKey(depotname))
{
logger.User($"'{depotname}' is not a recognized depot. Please add it to your configuration file and try again");
return false;

View File

@@ -1,6 +1,5 @@
using System;
using System.Collections.Generic;
using System.Linq;
using SabreTools.Core.Tools;
using SabreTools.Models.Metadata;
@@ -59,7 +58,9 @@ namespace SabreTools.Core.Filter
foreach (var filter in this.Filters)
{
// If the filter isn't for this object type, skip
if (filter.Key[0] != itemName || (filter.Key[0] == "item" && TypeHelper.GetDatItemTypeNames().Contains(itemName)))
if (filter.Key[0] != itemName)
continue;
else if (filter.Key[0] == "item" && Array.IndexOf(TypeHelper.GetDatItemTypeNames(), itemName) > -1)
continue;
// If we don't get a match, it's a failure

View File

@@ -1,5 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
@@ -123,7 +122,6 @@ namespace SabreTools.Core.Tools
if (string.IsNullOrEmpty(input))
return input;
List<char> invalidPath = [.. Path.GetInvalidPathChars()];
foreach (char invalid in Path.GetInvalidPathChars())
{
input = input!.Replace(invalid.ToString(), string.Empty);

View File

@@ -20,7 +20,7 @@ namespace SabreTools.DatFiles
public void ConvertMetadata(Models.Metadata.MetadataFile? item, string filename, int indexId, bool keep, bool statsOnly)
{
// If the metadata file is invalid, we can't do anything
if (item == null || !item.Any())
if (item == null || item.Count == 0)
return;
// Create an internal source and add to the dictionary
@@ -46,7 +46,7 @@ namespace SabreTools.DatFiles
private void ConvertHeader(Models.Metadata.Header? item, bool keep)
{
// If the header is invalid, we can't do anything
if (item == null || !item.Any())
if (item == null || item.Count == 0)
return;
// Create an internal header
@@ -343,7 +343,7 @@ namespace SabreTools.DatFiles
private void ConvertMachine(Models.Metadata.Machine? item, Source source, long sourceIndex, bool statsOnly)
{
// If the machine is invalid, we can't do anything
if (item == null || !item.Any())
if (item == null || item.Count == 0)
return;
// Create an internal machine

View File

@@ -99,7 +99,7 @@ namespace SabreTools.DatFiles
private void RemoveHeaderFields(List<string> headerFieldNames)
{
// If we have an invalid input, return
if (Header == null || !headerFieldNames.Any())
if (Header == null || headerFieldNames.Count == 0)
return;
foreach (var fieldName in headerFieldNames)
@@ -114,7 +114,7 @@ namespace SabreTools.DatFiles
private static void RemoveFields(Machine? machine, List<string> machineFieldNames)
{
// If we have an invalid input, return
if (machine == null || !machineFieldNames.Any())
if (machine == null || machineFieldNames.Count == 0)
return;
foreach (var fieldName in machineFieldNames)
@@ -136,11 +136,11 @@ namespace SabreTools.DatFiles
// Handle Machine fields
var machine = datItem.GetFieldValue<Machine>(DatItem.MachineKey);
if (machineFieldNames.Any() && machine != null)
if (machineFieldNames.Count > 0 && machine != null)
RemoveFields(machine, machineFieldNames);
// If there are no field names, return
if (itemFieldNames == null || !itemFieldNames.Any())
if (itemFieldNames == null || itemFieldNames.Count == 0)
return;
// If there are no field names for this type or generic, return

View File

@@ -504,7 +504,8 @@ namespace SabreTools.DatFiles
if (dataAreaName != null)
{
// Get existing data areas as a list
var dataAreas = partItems[partName].Read<Models.Metadata.DataArea[]>(Models.Metadata.Part.DataAreaKey)?.ToList() ?? [];
var dataAreasArr = partItems[partName].Read<Models.Metadata.DataArea[]>(Models.Metadata.Part.DataAreaKey) ?? [];
var dataAreas = new List<Models.Metadata.DataArea>(dataAreasArr);
// Find the existing data area to append to, otherwise create a new data area
int dataAreaIndex = dataAreas.FindIndex(da => da.ReadString(Models.Metadata.DataArea.NameKey) == dataAreaName);
@@ -526,7 +527,8 @@ namespace SabreTools.DatFiles
ClearEmptyKeys(aggregateDataArea);
// Get existing roms as a list
var roms = aggregateDataArea.Read<Models.Metadata.Rom[]>(Models.Metadata.DataArea.RomKey)?.ToList() ?? [];
var romsArr = aggregateDataArea.Read<Models.Metadata.Rom[]>(Models.Metadata.DataArea.RomKey) ?? [];
var roms = new List<Models.Metadata.Rom>(romsArr);
// Add the rom to the data area
roms.Add(romItem);
@@ -558,8 +560,9 @@ namespace SabreTools.DatFiles
string? diskAreaName = diskArea.ReadString(Models.Metadata.DiskArea.NameKey);
if (diskAreaName != null)
{
// Get existing data areas as a list
var diskAreas = partItems[partName].Read<Models.Metadata.DiskArea[]>(Models.Metadata.Part.DiskAreaKey)?.ToList() ?? [];
// Get existing disk areas as a list
var diskAreasArr = partItems[partName].Read<Models.Metadata.DiskArea[]>(Models.Metadata.Part.DiskAreaKey) ?? [];
var diskAreas = new List<Models.Metadata.DiskArea>(diskAreasArr);
// Find the existing disk area to append to, otherwise create a new disk area
int diskAreaIndex = diskAreas.FindIndex(da => da.ReadString(Models.Metadata.DiskArea.NameKey) == diskAreaName);
@@ -578,7 +581,8 @@ namespace SabreTools.DatFiles
ClearEmptyKeys(aggregateDiskArea);
// Get existing disks as a list
var disks = aggregateDiskArea.Read<Models.Metadata.Disk[]>(Models.Metadata.DiskArea.DiskKey)?.ToList() ?? [];
var disksArr = aggregateDiskArea.Read<Models.Metadata.Disk[]>(Models.Metadata.DiskArea.DiskKey) ?? [];
var disks = new List<Models.Metadata.Disk>(disksArr);
// Add the disk to the data area
disks.Add(diskItem);
@@ -601,7 +605,8 @@ namespace SabreTools.DatFiles
if (datItem is Models.Metadata.DipSwitch dipSwitchItem)
{
// Get existing dipswitches as a list
var dipSwitches = partItems[partName].Read<Models.Metadata.DipSwitch[]>(Models.Metadata.Part.DipSwitchKey)?.ToList() ?? [];
var dipSwitchesArr = partItems[partName].Read<Models.Metadata.DipSwitch[]>(Models.Metadata.Part.DipSwitchKey) ?? [];
var dipSwitches = new List<Models.Metadata.DipSwitch>(dipSwitchesArr);
// Clear any empty fields
ClearEmptyKeys(dipSwitchItem);
@@ -617,7 +622,8 @@ namespace SabreTools.DatFiles
else if (datItem is Models.Metadata.Feature featureItem)
{
// Get existing features as a list
var features = partItems[partName].Read<Models.Metadata.Feature[]>(Models.Metadata.Part.FeatureKey)?.ToList() ?? [];
var featuresArr = partItems[partName].Read<Models.Metadata.Feature[]>(Models.Metadata.Part.FeatureKey) ?? [];
var features = new List<Models.Metadata.Feature>(featuresArr);
// Clear any empty fields
ClearEmptyKeys(featureItem);
@@ -921,7 +927,8 @@ namespace SabreTools.DatFiles
if (dataAreaName != null)
{
// Get existing data areas as a list
var dataAreas = partItems[partName].Read<Models.Metadata.DataArea[]>(Models.Metadata.Part.DataAreaKey)?.ToList() ?? [];
var dataAreasArr = partItems[partName].Read<Models.Metadata.DataArea[]>(Models.Metadata.Part.DataAreaKey) ?? [];
var dataAreas = new List<Models.Metadata.DataArea>(dataAreasArr);
// Find the existing data area to append to, otherwise create a new data area
int dataAreaIndex = dataAreas.FindIndex(da => da.ReadString(Models.Metadata.DataArea.NameKey) == dataAreaName);
@@ -943,7 +950,8 @@ namespace SabreTools.DatFiles
ClearEmptyKeys(aggregateDataArea);
// Get existing roms as a list
var roms = aggregateDataArea.Read<Models.Metadata.Rom[]>(Models.Metadata.DataArea.RomKey)?.ToList() ?? [];
var romsArr = aggregateDataArea.Read<Models.Metadata.Rom[]>(Models.Metadata.DataArea.RomKey) ?? [];
var roms = new List<Models.Metadata.Rom>(romsArr);
// Add the rom to the data area
roms.Add(romItem);
@@ -975,8 +983,9 @@ namespace SabreTools.DatFiles
string? diskAreaName = diskArea.ReadString(Models.Metadata.DiskArea.NameKey);
if (diskAreaName != null)
{
// Get existing data areas as a list
var diskAreas = partItems[partName].Read<Models.Metadata.DiskArea[]>(Models.Metadata.Part.DiskAreaKey)?.ToList() ?? [];
// Get existing disk areas as a list
var diskAreasArr = partItems[partName].Read<Models.Metadata.DiskArea[]>(Models.Metadata.Part.DiskAreaKey) ?? [];
var diskAreas = new List<Models.Metadata.DiskArea>(diskAreasArr);
// Find the existing disk area to append to, otherwise create a new disk area
int diskAreaIndex = diskAreas.FindIndex(da => da.ReadString(Models.Metadata.DiskArea.NameKey) == diskAreaName);
@@ -995,7 +1004,8 @@ namespace SabreTools.DatFiles
ClearEmptyKeys(aggregateDiskArea);
// Get existing disks as a list
var disks = aggregateDiskArea.Read<Models.Metadata.Disk[]>(Models.Metadata.DiskArea.DiskKey)?.ToList() ?? [];
var disksArr = aggregateDiskArea.Read<Models.Metadata.Disk[]>(Models.Metadata.DiskArea.DiskKey) ?? [];
var disks = new List<Models.Metadata.Disk>(disksArr);
// Add the disk to the data area
disks.Add(diskItem);
@@ -1018,7 +1028,8 @@ namespace SabreTools.DatFiles
if (datItem is Models.Metadata.DipSwitch dipSwitchItem)
{
// Get existing dipswitches as a list
var dipSwitches = partItems[partName].Read<Models.Metadata.DipSwitch[]>(Models.Metadata.Part.DipSwitchKey)?.ToList() ?? [];
var dipSwitchesArr = partItems[partName].Read<Models.Metadata.DipSwitch[]>(Models.Metadata.Part.DipSwitchKey) ?? [];
var dipSwitches = new List<Models.Metadata.DipSwitch>(dipSwitchesArr);
// Clear any empty fields
ClearEmptyKeys(dipSwitchItem);
@@ -1034,7 +1045,8 @@ namespace SabreTools.DatFiles
else if (datItem is Models.Metadata.Feature featureItem)
{
// Get existing features as a list
var features = partItems[partName].Read<Models.Metadata.Feature[]>(Models.Metadata.Part.FeatureKey)?.ToList() ?? [];
var featuresArr = partItems[partName].Read<Models.Metadata.Feature[]>(Models.Metadata.Part.FeatureKey) ?? [];
var features = new List<Models.Metadata.Feature>(featuresArr);
// Clear any empty fields
ClearEmptyKeys(featureItem);
@@ -1421,7 +1433,7 @@ namespace SabreTools.DatFiles
/// </summary>
private static void ClearEmptyKeys(Models.Metadata.DictionaryBase obj)
{
string[] fieldNames = obj.Keys.ToArray();
string[] fieldNames = [.. obj.Keys];
foreach (string fieldName in fieldNames)
{
if (obj[fieldName] == null)

View File

@@ -153,8 +153,13 @@ namespace SabreTools.DatFiles
else if (string.IsNullOrEmpty(name) && string.IsNullOrEmpty(description))
{
string[] splitpath = path.TrimEnd(Path.DirectorySeparatorChar).Split(Path.DirectorySeparatorChar);
Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, splitpath.Last());
Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, splitpath.Last() + (bare ? string.Empty : $" ({date})"));
#if NETFRAMEWORK
Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, splitpath[splitpath.Length - 1]);
Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, splitpath[splitpath.Length - 1] + (bare ? string.Empty : $" ({date})"));
#else
Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, splitpath[^1]);
Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, splitpath[^1] + (bare ? string.Empty : $" ({date})"));
#endif
}
}

View File

@@ -1,7 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using SabreTools.Core;
using SabreTools.DatItems;
@@ -126,7 +125,7 @@ namespace SabreTools.DatFiles.Formats
continue;
// Resolve the names in the block
items = DatItem.ResolveNamesDB(items.ToConcurrentList()).ToArray();
items = [.. DatItem.ResolveNamesDB(items.ToConcurrentList())];
for (int index = 0; index < items.Length; index++)
{

View File

@@ -113,11 +113,11 @@ namespace SabreTools.DatFiles.Formats
// Read in the machine array
jtr.Read();
JsonSerializer js = new();
JArray? machineArray = js.Deserialize<JArray>(jtr);
var js = new JsonSerializer();
JArray machineArray = js.Deserialize<JArray>(jtr) ?? [];
// Loop through each machine object and process
foreach (JObject machineObj in (machineArray ?? []).Cast<JObject>())
foreach (JObject machineObj in machineArray)
{
ReadMachine(machineObj, statsOnly, source, sourceIndex);
}
@@ -179,7 +179,7 @@ namespace SabreTools.DatFiles.Formats
return;
// Loop through each datitem object and process
foreach (JObject itemObj in itemsArr.Cast<JObject>())
foreach (JObject itemObj in itemsArr)
{
ReadItem(itemObj, statsOnly, source, sourceIndex, machine, machineIndex);
}
@@ -480,7 +480,7 @@ namespace SabreTools.DatFiles.Formats
continue;
// Resolve the names in the block
items = DatItem.ResolveNamesDB(items.ToConcurrentList()).ToArray();
items = [.. DatItem.ResolveNamesDB(items.ToConcurrentList())];
for (int index = 0; index < items.Length; index++)
{

View File

@@ -1,6 +1,5 @@
using System;
using System.IO;
using System.Linq;
using System.Text;
using System.Xml;
using System.Xml.Schema;
@@ -310,7 +309,7 @@ namespace SabreTools.DatFiles.Formats
continue;
// Resolve the names in the block
items = DatItem.ResolveNamesDB(items.ToConcurrentList()).ToArray();
items = [.. DatItem.ResolveNamesDB(items.ToConcurrentList())];
for (int index = 0; index < items.Length; index++)
{

View File

@@ -122,9 +122,10 @@ namespace SabreTools.DatFiles.Formats
missingFields.Add(Models.Metadata.DipSwitch.MaskKey);
if (dipSwitch.ValuesSpecified)
{
if (dipSwitch.GetFieldValue<DipValue[]?>(Models.Metadata.DipSwitch.DipValueKey)!.Any(dv => string.IsNullOrEmpty(dv.GetName())))
var dipValues = dipSwitch.GetFieldValue<DipValue[]?>(Models.Metadata.DipSwitch.DipValueKey);
if (dipValues!.Any(dv => string.IsNullOrEmpty(dv.GetName())))
missingFields.Add(Models.Metadata.DipValue.NameKey);
if (dipSwitch.GetFieldValue<DipValue[]?>(Models.Metadata.DipSwitch.DipValueKey)!.Any(dv => string.IsNullOrEmpty(dv.GetStringFieldValue(Models.Metadata.DipValue.ValueKey))))
if (dipValues!.Any(dv => string.IsNullOrEmpty(dv.GetStringFieldValue(Models.Metadata.DipValue.ValueKey))))
missingFields.Add(Models.Metadata.DipValue.ValueKey);
}

View File

@@ -80,7 +80,7 @@ namespace SabreTools.DatFiles
{
get
{
var keys = items.Keys.ToList();
List<string> keys = [.. items.Keys];
keys.Sort(new NaturalComparer());
return keys;
}
@@ -315,7 +315,7 @@ namespace SabreTools.DatFiles
/// </summary>
public void ClearEmpty()
{
var keys = items.Keys.Where(k => k != null).ToList();
List<string> keys = [.. items.Keys];
foreach (string key in keys)
{
// If the key doesn't exist, skip
@@ -345,7 +345,7 @@ namespace SabreTools.DatFiles
/// </summary>
public void ClearMarked()
{
var keys = items.Keys.ToList();
List<string> keys = [.. items.Keys];
foreach (string key in keys)
{
ConcurrentList<DatItem>? oldItemList = items[key];
@@ -525,11 +525,7 @@ namespace SabreTools.DatFiles
public void BucketBy(ItemKey bucketBy, DedupeType dedupeType, bool lower = true, bool norename = true)
{
// If we have a situation where there's no dictionary or no keys at all, we skip
#if NET40_OR_GREATER || NETCOREAPP
if (items == null || items.IsEmpty)
#else
if (items == null || items.Count == 0)
#endif
return;
// If the sorted type isn't the same, we want to sort the dictionary accordingly
@@ -626,7 +622,10 @@ namespace SabreTools.DatFiles
// Try to find duplicates
ConcurrentList<DatItem>? roms = this[key];
return roms?.Any(r => datItem.Equals(r)) == true;
if (roms == null)
return false;
return roms.Any(r => datItem.Equals(r));
}
/// <summary>
@@ -739,32 +738,32 @@ namespace SabreTools.DatFiles
#elif NET40_OR_GREATER
Parallel.ForEach(keys, key =>
#else
foreach (var key in keys)
foreach (var key in keys)
#endif
{
// Get the possibly unsorted list
ConcurrentList<DatItem>? sortedlist = this[key]?.ToConcurrentList();
if (sortedlist == null)
{
// Get the possibly unsorted list
ConcurrentList<DatItem>? sortedlist = this[key]?.ToConcurrentList();
if (sortedlist == null)
#if NET40_OR_GREATER || NETCOREAPP
return;
return;
#else
continue;
continue;
#endif
// Sort the list of items to be consistent
DatItem.Sort(ref sortedlist, false);
// Sort the list of items to be consistent
DatItem.Sort(ref sortedlist, false);
// If we're merging the roms, do so
if (dedupeType == DedupeType.Full || (dedupeType == DedupeType.Game && bucketBy == ItemKey.Machine))
sortedlist = DatItem.Merge(sortedlist);
// If we're merging the roms, do so
if (dedupeType == DedupeType.Full || (dedupeType == DedupeType.Game && bucketBy == ItemKey.Machine))
sortedlist = DatItem.Merge(sortedlist);
// Add the list back to the dictionary
Reset(key);
AddRange(key, sortedlist);
// Add the list back to the dictionary
Reset(key);
AddRange(key, sortedlist);
#if NET40_OR_GREATER || NETCOREAPP
});
});
#else
}
}
#endif
}
@@ -779,19 +778,19 @@ namespace SabreTools.DatFiles
#elif NET40_OR_GREATER
Parallel.ForEach(keys, key =>
#else
foreach (var key in keys)
foreach (var key in keys)
#endif
{
// Get the possibly unsorted list
ConcurrentList<DatItem>? sortedlist = this[key];
{
// Get the possibly unsorted list
ConcurrentList<DatItem>? sortedlist = this[key];
// Sort the list of items to be consistent
if (sortedlist != null)
DatItem.Sort(ref sortedlist, false);
// Sort the list of items to be consistent
if (sortedlist != null)
DatItem.Sort(ref sortedlist, false);
#if NET40_OR_GREATER || NETCOREAPP
});
});
#else
}
}
#endif
}
@@ -1276,7 +1275,7 @@ namespace SabreTools.DatFiles
.ToList();
// If we're checking device references
if (deviceReferences.Any())
if (deviceReferences.Count > 0)
{
// Loop through all names and check the corresponding machines
List<string> newDeviceReferences = [];
@@ -1326,7 +1325,7 @@ namespace SabreTools.DatFiles
}
// If we're checking slotoptions
if (useSlotOptions && slotOptions.Any())
if (useSlotOptions && slotOptions.Count > 0)
{
// Loop through all names and check the corresponding machines
List<string> newSlotOptions = [];

View File

@@ -6,7 +6,6 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
#if NET40_OR_GREATER || NETCOREAPP
using System.Threading.Tasks;
#endif
@@ -150,7 +149,7 @@ namespace SabreTools.DatFiles
{
List<string> keys = [.. _buckets.Keys];
keys.Sort(new NaturalComparer());
return keys.ToArray();
return [.. keys];
}
}
@@ -343,17 +342,17 @@ namespace SabreTools.DatFiles
/// <summary>
/// Get all item to machine mappings
/// </summary>
public (long, long)[] GetItemMachineMappings() => _itemToMachineMapping.Select(kvp => (kvp.Key, kvp.Value)).ToArray();
public (long, long)[] GetItemMachineMappings() => [.. _itemToMachineMapping.Select(kvp => (kvp.Key, kvp.Value))];
/// <summary>
/// Get all item to source mappings
/// </summary>
public (long, long)[] GetItemSourceMappings() => _itemToSourceMapping.Select(kvp => (kvp.Key, kvp.Value)).ToArray();
public (long, long)[] GetItemSourceMappings() => [.. _itemToSourceMapping.Select(kvp => (kvp.Key, kvp.Value))];
/// <summary>
/// Get all items and their indices
/// </summary>
public (long, DatItem)[] GetItems() => _items.Select(kvp => (kvp.Key, kvp.Value)).ToArray();
public (long, DatItem)[] GetItems() => [.. _items.Select(kvp => (kvp.Key, kvp.Value))];
/// <summary>
/// Get the indices and items associated with a bucket name
@@ -455,7 +454,7 @@ namespace SabreTools.DatFiles
/// <summary>
/// Get all machines and their indices
/// </summary>
public (long, Machine)[] GetMachines() => _machines.Select(kvp => (kvp.Key, kvp.Value)).ToArray();
public (long, Machine)[] GetMachines() => [.. _machines.Select(kvp => (kvp.Key, kvp.Value))];
/// <summary>
/// Get a source based on the index
@@ -486,7 +485,7 @@ namespace SabreTools.DatFiles
/// <summary>
/// Get all sources and their indices
/// </summary>
public (long, Source)[] GetSources() => _sources.Select(kvp => (kvp.Key, kvp.Value)).ToArray();
public (long, Source)[] GetSources() => [.. _sources.Select(kvp => (kvp.Key, kvp.Value))];
/// <summary>
/// Remove an item, returning if it could be removed
@@ -1718,7 +1717,7 @@ namespace SabreTools.DatFiles
.ToList();
// If we're checking device references
if (deviceReferences.Any())
if (deviceReferences.Count > 0)
{
// Loop through all names and check the corresponding machines
List<string> newDeviceReferences = [];
@@ -1773,7 +1772,7 @@ namespace SabreTools.DatFiles
}
// If we're checking slotoptions
if (useSlotOptions && slotOptions.Any())
if (useSlotOptions && slotOptions.Count > 0)
{
// Loop through all names and check the corresponding machines
List<string> newSlotOptions = [];

View File

@@ -143,7 +143,7 @@ namespace SabreTools.DatFiles
public void SetFields(DatHeader datHeader)
{
// If we have an invalid input, return
if (datHeader == null || !HeaderFieldMappings.Any())
if (datHeader == null || HeaderFieldMappings.Count == 0)
return;
foreach (var kvp in HeaderFieldMappings)
@@ -159,7 +159,7 @@ namespace SabreTools.DatFiles
public void SetFields(Machine? machine)
{
// If we have an invalid input, return
if (machine == null || !MachineFieldMappings.Any())
if (machine == null || MachineFieldMappings.Count == 0)
return;
foreach (var kvp in MachineFieldMappings)
@@ -181,11 +181,11 @@ namespace SabreTools.DatFiles
#region Common
// Handle Machine fields
if (MachineFieldMappings.Any() && datItem.GetFieldValue<Machine>(DatItem.MachineKey) != null)
if (MachineFieldMappings.Count > 0 && datItem.GetFieldValue<Machine>(DatItem.MachineKey) != null)
SetFields(datItem.GetFieldValue<Machine>(DatItem.MachineKey)!);
// If there are no field names, return
if (ItemFieldMappings == null || !ItemFieldMappings.Any())
if (ItemFieldMappings == null || ItemFieldMappings.Count == 0)
return;
// If there are no field names for this type or generic, return

View File

@@ -169,7 +169,7 @@ namespace SabreTools.DatTools
InternalStopwatch watch = new($"Replacing items in '{intDat.Header.GetStringFieldValue(DatHeader.FileNameKey)}' from the base DAT");
// If we are matching based on DatItem fields of any sort
if (itemFieldNames.Any())
if (itemFieldNames.Count > 0)
{
// For comparison's sake, we want to use CRC as the base bucketing
datFile.Items.BucketBy(ItemKey.CRC, DedupeType.Full);
@@ -201,7 +201,7 @@ namespace SabreTools.DatTools
// Replace fields from the first duplicate, if we have one
if (dupes.Count > 0)
Replacer.ReplaceFields(newDatItem, dupes.First(), itemFieldNames);
Replacer.ReplaceFields(newDatItem, dupes[0], itemFieldNames);
newDatItems.Add(newDatItem);
}
@@ -217,7 +217,7 @@ namespace SabreTools.DatTools
}
// If we are matching based on Machine fields of any sort
if (machineFieldNames.Any())
if (machineFieldNames.Count > 0)
{
// For comparison's sake, we want to use Machine Name as the base bucketing
datFile.Items.BucketBy(ItemKey.Machine, DedupeType.Full);
@@ -286,7 +286,7 @@ namespace SabreTools.DatTools
InternalStopwatch watch = new($"Replacing items in '{intDat.Header.GetStringFieldValue(DatHeader.FileNameKey)}' from the base DAT");
// If we are matching based on DatItem fields of any sort
if (itemFieldNames.Any())
if (itemFieldNames.Count > 0)
{
// For comparison's sake, we want to use CRC as the base bucketing
datFile.ItemsDB.BucketBy(ItemKey.CRC, DedupeType.Full);
@@ -317,7 +317,7 @@ namespace SabreTools.DatTools
// Replace fields from the first duplicate, if we have one
if (dupes.Count > 0)
Replacer.ReplaceFields(datItem.Item2, dupes.First().Item2, itemFieldNames);
Replacer.ReplaceFields(datItem.Item2, dupes[0].Item2, itemFieldNames);
}
#if NET40_OR_GREATER || NETCOREAPP
});
@@ -327,7 +327,7 @@ namespace SabreTools.DatTools
}
// If we are matching based on Machine fields of any sort
if (machineFieldNames.Any())
if (machineFieldNames.Count > 0)
{
// For comparison's sake, we want to use Machine Name as the base bucketing
datFile.ItemsDB.BucketBy(ItemKey.Machine, DedupeType.Full);
@@ -1209,7 +1209,7 @@ namespace SabreTools.DatTools
watch.Stop();
return datFiles.Select(d => d.Header).ToList();
return [.. datFiles.Select(d => d.Header)];
}
/// <summary>
@@ -1221,7 +1221,7 @@ namespace SabreTools.DatTools
private static void AddFromExisting(DatFile addTo, DatFile addFrom, bool delete = false)
{
// Get the list of keys from the DAT
var keys = addFrom.Items.Keys.ToList();
List<string> keys = [.. addFrom.Items.Keys];
foreach (string key in keys)
{
// Add everything from the key to the internal DAT

View File

@@ -1,6 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
#if NET40_OR_GREATER || NETCOREAPP
using System.Threading.Tasks;
@@ -68,9 +67,9 @@ namespace SabreTools.DatTools
// Get a list of all files to process
#if NET20 || NET35
List<string> files = Directory.GetFiles(basePath, "*").ToList();
List<string> files = [.. Directory.GetFiles(basePath, "*")];
#else
List<string> files = Directory.EnumerateFiles(basePath, "*", SearchOption.AllDirectories).ToList();
List<string> files = [.. Directory.EnumerateFiles(basePath, "*", SearchOption.AllDirectories)];
#endif
// Loop through and add the file sizes

View File

@@ -1,6 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
#if NET40_OR_GREATER || NETCOREAPP
using System.Threading.Tasks;
#endif
@@ -111,7 +110,7 @@ namespace SabreTools.DatTools
datFile.Items.BucketBy(ItemKey.SHA1, DedupeType.None);
// Then we want to loop through each of the hashes and see if we can rebuild
var keys = datFile.Items.SortedKeys.ToList();
List<string> keys = [.. datFile.Items.SortedKeys];
foreach (string hash in keys)
{
// Pre-empt any issues that could arise from string length

View File

@@ -782,7 +782,7 @@ namespace SabreTools.DatTools
{
// Get the current machine
var items = datFile.Items[machine];
if (items == null || !items.Any())
if (items == null || items.Count == 0)
{
logger.Error($"{machine} contains no items and will be skipped");
continue;

View File

@@ -1,6 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SabreTools.Core;
using SabreTools.Core.Tools;
using SabreTools.DatFiles;
@@ -59,7 +58,7 @@ namespace SabreTools.DatTools
datFile.Items.BucketBy(ItemKey.SHA1, DedupeType.None);
// Then we want to loop through each of the hashes and see if we can rebuild
var keys = datFile.Items.SortedKeys.ToList();
List<string> keys = [.. datFile.Items.SortedKeys];
foreach (string hash in keys)
{
// Pre-empt any issues that could arise from string length
@@ -144,7 +143,7 @@ namespace SabreTools.DatTools
datFile.ItemsDB.BucketBy(ItemKey.SHA1, DedupeType.None);
// Then we want to loop through each of the hashes and see if we can rebuild
var keys = datFile.ItemsDB.SortedKeys.ToList();
List<string> keys = [.. datFile.ItemsDB.SortedKeys];
foreach (string hash in keys)
{
// Pre-empt any issues that could arise from string length
@@ -217,7 +216,7 @@ namespace SabreTools.DatTools
datFile.Items.BucketBy(ItemKey.Machine, DedupeType.Full);
// Then mark items for removal
var keys = datFile.Items.SortedKeys.ToList();
List<string> keys = [.. datFile.Items.SortedKeys];
foreach (string key in keys)
{
ConcurrentList<DatItem>? items = datFile.Items[key];
@@ -265,7 +264,7 @@ namespace SabreTools.DatTools
datFile.ItemsDB.BucketBy(ItemKey.Machine, DedupeType.Full);
// Then mark items for removal
var keys = datFile.ItemsDB.SortedKeys.ToList();
List<string> keys = [.. datFile.ItemsDB.SortedKeys];
foreach (string key in keys)
{
var items = datFile.ItemsDB.GetItemsForBucket(key);

View File

@@ -1,8 +1,6 @@
using System.IO;
using System.Text;
using SabreTools.Core;
namespace SabreTools.FileTypes.Aaru
{
/// <summary>

View File

@@ -484,7 +484,7 @@ namespace SabreTools.FileTypes.Archives
ds.Dispose();
// Now write the standard footer
sw.Write(baseFile.CRC!.Reverse().ToArray());
sw.Write([.. baseFile.CRC!.Reverse()]);
sw.Write((uint)(baseFile.Size ?? 0));
// Dispose of everything

View File

@@ -4,7 +4,6 @@ using System.IO;
#if NET462_OR_GREATER || NETCOREAPP
using System.Linq;
using SabreTools.Hashing;
using SabreTools.Matching;
using SabreTools.Matching.Compare;
using SharpCompress.Archives;
using SharpCompress.Archives.Rar;
@@ -252,7 +251,7 @@ namespace SabreTools.FileTypes.Archives
try
{
SharpCompress.Archives.Rar.RarArchive ra = SharpCompress.Archives.Rar.RarArchive.Open(this.Filename, new ReaderOptions { LeaveStreamOpen = false });
List<RarArchiveEntry> rarEntries = ra.Entries.OrderBy(e => e.Key, new NaturalReversedComparer()).ToList();
List<RarArchiveEntry> rarEntries = [.. ra.Entries.OrderBy(e => e.Key ?? string.Empty, new NaturalReversedComparer())];
string? lastRarEntry = null;
foreach (RarArchiveEntry entry in rarEntries)
{

View File

@@ -705,7 +705,7 @@ namespace SabreTools.FileTypes.Archives
zipFile.ZipFileCreate(tempFile);
// Get the order for the entries with the new file
List<string> keys = inputIndexMap.Keys.ToList();
List<string> keys = [.. inputIndexMap.Keys];
keys.Sort(CompressUtils.TrrntZipStringCompare);
// Copy over all files to the new archive

View File

@@ -6,7 +6,6 @@ using System.Linq;
using Compress;
using SabreTools.Core.Tools;
using SabreTools.Hashing;
using SabreTools.Matching;
using SabreTools.Matching.Compare;
using SharpCompress.Archives;
using SharpCompress.Archives.Tar;
@@ -241,7 +240,7 @@ namespace SabreTools.FileTypes.Archives
try
{
TarArchive ta = TarArchive.Open(this.Filename!, new ReaderOptions { LeaveStreamOpen = false });
List<TarArchiveEntry> tarEntries = ta.Entries.OrderBy(e => e.Key, new NaturalReversedComparer()).ToList();
List<TarArchiveEntry> tarEntries = ta.Entries.OrderBy(e => e.Key ?? string.Empty, new NaturalReversedComparer()).ToList();
string? lastTarEntry = null;
foreach (TarArchiveEntry entry in tarEntries)
{
@@ -340,7 +339,7 @@ namespace SabreTools.FileTypes.Archives
oldTarFile = TarArchive.Open(archiveFileName);
// Get a list of all current entries
var entries = oldTarFile.Entries.Select(i => i.Key).ToList();
List<string> entries = [.. oldTarFile.Entries.Select(i => i.Key)];
// Map all inputs to index
var inputIndexMap = new Dictionary<string, int>();
@@ -367,7 +366,7 @@ namespace SabreTools.FileTypes.Archives
}
// Get the order for the entries with the new file
List<string> keys = inputIndexMap.Keys.ToList();
List<string> keys = [.. inputIndexMap.Keys];
keys.Sort(CompressUtils.TrrntZipStringCompare);
// Copy over all files to the new archive
@@ -480,7 +479,7 @@ namespace SabreTools.FileTypes.Archives
}
// Sort the keys in TZIP order
List<string> keys = inputIndexMap.Keys.ToList();
List<string> keys = [.. inputIndexMap.Keys];
keys.Sort(CompressUtils.TrrntZipStringCompare);
// Now add all of the files in order
@@ -506,7 +505,7 @@ namespace SabreTools.FileTypes.Archives
oldTarFile = TarArchive.Open(archiveFileName);
// Get a list of all current entries
var entries = oldTarFile.Entries.Select(i => i.Key).ToList();
List<string> entries = [.. oldTarFile.Entries.Select(i => i.Key)];
// Map all inputs to index
var inputIndexMap = new Dictionary<string, int>();
@@ -539,7 +538,7 @@ namespace SabreTools.FileTypes.Archives
}
// Get the order for the entries with the new file
List<string> keys = inputIndexMap.Keys.ToList();
List<string> keys = [.. inputIndexMap.Keys];
keys.Sort(CompressUtils.TrrntZipStringCompare);
// Copy over all files to the new archive

View File

@@ -68,7 +68,7 @@ namespace SabreTools.FileTypes.Archives
#if NET20 || NET35 || NET40
// Extract all files to the temp directory
var zf = new Zip();
ZipReturn zr = zf.ZipFileOpen(this.Filename!, -1, true);
ZipReturn zr = zf.ZipFileOpen(Filename!, -1, true);
if (zr != ZipReturn.ZipGood)
throw new Exception(CompressUtils.ZipErrorMessageText(zr));
@@ -124,7 +124,7 @@ namespace SabreTools.FileTypes.Archives
encounteredErrors = false;
#else
// Extract all files to the temp directory
var zf = ZipFile.OpenRead(this.Filename);
var zf = ZipFile.OpenRead(Filename!);
if (zf == null)
throw new Exception($"Could not open {Filename} as a zip file");
@@ -221,7 +221,7 @@ namespace SabreTools.FileTypes.Archives
public override (Stream?, string?) GetEntryStream(string entryName)
{
// If we have an invalid file
if (this.Filename == null)
if (Filename == null)
return (null, null);
try
@@ -231,7 +231,7 @@ namespace SabreTools.FileTypes.Archives
#if NET20 || NET35 || NET40
var zf = new Zip();
ZipReturn zr = zf.ZipFileOpen(this.Filename!, -1, true);
ZipReturn zr = zf.ZipFileOpen(Filename!, -1, true);
if (zr != ZipReturn.ZipGood)
throw new Exception(CompressUtils.ZipErrorMessageText(zr));
@@ -261,7 +261,7 @@ namespace SabreTools.FileTypes.Archives
zf.ZipFileClose();
return (stream, realEntry);
#else
var zf = ZipFile.OpenRead(this.Filename);
var zf = ZipFile.OpenRead(Filename);
if (zf == null)
throw new Exception($"Could not open {Filename} as a zip file");
@@ -311,17 +311,17 @@ namespace SabreTools.FileTypes.Archives
public override List<BaseFile>? GetChildren()
{
// If we have an invalid file
if (this.Filename == null)
if (Filename == null)
return null;
var found = new List<BaseFile>();
string? gamename = Path.GetFileNameWithoutExtension(this.Filename);
string? gamename = Path.GetFileNameWithoutExtension(Filename);
try
{
#if NET20 || NET35 || NET40
var zf = new Zip();
ZipReturn zr = zf.ZipFileOpen(this.Filename!, -1, true);
ZipReturn zr = zf.ZipFileOpen(Filename!, -1, true);
if (zr != ZipReturn.ZipGood)
throw new Exception(CompressUtils.ZipErrorMessageText(zr));
@@ -347,7 +347,7 @@ namespace SabreTools.FileTypes.Archives
// If we get a read error, log it and continue
if (zr != ZipReturn.ZipGood || readStream == null)
{
logger.Warning($"An error occurred while reading archive {this.Filename}: Zip Error - {zr}");
logger.Warning($"An error occurred while reading archive {Filename}: Zip Error - {zr}");
continue;
}
@@ -355,7 +355,7 @@ namespace SabreTools.FileTypes.Archives
var zipEntryRom = new BaseFile();
// Perform a quickscan, if flagged to
if (this.AvailableHashTypes.Length == 1 && this.AvailableHashTypes[0] == HashType.CRC32)
if (AvailableHashTypes.Length == 1 && AvailableHashTypes[0] == HashType.CRC32)
{
zipEntryRom.Size = (long)localFile.UncompressedSize;
zipEntryRom.CRC = localFile.CRC;
@@ -365,7 +365,7 @@ namespace SabreTools.FileTypes.Archives
{
zipEntryRom = GetInfo(readStream,
size: (long)localFile.UncompressedSize,
hashes: this.AvailableHashTypes,
hashes: AvailableHashTypes,
keepReadOpen: true);
}
@@ -380,7 +380,7 @@ namespace SabreTools.FileTypes.Archives
zr = zf.ZipFileCloseReadStream();
zf.ZipFileClose();
#else
var zf = ZipFile.OpenRead(this.Filename);
var zf = ZipFile.OpenRead(Filename);
if (zf == null)
throw new Exception($"Could not open {Filename} as a zip file");
@@ -408,7 +408,7 @@ namespace SabreTools.FileTypes.Archives
var zipEntryRom = new BaseFile();
// Perform a quickscan, if flagged to
if (this.AvailableHashTypes.Length == 1 && this.AvailableHashTypes[0] == HashType.CRC32)
if (AvailableHashTypes.Length == 1 && AvailableHashTypes[0] == HashType.CRC32)
{
zipEntryRom.Size = localFile.Length;
#if NETCOREAPP
@@ -422,7 +422,7 @@ namespace SabreTools.FileTypes.Archives
{
zipEntryRom = GetInfo(readStream,
size: localFile.Length,
hashes: this.AvailableHashTypes,
hashes: AvailableHashTypes,
keepReadOpen: true);
}
@@ -450,7 +450,7 @@ namespace SabreTools.FileTypes.Archives
public override List<string> GetEmptyFolders()
{
// If we have an invalid file
if (this.Filename == null)
if (Filename == null)
return [];
List<string> empties = [];
@@ -459,7 +459,7 @@ namespace SabreTools.FileTypes.Archives
{
#if NET20 || NET35 || NET40
var zf = new Zip();
ZipReturn zr = zf.ZipFileOpen(this.Filename!, -1, true);
ZipReturn zr = zf.ZipFileOpen(Filename!, -1, true);
if (zr != ZipReturn.ZipGood)
throw new Exception(CompressUtils.ZipErrorMessageText(zr));
@@ -488,7 +488,7 @@ namespace SabreTools.FileTypes.Archives
}
}
#else
var zf = ZipFile.OpenRead(this.Filename);
var zf = ZipFile.OpenRead(Filename);
if (zf == null)
throw new Exception($"Could not open {Filename} as a zip file");
@@ -544,7 +544,7 @@ namespace SabreTools.FileTypes.Archives
public override bool IsTorrent()
{
Zip zf = new();
ZipReturn zr = zf.ZipFileOpen(this.Filename!, -1, true);
ZipReturn zr = zf.ZipFileOpen(Filename!, -1, true);
if (zr != ZipReturn.ZipGood)
throw new Exception(CompressUtils.ZipErrorMessageText(zr));
@@ -665,7 +665,7 @@ namespace SabreTools.FileTypes.Archives
zipFile.ZipFileCreate(tempFile);
// Get the order for the entries with the new file
List<string> keys = inputIndexMap.Keys.ToList();
List<string> keys = [.. inputIndexMap.Keys];
keys.Sort(CompressUtils.TrrntZipStringCompare);
// Copy over all files to the new archive
@@ -801,7 +801,7 @@ namespace SabreTools.FileTypes.Archives
}
// Sort the keys in TZIP order
List<string> keys = inputIndexMap.Keys.ToList();
List<string> keys = [.. inputIndexMap.Keys];
keys.Sort(CompressUtils.TrrntZipStringCompare);
// Now add all of the files in order
@@ -880,7 +880,7 @@ namespace SabreTools.FileTypes.Archives
zipFile.ZipFileCreate(tempFile);
// Get the order for the entries with the new file
List<string> keys = inputIndexMap.Keys.ToList();
List<string> keys = [.. inputIndexMap.Keys];
keys.Sort(CompressUtils.TrrntZipStringCompare);
// Copy over all files to the new archive

View File

@@ -1,5 +1,5 @@
using System.IO;
using System.Linq;
using System;
using System.IO;
using SabreTools.Core.Tools;
using SabreTools.FileTypes.Aaru;
using SabreTools.FileTypes.CHD;
@@ -328,13 +328,13 @@ namespace SabreTools.FileTypes
var baseFile = new BaseFile()
{
Size = size,
CRC = hashes.Contains(HashType.CRC32) ? TextHelper.StringToByteArray(hashDict[HashType.CRC32]) : null,
MD5 = hashes.Contains(HashType.MD5) ? TextHelper.StringToByteArray(hashDict[HashType.MD5]) : null,
SHA1 = hashes.Contains(HashType.SHA1) ? TextHelper.StringToByteArray(hashDict[HashType.SHA1]) : null,
SHA256 = hashes.Contains(HashType.SHA256) ? TextHelper.StringToByteArray(hashDict[HashType.SHA256]) : null,
SHA384 = hashes.Contains(HashType.SHA384) ? TextHelper.StringToByteArray(hashDict[HashType.SHA384]) : null,
SHA512 = hashes.Contains(HashType.SHA512) ? TextHelper.StringToByteArray(hashDict[HashType.SHA512]) : null,
SpamSum = hashes.Contains(HashType.SpamSum) ? TextHelper.StringToByteArray(hashDict[HashType.SpamSum]) : null,
CRC = hashDict.ContainsKey(HashType.CRC32) ? TextHelper.StringToByteArray(hashDict[HashType.CRC32]) : null,
MD5 = hashDict.ContainsKey(HashType.MD5) ? TextHelper.StringToByteArray(hashDict[HashType.MD5]) : null,
SHA1 = hashDict.ContainsKey(HashType.SHA1) ? TextHelper.StringToByteArray(hashDict[HashType.SHA1]) : null,
SHA256 = hashDict.ContainsKey(HashType.SHA256) ? TextHelper.StringToByteArray(hashDict[HashType.SHA256]) : null,
SHA384 = hashDict.ContainsKey(HashType.SHA384) ? TextHelper.StringToByteArray(hashDict[HashType.SHA384]) : null,
SHA512 = hashDict.ContainsKey(HashType.SHA512) ? TextHelper.StringToByteArray(hashDict[HashType.SHA512]) : null,
SpamSum = hashDict.ContainsKey(HashType.SpamSum) ? TextHelper.StringToByteArray(hashDict[HashType.SpamSum]) : null,
};
// Deal with the input stream

View File

@@ -1,6 +1,5 @@
using System;
using System.Collections.Generic;
using System.Linq;
using SabreTools.Core.Filter;
using SabreTools.DatFiles;
using SabreTools.DatItems;
@@ -49,7 +48,7 @@ namespace SabreTools.Filtering
public void PopulateFromList(List<string> inputs)
{
// If there are no inputs, just skip
if (inputs == null || !inputs.Any())
if (inputs == null || inputs.Count == 0)
return;
InternalStopwatch watch = new("Populating extras from list");
@@ -59,7 +58,7 @@ namespace SabreTools.Filtering
ExtraIniItem item = new();
// If we don't even have a possible field and file combination
if (!input.Contains(':'))
if (!input.Contains(":"))
{
logger.Warning($"'{input}` is not a valid INI extras string. Valid INI extras strings are of the form 'key:value'. Please refer to README.1ST or the help feature for more details.");
return;
@@ -90,7 +89,7 @@ namespace SabreTools.Filtering
public bool ApplyExtras(DatFile datFile, bool throwOnError = false)
{
// If we have no extras, don't attempt to apply and just return true
if (Items == null || !Items.Any())
if (Items == null || Items.Count == 0)
return true;
var watch = new InternalStopwatch("Applying extra mappings to DAT");
@@ -153,7 +152,7 @@ namespace SabreTools.Filtering
public bool ApplyExtrasDB(DatFile datFile, bool throwOnError = false)
{
// If we have no extras, don't attempt to apply and just return true
if (Items == null || !Items.Any())
if (Items == null || Items.Count == 0)
return true;
var watch = new InternalStopwatch("Applying extra mappings to DAT");

View File

@@ -1,4 +1,3 @@
using SabreTools.Core;
using SabreTools.Core.Tools;
using SabreTools.DatItems;
using SabreTools.DatItems.Formats;

View File

@@ -1,7 +1,5 @@
using SabreTools.DatItems;
using SabreTools.DatItems.Formats;
using SabreTools.Filtering;
using Xunit;
namespace SabreTools.Test.Filtering
{

View File

@@ -1,6 +1,4 @@
using System.Collections.Generic;
using SabreTools.Core;
using SabreTools.DatItems;
using SabreTools.DatItems.Formats;
using SabreTools.Filtering;

View File

@@ -468,7 +468,7 @@ Reset the internal state: reset();";
}
// If we had any unmapped formats, return an issue
if (unmappedFormats.Any())
if (unmappedFormats.Count > 0)
{
string message = $"The following inputs were invalid formats: {string.Join(", ", unmappedFormats)}";
return (false, message);