Perform mass cleanup

This is a cleanup based on new .NET functionality (from .NET 6 and 7) as well as a large number of simplifications and previously missed items that were caught during the cleanup.
This commit is contained in:
Matt Nadareski
2023-04-19 16:39:58 -04:00
parent fd5fd79b95
commit 728b5d6b27
95 changed files with 1353 additions and 1572 deletions

View File

@@ -22,7 +22,7 @@ namespace SabreTools.DatTools
/// <summary>
/// Logging object
/// </summary>
private static readonly Logger logger = new Logger();
private static readonly Logger logger = new();
#endregion
@@ -37,7 +37,7 @@ namespace SabreTools.DatTools
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
{
ConcurrentList<DatItem> items = datFile.Items[key];
ConcurrentList<DatItem> newItems = new ConcurrentList<DatItem>();
ConcurrentList<DatItem> newItems = new();
foreach (DatItem item in items)
{
DatItem newItem = item;
@@ -75,7 +75,7 @@ namespace SabreTools.DatTools
List<DatItemField> datItemFields,
bool onlySame)
{
InternalStopwatch watch = new InternalStopwatch($"Replacing items in '{intDat.Header.FileName}' from the base DAT");
InternalStopwatch watch = new($"Replacing items in '{intDat.Header.FileName}' from the base DAT");
// If we are matching based on DatItem fields of any sort
if (datItemFields.Any())
@@ -88,7 +88,7 @@ namespace SabreTools.DatTools
Parallel.ForEach(intDat.Items.Keys, Globals.ParallelOptions, key =>
{
ConcurrentList<DatItem> datItems = intDat.Items[key];
ConcurrentList<DatItem> newDatItems = new ConcurrentList<DatItem>();
ConcurrentList<DatItem> newDatItems = new();
foreach (DatItem datItem in datItems)
{
ConcurrentList<DatItem> dupes = datFile.Items.GetDuplicates(datItem, sorted: true);
@@ -118,11 +118,11 @@ namespace SabreTools.DatTools
Parallel.ForEach(intDat.Items.Keys, Globals.ParallelOptions, key =>
{
ConcurrentList<DatItem> datItems = intDat.Items[key];
ConcurrentList<DatItem> newDatItems = new ConcurrentList<DatItem>();
ConcurrentList<DatItem> newDatItems = new();
foreach (DatItem datItem in datItems)
{
DatItem newDatItem = datItem.Clone() as DatItem;
if (datFile.Items.ContainsKey(key) && datFile.Items[key].Count() > 0)
if (datFile.Items.ContainsKey(key) && datFile.Items[key].Count > 0)
Replacer.ReplaceFields(newDatItem.Machine, datFile.Items[key][0].Machine, machineFields, onlySame);
newDatItems.Add(newDatItem);
@@ -151,7 +151,7 @@ namespace SabreTools.DatTools
else
datFile.Items.BucketBy(ItemKey.CRC, DedupeType.None);
InternalStopwatch watch = new InternalStopwatch($"Comparing '{intDat.Header.FileName}' to base DAT");
InternalStopwatch watch = new($"Comparing '{intDat.Header.FileName}' to base DAT");
            // For comparison's sake, we want to use the base bucketing
if (useGames)
@@ -195,7 +195,7 @@ namespace SabreTools.DatTools
else
{
ConcurrentList<DatItem> datItems = intDat.Items[key];
ConcurrentList<DatItem> keepDatItems = new ConcurrentList<DatItem>();
ConcurrentList<DatItem> keepDatItems = new();
foreach (DatItem datItem in datItems)
{
if (!datFile.Items.HasDuplicates(datItem, true))
@@ -220,13 +220,13 @@ namespace SabreTools.DatTools
public static List<DatFile> DiffCascade(DatFile datFile, List<DatHeader> datHeaders)
{
// Create a list of DatData objects representing output files
List<DatFile> outDats = new List<DatFile>();
List<DatFile> outDats = new();
// Ensure the current DatFile is sorted optimally
datFile.Items.BucketBy(ItemKey.CRC, DedupeType.None);
// Loop through each of the inputs and get or create a new DatData object
InternalStopwatch watch = new InternalStopwatch("Initializing and filling all output DATs");
InternalStopwatch watch = new("Initializing and filling all output DATs");
// Create the DatFiles from the set of headers
DatFile[] outDatsArray = new DatFile[datHeaders.Count];
@@ -262,7 +262,7 @@ namespace SabreTools.DatTools
/// <param name="inputs">List of inputs to write out from</param>
public static DatFile DiffDuplicates(DatFile datFile, List<ParentablePath> inputs)
{
InternalStopwatch watch = new InternalStopwatch("Initializing duplicate DAT");
InternalStopwatch watch = new("Initializing duplicate DAT");
// Fill in any information not in the base DAT
if (string.IsNullOrWhiteSpace(datFile.Header.FileName))
@@ -330,7 +330,7 @@ namespace SabreTools.DatTools
/// <param name="inputs">List of inputs to write out from</param>
public static List<DatFile> DiffIndividuals(DatFile datFile, List<ParentablePath> inputs)
{
InternalStopwatch watch = new InternalStopwatch("Initializing all individual DATs");
InternalStopwatch watch = new("Initializing all individual DATs");
// Fill in any information not in the base DAT
if (string.IsNullOrWhiteSpace(datFile.Header.FileName))
@@ -403,7 +403,7 @@ namespace SabreTools.DatTools
/// <param name="inputs">List of inputs to write out from</param>
public static DatFile DiffNoDuplicates(DatFile datFile, List<ParentablePath> inputs)
{
InternalStopwatch watch = new InternalStopwatch("Initializing no duplicate DAT");
InternalStopwatch watch = new("Initializing no duplicate DAT");
// Fill in any information not in the base DAT
if (string.IsNullOrWhiteSpace(datFile.Header.FileName))
@@ -473,7 +473,7 @@ namespace SabreTools.DatTools
public static List<DatHeader> PopulateUserData(DatFile datFile, List<ParentablePath> inputs)
{
DatFile[] datFiles = new DatFile[inputs.Count];
InternalStopwatch watch = new InternalStopwatch("Processing individual DATs");
InternalStopwatch watch = new("Processing individual DATs");
// Parse all of the DATs into their own DatFiles in the array
Parallel.For(0, inputs.Count, Globals.ParallelOptions, i =>

View File

@@ -26,7 +26,7 @@ namespace SabreTools.DatTools
/// <summary>
/// Logging object
/// </summary>
private static readonly Logger logger = new Logger();
private static readonly Logger logger = new();
#endregion
@@ -51,7 +51,7 @@ namespace SabreTools.DatTools
long totalSize = 0;
long currentSize = 0;
InternalStopwatch watch = new InternalStopwatch($"Populating DAT from {basePath}");
InternalStopwatch watch = new($"Populating DAT from {basePath}");
// Process the input
if (Directory.Exists(basePath))
@@ -187,14 +187,14 @@ namespace SabreTools.DatTools
return false;
// Check the file as if it were in a depot
GZipArchive gzarc = new GZipArchive(item);
GZipArchive gzarc = new(item);
BaseFile baseFile = gzarc.GetTorrentGZFileInfo();
// If the rom is valid, add it
if (baseFile != null && baseFile.Filename != null)
{
// Add the list if it doesn't exist already
Rom rom = new Rom(baseFile);
Rom rom = new(baseFile);
datFile.Items.Add(rom.GetKey(ItemKey.CRC), rom);
logger.Verbose($"File added: {Path.GetFileNameWithoutExtension(item)}");
}
@@ -236,7 +236,7 @@ namespace SabreTools.DatTools
/// <param name="archive">BaseArchive to get blanks from</param>
private static void ProcessArchiveBlanks(DatFile datFile, string item, string basePath, BaseArchive archive)
{
List<string> empties = new List<string>();
List<string> empties = new();
// Get the parent path for all items
string parent = (Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item);
@@ -248,7 +248,7 @@ namespace SabreTools.DatTools
            // Add all of the found empties to the DAT
Parallel.ForEach(empties, Globals.ParallelOptions, empty =>
{
Rom emptyRom = new Rom(Path.Combine(empty, "_"), item);
Rom emptyRom = new(Path.Combine(empty, "_"), item);
ProcessFileHelper(datFile, item, emptyRom, basePath, parent);
});
}
@@ -324,7 +324,7 @@ namespace SabreTools.DatTools
private static void ProcessFileHelper(DatFile datFile, string item, DatItem datItem, string basepath, string parent)
{
// If we didn't get an accepted parsed type somehow, cancel out
List<ItemType> parsed = new List<ItemType> { ItemType.Disk, ItemType.Media, ItemType.Rom };
List<ItemType> parsed = new() { ItemType.Disk, ItemType.File, ItemType.Media, ItemType.Rom };
if (!parsed.Contains(datItem.ItemType))
return;

View File

@@ -20,7 +20,7 @@ namespace SabreTools.DatTools
/// <summary>
/// Logging object
/// </summary>
private static readonly Logger logger = new Logger();
private static readonly Logger logger = new();
#endregion
@@ -58,7 +58,7 @@ namespace SabreTools.DatTools
bool statsOnly = false,
bool throwOnError = false)
{
ParentablePath path = new ParentablePath(filename.Trim('"'));
ParentablePath path = new(filename.Trim('"'));
ParseInto(datFile, path, indexId, keep, keepext, quotes, statsOnly, throwOnError);
}
@@ -102,7 +102,7 @@ namespace SabreTools.DatTools
datFile.Header.DatFormat = datFile.Header.DatFormat == 0 ? currentPathFormat : datFile.Header.DatFormat;
datFile.Items.SetBucketedBy(ItemKey.CRC); // Setting this because it can reduce issues later
InternalStopwatch watch = new InternalStopwatch($"Parsing '{currentPath}' into internal DAT");
InternalStopwatch watch = new($"Parsing '{currentPath}' into internal DAT");
// Now parse the correct type of DAT
try
@@ -215,7 +215,7 @@ namespace SabreTools.DatTools
return DatFormat.EverdriveSMDB;
// If we have an INI-based DAT
else if (first.Contains("[") && first.Contains("]"))
else if (first.Contains('[') && first.Contains(']'))
return DatFormat.RomCenter;
// If we have a listroms DAT

View File

@@ -26,7 +26,7 @@ namespace SabreTools.DatTools
/// <summary>
/// Logging object
/// </summary>
private static readonly Logger logger = new Logger();
private static readonly Logger logger = new();
#endregion
@@ -73,10 +73,10 @@ namespace SabreTools.DatTools
#region Rebuild from depots in order
string format = FromOutputFormat(outputFormat) ?? string.Empty;
InternalStopwatch watch = new InternalStopwatch($"Rebuilding all files to {format}");
InternalStopwatch watch = new($"Rebuilding all files to {format}");
// Now loop through and get only directories from the input paths
List<string> directories = new List<string>();
List<string> directories = new();
Parallel.ForEach(inputs, Globals.ParallelOptions, input =>
{
// Add to the list if the input is a directory
@@ -126,7 +126,7 @@ namespace SabreTools.DatTools
continue;
// If we have a path, we want to try to get the rom information
GZipArchive archive = new GZipArchive(foundpath);
GZipArchive archive = new(foundpath);
BaseFile fileinfo = archive.GetTorrentGZFileInfo();
// If the file information is null, then we continue
@@ -215,7 +215,7 @@ namespace SabreTools.DatTools
#region Rebuild from sources in order
string format = FromOutputFormat(outputFormat) ?? string.Empty;
InternalStopwatch watch = new InternalStopwatch($"Rebuilding all files to {format}");
InternalStopwatch watch = new($"Rebuilding all files to {format}");
// Now loop through all of the files in all of the inputs
foreach (string input in inputs)
@@ -287,8 +287,8 @@ namespace SabreTools.DatTools
List<BaseFile> entries = null;
// Get the TGZ and TXZ status for later
GZipArchive tgz = new GZipArchive(file);
XZArchive txz = new XZArchive(file);
GZipArchive tgz = new(file);
XZArchive txz = new(file);
bool isSingleTorrent = tgz.IsTorrent() || txz.IsTorrent();
// Get the base archive first
@@ -431,11 +431,11 @@ namespace SabreTools.DatTools
if (rule.Tests != null && rule.Tests.Length != 0)
{
// If the file could be transformed correctly
MemoryStream transformStream = new MemoryStream();
MemoryStream transformStream = new();
if (rule.TransformStream(fileStream, transformStream, keepReadOpen: true, keepWriteOpen: true))
{
// Get the file informations that we will be using
Rom headerless = new Rom(BaseFile.GetInfo(transformStream, keepReadOpen: true));
Rom headerless = new(BaseFile.GetInfo(transformStream, keepReadOpen: true));
// If we have duplicates and we're not filtering
if (ShouldRebuild(datFile, headerless, transformStream, false, out dupes))
@@ -508,7 +508,7 @@ namespace SabreTools.DatTools
string machinename = null;
// Get the item from the current file
Rom item = new Rom(BaseFile.GetInfo(stream, keepReadOpen: true));
Rom item = new(BaseFile.GetInfo(stream, keepReadOpen: true));
item.Machine.Name = Path.GetFileNameWithoutExtension(item.Name);
item.Machine.Description = Path.GetFileNameWithoutExtension(item.Name);
@@ -543,7 +543,7 @@ namespace SabreTools.DatTools
private static bool RebuildTorrentGzip(DatFile datFile, DatItem datItem, string file, string outDir, OutputFormat outputFormat, bool? isZip)
{
// If we have a very specific TGZ->TGZ case, just copy it accordingly
GZipArchive tgz = new GZipArchive(file);
GZipArchive tgz = new(file);
BaseFile tgzRom = tgz.GetTorrentGZFileInfo();
if (isZip == false && tgzRom != null && (outputFormat == OutputFormat.TorrentGzip || outputFormat == OutputFormat.TorrentGzipRomba))
{
@@ -587,7 +587,7 @@ namespace SabreTools.DatTools
private static bool RebuildTorrentXz(DatFile datFile, DatItem datItem, string file, string outDir, OutputFormat outputFormat, bool? isZip)
{
            // If we have a very specific TXZ->TXZ case, just copy it accordingly
XZArchive txz = new XZArchive(file);
XZArchive txz = new(file);
BaseFile txzRom = txz.GetTorrentXZFileInfo();
if (isZip == false && txzRom != null && (outputFormat == OutputFormat.TorrentXZ || outputFormat == OutputFormat.TorrentXZRomba))
{

View File

@@ -26,7 +26,7 @@ namespace SabreTools.DatTools
/// <summary>
/// Logging object
/// </summary>
private static readonly Logger logger = new Logger();
private static readonly Logger logger = new();
#endregion
@@ -43,7 +43,7 @@ namespace SabreTools.DatTools
if (datFile.Items.TotalCount == 0)
return (null, null);
InternalStopwatch watch = new InternalStopwatch($"Splitting DAT by extension");
InternalStopwatch watch = new($"Splitting DAT by extension");
// Make sure all of the extensions don't have a dot at the beginning
var newExtA = extA.Select(s => s.TrimStart('.').ToLowerInvariant());
@@ -98,10 +98,10 @@ namespace SabreTools.DatTools
public static Dictionary<DatItemField, DatFile> SplitByHash(DatFile datFile)
{
// Create each of the respective output DATs
InternalStopwatch watch = new InternalStopwatch($"Splitting DAT by best available hashes");
InternalStopwatch watch = new($"Splitting DAT by best available hashes");
// Create the set of field-to-dat mappings
Dictionary<DatItemField, DatFile> fieldDats = new Dictionary<DatItemField, DatFile>();
Dictionary<DatItemField, DatFile> fieldDats = new();
// TODO: Can this be made into a loop?
fieldDats[DatItemField.Status] = DatFile.Create(datFile.Header.CloneStandard());
@@ -223,7 +223,7 @@ namespace SabreTools.DatTools
/// <returns>True if split succeeded, false otherwise</returns>
public static bool SplitByLevel(DatFile datFile, string outDir, bool shortname, bool basedat)
{
InternalStopwatch watch = new InternalStopwatch($"Splitting DAT by level");
InternalStopwatch watch = new($"Splitting DAT by level");
// First, bucket by games so that we can do the right thing
datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None, lower: false, norename: true);
@@ -271,7 +271,7 @@ namespace SabreTools.DatTools
/// <returns>-1 for a coming before b, 0 for a == b, 1 for a coming after b</returns>
private static int SplitByLevelSort(string a, string b)
{
NaturalComparer nc = new NaturalComparer();
NaturalComparer nc = new();
int adeep = a.Count(c => c == '/' || c == '\\');
int bdeep = b.Count(c => c == '/' || c == '\\');
@@ -321,7 +321,7 @@ namespace SabreTools.DatTools
public static (DatFile lessThan, DatFile greaterThan) SplitBySize(DatFile datFile, long radix)
{
// Create each of the respective output DATs
InternalStopwatch watch = new InternalStopwatch($"Splitting DAT by size");
InternalStopwatch watch = new($"Splitting DAT by size");
DatFile lessThan = DatFile.Create(datFile.Header.CloneStandard());
lessThan.Header.FileName += $" (less than {radix})";
@@ -375,7 +375,7 @@ namespace SabreTools.DatTools
return new List<DatFile>();
// Create each of the respective output DATs
InternalStopwatch watch = new InternalStopwatch($"Splitting DAT by total size");
InternalStopwatch watch = new($"Splitting DAT by total size");
// Sort the DatFile by machine name
datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None);
@@ -384,7 +384,7 @@ namespace SabreTools.DatTools
var keys = datFile.Items.SortedKeys;
// Get the output list
List<DatFile> datFiles = new List<DatFile>();
List<DatFile> datFiles = new();
// Initialize everything
long currentSize = 0;
@@ -459,13 +459,13 @@ namespace SabreTools.DatTools
public static Dictionary<ItemType, DatFile> SplitByType(DatFile datFile)
{
// Create each of the respective output DATs
InternalStopwatch watch = new InternalStopwatch($"Splitting DAT by item type");
InternalStopwatch watch = new($"Splitting DAT by item type");
// Create the set of type-to-dat mappings
Dictionary<ItemType, DatFile> typeDats = new Dictionary<ItemType, DatFile>();
Dictionary<ItemType, DatFile> typeDats = new();
// We only care about a subset of types
List<ItemType> outputTypes = new List<ItemType>
List<ItemType> outputTypes = new()
{
ItemType.Disk,
ItemType.Media,

View File

@@ -7,7 +7,6 @@ using System.Threading.Tasks;
using SabreTools.Core;
using SabreTools.DatFiles;
using SabreTools.DatItems;
using SabreTools.IO;
using SabreTools.Logging;
using SabreTools.Reports;
@@ -25,7 +24,7 @@ namespace SabreTools.DatTools
/// <summary>
/// Logging object
/// </summary>
private static readonly Logger logger = new Logger();
private static readonly Logger logger = new();
#endregion
@@ -38,7 +37,7 @@ namespace SabreTools.DatTools
public static List<DatStatistics> CalculateStatistics(List<string> inputs, bool single, bool throwOnError = false)
{
// Create the output list
List<DatStatistics> stats = new List<DatStatistics>();
List<DatStatistics> stats = new();
// Make sure we have all files and then order them
List<ParentablePath> files = PathTool.GetFilesOnly(inputs);
@@ -48,7 +47,7 @@ namespace SabreTools.DatTools
.ToList();
// Init total
DatStatistics totalStats = new DatStatistics
DatStatistics totalStats = new()
{
Statistics = new ItemDictionary(),
DisplayName = "DIR: All DATs",
@@ -58,7 +57,7 @@ namespace SabreTools.DatTools
// Init directory-level variables
string lastdir = null;
DatStatistics dirStats = new DatStatistics
DatStatistics dirStats = new()
{
Statistics = new ItemDictionary(),
MachineCount = 0,
@@ -85,15 +84,15 @@ namespace SabreTools.DatTools
};
}
InternalStopwatch watch = new InternalStopwatch($"Collecting statistics for '{file.CurrentPath}'");
InternalStopwatch watch = new($"Collecting statistics for '{file.CurrentPath}'");
List<string> machines = new List<string>();
List<string> machines = new();
DatFile datdata = Parser.CreateAndParse(file.CurrentPath, statsOnly: true, throwOnError: throwOnError);
// Add single DAT stats (if asked)
if (single)
{
DatStatistics individualStats = new DatStatistics
DatStatistics individualStats = new()
{
Statistics = datdata.Items,
DisplayName = datdata.Header.FileName,
@@ -105,11 +104,11 @@ namespace SabreTools.DatTools
// Add single DAT stats to dir
dirStats.Statistics.AddStatistics(datdata.Items);
dirStats.Statistics.GameCount += datdata.Items.Keys.Count();
dirStats.Statistics.GameCount += datdata.Items.Keys.Count;
// Add single DAT stats to totals
totalStats.Statistics.AddStatistics(datdata.Items);
totalStats.Statistics.GameCount += datdata.Items.Keys.Count();
totalStats.Statistics.GameCount += datdata.Items.Keys.Count;
// Make sure to assign the new directory
lastdir = thisdir;
@@ -165,7 +164,7 @@ namespace SabreTools.DatTools
// Get the proper output directory name
outDir = outDir.Ensure();
InternalStopwatch watch = new InternalStopwatch($"Writing out report data to '{outDir}'");
InternalStopwatch watch = new($"Writing out report data to '{outDir}'");
// Get the dictionary of desired output report names
Dictionary<StatReportFormat, string> outfiles = CreateOutStatsNames(outDir, statDatFormat, reportName);
@@ -209,7 +208,7 @@ namespace SabreTools.DatTools
/// <returns>Dictionary of output formats mapped to file names</returns>
private static Dictionary<StatReportFormat, string> CreateOutStatsNames(string outDir, StatReportFormat statDatFormat, string reportName, bool overwrite = true)
{
Dictionary<StatReportFormat, string> output = new Dictionary<StatReportFormat, string>();
Dictionary<StatReportFormat, string> output = new();
// First try to create the output directory if we need to
if (!Directory.Exists(outDir))

View File

@@ -23,7 +23,7 @@ namespace SabreTools.DatTools
/// <summary>
/// Logging object
/// </summary>
private static readonly Logger logger = new Logger();
private static readonly Logger logger = new();
#endregion
@@ -37,10 +37,10 @@ namespace SabreTools.DatTools
{
bool success = true;
InternalStopwatch watch = new InternalStopwatch("Verifying all from supplied depots");
InternalStopwatch watch = new("Verifying all from supplied depots");
// Now loop through and get only directories from the input paths
List<string> directories = new List<string>();
List<string> directories = new();
foreach (string input in inputs)
{
// Add to the list if the input is a directory
@@ -87,7 +87,7 @@ namespace SabreTools.DatTools
continue;
// If we have a path, we want to try to get the rom information
GZipArchive tgz = new GZipArchive(foundpath);
GZipArchive tgz = new(foundpath);
BaseFile fileinfo = tgz.GetTorrentGZFileInfo();
// If the file information is null, then we continue
@@ -120,7 +120,7 @@ namespace SabreTools.DatTools
{
bool success = true;
InternalStopwatch watch = new InternalStopwatch("Verifying all from supplied paths");
InternalStopwatch watch = new("Verifying all from supplied paths");
// Force bucketing according to the flags
datFile.Items.SetBucketedBy(ItemKey.NULL);

View File

@@ -22,7 +22,7 @@ namespace SabreTools.DatTools
/// <summary>
/// Logging object
/// </summary>
private static readonly Logger logger = new Logger();
private static readonly Logger logger = new();
#endregion
@@ -54,7 +54,7 @@ namespace SabreTools.DatTools
// Ensure the output directory is set and created
outDir = outDir.Ensure(create: true);
InternalStopwatch watch = new InternalStopwatch($"Writing out internal dat to '{outDir}'");
InternalStopwatch watch = new($"Writing out internal dat to '{outDir}'");
// If the DAT has no output format, default to XML
if (datFile.Header.DatFormat == 0)
@@ -122,7 +122,7 @@ namespace SabreTools.DatTools
{
Statistics = datFile.Items,
DisplayName = datFile.Header.FileName,
MachineCount = datFile.Items.Keys.Count(),
MachineCount = datFile.Items.Keys.Count,
IsDirectory = false,
},
};