using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Threading.Tasks;

using SabreTools.Library.Data;
using SabreTools.Library.DatItems;
using SabreTools.Library.IO;
using SabreTools.Library.Reports;
using NaturalSort;

namespace SabreTools.Library.DatFiles
{
    /// <summary>
    /// Item dictionary with statistics, bucketing, and sorting
    /// </summary>
    public class ItemDictionary : IDictionary<string, List<DatItem>>
    {
        #region Private instance variables

        /// <summary>
        /// Determine the bucketing key for all items
        /// </summary>
        private Field bucketedBy;

        /// <summary>
        /// Determine merging type for all items
        /// </summary>
        private DedupeType mergedBy;

        /// <summary>
        /// Internal dictionary for the class
        /// </summary>
        private Dictionary<string, List<DatItem>> items;

        #endregion

        #region Publicly available fields

        #region Keys

        /// <summary>
        /// Get the keys from the file dictionary
        /// </summary>
        /// <returns>List of the keys</returns>
        public ICollection<string> Keys
        {
            get { return items.Keys; }
        }

        /// <summary>
        /// Get the keys in sorted order from the file dictionary
        /// </summary>
        /// <returns>List of the keys in sorted order</returns>
        public List<string> SortedKeys
        {
            get
            {
                var keys = items.Keys.ToList();
                keys.Sort(new NaturalComparer());
                return keys;
            }
        }

        #endregion

        #region Statistics

        /// <summary>
        /// Overall item count
        /// </summary>
        public long TotalCount { get; private set; } = 0;

        /// <summary>
        /// Number of Archive items
        /// </summary>
        public long ArchiveCount { get; private set; } = 0;

        /// <summary>
        /// Number of BiosSet items
        /// </summary>
        public long BiosSetCount { get; private set; } = 0;

        /// <summary>
        /// Number of Disk items
        /// </summary>
        public long DiskCount { get; private set; } = 0;

        /// <summary>
        /// Number of Release items
        /// </summary>
        public long ReleaseCount { get; private set; } = 0;

        /// <summary>
        /// Number of Rom items
        /// </summary>
        public long RomCount { get; private set; } = 0;

        /// <summary>
        /// Number of Sample items
        /// </summary>
        public long SampleCount { get; private set; } = 0;

        /// <summary>
        /// Number of machines
        /// </summary>
        /// <remarks>Special count only used by statistics output</remarks>
        public long GameCount { get; private set; } = 0;

        /// <summary>
        /// Total uncompressed size
        /// </summary>
        public long TotalSize { get; private set; } = 0;

        /// <summary>
        /// Number of items with a CRC hash
        /// </summary>
        public long CRCCount { get; private set; } = 0;

        /// <summary>
        /// Number of items with an MD5 hash
        /// </summary>
        public long MD5Count { get; private set; } = 0;

#if NET_FRAMEWORK
        /// <summary>
        /// Number of items with a RIPEMD160 hash
        /// </summary>
        public long RIPEMD160Count { get; private set; } = 0;
#endif

        /// <summary>
        /// Number of items with a SHA-1 hash
        /// </summary>
        public long SHA1Count { get; private set; } = 0;

        /// <summary>
        /// Number of items with a SHA-256 hash
        /// </summary>
        public long SHA256Count { get; private set; } = 0;

        /// <summary>
        /// Number of items with a SHA-384 hash
        /// </summary>
        public long SHA384Count { get; private set; } = 0;

        /// <summary>
        /// Number of items with a SHA-512 hash
        /// </summary>
        public long SHA512Count { get; private set; } = 0;

        /// <summary>
        /// Number of items with the baddump status
        /// </summary>
        public long BaddumpCount { get; private set; } = 0;

        /// <summary>
        /// Number of items with the good status
        /// </summary>
        public long GoodCount { get; private set; } = 0;

        /// <summary>
        /// Number of items with the nodump status
        /// </summary>
        public long NodumpCount { get; private set; } = 0;

        /// <summary>
        /// Number of items with the verified status
        /// </summary>
        public long VerifiedCount { get; private set; } = 0;

        #endregion

        #endregion

        #region Instance methods

        #region Accessors

        /// <summary>
        /// Passthrough to access the file dictionary
        /// </summary>
        /// <param name="key">Key in the dictionary to reference</param>
        public List<DatItem> this[string key]
        {
            get
            {
                // Explicit lock for some weird corner cases
                lock (key)
                {
                    // Ensure the key exists
                    EnsureKey(key);

                    // Now return the value
                    return items[key];
                }
            }
            set
            {
                AddRange(key, value);
            }
        }
        /// <summary>
        /// Add a value to the file dictionary
        /// </summary>
        /// <param name="key">Key in the dictionary to add to</param>
        /// <param name="value">Value to add to the dictionary</param>
        public void Add(string key, DatItem value)
        {
            // Explicit lock for some weird corner cases
            lock (key)
            {
                // Ensure the key exists
                EnsureKey(key);

                // If item is null, don't add it
                if (value == null)
                    return;

                // Now add the value
                items[key].Add(value);

                // Now update the statistics
                AddItemStatistics(value);
            }
        }

        /// <summary>
        /// Add a range of values to the file dictionary
        /// </summary>
        /// <param name="key">Key in the dictionary to add to</param>
        /// <param name="value">Values to add to the dictionary</param>
        public void Add(string key, List<DatItem> value)
        {
            AddRange(key, value);
        }

        /// <summary>
        /// Add a range of values to the file dictionary
        /// </summary>
        /// <param name="key">Key in the dictionary to add to</param>
        /// <param name="value">Values to add to the dictionary</param>
        public void AddRange(string key, List<DatItem> value)
        {
            // Explicit lock for some weird corner cases
            lock (key)
            {
                // Ensure the key exists
                EnsureKey(key);

                // Now add the values
                items[key].AddRange(value);

                // Now update the statistics
                foreach (DatItem item in value)
                {
                    AddItemStatistics(item);
                }
            }
        }

        /// <summary>
        /// Get if the file dictionary contains the key
        /// </summary>
        /// <param name="key">Key in the dictionary to check</param>
        /// <returns>True if the key exists, false otherwise</returns>
        public bool ContainsKey(string key)
        {
            // If the key is null, we return false since keys can't be null
            if (key == null)
                return false;

            // Explicit lock for some weird corner cases
            lock (key)
            {
                return items.ContainsKey(key);
            }
        }

        /// <summary>
        /// Get if the file dictionary contains the key and value
        /// </summary>
        /// <param name="key">Key in the dictionary to check</param>
        /// <param name="value">Value in the dictionary to check</param>
        /// <returns>True if the key and value exist, false otherwise</returns>
        public bool Contains(string key, DatItem value)
        {
            // If the key is null, we return false since keys can't be null
            if (key == null)
                return false;

            // Explicit lock for some weird corner cases
            lock (key)
            {
                if (items.ContainsKey(key))
                    return items[key].Contains(value);
            }

            return false;
        }

        /// <summary>
        /// Remove a key from the file dictionary if it exists
        /// </summary>
        /// <param name="key">Key in the dictionary to remove</param>
        public bool Remove(string key)
        {
            // If the key doesn't exist, return
            if (!ContainsKey(key))
                return false;

            // Remove the statistics first
            foreach (DatItem item in items[key])
            {
                RemoveItemStatistics(item);
            }

            // Remove the key from the dictionary
            return items.Remove(key);
        }

        /// <summary>
        /// Remove the first instance of a value from the file dictionary if it exists
        /// </summary>
        /// <param name="key">Key in the dictionary to remove from</param>
        /// <param name="value">Value to remove from the dictionary</param>
        public bool Remove(string key, DatItem value)
        {
            // If the key and value don't exist, return
            if (!Contains(key, value))
                return false;

            // Remove the statistics first
            RemoveItemStatistics(value);

            return items[key].Remove(value);
        }

        /// <summary>
        /// Override the internal Field value
        /// </summary>
        /// <param name="newBucket">New bucketing field to use</param>
        public void SetBucketedBy(Field newBucket)
        {
            bucketedBy = newBucket;
        }
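        // A minimal usage sketch of the accessors above, assuming `rom` is a populated
        // DatItem instance (the variable names here are illustrative only):
        //
        //     var dict = new ItemDictionary();
        //     dict.Add("game1", rom);                 // adds the item and updates the statistics
        //     bool has = dict.Contains("game1", rom); // true
        //     List<DatItem> bucket = dict["game1"];   // indexer ensures the key exists first
        //     dict.Remove("game1", rom);              // removes the item and rolls back the statistics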
        /// <summary>
        /// Add to the statistics given a DatItem
        /// </summary>
        /// <param name="item">Item to add info from</param>
        private void AddItemStatistics(DatItem item)
        {
            // No matter what the item is, we increment the total count
            TotalCount += 1;

            // Now we do different things for each item type
            switch (item.ItemType)
            {
                case ItemType.Archive:
                    ArchiveCount += 1;
                    break;

                case ItemType.BiosSet:
                    BiosSetCount += 1;
                    break;

                case ItemType.Disk:
                    DiskCount += 1;
                    if (((Disk)item).ItemStatus != ItemStatus.Nodump)
                    {
                        MD5Count += (string.IsNullOrWhiteSpace(((Disk)item).MD5) ? 0 : 1);
#if NET_FRAMEWORK
                        RIPEMD160Count += (string.IsNullOrWhiteSpace(((Disk)item).RIPEMD160) ? 0 : 1);
#endif
                        SHA1Count += (string.IsNullOrWhiteSpace(((Disk)item).SHA1) ? 0 : 1);
                        SHA256Count += (string.IsNullOrWhiteSpace(((Disk)item).SHA256) ? 0 : 1);
                        SHA384Count += (string.IsNullOrWhiteSpace(((Disk)item).SHA384) ? 0 : 1);
                        SHA512Count += (string.IsNullOrWhiteSpace(((Disk)item).SHA512) ? 0 : 1);
                    }

                    BaddumpCount += (((Disk)item).ItemStatus == ItemStatus.BadDump ? 1 : 0);
                    GoodCount += (((Disk)item).ItemStatus == ItemStatus.Good ? 1 : 0);
                    NodumpCount += (((Disk)item).ItemStatus == ItemStatus.Nodump ? 1 : 0);
                    VerifiedCount += (((Disk)item).ItemStatus == ItemStatus.Verified ? 1 : 0);
                    break;

                case ItemType.Release:
                    ReleaseCount += 1;
                    break;

                case ItemType.Rom:
                    RomCount += 1;
                    if (((Rom)item).ItemStatus != ItemStatus.Nodump)
                    {
                        TotalSize += ((Rom)item).Size;
                        CRCCount += (string.IsNullOrWhiteSpace(((Rom)item).CRC) ? 0 : 1);
                        MD5Count += (string.IsNullOrWhiteSpace(((Rom)item).MD5) ? 0 : 1);
#if NET_FRAMEWORK
                        RIPEMD160Count += (string.IsNullOrWhiteSpace(((Rom)item).RIPEMD160) ? 0 : 1);
#endif
                        SHA1Count += (string.IsNullOrWhiteSpace(((Rom)item).SHA1) ? 0 : 1);
                        SHA256Count += (string.IsNullOrWhiteSpace(((Rom)item).SHA256) ? 0 : 1);
                        SHA384Count += (string.IsNullOrWhiteSpace(((Rom)item).SHA384) ? 0 : 1);
                        SHA512Count += (string.IsNullOrWhiteSpace(((Rom)item).SHA512) ? 0 : 1);
                    }

                    BaddumpCount += (((Rom)item).ItemStatus == ItemStatus.BadDump ? 1 : 0);
                    GoodCount += (((Rom)item).ItemStatus == ItemStatus.Good ? 1 : 0);
                    NodumpCount += (((Rom)item).ItemStatus == ItemStatus.Nodump ? 1 : 0);
                    VerifiedCount += (((Rom)item).ItemStatus == ItemStatus.Verified ? 1 : 0);
                    break;

                case ItemType.Sample:
                    SampleCount += 1;
                    break;
            }
        }

        /// <summary>
        /// Add statistics from another ItemDictionary object
        /// </summary>
        /// <param name="stats">ItemDictionary object to add from</param>
        private void AddStatistics(ItemDictionary stats)
        {
            TotalCount += stats.TotalCount;

            ArchiveCount += stats.ArchiveCount;
            BiosSetCount += stats.BiosSetCount;
            DiskCount += stats.DiskCount;
            ReleaseCount += stats.ReleaseCount;
            RomCount += stats.RomCount;
            SampleCount += stats.SampleCount;

            GameCount += stats.GameCount;

            TotalSize += stats.TotalSize;

            // Individual hash counts
            CRCCount += stats.CRCCount;
            MD5Count += stats.MD5Count;
#if NET_FRAMEWORK
            RIPEMD160Count += stats.RIPEMD160Count;
#endif
            SHA1Count += stats.SHA1Count;
            SHA256Count += stats.SHA256Count;
            SHA384Count += stats.SHA384Count;
            SHA512Count += stats.SHA512Count;

            // Individual status counts
            BaddumpCount += stats.BaddumpCount;
            GoodCount += stats.GoodCount;
            NodumpCount += stats.NodumpCount;
            VerifiedCount += stats.VerifiedCount;
        }

        /// <summary>
        /// Ensure the key exists in the items dictionary
        /// </summary>
        /// <param name="key">Key to ensure</param>
        private void EnsureKey(string key)
        {
            // If the key is missing from the dictionary, add it
            if (!items.ContainsKey(key))
                items.Add(key, new List<DatItem>());
        }
        /// <summary>
        /// Remove from the statistics given a DatItem
        /// </summary>
        /// <param name="item">Item to remove info for</param>
        public void RemoveItemStatistics(DatItem item)
        {
            // If we have a null item, we can't do anything
            if (item == null)
                return;

            // No matter what the item is, we decrement the total count
            TotalCount -= 1;

            // Now we do different things for each item type
            switch (item.ItemType)
            {
                case ItemType.Archive:
                    ArchiveCount -= 1;
                    break;

                case ItemType.BiosSet:
                    BiosSetCount -= 1;
                    break;

                case ItemType.Disk:
                    DiskCount -= 1;
                    if (((Disk)item).ItemStatus != ItemStatus.Nodump)
                    {
                        MD5Count -= (string.IsNullOrWhiteSpace(((Disk)item).MD5) ? 0 : 1);
#if NET_FRAMEWORK
                        RIPEMD160Count -= (string.IsNullOrWhiteSpace(((Disk)item).RIPEMD160) ? 0 : 1);
#endif
                        SHA1Count -= (string.IsNullOrWhiteSpace(((Disk)item).SHA1) ? 0 : 1);
                        SHA256Count -= (string.IsNullOrWhiteSpace(((Disk)item).SHA256) ? 0 : 1);
                        SHA384Count -= (string.IsNullOrWhiteSpace(((Disk)item).SHA384) ? 0 : 1);
                        SHA512Count -= (string.IsNullOrWhiteSpace(((Disk)item).SHA512) ? 0 : 1);
                    }

                    BaddumpCount -= (((Disk)item).ItemStatus == ItemStatus.BadDump ? 1 : 0);
                    GoodCount -= (((Disk)item).ItemStatus == ItemStatus.Good ? 1 : 0);
                    NodumpCount -= (((Disk)item).ItemStatus == ItemStatus.Nodump ? 1 : 0);
                    VerifiedCount -= (((Disk)item).ItemStatus == ItemStatus.Verified ? 1 : 0);
                    break;

                case ItemType.Release:
                    ReleaseCount -= 1;
                    break;

                case ItemType.Rom:
                    RomCount -= 1;
                    if (((Rom)item).ItemStatus != ItemStatus.Nodump)
                    {
                        TotalSize -= ((Rom)item).Size;
                        CRCCount -= (string.IsNullOrWhiteSpace(((Rom)item).CRC) ? 0 : 1);
                        MD5Count -= (string.IsNullOrWhiteSpace(((Rom)item).MD5) ? 0 : 1);
#if NET_FRAMEWORK
                        RIPEMD160Count -= (string.IsNullOrWhiteSpace(((Rom)item).RIPEMD160) ? 0 : 1);
#endif
                        SHA1Count -= (string.IsNullOrWhiteSpace(((Rom)item).SHA1) ? 0 : 1);
                        SHA256Count -= (string.IsNullOrWhiteSpace(((Rom)item).SHA256) ? 0 : 1);
                        SHA384Count -= (string.IsNullOrWhiteSpace(((Rom)item).SHA384) ? 0 : 1);
                        SHA512Count -= (string.IsNullOrWhiteSpace(((Rom)item).SHA512) ? 0 : 1);
                    }

                    BaddumpCount -= (((Rom)item).ItemStatus == ItemStatus.BadDump ? 1 : 0);
                    GoodCount -= (((Rom)item).ItemStatus == ItemStatus.Good ? 1 : 0);
                    NodumpCount -= (((Rom)item).ItemStatus == ItemStatus.Nodump ? 1 : 0);
                    VerifiedCount -= (((Rom)item).ItemStatus == ItemStatus.Verified ? 1 : 0);
                    break;

                case ItemType.Sample:
                    SampleCount -= 1;
                    break;
            }
        }

        #endregion

        #region Constructors

        /// <summary>
        /// Generic constructor
        /// </summary>
        public ItemDictionary()
        {
            bucketedBy = Field.NULL;
            mergedBy = DedupeType.None;
            items = new Dictionary<string, List<DatItem>>();
        }

        #endregion
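        // A short sketch of how bucketing is typically driven; the variable names are
        // illustrative only, not taken from any particular caller in this library:
        //
        //     var dict = new ItemDictionary();
        //     // ... populate dict via Add/AddRange ...
        //     dict.BucketBy(Field.MachineName, DedupeType.None, norename: true); // group by machine
        //     dict.BucketBy(Field.CRC, DedupeType.Full);                         // re-key by CRC and dedupe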
        #region Custom Functionality

        /// <summary>
        /// Take the arbitrarily bucketed Files Dictionary and convert to one bucketed by a user-defined method
        /// </summary>
        /// <param name="bucketBy">Field enum representing how to bucket the individual items</param>
        /// <param name="dedupeType">Dedupe type that should be used</param>
        /// <param name="lower">True if the key should be lowercased (default), false otherwise</param>
        /// <param name="norename">True if games should only be compared on game and file name, false if system and source are counted</param>
        public void BucketBy(Field bucketBy, DedupeType dedupeType, bool lower = true, bool norename = true)
        {
            // If we have a situation where there's no dictionary or no keys at all, we skip
            if (items == null || items.Count == 0)
                return;

            // If the sorted type isn't the same, we want to sort the dictionary accordingly
            if (bucketedBy != bucketBy)
            {
                Globals.Logger.User($"Organizing roms by {bucketBy}");

                // Set the sorted type
                bucketedBy = bucketBy;

                // Reset the merged type since this might change the merge
                mergedBy = DedupeType.None;

                // First do the initial sort of all of the roms inplace
                List<string> oldkeys = Keys.ToList();
                for (int k = 0; k < oldkeys.Count; k++)
                {
                    string key = oldkeys[k];

                    // Now add each of the roms to their respective keys
                    for (int i = 0; i < this[key].Count; i++)
                    {
                        DatItem item = this[key][i];
                        if (item == null)
                            continue;

                        // We want to get the key most appropriate for the given sorting type
                        string newkey = item.GetKey(bucketBy, lower, norename);

                        // If the key is different, move the item to the new key
                        if (newkey != key)
                        {
                            Add(newkey, item);
                            Remove(key, item);
                            i--; // This makes sure the index stays correct since one item was removed
                        }
                    }

                    // If the key is now empty, remove it
                    if (this[key].Count == 0)
                        Remove(key);
                }
            }

            // If the merge type isn't the same, we want to merge the dictionary accordingly
            if (mergedBy != dedupeType)
            {
                Globals.Logger.User($"Deduping roms by {dedupeType}");

                // Set the merged type
                mergedBy = dedupeType;

                Parallel.ForEach(Keys, Globals.ParallelOptions, key =>
                {
                    // Get the possibly unsorted list
                    List<DatItem> sortedlist = this[key];

                    // Sort the list of items to be consistent
                    DatItem.Sort(ref sortedlist, false);

                    // If we're merging the roms, do so
                    if (dedupeType == DedupeType.Full || (dedupeType == DedupeType.Game && bucketBy == Field.MachineName))
                        sortedlist = DatItem.Merge(sortedlist);

                    // Add the list back to the dictionary
                    Remove(key);
                    AddRange(key, sortedlist);
                });
            }
            // If the merge type is the same, we want to sort the dictionary to be consistent
            else
            {
                Parallel.ForEach(Keys, Globals.ParallelOptions, key =>
                {
                    // Get the possibly unsorted list
                    List<DatItem> sortedlist = this[key];

                    // Sort the list of items to be consistent
                    DatItem.Sort(ref sortedlist, false);
                });
            }

            // Now clean up all empty keys
            ClearEmpty();
        }

        /// <summary>
        /// Remove all items marked for removal
        /// </summary>
        public void ClearMarked()
        {
            var keys = items.Keys.ToList();
            foreach (string key in keys)
            {
                List<DatItem> oldItemList = items[key];
                List<DatItem> newItemList = oldItemList.Where(i => !i.Remove).ToList();

                Remove(key);
                AddRange(key, newItemList);
            }
        }

        /// <summary>
        /// List all duplicates found in a DAT based on a DatItem
        /// </summary>
        /// <param name="datItem">Item to try to match</param>
        /// <param name="remove">True to mark matched roms for removal from the input, false otherwise (default)</param>
        /// <param name="sorted">True if the DAT is already sorted accordingly, false otherwise (default)</param>
        /// <returns>List of matched DatItem objects</returns>
        public List<DatItem> GetDuplicates(DatItem datItem, bool remove = false, bool sorted = false)
        {
            List<DatItem> output = new List<DatItem>();

            // Check for an empty rom list first
            if (TotalCount == 0)
                return output;

            // We want to get the proper key for the DatItem
            string key = SortAndGetKey(datItem, sorted);

            // If the key doesn't exist, return the empty list
            if (!ContainsKey(key))
                return output;

            // Try to find duplicates
            List<DatItem> roms = this[key];
            List<DatItem> left = new List<DatItem>();
            for (int i = 0; i < roms.Count; i++)
            {
                DatItem other = roms[i];

                if (datItem.Equals(other))
                {
                    other.Remove = true;
                    output.Add(other);
                }
                else
                {
                    left.Add(other);
                }
            }

            // If we're in removal mode, add back all roms with the proper flags
            if (remove)
            {
                Remove(key);
                AddRange(key, output);
                AddRange(key, left);
            }

            return output;
        }

        /// <summary>
        /// Check if a DAT contains the given DatItem
        /// </summary>
        /// <param name="datItem">Item to try to match</param>
        /// <param name="sorted">True if the DAT is already sorted accordingly, false otherwise (default)</param>
        /// <returns>True if it contains the rom, false otherwise</returns>
        public bool HasDuplicates(DatItem datItem, bool sorted = false)
        {
            // Check for an empty rom list first
            if (TotalCount == 0)
                return false;

            // We want to get the proper key for the DatItem
            string key = SortAndGetKey(datItem, sorted);

            // If the key doesn't exist, return false
            if (!ContainsKey(key))
                return false;

            // Try to find duplicates
            List<DatItem> roms = this[key];
            return roms.Any(r => datItem.Equals(r));
        }
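        // A minimal duplicate-check sketch, assuming `candidate` is a hypothetical DatItem
        // built elsewhere (e.g. from a scanned file):
        //
        //     if (dict.HasDuplicates(candidate))
        //     {
        //         List<DatItem> matches = dict.GetDuplicates(candidate, remove: true);
        //         // `matches` now carry the Remove flag; a later ClearMarked() would drop them
        //     }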
        /// <summary>
        /// Recalculate the statistics for the Dat
        /// </summary>
        public void RecalculateStats()
        {
            // Wipe out any stats already there
            ResetStatistics();

            // If we have a blank Dat in any way, return
            if (items == null || items.Count == 0)
                return;

            // Loop through and add
            Parallel.ForEach(items.Keys, Globals.ParallelOptions, key =>
            {
                List<DatItem> datItems = items[key];
                foreach (DatItem item in datItems)
                {
                    AddItemStatistics(item);
                }
            });
        }

        /// <summary>
        /// Remove any keys that have null or empty values
        /// </summary>
        private void ClearEmpty()
        {
            var keys = items.Keys.ToList();
            foreach (string key in keys)
            {
                if (items[key] == null || items[key].Count == 0)
                    items.Remove(key);
            }
        }

        /// <summary>
        /// Get the highest-order Field value that represents the statistics
        /// </summary>
        private Field GetBestAvailable()
        {
            // If all items are supposed to have a SHA-512, we bucket by that
            if (RomCount + DiskCount - NodumpCount == SHA512Count)
                return Field.SHA512;

            // If all items are supposed to have a SHA-384, we bucket by that
            else if (RomCount + DiskCount - NodumpCount == SHA384Count)
                return Field.SHA384;

            // If all items are supposed to have a SHA-256, we bucket by that
            else if (RomCount + DiskCount - NodumpCount == SHA256Count)
                return Field.SHA256;

            // If all items are supposed to have a SHA-1, we bucket by that
            else if (RomCount + DiskCount - NodumpCount == SHA1Count)
                return Field.SHA1;

#if NET_FRAMEWORK
            // If all items are supposed to have a RIPEMD160, we bucket by that
            else if (RomCount + DiskCount - NodumpCount == RIPEMD160Count)
                return Field.RIPEMD160;
#endif

            // If all items are supposed to have an MD5, we bucket by that
            else if (RomCount + DiskCount - NodumpCount == MD5Count)
                return Field.MD5;

            // Otherwise, we bucket by CRC
            else
                return Field.CRC;
        }

        /// <summary>
        /// Reset all statistics
        /// </summary>
        private void ResetStatistics()
        {
            TotalCount = 0;

            ArchiveCount = 0;
            BiosSetCount = 0;
            DiskCount = 0;
            ReleaseCount = 0;
            RomCount = 0;
            SampleCount = 0;

            GameCount = 0;

            TotalSize = 0;

            CRCCount = 0;
            MD5Count = 0;
#if NET_FRAMEWORK
            RIPEMD160Count = 0;
#endif
            SHA1Count = 0;
            SHA256Count = 0;
            SHA384Count = 0;
            SHA512Count = 0;

            BaddumpCount = 0;
            GoodCount = 0;
            NodumpCount = 0;
            VerifiedCount = 0;
        }

        /// <summary>
        /// Sort the input DAT and get the key to be used by the item
        /// </summary>
        /// <param name="datItem">Item to try to match</param>
        /// <param name="sorted">True if the DAT is already sorted accordingly, false otherwise (default)</param>
        /// <returns>Key to try to use</returns>
        private string SortAndGetKey(DatItem datItem, bool sorted = false)
        {
            // If we're not already sorted, take care of it
            if (!sorted)
                BucketBy(GetBestAvailable(), DedupeType.None);

            // Now that we have the sorted type, we get the proper key
            return datItem.GetKey(bucketedBy);
        }

        #endregion

        #region IDictionary Implementations

        public ICollection<List<DatItem>> Values => ((IDictionary<string, List<DatItem>>)items).Values;

        public int Count => ((ICollection<KeyValuePair<string, List<DatItem>>>)items).Count;

        public bool IsReadOnly => ((ICollection<KeyValuePair<string, List<DatItem>>>)items).IsReadOnly;

        public bool TryGetValue(string key, out List<DatItem> value)
        {
            return ((IDictionary<string, List<DatItem>>)items).TryGetValue(key, out value);
        }

        public void Add(KeyValuePair<string, List<DatItem>> item)
        {
            ((ICollection<KeyValuePair<string, List<DatItem>>>)items).Add(item);
        }

        public void Clear()
        {
            ((ICollection<KeyValuePair<string, List<DatItem>>>)items).Clear();
        }

        public bool Contains(KeyValuePair<string, List<DatItem>> item)
        {
            return ((ICollection<KeyValuePair<string, List<DatItem>>>)items).Contains(item);
        }

        public void CopyTo(KeyValuePair<string, List<DatItem>>[] array, int arrayIndex)
        {
            ((ICollection<KeyValuePair<string, List<DatItem>>>)items).CopyTo(array, arrayIndex);
        }

        public bool Remove(KeyValuePair<string, List<DatItem>> item)
        {
            return ((ICollection<KeyValuePair<string, List<DatItem>>>)items).Remove(item);
        }

        public IEnumerator<KeyValuePair<string, List<DatItem>>> GetEnumerator()
        {
            return ((IEnumerable<KeyValuePair<string, List<DatItem>>>)items).GetEnumerator();
        }

        IEnumerator IEnumerable.GetEnumerator()
        {
            return ((IEnumerable)items).GetEnumerator();
        }

        #endregion

        #endregion // Instance methods
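        // Because ItemDictionary implements IDictionary<string, List<DatItem>>, it can also be
        // consumed through the standard collection interfaces, for example (sketch):
        //
        //     foreach (KeyValuePair<string, List<DatItem>> kvp in dict)
        //     {
        //         // kvp.Key is the bucket key, kvp.Value the items in that bucket
        //     }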
        #region Static methods

        #region Writing

        /// <summary>
        /// Output the stats for a list of input dats as files in a human-readable format
        /// </summary>
        /// <param name="inputs">List of input files and folders</param>
        /// <param name="reportName">Name of the output file</param>
        /// <param name="outDir">Output directory to write the report files to</param>
        /// <param name="single">True if single DAT stats are output, false otherwise</param>
        /// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
        /// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
        /// <param name="statDatFormat">Set the statistics output format to use</param>
        public static void OutputStats(
            List<string> inputs,
            string reportName,
            string outDir,
            bool single,
            bool baddumpCol,
            bool nodumpCol,
            StatReportFormat statDatFormat)
        {
            // If there's no output format, set the default
            if (statDatFormat == StatReportFormat.None)
                statDatFormat = StatReportFormat.Textfile;

            // Get the proper output file name
            if (string.IsNullOrWhiteSpace(reportName))
                reportName = "report";

            // Get the proper output directory name
            outDir = DirectoryExtensions.Ensure(outDir);

            // Get the dictionary of desired output report names
            Dictionary<StatReportFormat, string> outputs = CreateOutStatsNames(outDir, statDatFormat, reportName);

            // Make sure we have all files and then order them
            List<ParentablePath> files = DirectoryExtensions.GetFilesOnly(inputs);
            files = files
                .OrderBy(i => Path.GetDirectoryName(i.CurrentPath))
                .ThenBy(i => Path.GetFileName(i.CurrentPath))
                .ToList();

            // Get all of the writers that we need
            List<BaseReport> reports = outputs.Select(kvp => BaseReport.Create(kvp.Key, kvp.Value, baddumpCol, nodumpCol)).ToList();

            // Write the header, if any
            reports.ForEach(report => report.WriteHeader());

            // Init all total variables
            ItemDictionary totalStats = new ItemDictionary();

            // Init directory-level variables
            string lastdir = null;
            string basepath = null;
            ItemDictionary dirStats = new ItemDictionary();

            // Now process each of the input files
            foreach (ParentablePath file in files)
            {
                // Get the directory for the current file
                string thisdir = Path.GetDirectoryName(file.CurrentPath);
                basepath = Path.GetDirectoryName(Path.GetDirectoryName(file.CurrentPath));

                // If we don't have the first file and the directory has changed, show the previous directory stats and reset
                if (lastdir != null && thisdir != lastdir)
                {
                    // Output separator if needed
                    reports.ForEach(report => report.WriteMidSeparator());

                    DatFile lastdirdat = DatFile.Create();

                    reports.ForEach(report => report.ReplaceStatistics($"DIR: {WebUtility.HtmlEncode(lastdir)}", dirStats.GameCount, dirStats));
                    reports.ForEach(report => report.Write());

                    // Write the mid-footer, if any
                    reports.ForEach(report => report.WriteFooterSeparator());

                    // Write the header, if any
                    reports.ForEach(report => report.WriteMidHeader());

                    // Reset the directory stats
                    dirStats.ResetStatistics();
                }

                Globals.Logger.Verbose($"Beginning stat collection for '{file}'", false);
                List<string> games = new List<string>();
                DatFile datdata = DatFile.CreateAndParse(file.CurrentPath);
                datdata.Items.BucketBy(Field.MachineName, DedupeType.None, norename: true);

                // Output single DAT stats (if asked)
                Globals.Logger.User($"Adding stats for file '{file}'\n", false);
                if (single)
                {
                    reports.ForEach(report => report.ReplaceStatistics(datdata.Header.FileName, datdata.Items.Keys.Count, datdata.Items));
                    reports.ForEach(report => report.Write());
                }

                // Add single DAT stats to dir
                dirStats.AddStatistics(datdata.Items);
                dirStats.GameCount += datdata.Items.Keys.Count();

                // Add single DAT stats to totals
                totalStats.AddStatistics(datdata.Items);
                totalStats.GameCount += datdata.Items.Keys.Count();

                // Make sure to assign the new directory
                lastdir = thisdir;
            }

            // Output the directory stats one last time
            reports.ForEach(report => report.WriteMidSeparator());

            if (single)
            {
                reports.ForEach(report => report.ReplaceStatistics($"DIR: {WebUtility.HtmlEncode(lastdir)}", dirStats.GameCount, dirStats));
                reports.ForEach(report => report.Write());
            }

            // Write the mid-footer, if any
            reports.ForEach(report => report.WriteFooterSeparator());

            // Write the header, if any
            reports.ForEach(report => report.WriteMidHeader());

            // Reset the directory stats
            dirStats.ResetStatistics();

            // Output total DAT stats
            reports.ForEach(report => report.ReplaceStatistics("DIR: All DATs", totalStats.GameCount, totalStats));
            reports.ForEach(report => report.Write());

            // Output footer if needed
            reports.ForEach(report => report.WriteFooter());

            Globals.Logger.User(@"
Please check the log folder if the stats scrolled offscreen", false);
        }
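        // A sketch of how OutputStats might be invoked; the paths and report name below are
        // placeholders, not values taken from the original source:
        //
        //     ItemDictionary.OutputStats(
        //         new List<string> { "dats" },   // input files and/or folders
        //         "report",                      // base name for the report files
        //         "reports",                     // output directory
        //         single: true,
        //         baddumpCol: true,
        //         nodumpCol: true,
        //         statDatFormat: StatReportFormat.Textfile);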
        /// <summary>
        /// Get the proper set of output file names for the stat output formats
        /// </summary>
        /// <param name="outDir">Output path to use</param>
        /// <param name="statDatFormat">StatReportFormat to get the extensions for</param>
        /// <param name="reportName">Name of the input file to use</param>
        /// <param name="overwrite">True if we ignore existing files (default), false otherwise</param>
        /// <returns>Dictionary of output formats mapped to file names</returns>
        private static Dictionary<StatReportFormat, string> CreateOutStatsNames(string outDir, StatReportFormat statDatFormat, string reportName, bool overwrite = true)
        {
            Dictionary<StatReportFormat, string> output = new Dictionary<StatReportFormat, string>();

            // First try to create the output directory if we need to
            if (!Directory.Exists(outDir))
                Directory.CreateDirectory(outDir);

            // Double check the outDir for the end delim
            if (!outDir.EndsWith(Path.DirectorySeparatorChar.ToString()))
                outDir += Path.DirectorySeparatorChar;

            // For each output format, get the appropriate file name
            output.Add(StatReportFormat.None, CreateOutStatsNamesHelper(outDir, ".null", reportName, overwrite));

            if (statDatFormat.HasFlag(StatReportFormat.Textfile))
                output.Add(StatReportFormat.Textfile, CreateOutStatsNamesHelper(outDir, ".txt", reportName, overwrite));

            if (statDatFormat.HasFlag(StatReportFormat.CSV))
                output.Add(StatReportFormat.CSV, CreateOutStatsNamesHelper(outDir, ".csv", reportName, overwrite));

            if (statDatFormat.HasFlag(StatReportFormat.HTML))
                output.Add(StatReportFormat.HTML, CreateOutStatsNamesHelper(outDir, ".html", reportName, overwrite));

            if (statDatFormat.HasFlag(StatReportFormat.SSV))
                output.Add(StatReportFormat.SSV, CreateOutStatsNamesHelper(outDir, ".ssv", reportName, overwrite));

            if (statDatFormat.HasFlag(StatReportFormat.TSV))
                output.Add(StatReportFormat.TSV, CreateOutStatsNamesHelper(outDir, ".tsv", reportName, overwrite));

            return output;
        }

        /// <summary>
        /// Help generate the output stats file name
        /// </summary>
        /// <param name="outDir">Output directory</param>
        /// <param name="extension">Extension to use for the file</param>
        /// <param name="reportName">Name of the input file to use</param>
        /// <param name="overwrite">True if we ignore existing files, false otherwise</param>
        /// <returns>String containing the new filename</returns>
        private static string CreateOutStatsNamesHelper(string outDir, string extension, string reportName, bool overwrite)
        {
            string outfile = outDir + reportName + extension;
            outfile = outfile.Replace($"{Path.DirectorySeparatorChar}{Path.DirectorySeparatorChar}", Path.DirectorySeparatorChar.ToString());

            if (!overwrite)
            {
                int i = 1;
                while (File.Exists(outfile))
                {
                    outfile = $"{outDir}{reportName}_{i}{extension}";
                    outfile = outfile.Replace($"{Path.DirectorySeparatorChar}{Path.DirectorySeparatorChar}", Path.DirectorySeparatorChar.ToString());
                    i++;
                }
            }

            return outfile;
        }

        #endregion

        #endregion // Static methods
    }
}