Add many more stopwatches

Matt Nadareski
2021-02-02 14:09:49 -08:00
parent b93088c36e
commit c931c84838
10 changed files with 90 additions and 32 deletions
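
Every hunk in this commit follows the same pattern: a plain logger.User progress message is replaced by (or paired with) an InternalStopwatch that announces the operation when it is constructed and reports the elapsed time when watch.Stop() is called. As a rough illustration only, here is a minimal sketch of such a helper in C#; the namespace, constructor behavior, and use of Console.WriteLine in place of the project's logger are assumptions for the sketch, not the actual SabreTools implementation.

using System;
using System.Diagnostics;

namespace SabreTools.Logging // assumed namespace for this sketch
{
    // Hypothetical sketch of a start/stop timing helper like InternalStopwatch
    public class InternalStopwatch
    {
        private readonly string _subject;
        private readonly Stopwatch _watch;

        public InternalStopwatch(string subject)
        {
            _subject = subject;
            _watch = new Stopwatch();

            // Announce the operation, taking over from the old standalone logger.User calls
            Console.WriteLine($"{_subject}...");
            _watch.Start();
        }

        public void Stop()
        {
            _watch.Stop();

            // Report how long the wrapped operation took
            Console.WriteLine($"{_subject} took {_watch.Elapsed.TotalSeconds:N3} seconds");
        }
    }
}

// Call-site pattern used throughout this commit:
//     InternalStopwatch watch = new InternalStopwatch("Applying cleaning steps to DAT");
//     try { /* timed work */ }
//     finally { watch.Stop(); }

Where the timed block can throw (Writer, Cleaner, ExtraIni, Splitter), the diff places watch.Stop() in a finally block so the elapsed time is still reported on failure; elsewhere it is simply called at the end of the method.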

View File

@@ -75,7 +75,7 @@ namespace SabreTools.DatTools
List<DatItemField> datItemFields,
bool onlySame)
{
logger.User($"Replacing items in '{intDat.Header.FileName}' from the base DAT");
InternalStopwatch watch = new InternalStopwatch($"Replacing items in '{intDat.Header.FileName}' from the base DAT");
// If we are matching based on DatItem fields of any sort
if (datItemFields.Any())
@@ -133,6 +133,8 @@ namespace SabreTools.DatTools
intDat.Items.AddRange(key, newDatItems);
});
}
watch.Stop();
}
/// <summary>
@@ -149,7 +151,7 @@ namespace SabreTools.DatTools
else
datFile.Items.BucketBy(ItemKey.CRC, DedupeType.None);
logger.User($"Comparing '{intDat.Header.FileName}' to base DAT");
InternalStopwatch watch = new InternalStopwatch($"Comparing '{intDat.Header.FileName}' to base DAT");
// For comparison's sake, we want to use the base bucketing
if (useGames)
@@ -205,6 +207,8 @@ namespace SabreTools.DatTools
intDat.Items.AddRange(key, keepDatItems);
}
});
watch.Stop();
}
/// <summary>
@@ -448,32 +452,6 @@ namespace SabreTools.DatTools
return outerDiffData;
}
/// <summary>
/// Fill a DatFile with all items with a particular source index ID
/// </summary>
/// <param name="datFile">Current DatFile object to use for updating</param>
/// <param name="indexDat">DatFile to add found items to</param>
/// <param name="index">Source index ID to retrieve items for</param>
/// <returns>DatFile containing all items with the source index ID</returns>
public static void FillWithSourceIndex(DatFile datFile, DatFile indexDat, int index)
{
// Loop through and add the items for this index to the output
Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = DatItem.Merge(datFile.Items[key]);
// If the rom list is empty or null, just skip it
if (items == null || items.Count == 0)
return;
foreach (DatItem item in items)
{
if (item.Source.Index == index)
indexDat.Items.Add(key, item);
}
});
}
/// <summary>
/// Populate from multiple paths while returning the individual headers
/// </summary>
@@ -543,5 +521,31 @@ namespace SabreTools.DatTools
if (delete)
addFrom.Items = null;
}
/// <summary>
/// Fill a DatFile with all items with a particular source index ID
/// </summary>
/// <param name="datFile">Current DatFile object to use for updating</param>
/// <param name="indexDat">DatFile to add found items to</param>
/// <param name="index">Source index ID to retrieve items for</param>
/// <returns>DatFile containing all items with the source index ID</returns>
private static void FillWithSourceIndex(DatFile datFile, DatFile indexDat, int index)
{
// Loop through and add the items for this index to the output
Parallel.ForEach(datFile.Items.Keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = DatItem.Merge(datFile.Items[key]);
// If the rom list is empty or null, just skip it
if (items == null || items.Count == 0)
return;
foreach (DatItem item in items)
{
if (item.Source.Index == index)
indexDat.Items.Add(key, item);
}
});
}
}
}

View File

@@ -51,6 +51,8 @@ namespace SabreTools.DatTools
long totalSize = 0;
long currentSize = 0;
InternalStopwatch watch = new InternalStopwatch($"Populating DAT from {basePath}");
// Process the input
if (Directory.Exists(basePath))
{
@@ -90,6 +92,7 @@ namespace SabreTools.DatTools
logger.User(totalSize, totalSize, basePath);
}
watch.Stop();
return true;
}

View File

@@ -102,6 +102,8 @@ namespace SabreTools.DatTools
datFile.Header.DatFormat = datFile.Header.DatFormat == 0 ? currentPathFormat : datFile.Header.DatFormat;
datFile.Items.SetBucketedBy(ItemKey.CRC); // Setting this because it can reduce issues later
InternalStopwatch watch = new InternalStopwatch($"Parsing '{currentPath}' into internal DAT");
// Now parse the correct type of DAT
try
{
@@ -112,6 +114,8 @@ namespace SabreTools.DatTools
{
logger.Error(ex, $"Error with file '{currentPath}'");
}
watch.Stop();
}
/// <summary>

View File

@@ -43,6 +43,8 @@ namespace SabreTools.DatTools
if (datFile.Items.TotalCount == 0)
return (null, null);
InternalStopwatch watch = new InternalStopwatch($"Splitting DAT by extension");
// Make sure all of the extensions don't have a dot at the beginning
var newExtA = extA.Select(s => s.TrimStart('.').ToLowerInvariant());
string newExtAString = string.Join(",", newExtA);
@@ -84,6 +86,7 @@ namespace SabreTools.DatTools
});
// Then return both DatFiles
watch.Stop();
return (extADat, extBDat);
}
@@ -95,7 +98,7 @@ namespace SabreTools.DatTools
public static Dictionary<DatItemField, DatFile> SplitByHash(DatFile datFile)
{
// Create each of the respective output DATs
logger.User("Creating and populating new DATs");
InternalStopwatch watch = new InternalStopwatch($"Splitting DAT by best available hashes");
// Create the set of field-to-dat mappings
Dictionary<DatItemField, DatFile> fieldDats = new Dictionary<DatItemField, DatFile>();
@@ -206,6 +209,7 @@ namespace SabreTools.DatTools
}
});
watch.Stop();
return fieldDats;
}
@@ -219,6 +223,8 @@ namespace SabreTools.DatTools
/// <returns>True if split succeeded, false otherwise</returns>
public static bool SplitByLevel(DatFile datFile, string outDir, bool shortname, bool basedat)
{
InternalStopwatch watch = new InternalStopwatch($"Splitting DAT by level");
// First, bucket by games so that we can do the right thing
datFile.Items.BucketBy(ItemKey.Machine, DedupeType.None, lower: false, norename: true);
@@ -253,6 +259,7 @@ namespace SabreTools.DatTools
tempDat.Header.Name = Path.GetDirectoryName(key);
});
watch.Stop();
return true;
}
@@ -314,7 +321,7 @@ namespace SabreTools.DatTools
public static (DatFile lessThan, DatFile greaterThan) SplitBySize(DatFile datFile, long radix)
{
// Create each of the respective output DATs
logger.User("Creating and populating new DATs");
InternalStopwatch watch = new InternalStopwatch($"Splitting DAT by size");
DatFile lessThan = DatFile.Create(datFile.Header.CloneStandard());
lessThan.Header.FileName += $" (less than {radix})";
@@ -351,6 +358,7 @@ namespace SabreTools.DatTools
});
// Then return both DatFiles
watch.Stop();
return (lessThan, greaterThan);
}
@@ -362,7 +370,7 @@ namespace SabreTools.DatTools
public static Dictionary<ItemType, DatFile> SplitByType(DatFile datFile)
{
// Create each of the respective output DATs
logger.User("Creating and populating new DATs");
InternalStopwatch watch = new InternalStopwatch($"Splitting DAT by item type");
// Create the set of type-to-dat mappings
Dictionary<ItemType, DatFile> typeDats = new Dictionary<ItemType, DatFile>();
@@ -391,6 +399,7 @@ namespace SabreTools.DatTools
FillWithItemType(datFile, typeDats[itemType], itemType);
});
watch.Stop();
return typeDats;
}

View File

@@ -120,6 +120,8 @@ namespace SabreTools.DatTools
{
bool success = true;
InternalStopwatch watch = new InternalStopwatch("Verifying all from supplied paths");
// Force bucketing according to the flags
datFile.Items.SetBucketedBy(ItemKey.NULL);
if (hashOnly)
@@ -143,6 +145,8 @@ namespace SabreTools.DatTools
datFile.Items[key] = items;
}
watch.Stop();
// Set fixdat headers in case of writing out
datFile.Header.FileName = $"fixDAT_{datFile.Header.FileName}";
datFile.Header.Name = $"fixDAT_{datFile.Header.Name}";

View File

@@ -54,6 +54,8 @@ namespace SabreTools.DatTools
// Ensure the output directory is set and created
outDir = outDir.Ensure(create: true);
InternalStopwatch watch = new InternalStopwatch($"Writing out internal dat to {outDir}");
// If the DAT has no output format, default to XML
if (datFile.Header.DatFormat == 0)
{
@@ -95,6 +97,10 @@ namespace SabreTools.DatTools
logger.Error(ex);
return false;
}
finally
{
watch.Stop();
}
return true;
}

View File

@@ -104,6 +104,8 @@ namespace SabreTools.Filtering
/// <returns>True if cleaning was successful, false on error</returns>
public bool ApplyCleaning(DatFile datFile, bool throwOnError = false)
{
InternalStopwatch watch = new InternalStopwatch("Applying cleaning steps to DAT");
try
{
// Perform item-level cleaning
@@ -143,6 +145,10 @@ namespace SabreTools.Filtering
logger.Error(ex);
return false;
}
finally
{
watch.Stop();
}
return true;
}

View File

@@ -50,6 +50,8 @@ namespace SabreTools.Filtering
/// <param name="inputs">Field and file combinations</param>
public void PopulateFromList(List<string> inputs)
{
InternalStopwatch watch = new InternalStopwatch("Populating extras from list");
foreach (string input in inputs)
{
ExtraIniItem item = new ExtraIniItem();
@@ -70,6 +72,8 @@ namespace SabreTools.Filtering
if (item.PopulateFromFile(fileString))
Items.Add(item);
}
watch.Stop();
}
#endregion
@@ -84,6 +88,8 @@ namespace SabreTools.Filtering
/// <returns>True if the extras were applied, false on error</returns>
public bool ApplyExtras(DatFile datFile, bool throwOnError = false)
{
InternalStopwatch watch = new InternalStopwatch("Applying extra mappings to DAT");
try
{
// Bucket by game first
@@ -130,6 +136,10 @@ namespace SabreTools.Filtering
logger.Error(ex);
return false;
}
finally
{
watch.Stop();
}
return true;
}

View File

@@ -56,6 +56,8 @@ namespace SabreTools.Filtering
/// <param name="fields">List of field names</param>
public void PopulateExclusionsFromList(List<string> fields)
{
InternalStopwatch watch = new InternalStopwatch("Populating removals from list");
// Instantiate the removers, if necessary
DatHeaderRemover ??= new DatHeaderRemover();
DatItemRemover ??= new DatItemRemover();
@@ -81,6 +83,8 @@ namespace SabreTools.Filtering
// If we didn't match anything, log an error
logger.Warning($"The value {field} did not match any known field names. Please check the wiki for more details on supported field names.");
}
watch.Stop();
}
#endregion
@@ -98,7 +102,7 @@ namespace SabreTools.Filtering
return;
// Output the logging statement
logger.User("Removing filtered fields");
InternalStopwatch watch = new InternalStopwatch("Applying removals to DAT");
// Remove DatHeader fields
if (DatHeaderRemover != null)
@@ -119,6 +123,8 @@ namespace SabreTools.Filtering
datFile.Items.AddRange(key, items);
});
}
watch.Stop();
}
#endregion

View File

@@ -47,6 +47,8 @@ namespace SabreTools.Filtering
/// <returns>True if the DatFile was split, false on error</returns>
public bool ApplySplitting(DatFile datFile, bool useTags, bool throwOnError = false)
{
InternalStopwatch watch = new InternalStopwatch("Applying splitting to DAT");
try
{
// If we are using tags from the DAT, set the proper input for split type unless overridden
@@ -81,6 +83,10 @@ namespace SabreTools.Filtering
logger.Error(ex);
return false;
}
finally
{
watch.Stop();
}
return true;
}