This commit is contained in:
Matt Nadareski
2020-08-28 01:13:55 -07:00
parent d475d80fc4
commit 4bf5a835e7
15 changed files with 86 additions and 77 deletions

View File

@@ -1908,7 +1908,7 @@ namespace SabreTools.Library.DatFiles
});
// Now find all folders that are empty, if we are supposed to
if (!(Header.OutputDepot?.IsActive ?? false) && addBlanks)
if (Header.OutputDepot?.IsActive != true && addBlanks)
{
List<string> empties = DirectoryExtensions.ListEmpty(basePath);
Parallel.ForEach(empties, Globals.ParallelOptions, dir =>
@@ -1979,7 +1979,7 @@ namespace SabreTools.Library.DatFiles
bool copyFiles)
{
// Special case for if we are in Depot mode (all names are supposed to be SHA-1 hashes)
if (Header.OutputDepot?.IsActive ?? false)
if (Header.OutputDepot?.IsActive == true)
{
GZipArchive gzarc = new GZipArchive(item);
BaseFile baseFile = gzarc.GetTorrentGZFileInfo();
@@ -2980,11 +2980,7 @@ namespace SabreTools.Library.DatFiles
/// <param name="quickScan">True to enable external scanning of archives, false otherwise</param>
/// <param name="asFiles">TreatAsFiles representing CHD and Archive scanning</param>
/// <returns>True if verification was a success, false otherwise</returns>
public bool VerifyGeneric(
List<string> inputs,
bool hashOnly = false,
bool quickScan = false,
TreatAsFiles asFiles = 0x00)
public bool VerifyGeneric(List<string> inputs, bool hashOnly, bool quickScan, TreatAsFiles asFiles = 0x00)
{
bool success = true;
@@ -2996,45 +2992,27 @@ namespace SabreTools.Library.DatFiles
PopulateFromDir(input, quickScan ? Hash.SecureHashes : Hash.DeepHashes, asFiles: asFiles);
}
// If we are checking hashes only, essentially diff the inputs
// Force bucketing according to the flags
Items.SetBucketedBy(Field.NULL);
if (hashOnly)
{
// First we need to bucket and dedupe by hash to get duplicates
Items.BucketBy(Field.DatItem_CRC, DedupeType.Full);
var keys = Items.SortedKeys.ToList();
foreach (string key in keys)
{
List<DatItem> items = Items[key];
for (int i = 0; i < items.Count; i++)
{
// Unmatched items will have a source ID of 99, remove all others
if (items[i].Source.Index != 99)
items[i].Remove = true;
}
// Set the list back, just in case
Items[key] = items;
}
}
// If we are checking full names, get only files found in directory
else
{
var keys = Items.SortedKeys.ToList();
foreach (string key in keys)
{
List<DatItem> items = Items[key];
List<DatItem> newItems = DatItem.Merge(items);
for (int i = 0; i < newItems.Count; i++)
{
// Unmatched items will have a source ID of 99, remove all others
if (newItems[i].Source.Index != 99)
newItems[i].Remove = true;
}
Items.BucketBy(Field.Machine_Name, DedupeType.Full);
// Set the list back, just in case
Items[key] = newItems;
// Then mark items for removal
var keys = Items.SortedKeys.ToList();
foreach (string key in keys)
{
List<DatItem> items = Items[key];
for (int i = 0; i < items.Count; i++)
{
// Unmatched items will have a source ID of 99, remove all others
if (items[i].Source.Index != 99)
items[i].Remove = true;
}
// Set the list back, just in case
Items[key] = items;
}
// Set fixdat headers in case of writing out
@@ -3618,7 +3596,7 @@ namespace SabreTools.Library.DatFiles
string post = CreatePrefixPostfix(item, false);
// If we're in Depot mode, take care of that instead
if (Header.OutputDepot?.IsActive ?? false)
if (Header.OutputDepot?.IsActive == true)
{
if (item.ItemType == ItemType.Disk)
{

View File

@@ -373,6 +373,27 @@ namespace SabreTools.Library.DatFiles
return items[key].Remove(value);
}
/// <summary>
/// Reset a key from the file dictionary if it exists
/// </summary>
/// <param name="key">Key in the dictionary to reset</param>
/// <returns>True if the key existed and was reset, false otherwise</returns>
public bool Reset(string key)
{
    if (ContainsKey(key))
    {
        // Back out the statistics contributed by every item under this key
        foreach (DatItem item in items[key])
            RemoveItemStatistics(item);

        // Swap in a fresh, empty list for the key
        items[key] = new List<DatItem>();
        return true;
    }

    // Nothing to do for a missing key
    return false;
}
/// <summary>
/// Override the internal Field value
/// </summary>
@@ -658,10 +679,11 @@ namespace SabreTools.Library.DatFiles
// Set the sorted type
mergedBy = dedupeType;
Parallel.ForEach(Keys, Globals.ParallelOptions, key =>
List<string> keys = Keys.ToList();
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
{
// Get the possibly unsorted list
List<DatItem> sortedlist = this[key];
List<DatItem> sortedlist = this[key].ToList();
// Sort the list of items to be consistent
DatItem.Sort(ref sortedlist, false);
@@ -671,14 +693,15 @@ namespace SabreTools.Library.DatFiles
sortedlist = DatItem.Merge(sortedlist);
// Add the list back to the dictionary
Remove(key);
Reset(key);
AddRange(key, sortedlist);
});
}
// If the merge type is the same, we want to sort the dictionary to be consistent
else
{
Parallel.ForEach(Keys, Globals.ParallelOptions, key =>
List<string> keys = Keys.ToList();
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
{
// Get the possibly unsorted list
List<DatItem> sortedlist = this[key];
@@ -811,9 +834,12 @@ namespace SabreTools.Library.DatFiles
/// </summary>
private void ClearEmpty()
{
var keys = items.Keys.ToList();
var keys = items.Keys.Where(k => k != null).ToList();
foreach (string key in keys)
{
if (!items.ContainsKey(key))
continue;
if (items[key] == null || items[key].Count == 0)
items.Remove(key);
}

View File

@@ -136,7 +136,7 @@ namespace SabreTools.Library.DatFiles
ProcessItemName(datItem, false, forceRomName: false);
// Romba mode automatically uses item name
if ((Header.OutputDepot?.IsActive ?? false) || Header.UseRomName)
if (Header.OutputDepot?.IsActive == true || Header.UseRomName)
{
sw.Write($"{datItem.Name}\n");
}