Less reliance on specific hashes in DFD path

This commit is contained in:
Matt Nadareski
2020-09-18 10:21:04 -07:00
parent 7ed7107fd2
commit cae8ce898c
4 changed files with 35 additions and 26 deletions
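
In short: the DAT-from-directory ("DFD") path no longer threads an omitFromScan hash mask through PopulateFromDir and its helpers. Callers pass an explicit quickScan flag instead, and any hashes they do not want are stripped after the scan with a Cleaner. A condensed before/after of the public call, assembled from the hunks below (the datFile receiver is illustrative; every other name appears in this commit):

    // Before: the hash mask doubled as the quick-scan switch
    datFile.PopulateFromDir(input, quickScan ? Hash.SecureHashes : Hash.DeepHashes, asFiles: asFiles);

    // After: quickScan is an explicit flag; hash exclusion moves to post-scan cleaning
    datFile.PopulateFromDir(input, asFiles: asFiles, quickScan: quickScan);
    datFile.ApplyCleaning(new Cleaner() { ExcludeFields = Hash.DeepHashes.AsFields() });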

View File

@@ -3,8 +3,10 @@ using System.IO;
using SabreTools.Library.Data;
using SabreTools.Library.DatFiles;
using SabreTools.Library.Filtering;
using SabreTools.Library.Help;
using SabreTools.Library.IO;
using SabreTools.Library.Tools;
namespace RombaSharp.Features
{
@@ -52,6 +54,7 @@ namespace RombaSharp.Features
datfile.Header.Name = string.IsNullOrWhiteSpace(name) ? "untitled" : name;
datfile.Header.Description = description;
datfile.PopulateFromDir(source, asFiles: TreatAsFile.AaruFormat | TreatAsFile.CHD);
datfile.ApplyCleaning(new Cleaner() { ExcludeFields = Hash.DeepHashes.AsFields() });
datfile.Write(outDir: outdat);
}
}

View File

@@ -1970,23 +1970,22 @@ namespace SabreTools.Library.DatFiles
/// Create a new Dat from a directory
/// </summary>
/// <param name="basePath">Base folder to be used in creating the DAT</param>
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
/// <param name="asFiles">TreatAsFiles representing CHD and Archive scanning</param>
/// <param name="skipFileType">Type of files that should be skipped</param>
/// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
/// <param name="outDir">Output directory to </param>
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
/// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
/// <param name="quickScan">True if archive header should be used, false otherwise</param>
/// TODO: Look into removing "copyFiles". I don't think it's useful anymore
public bool PopulateFromDir(
string basePath,
Hash omitFromScan = Hash.DeepHashes,
TreatAsFile asFiles = 0x00,
SkipFileType skipFileType = SkipFileType.None,
bool addBlanks = false,
bool addDate = false,
bool copyFiles = false)
bool copyFiles = false,
bool quickScan = false)
{
// Clean the temp directory path
Globals.TempDir = DirectoryExtensions.Ensure(Globals.TempDir, temp: true);
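
For reference, the entry point as it reads after this commit, condensed from the hunk above (parameter list only; nothing here beyond what the hunk already shows):

    public bool PopulateFromDir(
        string basePath,
        TreatAsFile asFiles = 0x00,
        SkipFileType skipFileType = SkipFileType.None,
        bool addBlanks = false,
        bool addDate = false,
        bool copyFiles = false,
        bool quickScan = false)
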
@@ -2000,7 +1999,7 @@ namespace SabreTools.Library.DatFiles
List<string> files = Directory.EnumerateFiles(basePath, "*", SearchOption.AllDirectories).ToList();
Parallel.ForEach(files, Globals.ParallelOptions, item =>
{
CheckFileForHashes(item, basePath, omitFromScan, asFiles, skipFileType, addBlanks, addDate, copyFiles);
CheckFileForHashes(item, basePath, asFiles, skipFileType, addBlanks, addDate, copyFiles, quickScan);
});
// Now find all folders that are empty, if we are supposed to
@@ -2044,12 +2043,12 @@ namespace SabreTools.Library.DatFiles
CheckFileForHashes(
basePath,
Path.GetDirectoryName(Path.GetDirectoryName(basePath)),
omitFromScan,
asFiles,
skipFileType,
addBlanks,
addDate,
copyFiles);
copyFiles,
quickScan);
}
// Now that we're done, delete the temp folder (if it's not the default)
@@ -2071,15 +2070,16 @@ namespace SabreTools.Library.DatFiles
/// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
/// <param name="quickScan">True if archive header should be used, false otherwise</param>
private void CheckFileForHashes(
string item,
string basePath,
Hash omitFromScan,
TreatAsFile asFiles,
SkipFileType skipFileType,
bool addBlanks,
bool addDate,
bool copyFiles)
bool copyFiles,
bool quickScan)
{
// If we're in depot mode, process it separately
if (CheckDepotFile(item))
@@ -2089,7 +2089,7 @@ namespace SabreTools.Library.DatFiles
(string newItem, string newBasePath) = CopyIfNeeded(item, basePath, copyFiles);
// Initialize possible archive variables
BaseArchive archive = BaseArchive.Create(newItem);
BaseArchive archive = BaseArchive.Create(newItem, quickScan);
List<BaseFile> extracted = null;
// If we have an archive and we're supposed to scan it
@@ -2105,11 +2105,11 @@ namespace SabreTools.Library.DatFiles
// If the extracted list is null, just scan the item itself
if (extracted == null)
ProcessFile(newItem, newBasePath, omitFromScan, addDate, asFiles);
ProcessFile(newItem, newBasePath, addDate, asFiles);
// Otherwise, add all of the found items
else
ProcessArchive(newItem, newBasePath, addBlanks, archive, extracted, omitFromScan);
ProcessArchive(newItem, newBasePath, addBlanks, archive, extracted);
// Cue to delete the file if it's a copy
if (copyFiles && item != newItem)
@@ -2178,8 +2178,8 @@ namespace SabreTools.Library.DatFiles
/// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
/// <param name="archive">BaseArchive to get blank folders from, if necessary</param>
/// <param name="extracted">List of BaseFiles representing the internal files</param>
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
private void ProcessArchive(string item, string basePath, bool addBlanks, BaseArchive archive, List<BaseFile> extracted, Hash omitFromScan)
/// <param name="quickScan">True if only information from file headers should be used, false otherwise</param>
private void ProcessArchive(string item, string basePath, bool addBlanks, BaseArchive archive, List<BaseFile> extracted)
{
// Get the parent path for all items
string parent = (Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item);
@@ -2188,7 +2188,6 @@ namespace SabreTools.Library.DatFiles
Parallel.ForEach(extracted, Globals.ParallelOptions, baseFile =>
{
DatItem datItem = DatItem.Create(baseFile);
datItem.RemoveFields(omitFromScan.AsFields());
ProcessFileHelper(item, datItem, basePath, parent);
});
@@ -2215,15 +2214,13 @@ namespace SabreTools.Library.DatFiles
/// </summary>
/// <param name="item">File to be added</param>
/// <param name="basePath">Path the represents the parent directory</param>
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
/// <param name="asFiles">TreatAsFiles representing CHD and Archive scanning</param>
private void ProcessFile(string item, string basePath, Hash omitFromScan, bool addDate, TreatAsFile asFiles)
private void ProcessFile(string item, string basePath, bool addDate, TreatAsFile asFiles)
{
Globals.Logger.Verbose($"'{Path.GetFileName(item)}' treated like a file");
BaseFile baseFile = FileExtensions.GetInfo(item, addDate, Header.HeaderSkipper, asFiles);
DatItem datItem = DatItem.Create(baseFile);
datItem.RemoveFields(omitFromScan.AsFields());
ProcessFileHelper(item, datItem, basePath, string.Empty);
}
@@ -3004,8 +3001,7 @@ namespace SabreTools.Library.DatFiles
Globals.Logger.User("Processing files:\n");
foreach (string input in inputs)
{
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
PopulateFromDir(input, quickScan ? Hash.SecureHashes : Hash.DeepHashes, asFiles: asFiles);
PopulateFromDir(input, asFiles: asFiles, quickScan: quickScan);
}
// Force bucketing according to the flags
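
Taken together, the plumbing inside DatFile now looks roughly like this, condensed from the hunks above (call sites only, bodies elided; every name appears in the diff):

    // PopulateFromDir fans each discovered path out to:
    CheckFileForHashes(item, basePath, asFiles, skipFileType, addBlanks, addDate, copyFiles, quickScan);

    // ...which hands quickScan to the archive factory and then dispatches:
    BaseArchive archive = BaseArchive.Create(newItem, quickScan);
    ProcessFile(newItem, newBasePath, addDate, asFiles);                  // loose files
    ProcessArchive(newItem, newBasePath, addBlanks, archive, extracted);  // archive contents

Neither helper takes a hash mask any more, and the per-item datItem.RemoveFields(omitFromScan.AsFields()) calls are gone; field exclusion is now expressed only in the callers' Cleaner.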

View File

@@ -156,6 +156,9 @@ Reset the internal state: reset();";
datFile.PopulateFromDir(input);
}
// TODO: We might not want to remove higher order hashes in the future
datFile.ApplyCleaning(new Cleaner() { ExcludeFields = Hash.DeepHashes.AsFields() });
break;
// Apply a filter

View File

@@ -3,7 +3,8 @@ using System.Collections.Generic;
using System.IO;
using SabreTools.Library.DatFiles;
using SabreTools.Library.Help;
using SabreTools.Library.DatItems;
using SabreTools.Library.Tools;
namespace SabreTools.Features
{
@@ -16,9 +17,9 @@ namespace SabreTools.Features
Name = Value;
Flags = new List<string>() { "-d", "--d2d", "--dfd" };
Description = "Create DAT(s) from an input directory";
_featureType = FeatureType.Flag;
_featureType = Library.Help.FeatureType.Flag;
LongDescription = "Create a DAT file from an input directory or set of files. By default, this will output a DAT named based on the input directory and the current date. It will also treat all archives as possible games and add all three hashes (CRC, MD5, SHA-1) for each file.";
Features = new Dictionary<string, Feature>();
Features = new Dictionary<string, Library.Help.Feature>();
// Hash Features
AddFeature(SkipMd5Flag);
@@ -53,7 +54,7 @@ namespace SabreTools.Features
AddFeature(ThreadsInt32Input);
}
public override void ProcessFeatures(Dictionary<string, Feature> features)
public override void ProcessFeatures(Dictionary<string, Library.Help.Feature> features)
{
base.ProcessFeatures(features);
@@ -67,6 +68,12 @@ namespace SabreTools.Features
var skipFileType = GetSkipFileType(features);
var splitType = GetSplitType(features);
// Apply the omit from scan values to the cleaner
if (Cleaner.ExcludeFields == null)
Cleaner.ExcludeFields = new List<Field>();
Cleaner.ExcludeFields.AddRange(omitFromScan.AsFields());
// Create a new DATFromDir object and process the inputs
DatFile basedat = DatFile.Create(Header);
basedat.Header.Date = DateTime.Now.ToString("yyyy-MM-dd");
@@ -86,12 +93,12 @@ namespace SabreTools.Features
// Now populate from the path
bool success = datdata.PopulateFromDir(
basePath,
omitFromScan,
asFiles,
skipFileType,
addBlankFiles,
addFileDates,
copyFiles);
copyFiles,
quickScan: omitFromScan == Hash.SecureHashes);
if (success)
{
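
Net effect in the SabreTools DirFrom feature: the omitFromScan value computed from the hash-skip flags is no longer handed to PopulateFromDir. It now does two jobs, both visible in the hunks above: it seeds Cleaner.ExcludeFields so the unwanted hash fields are dropped after the scan, and it selects quickScan, which is enabled only when the whole secure-hash set (Hash.SecureHashes) is being omitted, presumably because a header-only archive scan cannot supply those hashes anyway. Condensed:

    // Requested hash omissions become post-scan field exclusions
    if (Cleaner.ExcludeFields == null)
        Cleaner.ExcludeFields = new List<Field>();
    Cleaner.ExcludeFields.AddRange(omitFromScan.AsFields());

    // Header-only scanning is used only when every secure hash was omitted
    datdata.PopulateFromDir(basePath, asFiles, skipFileType, addBlankFiles, addFileDates, copyFiles,
        quickScan: omitFromScan == Hash.SecureHashes);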