Temp dir should be global

Matt Nadareski
2020-08-02 13:08:33 -07:00
parent feb21c5ef8
commit 417d2253db
9 changed files with 45 additions and 25 deletions
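
In short: instead of threading a `tempDir` string through `DatFile.PopulateFromDir` and `CheckFileForHashes` (and keeping a separate `_tmpdir` field in RombaSharp), the temp folder now lives in a single `Globals.TempDir` property that defaults to `Path.GetTempPath()`. Below is a minimal, self-contained C# sketch of the resulting pattern; the `Globals` and `DatBuilder` types here are illustrative stand-ins, not the actual SabreTools.Library classes.

```csharp
using System;
using System.IO;

// Stand-in for SabreTools.Library.Data.Globals, reduced to the new property.
public static class Globals
{
    /// <summary>
    /// Temporary directory location (defaults to the system temp path)
    /// </summary>
    public static string TempDir { get; set; } = Path.GetTempPath();
}

// Hypothetical consumer showing the "after" shape: no tempDir parameter,
// the shared global is read wherever a scratch location is needed.
public class DatBuilder
{
    public void PopulateFromDir(string basePath, bool copyFiles)
    {
        if (copyFiles)
        {
            // Mirrors CheckFileForHashes: copy inputs under a unique folder in the global temp dir
            string newBasePath = Path.Combine(Globals.TempDir, Guid.NewGuid().ToString());
            Console.WriteLine($"Would copy '{basePath}' under '{newBasePath}' before hashing");
        }
    }
}

public static class Program
{
    public static void Main()
    {
        // Front ends (RombaSharp's config, SabreTools' TempStringValue feature) set the global once...
        Globals.TempDir = Path.Combine(Path.GetTempPath(), "example-temp");

        // ...and every later call picks it up without an extra argument.
        new DatBuilder().PopulateFromDir("roms", copyFiles: true);
    }
}
```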

View File

@@ -66,8 +66,8 @@ have a current entry in the DAT index.";
foreach (string dir in onlyDirs)
{
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
-df.PopulateFromDir(dir, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, true, null);
-df.PopulateFromDir(dir, Hash.DeepHashes, false, true, SkipFileType.None, false, false, _tmpdir, false, true, null);
+df.PopulateFromDir(dir, Hash.DeepHashes, false, false, SkipFileType.None, false, false, false, true, null);
+df.PopulateFromDir(dir, Hash.DeepHashes, false, true, SkipFileType.None, false, false, false, true, null);
}
// Create an empty Dat for files that need to be rebuilt

View File

@@ -350,7 +350,6 @@ namespace RombaSharp.Features
// General settings
internal static string _logdir; // Log folder location
-internal static string _tmpdir; // Temp folder location
internal static string _webdir; // Web frontend location
internal static string _baddir; // Fail-to-unpack file folder location
internal static int _verbosity; // Verbosity of the output
@@ -588,7 +587,7 @@ namespace RombaSharp.Features
// Finally set all of the fields
Globals.MaxThreads = workers;
_logdir = logdir;
-_tmpdir = tmpdir;
+Globals.TempDir = tmpdir;
_webdir = webdir;
_baddir = baddir;
_verbosity = verbosity;

View File

@@ -55,7 +55,7 @@ namespace RombaSharp.Features
// Now run the D2D on the input and write out
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
datfile.PopulateFromDir(source, Hash.DeepHashes, true /* bare */, false /* archivesAsFiles */, SkipFileType.None, false /* addBlanks */,
-false /* addDate */, _tmpdir, false /* copyFiles */, true /* chdsAsFiles */, null /* filter */);
+false /* addDate */, false /* copyFiles */, true /* chdsAsFiles */, null /* filter */);
datfile.Write(outDir: outdat);
}
}

View File

@@ -64,7 +64,7 @@ contents of any changed dats.";
datroot.Header.Type = "SuperDAT";
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
-datroot.PopulateFromDir(_dats, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, true, null);
+datroot.PopulateFromDir(_dats, Hash.DeepHashes, false, false, SkipFileType.None, false, false, false, true, null);
datroot.Items.BucketBy(BucketedBy.SHA1, DedupeType.None);
// Create a List of dat hashes in the database (SHA-1)

View File

@@ -66,7 +66,7 @@ namespace RombaSharp.Features
DatFile depot = DatFile.Create();
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
-depot.PopulateFromDir(depotname, Hash.DeepHashes, false, false, SkipFileType.None, false, false, _tmpdir, false, true, null);
+depot.PopulateFromDir(depotname, Hash.DeepHashes, false, false, SkipFileType.None, false, false, false, true, null);
depot.Items.BucketBy(BucketedBy.SHA1, DedupeType.None);
// Set the base queries to use

View File

@@ -1491,7 +1491,6 @@ namespace SabreTools.Library.DatFiles
/// <param name="skipFileType">Type of files that should be skipped</param> /// <param name="skipFileType">Type of files that should be skipped</param>
/// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param> /// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param> /// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
/// <param name="tempDir">Name of the directory to create a temp folder in (blank is current directory)</param>
/// <param name="outDir">Output directory to </param> /// <param name="outDir">Output directory to </param>
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param> /// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param> /// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
@@ -1505,7 +1504,6 @@ namespace SabreTools.Library.DatFiles
SkipFileType skipFileType,
bool addBlanks,
bool addDate,
-string tempDir,
bool copyFiles,
bool chdsAsFiles,
Filter filter,
@@ -1532,7 +1530,7 @@ namespace SabreTools.Library.DatFiles
}
// Clean the temp directory path
-tempDir = DirectoryExtensions.Ensure(tempDir, temp: true);
+Globals.TempDir = DirectoryExtensions.Ensure(Globals.TempDir, temp: true);
// Process the input
if (Directory.Exists(basePath))
@@ -1544,7 +1542,7 @@ namespace SabreTools.Library.DatFiles
Parallel.ForEach(files, Globals.ParallelOptions, item =>
{
CheckFileForHashes(item, basePath, omitFromScan, archivesAsFiles, skipFileType,
-addBlanks, addDate, tempDir, copyFiles, chdsAsFiles);
+addBlanks, addDate, copyFiles, chdsAsFiles);
});
// Now find all folders that are empty, if we are supposed to
@@ -1586,13 +1584,13 @@ namespace SabreTools.Library.DatFiles
else if (File.Exists(basePath))
{
CheckFileForHashes(basePath, Path.GetDirectoryName(Path.GetDirectoryName(basePath)), omitFromScan, archivesAsFiles,
-skipFileType, addBlanks, addDate, tempDir, copyFiles, chdsAsFiles);
+skipFileType, addBlanks, addDate, copyFiles, chdsAsFiles);
}
// Now that we're done, delete the temp folder (if it's not the default)
Globals.Logger.User("Cleaning temp folder");
-if (tempDir != Path.GetTempPath())
-DirectoryExtensions.TryDelete(tempDir);
+if (Globals.TempDir != Path.GetTempPath())
+DirectoryExtensions.TryDelete(Globals.TempDir);
// If we have a valid filter, perform the filtering now
if (filter != null && filter != default(Filter))
@@ -1611,7 +1609,6 @@ namespace SabreTools.Library.DatFiles
/// <param name="skipFileType">Type of files that should be skipped</param> /// <param name="skipFileType">Type of files that should be skipped</param>
/// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param> /// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param> /// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
/// <param name="tempDir">Name of the directory to create a temp folder in (blank is current directory)</param>
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param> /// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
/// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param> /// <param name="chdsAsFiles">True if CHDs should be treated like regular files, false otherwise</param>
private void CheckFileForHashes( private void CheckFileForHashes(
@@ -1622,7 +1619,6 @@ namespace SabreTools.Library.DatFiles
SkipFileType skipFileType,
bool addBlanks,
bool addDate,
-string tempDir,
bool copyFiles,
bool chdsAsFiles)
{
@@ -1654,7 +1650,7 @@ namespace SabreTools.Library.DatFiles
string newBasePath = basePath;
if (copyFiles)
{
-newBasePath = Path.Combine(tempDir, Guid.NewGuid().ToString());
+newBasePath = Path.Combine(Globals.TempDir, Guid.NewGuid().ToString());
newItem = Path.GetFullPath(Path.Combine(newBasePath, Path.GetFullPath(item).Remove(0, basePath.Length + 1)));
DirectoryExtensions.TryCreateDirectory(Path.GetDirectoryName(newItem));
File.Copy(item, newItem, true);
@@ -2601,7 +2597,7 @@ namespace SabreTools.Library.DatFiles
{
// TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually
PopulateFromDir(input, (quickScan ? Hash.SecureHashes : Hash.DeepHashes) /* omitFromScan */, true /* bare */, false /* archivesAsFiles */,
-SkipFileType.None, false /* addBlanks */, false /* addDate */, string.Empty /* tempDir */, false /* copyFiles */, chdsAsFiles, filter);
+SkipFileType.None, false /* addBlanks */, false /* addDate */, false /* copyFiles */, chdsAsFiles, filter);
}
// Setup the fixdat
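
Taken together, the PopulateFromDir changes give the temp directory a single lifecycle: Globals.TempDir is normalized up front via DirectoryExtensions.Ensure, used for any copy-before-hash work, and deleted at the end unless it is the system default. Below is a rough, self-contained approximation of that flow using only standard .NET calls (DirectoryExtensions.Ensure/TryDelete are the library's own helpers and are not reproduced here).

```csharp
using System;
using System.IO;

public static class TempDirLifecycle
{
    public static string TempDir { get; set; } = Path.GetTempPath();

    public static void Main()
    {
        // "Clean the temp directory path": make sure the configured folder exists
        // (stand-in for DirectoryExtensions.Ensure(Globals.TempDir, temp: true)).
        Directory.CreateDirectory(TempDir);

        // ... copy-to-temp and hashing work would happen here ...

        // "Cleaning temp folder": delete it afterwards, but never the system default.
        if (TempDir != Path.GetTempPath())
        {
            try
            {
                Directory.Delete(TempDir, recursive: true); // stand-in for DirectoryExtensions.TryDelete
            }
            catch (IOException)
            {
                // best-effort cleanup, mirroring the "Try" semantics
            }
        }
    }
}
```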

View File

@@ -20,6 +20,24 @@ namespace SabreTools.Library.Data
#region Public accessors
+/// <summary>
+/// Command line arguments passed in to the parent program
+/// </summary>
+public static string CommandLineArgs => string.Join(" ", Environment.GetCommandLineArgs());
+/// <summary>
+/// Directory path for the current executable
+/// </summary>
+public static string ExeDir => Path.GetDirectoryName(ExeName);
+/// <summary>
+/// File path for the current executable
+/// </summary>
+public static string ExeName => new Uri(Assembly.GetExecutingAssembly().GetName().CodeBase).LocalPath;
+/// <summary>
+/// Logging object for writing to file and console
+/// </summary>
public static Logger Logger
{
get
@@ -32,18 +50,23 @@ namespace SabreTools.Library.Data
set { _logger = value; }
}
+/// <summary>
+/// Maximum threads to use during parallel operations
+/// </summary>
public static int MaxThreads { get; set; } = Environment.ProcessorCount;
+/// <summary>
+/// ParallelOptions object for use in parallel operations
+/// </summary>
public static ParallelOptions ParallelOptions => new ParallelOptions()
{
MaxDegreeOfParallelism = MaxThreads
};
-public static string ExeName => new Uri(Assembly.GetExecutingAssembly().GetName().CodeBase).LocalPath;
-public static string ExeDir => Path.GetDirectoryName(ExeName);
-public static string CommandLineArgs => string.Join(" ", Environment.GetCommandLineArgs());
+/// <summary>
+/// Temporary directory location
+/// </summary>
+public static string TempDir { get; set; } = Path.GetTempPath();
#endregion
}

View File

@@ -2365,6 +2365,10 @@ Some special strings that can be used:
// Set threading flag, if necessary
if (features.ContainsKey(ThreadsInt32Value))
Globals.MaxThreads = GetInt32(features, ThreadsInt32Value);
+// Set temp path, if necessary
+if (features.ContainsKey(TempStringValue))
+Globals.TempDir = GetString(features, TempStringValue);
}
#region Protected Specific Extraction #region Protected Specific Extraction

View File

@@ -60,7 +60,6 @@ namespace SabreTools.Features
bool chdsAsFiles = GetBoolean(features, ChdsAsFilesValue);
bool copyFiles = GetBoolean(features, CopyFilesValue);
bool noAutomaticDate = GetBoolean(features, NoAutomaticDateValue);
-string tempDir = GetString(features, TempStringValue);
var omitFromScan = GetOmitFromScan(features);
var skipFileType = GetSkipFileType(features);
@@ -85,7 +84,6 @@ namespace SabreTools.Features
skipFileType,
addBlankFiles,
addFileDates,
-tempDir,
copyFiles,
chdsAsFiles,
Filter);