Mirror of https://github.com/claunia/SabreTools.git (synced 2025-12-16 19:14:27 +00:00)

Commit: [SabreTools.Library] Update folder name
BIN  SabreTools.Library/7za.dll (new normal file; binary file not shown)
BIN  SabreTools.Library/AnyCPU/7za.dll (new normal file; binary file not shown)
BIN  SabreTools.Library/AnyCPU/sqlite3.dll (new normal file; binary file not shown)
2  SabreTools.Library/App.config (new normal file)
@@ -0,0 +1,2 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration />
59  SabreTools.Library/Data/Build.cs (new normal file)
@@ -0,0 +1,59 @@
using System;

namespace SabreTools.Helper.Data
{
    public static class Build
    {
        /// <summary>
        /// Returns true if running in a Mono environment
        /// </summary>
        public static bool MonoEnvironment
        {
            get { return (Type.GetType("Mono.Runtime") != null); }
        }

        /// <summary>
        /// Readies the console and outputs the header
        /// </summary>
        /// <param name="name">The name to be displayed as the program</param>
        public static void Start(string name)
        {
            // Dynamically create the header string, adapted from http://stackoverflow.com/questions/8200661/how-to-align-string-in-fixed-length-string
            int width = Console.WindowWidth - 3;
            string border = "+" + new string('-', width) + "+";
            string mid = name + " " + Constants.Version;
            mid = "|" + mid.PadLeft(((width - mid.Length) / 2) + mid.Length).PadRight(width) + "|";

            // If we're outputting to console, do fancy things
            if (!Console.IsOutputRedirected)
            {
                // Set the console to ready state
                ConsoleColor formertext = ConsoleColor.White;
                ConsoleColor formerback = ConsoleColor.Black;
                if (!MonoEnvironment)
                {
                    Console.SetBufferSize(Console.BufferWidth, 999);
                    formertext = Console.ForegroundColor;
                    formerback = Console.BackgroundColor;
                    Console.ForegroundColor = ConsoleColor.Yellow;
                    Console.BackgroundColor = ConsoleColor.Blue;
                }

                Console.Title = name + " " + Constants.Version;

                // Output the header
                Console.WriteLine(border);
                Console.WriteLine(mid);
                Console.WriteLine(border);
                Console.WriteLine();

                // Return the console to the original text and background colors
                if (!MonoEnvironment)
                {
                    Console.ForegroundColor = formertext;
                    Console.BackgroundColor = formerback;
                }
            }
        }
    }
}
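For context, a minimal calling sketch for the Build class added above: the XML comments say Start() readies the console and prints the program header, and MonoEnvironment gates the Windows-only console calls. The Program wrapper and the "SabreTools" name are illustrative, not part of this commit.

using System;
using SabreTools.Helper.Data;

class Program
{
    static void Main(string[] args)
    {
        // Print the bordered program header before any other console output
        Build.Start("SabreTools");

        // MonoEnvironment gates the Windows-only console calls inside Start()
        if (Build.MonoEnvironment)
        {
            Console.WriteLine("Running under the Mono runtime");
        }
    }
}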
154  SabreTools.Library/Data/Constants.cs (new normal file)
@@ -0,0 +1,154 @@
using System;
using System.Reflection;

using SabreTools.Helper.Tools;

namespace SabreTools.Helper.Data
{
    public static class Constants
    {
        /// <summary>
        /// The current toolset version to be used by all child applications
        /// </summary>
        public static string Version = "v0.9.5-" + Assembly.GetExecutingAssembly().GetLinkerTime().ToString("yyyy-MM-dd HH:mm:ss");
        public const int HeaderHeight = 3;

        #region 0-byte file constants

        public const long SizeZero = 0;
        public const string CRCZero = "00000000";
        public const string MD5Zero = "d41d8cd98f00b204e9800998ecf8427e";
        public const string SHA1Zero = "da39a3ee5e6b4b0d3255bfef95601890afd80709";
        public const string SHA256Zero = "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad";
        public const string SHA384Zero = "cb00753f45a35e8bb5a03d699ac65007272c32ab0eded1631a8b605a43ff5bed8086072ba1e7cc2358baeca134c825a7";
        public const string SHA512Zero = "ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f";

        #endregion

        #region Byte (1000-based) size comparisons

        public const long KiloByte = 1000;
        public static long MegaByte = (long)Math.Pow(KiloByte, 2);
        public static long GigaByte = (long)Math.Pow(KiloByte, 3);
        public static long TeraByte = (long)Math.Pow(KiloByte, 4);
        public static long PetaByte = (long)Math.Pow(KiloByte, 5);
        public static long ExaByte = (long)Math.Pow(KiloByte, 6);
        public static long ZettaByte = (long)Math.Pow(KiloByte, 7);
        public static long YottaByte = (long)Math.Pow(KiloByte, 8);

        #endregion

        #region Byte (1024-based) size comparisons

        public const long KibiByte = 1024;
        public static long MibiByte = (long)Math.Pow(KibiByte, 2);
        public static long GibiByte = (long)Math.Pow(KibiByte, 3);
        public static long TibiByte = (long)Math.Pow(KibiByte, 4);
        public static long PibiByte = (long)Math.Pow(KibiByte, 5);
        public static long ExiByte = (long)Math.Pow(KibiByte, 6);
        public static long ZittiByte = (long)Math.Pow(KibiByte, 7);
        public static long YittiByte = (long)Math.Pow(KibiByte, 8);

        #endregion

        #region Database schema

        public const string HeadererDbSchema = "Headerer";
        public const string HeadererFileName = "Headerer.sqlite";
        public const string HeadererConnectionString = "Data Source=" + HeadererFileName + ";Version = 3;";

        #endregion

        #region Hash string length constants

        public const int CRCLength = 8;
        public const int MD5Length = 32;
        public const int SHA1Length = 40;
        public const int SHA256Length = 64;
        public const int SHA384Length = 96;
        public const int SHA512Length = 128;

        #endregion

        #region Magic numbers as strings

        public const string SevenZipSig = "377ABCAF271C";
        public const string GzSig = "1F8B";
        public const string RarSig = "526172211A0700";
        public const string RarFiveSig = "526172211A070100";
        public const string TarSig = "7573746172202000";
        public const string TarZeroSig = "7573746172003030";
        public const string ZipSig = "504B0304";
        public const string ZipSigEmpty = "504B0506";
        public const string ZipSigSpanned = "504B0708";

        #endregion

        #region Regular Expressions

        public const string XmlPattern = @"<(.*?)>(.*?)</(.*?)>";
        public const string HeaderPatternCMP = @"(^.*?) \($";
        public const string ItemPatternCMP = @"^\s*(\S*?) (.*)";
        public const string EndPatternCMP = @"^\s*\)\s*$";

        #endregion

        #region TorrentZip, T7z, and TGZ headers

        /* TorrentZip Header Format
            https://pkware.cachefly.net/webdocs/APPNOTE/APPNOTE_6.2.0.txt
            http://www.romvault.com/trrntzip_explained.doc

            00-03  Local file header signature (0x50, 0x4B, 0x03, 0x04)
            04-05  Version needed to extract (0x14, 0x00)
            06-07  General purpose bit flag (0x02, 0x00)
            08-09  Compression method (0x08, 0x00)
            0A-0B  Last mod file time (0x00, 0xBC)
            0C-0D  Last mod file date (0x98, 0x21)
        */
        public static byte[] TorrentZipHeader = new byte[] { 0x50, 0x4b, 0x03, 0x04, 0x14, 0x00, 0x02, 0x00, 0x08, 0x00, 0x00, 0xbc, 0x98, 0x21 };

        /* Torrent7z Header Format
            http://cpansearch.perl.org/src/BJOERN/Compress-Deflate7-1.0/7zip/DOC/7zFormat.txt

            00-05  Local file header signature (0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C)
            06-07  ArchiveVersion (0x00, 0x03)
            The rest is unknown
        */
        public static byte[] Torrent7ZipHeader = new byte[] { 0x37, 0x7a, 0xbc, 0xaf, 0x27, 0x1c, 0x00, 0x03 };
        public static byte[] Torrent7ZipSignature = new byte[] { 0xa9, 0xa9, 0x9f, 0xd1, 0x57, 0x08, 0xa9, 0xd7, 0xea, 0x29, 0x64, 0xb2,
            0x36, 0x1b, 0x83, 0x52, 0x33, 0x00, 0x74, 0x6f, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x37, 0x7a, 0x5f, 0x30, 0x2e, 0x39, 0x62, 0x65, 0x74, 0x61 };

        /* (Torrent)GZ Header Format
            https://tools.ietf.org/html/rfc1952

            00     Identification 1 (0x1F)
            01     Identification 2 (0x8B)
            02     Compression Method (0-7 reserved, 8 deflate; 0x08)
            03     Flags (0 FTEXT, 1 FHCRC, 2 FEXTRA, 3 FNAME, 4 FCOMMENT, 5 reserved, 6 reserved, 7 reserved; 0x04)
            04-07  Modification time (Unix format; 0x00, 0x00, 0x00, 0x00)
            08     Extra Flags (2 maximum compression, 4 fastest algorithm; 0x00)
            09     OS (See list on https://tools.ietf.org/html/rfc1952; 0x00)
            0A-0B  Length of extra field (mirrored; 0x1C, 0x00)
            0C-27  Extra field
                0C-1B  MD5 Hash
                1C-1F  CRC hash
                20-27  Int64 size (mirrored)
        */
        public static byte[] TorrentGZHeader = new byte[] { 0x1f, 0x8b, 0x08, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1c, 0x00 };

        #endregion

        #region ZIP internal signatures

        public const uint LocalFileHeaderSignature = 0x04034b50;
        public const uint EndOfLocalFileHeaderSignature = 0x08074b50;
        public const uint CentralDirectoryHeaderSignature = 0x02014b50;
        public const uint EndOfCentralDirSignature = 0x06054b50;
        public const uint Zip64EndOfCentralDirSignature = 0x06064b50;
        public const uint Zip64EndOfCentralDirectoryLocator = 0x07064b50;
        public const uint TorrentZipFileDateTime = 0x2198BC00;

        #endregion
    }
}
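The magic numbers above are stored both as hex strings and as raw byte arrays. As a sketch of how such a signature might be compared against the start of a file, here is a hypothetical helper (LooksLikeTorrentGZ is not part of this commit; only Constants.TorrentGZHeader is):

using System.IO;
using System.Linq;
using SabreTools.Helper.Data;

static class SignatureExample
{
    // Hypothetical helper: true if the file begins with the TorrentGZ header bytes
    public static bool LooksLikeTorrentGZ(string path)
    {
        byte[] expected = Constants.TorrentGZHeader;
        byte[] actual = new byte[expected.Length];

        using (FileStream fs = File.OpenRead(path))
        {
            int read = fs.Read(actual, 0, actual.Length);
            return read == expected.Length && actual.SequenceEqual(expected);
        }
    }
}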
369  SabreTools.Library/Data/Enums.cs (new normal file)
@@ -0,0 +1,369 @@
|
||||
namespace SabreTools.Helper.Data
|
||||
{
|
||||
#region Archival
|
||||
|
||||
/// <summary>
|
||||
/// Version of tool archive made by
|
||||
/// </summary>
|
||||
public enum ArchiveVersion : ushort
|
||||
{
|
||||
MSDOSandOS2 = 0,
|
||||
Amiga = 1,
|
||||
OpenVMS = 2,
|
||||
UNIX = 3,
|
||||
VMCMS = 4,
|
||||
AtariST = 5,
|
||||
OS2HPFS = 6,
|
||||
Macintosh = 7,
|
||||
ZSystem = 8,
|
||||
CPM = 9,
|
||||
WindowsNTFS = 10,
|
||||
MVS = 11,
|
||||
VSE = 12,
|
||||
AcornRisc = 13,
|
||||
VFAT = 14,
|
||||
AlternateMVS = 15,
|
||||
BeOS = 16,
|
||||
Tandem = 17,
|
||||
OS400 = 18,
|
||||
OSXDarwin = 19,
|
||||
TorrentZip = 20,
|
||||
TorrentZip64 = 45,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Compression method based on flag
|
||||
/// </summary>
|
||||
public enum CompressionMethod : ushort
|
||||
{
|
||||
Stored = 0,
|
||||
Shrunk = 1,
|
||||
ReducedCompressionFactor1 = 2,
|
||||
ReducedCompressionFactor2 = 3,
|
||||
ReducedCompressionFactor3 = 4,
|
||||
ReducedCompressionFactor4 = 5,
|
||||
Imploded = 6,
|
||||
Tokenizing = 7,
|
||||
Deflated = 8,
|
||||
Delfate64 = 9,
|
||||
PKWAREDataCompressionLibrary = 10,
|
||||
BZIP2 = 12,
|
||||
LZMA = 14,
|
||||
IBMTERSE = 18,
|
||||
IBMLZ77 = 19,
|
||||
WavPak = 97,
|
||||
PPMdVersionIRev1 = 98,
|
||||
|
||||
// Reserved and unused (SHOULD NOT BE USED)
|
||||
Type11 = 11,
|
||||
Type13 = 13,
|
||||
Type15 = 15,
|
||||
Type16 = 16,
|
||||
Type17 = 17,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Output format for rebuilt files
|
||||
/// </summary>
|
||||
public enum OutputFormat
|
||||
{
|
||||
// Currently implemented
|
||||
Folder = 0,
|
||||
TorrentZip = 1,
|
||||
TorrentGzip = 2,
|
||||
TapeArchive = 5,
|
||||
|
||||
// Currently unimplemented
|
||||
Torrent7Zip = 3,
|
||||
TorrentRar = 4,
|
||||
TorrentXZ = 6,
|
||||
TorrentLrzip = 7,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// RAR extra area flag
|
||||
/// </summary>
|
||||
public enum RarExtraAreaFlag : uint
|
||||
{
|
||||
FileEncryption = 0x01,
|
||||
FileHash = 0x02,
|
||||
FileTime = 0x03,
|
||||
FileVersion = 0x04,
|
||||
Redirection = 0x05,
|
||||
UnixOwner = 0x06,
|
||||
ServiceData = 0x07,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// RAR header types
|
||||
/// </summary>
|
||||
public enum RarHeaderType : uint
|
||||
{
|
||||
MainArchiveHeader = 1,
|
||||
File = 2,
|
||||
Service = 3,
|
||||
ArchiveEncryption = 4,
|
||||
EndOfArchive = 5,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// RAR entry redirection type
|
||||
/// </summary>
|
||||
public enum RarRedirectionType : uint
|
||||
{
|
||||
UnixSymlink = 0x0001,
|
||||
WindowsSymlink = 0x0002,
|
||||
WindowsJunction = 0x0003,
|
||||
HardLink = 0x0004,
|
||||
FileCopy = 0x0005,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// 7zip Properties
|
||||
/// </summary>
|
||||
public enum SevenZipProperties : uint
|
||||
{
|
||||
kEnd = 0x00,
|
||||
|
||||
kHeader = 0x01,
|
||||
|
||||
kArchiveProperties = 0x02,
|
||||
|
||||
kAdditionalStreamsInfo = 0x03,
|
||||
kMainStreamsInfo = 0x04,
|
||||
kFilesInfo = 0x05,
|
||||
|
||||
kPackInfo = 0x06,
|
||||
kUnPackInfo = 0x07,
|
||||
kSubStreamsInfo = 0x08,
|
||||
|
||||
kSize = 0x09,
|
||||
kCRC = 0x0A,
|
||||
|
||||
kFolder = 0x0B,
|
||||
|
||||
kCodersUnPackSize = 0x0C,
|
||||
kNumUnPackStream = 0x0D,
|
||||
|
||||
kEmptyStream = 0x0E,
|
||||
kEmptyFile = 0x0F,
|
||||
kAnti = 0x10,
|
||||
|
||||
kName = 0x11,
|
||||
kCTime = 0x12,
|
||||
kATime = 0x13,
|
||||
kMTime = 0x14,
|
||||
kWinAttributes = 0x15,
|
||||
kComment = 0x16,
|
||||
|
||||
kEncodedHeader = 0x17,
|
||||
|
||||
kStartPos = 0x18,
|
||||
kDummy = 0x19,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Zip open type
|
||||
/// </summary>
|
||||
/// <remarks>https://raw.githubusercontent.com/gjefferyes/RomVault/5a93500001f0d068f32cf77a048950717507f733/ROMVault2/SupportedFiles/ZipEnums.cs</remarks>
|
||||
public enum ZipOpenType
|
||||
{
|
||||
Closed,
|
||||
OpenRead,
|
||||
OpenWrite
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Zip testing type
|
||||
/// </summary>
|
||||
/// <remarks>https://raw.githubusercontent.com/gjefferyes/RomVault/5a93500001f0d068f32cf77a048950717507f733/ROMVault2/SupportedFiles/ZipEnums.cs</remarks>
|
||||
public enum ZipReturn
|
||||
{
|
||||
ZipGood,
|
||||
ZipFileLocked,
|
||||
ZipFileCountError,
|
||||
ZipSignatureError,
|
||||
ZipExtraDataOnEndOfZip,
|
||||
ZipUnsupportedCompression,
|
||||
ZipLocalFileHeaderError,
|
||||
ZipCentralDirError,
|
||||
ZipEndOfCentralDirectoryError,
|
||||
Zip64EndOfCentralDirError,
|
||||
Zip64EndOfCentralDirectoryLocatorError,
|
||||
ZipReadingFromOutputFile,
|
||||
ZipWritingToInputFile,
|
||||
ZipErrorGettingDataStream,
|
||||
ZipCRCDecodeError,
|
||||
ZipDecodeError,
|
||||
ZipFileNameToLong,
|
||||
ZipFileAlreadyOpen,
|
||||
ZipCannotFastOpen,
|
||||
ZipErrorOpeningFile,
|
||||
ZipErrorFileNotFound,
|
||||
ZipErrorReadingFile,
|
||||
ZipErrorTimeStamp,
|
||||
ZipErrorRollBackFile,
|
||||
ZipUntested
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region DatFile related
|
||||
|
||||
/// <summary>
|
||||
/// Determines forcemerging tag for DAT output
|
||||
/// </summary>
|
||||
public enum ForceMerging
|
||||
{
|
||||
None = 0,
|
||||
Split,
|
||||
Merged,
|
||||
NonMerged,
|
||||
Full,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Determines forcenodump tag for DAT output
|
||||
/// </summary>
|
||||
public enum ForceNodump
|
||||
{
|
||||
None = 0,
|
||||
Obsolete,
|
||||
Required,
|
||||
Ignore,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Determines forcepacking tag for DAT output
|
||||
/// </summary>
|
||||
public enum ForcePacking
|
||||
{
|
||||
None = 0,
|
||||
Zip,
|
||||
Unzip,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Determines which files should be skipped in DFD
|
||||
/// </summary>
|
||||
public enum SkipFileType
|
||||
{
|
||||
None = 0,
|
||||
Archive,
|
||||
File,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Determines how the current dictionary is sorted by
|
||||
/// </summary>
|
||||
public enum SortedBy
|
||||
{
|
||||
Default = 0,
|
||||
Size,
|
||||
CRC,
|
||||
MD5,
|
||||
SHA1,
|
||||
SHA256,
|
||||
SHA384,
|
||||
SHA512,
|
||||
Game,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Determines how a DAT will be split internally
|
||||
/// </summary>
|
||||
public enum SplitType
|
||||
{
|
||||
None = 0,
|
||||
NonMerged,
|
||||
Merged,
|
||||
FullNonMerged,
|
||||
Split,
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region DatItem related
|
||||
|
||||
/// <summary>
|
||||
/// Determine what type of file an item is
|
||||
/// </summary>
|
||||
public enum ItemType
|
||||
{
|
||||
Rom = 0,
|
||||
Disk = 1,
|
||||
Sample = 2,
|
||||
Release = 3,
|
||||
BiosSet = 4,
|
||||
Archive = 5,
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Help related
|
||||
|
||||
/// <summary>
|
||||
/// Determines the feature type to check for
|
||||
/// </summary>
|
||||
public enum FeatureType
|
||||
{
|
||||
Flag = 0,
|
||||
String,
|
||||
List,
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Logging related
|
||||
|
||||
/// <summary>
|
||||
/// Severity of the logging statement
|
||||
/// </summary>
|
||||
public enum LogLevel
|
||||
{
|
||||
VERBOSE = 0,
|
||||
USER,
|
||||
WARNING,
|
||||
ERROR,
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Skippers and Mappers
|
||||
|
||||
/// <summary>
|
||||
/// Determines the header skip operation
|
||||
/// </summary>
|
||||
public enum HeaderSkipOperation
|
||||
{
|
||||
None = 0,
|
||||
Bitswap,
|
||||
Byteswap,
|
||||
Wordswap,
|
||||
WordByteswap,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Determines the type of test to be done
|
||||
/// </summary>
|
||||
public enum HeaderSkipTest
|
||||
{
|
||||
Data = 0,
|
||||
Or,
|
||||
Xor,
|
||||
And,
|
||||
File,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Determines the operator to be used in a file test
|
||||
/// </summary>
|
||||
public enum HeaderSkipTestFileOperator
|
||||
{
|
||||
Equal = 0,
|
||||
Less,
|
||||
Greater,
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
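Enums.cs above defines plain (non-flag) enumerations such as OutputFormat, which lists the implemented and not-yet-implemented rebuild targets. A hypothetical sketch of how a caller might map OutputFormat to a file extension follows; the extension strings are assumptions for illustration, not values taken from this commit.

using SabreTools.Helper.Data;

static class OutputFormatExample
{
    // Hypothetical mapping from OutputFormat to an output file extension
    public static string ToExtension(OutputFormat format)
    {
        switch (format)
        {
            case OutputFormat.TorrentZip: return ".zip";
            case OutputFormat.TorrentGzip: return ".gz";
            case OutputFormat.TapeArchive: return ".tar";
            case OutputFormat.Torrent7Zip: return ".7z";
            case OutputFormat.TorrentRar: return ".rar";
            case OutputFormat.TorrentXZ: return ".xz";
            case OutputFormat.TorrentLrzip: return ".lrz";
            default: return string.Empty; // Folder output needs no extension
        }
    }
}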
320  SabreTools.Library/Data/Flags.cs (new normal file)
@@ -0,0 +1,320 @@
|
||||
using System;
|
||||
|
||||
namespace SabreTools.Helper.Data
|
||||
{
|
||||
#region Archival
|
||||
|
||||
/// <summary>
|
||||
/// Determines the level to scan archives at
|
||||
/// </summary>
|
||||
[Flags]
|
||||
public enum ArchiveScanLevel
|
||||
{
|
||||
// 7zip
|
||||
SevenZipExternal = 0x00001,
|
||||
SevenZipInternal = 0x00002,
|
||||
SevenZipBoth = SevenZipExternal | SevenZipInternal,
|
||||
|
||||
// GZip
|
||||
GZipExternal = 0x00010,
|
||||
GZipInternal = 0x00020,
|
||||
GZipBoth = GZipExternal | GZipInternal,
|
||||
|
||||
// RAR
|
||||
RarExternal = 0x00100,
|
||||
RarInternal = 0x00200,
|
||||
RarBoth = RarExternal | RarInternal,
|
||||
|
||||
// Zip
|
||||
ZipExternal = 0x01000,
|
||||
ZipInternal = 0x02000,
|
||||
ZipBoth = ZipExternal | ZipInternal,
|
||||
|
||||
// Tar
|
||||
TarExternal = 0x10000,
|
||||
TarInternal = 0x20000,
|
||||
TarBoth = TarExternal | TarInternal,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Determines the archive general bit flags
|
||||
/// </summary>
|
||||
[Flags]
|
||||
public enum GeneralPurposeBitFlag : ushort
|
||||
{
|
||||
Encrypted = 0x0001,
|
||||
ZeroedCRCAndSize = 0x0008,
|
||||
CompressedPatchedData = 0x0020,
|
||||
StrongEncryption = 0x0040,
|
||||
LanguageEncodingFlag = 0x0800,
|
||||
EncryptedCentralDirectory = 0x2000,
|
||||
|
||||
// For Method 6 - Imploding
|
||||
Imploding8KSlidingDictionary = 0x0002,
|
||||
Imploding3ShannonFanoTrees = 0x0004,
|
||||
|
||||
// For Methods 8 and 9 - Deflating
|
||||
DeflatingMaximumCompression = 0x0002,
|
||||
DeflatingFastCompression = 0x0004,
|
||||
DeflatingSuperFastCompression = 0x0006,
|
||||
EnhancedDeflating = 0x0010,
|
||||
|
||||
// For Method 14 - LZMA
|
||||
LZMAEOSMarkerUsed = 0x0002,
|
||||
|
||||
// Reserved and unused (SHOULD NOT BE USED)
|
||||
Bit7 = 0x0080,
|
||||
Bit8 = 0x0100,
|
||||
Bit9 = 0x0200,
|
||||
Bit10 = 0x0400,
|
||||
Bit12 = 0x1000, // Reserved by PKWARE for enhanced compression
|
||||
Bit14 = 0x4000, // Reserved by PKWARE
|
||||
Bit15 = 0x8000, // Reserved by PKWARE
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Internal file attributes used by archives
|
||||
/// </summary>
|
||||
[Flags]
|
||||
public enum InternalFileAttributes : ushort
|
||||
{
|
||||
ASCIIOrTextFile = 0x0001,
|
||||
RecordLengthControl = 0x0002,
|
||||
|
||||
// Reserved and unused (SHOULD NOT BE USED)
|
||||
Bit1 = 0x0002,
|
||||
Bit2 = 0x0004,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// RAR archive flags
|
||||
/// </summary>
|
||||
[Flags]
|
||||
public enum RarArchiveFlags : uint
|
||||
{
|
||||
Volume = 0x0001, // Volume. Archive is a part of multivolume set.
|
||||
VolumeNumberField = 0x0002, // Volume number field is present. This flag is present in all volumes except first.
|
||||
Solid = 0x0004, // Solid archive.
|
||||
RecoveryRecordPresent = 0x0008, // Recovery record is present.
|
||||
Locked = 0x0010, // Locked archive.
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// RAR entry encryption flags
|
||||
/// </summary>
|
||||
[Flags]
|
||||
public enum RarEncryptionFlags : uint
|
||||
{
|
||||
PasswordCheckDataPresent = 0x0001,
|
||||
UseTweakedChecksums = 0x0002,
|
||||
|
||||
/*
|
||||
If flag 0x0002 is present, RAR transforms the checksum preserving file or service data integrity, so it becomes dependent on
|
||||
encryption key. It makes guessing file contents based on checksum impossible. It affects both data CRC32 in file header and
|
||||
checksums in file hash record in extra area.
|
||||
*/
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// RAR file flags
|
||||
/// </summary>
|
||||
[Flags]
|
||||
public enum RarFileFlags : uint
|
||||
{
|
||||
Directory = 0x0001, // Directory file system object (file header only)
|
||||
TimeInUnix = 0x0002, // Time field in Unix format is present
|
||||
CRCPresent = 0x0004, // CRC32 field is present
|
||||
UnpackedSizeUnknown = 0x0008, // Unpacked size is unknown
|
||||
|
||||
/*
|
||||
If flag 0x0008 is set, unpacked size field is still present, but must be ignored and extraction
|
||||
must be performed until reaching the end of compression stream. This flag can be set if actual
|
||||
file size is larger than reported by OS or if file size is unknown such as for all volumes except
|
||||
last when archiving from stdin to multivolume archive
|
||||
*/
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// RAR header flags
|
||||
/// </summary>
|
||||
[Flags]
|
||||
public enum RarHeaderFlags : uint
|
||||
{
|
||||
ExtraAreaPresent = 0x0001, // Extra area is present in the end of header
|
||||
DataAreaPresent = 0x0002, // Data area is present in the end of header
|
||||
BlocksWithUnknownType = 0x0004, // Blocks with unknown type and this flag must be skipped when updating an archive
|
||||
DataAreaContinuingFromPrevious = 0x0008, // Data area is continuing from previous volume
|
||||
DataAreaContinuingToNext = 0x0010, // Data area is continuing in next volume
|
||||
BlockDependsOnPreceding = 0x0020, // Block depends on preceding file block
|
||||
PreserveChildBlock = 0x0040, // Preserve a child block if host block is modified
|
||||
}
|
||||
|
||||
[Flags]
|
||||
public enum RarUnixOwnerRecordFlags : uint
|
||||
{
|
||||
UserNameStringIsPresent = 0x0001,
|
||||
GroupNameStringIsPresent = 0x0002,
|
||||
NumericUserIdIsPresent = 0x0004,
|
||||
NumericGroupIdIsPresent = 0x0008,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// RAR entry time flags
|
||||
/// </summary>
|
||||
[Flags]
|
||||
public enum RarTimeFlags : uint
|
||||
{
|
||||
TimeInUnixFormat = 0x0001,
|
||||
ModificationTimePresent = 0x0002,
|
||||
CreationTimePresent = 0x0004,
|
||||
LastAccessTimePresent = 0x0008,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Zipfile special status
|
||||
/// </summary>
|
||||
/// <remarks>https://github.com/gjefferyes/RomVault/blob/5a93500001f0d068f32cf77a048950717507f733/ROMVault2/SupportedFiles/ZipEnums.cs</remarks>
|
||||
[Flags]
|
||||
public enum ZipStatus
|
||||
{
|
||||
None = 0x0,
|
||||
TorrentZip = 0x1,
|
||||
ExtraData = 0x2
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region DatFile related
|
||||
|
||||
/// <summary>
|
||||
/// Determines the DAT output format
|
||||
/// </summary>
|
||||
[Flags]
|
||||
public enum DatFormat
|
||||
{
|
||||
// XML Formats
|
||||
Logiqx = 0x01,
|
||||
SoftwareList = Logiqx << 1,
|
||||
OfflineList = SoftwareList << 1,
|
||||
SabreDat = OfflineList << 1,
|
||||
|
||||
// Propietary Formats
|
||||
ClrMamePro = SabreDat << 1,
|
||||
RomCenter = ClrMamePro << 1,
|
||||
DOSCenter = RomCenter << 1,
|
||||
AttractMode = DOSCenter << 1,
|
||||
|
||||
// Standardized Text Formats
|
||||
MissFile = AttractMode << 1,
|
||||
CSV = MissFile << 1,
|
||||
TSV = CSV << 1,
|
||||
|
||||
// SFV-similar Formats
|
||||
RedumpSFV = TSV << 1,
|
||||
RedumpMD5 = RedumpSFV << 1,
|
||||
RedumpSHA1 = RedumpMD5 << 1,
|
||||
RedumpSHA256 = RedumpSHA1 << 1,
|
||||
RedumpSHA384 = RedumpSHA256 << 1,
|
||||
RedumpSHA512 = RedumpSHA384 << 1,
|
||||
|
||||
// Specialty combinations
|
||||
ALL = 0xFFFFF,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Determines which diffs should be created
|
||||
/// </summary>
|
||||
[Flags]
|
||||
public enum DiffMode
|
||||
{
|
||||
// Standard diffs
|
||||
Dupes = 0x01,
|
||||
NoDupes = Dupes << 1,
|
||||
Individuals = NoDupes << 1,
|
||||
All = Dupes | NoDupes | Individuals,
|
||||
|
||||
// Cascaded diffs
|
||||
Cascade = Individuals << 1,
|
||||
ReverseCascade = Cascade << 1,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Determine which format to output Stats to
|
||||
/// </summary>
|
||||
/// [Flags]
|
||||
public enum StatDatFormat
|
||||
{
|
||||
None = 0x01,
|
||||
HTML = None << 1,
|
||||
CSV = HTML << 1,
|
||||
TSV = CSV << 1,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Determine what hashes to strip from the DAT
|
||||
/// </summary>
|
||||
[Flags]
|
||||
public enum Hash
|
||||
{
|
||||
CRC = 0x0001,
|
||||
MD5 = CRC << 1,
|
||||
SHA1 = MD5 << 1,
|
||||
SHA256 = SHA1 << 1,
|
||||
SHA384 = SHA256 << 1,
|
||||
SHA512 = SHA384 << 1,
|
||||
xxHash = SHA512 << 1,
|
||||
|
||||
// Special combinations
|
||||
Standard = CRC | MD5 | SHA1,
|
||||
DeepHashes = SHA256 | SHA384 | SHA512 | xxHash,
|
||||
SecureHashes = MD5 | SHA1 | SHA256 | SHA384 | SHA512 | xxHash,
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region DatItem related
|
||||
|
||||
/// <summary>
|
||||
/// Determines which type of duplicate a file is
|
||||
/// </summary>
|
||||
[Flags]
|
||||
public enum DupeType
|
||||
{
|
||||
// Type of match
|
||||
Hash = 0x01,
|
||||
All = 0x02,
|
||||
|
||||
// Location of match
|
||||
Internal = 0x10,
|
||||
External = 0x20,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Determine the status of the item
|
||||
/// </summary>
|
||||
[Flags]
|
||||
public enum ItemStatus
|
||||
{
|
||||
NULL = 0x00, // This is a fake flag that is used for filter only
|
||||
None = 0x01,
|
||||
Good = 0x02,
|
||||
BadDump = 0x04,
|
||||
Nodump = 0x08,
|
||||
Verified = 0x10,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Determine what type of machine it is
|
||||
/// </summary>
|
||||
[Flags]
|
||||
public enum MachineType
|
||||
{
|
||||
NULL = 0x00, // This is a fake flag used for filter only
|
||||
None = 0x01,
|
||||
Bios = 0x02,
|
||||
Device = 0x04,
|
||||
Mechanical = 0x08,
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
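Most of the enums in Flags.cs above are [Flags] bitmasks, so values are combined with | and tested with &, the same pattern the library uses elsewhere (for example `(lastItem.Dupe & DupeType.External) != 0` in DatItem.cs below). A brief sketch; the Demo wrapper is illustrative only.

using SabreTools.Helper.Data;

static class FlagsExample
{
    static void Demo()
    {
        // Combine per-format scan levels into a single value
        ArchiveScanLevel level = ArchiveScanLevel.SevenZipBoth | ArchiveScanLevel.ZipExternal;

        // Test an individual bit with a bitwise AND
        bool scanZipExternally = (level & ArchiveScanLevel.ZipExternal) != 0;

        // Hash.Standard is defined above as CRC | MD5 | SHA1
        Hash hashes = Hash.Standard;
        bool wantsMd5 = (hashes & Hash.MD5) != 0;
    }
}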
77  SabreTools.Library/Data/Globals.cs (new normal file)
@@ -0,0 +1,77 @@
using System;
using System.Reflection;
using System.Threading.Tasks;

#if MONO
using System.IO;
#else
using Alphaleonis.Win32.Filesystem;
#endif

namespace SabreTools.Helper.Data
{
    public class Globals
    {
        #region Private implementations

        private static Logger _logger = null;
        private static int _maxDegreeOfParallelism = 4;
        private static string _exeName = new Uri(Assembly.GetExecutingAssembly().GetName().CodeBase).LocalPath;
        private static string _exeDir = Path.GetDirectoryName(_exeName);
        private static string _args = string.Join(" ", Environment.GetCommandLineArgs());

        #endregion

        #region Public accessors

        public static Logger Logger
        {
            get
            {
                if (_logger == null)
                {
                    _logger = new Logger();
                }
                return _logger;
            }
            set { _logger = value; }
        }
        public static int MaxDegreeOfParallelism
        {
            set { _maxDegreeOfParallelism = value; }
        }
        public static ParallelOptions ParallelOptions
        {
            get
            {
                return new ParallelOptions()
                {
                    MaxDegreeOfParallelism = _maxDegreeOfParallelism
                };
            }
        }
        public static string ExeName
        {
            get
            {
                return _exeName;
            }
        }
        public static string ExeDir
        {
            get
            {
                return _exeDir;
            }
        }
        public static string CommandLineArgs
        {
            get
            {
                return _args;
            }
        }

        #endregion
    }
}
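Globals above centralizes the shared Logger, the executable paths, and a ParallelOptions instance built from a settable MaxDegreeOfParallelism. A minimal sketch of how a caller might use it with Parallel.ForEach; the Demo wrapper and the value 8 are illustrative.

using System;
using System.Threading.Tasks;
using SabreTools.Helper.Data;

static class ParallelismExample
{
    static void Demo(string[] inputs)
    {
        // The setter caps the thread count for every ParallelOptions handed out afterwards
        Globals.MaxDegreeOfParallelism = 8;

        Parallel.ForEach(inputs, Globals.ParallelOptions, input =>
        {
            Console.WriteLine("Processing " + input);
        });
    }
}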
70  SabreTools.Library/Dats/Archive.cs (new normal file)
@@ -0,0 +1,70 @@
using System;
using SabreTools.Helper.Data;

namespace SabreTools.Helper.Dats
{
    public class Archive : DatItem, ICloneable
    {
        #region Constructors

        /// <summary>
        /// Create a default, empty Archive object
        /// </summary>
        public Archive()
        {
            _name = "";
            _itemType = ItemType.Archive;
        }

        #endregion

        #region Cloning Methods

        public object Clone()
        {
            return new Archive()
            {
                Name = this.Name,
                Type = this.Type,
                Dupe = this.Dupe,

                Machine = this.Machine,

                Supported = this.Supported,
                Publisher = this.Publisher,
                Infos = this.Infos,
                PartName = this.PartName,
                PartInterface = this.PartInterface,
                Features = this.Features,
                AreaName = this.AreaName,
                AreaSize = this.AreaSize,

                SystemID = this.SystemID,
                System = this.System,
                SourceID = this.SourceID,
                Source = this.Source,
            };
        }

        #endregion

        #region Comparison Methods

        public override bool Equals(DatItem other)
        {
            // If we don't have an archive, return false
            if (_itemType != other.Type)
            {
                return false;
            }

            // Otherwise, treat it as an archive
            Archive newOther = (Archive)other;

            // If the archive information matches
            return (_name == newOther.Name);
        }

        #endregion
    }
}
95  SabreTools.Library/Dats/BiosSet.cs (new normal file)
@@ -0,0 +1,95 @@
using System;
using SabreTools.Helper.Data;

namespace SabreTools.Helper.Dats
{
    public class BiosSet : DatItem, ICloneable
    {
        #region Private instance variables

        private string _description;
        private bool? _default;

        #endregion

        #region Publicly facing variables

        public string Description
        {
            get { return _description; }
            set { _description = value; }
        }
        public bool? Default
        {
            get { return _default; }
            set { _default = value; }
        }

        #endregion

        #region Constructors

        /// <summary>
        /// Create a default, empty BiosSet object
        /// </summary>
        public BiosSet()
        {
            _name = "";
            _itemType = ItemType.BiosSet;
        }

        #endregion

        #region Cloning Methods

        public object Clone()
        {
            return new BiosSet()
            {
                Name = this.Name,
                Type = this.Type,
                Dupe = this.Dupe,

                Machine = this.Machine,

                Supported = this.Supported,
                Publisher = this.Publisher,
                Infos = this.Infos,
                PartName = this.PartName,
                PartInterface = this.PartInterface,
                Features = this.Features,
                AreaName = this.AreaName,
                AreaSize = this.AreaSize,

                SystemID = this.SystemID,
                System = this.System,
                SourceID = this.SourceID,
                Source = this.Source,

                Description = this.Description,
                Default = this.Default,
            };
        }

        #endregion

        #region Comparison Methods

        public override bool Equals(DatItem other)
        {
            // If we don't have a biosset, return false
            if (_itemType != other.Type)
            {
                return false;
            }

            // Otherwise, treat it as a biosset
            BiosSet newOther = (BiosSet)other;

            // If the biosset information matches
            return (_name == newOther.Name && _description == newOther.Description && _default == newOther.Default);
        }

        #endregion
    }
}
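Archive and BiosSet above follow the same DatItem pattern: Clone() is declared as returning object and must be cast back, and Equals() checks the item type first and then the type-specific fields (name only for Archive; name, description, and default flag for BiosSet). A small sketch; the sample values are invented for illustration.

using SabreTools.Helper.Dats;

static class BiosSetExample
{
    static void Demo()
    {
        BiosSet original = new BiosSet()
        {
            Name = "neogeo",              // invented sample data
            Description = "Neo-Geo BIOS", // invented sample data
            Default = true,
        };

        // Clone() returns object, so cast back to the concrete type
        BiosSet copy = (BiosSet)original.Clone();

        // True: name, description, and default all match
        bool same = copy.Equals(original);
    }
}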
596  SabreTools.Library/Dats/DatFile.cs (new normal file)
@@ -0,0 +1,596 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
using SabreTools.Helper.Data;
|
||||
|
||||
namespace SabreTools.Helper.Dats
|
||||
{
|
||||
public partial class DatFile
|
||||
{
|
||||
#region Private instance variables
|
||||
|
||||
// Data common to most DAT types
|
||||
private string _fileName;
|
||||
private string _name;
|
||||
private string _description;
|
||||
private string _rootDir;
|
||||
private string _category;
|
||||
private string _version;
|
||||
private string _date;
|
||||
private string _author;
|
||||
private string _email;
|
||||
private string _homepage;
|
||||
private string _url;
|
||||
private string _comment;
|
||||
private string _header;
|
||||
private string _type; // Generally only used for SuperDAT
|
||||
private ForceMerging _forceMerging;
|
||||
private ForceNodump _forceNodump;
|
||||
private ForcePacking _forcePacking;
|
||||
private DatFormat _datFormat;
|
||||
private bool _excludeOf;
|
||||
private bool _mergeRoms;
|
||||
private Hash _stripHash;
|
||||
private bool _oneGameOneRegion;
|
||||
private List<string> _regions = new List<string>();
|
||||
private SortedDictionary<string, List<DatItem>> _files = new SortedDictionary<string, List<DatItem>>();
|
||||
private SortedBy _sortedBy;
|
||||
|
||||
// Data specific to the Miss DAT type
|
||||
private bool _useGame;
|
||||
private string _prefix;
|
||||
private string _postfix;
|
||||
private bool _quotes;
|
||||
private string _repExt;
|
||||
private string _addExt;
|
||||
private bool _remExt;
|
||||
private bool _gameName;
|
||||
private bool _romba;
|
||||
|
||||
// Statistical data related to the DAT
|
||||
private long _romCount;
|
||||
private long _diskCount;
|
||||
private long _totalSize;
|
||||
private long _crcCount;
|
||||
private long _md5Count;
|
||||
private long _sha1Count;
|
||||
private long _sha256Count;
|
||||
private long _sha384Count;
|
||||
private long _sha512Count;
|
||||
private long _baddumpCount;
|
||||
private long _nodumpCount;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Publicly facing variables
|
||||
|
||||
// Data common to most DAT types
|
||||
public string FileName
|
||||
{
|
||||
get { return _fileName; }
|
||||
set { _fileName = value; }
|
||||
}
|
||||
public string Name
|
||||
{
|
||||
get { return _name; }
|
||||
set { _name = value; }
|
||||
}
|
||||
public string Description
|
||||
{
|
||||
get { return _description; }
|
||||
set { _description = value; }
|
||||
}
|
||||
public string RootDir
|
||||
{
|
||||
get { return _rootDir; }
|
||||
set { _rootDir = value; }
|
||||
}
|
||||
public string Category
|
||||
{
|
||||
get { return _category; }
|
||||
set { _category = value; }
|
||||
}
|
||||
public string Version
|
||||
{
|
||||
get { return _version; }
|
||||
set { _version = value; }
|
||||
}
|
||||
public string Date
|
||||
{
|
||||
get { return _date; }
|
||||
set { _date = value; }
|
||||
}
|
||||
public string Author
|
||||
{
|
||||
get { return _author; }
|
||||
set { _author = value; }
|
||||
}
|
||||
public string Email
|
||||
{
|
||||
get { return _email; }
|
||||
set { _email = value; }
|
||||
}
|
||||
public string Homepage
|
||||
{
|
||||
get { return _homepage; }
|
||||
set { _homepage = value; }
|
||||
}
|
||||
public string Url
|
||||
{
|
||||
get { return _url; }
|
||||
set { _url = value; }
|
||||
}
|
||||
public string Comment
|
||||
{
|
||||
get { return _comment; }
|
||||
set { _comment = value; }
|
||||
}
|
||||
public string Header
|
||||
{
|
||||
get { return _header; }
|
||||
set { _header = value; }
|
||||
}
|
||||
public string Type // Generally only used for SuperDAT
|
||||
{
|
||||
get { return _type; }
|
||||
set { _type = value; }
|
||||
}
|
||||
public ForceMerging ForceMerging
|
||||
{
|
||||
get { return _forceMerging; }
|
||||
set { _forceMerging = value; }
|
||||
}
|
||||
public ForceNodump ForceNodump
|
||||
{
|
||||
get { return _forceNodump; }
|
||||
set { _forceNodump = value; }
|
||||
}
|
||||
public ForcePacking ForcePacking
|
||||
{
|
||||
get { return _forcePacking; }
|
||||
set { _forcePacking = value; }
|
||||
}
|
||||
public DatFormat DatFormat
|
||||
{
|
||||
get { return _datFormat; }
|
||||
set { _datFormat = value; }
|
||||
}
|
||||
public bool ExcludeOf
|
||||
{
|
||||
get { return _excludeOf; }
|
||||
set { _excludeOf = value; }
|
||||
}
|
||||
public bool MergeRoms
|
||||
{
|
||||
get { return _mergeRoms; }
|
||||
set { _mergeRoms = value; }
|
||||
}
|
||||
public Hash StripHash
|
||||
{
|
||||
get { return _stripHash; }
|
||||
set { _stripHash = value; }
|
||||
}
|
||||
public bool OneGameOneRegion
|
||||
{
|
||||
get { return _oneGameOneRegion; }
|
||||
set { _oneGameOneRegion = value; }
|
||||
}
|
||||
public List<string> Regions
|
||||
{
|
||||
get { return _regions; }
|
||||
set { _regions = value; }
|
||||
}
|
||||
public SortedBy SortedBy
|
||||
{
|
||||
get { return _sortedBy; }
|
||||
set { _sortedBy = value; }
|
||||
}
|
||||
|
||||
// Data specific to the Miss DAT type
|
||||
public bool UseGame
|
||||
{
|
||||
get { return _useGame; }
|
||||
set { _useGame = value; }
|
||||
}
|
||||
public string Prefix
|
||||
{
|
||||
get { return _prefix; }
|
||||
set { _prefix = value; }
|
||||
}
|
||||
public string Postfix
|
||||
{
|
||||
get { return _postfix; }
|
||||
set { _postfix = value; }
|
||||
}
|
||||
public bool Quotes
|
||||
{
|
||||
get { return _quotes; }
|
||||
set { _quotes = value; }
|
||||
}
|
||||
public string RepExt
|
||||
{
|
||||
get { return _repExt; }
|
||||
set { _repExt = value; }
|
||||
}
|
||||
public string AddExt
|
||||
{
|
||||
get { return _addExt; }
|
||||
set { _addExt = value; }
|
||||
}
|
||||
public bool RemExt
|
||||
{
|
||||
get { return _remExt; }
|
||||
set { _remExt = value; }
|
||||
}
|
||||
public bool GameName
|
||||
{
|
||||
get { return _gameName; }
|
||||
set { _gameName = value; }
|
||||
}
|
||||
public bool Romba
|
||||
{
|
||||
get { return _romba; }
|
||||
set { _romba = value; }
|
||||
}
|
||||
|
||||
// Statistical data related to the DAT
|
||||
public long RomCount
|
||||
{
|
||||
get { return _romCount; }
|
||||
set { _romCount = value; }
|
||||
}
|
||||
public long DiskCount
|
||||
{
|
||||
get { return _diskCount; }
|
||||
set { _diskCount = value; }
|
||||
}
|
||||
public long TotalSize
|
||||
{
|
||||
get { return _totalSize; }
|
||||
set { _totalSize = value; }
|
||||
}
|
||||
public long CRCCount
|
||||
{
|
||||
get { return _crcCount; }
|
||||
set { _crcCount = value; }
|
||||
}
|
||||
public long MD5Count
|
||||
{
|
||||
get { return _md5Count; }
|
||||
set { _md5Count = value; }
|
||||
}
|
||||
public long SHA1Count
|
||||
{
|
||||
get { return _sha1Count; }
|
||||
set { _sha1Count = value; }
|
||||
}
|
||||
public long SHA256Count
|
||||
{
|
||||
get { return _sha256Count; }
|
||||
set { _sha256Count = value; }
|
||||
}
|
||||
public long SHA384Count
|
||||
{
|
||||
get { return _sha384Count; }
|
||||
set { _sha384Count = value; }
|
||||
}
|
||||
public long SHA512Count
|
||||
{
|
||||
get { return _sha512Count; }
|
||||
set { _sha512Count = value; }
|
||||
}
|
||||
public long BaddumpCount
|
||||
{
|
||||
get { return _baddumpCount; }
|
||||
set { _baddumpCount = value; }
|
||||
}
|
||||
public long NodumpCount
|
||||
{
|
||||
get { return _nodumpCount; }
|
||||
set { _nodumpCount = value; }
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Instance Methods
|
||||
|
||||
#region Accessors
|
||||
|
||||
/// <summary>
|
||||
/// Passthrough to access the file dictionary
|
||||
/// </summary>
|
||||
/// <param name="key">Key in the dictionary to reference</param>
|
||||
public List<DatItem> this[string key]
|
||||
{
|
||||
get
|
||||
{
|
||||
// If the dictionary is null, create it
|
||||
if (_files == null)
|
||||
{
|
||||
_files = new SortedDictionary<string, List<DatItem>>();
|
||||
}
|
||||
|
||||
lock (_files)
|
||||
{
|
||||
// If the key is missing from the dictionary, add it
|
||||
if (!_files.ContainsKey(key))
|
||||
{
|
||||
_files.Add(key, new List<DatItem>());
|
||||
}
|
||||
|
||||
// Now return the value
|
||||
return _files[key];
|
||||
}
|
||||
}
|
||||
set
|
||||
{
|
||||
// If the dictionary is null, create it
|
||||
if (_files == null)
|
||||
{
|
||||
_files = new SortedDictionary<string, List<DatItem>>();
|
||||
}
|
||||
|
||||
lock (_files)
|
||||
{
|
||||
// If the key is missing from the dictionary, add it
|
||||
if (!_files.ContainsKey(key))
|
||||
{
|
||||
_files.Add(key, new List<DatItem>());
|
||||
}
|
||||
|
||||
// Now set the value
|
||||
_files[key] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Add a new key to the file dictionary
|
||||
/// </summary>
|
||||
/// <param name="key">Key in the dictionary to add to</param>
|
||||
public void Add(string key)
|
||||
{
|
||||
// If the dictionary is null, create it
|
||||
if (_files == null)
|
||||
{
|
||||
_files = new SortedDictionary<string, List<DatItem>>();
|
||||
}
|
||||
|
||||
lock (_files)
|
||||
{
|
||||
// If the key is missing from the dictionary, add it
|
||||
if (!_files.ContainsKey(key))
|
||||
{
|
||||
_files.Add(key, new List<DatItem>());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Add a value to the file dictionary
|
||||
/// </summary>
|
||||
/// <param name="key">Key in the dictionary to add to</param>
|
||||
/// <param name="value">Value to add to the dictionary</param>
|
||||
public void Add(string key, DatItem value)
|
||||
{
|
||||
// If the dictionary is null, create it
|
||||
if (_files == null)
|
||||
{
|
||||
_files = new SortedDictionary<string, List<DatItem>>();
|
||||
}
|
||||
|
||||
lock (_files)
|
||||
{
|
||||
// If the key is missing from the dictionary, add it
|
||||
if (!_files.ContainsKey(key))
|
||||
{
|
||||
_files.Add(key, new List<DatItem>());
|
||||
}
|
||||
|
||||
// Now add the value
|
||||
_files[key].Add(value);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Add a range of values to the file dictionary
|
||||
/// </summary>
|
||||
/// <param name="key">Key in the dictionary to add to</param>
|
||||
/// <param name="value">Value to add to the dictionary</param>
|
||||
public void AddRange(string key, List<DatItem> value)
|
||||
{
|
||||
// If the dictionary is null, create it
|
||||
if (_files == null)
|
||||
{
|
||||
_files = new SortedDictionary<string, List<DatItem>>();
|
||||
}
|
||||
|
||||
lock (_files)
|
||||
{
|
||||
// If the key is missing from the dictionary, add it
|
||||
if (!_files.ContainsKey(key))
|
||||
{
|
||||
_files.Add(key, new List<DatItem>());
|
||||
}
|
||||
|
||||
// Now add the value
|
||||
_files[key].AddRange(value);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Get if the file dictionary contains the key
|
||||
/// </summary>
|
||||
/// <param name="key">Key in the dictionary to check</param>
|
||||
/// <returns>True if the key exists, false otherwise</returns>
|
||||
public bool ContainsKey(string key)
|
||||
{
|
||||
// If the dictionary is null, create it
|
||||
if (_files == null)
|
||||
{
|
||||
_files = new SortedDictionary<string, List<DatItem>>();
|
||||
}
|
||||
|
||||
// If the key is null, we return false since keys can't be null
|
||||
if (key == null)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
lock (_files)
|
||||
{
|
||||
return _files.ContainsKey(key);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Get the number of DatItems in the file dictionary
|
||||
/// </summary>
|
||||
/// <returns>Number of DatItems in the file dictionary</returns>
|
||||
public long Count
|
||||
{
|
||||
get
|
||||
{
|
||||
// If the dictionary is null, create it
|
||||
if (_files == null)
|
||||
{
|
||||
_files = new SortedDictionary<string, List<DatItem>>();
|
||||
}
|
||||
|
||||
lock (_files)
|
||||
{
|
||||
int count = 0;
|
||||
foreach (string key in _files.Keys)
|
||||
{
|
||||
count += _files[key].Count;
|
||||
}
|
||||
|
||||
return count;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Delete the file dictionary
|
||||
/// </summary>
|
||||
public void Delete()
|
||||
{
|
||||
_files = null;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Get the keys from the file dictionary
|
||||
/// </summary>
|
||||
/// <returns>IEnumerable of the keys</returns>
|
||||
public IEnumerable<string> Keys
|
||||
{
|
||||
get
|
||||
{
|
||||
// If the dictionary is null, create it
|
||||
if (_files == null)
|
||||
{
|
||||
_files = new SortedDictionary<string, List<DatItem>>();
|
||||
}
|
||||
|
||||
lock (_files)
|
||||
{
|
||||
return _files.Keys;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Remove a key from the file dictionary
|
||||
/// </summary>
|
||||
/// <param name="key"></param>
|
||||
public void Remove(string key)
|
||||
{
|
||||
// If the dictionary is null, create it
|
||||
if (_files == null)
|
||||
{
|
||||
_files = new SortedDictionary<string, List<DatItem>>();
|
||||
}
|
||||
|
||||
lock (_files)
|
||||
{
|
||||
// If the key is in the dictionary, remove it
|
||||
if (_files.ContainsKey(key))
|
||||
{
|
||||
_files.Remove(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Reset the file dictionary
|
||||
/// </summary>
|
||||
public void Reset()
|
||||
{
|
||||
_files = new SortedDictionary<string, List<DatItem>>();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Set a new file dictionary from an existing one
|
||||
/// </summary>
|
||||
/// <param name="newdict"></param>
|
||||
public void Set(SortedDictionary<string, List<DatItem>> newdict)
|
||||
{
|
||||
_files = newdict;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Constructors
|
||||
|
||||
/// <summary>
|
||||
/// Create a new, empty DatFile object
|
||||
/// </summary>
|
||||
public DatFile()
|
||||
{
|
||||
_files = new SortedDictionary<string, List<DatItem>>();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Create a new DatFile from an existing one
|
||||
/// </summary>
|
||||
/// <param name="df"></param>
|
||||
public DatFile(DatFile datFile)
|
||||
{
|
||||
_fileName = datFile.FileName;
|
||||
_name = datFile.Name;
|
||||
_description = datFile.Description;
|
||||
_rootDir = datFile.RootDir;
|
||||
_category = datFile.Category;
|
||||
_version = datFile.Version;
|
||||
_date = datFile.Date;
|
||||
_author = datFile.Author;
|
||||
_email = datFile.Email;
|
||||
_homepage = datFile.Homepage;
|
||||
_url = datFile.Url;
|
||||
_comment = datFile.Comment;
|
||||
_header = datFile.Header;
|
||||
_type = datFile.Type;
|
||||
_forceMerging = datFile.ForceMerging;
|
||||
_forceNodump = datFile.ForceNodump;
|
||||
_forcePacking = datFile.ForcePacking;
|
||||
_excludeOf = datFile.ExcludeOf;
|
||||
_datFormat = datFile.DatFormat;
|
||||
_mergeRoms = datFile.MergeRoms;
|
||||
_stripHash = datFile.StripHash;
|
||||
_sortedBy = SortedBy.Default;
|
||||
_useGame = datFile.UseGame;
|
||||
_prefix = datFile.Prefix;
|
||||
_postfix = datFile.Postfix;
|
||||
_quotes = datFile.Quotes;
|
||||
_repExt = datFile.RepExt;
|
||||
_addExt = datFile.AddExt;
|
||||
_remExt = datFile.RemExt;
|
||||
_gameName = datFile.GameName;
|
||||
_romba = datFile.Romba;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#endregion // Instance Methods
|
||||
}
|
||||
}
|
||||
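DatFile above wraps a SortedDictionary<string, List<DatItem>> behind a lock: the indexer and Add() lazily create both the dictionary and the per-key bucket, Keys exposes the bucket names, and Count walks every bucket to total the items. A minimal sketch of that accessor surface; the key string and the Demo wrapper are illustrative.

using SabreTools.Helper.Dats;

static class DatFileExample
{
    static void Demo(DatItem item)
    {
        DatFile datFile = new DatFile();

        // Add() creates the bucket on first use; the indexer does the same on read
        datFile.Add("example-key", item);

        foreach (string key in datFile.Keys)
        {
            int itemsInBucket = datFile[key].Count;
        }

        long total = datFile.Count; // sums the items across every bucket
    }
}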
723  SabreTools.Library/Dats/DatItem.cs (new normal file)
@@ -0,0 +1,723 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
|
||||
using SabreTools.Helper.Data;
|
||||
using SabreTools.Helper.Tools;
|
||||
|
||||
#if MONO
|
||||
using System.IO;
|
||||
#else
|
||||
using Alphaleonis.Win32.Filesystem;
|
||||
#endif
|
||||
using NaturalSort;
|
||||
|
||||
namespace SabreTools.Helper.Dats
|
||||
{
|
||||
public abstract class DatItem : IEquatable<DatItem>, IComparable<DatItem>
|
||||
{
|
||||
#region Protected instance variables
|
||||
|
||||
// Standard item information
|
||||
protected string _name;
|
||||
private string _merge;
|
||||
protected ItemType _itemType;
|
||||
protected DupeType _dupeType;
|
||||
|
||||
// Machine information
|
||||
protected Machine _machine;
|
||||
|
||||
// Software list information
|
||||
protected bool? _supported;
|
||||
protected string _publisher;
|
||||
protected List<Tuple<string, string>> _infos;
|
||||
protected string _partName;
|
||||
protected string _partInterface;
|
||||
protected List<Tuple<string, string>> _features;
|
||||
protected string _areaName;
|
||||
protected long? _areaSize;
|
||||
|
||||
// Source metadata information
|
||||
protected int _systemId;
|
||||
protected string _systemName;
|
||||
protected int _sourceId;
|
||||
protected string _sourceName;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Publicly facing variables
|
||||
|
||||
// Standard item information
|
||||
public string Name
|
||||
{
|
||||
get { return _name; }
|
||||
set { _name = value; }
|
||||
}
|
||||
public string MergeTag
|
||||
{
|
||||
get { return _merge; }
|
||||
set { _merge = value; }
|
||||
}
|
||||
public ItemType Type
|
||||
{
|
||||
get { return _itemType; }
|
||||
set { _itemType = value; }
|
||||
}
|
||||
public DupeType Dupe
|
||||
{
|
||||
get { return _dupeType; }
|
||||
set { _dupeType = value; }
|
||||
}
|
||||
|
||||
// Machine information
|
||||
public Machine Machine
|
||||
{
|
||||
get { return _machine; }
|
||||
set { _machine = value; }
|
||||
}
|
||||
|
||||
// Software list information
|
||||
public bool? Supported
|
||||
{
|
||||
get { return _supported; }
|
||||
set { _supported = value; }
|
||||
}
|
||||
public string Publisher
|
||||
{
|
||||
get { return _publisher; }
|
||||
set { _publisher = value; }
|
||||
}
|
||||
public List<Tuple<string, string>> Infos
|
||||
{
|
||||
get { return _infos; }
|
||||
set { _infos = value; }
|
||||
}
|
||||
public string PartName
|
||||
{
|
||||
get { return _partName; }
|
||||
set { _partName = value; }
|
||||
}
|
||||
public string PartInterface
|
||||
{
|
||||
get { return _partInterface; }
|
||||
set { _partInterface = value; }
|
||||
}
|
||||
public List<Tuple<string, string>> Features
|
||||
{
|
||||
get { return _features; }
|
||||
set { _features = value; }
|
||||
}
|
||||
public string AreaName
|
||||
{
|
||||
get { return _areaName; }
|
||||
set { _areaName = value; }
|
||||
}
|
||||
public long? AreaSize
|
||||
{
|
||||
get { return _areaSize; }
|
||||
set { _areaSize = value; }
|
||||
}
|
||||
|
||||
// Source metadata information
|
||||
public int SystemID
|
||||
{
|
||||
get { return _systemId; }
|
||||
set { _systemId = value; }
|
||||
}
|
||||
public string System
|
||||
{
|
||||
get { return _systemName; }
|
||||
set { _systemName = value; }
|
||||
}
|
||||
public int SourceID
|
||||
{
|
||||
get { return _sourceId; }
|
||||
set { _sourceId = value; }
|
||||
}
|
||||
public string Source
|
||||
{
|
||||
get { return _sourceName; }
|
||||
set { _sourceName = value; }
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Instance Methods
|
||||
|
||||
#region Comparision Methods
|
||||
|
||||
public int CompareTo(DatItem other)
|
||||
{
|
||||
int ret = 0;
|
||||
|
||||
try
|
||||
{
|
||||
if (_name == other.Name)
|
||||
{
|
||||
ret = (this.Equals(other) ? 0 : 1);
|
||||
}
|
||||
ret = String.Compare(_name, other.Name);
|
||||
}
|
||||
catch
|
||||
{
|
||||
ret = 1;
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Determine if an item is a duplicate using partial matching logic
|
||||
/// </summary>
|
||||
/// <param name="other">DatItem to use as a baseline</param>
|
||||
/// <returns>True if the roms are duplicates, false otherwise</returns>
|
||||
public abstract bool Equals(DatItem other);
|
||||
|
||||
/// <summary>
|
||||
/// Return the duplicate status of two items
|
||||
/// </summary>
|
||||
/// <param name="lastItem">DatItem to check against</param>
|
||||
/// <returns>The DupeType corresponding to the relationship between the two</returns>
|
||||
public DupeType GetDuplicateStatus(DatItem lastItem)
|
||||
{
|
||||
DupeType output = 0x00;
|
||||
|
||||
// If we don't have a duplicate at all, return none
|
||||
if (!this.Equals(lastItem))
|
||||
{
|
||||
return output;
|
||||
}
|
||||
|
||||
// If the duplicate is external already or should be, set it
|
||||
if ((lastItem.Dupe & DupeType.External) != 0 || lastItem.SystemID != this.SystemID || lastItem.SourceID != this.SourceID)
|
||||
{
|
||||
if (lastItem.Machine.Name == this.Machine.Name && lastItem.Name == this.Name)
|
||||
{
|
||||
output = DupeType.External | DupeType.All;
|
||||
}
|
||||
else
|
||||
{
|
||||
output = DupeType.External | DupeType.Hash;
|
||||
}
|
||||
}
|
||||
|
||||
// Otherwise, it's considered an internal dupe
|
||||
else
|
||||
{
|
||||
if (lastItem.Machine.Name == this.Machine.Name && lastItem.Name == this.Name)
|
||||
{
|
||||
output = DupeType.Internal | DupeType.All;
|
||||
}
|
||||
else
|
||||
{
|
||||
output = DupeType.Internal | DupeType.Hash;
|
||||
}
|
||||
}
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Sorting and Merging
|
||||
|
||||
/// <summary>
|
||||
/// Check if a DAT contains the given rom
|
||||
/// </summary>
|
||||
/// <param name="datdata">Dat to match against</param>
|
||||
/// <returns>True if it contains the rom, false otherwise</returns>
|
||||
public bool HasDuplicates(DatFile datdata)
|
||||
{
|
||||
// Check for an empty rom list first
|
||||
if (datdata.Count == 0)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// We want to get the proper key for the DatItem
|
||||
string key = SortAndGetKey(datdata);
|
||||
|
||||
// If the key doesn't exist, return the empty list
|
||||
if (!datdata.ContainsKey(key))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// Try to find duplicates
|
||||
List<DatItem> roms = datdata[key];
|
||||
|
||||
foreach (DatItem rom in roms)
|
||||
{
|
||||
if (this.Equals(rom))
|
||||
{
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// List all duplicates found in a DAT based on a rom
|
||||
/// </summary>
|
||||
/// <param name="datdata">Dat to match against</param>
|
||||
/// <param name="remove">True to remove matched roms from the input, false otherwise (default)</param>
|
||||
/// <returns>List of matched DatItem objects</returns>
|
||||
public List<DatItem> GetDuplicates(DatFile datdata, bool remove = false)
|
||||
{
|
||||
List<DatItem> output = new List<DatItem>();
|
||||
|
||||
// Check for an empty rom list first
|
||||
if (datdata.Count == 0)
|
||||
{
|
||||
return output;
|
||||
}
|
||||
|
||||
// We want to get the proper key for the DatItem
|
||||
string key = SortAndGetKey(datdata);
|
||||
|
||||
// If the key doesn't exist, return the empty list
|
||||
if (!datdata.ContainsKey(key))
|
||||
{
|
||||
return output;
|
||||
}
|
||||
|
||||
// Try to find duplicates
|
||||
List<DatItem> roms = datdata[key];
|
||||
List<DatItem> left = new List<DatItem>();
|
||||
|
||||
foreach (DatItem rom in roms)
|
||||
{
|
||||
if (this.Equals(rom))
|
||||
{
|
||||
output.Add(rom);
|
||||
}
|
||||
else
|
||||
{
|
||||
left.Add(rom);
|
||||
}
|
||||
}
|
||||
|
||||
// If we're in removal mode, replace the list with the new one
|
||||
if (remove)
|
||||
{
|
||||
datdata[key] = left;
|
||||
}
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Sort the input DAT and get the key to be used by the item
|
||||
/// </summary>
|
||||
/// <param name="datdata">Dat to match against</param>
|
||||
/// <returns>Key to try to use</returns>
|
||||
private string SortAndGetKey(DatFile datdata)
|
||||
{
|
||||
string key = null;
|
||||
|
||||
// If all items are supposed to have a SHA-512, we sort by that
|
||||
if (datdata.RomCount + datdata.DiskCount - datdata.NodumpCount == datdata.SHA512Count
|
||||
&& ((_itemType == ItemType.Rom && !String.IsNullOrEmpty(((Rom)this).SHA512))
|
||||
|| (_itemType == ItemType.Disk && !String.IsNullOrEmpty(((Disk)this).SHA512))))
|
||||
{
|
||||
if (_itemType == ItemType.Rom)
|
||||
{
|
||||
key = ((Rom)this).SHA512;
|
||||
datdata.BucketBy(SortedBy.SHA512, false /* mergeroms */);
|
||||
}
|
||||
else
|
||||
{
|
||||
key = ((Disk)this).SHA512;
|
||||
datdata.BucketBy(SortedBy.SHA512, false /* mergeroms */);
|
||||
}
|
||||
}
|
||||
|
||||
// If all items are supposed to have a SHA-384, we sort by that
|
||||
else if (datdata.RomCount + datdata.DiskCount - datdata.NodumpCount == datdata.SHA384Count
|
||||
&& ((_itemType == ItemType.Rom && !String.IsNullOrEmpty(((Rom)this).SHA384))
|
||||
|| (_itemType == ItemType.Disk && !String.IsNullOrEmpty(((Disk)this).SHA384))))
|
||||
{
|
||||
if (_itemType == ItemType.Rom)
|
||||
{
|
||||
key = ((Rom)this).SHA384;
|
||||
datdata.BucketBy(SortedBy.SHA384, false /* mergeroms */);
|
||||
}
|
||||
else
|
||||
{
|
||||
key = ((Disk)this).SHA384;
|
||||
datdata.BucketBy(SortedBy.SHA384, false /* mergeroms */);
|
||||
}
|
||||
}
|
||||
|
||||
// If all items are supposed to have a SHA-256, we sort by that
|
||||
else if (datdata.RomCount + datdata.DiskCount - datdata.NodumpCount == datdata.SHA256Count
|
||||
&& ((_itemType == ItemType.Rom && !String.IsNullOrEmpty(((Rom)this).SHA256))
|
||||
|| (_itemType == ItemType.Disk && !String.IsNullOrEmpty(((Disk)this).SHA256))))
|
||||
{
|
||||
if (_itemType == ItemType.Rom)
|
||||
{
|
||||
key = ((Rom)this).SHA256;
|
||||
datdata.BucketBy(SortedBy.SHA256, false /* mergeroms */);
|
||||
}
|
||||
else
|
||||
{
|
||||
key = ((Disk)this).SHA256;
|
||||
datdata.BucketBy(SortedBy.SHA256, false /* mergeroms */);
|
||||
}
|
||||
}
|
||||
|
||||
// If all items are supposed to have a SHA-1, we sort by that
|
||||
else if (datdata.RomCount + datdata.DiskCount - datdata.NodumpCount == datdata.SHA1Count
|
||||
&& ((_itemType == ItemType.Rom && !String.IsNullOrEmpty(((Rom)this).SHA1))
|
||||
|| (_itemType == ItemType.Disk && !String.IsNullOrEmpty(((Disk)this).SHA1))))
|
||||
{
|
||||
if (_itemType == ItemType.Rom)
|
||||
{
|
||||
key = ((Rom)this).SHA1;
|
||||
datdata.BucketBy(SortedBy.SHA1, false /* mergeroms */);
|
||||
}
|
||||
else
|
||||
{
|
||||
key = ((Disk)this).SHA1;
|
||||
datdata.BucketBy(SortedBy.SHA1, false /* mergeroms */);
|
||||
}
|
||||
}
|
||||
|
||||
// If all items are supposed to have an MD5, we sort by that
|
||||
else if (datdata.RomCount + datdata.DiskCount - datdata.NodumpCount == datdata.MD5Count
|
||||
&& ((_itemType == ItemType.Rom && !String.IsNullOrEmpty(((Rom)this).MD5))
|
||||
|| (_itemType == ItemType.Disk && !String.IsNullOrEmpty(((Disk)this).MD5))))
|
||||
{
|
||||
if (_itemType == ItemType.Rom)
|
||||
{
|
||||
key = ((Rom)this).MD5;
|
||||
datdata.BucketBy(SortedBy.MD5, false /* mergeroms */);
|
||||
}
|
||||
else
|
||||
{
|
||||
key = ((Disk)this).MD5;
|
||||
datdata.BucketBy(SortedBy.MD5, false /* mergeroms */);
|
||||
}
|
||||
}
|
||||
|
||||
// If we've gotten here and we have a Disk, sort by MD5
|
||||
else if (_itemType == ItemType.Disk)
|
||||
{
|
||||
key = ((Disk)this).MD5;
|
||||
datdata.BucketBy(SortedBy.MD5, false /* mergeroms */);
|
||||
}
|
||||
|
||||
// If we've gotten here and we have a Rom, sort by CRC
|
||||
else if (_itemType == ItemType.Rom)
|
||||
{
|
||||
key = ((Rom)this).CRC;
|
||||
datdata.BucketBy(SortedBy.CRC, false /* mergeroms */);
|
||||
}
|
||||
|
||||
// Otherwise, we use -1 as the key
|
||||
else
|
||||
{
|
||||
key = "-1";
|
||||
datdata.BucketBy(SortedBy.Size, false /* mergeroms */);
|
||||
}
|
||||
|
||||
return key;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#endregion // Instance Methods
|
||||
|
||||
#region Static Methods
|
||||
|
||||
#region Sorting and Merging
|
||||
|
||||
/// <summary>
|
||||
/// Merge an arbitrary set of ROMs based on the supplied information
|
||||
/// </summary>
|
||||
/// <param name="infiles">List of File objects representing the roms to be merged</param>
|
||||
/// <returns>A List of RomData objects representing the merged roms</returns>
|
||||
public static List<DatItem> Merge(List<DatItem> infiles)
|
||||
{
|
||||
// Check for null or blank roms first
|
||||
if (infiles == null || infiles.Count == 0)
|
||||
{
|
||||
return new List<DatItem>();
|
||||
}
|
||||
|
||||
// Create output list
|
||||
List<DatItem> outfiles = new List<DatItem>();
|
||||
|
||||
// Then deduplicate them by checking to see if data matches previous saved roms
|
||||
foreach (DatItem file in infiles)
|
||||
{
|
||||
// If it's a nodump, add and skip
|
||||
if (file.Type == ItemType.Rom && ((Rom)file).ItemStatus == ItemStatus.Nodump)
|
||||
{
|
||||
outfiles.Add(file);
|
||||
continue;
|
||||
}
|
||||
else if (file.Type == ItemType.Disk && ((Disk)file).ItemStatus == ItemStatus.Nodump)
|
||||
{
|
||||
outfiles.Add(file);
|
||||
continue;
|
||||
}
|
||||
|
||||
// If it's not the first rom in the list, check it against what's already been saved
|
||||
if (outfiles.Count != 0)
|
||||
{
|
||||
// Check if the rom is a duplicate
|
||||
DupeType dupetype = 0x00;
|
||||
DatItem saveditem = new Rom();
|
||||
int pos = -1;
|
||||
for (int i = 0; i < outfiles.Count; i++)
|
||||
{
|
||||
DatItem lastrom = outfiles[i];
|
||||
|
||||
// Get the duplicate status
|
||||
dupetype = file.GetDuplicateStatus(lastrom);
|
||||
|
||||
// If it's a duplicate, skip adding it to the output but add any missing information
|
||||
if (dupetype != 0x00)
|
||||
{
|
||||
// If we don't have a rom or disk, then just skip adding
|
||||
if (file.Type != ItemType.Rom && file.Type != ItemType.Disk)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
saveditem = lastrom;
|
||||
pos = i;
|
||||
|
||||
// Roms have more information to save
|
||||
if (file.Type == ItemType.Rom)
|
||||
{
|
||||
// If the saved size is unset, take it from the new file (assumes -1 marks an unknown size, mirroring the -1 convention used by Filter)
((Rom)saveditem).Size = (((Rom)saveditem).Size == -1 && ((Rom)file).Size != -1 ? ((Rom)file).Size : ((Rom)saveditem).Size);
|
||||
((Rom)saveditem).CRC = (String.IsNullOrEmpty(((Rom)saveditem).CRC) && !String.IsNullOrEmpty(((Rom)file).CRC)
|
||||
? ((Rom)file).CRC
|
||||
: ((Rom)saveditem).CRC);
|
||||
((Rom)saveditem).MD5 = (String.IsNullOrEmpty(((Rom)saveditem).MD5) && !String.IsNullOrEmpty(((Rom)file).MD5)
|
||||
? ((Rom)file).MD5
|
||||
: ((Rom)saveditem).MD5);
|
||||
((Rom)saveditem).SHA1 = (String.IsNullOrEmpty(((Rom)saveditem).SHA1) && !String.IsNullOrEmpty(((Rom)file).SHA1)
|
||||
? ((Rom)file).SHA1
|
||||
: ((Rom)saveditem).SHA1);
|
||||
}
|
||||
else
|
||||
{
|
||||
((Disk)saveditem).MD5 = (String.IsNullOrEmpty(((Disk)saveditem).MD5) && !String.IsNullOrEmpty(((Disk)file).MD5)
|
||||
? ((Disk)file).MD5
|
||||
: ((Disk)saveditem).MD5);
|
||||
((Disk)saveditem).SHA1 = (String.IsNullOrEmpty(((Disk)saveditem).SHA1) && !String.IsNullOrEmpty(((Disk)file).SHA1)
|
||||
? ((Disk)file).SHA1
|
||||
: ((Disk)saveditem).SHA1);
|
||||
}
|
||||
|
||||
saveditem.Dupe = dupetype;
|
||||
|
||||
// If the current system has a lower ID than the previous, set the system accordingly
|
||||
if (file.SystemID < saveditem.SystemID)
|
||||
{
|
||||
saveditem.SystemID = file.SystemID;
|
||||
saveditem.System = file.System;
|
||||
saveditem.Machine = (Machine)file.Machine.Clone();
|
||||
saveditem.Name = file.Name;
|
||||
}
|
||||
|
||||
// If the current source has a lower ID than the previous, set the source accordingly
|
||||
if (file.SourceID < saveditem.SourceID)
|
||||
{
|
||||
saveditem.SourceID = file.SourceID;
|
||||
saveditem.Source = file.Source;
|
||||
saveditem.Machine = (Machine)file.Machine.Clone();
|
||||
saveditem.Name = file.Name;
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// If no duplicate is found, add it to the list
|
||||
if (dupetype == 0x00)
|
||||
{
|
||||
outfiles.Add(file);
|
||||
}
|
||||
// Otherwise, if updated rom information was found, replace the existing entry
|
||||
else
|
||||
{
|
||||
outfiles.RemoveAt(pos);
|
||||
outfiles.Insert(pos, saveditem);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
outfiles.Add(file);
|
||||
}
|
||||
}
|
||||
|
||||
// Then return the result
|
||||
return outfiles;
|
||||
}
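// Usage sketch (illustrative only; "datFile" and "key" are hypothetical): deduplicate the
// items bucketed under one key, mirroring how DiffNoCascade and DiffCascade below call Merge per key.
//
//     List<DatItem> merged = DatItem.Merge(datFile[key]);
//     datFile.Remove(key);
//     datFile.AddRange(key, merged);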
|
||||
|
||||
/// <summary>
|
||||
/// Resolve name duplicates in an arbitrary set of ROMs based on the supplied information
|
||||
/// </summary>
|
||||
/// <param name="infiles">List of File objects representing the roms to be merged</param>
|
||||
/// <returns>A List of RomData objects representing the renamed roms</returns>
|
||||
public static List<DatItem> ResolveNames(List<DatItem> infiles)
|
||||
{
|
||||
// Create the output list
|
||||
List<DatItem> output = new List<DatItem>();
|
||||
|
||||
// First we want to make sure the list is in alphabetical order
|
||||
Sort(ref infiles, true);
|
||||
|
||||
// Now we want to loop through and check names
|
||||
DatItem lastItem = null;
|
||||
string lastrenamed = null;
|
||||
int lastid = 0;
|
||||
for (int i = 0; i < infiles.Count; i++)
|
||||
{
|
||||
DatItem datItem = infiles[i];
|
||||
|
||||
// If we have the first item, we automatically add it
|
||||
if (lastItem == null)
|
||||
{
|
||||
output.Add(datItem);
|
||||
lastItem = datItem;
|
||||
continue;
|
||||
}
|
||||
|
||||
// If the current item exactly matches the last item, then we don't add it
|
||||
if ((datItem.GetDuplicateStatus(lastItem) & DupeType.All) != 0)
|
||||
{
|
||||
Globals.Logger.Verbose("Exact duplicate found for '" + datItem.Name + "'");
|
||||
continue;
|
||||
}
|
||||
|
||||
// If the current name matches the previous name, rename the current item
|
||||
else if (datItem.Name == lastItem.Name)
|
||||
{
|
||||
Globals.Logger.Verbose("Name duplicate found for '" + datItem.Name + "'");
|
||||
|
||||
if (datItem.Type == ItemType.Disk)
|
||||
{
|
||||
Disk disk = (Disk)datItem;
|
||||
disk.Name += "_" + (!String.IsNullOrEmpty(disk.MD5)
|
||||
? disk.MD5
|
||||
: !String.IsNullOrEmpty(disk.SHA1)
|
||||
? disk.SHA1
|
||||
: "1");
|
||||
datItem = disk;
|
||||
lastrenamed = lastrenamed ?? datItem.Name;
|
||||
}
|
||||
else if (datItem.Type == ItemType.Rom)
|
||||
{
|
||||
Rom rom = (Rom)datItem;
|
||||
rom.Name += "_" + (!String.IsNullOrEmpty(rom.CRC)
|
||||
? rom.CRC
|
||||
: !String.IsNullOrEmpty(rom.MD5)
|
||||
? rom.MD5
|
||||
: !String.IsNullOrEmpty(rom.SHA1)
|
||||
? rom.SHA1
|
||||
: "1");
|
||||
datItem = rom;
|
||||
lastrenamed = lastrenamed ?? datItem.Name;
|
||||
}
|
||||
|
||||
// If we have a conflict with the last renamed item, do the right thing
|
||||
if (datItem.Name == lastrenamed)
|
||||
{
|
||||
lastrenamed = datItem.Name;
|
||||
datItem.Name += (lastid == 0 ? "" : "_" + lastid);
|
||||
lastid++;
|
||||
}
|
||||
// If we have no conflict, then we want to reset the lastrenamed and id
|
||||
else
|
||||
{
|
||||
lastrenamed = null;
|
||||
lastid = 0;
|
||||
}
|
||||
|
||||
output.Add(datItem);
|
||||
}
|
||||
|
||||
// Otherwise, we say that we have a valid named file
|
||||
else
|
||||
{
|
||||
output.Add(datItem);
|
||||
lastItem = datItem;
|
||||
lastrenamed = null;
|
||||
lastid = 0;
|
||||
}
|
||||
}
|
||||
|
||||
return output;
|
||||
}
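// Usage sketch (illustrative only; "machineItems" is hypothetical): exact hash duplicates are
// dropped, and items that only share a name get a "_<hash>" (or "_1") suffix appended.
//
//     List<DatItem> uniquelyNamed = DatItem.ResolveNames(machineItems);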
|
||||
|
||||
/// <summary>
|
||||
/// Sort a list of File objects by SystemID, SourceID, Game, and Name (in order)
|
||||
/// </summary>
|
||||
/// <param name="roms">List of File objects representing the roms to be sorted</param>
|
||||
/// <param name="norename">True if files are not renamed, false otherwise</param>
|
||||
/// <returns>True if it sorted correctly, false otherwise</returns>
|
||||
public static bool Sort(ref List<DatItem> roms, bool norename)
|
||||
{
|
||||
roms.Sort(delegate (DatItem x, DatItem y)
|
||||
{
|
||||
try
|
||||
{
|
||||
NaturalComparer nc = new NaturalComparer();
|
||||
if (x.SystemID == y.SystemID)
|
||||
{
|
||||
if (x.SourceID == y.SourceID)
|
||||
{
|
||||
if (x.Machine != null && y.Machine != null && x.Machine.Name == y.Machine.Name)
|
||||
{
|
||||
if ((x.Type == ItemType.Rom || x.Type == ItemType.Disk) && (y.Type == ItemType.Rom || y.Type == ItemType.Disk))
|
||||
{
|
||||
if (Path.GetDirectoryName(Style.RemovePathUnsafeCharacters(x.Name)) == Path.GetDirectoryName(Style.RemovePathUnsafeCharacters(y.Name)))
|
||||
{
|
||||
return nc.Compare(Path.GetFileName(Style.RemovePathUnsafeCharacters(x.Name)), Path.GetFileName(Style.RemovePathUnsafeCharacters(y.Name)));
|
||||
}
|
||||
return nc.Compare(Path.GetDirectoryName(Style.RemovePathUnsafeCharacters(x.Name)), Path.GetDirectoryName(Style.RemovePathUnsafeCharacters(y.Name)));
|
||||
}
|
||||
else if ((x.Type == ItemType.Rom || x.Type == ItemType.Disk) && (y.Type != ItemType.Rom && y.Type != ItemType.Disk))
|
||||
{
|
||||
return -1;
|
||||
}
|
||||
else if ((x.Type != ItemType.Rom && x.Type != ItemType.Disk) && (y.Type == ItemType.Rom || y.Type == ItemType.Disk))
|
||||
{
|
||||
return 1;
|
||||
}
|
||||
else
|
||||
{
|
||||
if (Path.GetDirectoryName(x.Name) == Path.GetDirectoryName(y.Name))
|
||||
{
|
||||
return nc.Compare(Path.GetFileName(x.Name), Path.GetFileName(y.Name));
|
||||
}
|
||||
return nc.Compare(Path.GetDirectoryName(x.Name), Path.GetDirectoryName(y.Name));
|
||||
}
|
||||
}
|
||||
return nc.Compare(x.Machine.Name, y.Machine.Name);
|
||||
}
|
||||
return (norename ? nc.Compare(x.Machine.Name, y.Machine.Name) : x.SourceID - y.SourceID);
|
||||
}
|
||||
return (norename ? nc.Compare(x.Machine.Name, y.Machine.Name) : x.SystemID - y.SystemID);
|
||||
}
|
||||
catch (Exception)
|
||||
{
|
||||
// Absorb the error
|
||||
return 0;
|
||||
}
|
||||
});
|
||||
|
||||
return true;
|
||||
}
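// Usage sketch (illustrative only; "machineItems" is hypothetical): order items by system,
// source, machine, then name before writing them out or resolving names.
//
//     DatItem.Sort(ref machineItems, norename: true);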
|
||||
|
||||
#endregion
|
||||
|
||||
#endregion // Static Methods
|
||||
}
|
||||
}
|
||||
144
SabreTools.Library/Dats/Disk.cs
Normal file
@@ -0,0 +1,144 @@
|
||||
using System;
|
||||
|
||||
using SabreTools.Helper.Data;
|
||||
|
||||
namespace SabreTools.Helper.Dats
|
||||
{
|
||||
public class Disk : DatItem, ICloneable
|
||||
{
|
||||
#region Private instance variables
|
||||
|
||||
// Disk information
|
||||
protected string _md5;
|
||||
protected string _sha1;
|
||||
protected string _sha256;
|
||||
protected string _sha384;
|
||||
protected string _sha512;
|
||||
protected ItemStatus _itemStatus;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Publicly facing variables
|
||||
|
||||
// Disk information
|
||||
public string MD5
|
||||
{
|
||||
get { return _md5; }
|
||||
set { _md5 = value; }
|
||||
}
|
||||
public string SHA1
|
||||
{
|
||||
get { return _sha1; }
|
||||
set { _sha1 = value; }
|
||||
}
|
||||
public string SHA256
|
||||
{
|
||||
get { return _sha256; }
|
||||
set { _sha256 = value; }
|
||||
}
|
||||
public string SHA384
|
||||
{
|
||||
get { return _sha384; }
|
||||
set { _sha384 = value; }
|
||||
}
|
||||
public string SHA512
|
||||
{
|
||||
get { return _sha512; }
|
||||
set { _sha512 = value; }
|
||||
}
|
||||
public ItemStatus ItemStatus
|
||||
{
|
||||
get { return _itemStatus; }
|
||||
set { _itemStatus = value; }
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Constructors
|
||||
|
||||
/// <summary>
|
||||
/// Create a default, empty Disk object
|
||||
/// </summary>
|
||||
public Disk()
|
||||
{
|
||||
_name = "";
|
||||
_itemType = ItemType.Disk;
|
||||
_dupeType = 0x00;
|
||||
_itemStatus = ItemStatus.None;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Cloning Methods
|
||||
|
||||
public object Clone()
|
||||
{
|
||||
return new Disk()
|
||||
{
|
||||
Name = this.Name,
|
||||
Type = this.Type,
|
||||
Dupe = this.Dupe,
|
||||
|
||||
Machine = this.Machine,
|
||||
|
||||
Supported = this.Supported,
|
||||
Publisher = this.Publisher,
|
||||
Infos = this.Infos,
|
||||
PartName = this.PartName,
|
||||
PartInterface = this.PartInterface,
|
||||
Features = this.Features,
|
||||
AreaName = this.AreaName,
|
||||
AreaSize = this.AreaSize,
|
||||
|
||||
SystemID = this.SystemID,
|
||||
System = this.System,
|
||||
SourceID = this.SourceID,
|
||||
Source = this.Source,
|
||||
|
||||
MD5 = this.MD5,
|
||||
SHA1 = this.SHA1,
|
||||
SHA256 = this.SHA256,
|
||||
SHA384 = this.SHA384,
|
||||
SHA512 = this.SHA512,
|
||||
ItemStatus = this.ItemStatus,
|
||||
};
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Comparison Methods
|
||||
|
||||
public override bool Equals(DatItem other)
|
||||
{
|
||||
bool dupefound = false;
|
||||
|
||||
// If the types don't match, return false
|
||||
if (_itemType != other.Type)
|
||||
{
|
||||
return dupefound;
|
||||
}
|
||||
|
||||
// Otherwise, treat it as a Disk
|
||||
Disk newOther = (Disk)other;
|
||||
|
||||
// If either is a nodump, it's never a match
|
||||
if (_itemStatus == ItemStatus.Nodump || newOther.ItemStatus == ItemStatus.Nodump)
|
||||
{
|
||||
return dupefound;
|
||||
}
|
||||
|
||||
if (((String.IsNullOrEmpty(_md5) || String.IsNullOrEmpty(newOther.MD5)) || this.MD5 == newOther.MD5)
|
||||
&& ((String.IsNullOrEmpty(this.SHA1) || String.IsNullOrEmpty(newOther.SHA1)) || this.SHA1 == newOther.SHA1)
|
||||
&& ((String.IsNullOrEmpty(this.SHA256) || String.IsNullOrEmpty(newOther.SHA256)) || this.SHA256 == newOther.SHA256)
|
||||
&& ((String.IsNullOrEmpty(this.SHA384) || String.IsNullOrEmpty(newOther.SHA384)) || this.SHA384 == newOther.SHA384)
|
||||
&& ((String.IsNullOrEmpty(this.SHA512) || String.IsNullOrEmpty(newOther.SHA512)) || this.SHA512 == newOther.SHA512))
|
||||
{
|
||||
dupefound = true;
|
||||
}
|
||||
|
||||
return dupefound;
|
||||
}
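// Note on the comparison above (observation): a hash missing on either side is treated as
// "unknown" rather than a mismatch, so two Disks are equal when every hash both sides carry
// agrees and neither side is a nodump. Example (hash value is illustrative only):
//
//     Disk a = new Disk() { Name = "disk a", MD5 = "d41d8cd98f00b204e9800998ecf8427e" };
//     Disk b = new Disk() { Name = "disk b", MD5 = "d41d8cd98f00b204e9800998ecf8427e" };
//     bool same = a.Equals(b);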
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
715
SabreTools.Library/Dats/Filter.cs
Normal file
@@ -0,0 +1,715 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Text.RegularExpressions;
|
||||
|
||||
using SabreTools.Helper.Data;
|
||||
|
||||
namespace SabreTools.Helper.Dats
|
||||
{
|
||||
public class Filter
|
||||
{
|
||||
#region Private instance variables
|
||||
|
||||
#region Positive
|
||||
|
||||
private List<string> _gameNames;
|
||||
private List<string> _romNames;
|
||||
private List<string> _romTypes;
|
||||
private List<string> _crcs;
|
||||
private List<string> _md5s;
|
||||
private List<string> _sha1s;
|
||||
private List<string> _sha256s;
|
||||
private List<string> _sha384s;
|
||||
private List<string> _sha512s;
|
||||
private ItemStatus _itemStatuses;
|
||||
private MachineType _machineTypes;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Negative
|
||||
|
||||
private List<string> _notGameNames;
|
||||
private List<string> _notRomNames;
|
||||
private List<string> _notRomTypes;
|
||||
private List<string> _notCrcs;
|
||||
private List<string> _notMd5s;
|
||||
private List<string> _notSha1s;
|
||||
private List<string> _notSha256s;
|
||||
private List<string> _notSha384s;
|
||||
private List<string> _notSha512s;
|
||||
private ItemStatus _itemNotStatuses;
|
||||
private MachineType _machineNotTypes;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Neutral
|
||||
|
||||
private long _sizeGreaterThanOrEqual;
|
||||
private long _sizeLessThanOrEqual;
|
||||
private long _sizeEqualTo;
|
||||
private bool _includeOfInGame;
|
||||
private bool? _runnable;
|
||||
|
||||
#endregion
|
||||
|
||||
#endregion // Private instance variables
|
||||
|
||||
#region Publicly facing variables
|
||||
|
||||
#region Positive
|
||||
|
||||
public List<string> GameNames
|
||||
{
|
||||
get { return _gameNames; }
|
||||
set { _gameNames = value; }
|
||||
}
|
||||
public List<string> RomNames
|
||||
{
|
||||
get { return _romNames; }
|
||||
set { _romNames = value; }
|
||||
}
|
||||
public List<string> RomTypes
|
||||
{
|
||||
get { return _romTypes; }
|
||||
set { _romTypes = value; }
|
||||
}
|
||||
public List<string> CRCs
|
||||
{
|
||||
get { return _crcs; }
|
||||
set { _crcs = value; }
|
||||
}
|
||||
public List<string> MD5s
|
||||
{
|
||||
get { return _md5s; }
|
||||
set { _md5s = value; }
|
||||
}
|
||||
public List<string> SHA1s
|
||||
{
|
||||
get { return _sha1s; }
|
||||
set { _sha1s = value; }
|
||||
}
|
||||
public List<string> SHA256s
|
||||
{
|
||||
get { return _sha256s; }
|
||||
set { _sha256s = value; }
|
||||
}
|
||||
public List<string> SHA384s
|
||||
{
|
||||
get { return _sha384s; }
|
||||
set { _sha384s = value; }
|
||||
}
|
||||
public List<string> SHA512s
|
||||
{
|
||||
get { return _sha512s; }
|
||||
set { _sha512s = value; }
|
||||
}
|
||||
public ItemStatus ItemStatuses
|
||||
{
|
||||
get { return _itemStatuses; }
|
||||
set { _itemStatuses = value; }
|
||||
}
|
||||
public MachineType MachineTypes
|
||||
{
|
||||
get { return _machineTypes; }
|
||||
set { _machineTypes = value; }
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Negative
|
||||
|
||||
public List<string> NotGameNames
|
||||
{
|
||||
get { return _notGameNames; }
|
||||
set { _notGameNames = value; }
|
||||
}
|
||||
public List<string> NotRomNames
|
||||
{
|
||||
get { return _notRomNames; }
|
||||
set { _notRomNames = value; }
|
||||
}
|
||||
public List<string> NotRomTypes
|
||||
{
|
||||
get { return _notRomTypes; }
|
||||
set { _notRomTypes = value; }
|
||||
}
|
||||
public List<string> NotCRCs
|
||||
{
|
||||
get { return _notCrcs; }
|
||||
set { _notCrcs = value; }
|
||||
}
|
||||
public List<string> NotMD5s
|
||||
{
|
||||
get { return _notMd5s; }
|
||||
set { _notMd5s = value; }
|
||||
}
|
||||
public List<string> NotSHA1s
|
||||
{
|
||||
get { return _notSha1s; }
|
||||
set { _notSha1s = value; }
|
||||
}
|
||||
public List<string> NotSHA256s
|
||||
{
|
||||
get { return _notSha256s; }
|
||||
set { _notSha256s = value; }
|
||||
}
|
||||
public List<string> NotSHA384s
|
||||
{
|
||||
get { return _notSha384s; }
|
||||
set { _notSha384s = value; }
|
||||
}
|
||||
public List<string> NotSHA512s
|
||||
{
|
||||
get { return _notSha512s; }
|
||||
set { _notSha512s = value; }
|
||||
}
|
||||
public ItemStatus NotItemStatuses
|
||||
{
|
||||
get { return _itemNotStatuses; }
|
||||
set { _itemNotStatuses = value; }
|
||||
}
|
||||
public MachineType NotMachineTypes
|
||||
{
|
||||
get { return _machineNotTypes; }
|
||||
set { _machineNotTypes = value; }
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Neutral
|
||||
|
||||
public long SizeGreaterThanOrEqual
|
||||
{
|
||||
get { return _sizeGreaterThanOrEqual; }
|
||||
set { _sizeGreaterThanOrEqual = value; }
|
||||
}
|
||||
public long SizeLessThanOrEqual
|
||||
{
|
||||
get { return _sizeLessThanOrEqual; }
|
||||
set { _sizeLessThanOrEqual = value; }
|
||||
}
|
||||
public long SizeEqualTo
|
||||
{
|
||||
get { return _sizeEqualTo; }
|
||||
set { _sizeEqualTo = value; }
|
||||
}
|
||||
public bool IncludeOfInGame
|
||||
{
|
||||
get { return _includeOfInGame; }
|
||||
set { _includeOfInGame = value; }
|
||||
}
|
||||
public bool? Runnable
|
||||
{
|
||||
get { return _runnable; }
|
||||
set { _runnable = value; }
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#endregion // Publicly facing variables
|
||||
|
||||
#region Constructors
|
||||
|
||||
/// <summary>
|
||||
/// Create an empty Filter object
|
||||
/// </summary>
|
||||
public Filter()
|
||||
{
|
||||
// Positive
|
||||
_gameNames = new List<string>();
|
||||
_romNames = new List<string>();
|
||||
_romTypes = new List<string>();
|
||||
_crcs = new List<string>();
|
||||
_md5s = new List<string>();
|
||||
_sha1s = new List<string>();
|
||||
_sha256s = new List<string>();
|
||||
_sha384s = new List<string>();
|
||||
_sha512s = new List<string>();
|
||||
_itemStatuses = ItemStatus.NULL;
|
||||
_machineTypes = MachineType.NULL;
|
||||
|
||||
// Negative
|
||||
_notGameNames = new List<string>();
|
||||
_notRomNames = new List<string>();
|
||||
_notRomTypes = new List<string>();
|
||||
_notCrcs = new List<string>();
|
||||
_notMd5s = new List<string>();
|
||||
_notSha1s = new List<string>();
|
||||
_notSha256s = new List<string>();
|
||||
_notSha384s = new List<string>();
|
||||
_notSha512s = new List<string>();
|
||||
_itemNotStatuses = ItemStatus.NULL;
|
||||
_machineNotTypes = MachineType.NULL;
|
||||
|
||||
// Neutral
|
||||
_sizeGreaterThanOrEqual = -1;
|
||||
_sizeLessThanOrEqual = -1;
|
||||
_sizeEqualTo = -1;
|
||||
_includeOfInGame = false;
|
||||
_runnable = null;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Instance methods
|
||||
|
||||
/// <summary>
|
||||
/// Check to see if a DatItem passes the filter
|
||||
/// </summary>
|
||||
/// <param name="item">DatItem to check</param>
|
||||
/// <returns>True if the file passed the filter, false otherwise</returns>
|
||||
public bool ItemPasses(DatItem item)
|
||||
{
|
||||
// If the item is null, we automatically fail it
|
||||
if (item == null)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// If the item's machine is null, we automatically fail it
|
||||
if (item.Machine == null)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// Filter on machine type
|
||||
if (_machineTypes != MachineType.NULL && (item.Machine.MachineType & _machineTypes) == 0)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
if (_machineNotTypes != MachineType.NULL && (item.Machine.MachineType & _machineNotTypes) != 0)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// Filter on machine runnability
|
||||
if (_runnable != null && item.Machine.Runnable != _runnable)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// Take care of Rom and Disk specific differences
|
||||
if (item.Type == ItemType.Rom)
|
||||
{
|
||||
Rom rom = (Rom)item;
|
||||
|
||||
// Filter on status
|
||||
if (_itemStatuses != ItemStatus.NULL && (rom.ItemStatus & _itemStatuses) == 0)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
if (_itemNotStatuses != ItemStatus.NULL && (rom.ItemStatus & _itemNotStatuses) != 0)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// Filter on rom size
|
||||
if (_sizeEqualTo != -1 && rom.Size != _sizeEqualTo)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
else
|
||||
{
|
||||
if (_sizeGreaterThanOrEqual != -1 && rom.Size < _sizeGreaterThanOrEqual)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
if (_sizeLessThanOrEqual != -1 && rom.Size > _sizeLessThanOrEqual)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Filter on CRC
|
||||
if (_crcs.Count > 0)
|
||||
{
|
||||
// If the CRC isn't in the list, return false
|
||||
if (!FindValueInList(_crcs, rom.CRC))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (_notCrcs.Count > 0)
|
||||
{
|
||||
// If the CRC is in the list, return false
|
||||
if (FindValueInList(_notCrcs, rom.CRC))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Filter on MD5
|
||||
if (_md5s.Count > 0)
|
||||
{
|
||||
// If the MD5 isn't in the list, return false
|
||||
if (!FindValueInList(_md5s, rom.MD5))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (_notMd5s.Count > 0)
|
||||
{
|
||||
// If the MD5 is in the list, return false
|
||||
if (FindValueInList(_notMd5s, rom.MD5))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Filter on SHA1
|
||||
if (_sha1s.Count > 0)
|
||||
{
|
||||
// If the SHA-1 isn't in the list, return false
|
||||
if (!FindValueInList(_sha1s, rom.SHA1))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (_notSha1s.Count > 0)
|
||||
{
|
||||
// If the SHA-1 is in the list, return false
|
||||
if (FindValueInList(_notSha1s, rom.SHA1))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Filter on SHA256
|
||||
if (_sha256s.Count > 0)
|
||||
{
|
||||
// If the SHA-256 isn't in the list, return false
|
||||
if (!FindValueInList(_sha256s, rom.SHA256))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (_notSha256s.Count > 0)
|
||||
{
|
||||
// If the SHA-256 is in the list, return false
|
||||
if (FindValueInList(_notSha256s, rom.SHA256))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Filter on SHA384
|
||||
if (_sha384s.Count > 0)
|
||||
{
|
||||
// If the SHA-384 isn't in the list, return false
|
||||
if (!FindValueInList(_sha384s, rom.SHA384))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (_notSha384s.Count > 0)
|
||||
{
|
||||
// If the SHA-384 is in the list, return false
|
||||
if (FindValueInList(_notSha384s, rom.SHA384))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Filter on SHA512
|
||||
if (_sha512s.Count > 0)
|
||||
{
|
||||
// If the SHA-512 isn't in the list, return false
|
||||
if (!FindValueInList(_sha512s, rom.SHA512))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (_notSha512s.Count > 0)
|
||||
{
|
||||
// If the SHA-512 is in the list, return false
|
||||
if (FindValueInList(_notSha512s, rom.SHA512))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
else if (item.Type == ItemType.Disk)
|
||||
{
|
||||
Disk rom = (Disk)item;
|
||||
|
||||
// Filter on status
|
||||
if (_itemStatuses != ItemStatus.NULL && (rom.ItemStatus & _itemStatuses) == 0)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
if (_itemNotStatuses != ItemStatus.NULL && (rom.ItemStatus & _itemNotStatuses) != 0)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// Filter on MD5
|
||||
if (_md5s.Count > 0)
|
||||
{
|
||||
// If the MD5 isn't in the list, return false
|
||||
if (!FindValueInList(_md5s, rom.MD5))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (_notMd5s.Count > 0)
|
||||
{
|
||||
// If the MD5 is in the list, return false
|
||||
if (FindValueInList(_notMd5s, rom.MD5))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Filter on SHA1
|
||||
if (_sha1s.Count > 0)
|
||||
{
|
||||
// If the SHA-1 isn't in the list, return false
|
||||
if (!FindValueInList(_sha1s, rom.SHA1))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (_notSha1s.Count > 0)
|
||||
{
|
||||
// If the SHA-1 is in the list, return false
|
||||
if (FindValueInList(_notSha1s, rom.SHA1))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Filter on SHA256
|
||||
if (_sha256s.Count > 0)
|
||||
{
|
||||
// If the SHA-256 isn't in the list, return false
|
||||
if (!FindValueInList(_sha256s, rom.SHA256))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (_notSha256s.Count > 0)
|
||||
{
|
||||
// If the SHA-256 is in the list, return false
|
||||
if (FindValueInList(_notSha256s, rom.SHA256))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Filter on SHA384
|
||||
if (_sha384s.Count > 0)
|
||||
{
|
||||
// If the SHA-384 isn't in the list, return false
|
||||
if (!FindValueInList(_sha384s, rom.SHA384))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (_notSha384s.Count > 0)
|
||||
{
|
||||
// If the SHA-384 is in the list, return false
|
||||
if (FindValueInList(_notSha384s, rom.SHA384))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Filter on SHA512
|
||||
if (_sha512s.Count > 0)
|
||||
{
|
||||
// If the SHA-512 isn't in the list, return false
|
||||
if (!FindValueInList(_sha512s, rom.SHA512))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (_notSha512s.Count > 0)
|
||||
{
|
||||
// If the SHA-512 is in the list, return false
|
||||
if (FindValueInList(_notSha512s, rom.SHA512))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Filter on game name
|
||||
if (_gameNames.Count > 0)
|
||||
{
|
||||
bool found = FindValueInList(_gameNames, item.Machine.Name);
|
||||
|
||||
// If we are checking CloneOf and RomOf, add them in as well
|
||||
if (_includeOfInGame)
|
||||
{
|
||||
found |= FindValueInList(_gameNames, item.Machine.CloneOf);
|
||||
found |= FindValueInList(_gameNames, item.Machine.RomOf);
|
||||
}
|
||||
|
||||
// If the game name was not found in the list, return false
|
||||
if (!found)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (_notGameNames.Count > 0)
|
||||
{
|
||||
bool found = FindValueInList(_notGameNames, item.Machine.Name);
|
||||
|
||||
// If we are checking CloneOf and RomOf, add them in as well
|
||||
if (_includeOfInGame)
|
||||
{
|
||||
found |= FindValueInList(_notGameNames, item.Machine.CloneOf);
|
||||
found |= FindValueInList(_notGameNames, item.Machine.RomOf);
|
||||
}
|
||||
|
||||
// If the game name was found in the list, return false
|
||||
if (found)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Filter on rom name
|
||||
if (_romNames.Count > 0)
|
||||
{
|
||||
// If the rom name was not found in the list, return false
|
||||
if (!FindValueInList(_romNames, item.Name))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (_notRomNames.Count > 0)
|
||||
{
|
||||
// If the rom name was found in the list, return false
|
||||
if (FindValueInList(_notRomNames, item.Name))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Filter on rom type
|
||||
if (_romTypes.Count == 0 && _notRomTypes.Count == 0 && item.Type != ItemType.Rom && item.Type != ItemType.Disk)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
if (_romTypes.Count > 0)
|
||||
{
|
||||
// If the rom type was not found in the list, return false
|
||||
if (!FindValueInList(_romTypes, item.Type.ToString()))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (_notRomTypes.Count > 0)
|
||||
{
|
||||
// If the rom type was found in the list, return false
|
||||
if (FindValueInList(_notRomTypes, item.Type.ToString()))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
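// Usage sketch (illustrative only; "item" is a DatItem from a parsed DAT and all filter values
// are hypothetical): keep runnable machines whose names start with "sf", excluding nodumps.
//
//     Filter filter = new Filter();
//     filter.GameNames.Add("^sf");               // contains regex characters, so it is matched as a pattern
//     filter.NotItemStatuses = ItemStatus.Nodump;
//     filter.Runnable = true;
//     bool keep = filter.ItemPasses(item);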
|
||||
|
||||
/// <summary>
|
||||
/// Generic code to check if a specific value is in the list given
|
||||
/// </summary>
|
||||
/// <param name="haystack">List to search for the value in</param>
|
||||
/// <param name="needle">Value to search the list for</param>
|
||||
/// <returns>True if the value could be found, false otherwise</returns>
|
||||
private bool FindValueInList(List<string> haystack, string needle)
|
||||
{
|
||||
bool found = false;
|
||||
foreach (string straw in haystack)
|
||||
{
|
||||
if (!String.IsNullOrEmpty(straw))
|
||||
{
|
||||
string regexStraw = straw;
|
||||
|
||||
// If the straw has no special characters at all, treat it as an exact match
|
||||
if (regexStraw == Regex.Escape(regexStraw))
|
||||
{
|
||||
regexStraw = "^" + regexStraw + "$";
|
||||
}
|
||||
|
||||
// Check if a match is found with the regex
|
||||
found |= Regex.IsMatch(needle, regexStraw, RegexOptions.IgnoreCase | RegexOptions.CultureInvariant);
|
||||
}
|
||||
}
|
||||
|
||||
return found;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Static methods
|
||||
|
||||
/// <summary>
|
||||
/// Get the machine type from a string
|
||||
/// </summary>
|
||||
/// <param name="gametype">Machine type as a string</param>
|
||||
/// <returns>A machine type based on the input</returns>
|
||||
public static MachineType GetMachineTypeFromString(string gametype)
|
||||
{
|
||||
MachineType machineType = MachineType.NULL;
|
||||
switch (gametype.ToLowerInvariant())
|
||||
{
|
||||
case "none":
|
||||
machineType |= MachineType.None;
|
||||
break;
|
||||
case "bios":
|
||||
machineType |= MachineType.Bios;
|
||||
break;
|
||||
case "dev":
|
||||
case "device":
|
||||
machineType |= MachineType.Device;
|
||||
break;
|
||||
case "mech":
|
||||
case "mechanical":
|
||||
machineType |= MachineType.Mechanical;
|
||||
break;
|
||||
default:
|
||||
Globals.Logger.Warning(gametype + " is not a valid type");
|
||||
break;
|
||||
}
|
||||
|
||||
return machineType;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Get the item status from a string
|
||||
/// </summary>
|
||||
/// <param name="status">Item status as a string</param>
|
||||
/// <returns>An item status based on the input</returns>
|
||||
public static ItemStatus GetStatusFromString(string status)
|
||||
{
|
||||
ItemStatus itemStatus = ItemStatus.NULL;
|
||||
switch (status.ToLowerInvariant())
|
||||
{
|
||||
case "none":
|
||||
itemStatus |= ItemStatus.None;
|
||||
break;
|
||||
case "good":
|
||||
itemStatus |= ItemStatus.Good;
|
||||
break;
|
||||
case "baddump":
|
||||
itemStatus |= ItemStatus.BadDump;
|
||||
break;
|
||||
case "nodump":
|
||||
itemStatus |= ItemStatus.Nodump;
|
||||
break;
|
||||
case "verified":
|
||||
itemStatus |= ItemStatus.Verified;
|
||||
break;
|
||||
default:
|
||||
Globals.Logger.Warning(status + " is not a valid status");
|
||||
break;
|
||||
}
|
||||
|
||||
return itemStatus;
|
||||
}
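// Usage sketch (illustrative only; "filter" is a hypothetical Filter instance): these helpers
// turn user-supplied strings into the flag values consumed by ItemPasses above.
//
//     filter.MachineTypes |= Filter.GetMachineTypeFromString("bios");
//     filter.NotItemStatuses |= Filter.GetStatusFromString("nodump");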
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
157
SabreTools.Library/Dats/Machine.cs
Normal file
@@ -0,0 +1,157 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
|
||||
using SabreTools.Helper.Data;
|
||||
|
||||
namespace SabreTools.Helper.Dats
|
||||
{
|
||||
public class Machine : ICloneable
|
||||
{
|
||||
#region Protected instance variables
|
||||
|
||||
// Machine information
|
||||
protected string _name;
|
||||
protected string _comment;
|
||||
protected string _description;
|
||||
protected string _year;
|
||||
protected string _manufacturer;
|
||||
protected string _romOf;
|
||||
protected string _cloneOf;
|
||||
protected string _sampleOf;
|
||||
protected string _sourceFile;
|
||||
protected bool? _runnable;
|
||||
protected string _board;
|
||||
protected string _rebuildTo;
|
||||
protected List<string> _devices;
|
||||
protected MachineType _machineType;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Publicly facing variables
|
||||
|
||||
// Machine information
|
||||
public string Name
|
||||
{
|
||||
get { return _name; }
|
||||
set { _name = value; }
|
||||
}
|
||||
public string Comment
|
||||
{
|
||||
get { return _comment; }
|
||||
set { _comment = value; }
|
||||
}
|
||||
public string Description
|
||||
{
|
||||
get { return _description; }
|
||||
set { _description = value; }
|
||||
}
|
||||
public string Year
|
||||
{
|
||||
get { return _year; }
|
||||
set { _year = value; }
|
||||
}
|
||||
public string Manufacturer
|
||||
{
|
||||
get { return _manufacturer; }
|
||||
set { _manufacturer = value; }
|
||||
}
|
||||
public string RomOf
|
||||
{
|
||||
get { return _romOf; }
|
||||
set { _romOf = value; }
|
||||
}
|
||||
public string CloneOf
|
||||
{
|
||||
get { return _cloneOf; }
|
||||
set { _cloneOf = value; }
|
||||
}
|
||||
public string SampleOf
|
||||
{
|
||||
get { return _sampleOf; }
|
||||
set { _sampleOf = value; }
|
||||
}
|
||||
public string SourceFile
|
||||
{
|
||||
get { return _sourceFile; }
|
||||
set { _sourceFile = value; }
|
||||
}
|
||||
public bool? Runnable
|
||||
{
|
||||
get { return _runnable; }
|
||||
set { _runnable = value; }
|
||||
}
|
||||
public string Board
|
||||
{
|
||||
get { return _board; }
|
||||
set { _board = value; }
|
||||
}
|
||||
public string RebuildTo
|
||||
{
|
||||
get { return _rebuildTo; }
|
||||
set { _rebuildTo = value; }
|
||||
}
|
||||
public List<string> Devices
|
||||
{
|
||||
get { return _devices; }
|
||||
set { _devices = value; }
|
||||
}
|
||||
public MachineType MachineType
|
||||
{
|
||||
get { return _machineType; }
|
||||
set { _machineType = value; }
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Constructors
|
||||
|
||||
/// <summary>
|
||||
/// Create a default, empty Machine object
|
||||
/// </summary>
|
||||
public Machine()
|
||||
{
|
||||
_name = "";
|
||||
_description = "";
|
||||
_runnable = null;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Create a new Machine object with the included information
|
||||
/// </summary>
|
||||
/// <param name="name">Name of the machine</param>
|
||||
/// <param name="description">Description of the machine</param>
|
||||
public Machine(string name, string description)
|
||||
{
|
||||
_name = name;
|
||||
_description = description;
|
||||
_runnable = null;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Cloning
|
||||
|
||||
public object Clone()
|
||||
{
|
||||
return new Machine()
|
||||
{
|
||||
Name = _name,
|
||||
Comment = _comment,
|
||||
Description = _description,
|
||||
Year = _year,
|
||||
Manufacturer = _manufacturer,
|
||||
RomOf = _romOf,
|
||||
CloneOf = _cloneOf,
|
||||
SampleOf = _sampleOf,
|
||||
SourceFile = _sourceFile,
|
||||
Runnable = _runnable,
|
||||
Board = _board,
|
||||
RebuildTo = _rebuildTo,
|
||||
Devices = _devices,
|
||||
MachineType = _machineType,
|
||||
};
|
||||
}
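// Note (observation): Clone copies the Devices list by reference, so the clone and the original
// share one List<string>. Callers that need an independent list should copy it, e.g.:
//
//     Machine copy = (Machine)original.Clone();          // "original" is hypothetical
//     copy.Devices = (original.Devices == null ? null : new List<string>(original.Devices));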
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
539
SabreTools.Library/Dats/Partials/DatFile.ConvertUpdate.cs
Normal file
@@ -0,0 +1,539 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
using SabreTools.Helper.Data;
|
||||
using SabreTools.Helper.Tools;
|
||||
|
||||
#if MONO
|
||||
using System.IO;
|
||||
#else
|
||||
using Alphaleonis.Win32.Filesystem;
|
||||
|
||||
using SearchOption = System.IO.SearchOption;
|
||||
#endif
|
||||
using NaturalSort;
|
||||
|
||||
namespace SabreTools.Helper.Dats
|
||||
{
|
||||
public partial class DatFile
|
||||
{
|
||||
#region Converting and Updating
|
||||
|
||||
/// <summary>
|
||||
/// Determine if input files should be merged, diffed, or processed individually
|
||||
/// </summary>
|
||||
/// <param name="inputPaths">Names of the input files and/or folders</param>
|
||||
/// <param name="outDir">Optional param for output directory</param>
|
||||
/// <param name="merge">True if input files should be merged into a single file, false otherwise</param>
|
||||
/// <param name="diff">Non-zero flag for diffing mode, zero otherwise</param>
|
||||
/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
|
||||
/// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
|
||||
/// <param name="bare">True if the date should not be appended to the default name, false otherwise [OBSOLETE]</param>
|
||||
/// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
|
||||
/// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
|
||||
/// <param name="descAsName">True to allow SL DATs to have game names used instead of descriptions, false otherwise (default)</param>
|
||||
/// <param name="filter">Filter object to be passed to the DatItem level</param>
|
||||
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
|
||||
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
|
||||
/// <param name="single">True if all games should be replaced by '!', false otherwise</param>
|
||||
/// <param name="root">String representing root directory to compare against for length calculation</param>
|
||||
public void DetermineUpdateType(List<string> inputPaths, string outDir, bool merge, DiffMode diff, bool inplace, bool skip,
|
||||
bool bare, bool clean, bool remUnicode, bool descAsName, Filter filter, SplitType splitType, bool trim, bool single, string root)
|
||||
{
|
||||
// If we're in merging or diffing mode, use the full list of inputs
|
||||
if (merge || diff != 0)
|
||||
{
|
||||
// Make sure there are no folders in inputs
|
||||
List<string> newInputFileNames = FileTools.GetOnlyFilesFromInputs(inputPaths, appendparent: true);
|
||||
|
||||
// If we're in inverse cascade, reverse the list
|
||||
if ((diff & DiffMode.ReverseCascade) != 0)
|
||||
{
|
||||
newInputFileNames.Reverse();
|
||||
}
|
||||
|
||||
// Create a dictionary of all ROMs from the input DATs
|
||||
List<DatFile> datHeaders = PopulateUserData(newInputFileNames, inplace, clean,
|
||||
remUnicode, descAsName, outDir, filter, splitType, trim, single, root);
|
||||
|
||||
// Modify the Dictionary if necessary and output the results
|
||||
if (diff != 0 && diff < DiffMode.Cascade)
|
||||
{
|
||||
DiffNoCascade(diff, outDir, newInputFileNames);
|
||||
}
|
||||
// If we're in cascade and diff, output only cascaded diffs
|
||||
else if (diff != 0 && diff >= DiffMode.Cascade)
|
||||
{
|
||||
DiffCascade(outDir, inplace, newInputFileNames, datHeaders, skip);
|
||||
}
|
||||
// Output all entries with user-defined merge
|
||||
else
|
||||
{
|
||||
MergeNoDiff(outDir, newInputFileNames, datHeaders);
|
||||
}
|
||||
}
|
||||
// Otherwise, loop through all of the inputs individually
|
||||
else
|
||||
{
|
||||
Update(inputPaths, outDir, inplace, clean, remUnicode, descAsName, filter, splitType, trim, single, root);
|
||||
}
|
||||
return;
|
||||
}
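// Usage sketch (illustrative only; every argument value is hypothetical): a plain
// "merge several DATs into one output" call, with no diffing and default split behavior.
//
//     DatFile userDat = new DatFile();
//     userDat.DetermineUpdateType(inputPaths, outDir: "out", merge: true, diff: 0,
//         inplace: false, skip: false, bare: false, clean: false, remUnicode: false,
//         descAsName: false, filter: new Filter(), splitType: default(SplitType),
//         trim: false, single: false, root: "");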
|
||||
|
||||
/// <summary>
|
||||
/// Populate the user DatData object from the input files
|
||||
/// </summary>
|
||||
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
|
||||
/// <param name="filter">Filter object to be passed to the DatItem level</param>
|
||||
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
|
||||
/// <param name="single">True if all games should be replaced by '!', false otherwise</param>
|
||||
/// <param name="root">String representing root directory to compare against for length calculation</param>
|
||||
/// <returns>List of DatData objects representing headers</returns>
|
||||
private List<DatFile> PopulateUserData(List<string> inputs, bool inplace, bool clean, bool remUnicode, bool descAsName,
|
||||
string outDir, Filter filter, SplitType splitType, bool trim, bool single, string root)
|
||||
{
|
||||
DatFile[] datHeaders = new DatFile[inputs.Count];
|
||||
DateTime start = DateTime.Now;
|
||||
Globals.Logger.User("Processing individual DATs");
|
||||
|
||||
// Parse all of the DATs into their own DatFiles in the array
|
||||
Parallel.For(0, inputs.Count, Globals.ParallelOptions, i =>
|
||||
{
|
||||
string input = inputs[i];
|
||||
Globals.Logger.User("Adding DAT: " + input.Split('¬')[0]);
|
||||
datHeaders[i] = new DatFile
|
||||
{
|
||||
DatFormat = (DatFormat != 0 ? DatFormat : 0),
|
||||
MergeRoms = MergeRoms,
|
||||
};
|
||||
|
||||
datHeaders[i].Parse(input.Split('¬')[0], i, 0, splitType, true, clean, descAsName);
|
||||
});
|
||||
|
||||
Globals.Logger.User("Processing complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
|
||||
|
||||
Globals.Logger.User("Populating internal DAT");
|
||||
Parallel.For(0, inputs.Count, Globals.ParallelOptions, i =>
|
||||
{
|
||||
// Get the list of keys from the DAT
|
||||
List<string> keys = datHeaders[i].Keys.ToList();
|
||||
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
|
||||
{
|
||||
// Add everything from the key to the internal DAT
|
||||
AddRange(key, datHeaders[i][key]);
|
||||
|
||||
// Now remove the key from the source DAT
|
||||
lock (datHeaders)
|
||||
{
|
||||
datHeaders[i].Remove(key);
|
||||
}
|
||||
});
|
||||
|
||||
// Now remove the file dictionary from the source DAT to save memory
|
||||
datHeaders[i].Delete();
|
||||
});
|
||||
|
||||
// Now that we have a merged DAT, filter it
|
||||
Filter(filter, single, trim, root);
|
||||
|
||||
Globals.Logger.User("Processing and populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
|
||||
|
||||
return datHeaders.ToList();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Output non-cascading diffs
|
||||
/// </summary>
|
||||
/// <param name="diff">Non-zero flag for diffing mode, zero otherwise</param>
|
||||
/// <param name="outDir">Output directory to write the DATs to</param>
|
||||
/// <param name="inputs">List of inputs to write out from</param>
|
||||
public void DiffNoCascade(DiffMode diff, string outDir, List<string> inputs)
|
||||
{
|
||||
DateTime start = DateTime.Now;
|
||||
Globals.Logger.User("Initializing all output DATs");
|
||||
|
||||
// Default vars for use
|
||||
string post = "";
|
||||
DatFile outerDiffData = new DatFile();
|
||||
DatFile dupeData = new DatFile();
|
||||
|
||||
// Fill in any information not in the base DAT
|
||||
if (String.IsNullOrEmpty(_fileName))
|
||||
{
|
||||
_fileName = "All DATs";
|
||||
}
|
||||
if (String.IsNullOrEmpty(_name))
|
||||
{
|
||||
_name = "All DATs";
|
||||
}
|
||||
if (String.IsNullOrEmpty(_description))
|
||||
{
|
||||
_description = "All DATs";
|
||||
}
|
||||
|
||||
// Don't have External dupes
|
||||
if ((diff & DiffMode.NoDupes) != 0)
|
||||
{
|
||||
post = " (No Duplicates)";
|
||||
outerDiffData = new DatFile(this);
|
||||
outerDiffData.FileName += post;
|
||||
outerDiffData.Name += post;
|
||||
outerDiffData.Description += post;
|
||||
outerDiffData.Reset();
|
||||
}
|
||||
|
||||
// Have External dupes
|
||||
if ((diff & DiffMode.Dupes) != 0)
|
||||
{
|
||||
post = " (Duplicates)";
|
||||
dupeData = new DatFile(this);
|
||||
dupeData.FileName += post;
|
||||
dupeData.Name += post;
|
||||
dupeData.Description += post;
|
||||
dupeData.Reset();
|
||||
}
|
||||
|
||||
// Create a list of DatData objects representing individual output files
|
||||
List<DatFile> outDats = new List<DatFile>();
|
||||
|
||||
// Loop through each of the inputs and get or create a new DatData object
|
||||
if ((diff & DiffMode.Individuals) != 0)
|
||||
{
|
||||
DatFile[] outDatsArray = new DatFile[inputs.Count];
|
||||
|
||||
Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
|
||||
{
|
||||
string innerpost = " (" + Path.GetFileNameWithoutExtension(inputs[j].Split('¬')[0]) + " Only)";
|
||||
DatFile diffData = new DatFile(this);
|
||||
diffData.FileName += innerpost;
|
||||
diffData.Name += innerpost;
|
||||
diffData.Description += innerpost;
|
||||
diffData.Reset();
|
||||
outDatsArray[j] = diffData;
|
||||
});
|
||||
|
||||
outDats = outDatsArray.ToList();
|
||||
}
|
||||
Globals.Logger.User("Initializing complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
|
||||
|
||||
// Now, loop through the dictionary and populate the correct DATs
|
||||
start = DateTime.Now;
|
||||
Globals.Logger.User("Populating all output DATs");
|
||||
|
||||
List<string> keys = Keys.ToList();
|
||||
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
|
||||
{
|
||||
List<DatItem> items = DatItem.Merge(this[key]);
|
||||
|
||||
// If the rom list is empty or null, just skip it
|
||||
if (items == null || items.Count == 0)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// Loop through and add the items correctly
|
||||
Parallel.ForEach(items, Globals.ParallelOptions, item =>
|
||||
{
|
||||
// No duplicates
|
||||
if ((diff & DiffMode.NoDupes) != 0 || (diff & DiffMode.Individuals) != 0)
|
||||
{
|
||||
if ((item.Dupe & DupeType.Internal) != 0)
|
||||
{
|
||||
// Individual DATs that are output
|
||||
if ((diff & DiffMode.Individuals) != 0)
|
||||
{
|
||||
outDats[item.SystemID].Add(key, item);
|
||||
}
|
||||
|
||||
// Merged no-duplicates DAT
|
||||
if ((diff & DiffMode.NoDupes) != 0)
|
||||
{
|
||||
DatItem newrom = item;
|
||||
newrom.Machine.Name += " (" + Path.GetFileNameWithoutExtension(inputs[newrom.SystemID].Split('¬')[0]) + ")";
|
||||
|
||||
outerDiffData.Add(key, newrom);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Duplicates only
|
||||
if ((diff & DiffMode.Dupes) != 0)
|
||||
{
|
||||
if ((item.Dupe & DupeType.External) != 0)
|
||||
{
|
||||
DatItem newrom = item;
|
||||
newrom.Machine.Name += " (" + Path.GetFileNameWithoutExtension(inputs[newrom.SystemID].Split('¬')[0]) + ")";
|
||||
|
||||
dupeData.Add(key, newrom);
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
Globals.Logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
|
||||
|
||||
// Finally, loop through and output each of the DATs
|
||||
start = DateTime.Now;
|
||||
Globals.Logger.User("Outputting all created DATs");
|
||||
|
||||
// Output the difflist (a-b)+(b-a) diff
|
||||
if ((diff & DiffMode.NoDupes) != 0)
|
||||
{
|
||||
outerDiffData.WriteToFile(outDir);
|
||||
}
|
||||
|
||||
// Output the (ab) diff
|
||||
if ((diff & DiffMode.Dupes) != 0)
|
||||
{
|
||||
dupeData.WriteToFile(outDir);
|
||||
}
|
||||
|
||||
// Output the individual (a-b) DATs
|
||||
if ((diff & DiffMode.Individuals) != 0)
|
||||
{
|
||||
Parallel.For(0, inputs.Count, j =>
|
||||
{
|
||||
// If we have an output directory set, replace the path
|
||||
string[] split = inputs[j].Split('¬');
|
||||
string path = outDir + (split[0] == split[1]
|
||||
? Path.GetFileName(split[0])
|
||||
: (Path.GetDirectoryName(split[0]).Remove(0, split[1].Length)));
|
||||
|
||||
// Try to output the file
|
||||
outDats[j].WriteToFile(path);
|
||||
});
|
||||
}
|
||||
Globals.Logger.User("Outputting complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Output cascading diffs
|
||||
/// </summary>
|
||||
/// <param name="outDir">Output directory to write the DATs to</param>
|
||||
/// <param name="inplace">True if cascaded diffs are outputted in-place, false otherwise</param>
|
||||
/// <param name="inputs">List of inputs to write out from</param>
|
||||
/// <param name="datHeaders">Dat headers used optionally</param>
|
||||
/// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
|
||||
public void DiffCascade(string outDir, bool inplace, List<string> inputs, List<DatFile> datHeaders, bool skip)
|
||||
{
|
||||
string post = "";
|
||||
|
||||
// Create a list of DatData objects representing output files
|
||||
List<DatFile> outDats = new List<DatFile>();
|
||||
|
||||
// Loop through each of the inputs and get or create a new DatData object
|
||||
DateTime start = DateTime.Now;
|
||||
Globals.Logger.User("Initializing all output DATs");
|
||||
|
||||
DatFile[] outDatsArray = new DatFile[inputs.Count];
|
||||
|
||||
Parallel.For(0, inputs.Count, Globals.ParallelOptions, j =>
|
||||
{
|
||||
string innerpost = " (" + Path.GetFileNameWithoutExtension(inputs[j].Split('¬')[0]) + " Only)";
|
||||
DatFile diffData;
|
||||
|
||||
// If we're in inplace mode, take the appropriate DatData object already stored
|
||||
if (inplace || !String.IsNullOrEmpty(outDir))
|
||||
{
|
||||
diffData = datHeaders[j];
|
||||
}
|
||||
else
|
||||
{
|
||||
diffData = new DatFile(this);
|
||||
diffData.FileName += post;
|
||||
diffData.Name += post;
|
||||
diffData.Description += post;
|
||||
}
|
||||
diffData.Reset();
|
||||
|
||||
outDatsArray[j] = diffData;
|
||||
});
|
||||
|
||||
outDats = outDatsArray.ToList();
|
||||
Globals.Logger.User("Initializing complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
|
||||
|
||||
// Now, loop through the dictionary and populate the correct DATs
|
||||
start = DateTime.Now;
|
||||
Globals.Logger.User("Populating all output DATs");
|
||||
List<string> keys = Keys.ToList();
|
||||
|
||||
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
|
||||
{
|
||||
List<DatItem> items = DatItem.Merge(this[key]);
|
||||
|
||||
// If the rom list is empty or null, just skip it
|
||||
if (items == null || items.Count == 0)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
Parallel.ForEach(items, Globals.ParallelOptions, item =>
|
||||
{
|
||||
// There are odd cases where items come through with a System ID < 0. Skip them for now
|
||||
if (item.SystemID < 0)
|
||||
{
|
||||
Globals.Logger.Warning("Item found with a <0 SystemID: " + item.Name);
|
||||
return;
|
||||
}
|
||||
|
||||
outDats[item.SystemID].Add(key, item);
|
||||
});
|
||||
});
|
||||
|
||||
Globals.Logger.User("Populating complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
|
||||
|
||||
// Finally, loop through and output each of the DATs
|
||||
start = DateTime.Now;
|
||||
Globals.Logger.User("Outputting all created DATs");
|
||||
|
||||
Parallel.For((skip ? 1 : 0), inputs.Count, j =>
|
||||
{
|
||||
// If we have an output directory set, replace the path
|
||||
string path = "";
|
||||
if (inplace)
|
||||
{
|
||||
path = Path.GetDirectoryName(inputs[j].Split('¬')[0]);
|
||||
}
|
||||
else if (!String.IsNullOrEmpty(outDir))
|
||||
{
|
||||
string[] split = inputs[j].Split('¬');
|
||||
path = outDir + (split[0] == split[1]
|
||||
? Path.GetFileName(split[0])
|
||||
: (Path.GetDirectoryName(split[0]).Remove(0, split[1].Length)));
|
||||
}
|
||||
|
||||
// Try to output the file
|
||||
outDats[j].WriteToFile(path);
|
||||
});
|
||||
|
||||
Globals.Logger.User("Outputting complete in " + DateTime.Now.Subtract(start).ToString(@"hh\:mm\:ss\.fffff"));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Output user defined merge
|
||||
/// </summary>
|
||||
/// <param name="outDir">Output directory to write the DATs to</param>
|
||||
/// <param name="inputs">List of inputs to write out from</param>
|
||||
/// <param name="datHeaders">Dat headers used optionally</param>
|
||||
public void MergeNoDiff(string outDir, List<string> inputs, List<DatFile> datHeaders)
|
||||
{
|
||||
// If we're in SuperDAT mode, prefix all games with their respective DATs
|
||||
if (Type == "SuperDAT")
|
||||
{
|
||||
List<string> keys = Keys.ToList();
|
||||
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
|
||||
{
|
||||
List<DatItem> items = this[key].ToList();
|
||||
List<DatItem> newItems = new List<DatItem>();
|
||||
Parallel.ForEach(items, Globals.ParallelOptions, item =>
|
||||
{
|
||||
DatItem newItem = item;
|
||||
string filename = inputs[newItem.SystemID].Split('¬')[0];
|
||||
string rootpath = inputs[newItem.SystemID].Split('¬')[1];
|
||||
|
||||
rootpath += (rootpath == "" ? "" : Path.DirectorySeparatorChar.ToString());
|
||||
filename = filename.Remove(0, rootpath.Length);
|
||||
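// Prefix the machine name with the source DAT's relative directory and its file name (without extension)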
newItem.Machine.Name = Path.GetDirectoryName(filename) + Path.DirectorySeparatorChar
|
||||
+ Path.GetFileNameWithoutExtension(filename) + Path.DirectorySeparatorChar
|
||||
+ newItem.Machine.Name;
|
||||
|
||||
lock (newItems)
|
||||
{
|
||||
newItems.Add(newItem);
|
||||
}
|
||||
});
|
||||
|
||||
Remove(key);
|
||||
AddRange(key, newItems);
|
||||
});
|
||||
}
|
||||
|
||||
// Try to output the file
|
||||
WriteToFile(outDir);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Convert, update, and filter a DAT file or set of files using a base
|
||||
/// </summary>
|
||||
/// <param name="inputFileNames">Names of the input files and/or folders</param>
|
||||
/// <param name="outDir">Optional param for output directory</param>
|
||||
/// <param name="merge">True if input files should be merged into a single file, false otherwise</param>
|
||||
/// <param name="diff">Non-zero flag for diffing mode, zero otherwise</param>
|
||||
/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
|
||||
/// <param name="skip">True if the first cascaded diff file should be skipped on output, false otherwise</param>
|
||||
/// <param name="bare">True if the date should not be appended to the default name, false otherwise [OBSOLETE]</param>
|
||||
/// <param name="clean">True to clean the game names to WoD standard, false otherwise (default)</param>
|
||||
/// <param name="remUnicode">True if we should remove non-ASCII characters from output, false otherwise (default)</param>
|
||||
/// <param name="descAsName">True to allow SL DATs to have game names used instead of descriptions, false otherwise (default)</param>
|
||||
/// <param name="filter">Filter object to be passed to the DatItem level</param>
|
||||
/// <param name="splitType">Type of the split that should be performed (split, merged, fully merged)</param>
|
||||
/// <param name="trim">True if we are supposed to trim names to NTFS length, false otherwise</param>
|
||||
/// <param name="single">True if all games should be replaced by '!', false otherwise</param>
|
||||
/// <param name="root">String representing root directory to compare against for length calculation</param>
|
||||
public void Update(List<string> inputFileNames, string outDir, bool inplace, bool clean, bool remUnicode, bool descAsName,
|
||||
Filter filter, SplitType splitType, bool trim, bool single, string root)
|
||||
{
|
||||
Parallel.ForEach(inputFileNames, Globals.ParallelOptions, inputFileName =>
|
||||
{
|
||||
// Clean the input string
|
||||
if (inputFileName != "")
|
||||
{
|
||||
inputFileName = Path.GetFullPath(inputFileName);
|
||||
}
|
||||
|
||||
if (File.Exists(inputFileName))
|
||||
{
|
||||
// If inplace is set, override the output dir
|
||||
string realOutDir = outDir;
|
||||
if (inplace)
|
||||
{
|
||||
realOutDir = Path.GetDirectoryName(inputFileName);
|
||||
}
|
||||
|
||||
DatFile innerDatdata = new DatFile(this);
|
||||
Globals.Logger.User("Processing \"" + Path.GetFileName(inputFileName) + "\"");
|
||||
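// Keep file extensions only when parsing TSV or CSV formatted DATs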
innerDatdata.Parse(inputFileName, 0, 0, splitType, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName,
|
||||
keepext: ((innerDatdata.DatFormat & DatFormat.TSV) != 0 || (innerDatdata.DatFormat & DatFormat.CSV) != 0));
|
||||
innerDatdata.Filter(filter, trim, single, root);
|
||||
|
||||
// Try to output the file
|
||||
innerDatdata.WriteToFile((realOutDir == "" ? Path.GetDirectoryName(inputFileName) : realOutDir), overwrite: (realOutDir != ""));
|
||||
}
|
||||
else if (Directory.Exists(inputFileName))
|
||||
{
|
||||
inputFileName = Path.GetFullPath(inputFileName) + Path.DirectorySeparatorChar;
|
||||
|
||||
// If inplace is set, override the output dir
|
||||
string realOutDir = outDir;
|
||||
if (inplace)
|
||||
{
|
||||
realOutDir = Path.GetDirectoryName(inputFileName);
|
||||
}
|
||||
|
||||
List<string> subFiles = Directory.EnumerateFiles(inputFileName, "*", SearchOption.AllDirectories).ToList();
|
||||
Parallel.ForEach(subFiles, Globals.ParallelOptions, file =>
|
||||
{
|
||||
Globals.Logger.User("Processing \"" + Path.GetFullPath(file).Remove(0, inputFileName.Length) + "\"");
|
||||
DatFile innerDatdata = new DatFile(this);
|
||||
innerDatdata.Parse(file, 0, 0, splitType, keep: true, clean: clean, remUnicode: remUnicode, descAsName: descAsName,
|
||||
keepext: ((innerDatdata.DatFormat & DatFormat.TSV) != 0 || (innerDatdata.DatFormat & DatFormat.CSV) != 0));
|
||||
innerDatdata.Filter(filter, trim, single, root);
|
||||
|
||||
// Try to output the file
|
||||
innerDatdata.WriteToFile((realOutDir == "" ? Path.GetDirectoryName(file) : realOutDir + Path.GetDirectoryName(file).Remove(0, inputFileName.Length - 1)),
|
||||
overwrite: (realOutDir != ""));
|
||||
});
|
||||
}
|
||||
else
|
||||
{
|
||||
Globals.Logger.Error("I'm sorry but " + inputFileName + " doesn't exist!");
|
||||
return;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
433
SabreTools.Library/Dats/Partials/DatFile.DFD.cs
Normal file
@@ -0,0 +1,433 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
using SabreTools.Helper.Data;
|
||||
using SabreTools.Helper.Tools;
|
||||
|
||||
#if MONO
|
||||
using System.IO;
|
||||
#else
|
||||
using Alphaleonis.Win32.Filesystem;
|
||||
|
||||
using IOException = System.IO.IOException;
|
||||
using SearchOption = System.IO.SearchOption;
|
||||
#endif
|
||||
using SharpCompress.Common;
|
||||
|
||||
namespace SabreTools.Helper.Dats
|
||||
{
|
||||
public partial class DatFile
|
||||
{
|
||||
#region Populate DAT from Directory [MODULAR DONE, FOR NOW]
|
||||
|
||||
/// <summary>
|
||||
/// Create a new Dat from a directory
|
||||
/// </summary>
|
||||
/// <param name="basePath">Base folder to be used in creating the DAT</param>
|
||||
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
|
||||
/// <param name="bare">True if the date should be omitted from the DAT, false otherwise</param>
|
||||
/// <param name="archivesAsFiles">True if archives should be treated as files, false otherwise</param>
|
||||
/// <param name="enableGzip">True if GZIP archives should be treated as files, false otherwise</param>
|
||||
/// <param name="skipFileType">Type of files that should be skipped</param>
|
||||
/// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
|
||||
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
|
||||
/// <param name="tempDir">Name of the directory to create a temp folder in (blank is current directory)</param>
|
||||
/// <param name="outDir">Output directory to </param>
|
||||
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
|
||||
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
|
||||
public bool PopulateFromDir(string basePath, Hash omitFromScan, bool bare, bool archivesAsFiles, bool enableGzip,
|
||||
SkipFileType skipFileType, bool addBlanks, bool addDate, string tempDir, bool copyFiles, string headerToCheckAgainst)
|
||||
{
|
||||
// If the description is defined but not the name, set the name from the description
|
||||
if (String.IsNullOrEmpty(Name) && !String.IsNullOrEmpty(Description))
|
||||
{
|
||||
Name = Description;
|
||||
}
|
||||
|
||||
// If the name is defined but not the description, set the description from the name
|
||||
else if (!String.IsNullOrEmpty(Name) && String.IsNullOrEmpty(Description))
|
||||
{
|
||||
Description = Name + (bare ? "" : " (" + Date + ")");
|
||||
}
|
||||
|
||||
// If neither the name or description are defined, set them from the automatic values
|
||||
else if (String.IsNullOrEmpty(Name) && String.IsNullOrEmpty(Description))
|
||||
{
|
||||
Name = basePath.Split(Path.DirectorySeparatorChar).Last();
|
||||
Description = Name + (bare ? "" : " (" + Date + ")");
|
||||
}
|
||||
|
||||
// Process the input
|
||||
if (Directory.Exists(basePath))
|
||||
{
|
||||
Globals.Logger.Verbose("Folder found: " + basePath);
|
||||
|
||||
// Process the files in the main folder
|
||||
List<string> files = Directory.EnumerateFiles(basePath, "*", SearchOption.TopDirectoryOnly).ToList();
|
||||
Parallel.ForEach(files, Globals.ParallelOptions, item =>
|
||||
{
|
||||
PopulateFromDirCheckFile(item, basePath, omitFromScan, bare, archivesAsFiles, enableGzip, skipFileType,
|
||||
addBlanks, addDate, tempDir, copyFiles, headerToCheckAgainst);
|
||||
});
|
||||
|
||||
// Find all top-level subfolders
|
||||
files = Directory.EnumerateDirectories(basePath, "*", SearchOption.TopDirectoryOnly).ToList();
|
||||
Parallel.ForEach(files, Globals.ParallelOptions, item =>
|
||||
{
|
||||
List<string> subfiles = Directory.EnumerateFiles(item, "*", SearchOption.AllDirectories).ToList();
|
||||
Parallel.ForEach(subfiles, Globals.ParallelOptions, subitem =>
|
||||
{
|
||||
PopulateFromDirCheckFile(subitem, basePath, omitFromScan, bare, archivesAsFiles, enableGzip, skipFileType,
|
||||
addBlanks, addDate, tempDir, copyFiles, headerToCheckAgainst);
|
||||
});
|
||||
});
|
||||
|
||||
// Now find all folders that are empty, if we are supposed to
|
||||
if (!Romba && addBlanks)
|
||||
{
|
||||
List<string> empties = FileTools.GetEmptyDirectories(basePath).ToList();
|
||||
Parallel.ForEach(empties, Globals.ParallelOptions, dir =>
|
||||
{
|
||||
// Get the full path for the directory
|
||||
string fulldir = Path.GetFullPath(dir);
|
||||
|
||||
// Set the temporary variables
|
||||
string gamename = "";
|
||||
string romname = "";
|
||||
|
||||
// If we have a SuperDAT, we want anything that's not the base path as the game, and the file as the rom
|
||||
if (Type == "SuperDAT")
|
||||
{
|
||||
gamename = fulldir.Remove(0, basePath.Length + 1);
|
||||
romname = "_";
|
||||
}
|
||||
|
||||
// Otherwise, we want just the top level folder as the game, and the file as everything else
|
||||
else
|
||||
{
|
||||
gamename = fulldir.Remove(0, basePath.Length + 1).Split(Path.DirectorySeparatorChar)[0];
|
||||
romname = Path.Combine(fulldir.Remove(0, basePath.Length + 1 + gamename.Length), "_");
|
||||
}
|
||||
|
||||
// Sanitize the names
|
||||
if (gamename.StartsWith(Path.DirectorySeparatorChar.ToString()))
|
||||
{
|
||||
gamename = gamename.Substring(1);
|
||||
}
|
||||
if (gamename.EndsWith(Path.DirectorySeparatorChar.ToString()))
|
||||
{
|
||||
gamename = gamename.Substring(0, gamename.Length - 1);
|
||||
}
|
||||
if (romname.StartsWith(Path.DirectorySeparatorChar.ToString()))
|
||||
{
|
||||
romname = romname.Substring(1);
|
||||
}
|
||||
if (romname.EndsWith(Path.DirectorySeparatorChar.ToString()))
|
||||
{
|
||||
romname = romname.Substring(0, romname.Length - 1);
|
||||
}
|
||||
|
||||
Globals.Logger.Verbose("Adding blank empty folder: " + gamename);
|
||||
this["null"].Add(new Rom(romname, gamename, omitFromScan));
|
||||
});
|
||||
}
|
||||
}
|
||||
else if (File.Exists(basePath))
|
||||
{
|
||||
PopulateFromDirCheckFile(basePath, Path.GetDirectoryName(Path.GetDirectoryName(basePath)), omitFromScan, bare, archivesAsFiles, enableGzip,
|
||||
skipFileType, addBlanks, addDate, tempDir, copyFiles, headerToCheckAgainst);
|
||||
}
|
||||
|
||||
// Now that we're done, delete the temp folder (if it's not the default)
|
||||
Globals.Logger.User("Cleaning temp folder");
|
||||
if (tempDir != Path.GetTempPath())
|
||||
{
|
||||
FileTools.TryDeleteDirectory(tempDir);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Check a given file for hashes, based on current settings
|
||||
/// </summary>
|
||||
/// <param name="item">Filename of the item to be checked</param>
|
||||
/// <param name="basePath">Base folder to be used in creating the DAT</param>
|
||||
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
|
||||
/// <param name="bare">True if the date should be omitted from the DAT, false otherwise</param>
|
||||
/// <param name="archivesAsFiles">True if archives should be treated as files, false otherwise</param>
|
||||
/// <param name="enableGzip">True if GZIP archives should be treated as files, false otherwise</param>
|
||||
/// <param name="skipFileType">Type of files that should be skipped</param>
|
||||
/// <param name="addBlanks">True if blank items should be created for empty folders, false otherwise</param>
|
||||
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
|
||||
/// <param name="tempDir">Name of the directory to create a temp folder in (blank is current directory)</param>
|
||||
/// <param name="copyFiles">True if files should be copied to the temp directory before hashing, false otherwise</param>
|
||||
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
|
||||
private void PopulateFromDirCheckFile(string item, string basePath, Hash omitFromScan, bool bare, bool archivesAsFiles,
|
||||
bool enableGzip, SkipFileType skipFileType, bool addBlanks, bool addDate, string tempDir, bool copyFiles, string headerToCheckAgainst)
|
||||
{
|
||||
// Define the temporary directory
|
||||
string tempSubDir = Path.GetFullPath(Path.Combine(tempDir, Path.GetRandomFileName())) + Path.DirectorySeparatorChar;
|
||||
|
||||
// Special case for if we are in Romba mode (all names are supposed to be SHA-1 hashes)
|
||||
if (Romba)
|
||||
{
|
||||
Rom rom = ArchiveTools.GetTorrentGZFileInfo(item);
|
||||
|
||||
// If the rom is valid, write it out
|
||||
if (rom != null && rom.Name != null)
|
||||
{
|
||||
// Add the list if it doesn't exist already
|
||||
Add(rom.Size + "-" + rom.CRC, rom);
|
||||
Globals.Logger.User("File added: " + Path.GetFileNameWithoutExtension(item) + Environment.NewLine);
|
||||
}
|
||||
else
|
||||
{
|
||||
Globals.Logger.User("File not added: " + Path.GetFileNameWithoutExtension(item) + Environment.NewLine);
|
||||
return;
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// If we're copying files, copy it first and get the new filename
|
||||
string newItem = item;
|
||||
string newBasePath = basePath;
|
||||
if (copyFiles)
|
||||
{
|
||||
newBasePath = Path.Combine(tempDir, Path.GetRandomFileName());
|
||||
newItem = Path.GetFullPath(Path.Combine(newBasePath, Path.GetFullPath(item).Remove(0, basePath.Length + 1)));
|
||||
Directory.CreateDirectory(Path.GetDirectoryName(newItem));
|
||||
File.Copy(item, newItem, true);
|
||||
}
|
||||
|
||||
// Create a list for all found items
|
||||
List<Rom> extracted = null;
|
||||
|
||||
// Temporarily clear archivesAsFiles for GZip archives that should still be scanned as archives
|
||||
if (archivesAsFiles && !enableGzip && newItem.EndsWith(".gz"))
|
||||
{
|
||||
archivesAsFiles = false;
|
||||
}
|
||||
|
||||
// If we don't have archives as files, try to scan the file as an archive
|
||||
if (!archivesAsFiles)
|
||||
{
|
||||
// If all deep hash skip flags are set, do a quickscan
|
||||
if (omitFromScan == Hash.SecureHashes)
|
||||
{
|
||||
extracted = ArchiveTools.GetArchiveFileInfo(newItem, date: addDate);
|
||||
}
|
||||
// Otherwise, get the list with whatever hashes are wanted
|
||||
else
|
||||
{
|
||||
extracted = ArchiveTools.GetExtendedArchiveFileInfo(newItem, omitFromScan: omitFromScan, date: addDate);
|
||||
}
|
||||
}
|
||||
|
||||
// If the file should be skipped based on type, do so now
|
||||
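// A non-null extracted list means the item was scanned as an archive; null means it is being treated as a plain file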
if ((extracted != null && skipFileType == SkipFileType.Archive)
|
||||
|| (extracted == null && skipFileType == SkipFileType.File))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// If the extracted list is null, just scan the item itself
|
||||
if (extracted == null || archivesAsFiles)
|
||||
{
|
||||
PopulateFromDirProcessFile(newItem, "", newBasePath, omitFromScan, addDate, headerToCheckAgainst);
|
||||
}
|
||||
// Otherwise, add all of the found items
|
||||
else
|
||||
{
|
||||
// First take care of the found items
|
||||
Parallel.ForEach(extracted, Globals.ParallelOptions, rom =>
|
||||
{
|
||||
PopulateFromDirProcessFileHelper(newItem,
|
||||
rom,
|
||||
basePath,
|
||||
(Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item));
|
||||
});
|
||||
|
||||
// Then, if we're looking for blanks, get all of the blank folders and add them
|
||||
if (addBlanks)
|
||||
{
|
||||
List<string> empties = ArchiveTools.GetEmptyFoldersInArchive(newItem);
|
||||
Parallel.ForEach(empties, Globals.ParallelOptions, empty =>
|
||||
{
|
||||
Rom emptyRom = new Rom(Path.Combine(empty, "_"), newItem, omitFromScan);
|
||||
PopulateFromDirProcessFileHelper(newItem,
|
||||
emptyRom,
|
||||
basePath,
|
||||
(Path.GetDirectoryName(Path.GetFullPath(item)) + Path.DirectorySeparatorChar).Remove(0, basePath.Length) + Path.GetFileNameWithoutExtension(item));
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Cue to delete the file if it's a copy
|
||||
if (copyFiles && item != newItem)
|
||||
{
|
||||
FileTools.TryDeleteDirectory(newBasePath);
|
||||
}
|
||||
|
||||
// Delete the sub temp directory
|
||||
FileTools.TryDeleteDirectory(tempSubDir);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Process a single file as a file
|
||||
/// </summary>
|
||||
/// <param name="item">File to be added</param>
|
||||
/// <param name="parent">Parent game to be used</param>
|
||||
/// <param name="basePath">Path the represents the parent directory</param>
|
||||
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated</param>
|
||||
/// <param name="addDate">True if dates should be archived for all files, false otherwise</param>
|
||||
/// <param name="headerToCheckAgainst">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
|
||||
private void PopulateFromDirProcessFile(string item, string parent, string basePath, Hash omitFromScan,
|
||||
bool addDate, string headerToCheckAgainst)
|
||||
{
|
||||
Globals.Logger.Verbose(Path.GetFileName(item) + " treated like a file");
|
||||
Rom rom = FileTools.GetFileInfo(item, omitFromScan: omitFromScan, date: addDate, header: headerToCheckAgainst);
|
||||
|
||||
PopulateFromDirProcessFileHelper(item, rom, basePath, parent);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Process a single file as a file (with found Rom data)
|
||||
/// </summary>
|
||||
/// <param name="item">File to be added</param>
|
||||
/// <param name="item">Rom data to be used to write to file</param>
|
||||
/// <param name="basepath">Path the represents the parent directory</param>
|
||||
/// <param name="parent">Parent game to be used</param>
|
||||
private void PopulateFromDirProcessFileHelper(string item, DatItem datItem, string basepath, string parent)
|
||||
{
|
||||
// If the datItem isn't a Rom or Disk, return
|
||||
if (datItem.Type != ItemType.Rom && datItem.Type != ItemType.Disk)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
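// Roms are keyed by "<size>-<CRC>", disks by their MD5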
string key = "";
|
||||
if (datItem.Type == ItemType.Rom)
|
||||
{
|
||||
key = ((Rom)datItem).Size + "-" + ((Rom)datItem).CRC;
|
||||
}
|
||||
else
|
||||
{
|
||||
key = ((Disk)datItem).MD5;
|
||||
}
|
||||
|
||||
// Add the list if it doesn't exist already
|
||||
Add(key);
|
||||
|
||||
try
|
||||
{
|
||||
// If the basepath ends with a directory separator, remove it
|
||||
if (!basepath.EndsWith(Path.DirectorySeparatorChar.ToString()))
|
||||
{
|
||||
basepath += Path.DirectorySeparatorChar.ToString();
|
||||
}
|
||||
|
||||
// Make sure we have the full item path
|
||||
item = Path.GetFullPath(item);
|
||||
|
||||
// Get the data to be added as game and item names
|
||||
string gamename = "";
|
||||
string romname = "";
|
||||
|
||||
// If the parent is blank, then we have a non-archive file
|
||||
if (parent == "")
|
||||
{
|
||||
// If we have a SuperDAT, we want anything that's not the base path as the game, and the file as the rom
|
||||
if (Type == "SuperDAT")
|
||||
{
|
||||
gamename = Path.GetDirectoryName(item.Remove(0, basepath.Length));
|
||||
romname = Path.GetFileName(item);
|
||||
}
|
||||
|
||||
// Otherwise, we want just the top level folder as the game, and the file as everything else
|
||||
else
|
||||
{
|
||||
gamename = item.Remove(0, basepath.Length).Split(Path.DirectorySeparatorChar)[0];
|
||||
romname = item.Remove(0, (Path.Combine(basepath, gamename).Length));
|
||||
}
|
||||
}
|
||||
|
||||
// Otherwise, we assume that we have an archive
|
||||
else
|
||||
{
|
||||
// If we have a SuperDAT, we want the archive name as the game, and the file as everything else (?)
|
||||
if (Type == "SuperDAT")
|
||||
{
|
||||
gamename = parent;
|
||||
romname = datItem.Name;
|
||||
}
|
||||
|
||||
// Otherwise, we want the archive name as the game, and the file as everything else
|
||||
else
|
||||
{
|
||||
gamename = parent;
|
||||
romname = datItem.Name;
|
||||
}
|
||||
}
|
||||
|
||||
// Sanitize the names
|
||||
if (romname == null)
|
||||
{
|
||||
romname = "";
|
||||
}
|
||||
if (gamename.StartsWith(Path.DirectorySeparatorChar.ToString()))
|
||||
{
|
||||
gamename = gamename.Substring(1);
|
||||
}
|
||||
if (gamename.EndsWith(Path.DirectorySeparatorChar.ToString()))
|
||||
{
|
||||
gamename = gamename.Substring(0, gamename.Length - 1);
|
||||
}
|
||||
if (romname.StartsWith(Path.DirectorySeparatorChar.ToString()))
|
||||
{
|
||||
romname = romname.Substring(1);
|
||||
}
|
||||
if (romname.EndsWith(Path.DirectorySeparatorChar.ToString()))
|
||||
{
|
||||
romname = romname.Substring(0, romname.Length - 1);
|
||||
}
|
||||
if (!String.IsNullOrEmpty(gamename) && String.IsNullOrEmpty(romname))
|
||||
{
|
||||
romname = gamename;
|
||||
gamename = "Default";
|
||||
}
|
||||
|
||||
// Update rom information
|
||||
datItem.Name = romname;
|
||||
if (datItem.Machine == null)
|
||||
{
|
||||
datItem.Machine = new Machine
|
||||
{
|
||||
Name = gamename,
|
||||
Description = gamename,
|
||||
};
|
||||
}
|
||||
else
|
||||
{
|
||||
datItem.Machine.Name = gamename;
|
||||
datItem.Machine.Description = gamename;
|
||||
}
|
||||
|
||||
// Add the file information to the DAT
|
||||
Add(key, datItem);
|
||||
|
||||
Globals.Logger.User("File added: " + romname + Environment.NewLine);
|
||||
}
|
||||
catch (IOException ex)
|
||||
{
|
||||
Globals.Logger.Error(ex.ToString());
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
1065
SabreTools.Library/Dats/Partials/DatFile.Manipulate.cs
Normal file
File diff suppressed because it is too large
2984
SabreTools.Library/Dats/Partials/DatFile.Parsers.cs
Normal file
File diff suppressed because it is too large
1039
SabreTools.Library/Dats/Partials/DatFile.Rebuild.cs
Normal file
File diff suppressed because it is too large
638
SabreTools.Library/Dats/Partials/DatFile.Splitters.cs
Normal file
@@ -0,0 +1,638 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using System.Web;
|
||||
|
||||
using SabreTools.Helper.Data;
|
||||
using SabreTools.Helper.Tools;
|
||||
|
||||
#if MONO
|
||||
using System.IO;
|
||||
#else
|
||||
using Alphaleonis.Win32.Filesystem;
|
||||
#endif
|
||||
using NaturalSort;
|
||||
|
||||
namespace SabreTools.Helper.Dats
|
||||
{
|
||||
public partial class DatFile
|
||||
{
|
||||
#region Splitting
|
||||
|
||||
/// <summary>
|
||||
/// Split a DAT by input extensions
|
||||
/// </summary>
|
||||
/// <param name="outDir">Name of the directory to write the DATs out to</param>
|
||||
/// <param name="basepath">Parent path for replacement</param>
|
||||
/// <param name="extA">List of extensions to split on (first DAT)</param>
|
||||
/// <param name="extB">List of extensions to split on (second DAT)</param>
|
||||
/// <returns>True if split succeeded, false otherwise</returns>
|
||||
public bool SplitByExt(string outDir, string basepath, List<string> extA, List<string> extB)
|
||||
{
|
||||
// Make sure all of the extensions have a dot at the beginning
|
||||
List<string> newExtA = new List<string>();
|
||||
Parallel.ForEach(extA, Globals.ParallelOptions, s =>
|
||||
{
|
||||
lock (newExtA)
|
||||
{
|
||||
newExtA.Add((s.StartsWith(".") ? s : "." + s).ToUpperInvariant());
|
||||
}
|
||||
});
|
||||
string newExtAString = string.Join(",", newExtA);
|
||||
|
||||
List<string> newExtB = new List<string>();
|
||||
Parallel.ForEach(extB, Globals.ParallelOptions, s =>
|
||||
{
|
||||
lock (newExtB)
|
||||
{
|
||||
newExtB.Add((s.StartsWith(".") ? s : "." + s).ToUpperInvariant());
|
||||
}
|
||||
});
|
||||
string newExtBString = string.Join(",", newExtB);
|
||||
|
||||
// Set all of the appropriate outputs for each of the subsets
|
||||
DatFile datdataA = new DatFile
|
||||
{
|
||||
FileName = this.FileName + " (" + newExtAString + ")",
|
||||
Name = this.Name + " (" + newExtAString + ")",
|
||||
Description = this.Description + " (" + newExtAString + ")",
|
||||
Category = this.Category,
|
||||
Version = this.Version,
|
||||
Date = this.Date,
|
||||
Author = this.Author,
|
||||
Email = this.Email,
|
||||
Homepage = this.Homepage,
|
||||
Url = this.Url,
|
||||
Comment = this.Comment,
|
||||
DatFormat = this.DatFormat,
|
||||
};
|
||||
DatFile datdataB = new DatFile
|
||||
{
|
||||
FileName = this.FileName + " (" + newExtBString + ")",
|
||||
Name = this.Name + " (" + newExtBString + ")",
|
||||
Description = this.Description + " (" + newExtBString + ")",
|
||||
Category = this.Category,
|
||||
Version = this.Version,
|
||||
Date = this.Date,
|
||||
Author = this.Author,
|
||||
Email = this.Email,
|
||||
Homepage = this.Homepage,
|
||||
Url = this.Url,
|
||||
Comment = this.Comment,
|
||||
DatFormat = this.DatFormat,
|
||||
};
|
||||
|
||||
// If roms is empty, return false
|
||||
if (Count == 0)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// Now separate the roms accordingly
|
||||
List<string> keys = Keys.ToList();
|
||||
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
|
||||
{
|
||||
List<DatItem> items = this[key];
|
||||
Parallel.ForEach(items, Globals.ParallelOptions, item =>
|
||||
{
|
||||
if (newExtA.Contains(Path.GetExtension(item.Name.ToUpperInvariant())))
|
||||
{
|
||||
datdataA.Add(key, item);
|
||||
}
|
||||
else if (newExtB.Contains(Path.GetExtension(item.Name.ToUpperInvariant())))
|
||||
{
|
||||
datdataB.Add(key, item);
|
||||
}
|
||||
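// Items matching neither extension list are added to both output DATs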
else
|
||||
{
|
||||
datdataA.Add(key, item);
|
||||
datdataB.Add(key, item);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// Get the output directory
|
||||
if (outDir != "")
|
||||
{
|
||||
outDir = outDir + Path.GetDirectoryName(this.FileName).Remove(0, basepath.Length - 1);
|
||||
}
|
||||
else
|
||||
{
|
||||
outDir = Path.GetDirectoryName(this.FileName);
|
||||
}
|
||||
|
||||
// Then write out both files
|
||||
bool success = datdataA.WriteToFile(outDir);
|
||||
success &= datdataB.WriteToFile(outDir);
|
||||
|
||||
return success;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Split a DAT by best available hashes
|
||||
/// </summary>
|
||||
/// <param name="outDir">Name of the directory to write the DATs out to</param>
|
||||
/// <param name="basepath">Parent path for replacement</param>
|
||||
/// <returns>True if split succeeded, false otherwise</returns>
|
||||
public bool SplitByHash(string outDir, string basepath)
|
||||
{
|
||||
// Sanitize the basepath to be more predictable
|
||||
basepath = (basepath.EndsWith(Path.DirectorySeparatorChar.ToString()) ? basepath : basepath + Path.DirectorySeparatorChar);
|
||||
|
||||
// Create each of the respective output DATs
|
||||
Globals.Logger.User("Creating and populating new DATs");
|
||||
DatFile nodump = new DatFile
|
||||
{
|
||||
FileName = this.FileName + " (Nodump)",
|
||||
Name = this.Name + " (Nodump)",
|
||||
Description = this.Description + " (Nodump)",
|
||||
Category = this.Category,
|
||||
Version = this.Version,
|
||||
Date = this.Date,
|
||||
Author = this.Author,
|
||||
Email = this.Email,
|
||||
Homepage = this.Homepage,
|
||||
Url = this.Url,
|
||||
Comment = this.Comment,
|
||||
Header = this.Header,
|
||||
Type = this.Type,
|
||||
ForceMerging = this.ForceMerging,
|
||||
ForceNodump = this.ForceNodump,
|
||||
ForcePacking = this.ForcePacking,
|
||||
DatFormat = this.DatFormat,
|
||||
MergeRoms = this.MergeRoms,
|
||||
};
|
||||
DatFile sha512 = new DatFile
|
||||
{
|
||||
FileName = this.FileName + " (SHA-512)",
|
||||
Name = this.Name + " (SHA-512)",
|
||||
Description = this.Description + " (SHA-512)",
|
||||
Category = this.Category,
|
||||
Version = this.Version,
|
||||
Date = this.Date,
|
||||
Author = this.Author,
|
||||
Email = this.Email,
|
||||
Homepage = this.Homepage,
|
||||
Url = this.Url,
|
||||
Comment = this.Comment,
|
||||
Header = this.Header,
|
||||
Type = this.Type,
|
||||
ForceMerging = this.ForceMerging,
|
||||
ForceNodump = this.ForceNodump,
|
||||
ForcePacking = this.ForcePacking,
|
||||
DatFormat = this.DatFormat,
|
||||
MergeRoms = this.MergeRoms,
|
||||
};
|
||||
DatFile sha384 = new DatFile
|
||||
{
|
||||
FileName = this.FileName + " (SHA-384)",
|
||||
Name = this.Name + " (SHA-384)",
|
||||
Description = this.Description + " (SHA-384)",
|
||||
Category = this.Category,
|
||||
Version = this.Version,
|
||||
Date = this.Date,
|
||||
Author = this.Author,
|
||||
Email = this.Email,
|
||||
Homepage = this.Homepage,
|
||||
Url = this.Url,
|
||||
Comment = this.Comment,
|
||||
Header = this.Header,
|
||||
Type = this.Type,
|
||||
ForceMerging = this.ForceMerging,
|
||||
ForceNodump = this.ForceNodump,
|
||||
ForcePacking = this.ForcePacking,
|
||||
DatFormat = this.DatFormat,
|
||||
MergeRoms = this.MergeRoms,
|
||||
};
|
||||
DatFile sha256 = new DatFile
|
||||
{
|
||||
FileName = this.FileName + " (SHA-256)",
|
||||
Name = this.Name + " (SHA-256)",
|
||||
Description = this.Description + " (SHA-256)",
|
||||
Category = this.Category,
|
||||
Version = this.Version,
|
||||
Date = this.Date,
|
||||
Author = this.Author,
|
||||
Email = this.Email,
|
||||
Homepage = this.Homepage,
|
||||
Url = this.Url,
|
||||
Comment = this.Comment,
|
||||
Header = this.Header,
|
||||
Type = this.Type,
|
||||
ForceMerging = this.ForceMerging,
|
||||
ForceNodump = this.ForceNodump,
|
||||
ForcePacking = this.ForcePacking,
|
||||
DatFormat = this.DatFormat,
|
||||
MergeRoms = this.MergeRoms,
|
||||
};
|
||||
DatFile sha1 = new DatFile
|
||||
{
|
||||
FileName = this.FileName + " (SHA-1)",
|
||||
Name = this.Name + " (SHA-1)",
|
||||
Description = this.Description + " (SHA-1)",
|
||||
Category = this.Category,
|
||||
Version = this.Version,
|
||||
Date = this.Date,
|
||||
Author = this.Author,
|
||||
Email = this.Email,
|
||||
Homepage = this.Homepage,
|
||||
Url = this.Url,
|
||||
Comment = this.Comment,
|
||||
Header = this.Header,
|
||||
Type = this.Type,
|
||||
ForceMerging = this.ForceMerging,
|
||||
ForceNodump = this.ForceNodump,
|
||||
ForcePacking = this.ForcePacking,
|
||||
DatFormat = this.DatFormat,
|
||||
MergeRoms = this.MergeRoms,
|
||||
};
|
||||
DatFile md5 = new DatFile
|
||||
{
|
||||
FileName = this.FileName + " (MD5)",
|
||||
Name = this.Name + " (MD5)",
|
||||
Description = this.Description + " (MD5)",
|
||||
Category = this.Category,
|
||||
Version = this.Version,
|
||||
Date = this.Date,
|
||||
Author = this.Author,
|
||||
Email = this.Email,
|
||||
Homepage = this.Homepage,
|
||||
Url = this.Url,
|
||||
Comment = this.Comment,
|
||||
Header = this.Header,
|
||||
Type = this.Type,
|
||||
ForceMerging = this.ForceMerging,
|
||||
ForceNodump = this.ForceNodump,
|
||||
ForcePacking = this.ForcePacking,
|
||||
DatFormat = this.DatFormat,
|
||||
MergeRoms = this.MergeRoms,
|
||||
};
|
||||
DatFile crc = new DatFile
|
||||
{
|
||||
FileName = this.FileName + " (CRC)",
|
||||
Name = this.Name + " (CRC)",
|
||||
Description = this.Description + " (CRC)",
|
||||
Category = this.Category,
|
||||
Version = this.Version,
|
||||
Date = this.Date,
|
||||
Author = this.Author,
|
||||
Email = this.Email,
|
||||
Homepage = this.Homepage,
|
||||
Url = this.Url,
|
||||
Comment = this.Comment,
|
||||
Header = this.Header,
|
||||
Type = this.Type,
|
||||
ForceMerging = this.ForceMerging,
|
||||
ForceNodump = this.ForceNodump,
|
||||
ForcePacking = this.ForcePacking,
|
||||
DatFormat = this.DatFormat,
|
||||
MergeRoms = this.MergeRoms,
|
||||
};
|
||||
DatFile other = new DatFile
|
||||
{
|
||||
FileName = this.FileName + " (Other)",
|
||||
Name = this.Name + " (Other)",
|
||||
Description = this.Description + " (Other)",
|
||||
Category = this.Category,
|
||||
Version = this.Version,
|
||||
Date = this.Date,
|
||||
Author = this.Author,
|
||||
Email = this.Email,
|
||||
Homepage = this.Homepage,
|
||||
Url = this.Url,
|
||||
Comment = this.Comment,
|
||||
Header = this.Header,
|
||||
Type = this.Type,
|
||||
ForceMerging = this.ForceMerging,
|
||||
ForceNodump = this.ForceNodump,
|
||||
ForcePacking = this.ForcePacking,
|
||||
DatFormat = this.DatFormat,
|
||||
MergeRoms = this.MergeRoms,
|
||||
};
|
||||
|
||||
// Now populate each of the DAT objects in turn
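// Each item goes into exactly one output, chosen by its best available hash (nodump first, then SHA-512 down to CRC)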
|
||||
List<string> keys = Keys.ToList();
|
||||
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
|
||||
{
|
||||
List<DatItem> items = this[key];
|
||||
Parallel.ForEach(items, Globals.ParallelOptions, item =>
|
||||
{
|
||||
// If the file is not a Rom or Disk, continue
|
||||
if (item.Type != ItemType.Disk && item.Type != ItemType.Rom)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// If the file is a nodump
|
||||
if ((item.Type == ItemType.Rom && ((Rom)item).ItemStatus == ItemStatus.Nodump)
|
||||
|| (item.Type == ItemType.Disk && ((Disk)item).ItemStatus == ItemStatus.Nodump))
|
||||
{
|
||||
nodump.Add(key, item);
|
||||
}
|
||||
// If the file has a SHA-512
|
||||
else if ((item.Type == ItemType.Rom && !String.IsNullOrEmpty(((Rom)item).SHA512))
|
||||
|| (item.Type == ItemType.Disk && !String.IsNullOrEmpty(((Disk)item).SHA512)))
|
||||
{
|
||||
sha512.Add(key, item);
|
||||
}
|
||||
// If the file has a SHA-384
|
||||
else if ((item.Type == ItemType.Rom && !String.IsNullOrEmpty(((Rom)item).SHA384))
|
||||
|| (item.Type == ItemType.Disk && !String.IsNullOrEmpty(((Disk)item).SHA384)))
|
||||
{
|
||||
sha384.Add(key, item);
|
||||
}
|
||||
// If the file has a SHA-256
|
||||
else if ((item.Type == ItemType.Rom && !String.IsNullOrEmpty(((Rom)item).SHA256))
|
||||
|| (item.Type == ItemType.Disk && !String.IsNullOrEmpty(((Disk)item).SHA256)))
|
||||
{
|
||||
sha256.Add(key, item);
|
||||
}
|
||||
// If the file has a SHA-1
|
||||
else if ((item.Type == ItemType.Rom && !String.IsNullOrEmpty(((Rom)item).SHA1))
|
||||
|| (item.Type == ItemType.Disk && !String.IsNullOrEmpty(((Disk)item).SHA1)))
|
||||
{
|
||||
sha1.Add(key, item);
|
||||
}
|
||||
// If the file has no SHA-1 but has an MD5
|
||||
else if ((item.Type == ItemType.Rom && !String.IsNullOrEmpty(((Rom)item).MD5))
|
||||
|| (item.Type == ItemType.Disk && !String.IsNullOrEmpty(((Disk)item).MD5)))
|
||||
{
|
||||
md5.Add(key, item);
|
||||
}
|
||||
// If the file has no MD5 but a CRC
|
||||
else if (item.Type == ItemType.Rom && !String.IsNullOrEmpty(((Rom)item).CRC))
|
||||
{
|
||||
crc.Add(key, item);
|
||||
}
|
||||
else
|
||||
{
|
||||
other.Add(key, item);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// Get the output directory
|
||||
if (outDir != "")
|
||||
{
|
||||
outDir = outDir + Path.GetDirectoryName(this.FileName).Remove(0, basepath.Length - 1);
|
||||
}
|
||||
else
|
||||
{
|
||||
outDir = Path.GetDirectoryName(this.FileName);
|
||||
}
|
||||
|
||||
// Now, output all of the files to the output directory
|
||||
Globals.Logger.User("DAT information created, outputting new files");
|
||||
bool success = true;
|
||||
success &= nodump.WriteToFile(outDir);
|
||||
success &= sha512.WriteToFile(outDir);
|
||||
success &= sha384.WriteToFile(outDir);
|
||||
success &= sha256.WriteToFile(outDir);
|
||||
success &= sha1.WriteToFile(outDir);
|
||||
success &= md5.WriteToFile(outDir);
|
||||
success &= crc.WriteToFile(outDir);
|
||||
|
||||
return success;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Split a SuperDAT by lowest available directory level
|
||||
/// </summary>
|
||||
/// <param name="outDir">Name of the directory to write the DATs out to</param>
|
||||
/// <param name="basepath">Parent path for replacement</param>
|
||||
/// <param name="shortname">True if short names should be used, false otherwise</param>
|
||||
/// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise</param>
|
||||
/// <returns>True if split succeeded, false otherwise</returns>
|
||||
public bool SplitByLevel(string outDir, string basepath, bool shortname, bool basedat)
|
||||
{
|
||||
// Sanitize the basepath to be more predictable
|
||||
basepath = (basepath.EndsWith(Path.DirectorySeparatorChar.ToString()) ? basepath : basepath + Path.DirectorySeparatorChar);
|
||||
|
||||
// First, organize by games so that we can do the right thing
|
||||
BucketBy(SortedBy.Game, false /* mergeroms */, lower: false, norename: true);
|
||||
|
||||
// Create a temporary DAT to add things to
|
||||
DatFile tempDat = new DatFile(this)
|
||||
{
|
||||
Name = null,
|
||||
};
|
||||
|
||||
// Sort the input keys
|
||||
List<string> keys = Keys.ToList();
|
||||
keys.Sort(SplitByLevelSort);
|
||||
|
||||
// Then, we loop over the games
|
||||
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
|
||||
{
|
||||
// Here, the key is the name of the game to be used for comparison
|
||||
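// When the parent directory changes, write out the accumulated DAT and start a fresh one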
if (tempDat.Name != null && tempDat.Name != Style.GetDirectoryName(key))
|
||||
{
|
||||
// Process and output the DAT
|
||||
SplitByLevelHelper(tempDat, outDir, shortname, basedat);
|
||||
|
||||
// Reset the DAT for the next items
|
||||
tempDat = new DatFile(this)
|
||||
{
|
||||
Name = null,
|
||||
};
|
||||
}
|
||||
|
||||
// Clean the input list and set all games to be pathless
|
||||
List<DatItem> items = this[key];
|
||||
items.ForEach(item => item.Machine.Name = Style.GetFileName(item.Machine.Name));
|
||||
items.ForEach(item => item.Machine.Description = Style.GetFileName(item.Machine.Description));
|
||||
|
||||
// Now add the game to the output DAT
|
||||
tempDat.AddRange(key, items);
|
||||
|
||||
// Then set the DAT name to be the parent directory name
|
||||
tempDat.Name = Style.GetDirectoryName(key);
|
||||
});
|
||||
|
||||
// Then we write the last DAT out since it would be skipped otherwise
|
||||
SplitByLevelHelper(tempDat, outDir, shortname, basedat);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Helper function for SplitByLevel to sort the input game names
|
||||
/// </summary>
|
||||
/// <param name="a">First string to compare</param>
|
||||
/// <param name="b">Second string to compare</param>
|
||||
/// <returns>-1 for a coming before b, 0 for a == b, 1 for a coming after b</returns>
|
||||
private int SplitByLevelSort(string a, string b)
|
||||
{
|
||||
NaturalComparer nc = new NaturalComparer();
|
||||
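// Shallower paths sort before deeper ones; equal depths fall back to natural string comparison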
int adeep = a.Count(c => c == '/' || c == '\\');
|
||||
int bdeep = b.Count(c => c == '/' || c == '\\');
|
||||
|
||||
if (adeep == bdeep)
|
||||
{
|
||||
return nc.Compare(a, b);
|
||||
}
|
||||
return adeep - bdeep;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Helper function for SplitByLevel to clean and write out a DAT
|
||||
/// </summary>
|
||||
/// <param name="datFile">DAT to clean and write out</param>
|
||||
/// <param name="outDir">Directory to write out to</param>
|
||||
/// <param name="shortname">True if short naming scheme should be used, false otherwise</param>
|
||||
/// <param name="restore">True if original filenames should be used as the base for output filename, false otherwise</param>
|
||||
private void SplitByLevelHelper(DatFile datFile, string outDir, bool shortname, bool restore)
|
||||
{
|
||||
// Get the name from the DAT to use separately
|
||||
string name = datFile.Name;
|
||||
string expName = name.Replace("/", " - ").Replace("\\", " - ");
|
||||
|
||||
// Get the path that the file will be written out to
|
||||
string path = HttpUtility.HtmlDecode(String.IsNullOrEmpty(name)
|
||||
? outDir
|
||||
: Path.Combine(outDir, name));
|
||||
|
||||
// Now set the new output values
|
||||
datFile.FileName = HttpUtility.HtmlDecode(String.IsNullOrEmpty(name)
|
||||
? FileName
|
||||
: (shortname
|
||||
? Style.GetFileName(name)
|
||||
: expName
|
||||
)
|
||||
);
|
||||
datFile.FileName = (restore ? FileName + " (" + datFile.FileName + ")" : datFile.FileName);
|
||||
datFile.Name = Name + " (" + expName + ")";
|
||||
datFile.Description = (String.IsNullOrEmpty(Description) ? datFile.Name : Description + " (" + expName + ")");
|
||||
datFile.Type = null;
|
||||
|
||||
// Write out the temporary DAT to the proper directory
|
||||
datFile.WriteToFile(path);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Split a DAT by type of Rom
|
||||
/// </summary>
|
||||
/// <param name="outDir">Name of the directory to write the DATs out to</param>
|
||||
/// <param name="basepath">Parent path for replacement</param>
|
||||
/// <returns>True if split succeeded, false otherwise</returns>
|
||||
public bool SplitByType(string outDir, string basepath)
|
||||
{
|
||||
// Sanitize the basepath to be more predictable
|
||||
basepath = (basepath.EndsWith(Path.DirectorySeparatorChar.ToString()) ? basepath : basepath + Path.DirectorySeparatorChar);
|
||||
|
||||
// Create each of the respective output DATs
|
||||
Globals.Logger.User("Creating and populating new DATs");
|
||||
DatFile romdat = new DatFile
|
||||
{
|
||||
FileName = this.FileName + " (ROM)",
|
||||
Name = this.Name + " (ROM)",
|
||||
Description = this.Description + " (ROM)",
|
||||
Category = this.Category,
|
||||
Version = this.Version,
|
||||
Date = this.Date,
|
||||
Author = this.Author,
|
||||
Email = this.Email,
|
||||
Homepage = this.Homepage,
|
||||
Url = this.Url,
|
||||
Comment = this.Comment,
|
||||
Header = this.Header,
|
||||
Type = this.Type,
|
||||
ForceMerging = this.ForceMerging,
|
||||
ForceNodump = this.ForceNodump,
|
||||
ForcePacking = this.ForcePacking,
|
||||
DatFormat = this.DatFormat,
|
||||
MergeRoms = this.MergeRoms,
|
||||
};
|
||||
DatFile diskdat = new DatFile
|
||||
{
|
||||
FileName = this.FileName + " (Disk)",
|
||||
Name = this.Name + " (Disk)",
|
||||
Description = this.Description + " (Disk)",
|
||||
Category = this.Category,
|
||||
Version = this.Version,
|
||||
Date = this.Date,
|
||||
Author = this.Author,
|
||||
Email = this.Email,
|
||||
Homepage = this.Homepage,
|
||||
Url = this.Url,
|
||||
Comment = this.Comment,
|
||||
Header = this.Header,
|
||||
Type = this.Type,
|
||||
ForceMerging = this.ForceMerging,
|
||||
ForceNodump = this.ForceNodump,
|
||||
ForcePacking = this.ForcePacking,
|
||||
DatFormat = this.DatFormat,
|
||||
MergeRoms = this.MergeRoms,
|
||||
};
|
||||
DatFile sampledat = new DatFile
|
||||
{
|
||||
FileName = this.FileName + " (Sample)",
|
||||
Name = this.Name + " (Sample)",
|
||||
Description = this.Description + " (Sample)",
|
||||
Category = this.Category,
|
||||
Version = this.Version,
|
||||
Date = this.Date,
|
||||
Author = this.Author,
|
||||
Email = this.Email,
|
||||
Homepage = this.Homepage,
|
||||
Url = this.Url,
|
||||
Comment = this.Comment,
|
||||
Header = this.Header,
|
||||
Type = this.Type,
|
||||
ForceMerging = this.ForceMerging,
|
||||
ForceNodump = this.ForceNodump,
|
||||
ForcePacking = this.ForcePacking,
|
||||
DatFormat = this.DatFormat,
|
||||
MergeRoms = this.MergeRoms,
|
||||
};
|
||||
|
||||
// Now populate each of the DAT objects in turn
|
||||
List<string> keys = Keys.ToList();
|
||||
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
|
||||
{
|
||||
List<DatItem> items = this[key];
|
||||
Parallel.ForEach(items, Globals.ParallelOptions, item =>
|
||||
{
|
||||
// If the file is a Rom
|
||||
if (item.Type == ItemType.Rom)
|
||||
{
|
||||
romdat.Add(key, item);
|
||||
}
|
||||
// If the file is a Disk
|
||||
else if (item.Type == ItemType.Disk)
|
||||
{
|
||||
diskdat.Add(key, item);
|
||||
}
|
||||
// If the file is a Sample
|
||||
else if (item.Type == ItemType.Sample)
|
||||
{
|
||||
sampledat.Add(key, item);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// Get the output directory
|
||||
if (outDir != "")
|
||||
{
|
||||
outDir = outDir + Path.GetDirectoryName(this.FileName).Remove(0, basepath.Length - 1);
|
||||
}
|
||||
else
|
||||
{
|
||||
outDir = Path.GetDirectoryName(this.FileName);
|
||||
}
|
||||
|
||||
// Now, output all of the files to the output directory
|
||||
Globals.Logger.User("DAT information created, outputting new files");
|
||||
bool success = true;
|
||||
success &= romdat.WriteToFile(outDir);
|
||||
success &= diskdat.WriteToFile(outDir);
|
||||
success &= sampledat.WriteToFile(outDir);
|
||||
|
||||
return success;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
752
SabreTools.Library/Dats/Partials/DatFile.Statistics.cs
Normal file
@@ -0,0 +1,752 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using System.Web;
|
||||
|
||||
using SabreTools.Helper.Data;
|
||||
using SabreTools.Helper.Tools;
|
||||
|
||||
#if MONO
|
||||
using System.IO;
|
||||
#else
|
||||
using Alphaleonis.Win32.Filesystem;
|
||||
|
||||
using SearchOption = System.IO.SearchOption;
|
||||
using StreamWriter = System.IO.StreamWriter;
|
||||
#endif
|
||||
|
||||
namespace SabreTools.Helper.Dats
|
||||
{
|
||||
/*
|
||||
* TODO: Make output standard width (HTML, without making the entire thing a table)
|
||||
* TODO: Multithreading? Either StringBuilder or locking
|
||||
*/
|
||||
public partial class DatFile
|
||||
{
|
||||
#region Instance Methods
|
||||
|
||||
#region Statistics
|
||||
|
||||
/// <summary>
|
||||
/// Recalculate the statistics for the Dat
|
||||
/// </summary>
|
||||
public void RecalculateStats()
|
||||
{
|
||||
// Wipe out any stats already there
|
||||
RomCount = 0;
|
||||
DiskCount = 0;
|
||||
TotalSize = 0;
|
||||
CRCCount = 0;
|
||||
MD5Count = 0;
|
||||
SHA1Count = 0;
|
||||
SHA256Count = 0;
|
||||
BaddumpCount = 0;
|
||||
NodumpCount = 0;
|
||||
|
||||
// If we have a blank Dat in any way, return
|
||||
if (this == null || Count == 0)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// Loop through and add
|
||||
List<string> keys = Keys.ToList();
|
||||
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
|
||||
{
|
||||
List<DatItem> items = this[key];
|
||||
Parallel.ForEach(items, Globals.ParallelOptions, item =>
|
||||
{
|
||||
switch (item.Type)
|
||||
{
|
||||
case ItemType.Archive:
|
||||
break;
|
||||
case ItemType.BiosSet:
|
||||
break;
|
||||
case ItemType.Disk:
|
||||
Disk disk = (Disk)item;
|
||||
DiskCount += 1;
|
||||
MD5Count += (String.IsNullOrEmpty(disk.MD5) ? 0 : 1);
|
||||
SHA1Count += (String.IsNullOrEmpty(disk.SHA1) ? 0 : 1);
|
||||
SHA256Count += (String.IsNullOrEmpty(disk.SHA256) ? 0 : 1);
|
||||
SHA384Count += (String.IsNullOrEmpty(disk.SHA384) ? 0 : 1);
|
||||
SHA512Count += (String.IsNullOrEmpty(disk.SHA512) ? 0 : 1);
|
||||
BaddumpCount += (disk.ItemStatus == ItemStatus.BadDump ? 1 : 0);
|
||||
NodumpCount += (disk.ItemStatus == ItemStatus.Nodump ? 1 : 0);
|
||||
break;
|
||||
case ItemType.Release:
|
||||
break;
|
||||
case ItemType.Rom:
|
||||
Rom rom = (Rom)item;
|
||||
RomCount += 1;
|
||||
TotalSize += (rom.ItemStatus == ItemStatus.Nodump ? 0 : rom.Size);
|
||||
CRCCount += (String.IsNullOrEmpty(rom.CRC) ? 0 : 1);
|
||||
MD5Count += (String.IsNullOrEmpty(rom.MD5) ? 0 : 1);
|
||||
SHA1Count += (String.IsNullOrEmpty(rom.SHA1) ? 0 : 1);
|
||||
SHA256Count += (String.IsNullOrEmpty(rom.SHA256) ? 0 : 1);
|
||||
SHA384Count += (String.IsNullOrEmpty(rom.SHA384) ? 0 : 1);
|
||||
SHA512Count += (String.IsNullOrEmpty(rom.SHA512) ? 0 : 1);
|
||||
BaddumpCount += (rom.ItemStatus == ItemStatus.BadDump ? 1 : 0);
|
||||
NodumpCount += (rom.ItemStatus == ItemStatus.Nodump ? 1 : 0);
|
||||
break;
|
||||
case ItemType.Sample:
|
||||
break;
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Output the stats for the Dat in a human-readable format
|
||||
/// </summary>
|
||||
/// <param name="outputs">Dictionary representing the outputs</param>
|
||||
/// <param name="statDatFormat">Set the statistics output format to use</param>
|
||||
/// <param name="recalculate">True if numbers should be recalculated for the DAT, false otherwise (default)</param>
|
||||
/// <param name="game">Number of games to use, -1 means recalculate games (default)</param>
|
||||
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise (default)</param>
|
||||
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise (default)</param>
|
||||
public void OutputStats(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat,
|
||||
bool recalculate = false, long game = -1, bool baddumpCol = false, bool nodumpCol = false)
|
||||
{
|
||||
// If we're supposed to recalculate the statistics, do so
|
||||
if (recalculate)
|
||||
{
|
||||
RecalculateStats();
|
||||
}
|
||||
|
||||
BucketBy(SortedBy.Game, false /* mergeroms */, norename: true);
|
||||
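// A negative total means the 64-bit size counter wrapped; fold it back into the positive range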
if (TotalSize < 0)
|
||||
{
|
||||
TotalSize = Int64.MaxValue + TotalSize;
|
||||
}
|
||||
|
||||
// Log the results to screen
|
||||
string results = @"For '" + FileName + @"':
|
||||
--------------------------------------------------
|
||||
Uncompressed size: " + Style.GetBytesReadable(TotalSize) + @"
|
||||
Games found: " + (game == -1 ? Keys.Count() : game) + @"
|
||||
Roms found: " + RomCount + @"
|
||||
Disks found: " + DiskCount + @"
|
||||
Roms with CRC: " + CRCCount + @"
|
||||
Roms with MD5: " + MD5Count + @"
|
||||
Roms with SHA-1: " + SHA1Count + @"
|
||||
Roms with SHA-256: " + SHA256Count + @"
|
||||
Roms with SHA-384: " + SHA384Count + @"
|
||||
Roms with SHA-512: " + SHA512Count + "\n";
|
||||
|
||||
if (baddumpCol)
|
||||
{
|
||||
results += " Roms with BadDump status: " + BaddumpCount + "\n";
|
||||
}
|
||||
if (nodumpCol)
|
||||
{
|
||||
results += " Roms with Nodump status: " + NodumpCount + "\n";
|
||||
}
|
||||
|
||||
// For spacing between DATs
|
||||
results += "\n\n";
|
||||
|
||||
Globals.Logger.User(results);
|
||||
|
||||
// Now write it out to file as well
|
||||
string line = "";
|
||||
if (outputs.ContainsKey(StatDatFormat.None))
|
||||
{
|
||||
line = @"'" + FileName + @"':
|
||||
--------------------------------------------------
|
||||
Uncompressed size: " + Style.GetBytesReadable(TotalSize) + @"
|
||||
Games found: " + (game == -1 ? Keys.Count() : game) + @"
|
||||
Roms found: " + RomCount + @"
|
||||
Disks found: " + DiskCount + @"
|
||||
    Roms with CRC: " + CRCCount + @"
    Roms with MD5: " + MD5Count + @"
|
||||
Roms with SHA-1: " + SHA1Count + @"
|
||||
Roms with SHA-256: " + SHA256Count + @"
|
||||
Roms with SHA-384: " + SHA384Count + @"
|
||||
Roms with SHA-512: " + SHA512Count + "\n";
|
||||
|
||||
if (baddumpCol)
|
||||
{
|
||||
line += " Roms with BadDump status: " + BaddumpCount + "\n";
|
||||
}
|
||||
if (nodumpCol)
|
||||
{
|
||||
line += " Roms with Nodump status: " + NodumpCount + "\n";
|
||||
}
|
||||
|
||||
// For spacing between DATs
|
||||
line += "\n\n";
|
||||
|
||||
outputs[StatDatFormat.None].Write(line);
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.CSV))
|
||||
{
|
||||
line = "\"" + FileName + "\","
|
||||
+ "\"" + TotalSize + "\","
|
||||
+ "\"" + (game == -1 ? Keys.Count() : game) + "\","
|
||||
+ "\"" + RomCount + "\","
|
||||
+ "\"" + DiskCount + "\","
|
||||
+ "\"" + CRCCount + "\","
|
||||
+ "\"" + MD5Count + "\","
|
||||
+ "\"" + SHA1Count + "\","
|
||||
+ "\"" + SHA256Count + "\","
|
||||
+ "\"" + SHA384Count + "\","
|
||||
+ "\"" + SHA512Count + "\"";
|
||||
|
||||
if (baddumpCol)
|
||||
{
|
||||
line += ",\"" + BaddumpCount + "\"";
|
||||
}
|
||||
if (nodumpCol)
|
||||
{
|
||||
line += ",\"" + NodumpCount + "\"";
|
||||
}
|
||||
|
||||
line += "\n";
|
||||
outputs[StatDatFormat.CSV].Write(line);
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.HTML))
|
||||
{
|
||||
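// Directory summary rows are marked with a "DIR: " filename prefix; strip it and tag the row with the "dir" class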
line = "\t\t\t<tr" + (FileName.StartsWith("DIR: ")
|
||||
? " class=\"dir\"><td>" + HttpUtility.HtmlEncode(FileName.Remove(0, 5))
|
||||
: "><td>" + HttpUtility.HtmlEncode(FileName)) + "</td>"
|
||||
+ "<td align=\"right\">" + Style.GetBytesReadable(TotalSize) + "</td>"
|
||||
+ "<td align=\"right\">" + (game == -1 ? Keys.Count() : game) + "</td>"
|
||||
+ "<td align=\"right\">" + RomCount + "</td>"
|
||||
+ "<td align=\"right\">" + DiskCount + "</td>"
|
||||
+ "<td align=\"right\">" + CRCCount + "</td>"
|
||||
+ "<td align=\"right\">" + MD5Count + "</td>"
|
||||
+ "<td align=\"right\">" + SHA1Count + "</td>"
|
||||
+ "<td align=\"right\">" + SHA256Count + "</td>";
|
||||
|
||||
if (baddumpCol)
|
||||
{
|
||||
line += "<td align=\"right\">" + BaddumpCount + "</td>";
|
||||
}
|
||||
if (nodumpCol)
|
||||
{
|
||||
line += "<td align=\"right\">" + NodumpCount + "</td>";
|
||||
}
|
||||
|
||||
line += "</tr>\n";
|
||||
outputs[StatDatFormat.HTML].Write(line);
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.TSV))
|
||||
{
|
||||
line = "\"" + FileName + "\"\t"
|
||||
+ "\"" + TotalSize + "\"\t"
|
||||
+ "\"" + (game == -1 ? Keys.Count() : game) + "\"\t"
|
||||
+ "\"" + RomCount + "\"\t"
|
||||
+ "\"" + DiskCount + "\"\t"
|
||||
+ "\"" + CRCCount + "\"\t"
|
||||
+ "\"" + MD5Count + "\"\t"
|
||||
+ "\"" + SHA1Count + "\"\t"
|
||||
+ "\"" + SHA256Count + "\"\t"
|
||||
+ "\"" + SHA384Count + "\"\t"
|
||||
+ "\"" + SHA512Count + "\"";
|
||||
|
||||
if (baddumpCol)
|
||||
{
|
||||
line += "\t\"" + BaddumpCount + "\"";
|
||||
}
|
||||
if (nodumpCol)
|
||||
{
|
||||
line += "\t\"" + NodumpCount + "\"";
|
||||
}
|
||||
|
||||
line += "\n";
|
||||
outputs[StatDatFormat.TSV].Write(line);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#endregion // Instance Methods
|
||||
|
||||
#region Static Methods
|
||||
|
||||
#region Statistics
|
||||
|
||||
/// <summary>
|
||||
/// Output the stats for a list of input dats as files in a human-readable format
|
||||
/// </summary>
|
||||
/// <param name="inputs">List of input files and folders</param>
|
||||
/// <param name="reportName">Name of the output file</param>
|
||||
/// <param name="single">True if single DAT stats are output, false otherwise</param>
|
||||
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
|
||||
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
|
||||
/// <param name="statDatFormat" > Set the statistics output format to use</param>
|
||||
public static void OutputStats(List<string> inputs, string reportName, string outDir, bool single,
|
||||
bool baddumpCol, bool nodumpCol, StatDatFormat statDatFormat)
|
||||
{
|
||||
// If there's no output format, set the default
|
||||
if (statDatFormat == 0x0)
|
||||
{
|
||||
statDatFormat = StatDatFormat.None;
|
||||
}
|
||||
|
||||
// Get the proper output file name
|
||||
if (String.IsNullOrEmpty(outDir))
|
||||
{
|
||||
outDir = Environment.CurrentDirectory;
|
||||
}
|
||||
if (String.IsNullOrEmpty(reportName))
|
||||
{
|
||||
reportName = "report";
|
||||
}
|
||||
outDir = Path.GetFullPath(outDir);
|
||||
|
||||
// Get the dictionary of desired outputs
|
||||
Dictionary<StatDatFormat, StreamWriter> outputs = OutputStatsGetOutputWriters(statDatFormat, reportName, outDir);
|
||||
|
||||
// Make sure we have all files
|
||||
List<Tuple<string, string>> newinputs = new List<Tuple<string, string>>(); // item, basepath
|
||||
Parallel.ForEach(inputs, Globals.ParallelOptions, input =>
|
||||
{
|
||||
if (File.Exists(input))
|
||||
{
|
||||
lock (newinputs)
|
||||
{
|
||||
newinputs.Add(Tuple.Create(Path.GetFullPath(input), Path.GetDirectoryName(Path.GetFullPath(input))));
|
||||
}
|
||||
}
|
||||
if (Directory.Exists(input))
|
||||
{
|
||||
foreach (string file in Directory.GetFiles(input, "*", SearchOption.AllDirectories))
|
||||
{
|
||||
lock (newinputs)
|
||||
{
|
||||
newinputs.Add(Tuple.Create(Path.GetFullPath(file), Path.GetFullPath(input)));
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
newinputs = newinputs
|
||||
.OrderBy(i => Path.GetDirectoryName(i.Item1))
|
||||
.ThenBy(i => Path.GetFileName(i.Item1))
|
||||
.ToList();
|
||||
|
||||
// Write the header, if any
|
||||
OutputStatsWriteHeader(outputs, statDatFormat, baddumpCol, nodumpCol);
|
||||
|
||||
// Init all total variables
|
||||
long totalSize = 0;
|
||||
long totalGame = 0;
|
||||
long totalRom = 0;
|
||||
long totalDisk = 0;
|
||||
long totalCRC = 0;
|
||||
long totalMD5 = 0;
|
||||
long totalSHA1 = 0;
|
||||
long totalSHA256 = 0;
|
||||
long totalBaddump = 0;
|
||||
long totalNodump = 0;
|
||||
|
||||
// Init directory-level variables
|
||||
string lastdir = null;
|
||||
string basepath = null;
|
||||
long dirSize = 0;
|
||||
long dirGame = 0;
|
||||
long dirRom = 0;
|
||||
long dirDisk = 0;
|
||||
long dirCRC = 0;
|
||||
long dirMD5 = 0;
|
||||
long dirSHA1 = 0;
|
||||
long dirSHA256 = 0;
|
||||
long dirBaddump = 0;
|
||||
long dirNodump = 0;
|
||||
|
||||
// Now process each of the input files
|
||||
foreach (Tuple<string, string> filename in newinputs)
|
||||
{
|
||||
// Get the directory for the current file
|
||||
string thisdir = Path.GetDirectoryName(filename.Item1);
|
||||
basepath = Path.GetDirectoryName(filename.Item2);
|
||||
|
||||
// If we don't have the first file and the directory has changed, show the previous directory stats and reset
|
||||
if (lastdir != null && thisdir != lastdir)
|
||||
{
|
||||
// Output separator if needed
|
||||
OutputStatsWriteMidSeparator(outputs, statDatFormat, baddumpCol, nodumpCol);
|
||||
|
||||
DatFile lastdirdat = new DatFile
|
||||
{
|
||||
FileName = "DIR: " + HttpUtility.HtmlEncode(lastdir.Remove(0, basepath.Length + (basepath.Length == 0 ? 0 : 1))),
|
||||
TotalSize = dirSize,
|
||||
RomCount = dirRom,
|
||||
DiskCount = dirDisk,
|
||||
CRCCount = dirCRC,
|
||||
MD5Count = dirMD5,
|
||||
SHA1Count = dirSHA1,
|
||||
SHA256Count = dirSHA256,
|
||||
BaddumpCount = dirBaddump,
|
||||
NodumpCount = dirNodump,
|
||||
};
|
||||
lastdirdat.OutputStats(outputs, statDatFormat,
|
||||
game: dirGame, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
|
||||
|
||||
// Write the mid-footer, if any
|
||||
OutputStatsWriteMidFooter(outputs, statDatFormat, baddumpCol, nodumpCol);
|
||||
|
||||
// Write the header, if any
|
||||
OutputStatsWriteMidHeader(outputs, statDatFormat, baddumpCol, nodumpCol);
|
||||
|
||||
// Reset the directory stats
|
||||
dirSize = 0;
|
||||
dirGame = 0;
|
||||
dirRom = 0;
|
||||
dirDisk = 0;
|
||||
dirCRC = 0;
|
||||
dirMD5 = 0;
|
||||
dirSHA1 = 0;
|
||||
dirSHA256 = 0;
|
||||
dirBaddump = 0;
|
||||
dirNodump = 0;
|
||||
}
|
||||
|
||||
Globals.Logger.Verbose("Beginning stat collection for '" + filename.Item1 + "'", false);
|
||||
List<string> games = new List<string>();
|
||||
DatFile datdata = new DatFile();
|
||||
datdata.Parse(filename.Item1, 0, 0);
|
||||
datdata.BucketBy(SortedBy.Game, false /* mergeroms */, norename: true);
|
||||
|
||||
// Output single DAT stats (if asked)
|
||||
Globals.Logger.User("Adding stats for file '" + filename.Item1 + "'\n", false);
|
||||
if (single)
|
||||
{
|
||||
datdata.OutputStats(outputs, statDatFormat,
|
||||
baddumpCol: baddumpCol, nodumpCol: nodumpCol);
|
||||
}
|
||||
|
||||
// Add single DAT stats to dir
|
||||
dirSize += datdata.TotalSize;
|
||||
dirGame += datdata.Keys.Count();
|
||||
dirRom += datdata.RomCount;
|
||||
dirDisk += datdata.DiskCount;
|
||||
dirCRC += datdata.CRCCount;
|
||||
dirMD5 += datdata.MD5Count;
|
||||
dirSHA1 += datdata.SHA1Count;
|
||||
dirSHA256 += datdata.SHA256Count;
|
||||
dirBaddump += datdata.BaddumpCount;
|
||||
dirNodump += datdata.NodumpCount;
|
||||
|
||||
// Add single DAT stats to totals
|
||||
totalSize += datdata.TotalSize;
|
||||
totalGame += datdata.Keys.Count();
|
||||
totalRom += datdata.RomCount;
|
||||
totalDisk += datdata.DiskCount;
|
||||
totalCRC += datdata.CRCCount;
|
||||
totalMD5 += datdata.MD5Count;
|
||||
totalSHA1 += datdata.SHA1Count;
|
||||
totalSHA256 += datdata.SHA256Count;
|
||||
totalBaddump += datdata.BaddumpCount;
|
||||
totalNodump += datdata.NodumpCount;
|
||||
|
||||
// Make sure to assign the new directory
|
||||
lastdir = thisdir;
|
||||
}
|
||||
|
||||
// Output the directory stats one last time
|
||||
OutputStatsWriteMidSeparator(outputs, statDatFormat, baddumpCol, nodumpCol);
|
||||
|
||||
if (single)
|
||||
{
|
||||
DatFile dirdat = new DatFile
|
||||
{
|
||||
FileName = "DIR: " + HttpUtility.HtmlEncode(lastdir.Remove(0, basepath.Length + (basepath.Length == 0 ? 0 : 1))),
|
||||
TotalSize = dirSize,
|
||||
RomCount = dirRom,
|
||||
DiskCount = dirDisk,
|
||||
CRCCount = dirCRC,
|
||||
MD5Count = dirMD5,
|
||||
SHA1Count = dirSHA1,
|
||||
SHA256Count = dirSHA256,
|
||||
BaddumpCount = dirBaddump,
|
||||
NodumpCount = dirNodump,
|
||||
};
|
||||
dirdat.OutputStats(outputs, statDatFormat,
|
||||
game: dirGame, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
|
||||
}
|
||||
|
||||
// Write the mid-footer, if any
|
||||
OutputStatsWriteMidFooter(outputs, statDatFormat, baddumpCol, nodumpCol);
|
||||
|
||||
// Write the header, if any
|
||||
OutputStatsWriteMidHeader(outputs, statDatFormat, baddumpCol, nodumpCol);
|
||||
|
||||
// Reset the directory stats
|
||||
dirSize = 0;
|
||||
dirGame = 0;
|
||||
dirRom = 0;
|
||||
dirDisk = 0;
|
||||
dirCRC = 0;
|
||||
dirMD5 = 0;
|
||||
dirSHA1 = 0;
|
||||
dirSHA256 = 0;
|
||||
dirBaddump = 0;
dirNodump = 0;
|
||||
|
||||
// Output total DAT stats
|
||||
DatFile totaldata = new DatFile
|
||||
{
|
||||
FileName = "DIR: All DATs",
|
||||
TotalSize = totalSize,
|
||||
RomCount = totalRom,
|
||||
DiskCount = totalDisk,
|
||||
CRCCount = totalCRC,
|
||||
MD5Count = totalMD5,
|
||||
SHA1Count = totalSHA1,
|
||||
SHA256Count = totalSHA256,
|
||||
BaddumpCount = totalBaddump,
|
||||
NodumpCount = totalNodump,
|
||||
};
|
||||
totaldata.OutputStats(outputs, statDatFormat,
|
||||
game: totalGame, baddumpCol: baddumpCol, nodumpCol: nodumpCol);
|
||||
|
||||
// Output footer if needed
|
||||
OutputStatsWriteFooter(outputs, statDatFormat);
|
||||
|
||||
// Flush and dispose of the stream writers
|
||||
foreach (StatDatFormat format in outputs.Keys)
|
||||
{
|
||||
outputs[format].Flush();
|
||||
outputs[format].Dispose();
|
||||
}
|
||||
|
||||
Globals.Logger.User(@"
|
||||
Please check the log folder if the stats scrolled offscreen", false);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Get the set of stream writers for the selected stat output formats
/// </summary>
/// <param name="statDatFormat">StatDatFormat to get the output writers for</param>
/// <param name="reportName">Name of the report file to use</param>
|
||||
/// <param name="outDir">Output path to use</param>
|
||||
/// <returns>Dictionary of file types to StreamWriters</returns>
|
||||
private static Dictionary<StatDatFormat, StreamWriter> OutputStatsGetOutputWriters(StatDatFormat statDatFormat, string reportName, string outDir)
|
||||
{
|
||||
Dictionary<StatDatFormat, StreamWriter> output = new Dictionary<StatDatFormat, StreamWriter>();
|
||||
|
||||
// First try to create the output directory if we need to
|
||||
if (!Directory.Exists(outDir))
|
||||
{
|
||||
Directory.CreateDirectory(outDir);
|
||||
}
|
||||
|
||||
// For each output format, get the appropriate stream writer
|
||||
if ((statDatFormat & StatDatFormat.None) != 0)
|
||||
{
|
||||
reportName = Style.GetFileNameWithoutExtension(reportName) + ".txt";
|
||||
reportName = Path.Combine(outDir, reportName);
|
||||
|
||||
// Create the StreamWriter for this file
|
||||
output.Add(StatDatFormat.None, new StreamWriter(FileTools.TryCreate(reportName)));
|
||||
}
|
||||
if ((statDatFormat & StatDatFormat.CSV) != 0)
|
||||
{
|
||||
reportName = Style.GetFileNameWithoutExtension(reportName) + ".csv";
|
||||
reportName = Path.Combine(outDir, reportName);
|
||||
|
||||
// Create the StreamWriter for this file
|
||||
output.Add(StatDatFormat.CSV, new StreamWriter(FileTools.TryCreate(reportName)));
|
||||
}
|
||||
if ((statDatFormat & StatDatFormat.HTML) != 0)
|
||||
{
|
||||
reportName = Style.GetFileNameWithoutExtension(reportName) + ".html";
|
||||
reportName = Path.Combine(outDir, reportName);
|
||||
|
||||
// Create the StreamWriter for this file
|
||||
output.Add(StatDatFormat.HTML, new StreamWriter(FileTools.TryCreate(reportName)));
|
||||
}
|
||||
if ((statDatFormat & StatDatFormat.TSV) != 0)
|
||||
{
|
||||
reportName = Style.GetFileNameWithoutExtension(reportName) + ".tsv";
|
||||
reportName = Path.Combine(outDir, reportName);
|
||||
|
||||
// Create the StreamWriter for this file
|
||||
output.Add(StatDatFormat.TSV, new StreamWriter(FileTools.TryCreate(reportName)));
|
||||
}
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Write out the header to the stream, if any exists
|
||||
/// </summary>
|
||||
/// <param name="outputs">Dictionary representing the outputs</param>
|
||||
/// <param name="statDatFormat">StatDatFormat representing output format</param>
|
||||
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
|
||||
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
|
||||
private static void OutputStatsWriteHeader(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat, bool baddumpCol, bool nodumpCol)
|
||||
{
|
||||
if (outputs.ContainsKey(StatDatFormat.None))
|
||||
{
|
||||
// Nothing
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.CSV))
|
||||
{
|
||||
outputs[StatDatFormat.CSV].Write("\"File Name\",\"Total Size\",\"Games\",\"Roms\",\"Disks\",\"# with CRC\",\"# with MD5\",\"# with SHA-1\",\"# with SHA-256\""
|
||||
+ (baddumpCol ? ",\"BadDumps\"" : "") + (nodumpCol ? ",\"Nodumps\"" : "") + "\n");
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.HTML))
|
||||
{
|
||||
outputs[StatDatFormat.HTML].Write(@"<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>DAT Statistics Report</title>
|
||||
<style>
|
||||
body {
|
||||
background-color: lightgray;
|
||||
}
|
||||
.dir {
|
||||
color: #0088FF;
|
||||
}
|
||||
.right {
|
||||
text-align: right;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<h2>DAT Statistics Report (" + DateTime.Now.ToShortDateString() + @")</h2>
|
||||
<table border=""1"" cellpadding=""5"" cellspacing=""0"">
|
||||
");
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.TSV))
|
||||
{
|
||||
outputs[StatDatFormat.TSV].Write("\"File Name\"\t\"Total Size\"\t\"Games\"\t\"Roms\"\t\"Disks\"\t\"# with CRC\"\t\"# with MD5\"\t\"# with SHA-1\"\t\"# with SHA-256\""
|
||||
+ (baddumpCol ? "\t\"BadDumps\"" : "") + (nodumpCol ? "\t\"Nodumps\"" : "") + "\n");
|
||||
}
|
||||
|
||||
// Now write the mid header for those who need it
|
||||
OutputStatsWriteMidHeader(outputs, statDatFormat, baddumpCol, nodumpCol);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Write out the mid-header to the stream, if any exists
|
||||
/// </summary>
|
||||
/// <param name="outputs">Dictionary representing the outputs</param>
|
||||
/// <param name="statDatFormat">StatDatFormat representing output format</param>
|
||||
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
|
||||
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
|
||||
private static void OutputStatsWriteMidHeader(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat, bool baddumpCol, bool nodumpCol)
|
||||
{
|
||||
if (outputs.ContainsKey(StatDatFormat.None))
|
||||
{
|
||||
// Nothing
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.CSV))
|
||||
{
|
||||
// Nothing
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.HTML))
|
||||
{
|
||||
outputs[StatDatFormat.HTML].Write(@" <tr bgcolor=""gray""><th>File Name</th><th align=""right"">Total Size</th><th align=""right"">Games</th><th align=""right"">Roms</th>"
|
||||
+ @"<th align=""right"">Disks</th><th align=""right""># with CRC</th><th align=""right""># with MD5</th><th align=""right""># with SHA-1</th><th align=""right""># with SHA-256</th>"
|
||||
+ (baddumpCol ? "<th class=\".right\">Baddumps</th>" : "") + (nodumpCol ? "<th class=\".right\">Nodumps</th>" : "") + "</tr>\n");
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.TSV))
|
||||
{
|
||||
// Nothing
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Write out the separator to the stream, if any exists
|
||||
/// </summary>
|
||||
/// <param name="outputs">Dictionary representing the outputs</param>
|
||||
/// <param name="statDatFormat">StatDatFormat representing output format</param>
|
||||
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
|
||||
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
|
||||
private static void OutputStatsWriteMidSeparator(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat, bool baddumpCol, bool nodumpCol)
|
||||
{
|
||||
if (outputs.ContainsKey(StatDatFormat.None))
|
||||
{
|
||||
// Nothing
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.CSV))
|
||||
{
|
||||
// Nothing
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.HTML))
|
||||
{
|
||||
outputs[StatDatFormat.HTML].Write("<tr><td colspan=\""
|
||||
+ (baddumpCol && nodumpCol
|
||||
? "12"
|
||||
: (baddumpCol ^ nodumpCol
|
||||
? "11"
|
||||
: "10")
|
||||
)
|
||||
+ "\"></td></tr>\n");
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.TSV))
|
||||
{
|
||||
// Nothing
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Write out the footer-separator to the stream, if any exists
|
||||
/// </summary>
|
||||
/// <param name="outputs">Dictionary representing the outputs</param>
|
||||
/// <param name="statDatFormat">StatDatFormat representing output format</param>
|
||||
/// <param name="baddumpCol">True if baddumps should be included in output, false otherwise</param>
|
||||
/// <param name="nodumpCol">True if nodumps should be included in output, false otherwise</param>
|
||||
private static void OutputStatsWriteMidFooter(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat, bool baddumpCol, bool nodumpCol)
|
||||
{
|
||||
if (outputs.ContainsKey(StatDatFormat.None))
|
||||
{
|
||||
outputs[StatDatFormat.None].Write("\n");
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.CSV))
|
||||
{
|
||||
outputs[StatDatFormat.CSV].Write("\n");
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.HTML))
|
||||
{
|
||||
outputs[StatDatFormat.HTML].Write("<tr border=\"0\"><td colspan=\""
|
||||
+ (baddumpCol && nodumpCol
|
||||
? "12"
|
||||
: (baddumpCol ^ nodumpCol
|
||||
? "11"
|
||||
: "10")
|
||||
)
|
||||
+ "\"></td></tr>\n");
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.TSV))
|
||||
{
|
||||
outputs[StatDatFormat.TSV].Write("\n");
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Write out the footer to the stream, if any exists
|
||||
/// </summary>
|
||||
/// <param name="sw">StreamWriter representing the output</param>
|
||||
/// <param name="statDatFormat">StatDatFormat representing output format</param>
|
||||
private static void OutputStatsWriteFooter(Dictionary<StatDatFormat, StreamWriter> outputs, StatDatFormat statDatFormat)
|
||||
{
|
||||
if (outputs.ContainsKey(StatDatFormat.None))
|
||||
{
|
||||
// Nothing
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.CSV))
|
||||
{
|
||||
// Nothing
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.HTML))
|
||||
{
|
||||
outputs[StatDatFormat.HTML].Write(@" </table>
|
||||
</body>
|
||||
</html>
|
||||
");
|
||||
}
|
||||
if (outputs.ContainsKey(StatDatFormat.TSV))
|
||||
{
|
||||
// Nothing
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#endregion // Static Methods
|
||||
}
|
||||
}
|
||||
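As a quick illustration of the static OutputStats entry point above, here is a minimal calling sketch. The paths, report name, and format combination are invented for the example, and it assumes the usual usings (System.Collections.Generic, SabreTools.Helper.Data, SabreTools.Helper.Dats) plus StatDatFormat being a flags enum that can be OR-ed together, as the bitwise checks in the method suggest.

// Hypothetical values; only the OutputStats signature is taken from the code above
List<string> inputs = new List<string> { @"C:\dats\set-a", @"C:\dats\set-b" };
DatFile.OutputStats(inputs, "report", @"C:\reports", single: true,
    baddumpCol: true, nodumpCol: false, statDatFormat: StatDatFormat.HTML | StatDatFormat.TSV);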
1587
SabreTools.Library/Dats/Partials/DatFile.Writers.cs
Normal file
File diff suppressed because it is too large
118
SabreTools.Library/Dats/Release.cs
Normal file
@@ -0,0 +1,118 @@
|
||||
using System;
|
||||
|
||||
using SabreTools.Helper.Data;
|
||||
|
||||
namespace SabreTools.Helper.Dats
|
||||
{
|
||||
public class Release : DatItem, ICloneable
|
||||
{
|
||||
#region Private instance variables
|
||||
|
||||
private string _region;
|
||||
private string _language;
|
||||
private string _date;
|
||||
private bool? _default;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Publicly facing variables
|
||||
|
||||
public string Region
|
||||
{
|
||||
get { return _region; }
|
||||
set { _region = value; }
|
||||
}
|
||||
public string Language
|
||||
{
|
||||
get { return _language; }
|
||||
set { _language = value; }
|
||||
}
|
||||
public string Date
|
||||
{
|
||||
get { return _date; }
|
||||
set { _date = value; }
|
||||
}
|
||||
public bool? Default
|
||||
{
|
||||
get { return _default; }
|
||||
set { _default = value; }
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Constructors
|
||||
|
||||
/// <summary>
|
||||
/// Create a default, empty Release object
|
||||
/// </summary>
|
||||
public Release()
|
||||
{
|
||||
_name = "";
|
||||
_itemType = ItemType.Release;
|
||||
_region = "";
|
||||
_language = "";
|
||||
_date = "";
|
||||
_default = null;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Cloning Methods
|
||||
|
||||
public object Clone()
|
||||
{
|
||||
return new Release()
|
||||
{
|
||||
Name = this.Name,
|
||||
Type = this.Type,
|
||||
Dupe = this.Dupe,
|
||||
|
||||
Machine = this.Machine,
|
||||
|
||||
Supported = this.Supported,
|
||||
Publisher = this.Publisher,
|
||||
Infos = this.Infos,
|
||||
PartName = this.PartName,
|
||||
PartInterface = this.PartInterface,
|
||||
Features = this.Features,
|
||||
AreaName = this.AreaName,
|
||||
AreaSize = this.AreaSize,
|
||||
|
||||
SystemID = this.SystemID,
|
||||
System = this.System,
|
||||
SourceID = this.SourceID,
|
||||
Source = this.Source,
|
||||
|
||||
Region = this.Region,
|
||||
Language = this.Language,
|
||||
Date = this.Date,
|
||||
Default = this.Default,
|
||||
};
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Comparison Methods
|
||||
|
||||
public override bool Equals(DatItem other)
|
||||
{
|
||||
// If we don't have a release, return false
|
||||
if (_itemType != other.Type)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// Otherwise, treat it as a release
|
||||
Release newOther = (Release)other;
|
||||
|
||||
// If the release information matches
|
||||
return (_name == newOther.Name
|
||||
&& _region == newOther.Region
|
||||
&& _language == newOther.Language
|
||||
&& _date == newOther.Date
|
||||
&& _default == newOther.Default);
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
177
SabreTools.Library/Dats/Rom.cs
Normal file
@@ -0,0 +1,177 @@
|
||||
using System;
|
||||
|
||||
using SabreTools.Helper.Data;
|
||||
|
||||
namespace SabreTools.Helper.Dats
|
||||
{
|
||||
public class Rom : Disk, ICloneable
|
||||
{
|
||||
#region Private instance variables
|
||||
|
||||
// Rom information
|
||||
protected long _size;
|
||||
protected string _crc;
|
||||
private string _date;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Publicly facing variables
|
||||
|
||||
// Rom information
|
||||
public long Size
|
||||
{
|
||||
get { return _size; }
|
||||
set { _size = value; }
|
||||
}
|
||||
public string CRC
|
||||
{
|
||||
get { return _crc; }
|
||||
set { _crc = value; }
|
||||
}
|
||||
public string Date
|
||||
{
|
||||
get { return _date; }
|
||||
set { _date = value; }
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Constructors
|
||||
|
||||
/// <summary>
|
||||
/// Create a default, empty Rom object
|
||||
/// </summary>
|
||||
public Rom()
|
||||
{
|
||||
_name = "";
|
||||
_itemType = ItemType.Rom;
|
||||
_dupeType = 0x00;
|
||||
_itemStatus = ItemStatus.None;
|
||||
_date = "";
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Create a "blank" Rom object
|
||||
/// </summary>
|
||||
/// <param name="name"></param>
|
||||
/// <param name="machineName"></param>
|
||||
/// <param name="omitFromScan"></param>
|
||||
/// <remarks>TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually</remarks>
|
||||
public Rom(string name, string machineName, Hash omitFromScan = Hash.DeepHashes)
|
||||
{
|
||||
_name = name;
|
||||
_itemType = ItemType.Rom;
|
||||
_size = -1;
|
||||
if ((omitFromScan & Hash.CRC) == 0)
|
||||
{
|
||||
_crc = "null";
|
||||
}
|
||||
if ((omitFromScan & Hash.MD5) == 0)
|
||||
{
|
||||
_md5 = "null";
|
||||
}
|
||||
if ((omitFromScan & Hash.SHA1) == 0)
|
||||
{
|
||||
_sha1 = "null";
|
||||
}
|
||||
if ((omitFromScan & Hash.SHA256) == 0)
|
||||
{
|
||||
_sha256 = "null";
|
||||
}
|
||||
if ((omitFromScan & Hash.SHA384) == 0)
|
||||
{
|
||||
_sha384 = "null";
|
||||
}
|
||||
if ((omitFromScan & Hash.SHA512) == 0)
|
||||
{
|
||||
_sha512 = "null";
|
||||
}
|
||||
_itemStatus = ItemStatus.None;
|
||||
|
||||
_machine = new Machine
|
||||
{
|
||||
Name = machineName,
|
||||
Description = machineName,
|
||||
};
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Cloning Methods
|
||||
|
||||
public new object Clone()
|
||||
{
|
||||
return new Rom()
|
||||
{
|
||||
Name = this.Name,
|
||||
Type = this.Type,
|
||||
Dupe = this.Dupe,
|
||||
|
||||
Machine = this.Machine,
|
||||
|
||||
Supported = this.Supported,
|
||||
Publisher = this.Publisher,
|
||||
Infos = this.Infos,
|
||||
PartName = this.PartName,
|
||||
PartInterface = this.PartInterface,
|
||||
Features = this.Features,
|
||||
AreaName = this.AreaName,
|
||||
AreaSize = this.AreaSize,
|
||||
|
||||
SystemID = this.SystemID,
|
||||
System = this.System,
|
||||
SourceID = this.SourceID,
|
||||
Source = this.Source,
|
||||
|
||||
MD5 = this.MD5,
|
||||
SHA1 = this.SHA1,
|
||||
SHA256 = this.SHA256,
|
||||
SHA384 = this.SHA384,
|
||||
SHA512 = this.SHA512,
|
||||
ItemStatus = this.ItemStatus,
|
||||
Size = this.Size,
|
||||
CRC = this.CRC,
|
||||
Date = this.Date,
|
||||
};
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Comparison Methods
|
||||
|
||||
public override bool Equals(DatItem other)
|
||||
{
|
||||
bool dupefound = false;
|
||||
|
||||
// If we don't have a rom, return false
|
||||
if (_itemType != other.Type)
|
||||
{
|
||||
return dupefound;
|
||||
}
|
||||
|
||||
// Otherwise, treat it as a rom
|
||||
Rom newOther = (Rom)other;
|
||||
|
||||
// If either is a nodump, it's never a match
|
||||
if (_itemStatus == ItemStatus.Nodump || newOther.ItemStatus == ItemStatus.Nodump)
|
||||
{
|
||||
return dupefound;
|
||||
}
|
||||
|
||||
if ((this.Size == newOther.Size)
|
||||
&& ((String.IsNullOrEmpty(this.CRC) || String.IsNullOrEmpty(newOther.CRC)) || this.CRC == newOther.CRC)
|
||||
&& ((String.IsNullOrEmpty(this.MD5) || String.IsNullOrEmpty(newOther.MD5)) || this.MD5 == newOther.MD5)
|
||||
&& ((String.IsNullOrEmpty(this.SHA1) || String.IsNullOrEmpty(newOther.SHA1)) || this.SHA1 == newOther.SHA1)
|
||||
&& ((String.IsNullOrEmpty(this.SHA256) || String.IsNullOrEmpty(newOther.SHA256)) || this.SHA256 == newOther.SHA256)
|
||||
&& ((String.IsNullOrEmpty(this.SHA384) || String.IsNullOrEmpty(newOther.SHA384)) || this.SHA384 == newOther.SHA384)
|
||||
&& ((String.IsNullOrEmpty(this.SHA512) || String.IsNullOrEmpty(newOther.SHA512)) || this.SHA512 == newOther.SHA512))
|
||||
{
|
||||
dupefound = true;
|
||||
}
|
||||
|
||||
return dupefound;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
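A small sketch of how the duplicate check above behaves, assuming the SabreTools.Helper.Dats namespace is in scope; the names, size, and CRC are invented purely for illustration.

// Two entries with equal size and CRC count as duplicates; hashes missing on
// either side are skipped rather than treated as a mismatch
Rom a = new Rom { Name = "game.bin", Size = 1024, CRC = "1b2c3d4e" };
Rom b = new Rom { Name = "game (alt).bin", Size = 1024, CRC = "1b2c3d4e" };
bool dupe = a.Equals(b); // true; the name itself is not part of the comparison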
71
SabreTools.Library/Dats/Sample.cs
Normal file
@@ -0,0 +1,71 @@
|
||||
using System;
|
||||
|
||||
using SabreTools.Helper.Data;
|
||||
|
||||
namespace SabreTools.Helper.Dats
|
||||
{
|
||||
public class Sample : DatItem, ICloneable
|
||||
{
|
||||
#region Constructors
|
||||
|
||||
/// <summary>
|
||||
/// Create a default, empty Sample object
|
||||
/// </summary>
|
||||
public Sample()
|
||||
{
|
||||
_name = "";
|
||||
_itemType = ItemType.Sample;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Cloning Methods
|
||||
|
||||
public object Clone()
|
||||
{
|
||||
return new Sample()
|
||||
{
|
||||
Name = this.Name,
|
||||
Type = this.Type,
|
||||
Dupe = this.Dupe,
|
||||
|
||||
Machine = this.Machine,
|
||||
|
||||
Supported = this.Supported,
|
||||
Publisher = this.Publisher,
|
||||
Infos = this.Infos,
|
||||
PartName = this.PartName,
|
||||
PartInterface = this.PartInterface,
|
||||
Features = this.Features,
|
||||
AreaName = this.AreaName,
|
||||
AreaSize = this.AreaSize,
|
||||
|
||||
SystemID = this.SystemID,
|
||||
System = this.System,
|
||||
SourceID = this.SourceID,
|
||||
Source = this.Source,
|
||||
};
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Comparison Methods
|
||||
|
||||
public override bool Equals(DatItem other)
|
||||
{
|
||||
// If we don't have a sample, return false
|
||||
if (_itemType != other.Type)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// Otherwise, treat it as a sample
|
||||
Sample newOther = (Sample)other;
|
||||
|
||||
// If the sample information matches
|
||||
return (_name == newOther.Name);
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
113
SabreTools.Library/External/CoreRarArchive.cs
vendored
Normal file
@@ -0,0 +1,113 @@
|
||||
using System.Collections.Generic;
|
||||
|
||||
using SabreTools.Helper.Data;
|
||||
|
||||
/// <summary>
|
||||
/// http://www.rarlab.com/technote.htm#srvheaders
|
||||
/// </summary>
|
||||
namespace SabreTools.Helper.Tools
|
||||
{
|
||||
public class CoreRarArchive
|
||||
{
|
||||
// SFX Module Information
|
||||
public byte[] SFX;
|
||||
|
||||
// Standard Header Information
|
||||
public uint HeaderCRC32;
|
||||
public uint HeaderSize; // vint
|
||||
public RarHeaderFlags HeaderFlags; // vint
|
||||
public uint ExtraAreaSize; // vint
|
||||
public RarArchiveFlags ArchiveFlags; // vint
|
||||
public uint VolumeNumber; // vint
|
||||
public byte[] ExtraArea;
|
||||
|
||||
// Encryption Header Information
|
||||
public uint EncryptionHeaderCRC32;
|
||||
public uint EncryptionHeaderSize; // vint
|
||||
public RarHeaderFlags EncryptionHeaderFlags; // vint
|
||||
public uint EncryptionVersion; // vint
|
||||
public uint EncryptionFlags; // vint
|
||||
public byte KDFCount;
|
||||
public byte[] Salt = new byte[16];
|
||||
public byte[] CheckValue = new byte[12];
|
||||
|
||||
// Locator Information
|
||||
public uint LocatorSize; // vint
|
||||
public uint LocatorFlags; // vint
|
||||
public uint QuickOpenOffset; // vint
|
||||
public uint RecoveryRecordOffset; // vint
|
||||
|
||||
// Entry Information
|
||||
public List<CoreRarArchiveEntry> Entries = new List<CoreRarArchiveEntry>();
|
||||
}
|
||||
|
||||
public class CoreRarArchiveEntry
|
||||
{
|
||||
// Standard Entry Information
|
||||
public uint HeaderCRC32;
|
||||
public uint HeaderSize; // vint
|
||||
public RarHeaderType HeaderType; // vint
|
||||
public RarHeaderFlags HeaderFlags; // vint
|
||||
public uint ExtraAreaSize; // vint
|
||||
public uint DataAreaSize; // vint
|
||||
public RarFileFlags FileFlags; // vint
|
||||
public uint UnpackedSize; // vint
|
||||
public uint Attributes; // vint
|
||||
public uint mtime;
|
||||
public uint DataCRC32;
|
||||
public uint CompressionInformation; // vint
|
||||
public uint HostOS; // vint
|
||||
public uint NameLength; // vint
|
||||
public byte[] Name;
|
||||
public byte[] DataArea;
|
||||
|
||||
// File Encryption Information
|
||||
public uint EncryptionSize; // vint
|
||||
public RarEncryptionFlags EncryptionFlags; // vint
|
||||
public byte KDFCount;
|
||||
public byte[] Salt = new byte[16];
|
||||
public byte[] IV = new byte[16];
|
||||
public byte[] CheckValue = new byte[12];
|
||||
|
||||
// File Hash Information
|
||||
public uint HashSize; // vint
|
||||
public uint HashType; // vint
|
||||
public byte[] HashData = new byte[32];
|
||||
|
||||
// File Time Information
|
||||
public uint TimeSize; // vint
|
||||
public RarTimeFlags TimeFlags; // vint
|
||||
public uint TimeMtime;
|
||||
public ulong TimeMtime64;
|
||||
public uint TimeCtime;
|
||||
public ulong TimeCtime64;
|
||||
public uint TimeLtime;
|
||||
public ulong TimeLtime64;
|
||||
|
||||
// File Version Information
|
||||
public uint VersionSize; // vint
|
||||
public const uint VersionFlags = 0; // vint
|
||||
public uint VersionNumber; // vint
|
||||
|
||||
// File System Redirection Record
|
||||
public uint RedirectionSize; // vint
|
||||
public RarRedirectionType RedirectionType; // vint
|
||||
public uint RedirectionFlags; // vint
|
||||
public uint RedirectionNameLength; // vint
|
||||
public byte[] RedirectionName;
|
||||
|
||||
// Unix Owner Record
|
||||
public uint UnixOwnerSize; // vint
|
||||
public RarUnixOwnerRecordFlags UnixOwnerFlags; // vint
|
||||
public uint UnixOwnerUserNameLength; // vint
|
||||
public byte[] UnixOwnerUserName;
|
||||
public uint UnixOwnerGroupNameLength; // vint
|
||||
public byte[] UnixOwnerGroupName;
|
||||
public uint UnixOwnerUserId; // vint
|
||||
public uint UnixOwnerGroupId; // vint
|
||||
|
||||
// Service Data Information
|
||||
public uint ServiceSize; // vint
|
||||
public byte[] ServiceData;
|
||||
}
|
||||
}
|
||||
93
SabreTools.Library/External/NaturalSort/NaturalComparer.cs
vendored
Normal file
@@ -0,0 +1,93 @@
|
||||
/*
|
||||
*
|
||||
* Links for info and original source code:
|
||||
*
|
||||
* https://blog.codinghorror.com/sorting-for-humans-natural-sort-order/
|
||||
* http://www.codeproject.com/Articles/22517/Natural-Sort-Comparer
|
||||
*
|
||||
* Exact code implementation used with permission, originally by motoschifo
|
||||
*
|
||||
*/
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text.RegularExpressions;
|
||||
|
||||
using SabreTools.Helper.Tools;
|
||||
|
||||
namespace NaturalSort
|
||||
{
|
||||
public class NaturalComparer : Comparer<string>, IDisposable
|
||||
{
|
||||
private Dictionary<string, string[]> table;
|
||||
|
||||
public NaturalComparer()
|
||||
{
|
||||
table = new Dictionary<string, string[]>();
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
table.Clear();
|
||||
table = null;
|
||||
}
|
||||
|
||||
public override int Compare(string x, string y)
|
||||
{
|
||||
if (x == y)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
string[] x1, y1;
|
||||
if (!table.TryGetValue(x, out x1))
|
||||
{
|
||||
//x1 = Regex.Split(x.Replace(" ", ""), "([0-9]+)");
|
||||
x1 = Regex.Split(x, "([0-9]+)").Where(s => s != "").ToArray();
|
||||
table.Add(x, x1);
|
||||
}
|
||||
if (!table.TryGetValue(y, out y1))
|
||||
{
|
||||
//y1 = Regex.Split(y.Replace(" ", ""), "([0-9]+)");
|
||||
y1 = Regex.Split(y, "([0-9]+)").Where(s => s != "").ToArray();
|
||||
table.Add(y, y1);
|
||||
}
|
||||
|
||||
for (int i = 0; i < x1.Length && i < y1.Length; i++)
|
||||
{
|
||||
if (x1[i] != y1[i])
|
||||
{
|
||||
return PartCompare(x1[i], y1[i]);
|
||||
}
|
||||
}
|
||||
if (y1.Length > x1.Length)
|
||||
{
|
||||
return 1;
|
||||
}
|
||||
else if (x1.Length > y1.Length)
|
||||
{
|
||||
return -1;
|
||||
}
|
||||
else
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
private static int PartCompare(string left, string right)
|
||||
{
|
||||
int x, y;
|
||||
if (!int.TryParse(left, out x))
|
||||
{
|
||||
return Style.CompareNumeric(left, right);
|
||||
}
|
||||
|
||||
if (!int.TryParse(right, out y))
|
||||
{
|
||||
return Style.CompareNumeric(left, right);
|
||||
}
|
||||
|
||||
return x.CompareTo(y);
|
||||
}
|
||||
}
|
||||
}
|
||||
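For context, a minimal usage sketch of the comparer above; the strings are arbitrary examples, and System.Collections.Generic is assumed to be in scope.

// Numeric runs are compared as numbers, so "disk 2" sorts before "disk 10"
List<string> names = new List<string> { "disk 10", "disk 2", "disk 1" };
using (NaturalComparer nc = new NaturalComparer())
{
    names.Sort(nc);
}
// names is now: "disk 1", "disk 2", "disk 10"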
93
SabreTools.Library/External/NaturalSort/NaturalReversedComparer.cs
vendored
Normal file
@@ -0,0 +1,93 @@
|
||||
/*
|
||||
*
|
||||
* Links for info and original source code:
|
||||
*
|
||||
* https://blog.codinghorror.com/sorting-for-humans-natural-sort-order/
|
||||
* http://www.codeproject.com/Articles/22517/Natural-Sort-Comparer
|
||||
*
|
||||
* Exact code implementation used with permission, originally by motoschifo
|
||||
*
|
||||
*/
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text.RegularExpressions;
|
||||
|
||||
using SabreTools.Helper.Tools;
|
||||
|
||||
namespace NaturalSort
|
||||
{
|
||||
public class NaturalReversedComparer : Comparer<string>, IDisposable
|
||||
{
|
||||
private Dictionary<string, string[]> table;
|
||||
|
||||
public NaturalReversedComparer()
|
||||
{
|
||||
table = new Dictionary<string, string[]>();
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
table.Clear();
|
||||
table = null;
|
||||
}
|
||||
|
||||
public override int Compare(string x, string y)
|
||||
{
|
||||
if (x == y)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
string[] x1, y1;
|
||||
if (!table.TryGetValue(x, out x1))
|
||||
{
|
||||
//x1 = Regex.Split(x.Replace(" ", ""), "([0-9]+)");
|
||||
x1 = Regex.Split(x, "([0-9]+)").Where(s => s != "").ToArray();
|
||||
table.Add(x, x1);
|
||||
}
|
||||
if (!table.TryGetValue(y, out y1))
|
||||
{
|
||||
//y1 = Regex.Split(y.Replace(" ", ""), "([0-9]+)");
|
||||
y1 = Regex.Split(y, "([0-9]+)").Where(s => s != "").ToArray();
|
||||
table.Add(y, y1);
|
||||
}
|
||||
|
||||
for (int i = 0; i < x1.Length && i < y1.Length; i++)
|
||||
{
|
||||
if (x1[i] != y1[i])
|
||||
{
|
||||
return PartCompare(x1[i], y1[i]);
|
||||
}
|
||||
}
|
||||
if (y1.Length > x1.Length)
|
||||
{
|
||||
return 1;
|
||||
}
|
||||
else if (x1.Length > y1.Length)
|
||||
{
|
||||
return -1;
|
||||
}
|
||||
else
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
private static int PartCompare(string left, string right)
|
||||
{
|
||||
int x, y;
|
||||
if (!int.TryParse(left, out x))
|
||||
{
|
||||
return Style.CompareNumeric(right, left);
|
||||
}
|
||||
|
||||
if (!int.TryParse(right, out y))
|
||||
{
|
||||
return Style.CompareNumeric(right, left);
|
||||
}
|
||||
|
||||
return -x.CompareTo(y);
|
||||
}
|
||||
}
|
||||
}
|
||||
154
SabreTools.Library/External/OptimizedCRC.cs
vendored
Normal file
@@ -0,0 +1,154 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2012-2015 Eugene Larchenko (spct@mail.ru)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
|
||||
*/
|
||||
|
||||
using System;
|
||||
using System.IO;
|
||||
|
||||
namespace OCRC
|
||||
{
|
||||
public class OptimizedCRC : IDisposable
|
||||
{
|
||||
private const uint kCrcPoly = 0xEDB88320;
|
||||
private const uint kInitial = 0xFFFFFFFF;
|
||||
private const int CRC_NUM_TABLES = 8;
|
||||
private static readonly uint[] Table;
|
||||
|
||||
static OptimizedCRC()
|
||||
{
|
||||
unchecked
|
||||
{
|
||||
Table = new uint[256 * CRC_NUM_TABLES];
|
||||
int i;
|
||||
for (i = 0; i < 256; i++)
|
||||
{
|
||||
uint r = (uint)i;
|
||||
for (int j = 0; j < 8; j++)
|
||||
{
|
||||
r = (r >> 1) ^ (kCrcPoly & ~((r & 1) - 1));
|
||||
}
|
||||
Table[i] = r;
|
||||
}
|
||||
for (; i < 256 * CRC_NUM_TABLES; i++)
|
||||
{
|
||||
uint r = Table[i - 256];
|
||||
Table[i] = Table[r & 0xFF] ^ (r >> 8);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public uint UnsignedValue;
|
||||
|
||||
public OptimizedCRC()
|
||||
{
|
||||
Init();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Reset CRC
|
||||
/// </summary>
|
||||
public void Init()
|
||||
{
|
||||
UnsignedValue = kInitial;
|
||||
}
|
||||
|
||||
public int Value
|
||||
{
|
||||
get { return (int)~UnsignedValue; }
|
||||
}
|
||||
|
||||
public void Update(byte[] data, int offset, int count)
|
||||
{
|
||||
new ArraySegment<byte>(data, offset, count); // check arguments
|
||||
if (count == 0)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
var table = OptimizedCRC.Table;
|
||||
|
||||
uint crc = UnsignedValue;
|
||||
|
||||
for (; (offset & 7) != 0 && count != 0; count--)
|
||||
{
|
||||
crc = (crc >> 8) ^ table[(byte)crc ^ data[offset++]];
|
||||
}
|
||||
|
||||
if (count >= 8)
|
||||
{
|
||||
/*
|
||||
* Idea from 7-zip project sources (http://7-zip.org/sdk.html)
|
||||
*/
|
||||
|
||||
int end = (count - 8) & ~7;
|
||||
count -= end;
|
||||
end += offset;
|
||||
|
||||
while (offset != end)
|
||||
{
|
||||
crc ^= (uint)(data[offset] + (data[offset + 1] << 8) + (data[offset + 2] << 16) + (data[offset + 3] << 24));
|
||||
uint high = (uint)(data[offset + 4] + (data[offset + 5] << 8) + (data[offset + 6] << 16) + (data[offset + 7] << 24));
|
||||
offset += 8;
|
||||
|
||||
crc = table[(byte)crc + 0x700]
|
||||
^ table[(byte)(crc >>= 8) + 0x600]
|
||||
^ table[(byte)(crc >>= 8) + 0x500]
|
||||
^ table[/*(byte)*/(crc >> 8) + 0x400]
|
||||
^ table[(byte)(high) + 0x300]
|
||||
^ table[(byte)(high >>= 8) + 0x200]
|
||||
^ table[(byte)(high >>= 8) + 0x100]
|
||||
^ table[/*(byte)*/(high >> 8) + 0x000];
|
||||
}
|
||||
}
|
||||
|
||||
while (count-- != 0)
|
||||
{
|
||||
crc = (crc >> 8) ^ table[(byte)crc ^ data[offset++]];
|
||||
}
|
||||
|
||||
UnsignedValue = crc;
|
||||
}
|
||||
|
||||
static public int Compute(byte[] data, int offset, int count)
|
||||
{
|
||||
var crc = new OptimizedCRC();
|
||||
crc.Update(data, offset, count);
|
||||
return crc.Value;
|
||||
}
|
||||
|
||||
static public int Compute(byte[] data)
|
||||
{
|
||||
return Compute(data, 0, data.Length);
|
||||
}
|
||||
|
||||
static public int Compute(ArraySegment<byte> block)
|
||||
{
|
||||
return Compute(block.Array, block.Offset, block.Count);
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
UnsignedValue = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
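A brief usage sketch for the CRC class above, exercising only members that appear in the listing; the input is the conventional CRC-32 check string.

// One-shot computation over a buffer
byte[] data = System.Text.Encoding.ASCII.GetBytes("123456789");
int oneShot = OptimizedCRC.Compute(data); // bit pattern 0xCBF43926 for this input

// Equivalent incremental form
using (OptimizedCRC crc = new OptimizedCRC())
{
    crc.Update(data, 0, data.Length);
    int incremental = crc.Value; // same value as oneShot
}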
1036
SabreTools.Library/External/SupportedFiles/ZipFile.cs
vendored
Normal file
File diff suppressed because it is too large
1038
SabreTools.Library/External/SupportedFiles/ZipFileEntry.cs
vendored
Normal file
File diff suppressed because it is too large
68
SabreTools.Library/External/Traverse.cs
vendored
Normal file
@@ -0,0 +1,68 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
#if MONO
|
||||
using System.IO;
|
||||
#else
|
||||
using Alphaleonis.Win32.Filesystem;
|
||||
|
||||
using IOException = System.IO.IOException;
|
||||
using SearchOption = System.IO.SearchOption;
|
||||
#endif
|
||||
using SabreTools.Helper.Data;
|
||||
|
||||
namespace SabreTools.Helper.External
|
||||
{
|
||||
public class Traverse
|
||||
{
|
||||
/// <summary>
/// Traverse a directory tree in parallel and run an action on each file found
/// </summary>
/// <remarks>Original version: Microsoft (example code), updated by edc</remarks>
|
||||
public void TraverseTreeParallelForEach(string root, Action<FileInfo> action)
|
||||
{
|
||||
List<string> dirs = new List<string>();
|
||||
|
||||
if (!Directory.Exists(root))
|
||||
{
|
||||
throw new ArgumentException();
|
||||
}
|
||||
|
||||
dirs.Add(root);
|
||||
|
||||
List<string> subdirs = new List<string>();
|
||||
|
||||
while (dirs.Count > 0 || subdirs.Count > 0)
|
||||
{
|
||||
foreach (string dir in subdirs)
|
||||
{
|
||||
dirs.Add(dir);
|
||||
}
|
||||
subdirs.Clear();
|
||||
|
||||
Parallel.ForEach(dirs, Globals.ParallelOptions, currentDir =>
|
||||
{
|
||||
string[] subDirs = Directory.GetDirectories(currentDir);
|
||||
|
||||
lock (subdirs)
|
||||
{
|
||||
foreach (string str in subDirs)
|
||||
{
|
||||
subdirs.Add(str);
|
||||
}
|
||||
}
|
||||
|
||||
var dir = new DirectoryInfo(currentDir);
|
||||
try
|
||||
{
|
||||
FileInfo[] files = dir.GetFiles("*.*", SearchOption.TopDirectoryOnly);
|
||||
Parallel.ForEach(files, Globals.ParallelOptions, info =>
|
||||
{
|
||||
action(info);
|
||||
});
|
||||
}
|
||||
catch { }
|
||||
});
|
||||
dirs.Clear();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
878
SabreTools.Library/External/Zlib/CRC32.cs
vendored
Normal file
@@ -0,0 +1,878 @@
|
||||
// CRC32.cs
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// Copyright (c) 2011 Dino Chiesa.
|
||||
// All rights reserved.
|
||||
//
|
||||
// This code module is part of DotNetZip, a zipfile class library.
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// This code is licensed under the Microsoft Public License.
|
||||
// See the file License.txt for the license details.
|
||||
// More info on: http://dotnetzip.codeplex.com
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// Last Saved: <2011-August-02 18:25:54>
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// This module defines the CRC32 class, which can do the CRC32 algorithm, using
|
||||
// arbitrary starting polynomials, and bit reversal. The bit reversal is what
|
||||
// distinguishes this CRC-32 used in BZip2 from the CRC-32 that is used in PKZIP
|
||||
// files, or GZIP files. This class does both.
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
|
||||
using System;
|
||||
using System.Security.Cryptography;
|
||||
using System.Runtime.InteropServices;
|
||||
|
||||
namespace Ionic.Zlib
|
||||
{
|
||||
/// <summary>
|
||||
/// Computes a CRC-32. The CRC-32 algorithm is parameterized - you
|
||||
/// can set the polynomial and enable or disable bit
|
||||
/// reversal. This can be used for GZIP, BZip2, or ZIP.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// This type is used internally by DotNetZip; it is generally not used
|
||||
/// directly by applications wishing to create, read, or manipulate zip
|
||||
/// archive files.
|
||||
/// </remarks>
|
||||
|
||||
[Guid("ebc25cf6-9120-4283-b972-0e5520d0000C")]
|
||||
[System.Runtime.InteropServices.ComVisible(true)]
|
||||
#if !NETCF
|
||||
[System.Runtime.InteropServices.ClassInterface(System.Runtime.InteropServices.ClassInterfaceType.AutoDispatch)]
|
||||
#endif
|
||||
public class CRC32
|
||||
{
|
||||
/// <summary>
|
||||
/// Indicates the total number of bytes applied to the CRC.
|
||||
/// </summary>
|
||||
public Int64 TotalBytesRead
|
||||
{
|
||||
get
|
||||
{
|
||||
return _TotalBytesRead;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Indicates the current CRC for all blocks slurped in.
|
||||
/// </summary>
|
||||
public Int32 Crc32Result
|
||||
{
|
||||
get
|
||||
{
|
||||
return unchecked((Int32)(~_register));
|
||||
}
|
||||
}
|
||||
public uint Crc32ResultU
|
||||
{
|
||||
get
|
||||
{
|
||||
return ~_register;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/// <summary>
|
||||
/// Returns the CRC32 for the specified stream.
|
||||
/// </summary>
|
||||
/// <param name="input">The stream over which to calculate the CRC32</param>
|
||||
/// <returns>the CRC32 calculation</returns>
|
||||
public Int32 GetCrc32(System.IO.Stream input)
|
||||
{
|
||||
return GetCrc32AndCopy(input, null);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Returns the CRC32 for the specified stream, and writes the input into the
|
||||
/// output stream.
|
||||
/// </summary>
|
||||
/// <param name="input">The stream over which to calculate the CRC32</param>
|
||||
/// <param name="output">The stream into which to deflate the input</param>
|
||||
/// <returns>the CRC32 calculation</returns>
|
||||
public Int32 GetCrc32AndCopy(System.IO.Stream input, System.IO.Stream output)
|
||||
{
|
||||
if (input == null)
|
||||
{
|
||||
throw new Exception("The input stream must not be null.");
|
||||
}
|
||||
|
||||
unchecked
|
||||
{
|
||||
byte[] buffer = new byte[BUFFER_SIZE];
|
||||
int readSize = BUFFER_SIZE;
|
||||
|
||||
_TotalBytesRead = 0;
|
||||
int count = input.Read(buffer, 0, readSize);
|
||||
if (output != null)
|
||||
{
|
||||
output.Write(buffer, 0, count);
|
||||
}
|
||||
_TotalBytesRead += count;
|
||||
while (count > 0)
|
||||
{
|
||||
SlurpBlock(buffer, 0, count);
|
||||
count = input.Read(buffer, 0, readSize);
|
||||
if (output != null) output.Write(buffer, 0, count);
|
||||
_TotalBytesRead += count;
|
||||
}
|
||||
|
||||
return (Int32)(~_register);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Get the CRC32 for the given (word,byte) combo. This is a
|
||||
/// computation defined by PKzip for PKZIP 2.0 (weak) encryption.
|
||||
/// </summary>
|
||||
/// <param name="W">The word to start with.</param>
|
||||
/// <param name="B">The byte to combine it with.</param>
|
||||
/// <returns>The CRC-ized result.</returns>
|
||||
public Int32 ComputeCrc32(Int32 W, byte B)
|
||||
{
|
||||
return _InternalComputeCrc32((UInt32)W, B);
|
||||
}
|
||||
|
||||
internal Int32 _InternalComputeCrc32(UInt32 W, byte B)
|
||||
{
|
||||
return (Int32)(crc32Table[(W ^ B) & 0xFF] ^ (W >> 8));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Update the value for the running CRC32 using the given block of bytes.
|
||||
/// This is useful when using the CRC32() class in a Stream.
|
||||
/// </summary>
|
||||
/// <param name="block">block of bytes to slurp</param>
|
||||
/// <param name="offset">starting point in the block</param>
|
||||
/// <param name="count">how many bytes within the block to slurp</param>
|
||||
public void SlurpBlock(byte[] block, int offset, int count)
|
||||
{
|
||||
if (block == null)
|
||||
{
|
||||
throw new Exception("The data buffer must not be null.");
|
||||
}
|
||||
|
||||
// bzip algorithm
|
||||
for (int i = 0; i < count; i++)
|
||||
{
|
||||
int x = offset + i;
|
||||
byte b = block[x];
|
||||
if (this.reverseBits)
|
||||
{
|
||||
UInt32 temp = (_register >> 24) ^ b;
|
||||
_register = (_register << 8) ^ crc32Table[temp];
|
||||
}
|
||||
else
|
||||
{
|
||||
UInt32 temp = (_register & 0x000000FF) ^ b;
|
||||
_register = (_register >> 8) ^ crc32Table[temp];
|
||||
}
|
||||
}
|
||||
_TotalBytesRead += count;
|
||||
}
|
||||
|
||||
|
||||
/// <summary>
|
||||
/// Process one byte in the CRC.
|
||||
/// </summary>
|
||||
/// <param name = "b">the byte to include into the CRC . </param>
|
||||
public void UpdateCRC(byte b)
|
||||
{
|
||||
if (this.reverseBits)
|
||||
{
|
||||
UInt32 temp = (_register >> 24) ^ b;
|
||||
_register = (_register << 8) ^ crc32Table[temp];
|
||||
}
|
||||
else
|
||||
{
|
||||
UInt32 temp = (_register & 0x000000FF) ^ b;
|
||||
_register = (_register >> 8) ^ crc32Table[temp];
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Process a run of N identical bytes into the CRC.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// <para>
|
||||
/// This method serves as an optimization for updating the CRC when a
|
||||
/// run of identical bytes is found. Rather than passing in a buffer of
|
||||
/// length n, containing all identical bytes b, this method accepts the
|
||||
/// byte value and the length of the (virtual) buffer - the length of
|
||||
/// the run.
|
||||
/// </para>
|
||||
/// </remarks>
|
||||
/// <param name = "b">the byte to include into the CRC. </param>
|
||||
/// <param name = "n">the number of times that byte should be repeated. </param>
|
||||
public void UpdateCRC(byte b, int n)
|
||||
{
|
||||
while (n-- > 0)
|
||||
{
|
||||
if (this.reverseBits)
|
||||
{
|
||||
uint temp = (_register >> 24) ^ b;
|
||||
_register = (_register << 8) ^ crc32Table[(temp >= 0)
|
||||
? temp
|
||||
: (temp + 256)];
|
||||
}
|
||||
else
|
||||
{
|
||||
UInt32 temp = (_register & 0x000000FF) ^ b;
|
||||
_register = (_register >> 8) ^ crc32Table[(temp >= 0)
|
||||
? temp
|
||||
: (temp + 256)];
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static uint ReverseBits(uint data)
|
||||
{
|
||||
unchecked
|
||||
{
|
||||
uint ret = data;
|
||||
ret = (ret & 0x55555555) << 1 | (ret >> 1) & 0x55555555;
|
||||
ret = (ret & 0x33333333) << 2 | (ret >> 2) & 0x33333333;
|
||||
ret = (ret & 0x0F0F0F0F) << 4 | (ret >> 4) & 0x0F0F0F0F;
|
||||
ret = (ret << 24) | ((ret & 0xFF00) << 8) | ((ret >> 8) & 0xFF00) | (ret >> 24);
|
||||
return ret;
|
||||
}
|
||||
}
|
||||
|
||||
private static byte ReverseBits(byte data)
|
||||
{
|
||||
unchecked
|
||||
{
|
||||
uint u = (uint)data * 0x00020202;
|
||||
uint m = 0x01044010;
|
||||
uint s = u & m;
|
||||
uint t = (u << 2) & (m << 1);
|
||||
return (byte)((0x01001001 * (s + t)) >> 24);
|
||||
}
|
||||
}
|
||||
|
||||
private void GenerateLookupTable()
|
||||
{
|
||||
crc32Table = new UInt32[256];
|
||||
unchecked
|
||||
{
|
||||
UInt32 dwCrc;
|
||||
byte i = 0;
|
||||
do
|
||||
{
|
||||
dwCrc = i;
|
||||
for (byte j = 8; j > 0; j--)
|
||||
{
|
||||
if ((dwCrc & 1) == 1)
|
||||
{
|
||||
dwCrc = (dwCrc >> 1) ^ dwPolynomial;
|
||||
}
|
||||
else
|
||||
{
|
||||
dwCrc >>= 1;
|
||||
}
|
||||
}
|
||||
if (reverseBits)
|
||||
{
|
||||
crc32Table[ReverseBits(i)] = ReverseBits(dwCrc);
|
||||
}
|
||||
else
|
||||
{
|
||||
crc32Table[i] = dwCrc;
|
||||
}
|
||||
i++;
|
||||
} while (i != 0);
|
||||
}
|
||||
|
||||
#if VERBOSE
|
||||
Console.WriteLine();
|
||||
Console.WriteLine("private static readonly UInt32[] crc32Table = {");
|
||||
for (int i = 0; i < crc32Table.Length; i+=4)
|
||||
{
|
||||
Console.Write(" ");
|
||||
for (int j=0; j < 4; j++)
|
||||
{
|
||||
Console.Write(" 0x{0:X8}U,", crc32Table[i+j]);
|
||||
}
|
||||
Console.WriteLine();
|
||||
}
|
||||
Console.WriteLine("};");
|
||||
Console.WriteLine();
|
||||
#endif
|
||||
}
|
||||
|
||||
private uint gf2_matrix_times(uint[] matrix, uint vec)
|
||||
{
|
||||
uint sum = 0;
|
||||
int i = 0;
|
||||
while (vec != 0)
|
||||
{
|
||||
if ((vec & 0x01) == 0x01)
|
||||
sum ^= matrix[i];
|
||||
vec >>= 1;
|
||||
i++;
|
||||
}
|
||||
return sum;
|
||||
}
|
||||
|
||||
private void gf2_matrix_square(uint[] square, uint[] mat)
|
||||
{
|
||||
for (int i = 0; i < 32; i++)
|
||||
square[i] = gf2_matrix_times(mat, mat[i]);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Combines the given CRC32 value with the current running total.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// This is useful when using a divide-and-conquer approach to
|
||||
/// calculating a CRC. Multiple threads can each calculate a
|
||||
/// CRC32 on a segment of the data, and then combine the
|
||||
/// individual CRC32 values at the end.
|
||||
/// </remarks>
|
||||
/// <param name="crc">the crc value to be combined with this one</param>
|
||||
/// <param name="length">the length of data the CRC value was calculated on</param>
|
||||
public void Combine(int crc, int length)
|
||||
{
|
||||
uint[] even = new uint[32]; // even-power-of-two zeros operator
|
||||
uint[] odd = new uint[32]; // odd-power-of-two zeros operator
|
||||
|
||||
if (length == 0)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
uint crc1 = ~_register;
|
||||
uint crc2 = (uint)crc;
|
||||
|
||||
// put operator for one zero bit in odd
|
||||
odd[0] = this.dwPolynomial; // the CRC-32 polynomial
|
||||
uint row = 1;
|
||||
for (int i = 1; i < 32; i++)
|
||||
{
|
||||
odd[i] = row;
|
||||
row <<= 1;
|
||||
}
|
||||
|
||||
// put operator for two zero bits in even
|
||||
gf2_matrix_square(even, odd);
|
||||
|
||||
// put operator for four zero bits in odd
|
||||
gf2_matrix_square(odd, even);
|
||||
|
||||
uint len2 = (uint)length;
|
||||
|
||||
// apply len2 zeros to crc1 (first square will put the operator for one
|
||||
// zero byte, eight zero bits, in even)
|
||||
do
|
||||
{
|
||||
// apply zeros operator for this bit of len2
|
||||
gf2_matrix_square(even, odd);
|
||||
|
||||
if ((len2 & 1) == 1)
|
||||
{
|
||||
crc1 = gf2_matrix_times(even, crc1);
|
||||
}
|
||||
len2 >>= 1;
|
||||
|
||||
if (len2 == 0)
|
||||
{
|
||||
break;
|
||||
}
|
||||
|
||||
// another iteration of the loop with odd and even swapped
|
||||
gf2_matrix_square(odd, even);
|
||||
if ((len2 & 1) == 1)
|
||||
{
|
||||
crc1 = gf2_matrix_times(odd, crc1);
|
||||
}
|
||||
len2 >>= 1;
|
||||
} while (len2 != 0);
|
||||
|
||||
crc1 ^= crc2;
|
||||
|
||||
_register = ~crc1;
|
||||
|
||||
//return (int) crc1;
|
||||
return;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Create an instance of the CRC32 class using the default settings: no
|
||||
/// bit reversal, and a polynomial of 0xEDB88320.
|
||||
/// </summary>
|
||||
public CRC32()
|
||||
: this(false)
|
||||
{
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Create an instance of the CRC32 class, specifying whether to reverse
|
||||
/// data bits or not.
|
||||
/// </summary>
|
||||
/// <param name='reverseBits'>
|
||||
/// specify true if the instance should reverse data bits.
|
||||
/// </param>
|
||||
/// <remarks>
|
||||
/// <para>
|
||||
/// In the CRC-32 used by BZip2, the bits are reversed. Therefore if you
|
||||
/// want a CRC32 with compatibility with BZip2, you should pass true
|
||||
/// here. In the CRC-32 used by GZIP and PKZIP, the bits are not
|
||||
/// reversed; Therefore if you want a CRC32 with compatibility with
|
||||
/// those, you should pass false.
|
||||
/// </para>
|
||||
/// </remarks>
|
||||
public CRC32(bool reverseBits) :
|
||||
this(unchecked((int)0xEDB88320), reverseBits)
|
||||
{
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Create an instance of the CRC32 class, specifying the polynomial and
|
||||
/// whether to reverse data bits or not.
|
||||
/// </summary>
|
||||
/// <param name='polynomial'>
|
||||
/// The polynomial to use for the CRC, expressed in the reversed (LSB)
|
||||
/// format: the highest ordered bit in the polynomial value is the
|
||||
/// coefficient of the 0th power; the second-highest order bit is the
|
||||
/// coefficient of the 1 power, and so on. Expressed this way, the
|
||||
/// polynomial for the CRC-32 used in IEEE 802.3, is 0xEDB88320.
|
||||
/// </param>
|
||||
/// <param name='reverseBits'>
|
||||
/// specify true if the instance should reverse data bits.
|
||||
/// </param>
|
||||
///
|
||||
/// <remarks>
|
||||
/// <para>
|
||||
/// In the CRC-32 used by BZip2, the bits are reversed. Therefore if you
|
||||
/// want a CRC32 with compatibility with BZip2, you should pass true
|
||||
/// here for the <c>reverseBits</c> parameter. In the CRC-32 used by
|
||||
/// GZIP and PKZIP, the bits are not reversed; Therefore if you want a
|
||||
/// CRC32 with compatibility with those, you should pass false for the
|
||||
/// <c>reverseBits</c> parameter.
|
||||
/// </para>
|
||||
/// </remarks>
|
||||
public CRC32(int polynomial, bool reverseBits)
|
||||
{
|
||||
this.reverseBits = reverseBits;
|
||||
this.dwPolynomial = (uint)polynomial;
|
||||
this.GenerateLookupTable();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Reset the CRC-32 class - clear the CRC "remainder register."
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// <para>
|
||||
/// Use this when employing a single instance of this class to compute
|
||||
/// multiple, distinct CRCs on multiple, distinct data blocks.
|
||||
/// </para>
|
||||
/// </remarks>
|
||||
public void Reset()
|
||||
{
|
||||
_register = 0xFFFFFFFFU;
|
||||
}
|
||||
|
||||
// private member vars
|
||||
private UInt32 dwPolynomial;
|
||||
private Int64 _TotalBytesRead;
|
||||
private bool reverseBits;
|
||||
private UInt32[] crc32Table;
|
||||
private const int BUFFER_SIZE = 8192;
|
||||
private UInt32 _register = 0xFFFFFFFFU;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A Stream that calculates a CRC32 (a checksum) on all bytes read,
|
||||
/// or on all bytes written.
|
||||
/// </summary>
|
||||
///
|
||||
/// <remarks>
|
||||
/// <para>
|
||||
/// This class can be used to verify the CRC of a ZipEntry when
|
||||
/// reading from a stream, or to calculate a CRC when writing to a
|
||||
/// stream. The stream should be used to either read, or write, but
|
||||
/// not both. If you intermix reads and writes, the results are not
|
||||
/// defined.
|
||||
/// </para>
|
||||
///
|
||||
/// <para>
|
||||
/// This class is intended primarily for use internally by the
|
||||
/// DotNetZip library.
|
||||
/// </para>
|
||||
/// </remarks>
|
||||
public class CrcCalculatorStream : System.IO.Stream, System.IDisposable
|
||||
{
|
||||
private static readonly Int64 UnsetLengthLimit = -99;
|
||||
|
||||
internal System.IO.Stream _innerStream;
|
||||
private CRC32 _Crc32;
|
||||
private Int64 _lengthLimit = -99;
|
||||
private bool _leaveOpen;
|
||||
|
||||
/// <summary>
|
||||
/// The default constructor.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// <para>
|
||||
/// Instances returned from this constructor will leave the underlying
|
||||
/// stream open upon Close(). The stream uses the default CRC32
|
||||
/// algorithm, which implies a polynomial of 0xEDB88320.
|
||||
/// </para>
|
||||
/// </remarks>
|
||||
/// <param name="stream">The underlying stream</param>
|
||||
public CrcCalculatorStream(System.IO.Stream stream)
|
||||
: this(true, CrcCalculatorStream.UnsetLengthLimit, stream, null)
|
||||
{
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// The constructor allows the caller to specify how to handle the
|
||||
/// underlying stream at close.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// <para>
|
||||
/// The stream uses the default CRC32 algorithm, which implies a
|
||||
/// polynomial of 0xEDB88320.
|
||||
/// </para>
|
||||
/// </remarks>
|
||||
/// <param name="stream">The underlying stream</param>
|
||||
/// <param name="leaveOpen">true to leave the underlying stream
|
||||
/// open upon close of the <c>CrcCalculatorStream</c>; false otherwise.</param>
|
||||
public CrcCalculatorStream(System.IO.Stream stream, bool leaveOpen)
|
||||
: this(leaveOpen, CrcCalculatorStream.UnsetLengthLimit, stream, null)
|
||||
{
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A constructor allowing the specification of the length of the stream
|
||||
/// to read.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// <para>
|
||||
/// The stream uses the default CRC32 algorithm, which implies a
|
||||
/// polynomial of 0xEDB88320.
|
||||
/// </para>
|
||||
/// <para>
|
||||
/// Instances returned from this constructor will leave the underlying
|
||||
/// stream open upon Close().
|
||||
/// </para>
|
||||
/// </remarks>
|
||||
/// <param name="stream">The underlying stream</param>
|
||||
/// <param name="length">The length of the stream to slurp</param>
|
||||
public CrcCalculatorStream(System.IO.Stream stream, Int64 length)
|
||||
: this(true, length, stream, null)
|
||||
{
|
||||
if (length < 0)
|
||||
{
|
||||
throw new ArgumentException("length");
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A constructor allowing the specification of the length of the stream
|
||||
/// to read, as well as whether to keep the underlying stream open upon
|
||||
/// Close().
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// <para>
|
||||
/// The stream uses the default CRC32 algorithm, which implies a
|
||||
/// polynomial of 0xEDB88320.
|
||||
/// </para>
|
||||
/// </remarks>
|
||||
/// <param name="stream">The underlying stream</param>
|
||||
/// <param name="length">The length of the stream to slurp</param>
|
||||
/// <param name="leaveOpen">true to leave the underlying stream
|
||||
/// open upon close of the <c>CrcCalculatorStream</c>; false otherwise.</param>
|
||||
public CrcCalculatorStream(System.IO.Stream stream, Int64 length, bool leaveOpen)
|
||||
: this(leaveOpen, length, stream, null)
|
||||
{
|
||||
if (length < 0)
|
||||
{
|
||||
throw new ArgumentException("length");
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A constructor allowing the specification of the length of the stream
|
||||
/// to read, as well as whether to keep the underlying stream open upon
|
||||
/// Close(), and the CRC32 instance to use.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// <para>
|
||||
/// The stream uses the specified CRC32 instance, which allows the
|
||||
/// application to specify how the CRC gets calculated.
|
||||
/// </para>
|
||||
/// </remarks>
|
||||
/// <param name="stream">The underlying stream</param>
|
||||
/// <param name="length">The length of the stream to slurp</param>
|
||||
/// <param name="leaveOpen">true to leave the underlying stream
|
||||
/// open upon close of the <c>CrcCalculatorStream</c>; false otherwise.</param>
|
||||
/// <param name="crc32">the CRC32 instance to use to calculate the CRC32</param>
|
||||
public CrcCalculatorStream(System.IO.Stream stream, Int64 length, bool leaveOpen,
|
||||
CRC32 crc32)
|
||||
: this(leaveOpen, length, stream, crc32)
|
||||
{
|
||||
if (length < 0)
|
||||
{
|
||||
throw new ArgumentException("length");
|
||||
}
|
||||
}
|
||||
|
||||
// This ctor is private - no validation is done here. This is to allow the use
|
||||
// of a (specific) negative value for the _lengthLimit, to indicate that there
|
||||
// is no length set. So we validate the length limit in those ctors that use an
|
||||
// explicit param, otherwise we don't validate, because it could be our special
|
||||
// value.
|
||||
private CrcCalculatorStream
|
||||
(bool leaveOpen, Int64 length, System.IO.Stream stream, CRC32 crc32)
|
||||
: base()
|
||||
{
|
||||
_innerStream = stream;
|
||||
_Crc32 = crc32 ?? new CRC32();
|
||||
_lengthLimit = length;
|
||||
_leaveOpen = leaveOpen;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets the total number of bytes run through the CRC32 calculator.
|
||||
/// </summary>
|
||||
///
|
||||
/// <remarks>
|
||||
/// This is either the total number of bytes read, or the total number of
|
||||
/// bytes written, depending on the direction of this stream.
|
||||
/// </remarks>
|
||||
public Int64 TotalBytesSlurped
|
||||
{
|
||||
get { return _Crc32.TotalBytesRead; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Provides the current CRC for all blocks slurped in.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// <para>
|
||||
/// The running total of the CRC is kept as data is written or read
|
||||
/// through the stream. Read this property after all reads or writes to
|
||||
/// get an accurate CRC for the entire stream.
|
||||
/// </para>
|
||||
/// </remarks>
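/// <example>
/// <para>
/// Illustrative sketch (editor's addition): the property can also be read
/// after writing through the stream, for example when the wrapped stream is a
/// <c>MemoryStream</c> being filled.
/// </para>
/// <code>
/// var ms = new System.IO.MemoryStream();
/// using (var crcStream = new CrcCalculatorStream(ms, true))
/// {
///     byte[] payload = System.Text.Encoding.UTF8.GetBytes("payload");
///     crcStream.Write(payload, 0, payload.Length);
///     int crc = crcStream.Crc;          // CRC of the bytes written so far
/// }
/// </code>
/// </example>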
|
||||
public Int32 Crc
|
||||
{
|
||||
get { return _Crc32.Crc32Result; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Indicates whether the underlying stream will be left open when the
|
||||
/// <c>CrcCalculatorStream</c> is Closed.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// <para>
|
||||
/// Set this at any point before calling <see cref="Close()"/>.
|
||||
/// </para>
|
||||
/// </remarks>
|
||||
public bool LeaveOpen
|
||||
{
|
||||
get { return _leaveOpen; }
|
||||
set { _leaveOpen = value; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Read from the stream
|
||||
/// </summary>
|
||||
/// <param name="buffer">the buffer to read</param>
|
||||
/// <param name="offset">the offset at which to start</param>
|
||||
/// <param name="count">the number of bytes to read</param>
|
||||
/// <returns>the number of bytes actually read</returns>
|
||||
public override int Read(byte[] buffer, int offset, int count)
|
||||
{
|
||||
int bytesToRead = count;
|
||||
|
||||
// Need to limit the # of bytes returned, if the stream is intended to have
|
||||
// a definite length. This is especially useful when returning a stream for
|
||||
// the uncompressed data directly to the application. The app won't
|
||||
// necessarily read only the UncompressedSize number of bytes. For example,
// wrapping the stream returned from OpenReader() into a StreamReader() and
// calling ReadToEnd() on it, we can "over-read" the zip data and get a
// corrupt string. The length limit prevents that problem.
|
||||
|
||||
if (_lengthLimit != CrcCalculatorStream.UnsetLengthLimit)
|
||||
{
|
||||
if (_Crc32.TotalBytesRead >= _lengthLimit)
|
||||
{
|
||||
return 0; // EOF
|
||||
}
|
||||
Int64 bytesRemaining = _lengthLimit - _Crc32.TotalBytesRead;
|
||||
if (bytesRemaining < count)
|
||||
{
|
||||
bytesToRead = (int)bytesRemaining;
|
||||
}
|
||||
}
|
||||
int n = _innerStream.Read(buffer, offset, bytesToRead);
|
||||
if (n > 0)
|
||||
{
|
||||
_Crc32.SlurpBlock(buffer, offset, n);
|
||||
}
|
||||
return n;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Write to the stream.
|
||||
/// </summary>
|
||||
/// <param name="buffer">the buffer from which to write</param>
|
||||
/// <param name="offset">the offset at which to start writing</param>
|
||||
/// <param name="count">the number of bytes to write</param>
|
||||
public override void Write(byte[] buffer, int offset, int count)
|
||||
{
|
||||
if (count > 0)
|
||||
{
|
||||
_Crc32.SlurpBlock(buffer, offset, count);
|
||||
}
|
||||
_innerStream.Write(buffer, offset, count);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Indicates whether the stream supports reading.
|
||||
/// </summary>
|
||||
public override bool CanRead
|
||||
{
|
||||
get { return _innerStream.CanRead; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Indicates whether the stream supports seeking.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// <para>
|
||||
/// Always returns false.
|
||||
/// </para>
|
||||
/// </remarks>
|
||||
public override bool CanSeek
|
||||
{
|
||||
get { return false; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Indicates whether the stream supports writing.
|
||||
/// </summary>
|
||||
public override bool CanWrite
|
||||
{
|
||||
get { return _innerStream.CanWrite; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Flush the stream.
|
||||
/// </summary>
|
||||
public override void Flush()
|
||||
{
|
||||
_innerStream.Flush();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Returns the length of the underlying stream.
|
||||
/// </summary>
|
||||
public override long Length
|
||||
{
|
||||
get
|
||||
{
|
||||
if (_lengthLimit == CrcCalculatorStream.UnsetLengthLimit)
|
||||
{
|
||||
return _innerStream.Length;
|
||||
}
|
||||
else
|
||||
{
|
||||
return _lengthLimit;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// The getter for this property returns the total bytes read.
|
||||
/// If you use the setter, it will throw
|
||||
/// <see cref="NotSupportedException"/>.
|
||||
/// </summary>
|
||||
public override long Position
|
||||
{
|
||||
get { return _Crc32.TotalBytesRead; }
|
||||
set { throw new NotSupportedException(); }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Seeking is not supported on this stream. This method always throws
|
||||
/// <see cref="NotSupportedException"/>
|
||||
/// </summary>
|
||||
/// <param name="offset">N/A</param>
|
||||
/// <param name="origin">N/A</param>
|
||||
/// <returns>N/A</returns>
|
||||
public override long Seek(long offset, System.IO.SeekOrigin origin)
|
||||
{
|
||||
throw new NotSupportedException();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// This method always throws
|
||||
/// <see cref="NotSupportedException"/>
|
||||
/// </summary>
|
||||
/// <param name="value">N/A</param>
|
||||
public override void SetLength(long value)
|
||||
{
|
||||
throw new NotSupportedException();
|
||||
}
|
||||
|
||||
void IDisposable.Dispose()
|
||||
{
|
||||
Close();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Closes the stream.
|
||||
/// </summary>
|
||||
public override void Close()
|
||||
{
|
||||
base.Close();
|
||||
if (!_leaveOpen)
|
||||
{
|
||||
_innerStream.Close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
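/// <summary>
/// A <c>HashAlgorithm</c> wrapper around the <see cref="CRC32"/> class.
/// </summary>
/// <example>
/// <para>
/// Illustrative sketch (editor's addition): the wrapper can be used like any
/// other <c>HashAlgorithm</c>; the result is the 4-byte CRC in big-endian order.
/// </para>
/// <code>
/// byte[] data = System.Text.Encoding.UTF8.GetBytes("hello");
/// using (var hasher = new CRC32Hash())
/// {
///     byte[] crcBytes = hasher.ComputeHash(data);
/// }
/// </code>
/// </example>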
public class CRC32Hash : HashAlgorithm
{
private CRC32 _Crc32 = new CRC32();

public override void Initialize()
{
_Crc32.Reset();
}

protected override void HashCore(byte[] buffer, int start, int length)
{
_Crc32.SlurpBlock(buffer, start, length);
}

protected override byte[] HashFinal()
{
uint crcValue = (uint)_Crc32.Crc32Result;
HashValue = new[]
{
(byte)((crcValue >> 24) & 0xff),
(byte)((crcValue >> 16) & 0xff),
(byte)((crcValue >> 8) & 0xff),
(byte)(crcValue & 0xff)
};
return HashValue;
}

public override int HashSize
{
get { return 32; }
}
}
|
||||
}
|
||||
1936
SabreTools.Library/External/Zlib/Deflate.cs
vendored
Normal file
1936
SabreTools.Library/External/Zlib/Deflate.cs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
759
SabreTools.Library/External/Zlib/DeflateStream.cs
vendored
Normal file
759
SabreTools.Library/External/Zlib/DeflateStream.cs
vendored
Normal file
@@ -0,0 +1,759 @@
|
||||
// DeflateStream.cs
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// Copyright (c) 2009-2010 Dino Chiesa.
|
||||
// All rights reserved.
|
||||
//
|
||||
// This code module is part of DotNetZip, a zipfile class library.
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// This code is licensed under the Microsoft Public License.
|
||||
// See the file License.txt for the license details.
|
||||
// More info on: http://dotnetzip.codeplex.com
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// last saved (in emacs):
|
||||
// Time-stamp: <2011-July-31 14:48:11>
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// This module defines the DeflateStream class, which can be used as a replacement for
|
||||
// the System.IO.Compression.DeflateStream class in the .NET BCL.
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
|
||||
using System;
|
||||
|
||||
namespace Ionic.Zlib
|
||||
{
|
||||
/// <summary>
|
||||
/// A class for compressing and decompressing streams using the Deflate algorithm.
|
||||
/// </summary>
|
||||
///
|
||||
/// <remarks>
|
||||
///
|
||||
/// <para>
|
||||
/// The DeflateStream is a <see
|
||||
/// href="http://en.wikipedia.org/wiki/Decorator_pattern">Decorator</see> on a <see
|
||||
/// cref="System.IO.Stream"/>. It adds DEFLATE compression or decompression to any
|
||||
/// stream.
|
||||
/// </para>
|
||||
///
|
||||
/// <para>
|
||||
/// Using this stream, applications can compress or decompress data via stream
|
||||
/// <c>Read</c> and <c>Write</c> operations. Either compression or decompression
|
||||
/// can occur through either reading or writing. The compression format used is
|
||||
/// DEFLATE, which is documented in <see
|
||||
/// href="http://www.ietf.org/rfc/rfc1951.txt">IETF RFC 1951</see>, "DEFLATE
|
||||
/// Compressed Data Format Specification version 1.3.".
|
||||
/// </para>
|
||||
///
|
||||
/// <para>
|
||||
/// This class is similar to <see cref="ZlibStream"/>, except that
|
||||
/// <c>ZlibStream</c> adds the <see href="http://www.ietf.org/rfc/rfc1950.txt">RFC
|
||||
/// 1950 - ZLIB</see> framing bytes to a compressed stream when compressing, or
|
||||
/// expects the RFC1950 framing bytes when decompressing. The <c>DeflateStream</c>
|
||||
/// does not.
|
||||
/// </para>
|
||||
///
|
||||
/// </remarks>
|
||||
///
|
||||
/// <seealso cref="ZlibStream" />
|
||||
/// <seealso cref="GZipStream" />
|
||||
public class DeflateStream : System.IO.Stream
|
||||
{
|
||||
internal ZlibBaseStream _baseStream;
|
||||
internal System.IO.Stream _innerStream;
|
||||
bool _disposed;
|
||||
|
||||
/// <summary>
|
||||
/// Create a DeflateStream using the specified CompressionMode.
|
||||
/// </summary>
|
||||
///
|
||||
/// <remarks>
|
||||
/// When mode is <c>CompressionMode.Compress</c>, the DeflateStream will use
|
||||
/// the default compression level. The "captive" stream will be closed when
|
||||
/// the DeflateStream is closed.
|
||||
/// </remarks>
|
||||
///
|
||||
/// <example>
|
||||
/// This example uses a DeflateStream to compress data from a file, and writes
|
||||
/// the compressed data to another file.
|
||||
/// <code>
|
||||
/// using (System.IO.Stream input = System.IO.File.Open(fileToCompress, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
|
||||
/// {
|
||||
/// using (var raw = System.IO.File.Create(fileToCompress + ".deflated"))
|
||||
/// {
|
||||
/// using (Stream compressor = new DeflateStream(raw, CompressionMode.Compress))
|
||||
/// {
|
||||
/// byte[] buffer = new byte[WORKING_BUFFER_SIZE];
|
||||
/// int n;
|
||||
/// while ((n= input.Read(buffer, 0, buffer.Length)) != 0)
|
||||
/// {
|
||||
/// compressor.Write(buffer, 0, n);
|
||||
/// }
|
||||
/// }
|
||||
/// }
|
||||
/// }
|
||||
/// </code>
|
||||
///
|
||||
/// <code lang="VB">
|
||||
/// Using input As Stream = File.Open(fileToCompress, FileMode.Open, FileAccess.Read, FileShare.ReadWrite)
|
||||
/// Using raw As FileStream = File.Create(fileToCompress & ".deflated")
|
||||
/// Using compressor As Stream = New DeflateStream(raw, CompressionMode.Compress)
|
||||
/// Dim buffer As Byte() = New Byte(4096) {}
|
||||
/// Dim n As Integer = -1
|
||||
/// Do While (n <> 0)
|
||||
/// If (n > 0) Then
|
||||
/// compressor.Write(buffer, 0, n)
|
||||
/// End If
|
||||
/// n = input.Read(buffer, 0, buffer.Length)
|
||||
/// Loop
|
||||
/// End Using
|
||||
/// End Using
|
||||
/// End Using
|
||||
/// </code>
|
||||
/// </example>
|
||||
/// <param name="stream">The stream which will be read or written.</param>
|
||||
/// <param name="mode">Indicates whether the DeflateStream will compress or decompress.</param>
|
||||
public DeflateStream(System.IO.Stream stream, CompressionMode mode)
|
||||
: this(stream, mode, CompressionLevel.Default, false)
|
||||
{
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Create a DeflateStream using the specified CompressionMode and the specified CompressionLevel.
|
||||
/// </summary>
|
||||
///
|
||||
/// <remarks>
|
||||
///
|
||||
/// <para>
|
||||
/// When mode is <c>CompressionMode.Decompress</c>, the level parameter is
|
||||
/// ignored. The "captive" stream will be closed when the DeflateStream is
|
||||
/// closed.
|
||||
/// </para>
|
||||
///
|
||||
/// </remarks>
|
||||
///
|
||||
/// <example>
|
||||
///
|
||||
/// This example uses a DeflateStream to compress data from a file, and writes
|
||||
/// the compressed data to another file.
|
||||
///
|
||||
/// <code>
|
||||
/// using (System.IO.Stream input = System.IO.File.Open(fileToCompress, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
|
||||
/// {
|
||||
/// using (var raw = System.IO.File.Create(fileToCompress + ".deflated"))
|
||||
/// {
|
||||
/// using (Stream compressor = new DeflateStream(raw,
|
||||
/// CompressionMode.Compress,
|
||||
/// CompressionLevel.BestCompression))
|
||||
/// {
|
||||
/// byte[] buffer = new byte[WORKING_BUFFER_SIZE];
|
||||
/// int n= -1;
|
||||
/// while (n != 0)
|
||||
/// {
|
||||
/// if (n > 0)
|
||||
/// compressor.Write(buffer, 0, n);
|
||||
/// n= input.Read(buffer, 0, buffer.Length);
|
||||
/// }
|
||||
/// }
|
||||
/// }
|
||||
/// }
|
||||
/// </code>
|
||||
///
|
||||
/// <code lang="VB">
|
||||
/// Using input As Stream = File.Open(fileToCompress, FileMode.Open, FileAccess.Read, FileShare.ReadWrite)
|
||||
/// Using raw As FileStream = File.Create(fileToCompress & ".deflated")
|
||||
/// Using compressor As Stream = New DeflateStream(raw, CompressionMode.Compress, CompressionLevel.BestCompression)
|
||||
/// Dim buffer As Byte() = New Byte(4096) {}
|
||||
/// Dim n As Integer = -1
|
||||
/// Do While (n <> 0)
|
||||
/// If (n > 0) Then
|
||||
/// compressor.Write(buffer, 0, n)
|
||||
/// End If
|
||||
/// n = input.Read(buffer, 0, buffer.Length)
|
||||
/// Loop
|
||||
/// End Using
|
||||
/// End Using
|
||||
/// End Using
|
||||
/// </code>
|
||||
/// </example>
|
||||
/// <param name="stream">The stream to be read or written while deflating or inflating.</param>
|
||||
/// <param name="mode">Indicates whether the <c>DeflateStream</c> will compress or decompress.</param>
|
||||
/// <param name="level">A tuning knob to trade speed for effectiveness.</param>
|
||||
public DeflateStream(System.IO.Stream stream, CompressionMode mode, CompressionLevel level)
|
||||
: this(stream, mode, level, false)
|
||||
{
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Create a <c>DeflateStream</c> using the specified
|
||||
/// <c>CompressionMode</c>, and explicitly specify whether the
|
||||
/// stream should be left open after Deflation or Inflation.
|
||||
/// </summary>
|
||||
///
|
||||
/// <remarks>
|
||||
///
|
||||
/// <para>
|
||||
/// This constructor allows the application to request that the captive stream
|
||||
/// remain open after the deflation or inflation occurs. By default, after
|
||||
/// <c>Close()</c> is called on the stream, the captive stream is also
|
||||
/// closed. In some cases this is not desired, for example if the stream is a
|
||||
/// memory stream that will be re-read after compression. Specify true for
|
||||
/// the <paramref name="leaveOpen"/> parameter to leave the stream open.
|
||||
/// </para>
|
||||
///
|
||||
/// <para>
|
||||
/// The <c>DeflateStream</c> will use the default compression level.
|
||||
/// </para>
|
||||
///
|
||||
/// <para>
|
||||
/// See the other overloads of this constructor for example code.
|
||||
/// </para>
|
||||
/// </remarks>
|
||||
///
|
||||
/// <param name="stream">
|
||||
/// The stream which will be read or written. This is called the
|
||||
/// "captive" stream in other places in this documentation.
|
||||
/// </param>
|
||||
///
|
||||
/// <param name="mode">
|
||||
/// Indicates whether the <c>DeflateStream</c> will compress or decompress.
|
||||
/// </param>
|
||||
///
|
||||
/// <param name="leaveOpen">true if the application would like the stream to
|
||||
/// remain open after inflation/deflation.</param>
|
||||
public DeflateStream(System.IO.Stream stream, CompressionMode mode, bool leaveOpen)
|
||||
: this(stream, mode, CompressionLevel.Default, leaveOpen)
|
||||
{
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Create a <c>DeflateStream</c> using the specified <c>CompressionMode</c>
|
||||
/// and the specified <c>CompressionLevel</c>, and explicitly specify whether
|
||||
/// the stream should be left open after Deflation or Inflation.
|
||||
/// </summary>
|
||||
///
|
||||
/// <remarks>
|
||||
///
|
||||
/// <para>
|
||||
/// When mode is <c>CompressionMode.Decompress</c>, the level parameter is ignored.
|
||||
/// </para>
|
||||
///
|
||||
/// <para>
|
||||
/// This constructor allows the application to request that the captive stream
|
||||
/// remain open after the deflation or inflation occurs. By default, after
|
||||
/// <c>Close()</c> is called on the stream, the captive stream is also
|
||||
/// closed. In some cases this is not desired, for example if the stream is a
|
||||
/// <see cref="System.IO.MemoryStream"/> that will be re-read after
|
||||
/// compression. Specify true for the <paramref name="leaveOpen"/> parameter
|
||||
/// to leave the stream open.
|
||||
/// </para>
|
||||
///
|
||||
/// </remarks>
|
||||
///
|
||||
/// <example>
|
||||
///
|
||||
/// This example shows how to use a <c>DeflateStream</c> to compress data from
|
||||
/// a file, and store the compressed data into another file.
|
||||
///
|
||||
/// <code>
|
||||
/// using (var output = System.IO.File.Create(fileToCompress + ".deflated"))
|
||||
/// {
|
||||
/// using (System.IO.Stream input = System.IO.File.Open(fileToCompress, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
|
||||
/// {
|
||||
/// using (Stream compressor = new DeflateStream(output, CompressionMode.Compress, CompressionLevel.BestCompression, true))
|
||||
/// {
|
||||
/// byte[] buffer = new byte[WORKING_BUFFER_SIZE];
|
||||
/// int n= -1;
|
||||
/// while (n != 0)
|
||||
/// {
|
||||
/// if (n > 0)
|
||||
/// compressor.Write(buffer, 0, n);
|
||||
/// n= input.Read(buffer, 0, buffer.Length);
|
||||
/// }
|
||||
/// }
|
||||
/// }
|
||||
/// // can write additional data to the output stream here
|
||||
/// }
|
||||
/// </code>
|
||||
///
|
||||
/// <code lang="VB">
|
||||
/// Using output As FileStream = File.Create(fileToCompress & ".deflated")
|
||||
/// Using input As Stream = File.Open(fileToCompress, FileMode.Open, FileAccess.Read, FileShare.ReadWrite)
|
||||
/// Using compressor As Stream = New DeflateStream(output, CompressionMode.Compress, CompressionLevel.BestCompression, True)
|
||||
/// Dim buffer As Byte() = New Byte(4096) {}
|
||||
/// Dim n As Integer = -1
|
||||
/// Do While (n <> 0)
|
||||
/// If (n > 0) Then
|
||||
/// compressor.Write(buffer, 0, n)
|
||||
/// End If
|
||||
/// n = input.Read(buffer, 0, buffer.Length)
|
||||
/// Loop
|
||||
/// End Using
|
||||
/// End Using
|
||||
/// ' can write additional data to the output stream here.
|
||||
/// End Using
|
||||
/// </code>
|
||||
/// </example>
|
||||
/// <param name="stream">The stream which will be read or written.</param>
|
||||
/// <param name="mode">Indicates whether the DeflateStream will compress or decompress.</param>
|
||||
/// <param name="leaveOpen">true if the application would like the stream to remain open after inflation/deflation.</param>
|
||||
/// <param name="level">A tuning knob to trade speed for effectiveness.</param>
|
||||
public DeflateStream(System.IO.Stream stream, CompressionMode mode, CompressionLevel level, bool leaveOpen)
|
||||
{
|
||||
_innerStream = stream;
|
||||
_baseStream = new ZlibBaseStream(stream, mode, level, ZlibStreamFlavor.DEFLATE, leaveOpen);
|
||||
}
|
||||
|
||||
#region Zlib properties
|
||||
|
||||
/// <summary>
|
||||
/// This property sets the flush behavior on the stream.
|
||||
/// </summary>
|
||||
/// <remarks> See the ZLIB documentation for the meaning of the flush behavior.
|
||||
/// </remarks>
|
||||
virtual public FlushType FlushMode
|
||||
{
|
||||
get { return (this._baseStream._flushMode); }
|
||||
set
|
||||
{
|
||||
if (_disposed)
|
||||
{
|
||||
throw new ObjectDisposedException("DeflateStream");
|
||||
}
|
||||
this._baseStream._flushMode = value;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// The size of the working buffer for the compression codec.
|
||||
/// </summary>
|
||||
///
|
||||
/// <remarks>
|
||||
/// <para>
|
||||
/// The working buffer is used for all stream operations. The default size is
|
||||
/// 1024 bytes. The minimum size is 128 bytes. You may get better performance
|
||||
/// with a larger buffer. Then again, you might not. You would have to test
|
||||
/// it.
|
||||
/// </para>
|
||||
///
|
||||
/// <para>
|
||||
/// Set this before the first call to <c>Read()</c> or <c>Write()</c> on the
|
||||
/// stream. If you try to set it afterwards, it will throw.
|
||||
/// </para>
|
||||
/// </remarks>
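/// <example>
/// <para>
/// Illustrative sketch (editor's addition): enlarging the working buffer
/// before any data has been read or written through the stream. <c>output</c>
/// stands in for any writable stream.
/// </para>
/// <code>
/// var compressor = new DeflateStream(output, CompressionMode.Compress);
/// compressor.BufferSize = 64 * 1024;   // must be set before the first Read()/Write()
/// </code>
/// </example>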
|
||||
public int BufferSize
|
||||
{
|
||||
get
|
||||
{
|
||||
return this._baseStream._bufferSize;
|
||||
}
|
||||
set
|
||||
{
|
||||
if (_disposed)
|
||||
{
|
||||
throw new ObjectDisposedException("DeflateStream");
|
||||
}
|
||||
if (this._baseStream._workingBuffer != null)
|
||||
{
|
||||
throw new ZlibException("The working buffer is already set.");
|
||||
}
|
||||
if (value < ZlibConstants.WorkingBufferSizeMin)
|
||||
{
|
||||
throw new ZlibException(String.Format("Don't be silly. {0} bytes?? Use a bigger buffer, at least {1}.", value, ZlibConstants.WorkingBufferSizeMin));
|
||||
}
|
||||
this._baseStream._bufferSize = value;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// The ZLIB strategy to be used during compression.
|
||||
/// </summary>
|
||||
///
|
||||
/// <remarks>
|
||||
/// By tweaking this parameter, you may be able to optimize the compression for
|
||||
/// data with particular characteristics.
|
||||
/// </remarks>
|
||||
public CompressionStrategy Strategy
|
||||
{
|
||||
get
|
||||
{
|
||||
return this._baseStream.Strategy;
|
||||
}
|
||||
set
{
if (_disposed)
{
throw new ObjectDisposedException("DeflateStream");
}
this._baseStream.Strategy = value;
}
|
||||
}
|
||||
|
||||
/// <summary> Returns the total number of bytes input so far.</summary>
|
||||
virtual public long TotalIn
|
||||
{
|
||||
get
|
||||
{
|
||||
return this._baseStream._z.TotalBytesIn;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary> Returns the total number of bytes output so far.</summary>
|
||||
virtual public long TotalOut
|
||||
{
|
||||
get
|
||||
{
|
||||
return this._baseStream._z.TotalBytesOut;
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region System.IO.Stream methods
|
||||
|
||||
/// <summary>
|
||||
/// Dispose the stream.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// <para>
|
||||
/// This may or may not result in a <c>Close()</c> call on the captive
|
||||
/// stream. See the constructors that have a <c>leaveOpen</c> parameter
|
||||
/// for more information.
|
||||
/// </para>
|
||||
/// <para>
|
||||
/// Application code won't call this code directly. This method may be
|
||||
/// invoked in two distinct scenarios. If disposing == true, the method
|
||||
/// has been called directly or indirectly by a user's code, for example
|
||||
/// via the public Dispose() method. In this case, both managed and
|
||||
/// unmanaged resources can be referenced and disposed. If disposing ==
|
||||
/// false, the method has been called by the runtime from inside the
|
||||
/// object finalizer and this method should not reference other objects;
|
||||
/// in that case only unmanaged resources must be referenced or
|
||||
/// disposed.
|
||||
/// </para>
|
||||
/// </remarks>
|
||||
/// <param name="disposing">
|
||||
/// true if the Dispose method was invoked by user code.
|
||||
/// </param>
|
||||
protected override void Dispose(bool disposing)
|
||||
{
|
||||
try
|
||||
{
|
||||
if (!_disposed)
|
||||
{
|
||||
if (disposing && (this._baseStream != null))
|
||||
{
|
||||
this._baseStream.Close();
|
||||
}
|
||||
_disposed = true;
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
base.Dispose(disposing);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Indicates whether the stream can be read.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// The return value depends on whether the captive stream supports reading.
|
||||
/// </remarks>
|
||||
public override bool CanRead
|
||||
{
|
||||
get
|
||||
{
|
||||
if (_disposed)
|
||||
{
|
||||
throw new ObjectDisposedException("DeflateStream");
|
||||
}
|
||||
return _baseStream._stream.CanRead;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Indicates whether the stream supports Seek operations.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// Always returns false.
|
||||
/// </remarks>
|
||||
public override bool CanSeek
|
||||
{
|
||||
get { return false; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Indicates whether the stream can be written.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// The return value depends on whether the captive stream supports writing.
|
||||
/// </remarks>
|
||||
public override bool CanWrite
|
||||
{
|
||||
get
|
||||
{
|
||||
if (_disposed)
|
||||
{
|
||||
throw new ObjectDisposedException("DeflateStream");
|
||||
}
|
||||
return _baseStream._stream.CanWrite;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Flush the stream.
|
||||
/// </summary>
|
||||
public override void Flush()
|
||||
{
|
||||
if (_disposed)
|
||||
{
|
||||
throw new ObjectDisposedException("DeflateStream");
|
||||
}
|
||||
_baseStream.Flush();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Reading this property always throws a <see cref="NotImplementedException"/>.
|
||||
/// </summary>
|
||||
public override long Length
|
||||
{
|
||||
get { throw new NotImplementedException(); }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// The position of the stream pointer.
|
||||
/// </summary>
|
||||
///
|
||||
/// <remarks>
|
||||
/// Setting this property always throws a <see
|
||||
/// cref="NotImplementedException"/>. Reading will return the total bytes
|
||||
/// written out, if used in writing, or the total bytes read in, if used in
|
||||
/// reading. The count may refer to compressed bytes or uncompressed bytes,
|
||||
/// depending on how you've used the stream.
|
||||
/// </remarks>
|
||||
public override long Position
|
||||
{
|
||||
get
|
||||
{
|
||||
if (this._baseStream._streamMode == ZlibBaseStream.StreamMode.Writer)
|
||||
{
|
||||
return this._baseStream._z.TotalBytesOut;
|
||||
}
|
||||
if (this._baseStream._streamMode == ZlibBaseStream.StreamMode.Reader)
|
||||
{
|
||||
return this._baseStream._z.TotalBytesIn;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
set { throw new NotImplementedException(); }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Read data from the stream.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
///
|
||||
/// <para>
|
||||
/// If you wish to use the <c>DeflateStream</c> to compress data while
|
||||
/// reading, you can create a <c>DeflateStream</c> with
|
||||
/// <c>CompressionMode.Compress</c>, providing an uncompressed data stream.
|
||||
/// Then call Read() on that <c>DeflateStream</c>, and the data read will be
|
||||
/// compressed as you read. If you wish to use the <c>DeflateStream</c> to
|
||||
/// decompress data while reading, you can create a <c>DeflateStream</c> with
|
||||
/// <c>CompressionMode.Decompress</c>, providing a readable compressed data
|
||||
/// stream. Then call Read() on that <c>DeflateStream</c>, and the data read
|
||||
/// will be decompressed as you read.
|
||||
/// </para>
|
||||
///
|
||||
/// <para>
|
||||
/// A <c>DeflateStream</c> can be used for <c>Read()</c> or <c>Write()</c>, but not both.
|
||||
/// </para>
|
||||
///
|
||||
/// </remarks>
|
||||
/// <param name="buffer">The buffer into which the read data should be placed.</param>
|
||||
/// <param name="offset">the offset within that data array to put the first byte read.</param>
|
||||
/// <param name="count">the number of bytes to read.</param>
|
||||
/// <returns>the number of bytes actually read</returns>
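/// <example>
/// <para>
/// Illustrative sketch (editor's addition): decompressing while reading;
/// <c>compressedFile</c> is a placeholder path.
/// </para>
/// <code>
/// using (var input = System.IO.File.OpenRead(compressedFile))
/// using (var decompressor = new DeflateStream(input, CompressionMode.Decompress))
/// using (var output = new System.IO.MemoryStream())
/// {
///     byte[] buffer = new byte[4096];
///     int n;
///     while ((n = decompressor.Read(buffer, 0, buffer.Length)) > 0)
///     {
///         output.Write(buffer, 0, n);
///     }
/// }
/// </code>
/// </example>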
|
||||
public override int Read(byte[] buffer, int offset, int count)
|
||||
{
|
||||
if (_disposed)
|
||||
{
|
||||
throw new ObjectDisposedException("DeflateStream");
|
||||
}
|
||||
return _baseStream.Read(buffer, offset, count);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Calling this method always throws a <see cref="NotImplementedException"/>.
|
||||
/// </summary>
|
||||
/// <param name="offset">this is irrelevant, since it will always throw!</param>
|
||||
/// <param name="origin">this is irrelevant, since it will always throw!</param>
|
||||
/// <returns>irrelevant!</returns>
|
||||
public override long Seek(long offset, System.IO.SeekOrigin origin)
|
||||
{
|
||||
throw new NotImplementedException();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Calling this method always throws a <see cref="NotImplementedException"/>.
|
||||
/// </summary>
|
||||
/// <param name="value">this is irrelevant, since it will always throw!</param>
|
||||
public override void SetLength(long value)
|
||||
{
|
||||
throw new NotImplementedException();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Write data to the stream.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
///
|
||||
/// <para>
|
||||
/// If you wish to use the <c>DeflateStream</c> to compress data while
|
||||
/// writing, you can create a <c>DeflateStream</c> with
|
||||
/// <c>CompressionMode.Compress</c>, and a writable output stream. Then call
|
||||
/// <c>Write()</c> on that <c>DeflateStream</c>, providing uncompressed data
|
||||
/// as input. The data sent to the output stream will be the compressed form
|
||||
/// of the data written. If you wish to use the <c>DeflateStream</c> to
|
||||
/// decompress data while writing, you can create a <c>DeflateStream</c> with
|
||||
/// <c>CompressionMode.Decompress</c>, and a writable output stream. Then
|
||||
/// call <c>Write()</c> on that stream, providing previously compressed
|
||||
/// data. The data sent to the output stream will be the decompressed form of
|
||||
/// the data written.
|
||||
/// </para>
|
||||
///
|
||||
/// <para>
|
||||
/// A <c>DeflateStream</c> can be used for <c>Read()</c> or <c>Write()</c>,
|
||||
/// but not both.
|
||||
/// </para>
|
||||
///
|
||||
/// </remarks>
|
||||
///
|
||||
/// <param name="buffer">The buffer holding data to write to the stream.</param>
|
||||
/// <param name="offset">the offset within that data array to find the first byte to write.</param>
|
||||
/// <param name="count">the number of bytes to write.</param>
|
||||
public override void Write(byte[] buffer, int offset, int count)
|
||||
{
|
||||
if (_disposed) throw new ObjectDisposedException("DeflateStream");
|
||||
_baseStream.Write(buffer, offset, count);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
/// <summary>
|
||||
/// Compress a string into a byte array using DEFLATE (RFC 1951).
|
||||
/// </summary>
|
||||
///
|
||||
/// <remarks>
|
||||
/// Uncompress it with <see cref="DeflateStream.UncompressString(byte[])"/>.
|
||||
/// </remarks>
|
||||
///
|
||||
/// <seealso cref="DeflateStream.UncompressString(byte[])">DeflateStream.UncompressString(byte[])</seealso>
|
||||
/// <seealso cref="DeflateStream.CompressBuffer(byte[])">DeflateStream.CompressBuffer(byte[])</seealso>
|
||||
/// <seealso cref="GZipStream.CompressString(string)">GZipStream.CompressString(string)</seealso>
|
||||
/// <seealso cref="ZlibStream.CompressString(string)">ZlibStream.CompressString(string)</seealso>
|
||||
///
|
||||
/// <param name="s">
|
||||
/// A string to compress. The string will first be encoded
|
||||
/// using UTF8, then compressed.
|
||||
/// </param>
|
||||
///
|
||||
/// <returns>The string in compressed form</returns>
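/// <example>
/// <para>
/// Illustrative round-trip sketch (editor's addition) pairing this method
/// with <see cref="UncompressString(byte[])"/>.
/// </para>
/// <code>
/// byte[] compressed = DeflateStream.CompressString("Hello, DEFLATE");
/// string original = DeflateStream.UncompressString(compressed);
/// </code>
/// </example>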
|
||||
public static byte[] CompressString(String s)
|
||||
{
|
||||
using (var ms = new System.IO.MemoryStream())
|
||||
{
|
||||
System.IO.Stream compressor =
|
||||
new DeflateStream(ms, CompressionMode.Compress, CompressionLevel.BestCompression);
|
||||
ZlibBaseStream.CompressString(s, compressor);
|
||||
return ms.ToArray();
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Compress a byte array into a new byte array using DEFLATE.
|
||||
/// </summary>
|
||||
///
|
||||
/// <remarks>
|
||||
/// Uncompress it with <see cref="DeflateStream.UncompressBuffer(byte[])"/>.
|
||||
/// </remarks>
|
||||
///
|
||||
/// <seealso cref="DeflateStream.CompressString(string)">DeflateStream.CompressString(string)</seealso>
|
||||
/// <seealso cref="DeflateStream.UncompressBuffer(byte[])">DeflateStream.UncompressBuffer(byte[])</seealso>
|
||||
/// <seealso cref="GZipStream.CompressBuffer(byte[])">GZipStream.CompressBuffer(byte[])</seealso>
|
||||
/// <seealso cref="ZlibStream.CompressBuffer(byte[])">ZlibStream.CompressBuffer(byte[])</seealso>
|
||||
///
|
||||
/// <param name="b">
|
||||
/// A buffer to compress.
|
||||
/// </param>
|
||||
///
|
||||
/// <returns>The data in compressed form</returns>
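/// <example>
/// <para>
/// Illustrative round-trip sketch (editor's addition) pairing this method
/// with <see cref="UncompressBuffer(byte[])"/>.
/// </para>
/// <code>
/// byte[] raw = System.Text.Encoding.UTF8.GetBytes("Hello, DEFLATE");
/// byte[] packed = DeflateStream.CompressBuffer(raw);
/// byte[] unpacked = DeflateStream.UncompressBuffer(packed);
/// </code>
/// </example>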
|
||||
public static byte[] CompressBuffer(byte[] b)
|
||||
{
|
||||
using (var ms = new System.IO.MemoryStream())
|
||||
{
|
||||
System.IO.Stream compressor =
|
||||
new DeflateStream(ms, CompressionMode.Compress, CompressionLevel.BestCompression);
|
||||
|
||||
ZlibBaseStream.CompressBuffer(b, compressor);
|
||||
return ms.ToArray();
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Uncompress a DEFLATE'd byte array into a single string.
|
||||
/// </summary>
|
||||
///
|
||||
/// <seealso cref="DeflateStream.CompressString(String)">DeflateStream.CompressString(String)</seealso>
|
||||
/// <seealso cref="DeflateStream.UncompressBuffer(byte[])">DeflateStream.UncompressBuffer(byte[])</seealso>
|
||||
/// <seealso cref="GZipStream.UncompressString(byte[])">GZipStream.UncompressString(byte[])</seealso>
|
||||
/// <seealso cref="ZlibStream.UncompressString(byte[])">ZlibStream.UncompressString(byte[])</seealso>
|
||||
///
|
||||
/// <param name="compressed">
|
||||
/// A buffer containing DEFLATE-compressed data.
|
||||
/// </param>
|
||||
///
|
||||
/// <returns>The uncompressed string</returns>
|
||||
public static String UncompressString(byte[] compressed)
|
||||
{
|
||||
using (var input = new System.IO.MemoryStream(compressed))
|
||||
{
|
||||
System.IO.Stream decompressor =
|
||||
new DeflateStream(input, CompressionMode.Decompress);
|
||||
|
||||
return ZlibBaseStream.UncompressString(compressed, decompressor);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Uncompress a DEFLATE'd byte array into a byte array.
|
||||
/// </summary>
|
||||
///
|
||||
/// <seealso cref="DeflateStream.CompressBuffer(byte[])">DeflateStream.CompressBuffer(byte[])</seealso>
|
||||
/// <seealso cref="DeflateStream.UncompressString(byte[])">DeflateStream.UncompressString(byte[])</seealso>
|
||||
/// <seealso cref="GZipStream.UncompressBuffer(byte[])">GZipStream.UncompressBuffer(byte[])</seealso>
|
||||
/// <seealso cref="ZlibStream.UncompressBuffer(byte[])">ZlibStream.UncompressBuffer(byte[])</seealso>
|
||||
///
|
||||
/// <param name="compressed">
|
||||
/// A buffer containing data that has been compressed with DEFLATE.
|
||||
/// </param>
|
||||
///
|
||||
/// <returns>The data in uncompressed form</returns>
|
||||
public static byte[] UncompressBuffer(byte[] compressed)
|
||||
{
|
||||
using (var input = new System.IO.MemoryStream(compressed))
|
||||
{
|
||||
System.IO.Stream decompressor =
|
||||
new DeflateStream(input, CompressionMode.Decompress);
|
||||
|
||||
return ZlibBaseStream.UncompressBuffer(compressed, decompressor);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
1072
SabreTools.Library/External/Zlib/GZipStream.cs
vendored
Normal file
1072
SabreTools.Library/External/Zlib/GZipStream.cs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
441
SabreTools.Library/External/Zlib/InfTree.cs
vendored
Normal file
441
SabreTools.Library/External/Zlib/InfTree.cs
vendored
Normal file
@@ -0,0 +1,441 @@
|
||||
// Inftree.cs
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// Copyright (c) 2009 Dino Chiesa and Microsoft Corporation.
|
||||
// All rights reserved.
|
||||
//
|
||||
// This code module is part of DotNetZip, a zipfile class library.
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// This code is licensed under the Microsoft Public License.
|
||||
// See the file License.txt for the license details.
|
||||
// More info on: http://dotnetzip.codeplex.com
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// last saved (in emacs):
|
||||
// Time-stamp: <2009-October-28 12:43:54>
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// This module defines classes used in decompression. This code is derived
|
||||
// from the jzlib implementation of zlib. In keeping with the license for jzlib,
|
||||
// the copyright to that code is below.
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// Copyright (c) 2000,2001,2002,2003 ymnk, JCraft,Inc. All rights reserved.
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are met:
|
||||
//
|
||||
// 1. Redistributions of source code must retain the above copyright notice,
|
||||
// this list of conditions and the following disclaimer.
|
||||
//
|
||||
// 2. Redistributions in binary form must reproduce the above copyright
|
||||
// notice, this list of conditions and the following disclaimer in
|
||||
// the documentation and/or other materials provided with the distribution.
|
||||
//
|
||||
// 3. The names of the authors may not be used to endorse or promote products
|
||||
// derived from this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES,
|
||||
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
// FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JCRAFT,
|
||||
// INC. OR ANY CONTRIBUTORS TO THIS SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
|
||||
// OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
|
||||
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
//
|
||||
// -----------------------------------------------------------------------
|
||||
//
|
||||
// This program is based on zlib-1.1.3; credit to authors
|
||||
// Jean-loup Gailly(jloup@gzip.org) and Mark Adler(madler@alumni.caltech.edu)
|
||||
// and contributors of zlib.
|
||||
//
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
using System;
|
||||
|
||||
namespace Ionic.Zlib
|
||||
{
|
||||
sealed class InfTree
|
||||
{
|
||||
private const int MANY = 1440;
|
||||
|
||||
private const int Z_OK = 0;
|
||||
private const int Z_STREAM_END = 1;
|
||||
private const int Z_NEED_DICT = 2;
|
||||
private const int Z_ERRNO = - 1;
|
||||
private const int Z_STREAM_ERROR = - 2;
|
||||
private const int Z_DATA_ERROR = - 3;
|
||||
private const int Z_MEM_ERROR = - 4;
|
||||
private const int Z_BUF_ERROR = - 5;
|
||||
private const int Z_VERSION_ERROR = - 6;
|
||||
|
||||
internal const int fixed_bl = 9;
|
||||
internal const int fixed_bd = 5;
|
||||
|
||||
//UPGRADE_NOTE: Final was removed from the declaration of 'fixed_tl'. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
|
||||
internal static readonly int[] fixed_tl = new int[]{96, 7, 256, 0, 8, 80, 0, 8, 16, 84, 8, 115, 82, 7, 31, 0, 8, 112, 0, 8, 48, 0, 9, 192, 80, 7, 10, 0, 8, 96, 0, 8, 32, 0, 9, 160, 0, 8, 0, 0, 8, 128, 0, 8, 64, 0, 9, 224, 80, 7, 6, 0, 8, 88, 0, 8, 24, 0, 9, 144, 83, 7, 59, 0, 8, 120, 0, 8, 56, 0, 9, 208, 81, 7, 17, 0, 8, 104, 0, 8, 40, 0, 9, 176, 0, 8, 8, 0, 8, 136, 0, 8, 72, 0, 9, 240, 80, 7, 4, 0, 8, 84, 0, 8, 20, 85, 8, 227, 83, 7, 43, 0, 8, 116, 0, 8, 52, 0, 9, 200, 81, 7, 13, 0, 8, 100, 0, 8, 36, 0, 9, 168, 0, 8, 4, 0, 8, 132, 0, 8, 68, 0, 9, 232, 80, 7, 8, 0, 8, 92, 0, 8, 28, 0, 9, 152, 84, 7, 83, 0, 8, 124, 0, 8, 60, 0, 9, 216, 82, 7, 23, 0, 8, 108, 0, 8, 44, 0, 9, 184, 0, 8, 12, 0, 8, 140, 0, 8, 76, 0, 9, 248, 80, 7, 3, 0, 8, 82, 0, 8, 18, 85, 8, 163, 83, 7, 35, 0, 8, 114, 0, 8, 50, 0, 9, 196, 81, 7, 11, 0, 8, 98, 0, 8, 34, 0, 9, 164, 0, 8, 2, 0, 8, 130, 0, 8, 66, 0, 9, 228, 80, 7, 7, 0, 8, 90, 0, 8, 26, 0, 9, 148, 84, 7, 67, 0, 8, 122, 0, 8, 58, 0, 9, 212, 82, 7, 19, 0, 8, 106, 0, 8, 42, 0, 9, 180, 0, 8, 10, 0, 8, 138, 0, 8, 74, 0, 9, 244, 80, 7, 5, 0, 8, 86, 0, 8, 22, 192, 8, 0, 83, 7, 51, 0, 8, 118, 0, 8, 54, 0, 9, 204, 81, 7, 15, 0, 8, 102, 0, 8, 38, 0, 9, 172, 0, 8, 6, 0, 8, 134, 0, 8, 70, 0, 9, 236, 80, 7, 9, 0, 8, 94, 0, 8, 30, 0, 9, 156, 84, 7, 99, 0, 8, 126, 0, 8, 62, 0, 9, 220, 82, 7, 27, 0, 8, 110, 0, 8, 46, 0, 9, 188, 0, 8, 14, 0, 8, 142, 0, 8, 78, 0, 9, 252, 96, 7, 256, 0, 8, 81, 0, 8, 17, 85, 8, 131, 82, 7, 31, 0, 8, 113, 0, 8, 49, 0, 9, 194, 80, 7, 10, 0, 8, 97, 0, 8, 33, 0, 9, 162, 0, 8, 1, 0, 8, 129, 0, 8, 65, 0, 9, 226, 80, 7, 6, 0, 8, 89, 0, 8, 25, 0, 9, 146, 83, 7, 59, 0, 8, 121, 0, 8, 57, 0, 9, 210, 81, 7, 17, 0, 8, 105, 0, 8, 41, 0, 9, 178, 0, 8, 9, 0, 8, 137, 0, 8, 73, 0, 9, 242, 80, 7, 4, 0, 8, 85, 0, 8, 21, 80, 8, 258, 83, 7, 43, 0, 8, 117, 0, 8, 53, 0, 9, 202, 81, 7, 13, 0, 8, 101, 0, 8, 37, 0, 9, 170, 0, 8, 5, 0, 8, 133, 0, 8, 69, 0, 9, 234, 80, 7, 8, 0, 8, 93, 0, 8, 29, 0, 9, 154, 84, 7, 83, 0, 8, 125, 0, 8, 61, 0, 9, 218, 82, 7, 23, 0, 8, 109, 0, 8, 45, 0, 9, 186,
|
||||
0, 8, 13, 0, 8, 141, 0, 8, 77, 0, 9, 250, 80, 7, 3, 0, 8, 83, 0, 8, 19, 85, 8, 195, 83, 7, 35, 0, 8, 115, 0, 8, 51, 0, 9, 198, 81, 7, 11, 0, 8, 99, 0, 8, 35, 0, 9, 166, 0, 8, 3, 0, 8, 131, 0, 8, 67, 0, 9, 230, 80, 7, 7, 0, 8, 91, 0, 8, 27, 0, 9, 150, 84, 7, 67, 0, 8, 123, 0, 8, 59, 0, 9, 214, 82, 7, 19, 0, 8, 107, 0, 8, 43, 0, 9, 182, 0, 8, 11, 0, 8, 139, 0, 8, 75, 0, 9, 246, 80, 7, 5, 0, 8, 87, 0, 8, 23, 192, 8, 0, 83, 7, 51, 0, 8, 119, 0, 8, 55, 0, 9, 206, 81, 7, 15, 0, 8, 103, 0, 8, 39, 0, 9, 174, 0, 8, 7, 0, 8, 135, 0, 8, 71, 0, 9, 238, 80, 7, 9, 0, 8, 95, 0, 8, 31, 0, 9, 158, 84, 7, 99, 0, 8, 127, 0, 8, 63, 0, 9, 222, 82, 7, 27, 0, 8, 111, 0, 8, 47, 0, 9, 190, 0, 8, 15, 0, 8, 143, 0, 8, 79, 0, 9, 254, 96, 7, 256, 0, 8, 80, 0, 8, 16, 84, 8, 115, 82, 7, 31, 0, 8, 112, 0, 8, 48, 0, 9, 193, 80, 7, 10, 0, 8, 96, 0, 8, 32, 0, 9, 161, 0, 8, 0, 0, 8, 128, 0, 8, 64, 0, 9, 225, 80, 7, 6, 0, 8, 88, 0, 8, 24, 0, 9, 145, 83, 7, 59, 0, 8, 120, 0, 8, 56, 0, 9, 209, 81, 7, 17, 0, 8, 104, 0, 8, 40, 0, 9, 177, 0, 8, 8, 0, 8, 136, 0, 8, 72, 0, 9, 241, 80, 7, 4, 0, 8, 84, 0, 8, 20, 85, 8, 227, 83, 7, 43, 0, 8, 116, 0, 8, 52, 0, 9, 201, 81, 7, 13, 0, 8, 100, 0, 8, 36, 0, 9, 169, 0, 8, 4, 0, 8, 132, 0, 8, 68, 0, 9, 233, 80, 7, 8, 0, 8, 92, 0, 8, 28, 0, 9, 153, 84, 7, 83, 0, 8, 124, 0, 8, 60, 0, 9, 217, 82, 7, 23, 0, 8, 108, 0, 8, 44, 0, 9, 185, 0, 8, 12, 0, 8, 140, 0, 8, 76, 0, 9, 249, 80, 7, 3, 0, 8, 82, 0, 8, 18, 85, 8, 163, 83, 7, 35, 0, 8, 114, 0, 8, 50, 0, 9, 197, 81, 7, 11, 0, 8, 98, 0, 8, 34, 0, 9, 165, 0, 8, 2, 0, 8, 130, 0, 8, 66, 0, 9, 229, 80, 7, 7, 0, 8, 90, 0, 8, 26, 0, 9, 149, 84, 7, 67, 0, 8, 122, 0, 8, 58, 0, 9, 213, 82, 7, 19, 0, 8, 106, 0, 8, 42, 0, 9, 181, 0, 8, 10, 0, 8, 138, 0, 8, 74, 0, 9, 245, 80, 7, 5, 0, 8, 86, 0, 8, 22, 192, 8, 0, 83, 7, 51, 0, 8, 118, 0, 8, 54, 0, 9, 205, 81, 7, 15, 0, 8, 102, 0, 8, 38, 0, 9, 173, 0, 8, 6, 0, 8, 134, 0, 8, 70, 0, 9, 237, 80, 7, 9, 0, 8, 94, 0, 8, 30, 0, 9, 157, 84, 7, 99, 0, 8, 126, 0, 8, 62, 0, 9, 221, 82, 7, 27, 0, 8, 110, 0, 8, 46, 0, 9, 189, 0, 8,
|
||||
14, 0, 8, 142, 0, 8, 78, 0, 9, 253, 96, 7, 256, 0, 8, 81, 0, 8, 17, 85, 8, 131, 82, 7, 31, 0, 8, 113, 0, 8, 49, 0, 9, 195, 80, 7, 10, 0, 8, 97, 0, 8, 33, 0, 9, 163, 0, 8, 1, 0, 8, 129, 0, 8, 65, 0, 9, 227, 80, 7, 6, 0, 8, 89, 0, 8, 25, 0, 9, 147, 83, 7, 59, 0, 8, 121, 0, 8, 57, 0, 9, 211, 81, 7, 17, 0, 8, 105, 0, 8, 41, 0, 9, 179, 0, 8, 9, 0, 8, 137, 0, 8, 73, 0, 9, 243, 80, 7, 4, 0, 8, 85, 0, 8, 21, 80, 8, 258, 83, 7, 43, 0, 8, 117, 0, 8, 53, 0, 9, 203, 81, 7, 13, 0, 8, 101, 0, 8, 37, 0, 9, 171, 0, 8, 5, 0, 8, 133, 0, 8, 69, 0, 9, 235, 80, 7, 8, 0, 8, 93, 0, 8, 29, 0, 9, 155, 84, 7, 83, 0, 8, 125, 0, 8, 61, 0, 9, 219, 82, 7, 23, 0, 8, 109, 0, 8, 45, 0, 9, 187, 0, 8, 13, 0, 8, 141, 0, 8, 77, 0, 9, 251, 80, 7, 3, 0, 8, 83, 0, 8, 19, 85, 8, 195, 83, 7, 35, 0, 8, 115, 0, 8, 51, 0, 9, 199, 81, 7, 11, 0, 8, 99, 0, 8, 35, 0, 9, 167, 0, 8, 3, 0, 8, 131, 0, 8, 67, 0, 9, 231, 80, 7, 7, 0, 8, 91, 0, 8, 27, 0, 9, 151, 84, 7, 67, 0, 8, 123, 0, 8, 59, 0, 9, 215, 82, 7, 19, 0, 8, 107, 0, 8, 43, 0, 9, 183, 0, 8, 11, 0, 8, 139, 0, 8, 75, 0, 9, 247, 80, 7, 5, 0, 8, 87, 0, 8, 23, 192, 8, 0, 83, 7, 51, 0, 8, 119, 0, 8, 55, 0, 9, 207, 81, 7, 15, 0, 8, 103, 0, 8, 39, 0, 9, 175, 0, 8, 7, 0, 8, 135, 0, 8, 71, 0, 9, 239, 80, 7, 9, 0, 8, 95, 0, 8, 31, 0, 9, 159, 84, 7, 99, 0, 8, 127, 0, 8, 63, 0, 9, 223, 82, 7, 27, 0, 8, 111, 0, 8, 47, 0, 9, 191, 0, 8, 15, 0, 8, 143, 0, 8, 79, 0, 9, 255};
|
||||
//UPGRADE_NOTE: Final was removed from the declaration of 'fixed_td'. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
|
||||
internal static readonly int[] fixed_td = new int[]{80, 5, 1, 87, 5, 257, 83, 5, 17, 91, 5, 4097, 81, 5, 5, 89, 5, 1025, 85, 5, 65, 93, 5, 16385, 80, 5, 3, 88, 5, 513, 84, 5, 33, 92, 5, 8193, 82, 5, 9, 90, 5, 2049, 86, 5, 129, 192, 5, 24577, 80, 5, 2, 87, 5, 385, 83, 5, 25, 91, 5, 6145, 81, 5, 7, 89, 5, 1537, 85, 5, 97, 93, 5, 24577, 80, 5, 4, 88, 5, 769, 84, 5, 49, 92, 5, 12289, 82, 5, 13, 90, 5, 3073, 86, 5, 193, 192, 5, 24577};
|
||||
|
||||
// Tables for deflate from PKZIP's appnote.txt.
|
||||
//UPGRADE_NOTE: Final was removed from the declaration of 'cplens'. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
|
||||
internal static readonly int[] cplens = new int[]{3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31, 35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0};
|
||||
|
||||
// see note #13 above about 258
|
||||
//UPGRADE_NOTE: Final was removed from the declaration of 'cplext'. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
|
||||
internal static readonly int[] cplext = new int[]{0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, 112, 112};
|
||||
|
||||
//UPGRADE_NOTE: Final was removed from the declaration of 'cpdist'. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
|
||||
        internal static readonly int[] cpdist = new int[]{1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193, 257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145, 8193, 12289, 16385, 24577};

        //UPGRADE_NOTE: Final was removed from the declaration of 'cpdext'. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
        internal static readonly int[] cpdext = new int[]{0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13};

        // If BMAX needs to be larger than 16, then h and x[] should be uLong.
        internal const int BMAX = 15; // maximum bit length of any code

        internal int[] hn = null; // hufts used in space
        internal int[] v = null;  // work area for huft_build
        internal int[] c = null;  // bit length count table
        internal int[] r = null;  // table entry for structure assignment
        internal int[] u = null;  // table stack
        internal int[] x = null;  // bit offsets, then code stack

        private int huft_build(int[] b, int bindex, int n, int s, int[] d, int[] e, int[] t, int[] m, int[] hp, int[] hn, int[] v)
        {
            // Given a list of code lengths and a maximum table size, make a set of
            // tables to decode that set of codes. Return Z_OK on success, Z_BUF_ERROR
            // if the given code set is incomplete (the tables are still built in this
            // case), Z_DATA_ERROR if the input is invalid (an over-subscribed set of
            // lengths), or Z_MEM_ERROR if not enough memory.

            int a;    // counter for codes of length k
            int f;    // i repeats in table every f entries
            int g;    // maximum code length
            int h;    // table level
            int i;    // counter, current code
            int j;    // counter
            int k;    // number of bits in current code
            int l;    // bits per table (returned in m)
            int mask; // (1 << w) - 1, to avoid cc -O bug on HP
            int p;    // pointer into c[], b[], or v[]
            int q;    // points to current table
            int w;    // bits before this table == (l * h)
            int xp;   // pointer into x
            int y;    // number of dummy codes added
            int z;    // number of entries in current table

            // Generate counts for each bit length
            p = 0; i = n;
            do
            {
                c[b[bindex + p]]++; p++; i--; // assume all entries <= BMAX
            }
            while (i != 0);

            if (c[0] == n)
            {
                // null input--all zero length codes
                t[0] = -1;
                m[0] = 0;
                return Z_OK;
            }

            // Find minimum and maximum length, bound *m by those
            l = m[0];
            for (j = 1; j <= BMAX; j++)
            {
                if (c[j] != 0)
                {
                    break;
                }
            }
            k = j; // minimum code length
            if (l < j)
            {
                l = j;
            }
            for (i = BMAX; i != 0; i--)
            {
                if (c[i] != 0)
                {
                    break;
                }
            }
            g = i; // maximum code length
            if (l > i)
            {
                l = i;
            }
            m[0] = l;

            // Adjust last length count to fill out codes, if needed
            for (y = 1 << j; j < i; j++, y <<= 1)
            {
                if ((y -= c[j]) < 0)
                {
                    return Z_DATA_ERROR;
                }
            }
            if ((y -= c[i]) < 0)
            {
                return Z_DATA_ERROR;
            }
            c[i] += y;

            // Generate starting offsets into the value table for each length
            x[1] = j = 0;
            p = 1; xp = 2;
            while (--i != 0)
            {
                // note that i == g from above
                x[xp] = (j += c[p]);
                xp++;
                p++;
            }

            // Make a table of values in order of bit lengths
            i = 0; p = 0;
            do
            {
                if ((j = b[bindex + p]) != 0)
                {
                    v[x[j]++] = i;
                }
                p++;
            }
            while (++i < n);
            n = x[g]; // set n to length of v

            // Generate the Huffman codes and for each, make the table entries
            x[0] = i = 0; // first Huffman code is zero
            p = 0;        // grab values in bit order
            h = -1;       // no tables yet--level -1
            w = -l;       // bits decoded == (l * h)
            u[0] = 0;     // just to keep compilers happy
            q = 0;        // ditto
            z = 0;        // ditto

            // go through the bit lengths (k already is bits in shortest code)
            for (; k <= g; k++)
            {
                a = c[k];
                while (a-- != 0)
                {
                    // here i is the Huffman code of length k bits for value *p
                    // make tables up to required level
                    while (k > w + l)
                    {
                        h++;
                        w += l; // previous table always l bits
                        // compute minimum size table less than or equal to l bits
                        z = g - w;
                        z = (z > l) ? l : z; // table size upper limit
                        if ((f = 1 << (j = k - w)) > a + 1)
                        {
                            // try a k-w bit table
                            // too few codes for k-w bit table
                            f -= (a + 1); // deduct codes from patterns left
                            xp = k;
                            if (j < z)
                            {
                                while (++j < z)
                                {
                                    // try smaller tables up to z bits
                                    if ((f <<= 1) <= c[++xp])
                                    {
                                        break; // enough codes to use up j bits
                                    }
                                    f -= c[xp]; // else deduct codes from patterns
                                }
                            }
                        }
                        z = 1 << j; // table entries for j-bit table

                        // allocate new table
                        if (hn[0] + z > MANY)
                        {
                            // (note: doesn't matter for fixed)
                            return Z_DATA_ERROR; // overflow of MANY
                        }
                        u[h] = q = hn[0]; // DEBUG
                        hn[0] += z;

                        // connect to last table, if there is one
                        if (h != 0)
                        {
                            x[h] = i;        // save pattern for backing up
                            r[0] = (sbyte)j; // bits in this table
                            r[1] = (sbyte)l; // bits to dump before this table
                            j = SharedUtils.URShift(i, (w - l));
                            r[2] = (int)(q - u[h - 1] - j); // offset to this table
                            Array.Copy(r, 0, hp, (u[h - 1] + j) * 3, 3); // connect to last table
                        }
                        else
                        {
                            t[0] = q; // first table is returned result
                        }
                    }

                    // set up table entry in r
                    r[1] = (sbyte)(k - w);
                    if (p >= n)
                    {
                        r[0] = 128 + 64; // out of values--invalid code
                    }
                    else if (v[p] < s)
                    {
                        r[0] = (sbyte)(v[p] < 256 ? 0 : 32 + 64); // 256 is end-of-block
                        r[2] = v[p++]; // simple code is just the value
                    }
                    else
                    {
                        r[0] = (sbyte)(e[v[p] - s] + 16 + 64); // non-simple--look up in lists
                        r[2] = d[v[p++] - s];
                    }

                    // fill code-like entries with r
                    f = 1 << (k - w);
                    for (j = SharedUtils.URShift(i, w); j < z; j += f)
                    {
                        Array.Copy(r, 0, hp, (q + j) * 3, 3);
                    }

                    // backwards increment the k-bit code i
                    for (j = 1 << (k - 1); (i & j) != 0; j = SharedUtils.URShift(j, 1))
                    {
                        i ^= j;
                    }
                    i ^= j;

                    // backup over finished tables
                    mask = (1 << w) - 1; // needed on HP, cc -O bug
                    while ((i & mask) != x[h])
                    {
                        h--; // don't need to update q
                        w -= l;
                        mask = (1 << w) - 1;
                    }
                }
            }
            // Return Z_BUF_ERROR if we were given an incomplete table
            return y != 0 && g != 1 ? Z_BUF_ERROR : Z_OK;
        }

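        // Illustrative sketch (hypothetical helper, not part of the original jzlib port):
        // the first phase of huft_build above counts the codes of each bit length and then
        // turns those counts into starting offsets so that symbol values can be listed in
        // order of code length. The helper below shows that counting/offset step on its
        // own, assuming lengths[] holds one code length per symbol (0 = unused).
        private static int[] SortSymbolsByCodeLength(int[] lengths)
        {
            int[] count = new int[BMAX + 1];    // codes per bit length, like c[]
            foreach (int len in lengths)
                count[len]++;

            int[] offset = new int[BMAX + 2];   // first slot for each length, like x[]
            for (int bits = 1; bits <= BMAX; bits++)
                offset[bits + 1] = offset[bits] + count[bits];

            int[] sorted = new int[lengths.Length - count[0]]; // used symbols only
            for (int sym = 0; sym < lengths.Length; sym++)
                if (lengths[sym] != 0)
                    sorted[offset[lengths[sym]]++] = sym;      // same idea as v[x[j]++] = i
            return sorted;
        }
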
        internal int inflate_trees_bits(int[] c, int[] bb, int[] tb, int[] hp, ZlibCodec z)
        {
            int result;
            initWorkArea(19);
            hn[0] = 0;
            result = huft_build(c, 0, 19, 19, null, null, tb, bb, hp, hn, v);

            if (result == Z_DATA_ERROR)
            {
                z.Message = "oversubscribed dynamic bit lengths tree";
            }
            else if (result == Z_BUF_ERROR || bb[0] == 0)
            {
                z.Message = "incomplete dynamic bit lengths tree";
                result = Z_DATA_ERROR;
            }
            return result;
        }

        internal int inflate_trees_dynamic(int nl, int nd, int[] c, int[] bl, int[] bd, int[] tl, int[] td, int[] hp, ZlibCodec z)
        {
            int result;

            // build literal/length tree
            initWorkArea(288);
            hn[0] = 0;
            result = huft_build(c, 0, nl, 257, cplens, cplext, tl, bl, hp, hn, v);
            if (result != Z_OK || bl[0] == 0)
            {
                if (result == Z_DATA_ERROR)
                {
                    z.Message = "oversubscribed literal/length tree";
                }
                else if (result != Z_MEM_ERROR)
                {
                    z.Message = "incomplete literal/length tree";
                    result = Z_DATA_ERROR;
                }
                return result;
            }

            // build distance tree
            initWorkArea(288);
            result = huft_build(c, nl, nd, 0, cpdist, cpdext, td, bd, hp, hn, v);

            if (result != Z_OK || (bd[0] == 0 && nl > 257))
            {
                if (result == Z_DATA_ERROR)
                {
                    z.Message = "oversubscribed distance tree";
                }
                else if (result == Z_BUF_ERROR)
                {
                    z.Message = "incomplete distance tree";
                    result = Z_DATA_ERROR;
                }
                else if (result != Z_MEM_ERROR)
                {
                    z.Message = "empty distance tree with lengths";
                    result = Z_DATA_ERROR;
                }
                return result;
            }

            return Z_OK;
        }

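        // Added commentary (not in the original source): for a dynamic DEFLATE block the
        // caller is expected to build the 19-entry code-length tree first with
        // inflate_trees_bits, use it to decode the nl literal/length and nd distance code
        // lengths into one array, and only then call inflate_trees_dynamic with that
        // array. The literal/length lengths occupy c[0..nl-1] and the distance lengths
        // follow at c[nl..nl+nd-1], which is why the second huft_build call above starts
        // at index nl.
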
        internal static int inflate_trees_fixed(int[] bl, int[] bd, int[][] tl, int[][] td, ZlibCodec z)
        {
            bl[0] = fixed_bl;
            bd[0] = fixed_bd;
            tl[0] = fixed_tl;
            td[0] = fixed_td;
            return Z_OK;
        }

        private void initWorkArea(int vsize)
        {
            if (hn == null)
            {
                hn = new int[1];
                v = new int[vsize];
                c = new int[BMAX + 1];
                r = new int[3];
                u = new int[BMAX];
                x = new int[BMAX + 1];
            }
            else
            {
                if (v.Length < vsize)
                {
                    v = new int[vsize];
                }
                Array.Clear(v, 0, vsize);
                Array.Clear(c, 0, BMAX + 1);
                r[0] = 0; r[1] = 0; r[2] = 0;
                // for(int i=0; i<BMAX; i++){u[i]=0;}
                //Array.Copy(c, 0, u, 0, BMAX);
                Array.Clear(u, 0, BMAX);
                // for(int i=0; i<BMAX+1; i++){x[i]=0;}
                //Array.Copy(c, 0, x, 0, BMAX + 1);
                Array.Clear(x, 0, BMAX + 1);
            }
        }
    }
}
1844
SabreTools.Library/External/Zlib/Inflate.cs
vendored
Normal file
File diff suppressed because it is too large
1396
SabreTools.Library/External/Zlib/ParallelDeflateOutputStream.cs
vendored
Normal file
File diff suppressed because it is too large
437
SabreTools.Library/External/Zlib/Tree.cs
vendored
Normal file
@@ -0,0 +1,437 @@
// Tree.cs
// ------------------------------------------------------------------
//
// Copyright (c) 2009 Dino Chiesa and Microsoft Corporation.
// All rights reserved.
//
// This code module is part of DotNetZip, a zipfile class library.
//
// ------------------------------------------------------------------
//
// This code is licensed under the Microsoft Public License.
// See the file License.txt for the license details.
// More info on: http://dotnetzip.codeplex.com
//
// ------------------------------------------------------------------
//
// last saved (in emacs):
// Time-stamp: <2009-October-28 13:29:50>
//
// ------------------------------------------------------------------
//
// This module defines classes for zlib compression and
// decompression. This code is derived from the jzlib implementation of
// zlib. In keeping with the license for jzlib, the copyright to that
// code is below.
//
// ------------------------------------------------------------------
//
// Copyright (c) 2000,2001,2002,2003 ymnk, JCraft,Inc. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in
// the documentation and/or other materials provided with the distribution.
//
// 3. The names of the authors may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
// FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JCRAFT,
// INC. OR ANY CONTRIBUTORS TO THIS SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
// OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// -----------------------------------------------------------------------
//
// This program is based on zlib-1.1.3; credit to authors
// Jean-loup Gailly(jloup@gzip.org) and Mark Adler(madler@alumni.caltech.edu)
// and contributors of zlib.
//
// -----------------------------------------------------------------------

namespace Ionic.Zlib
{
    sealed class Tree
    {
        private static readonly int HEAP_SIZE = (2 * InternalConstants.L_CODES + 1);

        // extra bits for each length code
        internal static readonly int[] ExtraLengthBits = new int[]
        {
            0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
            3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0
        };

        // extra bits for each distance code
        internal static readonly int[] ExtraDistanceBits = new int[]
        {
            0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
            7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13
        };

        // extra bits for each bit length code
        internal static readonly int[] extra_blbits = new int[]{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 7};

        internal static readonly sbyte[] bl_order = new sbyte[]{16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15};

        // The lengths of the bit length codes are sent in order of decreasing
        // probability, to avoid transmitting the lengths for unused bit
        // length codes.

        internal const int Buf_size = 8 * 2;

        // see definition of array dist_code below
        //internal const int DIST_CODE_LEN = 512;

        private static readonly sbyte[] _dist_code = new sbyte[]
        {
            0, 1, 2, 3, 4, 4, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7,
            8, 8, 8, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9, 9,
            10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
            11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
            12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
            12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
            13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
            13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
            14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
            14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
            14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
            14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
            15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
            15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
            15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
            15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
            0, 0, 16, 17, 18, 18, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21,
            22, 22, 22, 22, 22, 22, 22, 22, 23, 23, 23, 23, 23, 23, 23, 23,
            24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
            25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
            26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
            26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
            27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27,
            27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27,
            28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
            28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
            28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
            28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
            29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
            29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
            29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
            29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29
        };

        internal static readonly sbyte[] LengthCode = new sbyte[]
        {
            0, 1, 2, 3, 4, 5, 6, 7, 8, 8, 9, 9, 10, 10, 11, 11,
            12, 12, 12, 12, 13, 13, 13, 13, 14, 14, 14, 14, 15, 15, 15, 15,
            16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 17, 17, 17, 17,
            18, 18, 18, 18, 18, 18, 18, 18, 19, 19, 19, 19, 19, 19, 19, 19,
            20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
            21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
            22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22,
            23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23,
            24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
            24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
            25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
            25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
            26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
            26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
            27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27,
            27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 28
        };

        internal static readonly int[] LengthBase = new int[]
        {
            0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, 24, 28,
            32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 0
        };

        internal static readonly int[] DistanceBase = new int[]
        {
            0, 1, 2, 3, 4, 6, 8, 12, 16, 24, 32, 48, 64, 96, 128, 192,
            256, 384, 512, 768, 1024, 1536, 2048, 3072, 4096, 6144, 8192, 12288, 16384, 24576
        };

        /// <summary>
        /// Map from a distance to a distance code.
        /// </summary>
        /// <remarks>
        /// No side effects. _dist_code[256] and _dist_code[257] are never used.
        /// </remarks>
        internal static int DistanceCode(int dist)
        {
            return (dist < 256)
                ? _dist_code[dist]
                : _dist_code[256 + SharedUtils.URShift(dist, 7)];
        }

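        // Added commentary (worked example, not in the original source): the tables above
        // implement RFC 1951's distance coding. The caller passes the match distance minus
        // one (as zlib's d_code macro does), so for a match distance of 100:
        //
        //   int code  = Tree.DistanceCode(99);        // 13
        //   int extra = Tree.ExtraDistanceBits[code]; // 5 extra bits
        //   int bits  = 99 - Tree.DistanceBase[code]; // value 3 sent in those extra bits
        //
        // i.e. distance 100 is emitted as distance code 13 plus five extra bits holding 3.
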
        internal short[] dyn_tree;        // the dynamic tree
        internal int max_code;            // largest code with non zero frequency
        internal StaticTree staticTree;   // the corresponding static tree

        // Compute the optimal bit lengths for a tree and update the total bit length
        // for the current block.
        // IN assertion: the fields freq and dad are set, heap[heap_max] and
        // above are the tree nodes sorted by increasing frequency.
        // OUT assertions: the field len is set to the optimal bit length, the
        // array bl_count contains the frequencies for each bit length.
        // The length opt_len is updated; static_len is also updated if stree is
        // not null.
        internal void gen_bitlen(DeflateManager s)
        {
            short[] tree = dyn_tree;
            short[] stree = staticTree.treeCodes;
            int[] extra = staticTree.extraBits;
            int base_Renamed = staticTree.extraBase;
            int max_length = staticTree.maxLength;
            int h;            // heap index
            int n, m;         // iterate over the tree elements
            int bits;         // bit length
            int xbits;        // extra bits
            short f;          // frequency
            int overflow = 0; // number of elements with bit length too large

            for (bits = 0; bits <= InternalConstants.MAX_BITS; bits++)
            {
                s.bl_count[bits] = 0;
            }

            // In a first pass, compute the optimal bit lengths (which may
            // overflow in the case of the bit length tree).
            tree[s.heap[s.heap_max] * 2 + 1] = 0; // root of the heap

            for (h = s.heap_max + 1; h < HEAP_SIZE; h++)
            {
                n = s.heap[h];
                bits = tree[tree[n * 2 + 1] * 2 + 1] + 1;
                if (bits > max_length)
                {
                    bits = max_length; overflow++;
                }
                tree[n * 2 + 1] = (short)bits;
                // We overwrite tree[n*2+1] which is no longer needed

                if (n > max_code)
                {
                    continue; // not a leaf node
                }

                s.bl_count[bits]++;
                xbits = 0;
                if (n >= base_Renamed)
                {
                    xbits = extra[n - base_Renamed];
                }
                f = tree[n * 2];
                s.opt_len += f * (bits + xbits);
                if (stree != null)
                {
                    s.static_len += f * (stree[n * 2 + 1] + xbits);
                }
            }
            if (overflow == 0)
            {
                return;
            }

            // This happens for example on obj2 and pic of the Calgary corpus
            // Find the first bit length which could increase:
            do
            {
                bits = max_length - 1;
                while (s.bl_count[bits] == 0)
                {
                    bits--;
                }
                s.bl_count[bits]--;      // move one leaf down the tree
                s.bl_count[bits + 1] = (short)(s.bl_count[bits + 1] + 2); // move one overflow item as its brother
                s.bl_count[max_length]--;
                // The brother of the overflow item also moves one step up,
                // but this does not affect bl_count[max_length]
                overflow -= 2;
            }
            while (overflow > 0);

            for (bits = max_length; bits != 0; bits--)
            {
                n = s.bl_count[bits];
                while (n != 0)
                {
                    m = s.heap[--h];
                    if (m > max_code)
                    {
                        continue;
                    }
                    if (tree[m * 2 + 1] != bits)
                    {
                        s.opt_len = (int)(s.opt_len + ((long)bits - (long)tree[m * 2 + 1]) * (long)tree[m * 2]);
                        tree[m * 2 + 1] = (short)bits;
                    }
                    n--;
                }
            }
        }

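        // Added commentary (not in the original source): if the first pass produced code
        // lengths longer than max_length, `overflow` counts how many were clamped. The
        // do/while loop above rebalances bl_count[] by moving a leaf from the deepest
        // level below max_length down one bit per iteration until the counts again
        // satisfy the prefix-code constraint, and the final loop walks the heap in order
        // of increasing frequency to write the corrected lengths back into tree[n*2+1],
        // adjusting opt_len to match.
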
        // Construct one Huffman tree and assigns the code bit strings and lengths.
        // Update the total bit length for the current block.
        // IN assertion: the field freq is set for all tree elements.
        // OUT assertions: the fields len and code are set to the optimal bit length
        // and corresponding code. The length opt_len is updated; static_len is
        // also updated if stree is not null. The field max_code is set.
        internal void build_tree(DeflateManager s)
        {
            short[] tree = dyn_tree;
            short[] stree = staticTree.treeCodes;
            int elems = staticTree.elems;
            int n, m;          // iterate over heap elements
            int max_code = -1; // largest code with non zero frequency
            int node;          // new node being created

            // Construct the initial heap, with least frequent element in
            // heap[1]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
            // heap[0] is not used.
            s.heap_len = 0;
            s.heap_max = HEAP_SIZE;

            for (n = 0; n < elems; n++)
            {
                if (tree[n * 2] != 0)
                {
                    s.heap[++s.heap_len] = max_code = n;
                    s.depth[n] = 0;
                }
                else
                {
                    tree[n * 2 + 1] = 0;
                }
            }

            // The pkzip format requires that at least one distance code exists,
            // and that at least one bit should be sent even if there is only one
            // possible code. So to avoid special checks later on we force at least
            // two codes of non zero frequency.
            while (s.heap_len < 2)
            {
                node = s.heap[++s.heap_len] = (max_code < 2 ? ++max_code : 0);
                tree[node * 2] = 1;
                s.depth[node] = 0;
                s.opt_len--;
                if (stree != null)
                {
                    s.static_len -= stree[node * 2 + 1];
                } // node is 0 or 1 so it does not have extra bits
            }
            this.max_code = max_code;

            // The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
            // establish sub-heaps of increasing lengths:
            for (n = s.heap_len / 2; n >= 1; n--)
            {
                s.pqdownheap(tree, n);
            }

            // Construct the Huffman tree by repeatedly combining the least two
            // frequent nodes.
            node = elems; // next internal node of the tree
            do
            {
                // n = node of least frequency
                n = s.heap[1];
                s.heap[1] = s.heap[s.heap_len--];
                s.pqdownheap(tree, 1);
                m = s.heap[1]; // m = node of next least frequency

                s.heap[--s.heap_max] = n; // keep the nodes sorted by frequency
                s.heap[--s.heap_max] = m;

                // Create a new node father of n and m
                tree[node * 2] = unchecked((short)(tree[n * 2] + tree[m * 2]));
                s.depth[node] = (sbyte)(System.Math.Max((byte)s.depth[n], (byte)s.depth[m]) + 1);
                tree[n * 2 + 1] = tree[m * 2 + 1] = (short)node;

                // and insert the new node in the heap
                s.heap[1] = node++;
                s.pqdownheap(tree, 1);
            }
            while (s.heap_len >= 2);

            s.heap[--s.heap_max] = s.heap[1];

            // At this point, the fields freq and dad are set. We can now
            // generate the bit lengths.
            gen_bitlen(s);

            // The field len is now set, we can generate the bit codes
            gen_codes(tree, max_code, s.bl_count);
        }

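        // Illustrative sketch (hypothetical helper, not part of the original source): the
        // same combine-the-two-least-frequent idea as build_tree above, written without
        // the DeflateManager heap machinery. Given symbol frequencies it returns each
        // symbol's code length; unused symbols get length 0. It assumes at least two
        // symbols have nonzero frequency (build_tree forces this for the real trees) and
        // does not enforce a maximum length, which gen_bitlen handles in the real code.
        private static int[] BuildCodeLengths(int[] freq)
        {
            int n = freq.Length;
            long[] weight = new long[2 * n]; // leaf weights followed by internal nodes
            int[] parent = new int[2 * n];
            var live = new System.Collections.Generic.List<int>();
            for (int i = 0; i < n; i++)
            {
                weight[i] = freq[i];
                if (freq[i] > 0) live.Add(i);
            }

            int next = n;
            while (live.Count > 1)
            {
                live.Sort((a, b) => weight[a].CompareTo(weight[b]));
                int lo1 = live[0], lo2 = live[1];         // two least frequent nodes
                live.RemoveRange(0, 2);
                weight[next] = weight[lo1] + weight[lo2];
                parent[lo1] = parent[lo2] = next;         // new internal node is their father
                live.Add(next);
                next++;
            }

            int root = next - 1;
            int[] lengths = new int[n];
            for (int i = 0; i < n; i++)
            {
                if (freq[i] == 0) continue;
                int depth = 0;
                for (int node = i; node != root; node = parent[node]) depth++;
                lengths[i] = depth;                       // code length = depth of the leaf
            }
            return lengths;
        }
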
        // Generate the codes for a given tree and bit counts (which need not be
        // optimal).
        // IN assertion: the array bl_count contains the bit length statistics for
        // the given tree and the field len is set for all tree elements.
        // OUT assertion: the field code is set for all tree elements of non
        // zero code length.
        internal static void gen_codes(short[] tree, int max_code, short[] bl_count)
        {
            short[] next_code = new short[InternalConstants.MAX_BITS + 1]; // next code value for each bit length
            short code = 0;   // running code value
            int bits;         // bit index
            int n;            // code index

            // The distribution counts are first used to generate the code values
            // without bit reversal.
            for (bits = 1; bits <= InternalConstants.MAX_BITS; bits++)
            {
                unchecked
                {
                    next_code[bits] = code = (short)((code + bl_count[bits - 1]) << 1);
                }
            }

            // Check that the bit counts in bl_count are consistent. The last code
            // must be all ones.
            //Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
            //        "inconsistent bit counts");
            //Tracev((stderr,"\ngen_codes: max_code %d ", max_code));

            for (n = 0; n <= max_code; n++)
            {
                int len = tree[n * 2 + 1];
                if (len == 0)
                {
                    continue;
                }
                // Now reverse the bits
                tree[n * 2] = unchecked((short)(bi_reverse(next_code[len]++, len)));
            }
        }

        // Reverse the first len bits of a code, using straightforward code (a faster
        // method would use a table)
        // IN assertion: 1 <= len <= 15
        internal static int bi_reverse(int code, int len)
        {
            int res = 0;
            do
            {
                res |= code & 1;
                code >>= 1; //SharedUtils.URShift(code, 1);
                res <<= 1;
            }
            while (--len > 0);
            return res >> 1;
        }

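        // Added commentary (worked example, not in the original source): gen_codes assigns
        // canonical code values in ascending order and bi_reverse flips them because
        // DEFLATE writes Huffman codes starting from the least significant bit. For
        // instance, a 3-bit code with value 0b001 comes back as 0b100:
        //
        //   int reversed = Tree.bi_reverse(0x1, 3);  // == 0x4
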
    }
}
538
SabreTools.Library/External/Zlib/Zlib.cs
vendored
Normal file
@@ -0,0 +1,538 @@
|
||||
// Zlib.cs
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// Copyright (c) 2009-2011 Dino Chiesa and Microsoft Corporation.
|
||||
// All rights reserved.
|
||||
//
|
||||
// This code module is part of DotNetZip, a zipfile class library.
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// This code is licensed under the Microsoft Public License.
|
||||
// See the file License.txt for the license details.
|
||||
// More info on: http://dotnetzip.codeplex.com
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// Last Saved: <2011-August-03 19:52:28>
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// This module defines classes for ZLIB compression and
|
||||
// decompression. This code is derived from the jzlib implementation of
|
||||
// zlib, but significantly modified. The object model is not the same,
|
||||
// and many of the behaviors are new or different. Nonetheless, in
|
||||
// keeping with the license for jzlib, the copyright to that code is
|
||||
// included below.
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// The following notice applies to jzlib:
|
||||
//
|
||||
// Copyright (c) 2000,2001,2002,2003 ymnk, JCraft,Inc. All rights reserved.
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are met:
|
||||
//
|
||||
// 1. Redistributions of source code must retain the above copyright notice,
|
||||
// this list of conditions and the following disclaimer.
|
||||
//
|
||||
// 2. Redistributions in binary form must reproduce the above copyright
|
||||
// notice, this list of conditions and the following disclaimer in
|
||||
// the documentation and/or other materials provided with the distribution.
|
||||
//
|
||||
// 3. The names of the authors may not be used to endorse or promote products
|
||||
// derived from this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES,
|
||||
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
// FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JCRAFT,
|
||||
// INC. OR ANY CONTRIBUTORS TO THIS SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
|
||||
// OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
|
||||
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
//
|
||||
// -----------------------------------------------------------------------
|
||||
//
|
||||
// jzlib is based on zlib-1.1.3.
|
||||
//
|
||||
// The following notice applies to zlib:
|
||||
//
|
||||
// -----------------------------------------------------------------------
|
||||
//
|
||||
// Copyright (C) 1995-2004 Jean-loup Gailly and Mark Adler
|
||||
//
|
||||
// The ZLIB software is provided 'as-is', without any express or implied
|
||||
// warranty. In no event will the authors be held liable for any damages
|
||||
// arising from the use of this software.
|
||||
//
|
||||
// Permission is granted to anyone to use this software for any purpose,
|
||||
// including commercial applications, and to alter it and redistribute it
|
||||
// freely, subject to the following restrictions:
|
||||
//
|
||||
// 1. The origin of this software must not be misrepresented; you must not
|
||||
// claim that you wrote the original software. If you use this software
|
||||
// in a product, an acknowledgment in the product documentation would be
|
||||
// appreciated but is not required.
|
||||
// 2. Altered source versions must be plainly marked as such, and must not be
|
||||
// misrepresented as being the original software.
|
||||
// 3. This notice may not be removed or altered from any source distribution.
|
||||
//
|
||||
// Jean-loup Gailly jloup@gzip.org
|
||||
// Mark Adler madler@alumni.caltech.edu
|
||||
//
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
using System.Runtime.InteropServices;
|
||||
|
||||
namespace Ionic.Zlib
|
||||
{
|
||||
/// <summary>
|
||||
/// Describes how to flush the current deflate operation.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// The different FlushType values are useful when using a Deflate in a streaming application.
|
||||
/// </remarks>
|
||||
public enum FlushType
|
||||
{
|
||||
/// <summary>No flush at all.</summary>
|
||||
None = 0,
|
||||
|
||||
/// <summary>Closes the current block, but doesn't flush it to
|
||||
/// the output. Used internally only in hypothetical
|
||||
/// scenarios. This was supposed to be removed by Zlib, but it is
|
||||
/// still in use in some edge cases.
|
||||
/// </summary>
|
||||
Partial,
|
||||
|
||||
/// <summary>
|
||||
/// Use this during compression to specify that all pending output should be
|
||||
/// flushed to the output buffer and the output should be aligned on a byte
|
||||
/// boundary. You might use this in a streaming communication scenario, so that
|
||||
/// the decompressor can get all input data available so far. When using this
|
||||
/// with a ZlibCodec, <c>AvailableBytesIn</c> will be zero after the call if
|
||||
/// enough output space has been provided before the call. Flushing will
|
||||
/// degrade compression and so it should be used only when necessary.
|
||||
/// </summary>
|
||||
Sync,
|
||||
|
||||
/// <summary>
|
||||
/// Use this during compression to specify that all output should be flushed, as
|
||||
/// with <c>FlushType.Sync</c>, but also, the compression state should be reset
|
||||
/// so that decompression can restart from this point if previous compressed
|
||||
/// data has been damaged or if random access is desired. Using
|
||||
/// <c>FlushType.Full</c> too often can significantly degrade the compression.
|
||||
/// </summary>
|
||||
Full,
|
||||
|
||||
/// <summary>Signals the end of the compression/decompression stream.</summary>
|
||||
Finish,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// The compression level to be used when using a DeflateStream or ZlibStream with CompressionMode.Compress.
|
||||
/// </summary>
|
||||
public enum CompressionLevel
|
||||
{
|
||||
/// <summary>
|
||||
/// None means that the data will be simply stored, with no change at all.
|
||||
/// If you are producing ZIPs for use on Mac OSX, be aware that archives produced with CompressionLevel.None
|
||||
/// cannot be opened with the default zip reader. Use a different CompressionLevel.
|
||||
/// </summary>
|
||||
None= 0,
|
||||
/// <summary>
|
||||
/// Same as None.
|
||||
/// </summary>
|
||||
Level0 = 0,
|
||||
|
||||
/// <summary>
|
||||
/// The fastest but least effective compression.
|
||||
/// </summary>
|
||||
BestSpeed = 1,
|
||||
|
||||
/// <summary>
|
||||
/// A synonym for BestSpeed.
|
||||
/// </summary>
|
||||
Level1 = 1,
|
||||
|
||||
/// <summary>
|
||||
/// A little slower, but better, than level 1.
|
||||
/// </summary>
|
||||
Level2 = 2,
|
||||
|
||||
/// <summary>
|
||||
/// A little slower, but better, than level 2.
|
||||
/// </summary>
|
||||
Level3 = 3,
|
||||
|
||||
/// <summary>
|
||||
/// A little slower, but better, than level 3.
|
||||
/// </summary>
|
||||
Level4 = 4,
|
||||
|
||||
/// <summary>
|
||||
/// A little slower than level 4, but with better compression.
|
||||
/// </summary>
|
||||
Level5 = 5,
|
||||
|
||||
/// <summary>
|
||||
/// The default compression level, with a good balance of speed and compression efficiency.
|
||||
/// </summary>
|
||||
Default = 6,
|
||||
/// <summary>
|
||||
/// A synonym for Default.
|
||||
/// </summary>
|
||||
Level6 = 6,
|
||||
|
||||
/// <summary>
|
||||
/// Pretty good compression!
|
||||
/// </summary>
|
||||
Level7 = 7,
|
||||
|
||||
/// <summary>
|
||||
/// Better compression than Level7!
|
||||
/// </summary>
|
||||
Level8 = 8,
|
||||
|
||||
/// <summary>
|
||||
/// The "best" compression, where best means greatest reduction in size of the input data stream.
|
||||
/// This is also the slowest compression.
|
||||
/// </summary>
|
||||
BestCompression = 9,
|
||||
|
||||
/// <summary>
|
||||
/// A synonym for BestCompression.
|
||||
/// </summary>
|
||||
Level9 = 9,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Describes options for how the compression algorithm is executed. Different strategies
|
||||
/// work better on different sorts of data. The strategy parameter can affect the compression
|
||||
/// ratio and the speed of compression but not the correctness of the compresssion.
|
||||
/// </summary>
|
||||
public enum CompressionStrategy
|
||||
{
|
||||
/// <summary>
|
||||
/// The default strategy is probably the best for normal data.
|
||||
/// </summary>
|
||||
Default = 0,
|
||||
|
||||
/// <summary>
|
||||
/// The <c>Filtered</c> strategy is intended to be used most effectively with data produced by a
|
||||
/// filter or predictor. By this definition, filtered data consists mostly of small
|
||||
/// values with a somewhat random distribution. In this case, the compression algorithm
|
||||
/// is tuned to compress them better. The effect of <c>Filtered</c> is to force more Huffman
|
||||
/// coding and less string matching; it is a half-step between <c>Default</c> and <c>HuffmanOnly</c>.
|
||||
/// </summary>
|
||||
Filtered = 1,
|
||||
|
||||
/// <summary>
|
||||
/// Using <c>HuffmanOnly</c> will force the compressor to do Huffman encoding only, with no
|
||||
/// string matching.
|
||||
/// </summary>
|
||||
HuffmanOnly = 2,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// An enum to specify the direction of transcoding - whether to compress or decompress.
|
||||
/// </summary>
|
||||
public enum CompressionMode
|
||||
{
|
||||
/// <summary>
|
||||
/// Used to specify that the stream should compress the data.
|
||||
/// </summary>
|
||||
Compress= 0,
|
||||
/// <summary>
|
||||
/// Used to specify that the stream should decompress the data.
|
||||
/// </summary>
|
||||
Decompress = 1,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A general purpose exception class for exceptions in the Zlib library.
|
||||
/// </summary>
|
||||
[Guid("ebc25cf6-9120-4283-b972-0e5520d0000E")]
|
||||
public class ZlibException : System.Exception
|
||||
{
|
||||
/// <summary>
|
||||
/// The ZlibException class captures exception information generated
|
||||
/// by the Zlib library.
|
||||
/// </summary>
|
||||
public ZlibException()
|
||||
: base()
|
||||
{
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// This ctor collects a message attached to the exception.
|
||||
/// </summary>
|
||||
/// <param name="s">the message for the exception.</param>
|
||||
public ZlibException(System.String s)
|
||||
: base(s)
|
||||
{
|
||||
}
|
||||
}
|
||||
|
||||
internal class SharedUtils
|
||||
{
|
||||
/// <summary>
|
||||
/// Performs an unsigned bitwise right shift with the specified number
|
||||
/// </summary>
|
||||
/// <param name="number">Number to operate on</param>
|
||||
/// <param name="bits">Ammount of bits to shift</param>
|
||||
/// <returns>The resulting number from the shift operation</returns>
|
||||
public static int URShift(int number, int bits)
|
||||
{
|
||||
return (int)((uint)number >> bits);
|
||||
}
|
||||
|
||||
#if NOT
|
||||
/// <summary>
|
||||
/// Performs an unsigned bitwise right shift with the specified number
|
||||
/// </summary>
|
||||
/// <param name="number">Number to operate on</param>
|
||||
/// <param name="bits">Ammount of bits to shift</param>
|
||||
/// <returns>The resulting number from the shift operation</returns>
|
||||
public static long URShift(long number, int bits)
|
||||
{
|
||||
return (long) ((UInt64)number >> bits);
|
||||
}
|
||||
#endif
|
||||
|
||||
/// <summary>
|
||||
/// Reads a number of characters from the current source TextReader and writes
|
||||
/// the data to the target array at the specified index.
|
||||
/// </summary>
|
||||
///
|
||||
/// <param name="sourceTextReader">The source TextReader to read from</param>
|
||||
/// <param name="target">Contains the array of characteres read from the source TextReader.</param>
|
||||
/// <param name="start">The starting index of the target array.</param>
|
||||
/// <param name="count">The maximum number of characters to read from the source TextReader.</param>
|
||||
///
|
||||
/// <returns>
|
||||
/// The number of characters read. The number will be less than or equal to
|
||||
/// count depending on the data available in the source TextReader. Returns -1
|
||||
/// if the end of the stream is reached.
|
||||
/// </returns>
|
||||
public static System.Int32 ReadInput(System.IO.TextReader sourceTextReader, byte[] target, int start, int count)
|
||||
{
|
||||
// Returns 0 bytes if not enough space in target
|
||||
if (target.Length == 0)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
char[] charArray = new char[target.Length];
|
||||
int bytesRead = sourceTextReader.Read(charArray, start, count);
|
||||
|
||||
// Returns -1 if EOF
|
||||
if (bytesRead == 0) return -1;
|
||||
|
||||
for (int index = start; index < start + bytesRead; index++)
|
||||
{
|
||||
target[index] = (byte)charArray[index];
|
||||
}
|
||||
|
||||
return bytesRead;
|
||||
}
|
||||
|
||||
internal static byte[] ToByteArray(System.String sourceString)
|
||||
{
|
||||
return System.Text.UTF8Encoding.UTF8.GetBytes(sourceString);
|
||||
}
|
||||
|
||||
internal static char[] ToCharArray(byte[] byteArray)
|
||||
{
|
||||
return System.Text.UTF8Encoding.UTF8.GetChars(byteArray);
|
||||
}
|
||||
}
|
||||
|
||||
internal static class InternalConstants
|
||||
{
|
||||
internal static readonly int MAX_BITS = 15;
|
||||
internal static readonly int BL_CODES = 19;
|
||||
internal static readonly int D_CODES = 30;
|
||||
internal static readonly int LITERALS = 256;
|
||||
internal static readonly int LENGTH_CODES = 29;
|
||||
internal static readonly int L_CODES = (LITERALS + 1 + LENGTH_CODES);
|
||||
|
||||
// Bit length codes must not exceed MAX_BL_BITS bits
|
||||
internal static readonly int MAX_BL_BITS = 7;
|
||||
|
||||
// repeat previous bit length 3-6 times (2 bits of repeat count)
|
||||
internal static readonly int REP_3_6 = 16;
|
||||
|
||||
// repeat a zero length 3-10 times (3 bits of repeat count)
|
||||
internal static readonly int REPZ_3_10 = 17;
|
||||
|
||||
// repeat a zero length 11-138 times (7 bits of repeat count)
|
||||
internal static readonly int REPZ_11_138 = 18;
|
||||
}
|
||||
|
||||
internal sealed class StaticTree
|
||||
{
|
||||
internal static readonly short[] lengthAndLiteralsTreeCodes = new short[] {
|
||||
12, 8, 140, 8, 76, 8, 204, 8, 44, 8, 172, 8, 108, 8, 236, 8,
|
||||
28, 8, 156, 8, 92, 8, 220, 8, 60, 8, 188, 8, 124, 8, 252, 8,
|
||||
2, 8, 130, 8, 66, 8, 194, 8, 34, 8, 162, 8, 98, 8, 226, 8,
|
||||
18, 8, 146, 8, 82, 8, 210, 8, 50, 8, 178, 8, 114, 8, 242, 8,
|
||||
10, 8, 138, 8, 74, 8, 202, 8, 42, 8, 170, 8, 106, 8, 234, 8,
|
||||
26, 8, 154, 8, 90, 8, 218, 8, 58, 8, 186, 8, 122, 8, 250, 8,
|
||||
6, 8, 134, 8, 70, 8, 198, 8, 38, 8, 166, 8, 102, 8, 230, 8,
|
||||
22, 8, 150, 8, 86, 8, 214, 8, 54, 8, 182, 8, 118, 8, 246, 8,
|
||||
14, 8, 142, 8, 78, 8, 206, 8, 46, 8, 174, 8, 110, 8, 238, 8,
|
||||
30, 8, 158, 8, 94, 8, 222, 8, 62, 8, 190, 8, 126, 8, 254, 8,
|
||||
1, 8, 129, 8, 65, 8, 193, 8, 33, 8, 161, 8, 97, 8, 225, 8,
|
||||
17, 8, 145, 8, 81, 8, 209, 8, 49, 8, 177, 8, 113, 8, 241, 8,
|
||||
9, 8, 137, 8, 73, 8, 201, 8, 41, 8, 169, 8, 105, 8, 233, 8,
|
||||
25, 8, 153, 8, 89, 8, 217, 8, 57, 8, 185, 8, 121, 8, 249, 8,
|
||||
5, 8, 133, 8, 69, 8, 197, 8, 37, 8, 165, 8, 101, 8, 229, 8,
|
||||
21, 8, 149, 8, 85, 8, 213, 8, 53, 8, 181, 8, 117, 8, 245, 8,
|
||||
13, 8, 141, 8, 77, 8, 205, 8, 45, 8, 173, 8, 109, 8, 237, 8,
|
||||
29, 8, 157, 8, 93, 8, 221, 8, 61, 8, 189, 8, 125, 8, 253, 8,
|
||||
19, 9, 275, 9, 147, 9, 403, 9, 83, 9, 339, 9, 211, 9, 467, 9,
|
||||
51, 9, 307, 9, 179, 9, 435, 9, 115, 9, 371, 9, 243, 9, 499, 9,
|
||||
11, 9, 267, 9, 139, 9, 395, 9, 75, 9, 331, 9, 203, 9, 459, 9,
|
||||
43, 9, 299, 9, 171, 9, 427, 9, 107, 9, 363, 9, 235, 9, 491, 9,
|
||||
27, 9, 283, 9, 155, 9, 411, 9, 91, 9, 347, 9, 219, 9, 475, 9,
|
||||
59, 9, 315, 9, 187, 9, 443, 9, 123, 9, 379, 9, 251, 9, 507, 9,
|
||||
7, 9, 263, 9, 135, 9, 391, 9, 71, 9, 327, 9, 199, 9, 455, 9,
|
||||
39, 9, 295, 9, 167, 9, 423, 9, 103, 9, 359, 9, 231, 9, 487, 9,
|
||||
23, 9, 279, 9, 151, 9, 407, 9, 87, 9, 343, 9, 215, 9, 471, 9,
|
||||
55, 9, 311, 9, 183, 9, 439, 9, 119, 9, 375, 9, 247, 9, 503, 9,
|
||||
15, 9, 271, 9, 143, 9, 399, 9, 79, 9, 335, 9, 207, 9, 463, 9,
|
||||
47, 9, 303, 9, 175, 9, 431, 9, 111, 9, 367, 9, 239, 9, 495, 9,
|
||||
31, 9, 287, 9, 159, 9, 415, 9, 95, 9, 351, 9, 223, 9, 479, 9,
|
||||
63, 9, 319, 9, 191, 9, 447, 9, 127, 9, 383, 9, 255, 9, 511, 9,
|
||||
0, 7, 64, 7, 32, 7, 96, 7, 16, 7, 80, 7, 48, 7, 112, 7,
|
||||
8, 7, 72, 7, 40, 7, 104, 7, 24, 7, 88, 7, 56, 7, 120, 7,
|
||||
4, 7, 68, 7, 36, 7, 100, 7, 20, 7, 84, 7, 52, 7, 116, 7,
|
||||
3, 8, 131, 8, 67, 8, 195, 8, 35, 8, 163, 8, 99, 8, 227, 8
|
||||
};
|
||||
|
||||
internal static readonly short[] distTreeCodes = new short[] {
|
||||
0, 5, 16, 5, 8, 5, 24, 5, 4, 5, 20, 5, 12, 5, 28, 5,
|
||||
2, 5, 18, 5, 10, 5, 26, 5, 6, 5, 22, 5, 14, 5, 30, 5,
|
||||
1, 5, 17, 5, 9, 5, 25, 5, 5, 5, 21, 5, 13, 5, 29, 5,
|
||||
3, 5, 19, 5, 11, 5, 27, 5, 7, 5, 23, 5 };
|
||||
|
||||
internal static readonly StaticTree Literals;
|
||||
internal static readonly StaticTree Distances;
|
||||
internal static readonly StaticTree BitLengths;
|
||||
|
||||
internal short[] treeCodes; // static tree or null
|
||||
internal int[] extraBits; // extra bits for each code or null
|
||||
internal int extraBase; // base index for extra_bits
|
||||
internal int elems; // max number of elements in the tree
|
||||
internal int maxLength; // max bit length for the codes
|
||||
|
||||
private StaticTree(short[] treeCodes, int[] extraBits, int extraBase, int elems, int maxLength)
|
||||
{
|
||||
this.treeCodes = treeCodes;
|
||||
this.extraBits = extraBits;
|
||||
this.extraBase = extraBase;
|
||||
this.elems = elems;
|
||||
this.maxLength = maxLength;
|
||||
}
|
||||
|
||||
static StaticTree()
|
||||
{
|
||||
Literals = new StaticTree(lengthAndLiteralsTreeCodes, Tree.ExtraLengthBits, InternalConstants.LITERALS + 1, InternalConstants.L_CODES, InternalConstants.MAX_BITS);
|
||||
Distances = new StaticTree(distTreeCodes, Tree.ExtraDistanceBits, 0, InternalConstants.D_CODES, InternalConstants.MAX_BITS);
|
||||
BitLengths = new StaticTree(null, Tree.extra_blbits, 0, InternalConstants.BL_CODES, InternalConstants.MAX_BL_BITS);
|
||||
}
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Computes an Adler-32 checksum.
    /// </summary>
    /// <remarks>
    /// The Adler checksum is similar to a CRC checksum, but faster to compute, though less
    /// reliable. It is used in producing RFC1950 compressed streams. The Adler checksum
    /// is a required part of the "ZLIB" standard. Applications will almost never need to
    /// use this class directly.
    /// </remarks>
    ///
    /// <exclude/>
    public sealed class Adler
    {
        // largest prime smaller than 65536
        private static readonly uint BASE = 65521;
        // NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1
        private static readonly int NMAX = 5552;

        #pragma warning disable 3001
        #pragma warning disable 3002

        /// <summary>
        /// Calculates the Adler32 checksum.
        /// </summary>
        /// <remarks>
        /// <para>
        /// This is used within ZLIB. You probably don't need to use this directly.
        /// </para>
        /// </remarks>
        /// <example>
        /// To compute an Adler32 checksum on a byte array:
        /// <code>
        /// var adler = Adler.Adler32(0, null, 0, 0);
        /// adler = Adler.Adler32(adler, buffer, index, length);
        /// </code>
        /// </example>
        public static uint Adler32(uint adler, byte[] buf, int index, int len)
        {
            if (buf == null)
            {
                return 1;
            }

            uint s1 = (uint)(adler & 0xffff);
            uint s2 = (uint)((adler >> 16) & 0xffff);

            while (len > 0)
            {
                int k = len < NMAX ? len : NMAX;
                len -= k;
                while (k >= 16)
                {
                    //s1 += (buf[index++] & 0xff); s2 += s1;
                    s1 += buf[index++]; s2 += s1;
                    s1 += buf[index++]; s2 += s1;
                    s1 += buf[index++]; s2 += s1;
                    s1 += buf[index++]; s2 += s1;
                    s1 += buf[index++]; s2 += s1;
                    s1 += buf[index++]; s2 += s1;
                    s1 += buf[index++]; s2 += s1;
                    s1 += buf[index++]; s2 += s1;
                    s1 += buf[index++]; s2 += s1;
                    s1 += buf[index++]; s2 += s1;
                    s1 += buf[index++]; s2 += s1;
                    s1 += buf[index++]; s2 += s1;
                    s1 += buf[index++]; s2 += s1;
                    s1 += buf[index++]; s2 += s1;
                    s1 += buf[index++]; s2 += s1;
                    s1 += buf[index++]; s2 += s1;
                    k -= 16;
                }
                if (k != 0)
                {
                    do
                    {
                        s1 += buf[index++];
                        s2 += s1;
                    }
                    while (--k != 0);
                }
                s1 %= BASE;
                s2 %= BASE;
            }
            return (uint)((s2 << 16) | s1);
        }

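        // Added commentary (not in the original source): a quick self-check for this
        // implementation is the well-known test vector for the ASCII string "Wikipedia":
        //
        //   byte[] data = System.Text.Encoding.ASCII.GetBytes("Wikipedia");
        //   uint a = Adler.Adler32(1, data, 0, data.Length);   // 0x11E60398
        //
        // Note the running checksum starts at 1 (s1 = 1, s2 = 0), which is what the
        // Adler32(0, null, 0, 0) call in the XML doc example above returns.
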
        #pragma warning restore 3001
        #pragma warning restore 3002
    }
}
690
SabreTools.Library/External/Zlib/ZlibBaseStream.cs
vendored
Normal file
@@ -0,0 +1,690 @@
|
||||
// ZlibBaseStream.cs
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// Copyright (c) 2009 Dino Chiesa and Microsoft Corporation.
|
||||
// All rights reserved.
|
||||
//
|
||||
// This code module is part of DotNetZip, a zipfile class library.
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// This code is licensed under the Microsoft Public License.
|
||||
// See the file License.txt for the license details.
|
||||
// More info on: http://dotnetzip.codeplex.com
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// last saved (in emacs):
|
||||
// Time-stamp: <2011-August-06 21:22:38>
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// This module defines the ZlibBaseStream class, which is an intnernal
|
||||
// base class for DeflateStream, ZlibStream and GZipStream.
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
|
||||
using System;
|
||||
using System.IO;
|
||||
|
||||
namespace Ionic.Zlib
|
||||
{
|
||||
internal enum ZlibStreamFlavor { ZLIB = 1950, DEFLATE = 1951, GZIP = 1952 }
|
||||
|
||||
internal class ZlibBaseStream : System.IO.Stream
|
||||
{
|
||||
protected internal ZlibCodec _z = null; // deferred init... new ZlibCodec();
|
||||
|
||||
protected internal StreamMode _streamMode = StreamMode.Undefined;
|
||||
protected internal FlushType _flushMode;
|
||||
protected internal ZlibStreamFlavor _flavor;
|
||||
protected internal CompressionMode _compressionMode;
|
||||
protected internal CompressionLevel _level;
|
||||
protected internal bool _leaveOpen;
|
||||
protected internal byte[] _workingBuffer;
|
||||
protected internal int _bufferSize = ZlibConstants.WorkingBufferSizeDefault;
|
||||
protected internal byte[] _buf1 = new byte[1];
|
||||
|
||||
protected internal System.IO.Stream _stream;
|
||||
protected internal CompressionStrategy Strategy = CompressionStrategy.Default;
|
||||
|
||||
// workitem 7159
|
||||
CRC32 crc;
|
||||
protected internal string _GzipFileName;
|
||||
protected internal string _GzipComment;
|
||||
protected internal DateTime _GzipMtime;
|
||||
protected internal int _gzipHeaderByteCount;
|
||||
|
||||
internal int Crc32 { get { if (crc == null) return 0; return crc.Crc32Result; } }
|
||||
|
||||
public ZlibBaseStream(System.IO.Stream stream,
|
||||
CompressionMode compressionMode,
|
||||
CompressionLevel level,
|
||||
ZlibStreamFlavor flavor,
|
||||
bool leaveOpen)
|
||||
: base()
|
||||
{
|
||||
this._flushMode = FlushType.None;
|
||||
//this._workingBuffer = new byte[WORKING_BUFFER_SIZE_DEFAULT];
|
||||
this._stream = stream;
|
||||
this._leaveOpen = leaveOpen;
|
||||
this._compressionMode = compressionMode;
|
||||
this._flavor = flavor;
|
||||
this._level = level;
|
||||
// workitem 7159
|
||||
if (flavor == ZlibStreamFlavor.GZIP)
|
||||
{
|
||||
this.crc = new CRC32();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
protected internal bool _wantCompress
|
||||
{
|
||||
get
|
||||
{
|
||||
return (this._compressionMode == CompressionMode.Compress);
|
||||
}
|
||||
}
|
||||
|
||||
private ZlibCodec z
|
||||
{
|
||||
get
|
||||
{
|
||||
if (_z == null)
|
||||
{
|
||||
bool wantRfc1950Header = (this._flavor == ZlibStreamFlavor.ZLIB);
|
||||
_z = new ZlibCodec();
|
||||
if (this._compressionMode == CompressionMode.Decompress)
|
||||
{
|
||||
_z.InitializeInflate(wantRfc1950Header);
|
||||
}
|
||||
else
|
||||
{
|
||||
_z.Strategy = Strategy;
|
||||
_z.InitializeDeflate(this._level, wantRfc1950Header);
|
||||
}
|
||||
}
|
||||
return _z;
|
||||
}
|
||||
}
|
||||
|
||||
private byte[] workingBuffer
|
||||
{
|
||||
get
|
||||
{
|
||||
if (_workingBuffer == null)
|
||||
{
|
||||
_workingBuffer = new byte[_bufferSize];
|
||||
}
|
||||
return _workingBuffer;
|
||||
}
|
||||
}
|
||||
|
||||
public override void Write(System.Byte[] buffer, int offset, int count)
|
||||
{
|
||||
// workitem 7159
|
||||
// calculate the CRC on the unccompressed data (before writing)
|
||||
if (crc != null)
|
||||
{
|
||||
crc.SlurpBlock(buffer, offset, count);
|
||||
}
|
||||
|
||||
if (_streamMode == StreamMode.Undefined)
|
||||
{
|
||||
_streamMode = StreamMode.Writer;
|
||||
}
|
||||
else if (_streamMode != StreamMode.Writer)
|
||||
{
|
||||
throw new ZlibException("Cannot Write after Reading.");
|
||||
}
|
||||
|
||||
if (count == 0)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// first reference of z property will initialize the private var _z
|
||||
z.InputBuffer = buffer;
|
||||
_z.NextIn = offset;
|
||||
_z.AvailableBytesIn = count;
|
||||
bool done = false;
|
||||
do
|
||||
{
|
||||
_z.OutputBuffer = workingBuffer;
|
||||
_z.NextOut = 0;
|
||||
_z.AvailableBytesOut = _workingBuffer.Length;
|
||||
int rc = (_wantCompress)
|
||||
? _z.Deflate(_flushMode)
|
||||
: _z.Inflate(_flushMode);
|
||||
if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END)
|
||||
{
|
||||
throw new ZlibException((_wantCompress ? "de" : "in") + "flating: " + _z.Message);
|
||||
}
|
||||
|
||||
//if (_workingBuffer.Length - _z.AvailableBytesOut > 0)
|
||||
_stream.Write(_workingBuffer, 0, _workingBuffer.Length - _z.AvailableBytesOut);
|
||||
|
||||
done = _z.AvailableBytesIn == 0 && _z.AvailableBytesOut != 0;
|
||||
|
||||
// If GZIP and de-compress, we're done when 8 bytes remain.
|
||||
if (_flavor == ZlibStreamFlavor.GZIP && !_wantCompress)
|
||||
{
|
||||
done = (_z.AvailableBytesIn == 8 && _z.AvailableBytesOut != 0);
|
||||
}
|
||||
}
|
||||
while (!done);
|
||||
}
|
||||
|
||||
private void finish()
|
||||
{
|
||||
if (_z == null)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
if (_streamMode == StreamMode.Writer)
|
||||
{
|
||||
bool done = false;
|
||||
do
|
||||
{
|
||||
_z.OutputBuffer = workingBuffer;
|
||||
_z.NextOut = 0;
|
||||
_z.AvailableBytesOut = _workingBuffer.Length;
|
||||
int rc = (_wantCompress)
|
||||
? _z.Deflate(FlushType.Finish)
|
||||
: _z.Inflate(FlushType.Finish);
|
||||
|
||||
if (rc != ZlibConstants.Z_STREAM_END && rc != ZlibConstants.Z_OK)
|
||||
{
|
||||
string verb = (_wantCompress ? "de" : "in") + "flating";
|
||||
if (_z.Message == null)
|
||||
{
|
||||
throw new ZlibException(String.Format("{0}: (rc = {1})", verb, rc));
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new ZlibException(verb + ": " + _z.Message);
|
||||
}
|
||||
}
|
||||
|
||||
if (_workingBuffer.Length - _z.AvailableBytesOut > 0)
|
||||
{
|
||||
_stream.Write(_workingBuffer, 0, _workingBuffer.Length - _z.AvailableBytesOut);
|
||||
}
|
||||
|
||||
done = _z.AvailableBytesIn == 0 && _z.AvailableBytesOut != 0;
|
||||
// If GZIP and de-compress, we're done when 8 bytes remain.
|
||||
if (_flavor == ZlibStreamFlavor.GZIP && !_wantCompress)
|
||||
done = (_z.AvailableBytesIn == 8 && _z.AvailableBytesOut != 0);
|
||||
|
||||
}
|
||||
while (!done);
|
||||
|
||||
Flush();
|
||||
|
||||
// workitem 7159
|
||||
if (_flavor == ZlibStreamFlavor.GZIP)
|
||||
{
|
||||
if (_wantCompress)
|
||||
{
|
||||
// Emit the GZIP trailer: CRC32 and size mod 2^32
|
||||
int c1 = crc.Crc32Result;
|
||||
_stream.Write(BitConverter.GetBytes(c1), 0, 4);
|
||||
int c2 = (Int32)(crc.TotalBytesRead & 0x00000000FFFFFFFF);
|
||||
_stream.Write(BitConverter.GetBytes(c2), 0, 4);
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new ZlibException("Writing with decompression is not supported.");
|
||||
}
|
||||
}
|
||||
}
|
||||
// workitem 7159
|
||||
else if (_streamMode == StreamMode.Reader)
|
||||
{
|
||||
if (_flavor == ZlibStreamFlavor.GZIP)
|
||||
{
|
||||
if (!_wantCompress)
|
||||
{
|
||||
// workitem 8501: handle edge case (decompress empty stream)
|
||||
if (_z.TotalBytesOut == 0L)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// Read and potentially verify the GZIP trailer:
|
||||
// CRC32 and size mod 2^32
|
||||
byte[] trailer = new byte[8];
|
||||
|
||||
// workitems 8679 & 12554
|
||||
if (_z.AvailableBytesIn < 8)
|
||||
{
|
||||
// Make sure we have read to the end of the stream
|
||||
Array.Copy(_z.InputBuffer, _z.NextIn, trailer, 0, _z.AvailableBytesIn);
|
||||
int bytesNeeded = 8 - _z.AvailableBytesIn;
|
||||
int bytesRead = _stream.Read(trailer,
|
||||
_z.AvailableBytesIn,
|
||||
bytesNeeded);
|
||||
if (bytesNeeded != bytesRead)
|
||||
{
|
||||
throw new ZlibException(String.Format("Missing or incomplete GZIP trailer. Expected 8 bytes, got {0}.",
|
||||
_z.AvailableBytesIn + bytesRead));
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
Array.Copy(_z.InputBuffer, _z.NextIn, trailer, 0, trailer.Length);
|
||||
}
|
||||
|
||||
Int32 crc32_expected = BitConverter.ToInt32(trailer, 0);
|
||||
Int32 crc32_actual = crc.Crc32Result;
|
||||
Int32 isize_expected = BitConverter.ToInt32(trailer, 4);
|
||||
Int32 isize_actual = (Int32)(_z.TotalBytesOut & 0x00000000FFFFFFFF);
|
||||
|
||||
if (crc32_actual != crc32_expected)
|
||||
{
|
||||
throw new ZlibException(String.Format("Bad CRC32 in GZIP trailer. (actual({0:X8})!=expected({1:X8}))", crc32_actual, crc32_expected));
|
||||
}
|
||||
|
||||
if (isize_actual != isize_expected)
|
||||
{
|
||||
throw new ZlibException(String.Format("Bad size in GZIP trailer. (actual({0})!=expected({1}))", isize_actual, isize_expected));
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new ZlibException("Reading with compression is not supported.");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void end()
|
||||
{
|
||||
if (z == null)
|
||||
{
|
||||
return;
|
||||
}
|
||||
if (_wantCompress)
|
||||
{
|
||||
_z.EndDeflate();
|
||||
}
|
||||
else
|
||||
{
|
||||
_z.EndInflate();
|
||||
}
|
||||
_z = null;
|
||||
}
|
||||
|
||||
|
||||
public override void Close()
|
||||
{
|
||||
if (_stream == null)
|
||||
{
|
||||
return;
|
||||
}
|
||||
try
|
||||
{
|
||||
finish();
|
||||
}
|
||||
finally
|
||||
{
|
||||
end();
|
||||
if (!_leaveOpen)
|
||||
{
|
||||
_stream.Close();
|
||||
}
|
||||
_stream = null;
|
||||
}
|
||||
}
|
||||
|
||||
public override void Flush()
|
||||
{
|
||||
_stream.Flush();
|
||||
}
|
||||
|
||||
public override System.Int64 Seek(System.Int64 offset, System.IO.SeekOrigin origin)
|
||||
{
|
||||
throw new NotImplementedException();
|
||||
//_outStream.Seek(offset, origin);
|
||||
}
|
||||
public override void SetLength(System.Int64 value)
|
||||
{
|
||||
_stream.SetLength(value);
|
||||
}
|
||||
|
||||
#if NOT
|
||||
public int Read()
|
||||
{
|
||||
if (Read(_buf1, 0, 1) == 0)
|
||||
return 0;
|
||||
// calculate CRC after reading
|
||||
if (crc!=null)
|
||||
crc.SlurpBlock(_buf1,0,1);
|
||||
return (_buf1[0] & 0xFF);
|
||||
}
|
||||
#endif
|
||||
|
||||
private bool nomoreinput = false;
|
||||
|
||||
private string ReadZeroTerminatedString()
|
||||
{
|
||||
var list = new System.Collections.Generic.List<byte>();
|
||||
bool done = false;
|
||||
do
|
||||
{
|
||||
// workitem 7740
|
||||
int n = _stream.Read(_buf1, 0, 1);
|
||||
if (n != 1)
|
||||
throw new ZlibException("Unexpected EOF reading GZIP header.");
|
||||
else
|
||||
{
|
||||
if (_buf1[0] == 0)
|
||||
{
|
||||
done = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
list.Add(_buf1[0]);
|
||||
}
|
||||
}
|
||||
} while (!done);
|
||||
byte[] a = list.ToArray();
|
||||
return GZipStream.iso8859dash1.GetString(a, 0, a.Length);
|
||||
}
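The optional GZIP file name and comment fields read by this helper are zero-terminated strings in ISO-8859-1. A hedged sketch of the same decoding over an in-memory buffer, using the BCL Latin-1 encoding rather than the GZipStream.iso8859dash1 field that the vendored code relies on:

using System.Collections.Generic;
using System.Text;

static class ZeroTerminatedStringSketch
{
    // Read bytes until a 0x00 terminator and decode them as ISO-8859-1 (Latin-1).
    public static string Read(byte[] data, ref int offset)
    {
        var bytes = new List<byte>();
        while (offset < data.Length && data[offset] != 0)
        {
            bytes.Add(data[offset]);
            offset++;
        }
        if (offset < data.Length)
        {
            offset++; // skip the terminator itself
        }
        return Encoding.GetEncoding("ISO-8859-1").GetString(bytes.ToArray());
    }
}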
|
||||
|
||||
private int _ReadAndValidateGzipHeader()
|
||||
{
|
||||
int totalBytesRead = 0;
|
||||
// read the header on the first read
|
||||
byte[] header = new byte[10];
|
||||
int n = _stream.Read(header, 0, header.Length);
|
||||
|
||||
// workitem 8501: handle edge case (decompress empty stream)
|
||||
if (n == 0)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (n != 10)
|
||||
{
|
||||
throw new ZlibException("Not a valid GZIP stream.");
|
||||
}
|
||||
|
||||
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
|
||||
{
|
||||
throw new ZlibException("Bad GZIP header.");
|
||||
}
|
||||
|
||||
Int32 timet = BitConverter.ToInt32(header, 4);
|
||||
_GzipMtime = GZipStream._unixEpoch.AddSeconds(timet);
|
||||
totalBytesRead += n;
|
||||
if ((header[3] & 0x04) == 0x04)
|
||||
{
|
||||
// read and discard extra field
|
||||
n = _stream.Read(header, 0, 2); // 2-byte length field
|
||||
totalBytesRead += n;
|
||||
|
||||
Int16 extraLength = (Int16)(header[0] + header[1] * 256);
|
||||
byte[] extra = new byte[extraLength];
|
||||
n = _stream.Read(extra, 0, extra.Length);
|
||||
if (n != extraLength)
|
||||
{
|
||||
throw new ZlibException("Unexpected end-of-file reading GZIP header.");
|
||||
}
|
||||
totalBytesRead += n;
|
||||
}
|
||||
if ((header[3] & 0x08) == 0x08)
|
||||
{
|
||||
_GzipFileName = ReadZeroTerminatedString();
|
||||
}
|
||||
if ((header[3] & 0x10) == 0x10)
|
||||
{
|
||||
_GzipComment = ReadZeroTerminatedString();
|
||||
}
|
||||
if ((header[3] & 0x02) == 0x02)
|
||||
{
|
||||
Read(_buf1, 0, 1); // CRC16, ignore
|
||||
}
|
||||
|
||||
return totalBytesRead;
|
||||
}
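For reference, the fixed part of the GZIP header validated above is ten bytes: the magic pair 0x1F 0x8B, compression method 8 (DEFLATE), a flag byte, a four-byte little-endian Unix mtime, then XFL and OS. The FEXTRA, FNAME, FCOMMENT and FHCRC bits of the flag byte gate the optional fields parsed afterwards. A small sketch, separate from the vendored code, that builds such a header with no optional fields:

using System;

static class GzipHeaderSketch
{
    // FLG bits from RFC 1952, matching the masks tested in _ReadAndValidateGzipHeader().
    public const byte FTEXT = 0x01, FHCRC = 0x02, FEXTRA = 0x04, FNAME = 0x08, FCOMMENT = 0x10;

    private static readonly DateTime UnixEpoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);

    // Build the 10-byte fixed header with none of the optional fields present.
    public static byte[] FixedHeader(DateTime mtimeUtc)
    {
        byte[] header = new byte[10];
        header[0] = 0x1F;                                         // ID1
        header[1] = 0x8B;                                         // ID2
        header[2] = 8;                                            // CM = DEFLATE
        header[3] = 0;                                            // FLG: no optional fields
        uint seconds = (uint)(mtimeUtc - UnixEpoch).TotalSeconds; // MTIME
        BitConverter.GetBytes(seconds).CopyTo(header, 4);
        header[8] = 0;                                            // XFL
        header[9] = 0xFF;                                         // OS = unknown
        return header;
    }
}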
|
||||
|
||||
public override System.Int32 Read(System.Byte[] buffer, System.Int32 offset, System.Int32 count)
|
||||
{
|
||||
// According to MS documentation, any implementation of the IO.Stream.Read function must:
|
||||
// (a) throw an exception if offset & count reference an invalid part of the buffer,
|
||||
// or if count < 0, or if buffer is null
|
||||
// (b) return 0 only upon EOF, or if count = 0
|
||||
// (c) if not EOF, then return at least 1 byte, up to <count> bytes
|
||||
|
||||
if (_streamMode == StreamMode.Undefined)
|
||||
{
|
||||
if (!this._stream.CanRead)
|
||||
{
|
||||
throw new ZlibException("The stream is not readable.");
|
||||
}
// For the first read, set up some controls.
|
||||
_streamMode = StreamMode.Reader;
|
||||
// (The first reference to _z goes through the private accessor which
|
||||
// may initialize it.)
|
||||
z.AvailableBytesIn = 0;
|
||||
if (_flavor == ZlibStreamFlavor.GZIP)
|
||||
{
|
||||
_gzipHeaderByteCount = _ReadAndValidateGzipHeader();
|
||||
// workitem 8501: handle edge case (decompress empty stream)
|
||||
if (_gzipHeaderByteCount == 0)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (_streamMode != StreamMode.Reader)
|
||||
{
|
||||
throw new ZlibException("Cannot Read after Writing.");
|
||||
}
|
||||
|
||||
if (count == 0)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
if (nomoreinput && _wantCompress)
|
||||
{
|
||||
return 0; // workitem 8557
|
||||
}
|
||||
if (buffer == null)
|
||||
{
|
||||
throw new ArgumentNullException("buffer");
|
||||
}
|
||||
if (count < 0)
|
||||
{
|
||||
throw new ArgumentOutOfRangeException("count");
|
||||
}
|
||||
if (offset < buffer.GetLowerBound(0))
|
||||
{
|
||||
throw new ArgumentOutOfRangeException("offset");
|
||||
}
|
||||
if ((offset + count) > buffer.GetLength(0))
|
||||
{
|
||||
throw new ArgumentOutOfRangeException("count");
|
||||
}
|
||||
|
||||
int rc = 0;
|
||||
|
||||
// set up the output of the deflate/inflate codec:
|
||||
_z.OutputBuffer = buffer;
|
||||
_z.NextOut = offset;
|
||||
_z.AvailableBytesOut = count;
|
||||
|
||||
// This is necessary in case _workingBuffer has been resized. (new byte[])
|
||||
// (The first reference to _workingBuffer goes through the private accessor which
|
||||
// may initialize it.)
|
||||
_z.InputBuffer = workingBuffer;
|
||||
|
||||
do
|
||||
{
|
||||
// need data in _workingBuffer in order to deflate/inflate. Here, we check if we have any.
|
||||
if ((_z.AvailableBytesIn == 0) && (!nomoreinput))
|
||||
{
|
||||
// No data available, so try to Read data from the captive stream.
|
||||
_z.NextIn = 0;
|
||||
_z.AvailableBytesIn = _stream.Read(_workingBuffer, 0, _workingBuffer.Length);
|
||||
if (_z.AvailableBytesIn == 0)
|
||||
{
|
||||
nomoreinput = true;
|
||||
}
|
||||
}
|
||||
// we have data in InputBuffer; now compress or decompress as appropriate
|
||||
rc = (_wantCompress)
|
||||
? _z.Deflate(_flushMode)
|
||||
: _z.Inflate(_flushMode);
|
||||
|
||||
if (nomoreinput && (rc == ZlibConstants.Z_BUF_ERROR))
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END)
|
||||
{
|
||||
throw new ZlibException(String.Format("{0}flating: rc={1} msg={2}", (_wantCompress ? "de" : "in"), rc, _z.Message));
|
||||
}
|
||||
|
||||
if ((nomoreinput || rc == ZlibConstants.Z_STREAM_END) && (_z.AvailableBytesOut == count))
|
||||
{
|
||||
break; // nothing more to read
|
||||
}
|
||||
}
|
||||
//while (_z.AvailableBytesOut == count && rc == ZlibConstants.Z_OK);
|
||||
while (_z.AvailableBytesOut > 0 && !nomoreinput && rc == ZlibConstants.Z_OK);
|
||||
|
||||
// workitem 8557
|
||||
// is there more room in output?
|
||||
if (_z.AvailableBytesOut > 0)
|
||||
{
|
||||
if (rc == ZlibConstants.Z_OK && _z.AvailableBytesIn == 0)
|
||||
{
|
||||
// deferred
|
||||
}
|
||||
|
||||
// are we completely done reading?
|
||||
if (nomoreinput)
|
||||
{
|
||||
// and in compression?
|
||||
if (_wantCompress)
|
||||
{
|
||||
// no more input data available; therefore we flush to
|
||||
// try to complete the read
|
||||
rc = _z.Deflate(FlushType.Finish);
|
||||
|
||||
if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END)
|
||||
{
|
||||
throw new ZlibException(String.Format("Deflating: rc={0} msg={1}", rc, _z.Message));
|
||||
}
}
|
||||
}
|
||||
}
|
||||
|
||||
rc = (count - _z.AvailableBytesOut);
|
||||
|
||||
// calculate CRC after reading
|
||||
if (crc != null)
|
||||
{
|
||||
crc.SlurpBlock(buffer, offset, rc);
|
||||
}
|
||||
|
||||
return rc;
|
||||
}
|
||||
|
||||
public override System.Boolean CanRead
|
||||
{
|
||||
get { return this._stream.CanRead; }
|
||||
}
|
||||
|
||||
public override System.Boolean CanSeek
|
||||
{
|
||||
get { return this._stream.CanSeek; }
|
||||
}
|
||||
|
||||
public override System.Boolean CanWrite
|
||||
{
|
||||
get { return this._stream.CanWrite; }
|
||||
}
|
||||
|
||||
public override System.Int64 Length
|
||||
{
|
||||
get { return _stream.Length; }
|
||||
}
|
||||
|
||||
public override long Position
|
||||
{
|
||||
get { throw new NotImplementedException(); }
|
||||
set { throw new NotImplementedException(); }
|
||||
}
|
||||
|
||||
internal enum StreamMode
|
||||
{
|
||||
Writer,
|
||||
Reader,
|
||||
Undefined,
|
||||
}
|
||||
|
||||
public static void CompressString(String s, Stream compressor)
|
||||
{
|
||||
byte[] uncompressed = System.Text.Encoding.UTF8.GetBytes(s);
|
||||
using (compressor)
|
||||
{
|
||||
compressor.Write(uncompressed, 0, uncompressed.Length);
|
||||
}
|
||||
}
|
||||
|
||||
public static void CompressBuffer(byte[] b, Stream compressor)
|
||||
{
|
||||
// workitem 8460
|
||||
using (compressor)
|
||||
{
|
||||
compressor.Write(b, 0, b.Length);
|
||||
}
|
||||
}
|
||||
|
||||
public static String UncompressString(byte[] compressed, Stream decompressor)
|
||||
{
|
||||
// workitem 8460
|
||||
byte[] working = new byte[1024];
|
||||
var encoding = System.Text.Encoding.UTF8;
|
||||
using (var output = new MemoryStream())
|
||||
{
|
||||
using (decompressor)
|
||||
{
|
||||
int n;
|
||||
while ((n = decompressor.Read(working, 0, working.Length)) != 0)
|
||||
{
|
||||
output.Write(working, 0, n);
|
||||
}
|
||||
}
|
||||
|
||||
// reset to allow read from start
|
||||
output.Seek(0, SeekOrigin.Begin);
|
||||
var sr = new StreamReader(output, encoding);
|
||||
return sr.ReadToEnd();
|
||||
}
|
||||
}
|
||||
|
||||
public static byte[] UncompressBuffer(byte[] compressed, Stream decompressor)
|
||||
{
|
||||
// workitem 8460
|
||||
byte[] working = new byte[1024];
|
||||
using (var output = new MemoryStream())
|
||||
{
|
||||
using (decompressor)
|
||||
{
|
||||
int n;
|
||||
while ((n = decompressor.Read(working, 0, working.Length)) != 0)
|
||||
{
|
||||
output.Write(working, 0, n);
|
||||
}
|
||||
}
|
||||
return output.ToArray();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
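The static CompressBuffer/UncompressBuffer/UncompressString helpers above show the intended drain pattern. A hedged end-to-end sketch of the same pattern using the ZlibStream wrapper added later in this commit, compressing into and inflating out of MemoryStreams; it is an illustration, not code from the vendored library.

using System.IO;
using Ionic.Zlib;

static class ZlibRoundTripSketch
{
    // Deflate a buffer into a ZLIB (RFC 1950) byte array.
    public static byte[] Compress(byte[] raw)
    {
        using (var ms = new MemoryStream())
        {
            // leaveOpen: true, so ms is still usable after the compressor is closed.
            using (var compressor = new ZlibStream(ms, CompressionMode.Compress, CompressionLevel.Default, true))
            {
                compressor.Write(raw, 0, raw.Length);
            }
            return ms.ToArray();
        }
    }

    // Inflate it back, draining the stream the same way UncompressBuffer does above.
    public static byte[] Decompress(byte[] compressed)
    {
        byte[] working = new byte[1024];
        using (var output = new MemoryStream())
        using (var decompressor = new ZlibStream(new MemoryStream(compressed), CompressionMode.Decompress))
        {
            int n;
            while ((n = decompressor.Read(working, 0, working.Length)) != 0)
            {
                output.Write(working, 0, n);
            }
            return output.ToArray();
        }
    }
}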
|
||||
745
SabreTools.Library/External/Zlib/ZlibCodec.cs
vendored
Normal file
@@ -0,0 +1,745 @@
|
||||
// ZlibCodec.cs
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// Copyright (c) 2009 Dino Chiesa and Microsoft Corporation.
|
||||
// All rights reserved.
|
||||
//
|
||||
// This code module is part of DotNetZip, a zipfile class library.
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// This code is licensed under the Microsoft Public License.
|
||||
// See the file License.txt for the license details.
|
||||
// More info on: http://dotnetzip.codeplex.com
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// last saved (in emacs):
|
||||
// Time-stamp: <2009-November-03 15:40:51>
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// This module defines a Codec for ZLIB compression and
|
||||
// decompression. This code extends code that was based the jzlib
|
||||
// implementation of zlib, but this code is completely novel. The codec
|
||||
// class is new, and encapsulates some behaviors that are new, and some
|
||||
// that were present in other classes in the jzlib code base. In
|
||||
// keeping with the license for jzlib, the copyright to the jzlib code
|
||||
// is included below.
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// Copyright (c) 2000,2001,2002,2003 ymnk, JCraft,Inc. All rights reserved.
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are met:
|
||||
//
|
||||
// 1. Redistributions of source code must retain the above copyright notice,
|
||||
// this list of conditions and the following disclaimer.
|
||||
//
|
||||
// 2. Redistributions in binary form must reproduce the above copyright
|
||||
// notice, this list of conditions and the following disclaimer in
|
||||
// the documentation and/or other materials provided with the distribution.
|
||||
//
|
||||
// 3. The names of the authors may not be used to endorse or promote products
|
||||
// derived from this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES,
|
||||
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
// FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JCRAFT,
|
||||
// INC. OR ANY CONTRIBUTORS TO THIS SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
|
||||
// OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
|
||||
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
//
|
||||
// -----------------------------------------------------------------------
|
||||
//
|
||||
// This program is based on zlib-1.1.3; credit to authors
|
||||
// Jean-loup Gailly(jloup@gzip.org) and Mark Adler(madler@alumni.caltech.edu)
|
||||
// and contributors of zlib.
|
||||
//
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
using System;
|
||||
using System.Runtime.InteropServices;
|
||||
|
||||
namespace Ionic.Zlib
|
||||
{
|
||||
/// <summary>
|
||||
/// Encoder and Decoder for ZLIB and DEFLATE (IETF RFC1950 and RFC1951).
|
||||
/// </summary>
|
||||
///
|
||||
/// <remarks>
|
||||
/// This class compresses and decompresses data according to the Deflate algorithm
|
||||
/// and optionally, the ZLIB format, as documented in <see
|
||||
/// href="http://www.ietf.org/rfc/rfc1950.txt">RFC 1950 - ZLIB</see> and <see
|
||||
/// href="http://www.ietf.org/rfc/rfc1951.txt">RFC 1951 - DEFLATE</see>.
|
||||
/// </remarks>
|
||||
[Guid("ebc25cf6-9120-4283-b972-0e5520d0000D")]
|
||||
[System.Runtime.InteropServices.ComVisible(true)]
|
||||
#if !NETCF
|
||||
[System.Runtime.InteropServices.ClassInterface(System.Runtime.InteropServices.ClassInterfaceType.AutoDispatch)]
|
||||
#endif
|
||||
sealed public class ZlibCodec
|
||||
{
|
||||
/// <summary>
|
||||
/// The buffer from which data is taken.
|
||||
/// </summary>
|
||||
public byte[] InputBuffer;
|
||||
|
||||
/// <summary>
|
||||
/// An index into the InputBuffer array, indicating where to start reading.
|
||||
/// </summary>
|
||||
public int NextIn;
|
||||
|
||||
/// <summary>
|
||||
/// The number of bytes available in the InputBuffer, starting at NextIn.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// Generally you should set this to InputBuffer.Length before the first Inflate() or Deflate() call.
|
||||
/// The class will update this number as calls to Inflate/Deflate are made.
|
||||
/// </remarks>
|
||||
public int AvailableBytesIn;
|
||||
|
||||
/// <summary>
|
||||
/// Total number of bytes read so far, through all calls to Inflate()/Deflate().
|
||||
/// </summary>
|
||||
public long TotalBytesIn;
|
||||
|
||||
/// <summary>
|
||||
/// Buffer to store output data.
|
||||
/// </summary>
|
||||
public byte[] OutputBuffer;
|
||||
|
||||
/// <summary>
|
||||
/// An index into the OutputBuffer array, indicating where to start writing.
|
||||
/// </summary>
|
||||
public int NextOut;
|
||||
|
||||
/// <summary>
|
||||
/// The number of bytes available in the OutputBuffer, starting at NextOut.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// Generally you should set this to OutputBuffer.Length before the first Inflate() or Deflate() call.
|
||||
/// The class will update this number as calls to Inflate/Deflate are made.
|
||||
/// </remarks>
|
||||
public int AvailableBytesOut;
|
||||
|
||||
/// <summary>
|
||||
/// Total number of bytes written to the output so far, through all calls to Inflate()/Deflate().
|
||||
/// </summary>
|
||||
public long TotalBytesOut;
|
||||
|
||||
/// <summary>
|
||||
/// used for diagnostics, when something goes wrong!
|
||||
/// </summary>
|
||||
public System.String Message;
|
||||
|
||||
internal DeflateManager dstate;
|
||||
internal InflateManager istate;
|
||||
|
||||
internal uint _Adler32;
|
||||
|
||||
/// <summary>
|
||||
/// The compression level to use in this codec. Useful only in compression mode.
|
||||
/// </summary>
|
||||
public CompressionLevel CompressLevel = CompressionLevel.Default;
|
||||
|
||||
/// <summary>
|
||||
/// The number of Window Bits to use.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// This gauges the size of the sliding window, and hence the
|
||||
/// compression effectiveness as well as memory consumption. It's best to just leave this
|
||||
/// setting alone if you don't know what it is. The maximum value is 15 bits, which implies
|
||||
/// a 32k window.
|
||||
/// </remarks>
|
||||
public int WindowBits = ZlibConstants.WindowBitsDefault;
|
||||
|
||||
/// <summary>
|
||||
/// The compression strategy to use.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// This is only effective in compression. The theory offered by ZLIB is that different
|
||||
/// strategies could potentially produce significant differences in compression behavior
|
||||
/// for different data sets. Unfortunately I don't have any good recommendations for how
|
||||
/// to set it differently. When I tested changing the strategy I got minimally different
|
||||
/// compression performance. It's best to leave this property alone if you don't have a
|
||||
/// good feel for it. Or, you may want to produce a test harness that runs through the
|
||||
/// different strategy options and evaluates them on different file types. If you do that,
|
||||
/// let me know your results.
|
||||
/// </remarks>
|
||||
public CompressionStrategy Strategy = CompressionStrategy.Default;
|
||||
|
||||
/// <summary>
|
||||
/// The Adler32 checksum on the data transferred through the codec so far. You probably don't need to look at this.
|
||||
/// </summary>
|
||||
public int Adler32 { get { return (int)_Adler32; } }
|
||||
|
||||
/// <summary>
|
||||
/// Create a ZlibCodec.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// If you use this default constructor, you will later have to explicitly call
|
||||
/// InitializeInflate() or InitializeDeflate() before using the ZlibCodec to compress
|
||||
/// or decompress.
|
||||
/// </remarks>
|
||||
public ZlibCodec() { }
|
||||
|
||||
/// <summary>
|
||||
/// Create a ZlibCodec that either compresses or decompresses.
|
||||
/// </summary>
|
||||
/// <param name="mode">
|
||||
/// Indicates whether the codec should compress (deflate) or decompress (inflate).
|
||||
/// </param>
|
||||
public ZlibCodec(CompressionMode mode)
|
||||
{
|
||||
if (mode == CompressionMode.Compress)
|
||||
{
|
||||
int rc = InitializeDeflate();
|
||||
if (rc != ZlibConstants.Z_OK)
|
||||
{
|
||||
throw new ZlibException("Cannot initialize for deflate.");
|
||||
}
|
||||
}
|
||||
else if (mode == CompressionMode.Decompress)
|
||||
{
|
||||
int rc = InitializeInflate();
|
||||
if (rc != ZlibConstants.Z_OK)
|
||||
{
|
||||
throw new ZlibException("Cannot initialize for inflate.");
|
||||
}
|
||||
}
|
||||
else throw new ZlibException("Invalid ZlibStreamFlavor.");
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Initialize the inflation state.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// It is not necessary to call this before using the ZlibCodec to inflate data;
|
||||
/// it is implicitly called by the constructor that takes a CompressionMode.
|
||||
/// </remarks>
|
||||
/// <returns>Z_OK if everything goes well.</returns>
|
||||
public int InitializeInflate()
|
||||
{
|
||||
return InitializeInflate(this.WindowBits);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Initialize the inflation state with an explicit flag to
|
||||
/// govern the handling of RFC1950 header bytes.
|
||||
/// </summary>
|
||||
///
|
||||
/// <remarks>
|
||||
/// By default, the ZLIB header defined in <see
|
||||
/// href="http://www.ietf.org/rfc/rfc1950.txt">RFC 1950</see> is expected. If
|
||||
/// you want to read a zlib stream you should specify true for
|
||||
/// expectRfc1950Header. If you have a deflate stream, you will want to specify
|
||||
/// false. It is only necessary to invoke this initializer explicitly if you
|
||||
/// want to specify false.
|
||||
/// </remarks>
|
||||
///
|
||||
/// <param name="expectRfc1950Header">whether to expect an RFC1950 header byte
|
||||
/// pair when reading the stream of data to be inflated.</param>
|
||||
///
|
||||
/// <returns>Z_OK if everything goes well.</returns>
|
||||
public int InitializeInflate(bool expectRfc1950Header)
|
||||
{
|
||||
return InitializeInflate(this.WindowBits, expectRfc1950Header);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Initialize the ZlibCodec for inflation, with the specified number of window bits.
|
||||
/// </summary>
|
||||
/// <param name="windowBits">The number of window bits to use. If you need to ask what that is,
|
||||
/// then you shouldn't be calling this initializer.</param>
|
||||
/// <returns>Z_OK if all goes well.</returns>
|
||||
public int InitializeInflate(int windowBits)
|
||||
{
|
||||
this.WindowBits = windowBits;
|
||||
return InitializeInflate(windowBits, true);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Initialize the inflation state with an explicit flag to govern the handling of
|
||||
/// RFC1950 header bytes.
|
||||
/// </summary>
|
||||
///
|
||||
/// <remarks>
|
||||
/// If you want to read a zlib stream you should specify true for
|
||||
/// expectRfc1950Header. In this case, the library will expect to find a ZLIB
|
||||
/// header, as defined in <see href="http://www.ietf.org/rfc/rfc1950.txt">RFC
|
||||
/// 1950</see>, in the compressed stream. If you will be reading a DEFLATE or
|
||||
/// GZIP stream, which does not have such a header, you will want to specify
|
||||
/// false.
|
||||
/// </remarks>
|
||||
///
|
||||
/// <param name="expectRfc1950Header">whether to expect an RFC1950 header byte pair when reading
|
||||
/// the stream of data to be inflated.</param>
|
||||
/// <param name="windowBits">The number of window bits to use. If you need to ask what that is,
|
||||
/// then you shouldn't be calling this initializer.</param>
|
||||
/// <returns>Z_OK if everything goes well.</returns>
|
||||
public int InitializeInflate(int windowBits, bool expectRfc1950Header)
|
||||
{
|
||||
this.WindowBits = windowBits;
|
||||
if (dstate != null)
|
||||
{
|
||||
throw new ZlibException("You may not call InitializeInflate() after calling InitializeDeflate().");
|
||||
}
|
||||
istate = new InflateManager(expectRfc1950Header);
|
||||
return istate.Initialize(this, windowBits);
|
||||
}
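As the remarks above explain, passing false for expectRfc1950Header is how a raw DEFLATE stream with no two-byte zlib header gets inflated. A short sketch of the two initializations, offered as an illustration only:

using Ionic.Zlib;

static class InflateInitSketch
{
    // Expect a zlib (RFC 1950) header: the common case for .zlib data.
    public static ZlibCodec ForZlib()
    {
        var codec = new ZlibCodec();
        codec.InitializeInflate(true);
        return codec;
    }

    // Raw DEFLATE (RFC 1951) with no header, for example the payload of a ZIP entry.
    public static ZlibCodec ForRawDeflate()
    {
        var codec = new ZlibCodec();
        codec.InitializeInflate(false);
        return codec;
    }
}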
|
||||
|
||||
/// <summary>
|
||||
/// Inflate the data in the InputBuffer, placing the result in the OutputBuffer.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// You must have set InputBuffer and OutputBuffer, NextIn and NextOut, and AvailableBytesIn and
|
||||
/// AvailableBytesOut before calling this method.
|
||||
/// </remarks>
|
||||
/// <example>
|
||||
/// <code>
|
||||
/// private void InflateBuffer()
|
||||
/// {
|
||||
/// int bufferSize = 1024;
|
||||
/// byte[] buffer = new byte[bufferSize];
|
||||
/// ZlibCodec decompressor = new ZlibCodec();
|
||||
///
|
||||
/// Console.WriteLine("\n============================================");
|
||||
/// Console.WriteLine("Size of Buffer to Inflate: {0} bytes.", CompressedBytes.Length);
|
||||
/// MemoryStream ms = new MemoryStream(DecompressedBytes);
|
||||
///
|
||||
/// int rc = decompressor.InitializeInflate();
|
||||
///
|
||||
/// decompressor.InputBuffer = CompressedBytes;
|
||||
/// decompressor.NextIn = 0;
|
||||
/// decompressor.AvailableBytesIn = CompressedBytes.Length;
|
||||
///
|
||||
/// decompressor.OutputBuffer = buffer;
|
||||
///
|
||||
/// // pass 1: inflate
|
||||
/// do
|
||||
/// {
|
||||
/// decompressor.NextOut = 0;
|
||||
/// decompressor.AvailableBytesOut = buffer.Length;
|
||||
/// rc = decompressor.Inflate(FlushType.None);
|
||||
///
|
||||
/// if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END)
|
||||
/// throw new Exception("inflating: " + decompressor.Message);
|
||||
///
|
||||
/// ms.Write(decompressor.OutputBuffer, 0, buffer.Length - decompressor.AvailableBytesOut);
|
||||
/// }
|
||||
/// while (decompressor.AvailableBytesIn > 0 || decompressor.AvailableBytesOut == 0);
|
||||
///
|
||||
/// // pass 2: finish and flush
|
||||
/// do
|
||||
/// {
|
||||
/// decompressor.NextOut = 0;
|
||||
/// decompressor.AvailableBytesOut = buffer.Length;
|
||||
/// rc = decompressor.Inflate(FlushType.Finish);
|
||||
///
|
||||
/// if (rc != ZlibConstants.Z_STREAM_END && rc != ZlibConstants.Z_OK)
|
||||
/// throw new Exception("inflating: " + decompressor.Message);
|
||||
///
|
||||
/// if (buffer.Length - decompressor.AvailableBytesOut > 0)
|
||||
/// ms.Write(buffer, 0, buffer.Length - decompressor.AvailableBytesOut);
|
||||
/// }
|
||||
/// while (decompressor.AvailableBytesIn > 0 || decompressor.AvailableBytesOut == 0);
|
||||
///
|
||||
/// decompressor.EndInflate();
|
||||
/// }
|
||||
///
|
||||
/// </code>
|
||||
/// </example>
|
||||
/// <param name="flush">The flush to use when inflating.</param>
|
||||
/// <returns>Z_OK if everything goes well.</returns>
|
||||
public int Inflate(FlushType flush)
|
||||
{
|
||||
if (istate == null)
|
||||
{
|
||||
throw new ZlibException("No Inflate State!");
|
||||
}
|
||||
return istate.Inflate(flush);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Ends an inflation session.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// Call this after successively calling Inflate(). This will cause all buffers to be flushed.
|
||||
/// After calling this you cannot call Inflate() without a intervening call to one of the
|
||||
/// InitializeInflate() overloads.
|
||||
/// </remarks>
|
||||
/// <returns>Z_OK if everything goes well.</returns>
|
||||
public int EndInflate()
|
||||
{
|
||||
if (istate == null)
|
||||
{
|
||||
throw new ZlibException("No Inflate State!");
|
||||
}
int ret = istate.End();
|
||||
istate = null;
|
||||
return ret;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// I don't know what this does!
|
||||
/// </summary>
|
||||
/// <returns>Z_OK if everything goes well.</returns>
|
||||
public int SyncInflate()
|
||||
{
|
||||
if (istate == null)
|
||||
{
|
||||
throw new ZlibException("No Inflate State!");
|
||||
}
|
||||
return istate.Sync();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Initialize the ZlibCodec for deflation operation.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// The codec will use the MAX window bits and the default level of compression.
|
||||
/// </remarks>
|
||||
/// <example>
|
||||
/// <code>
|
||||
/// int bufferSize = 40000;
|
||||
/// byte[] CompressedBytes = new byte[bufferSize];
|
||||
/// byte[] DecompressedBytes = new byte[bufferSize];
|
||||
///
|
||||
/// ZlibCodec compressor = new ZlibCodec();
|
||||
///
|
||||
/// compressor.InitializeDeflate(CompressionLevel.Default);
|
||||
///
|
||||
/// compressor.InputBuffer = System.Text.ASCIIEncoding.ASCII.GetBytes(TextToCompress);
|
||||
/// compressor.NextIn = 0;
|
||||
/// compressor.AvailableBytesIn = compressor.InputBuffer.Length;
|
||||
///
|
||||
/// compressor.OutputBuffer = CompressedBytes;
|
||||
/// compressor.NextOut = 0;
|
||||
/// compressor.AvailableBytesOut = CompressedBytes.Length;
|
||||
///
|
||||
/// while (compressor.TotalBytesIn != TextToCompress.Length && compressor.TotalBytesOut < bufferSize)
|
||||
/// {
|
||||
/// compressor.Deflate(FlushType.None);
|
||||
/// }
|
||||
///
|
||||
/// while (true)
|
||||
/// {
|
||||
/// int rc= compressor.Deflate(FlushType.Finish);
|
||||
/// if (rc == ZlibConstants.Z_STREAM_END) break;
|
||||
/// }
|
||||
///
|
||||
/// compressor.EndDeflate();
|
||||
///
|
||||
/// </code>
|
||||
/// </example>
|
||||
/// <returns>Z_OK if all goes well. You generally don't need to check the return code.</returns>
|
||||
public int InitializeDeflate()
|
||||
{
|
||||
return _InternalInitializeDeflate(true);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Initialize the ZlibCodec for deflation operation, using the specified CompressionLevel.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// The codec will use the maximum window bits (15) and the specified
|
||||
/// CompressionLevel. It will emit a ZLIB stream as it compresses.
|
||||
/// </remarks>
|
||||
/// <param name="level">The compression level for the codec.</param>
|
||||
/// <returns>Z_OK if all goes well.</returns>
|
||||
public int InitializeDeflate(CompressionLevel level)
|
||||
{
|
||||
this.CompressLevel = level;
|
||||
return _InternalInitializeDeflate(true);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Initialize the ZlibCodec for deflation operation, using the specified CompressionLevel,
|
||||
/// and the explicit flag governing whether to emit an RFC1950 header byte pair.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// The codec will use the maximum window bits (15) and the specified CompressionLevel.
|
||||
/// If you want to generate a zlib stream, you should specify true for
|
||||
/// wantRfc1950Header. In this case, the library will emit a ZLIB
|
||||
/// header, as defined in <see href="http://www.ietf.org/rfc/rfc1950.txt">RFC
|
||||
/// 1950</see>, in the compressed stream.
|
||||
/// </remarks>
|
||||
/// <param name="level">The compression level for the codec.</param>
|
||||
/// <param name="wantRfc1950Header">whether to emit an initial RFC1950 byte pair in the compressed stream.</param>
|
||||
/// <returns>Z_OK if all goes well.</returns>
|
||||
public int InitializeDeflate(CompressionLevel level, bool wantRfc1950Header)
|
||||
{
|
||||
this.CompressLevel = level;
|
||||
return _InternalInitializeDeflate(wantRfc1950Header);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Initialize the ZlibCodec for deflation operation, using the specified CompressionLevel,
|
||||
/// and the specified number of window bits.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// The codec will use the specified number of window bits and the specified CompressionLevel.
|
||||
/// </remarks>
|
||||
/// <param name="level">The compression level for the codec.</param>
|
||||
/// <param name="bits">the number of window bits to use. If you don't know what this means, don't use this method.</param>
|
||||
/// <returns>Z_OK if all goes well.</returns>
|
||||
public int InitializeDeflate(CompressionLevel level, int bits)
|
||||
{
|
||||
this.CompressLevel = level;
|
||||
this.WindowBits = bits;
|
||||
return _InternalInitializeDeflate(true);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Initialize the ZlibCodec for deflation operation, using the specified
|
||||
/// CompressionLevel, the specified number of window bits, and the explicit flag
|
||||
/// governing whether to emit an RFC1950 header byte pair.
|
||||
/// </summary>
|
||||
///
|
||||
/// <param name="level">The compression level for the codec.</param>
|
||||
/// <param name="wantRfc1950Header">whether to emit an initial RFC1950 byte pair in the compressed stream.</param>
|
||||
/// <param name="bits">the number of window bits to use. If you don't know what this means, don't use this method.</param>
|
||||
/// <returns>Z_OK if all goes well.</returns>
|
||||
public int InitializeDeflate(CompressionLevel level, int bits, bool wantRfc1950Header)
|
||||
{
|
||||
this.CompressLevel = level;
|
||||
this.WindowBits = bits;
|
||||
return _InternalInitializeDeflate(wantRfc1950Header);
|
||||
}
|
||||
|
||||
private int _InternalInitializeDeflate(bool wantRfc1950Header)
|
||||
{
|
||||
if (istate != null)
|
||||
{
|
||||
throw new ZlibException("You may not call InitializeDeflate() after calling InitializeInflate().");
|
||||
}
|
||||
dstate = new DeflateManager();
|
||||
dstate.WantRfc1950HeaderBytes = wantRfc1950Header;
|
||||
|
||||
return dstate.Initialize(this, this.CompressLevel, this.WindowBits, this.Strategy);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Deflate one batch of data.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// You must have set InputBuffer and OutputBuffer before calling this method.
|
||||
/// </remarks>
|
||||
/// <example>
|
||||
/// <code>
|
||||
/// private void DeflateBuffer(CompressionLevel level)
|
||||
/// {
|
||||
/// int bufferSize = 1024;
|
||||
/// byte[] buffer = new byte[bufferSize];
|
||||
/// ZlibCodec compressor = new ZlibCodec();
|
||||
///
|
||||
/// Console.WriteLine("\n============================================");
|
||||
/// Console.WriteLine("Size of Buffer to Deflate: {0} bytes.", UncompressedBytes.Length);
|
||||
/// MemoryStream ms = new MemoryStream();
|
||||
///
|
||||
/// int rc = compressor.InitializeDeflate(level);
|
||||
///
|
||||
/// compressor.InputBuffer = UncompressedBytes;
|
||||
/// compressor.NextIn = 0;
|
||||
/// compressor.AvailableBytesIn = UncompressedBytes.Length;
|
||||
///
|
||||
/// compressor.OutputBuffer = buffer;
|
||||
///
|
||||
/// // pass 1: deflate
|
||||
/// do
|
||||
/// {
|
||||
/// compressor.NextOut = 0;
|
||||
/// compressor.AvailableBytesOut = buffer.Length;
|
||||
/// rc = compressor.Deflate(FlushType.None);
|
||||
///
|
||||
/// if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END)
|
||||
/// throw new Exception("deflating: " + compressor.Message);
|
||||
///
|
||||
/// ms.Write(compressor.OutputBuffer, 0, buffer.Length - compressor.AvailableBytesOut);
|
||||
/// }
|
||||
/// while (compressor.AvailableBytesIn > 0 || compressor.AvailableBytesOut == 0);
|
||||
///
|
||||
/// // pass 2: finish and flush
|
||||
/// do
|
||||
/// {
|
||||
/// compressor.NextOut = 0;
|
||||
/// compressor.AvailableBytesOut = buffer.Length;
|
||||
/// rc = compressor.Deflate(FlushType.Finish);
|
||||
///
|
||||
/// if (rc != ZlibConstants.Z_STREAM_END && rc != ZlibConstants.Z_OK)
|
||||
/// throw new Exception("deflating: " + compressor.Message);
|
||||
///
|
||||
/// if (buffer.Length - compressor.AvailableBytesOut > 0)
|
||||
/// ms.Write(buffer, 0, buffer.Length - compressor.AvailableBytesOut);
|
||||
/// }
|
||||
/// while (compressor.AvailableBytesIn > 0 || compressor.AvailableBytesOut == 0);
|
||||
///
|
||||
/// compressor.EndDeflate();
|
||||
///
|
||||
/// ms.Seek(0, SeekOrigin.Begin);
|
||||
/// CompressedBytes = new byte[compressor.TotalBytesOut];
|
||||
/// ms.Read(CompressedBytes, 0, CompressedBytes.Length);
|
||||
/// }
|
||||
/// </code>
|
||||
/// </example>
|
||||
/// <param name="flush">whether to flush all data as you deflate. Generally you will want to
|
||||
/// use Z_NO_FLUSH here, in a series of calls to Deflate(), and then call EndDeflate() to
|
||||
/// flush everything.
|
||||
/// </param>
|
||||
/// <returns>Z_OK if all goes well.</returns>
|
||||
public int Deflate(FlushType flush)
|
||||
{
|
||||
if (dstate == null)
|
||||
{
|
||||
throw new ZlibException("No Deflate State!");
|
||||
}
|
||||
return dstate.Deflate(flush);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// End a deflation session.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// Call this after making a series of one or more calls to Deflate(). All buffers are flushed.
|
||||
/// </remarks>
|
||||
/// <returns>Z_OK if all goes well.</returns>
|
||||
public int EndDeflate()
|
||||
{
|
||||
if (dstate == null)
|
||||
{
|
||||
throw new ZlibException("No Deflate State!");
|
||||
}
|
||||
// TODO: dinoch Tue, 03 Nov 2009 15:39 (test this)
|
||||
//int ret = dstate.End();
|
||||
dstate = null;
|
||||
return ZlibConstants.Z_OK; //ret;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Reset a codec for another deflation session.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// Call this to reset the deflation state. For example if a thread is deflating
|
||||
/// non-consecutive blocks, you can call Reset() after the Deflate(Sync) of the first
|
||||
/// block and before the next Deflate(None) of the second block.
|
||||
/// </remarks>
|
||||
/// <returns>Z_OK if all goes well.</returns>
|
||||
public void ResetDeflate()
|
||||
{
|
||||
if (dstate == null)
|
||||
{
|
||||
throw new ZlibException("No Deflate State!");
|
||||
}
|
||||
dstate.Reset();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Set the CompressionStrategy and CompressionLevel for a deflation session.
|
||||
/// </summary>
|
||||
/// <param name="level">the level of compression to use.</param>
|
||||
/// <param name="strategy">the strategy to use for compression.</param>
|
||||
/// <returns>Z_OK if all goes well.</returns>
|
||||
public int SetDeflateParams(CompressionLevel level, CompressionStrategy strategy)
|
||||
{
|
||||
if (dstate == null)
|
||||
{
|
||||
throw new ZlibException("No Deflate State!");
|
||||
}
|
||||
return dstate.SetParams(level, strategy);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Set the dictionary to be used for either Inflation or Deflation.
|
||||
/// </summary>
|
||||
/// <param name="dictionary">The dictionary bytes to use.</param>
|
||||
/// <returns>Z_OK if all goes well.</returns>
|
||||
public int SetDictionary(byte[] dictionary)
|
||||
{
|
||||
if (istate != null)
|
||||
{
|
||||
return istate.SetDictionary(dictionary);
|
||||
}
|
||||
|
||||
if (dstate != null)
|
||||
{
|
||||
return dstate.SetDictionary(dictionary);
|
||||
}
|
||||
|
||||
throw new ZlibException("No Inflate or Deflate state!");
|
||||
}
|
||||
|
||||
// Flush as much pending output as possible. All deflate() output goes
|
||||
// through this function so some applications may wish to modify it
|
||||
// to avoid allocating a large strm->next_out buffer and copying into it.
|
||||
// (See also read_buf()).
|
||||
internal void flush_pending()
|
||||
{
|
||||
int len = dstate.pendingCount;
|
||||
|
||||
if (len > AvailableBytesOut)
|
||||
{
|
||||
len = AvailableBytesOut;
|
||||
}
|
||||
if (len == 0)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
if (dstate.pending.Length <= dstate.nextPending ||
|
||||
OutputBuffer.Length <= NextOut ||
|
||||
dstate.pending.Length < (dstate.nextPending + len) ||
|
||||
OutputBuffer.Length < (NextOut + len))
|
||||
{
|
||||
throw new ZlibException(String.Format("Invalid State. (pending.Length={0}, pendingCount={1})",
|
||||
dstate.pending.Length, dstate.pendingCount));
|
||||
}
|
||||
|
||||
Array.Copy(dstate.pending, dstate.nextPending, OutputBuffer, NextOut, len);
|
||||
|
||||
NextOut += len;
|
||||
dstate.nextPending += len;
|
||||
TotalBytesOut += len;
|
||||
AvailableBytesOut -= len;
|
||||
dstate.pendingCount -= len;
|
||||
if (dstate.pendingCount == 0)
|
||||
{
|
||||
dstate.nextPending = 0;
|
||||
}
|
||||
}
|
||||
|
||||
// Read a new buffer from the current input stream, update the adler32
|
||||
// and total number of bytes read. All deflate() input goes through
|
||||
// this function so some applications may wish to modify it to avoid
|
||||
// allocating a large strm->next_in buffer and copying from it.
|
||||
// (See also flush_pending()).
|
||||
internal int read_buf(byte[] buf, int start, int size)
|
||||
{
|
||||
int len = AvailableBytesIn;
|
||||
|
||||
if (len > size)
|
||||
{
|
||||
len = size;
|
||||
}
|
||||
if (len == 0)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
AvailableBytesIn -= len;
|
||||
|
||||
if (dstate.WantRfc1950HeaderBytes)
|
||||
{
|
||||
_Adler32 = Adler.Adler32(_Adler32, InputBuffer, NextIn, len);
|
||||
}
|
||||
Array.Copy(InputBuffer, NextIn, buf, start, len);
|
||||
NextIn += len;
|
||||
TotalBytesIn += len;
|
||||
return len;
|
||||
}
|
||||
}
|
||||
}
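ZlibCodec is buffer-oriented rather than stream-oriented: the caller owns InputBuffer/OutputBuffer and the NextIn/NextOut and AvailableBytesIn/AvailableBytesOut cursors. For a small payload a single Deflate(FlushType.Finish) call is usually enough once the output buffer has some headroom; the sizing below is a rough assumption made for the sketch, not a bound guaranteed by the library.

using System;
using Ionic.Zlib;

static class OneShotDeflateSketch
{
    // Deflate a small buffer in one call; throws if the output buffer turned out too small.
    public static byte[] Deflate(byte[] raw)
    {
        var codec = new ZlibCodec(CompressionMode.Compress);
        codec.InputBuffer = raw;
        codec.NextIn = 0;
        codec.AvailableBytesIn = raw.Length;

        byte[] output = new byte[raw.Length + (raw.Length / 100) + 64]; // rough headroom
        codec.OutputBuffer = output;
        codec.NextOut = 0;
        codec.AvailableBytesOut = output.Length;

        int rc = codec.Deflate(FlushType.Finish);
        if (rc != ZlibConstants.Z_STREAM_END)
            throw new ZlibException("deflating: " + codec.Message);
        codec.EndDeflate();

        byte[] result = new byte[codec.TotalBytesOut];
        Array.Copy(output, result, result.Length);
        return result;
    }
}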
|
||||
124
SabreTools.Library/External/Zlib/ZlibConstants.cs
vendored
Normal file
@@ -0,0 +1,124 @@
|
||||
// ZlibConstants.cs
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// Copyright (c) 2009 Dino Chiesa and Microsoft Corporation.
|
||||
// All rights reserved.
|
||||
//
|
||||
// This code module is part of DotNetZip, a zipfile class library.
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// This code is licensed under the Microsoft Public License.
|
||||
// See the file License.txt for the license details.
|
||||
// More info on: http://dotnetzip.codeplex.com
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// last saved (in emacs):
|
||||
// Time-stamp: <2009-November-03 18:50:19>
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// This module defines constants used by the zlib class library. This
|
||||
// code is derived from the jzlib implementation of zlib, but
|
||||
// significantly modified. In keeping with the license for jzlib, the
|
||||
// copyright to that code is included here.
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// Copyright (c) 2000,2001,2002,2003 ymnk, JCraft,Inc. All rights reserved.
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are met:
|
||||
//
|
||||
// 1. Redistributions of source code must retain the above copyright notice,
|
||||
// this list of conditions and the following disclaimer.
|
||||
//
|
||||
// 2. Redistributions in binary form must reproduce the above copyright
|
||||
// notice, this list of conditions and the following disclaimer in
|
||||
// the documentation and/or other materials provided with the distribution.
|
||||
//
|
||||
// 3. The names of the authors may not be used to endorse or promote products
|
||||
// derived from this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES,
|
||||
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
// FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JCRAFT,
|
||||
// INC. OR ANY CONTRIBUTORS TO THIS SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
|
||||
// OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
|
||||
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
//
|
||||
// -----------------------------------------------------------------------
|
||||
//
|
||||
// This program is based on zlib-1.1.3; credit to authors
|
||||
// Jean-loup Gailly(jloup@gzip.org) and Mark Adler(madler@alumni.caltech.edu)
|
||||
// and contributors of zlib.
|
||||
//
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
namespace Ionic.Zlib
|
||||
{
|
||||
/// <summary>
|
||||
/// A bunch of constants used in the Zlib interface.
|
||||
/// </summary>
|
||||
public static class ZlibConstants
|
||||
{
|
||||
/// <summary>
|
||||
/// The maximum number of window bits for the Deflate algorithm.
|
||||
/// </summary>
|
||||
public const int WindowBitsMax = 15; // 32K LZ77 window
|
||||
|
||||
/// <summary>
|
||||
/// The default number of window bits for the Deflate algorithm.
|
||||
/// </summary>
|
||||
public const int WindowBitsDefault = WindowBitsMax;
|
||||
|
||||
/// <summary>
|
||||
/// indicates everything is A-OK
|
||||
/// </summary>
|
||||
public const int Z_OK = 0;
|
||||
|
||||
/// <summary>
|
||||
/// Indicates that the last operation reached the end of the stream.
|
||||
/// </summary>
|
||||
public const int Z_STREAM_END = 1;
|
||||
|
||||
/// <summary>
|
||||
/// The operation ended in need of a dictionary.
|
||||
/// </summary>
|
||||
public const int Z_NEED_DICT = 2;
|
||||
|
||||
/// <summary>
|
||||
/// There was an error with the stream - not enough data, not open and readable, etc.
|
||||
/// </summary>
|
||||
public const int Z_STREAM_ERROR = -2;
|
||||
|
||||
/// <summary>
|
||||
/// There was an error with the data - not enough data, bad data, etc.
|
||||
/// </summary>
|
||||
public const int Z_DATA_ERROR = -3;
|
||||
|
||||
/// <summary>
|
||||
/// There was an error with the working buffer.
|
||||
/// </summary>
|
||||
public const int Z_BUF_ERROR = -5;
|
||||
|
||||
/// <summary>
|
||||
/// The size of the working buffer used in the ZlibCodec class. Defaults to 8192 bytes.
|
||||
/// </summary>
|
||||
#if NETCF
|
||||
public const int WorkingBufferSizeDefault = 8192;
|
||||
#else
|
||||
public const int WorkingBufferSizeDefault = 16384;
|
||||
#endif
|
||||
/// <summary>
|
||||
/// The minimum size of the working buffer used in the ZlibCodec class. Currently it is 1024 bytes.
|
||||
/// </summary>
|
||||
public const int WorkingBufferSizeMin = 1024;
|
||||
}
|
||||
}
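These values mirror zlib's C return codes, and the stream and codec classes in this commit treat only Z_OK and Z_STREAM_END as success. A tiny sketch of the check they apply after each Inflate/Deflate call:

using Ionic.Zlib;

static class ReturnCodeSketch
{
    // Throw on anything other than the two success codes, echoing the codec's Message text.
    public static void EnsureOk(int rc, string message)
    {
        if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END)
        {
            throw new ZlibException(string.Format("zlib error rc={0} msg={1}", rc, message));
        }
    }
}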
|
||||
|
||||
748
SabreTools.Library/External/Zlib/ZlibStream.cs
vendored
Normal file
@@ -0,0 +1,748 @@
|
||||
// ZlibStream.cs
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// Copyright (c) 2009 Dino Chiesa and Microsoft Corporation.
|
||||
// All rights reserved.
|
||||
//
|
||||
// This code module is part of DotNetZip, a zipfile class library.
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// This code is licensed under the Microsoft Public License.
|
||||
// See the file License.txt for the license details.
|
||||
// More info on: http://dotnetzip.codeplex.com
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// last saved (in emacs):
|
||||
// Time-stamp: <2011-July-31 14:53:33>
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
//
|
||||
// This module defines the ZlibStream class, which is similar in idea to
|
||||
// the System.IO.Compression.DeflateStream and
|
||||
// System.IO.Compression.GZipStream classes in the .NET BCL.
|
||||
//
|
||||
// ------------------------------------------------------------------
|
||||
|
||||
using System;
|
||||
using System.IO;
|
||||
|
||||
namespace Ionic.Zlib
|
||||
{
|
||||
/// <summary>
|
||||
/// Represents a Zlib stream for compression or decompression.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
///
|
||||
/// <para>
|
||||
/// The ZlibStream is a <see
|
||||
/// href="http://en.wikipedia.org/wiki/Decorator_pattern">Decorator</see> on a <see
|
||||
/// cref="System.IO.Stream"/>. It adds ZLIB compression or decompression to any
|
||||
/// stream.
|
||||
/// </para>
|
||||
///
|
||||
/// <para> Using this stream, applications can compress or decompress data via
|
||||
/// stream <c>Read()</c> and <c>Write()</c> operations. Either compression or
|
||||
/// decompression can occur through either reading or writing. The compression
|
||||
/// format used is ZLIB, which is documented in <see
|
||||
/// href="http://www.ietf.org/rfc/rfc1950.txt">IETF RFC 1950</see>, "ZLIB Compressed
|
||||
/// Data Format Specification version 3.3". This implementation of ZLIB always uses
|
||||
/// DEFLATE as the compression method. (see <see
|
||||
/// href="http://www.ietf.org/rfc/rfc1951.txt">IETF RFC 1951</see>, "DEFLATE
|
||||
/// Compressed Data Format Specification version 1.3.") </para>
|
||||
///
|
||||
/// <para>
|
||||
/// The ZLIB format allows for varying compression methods, window sizes, and dictionaries.
|
||||
/// This implementation always uses the DEFLATE compression method, a preset dictionary,
|
||||
/// and 15 window bits by default.
|
||||
/// </para>
|
||||
///
|
||||
/// <para>
|
||||
/// This class is similar to <see cref="DeflateStream"/>, except that it adds the
|
||||
/// RFC1950 header and trailer bytes to a compressed stream when compressing, or expects
|
||||
/// the RFC1950 header and trailer bytes when decompressing. It is also similar to the
|
||||
/// <see cref="GZipStream"/>.
|
||||
/// </para>
|
||||
/// </remarks>
|
||||
/// <seealso cref="DeflateStream" />
|
||||
/// <seealso cref="GZipStream" />
|
||||
public class ZlibStream : System.IO.Stream
|
||||
{
|
||||
internal ZlibBaseStream _baseStream;
|
||||
bool _disposed;
|
||||
|
||||
/// <summary>
|
||||
/// Create a <c>ZlibStream</c> using the specified <c>CompressionMode</c>.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
///
|
||||
/// <para>
|
||||
/// When mode is <c>CompressionMode.Compress</c>, the <c>ZlibStream</c>
|
||||
/// will use the default compression level. The "captive" stream will be
|
||||
/// closed when the <c>ZlibStream</c> is closed.
|
||||
/// </para>
|
||||
///
|
||||
/// </remarks>
|
||||
///
|
||||
/// <example>
|
||||
/// This example uses a <c>ZlibStream</c> to compress a file, and writes the
|
||||
/// compressed data to another file.
|
||||
/// <code>
|
||||
/// using (System.IO.Stream input = System.IO.File.Open(fileToCompress, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
|
||||
/// {
|
||||
/// using (var raw = System.IO.File.Create(fileToCompress + ".zlib"))
|
||||
/// {
|
||||
/// using (Stream compressor = new ZlibStream(raw, CompressionMode.Compress))
|
||||
/// {
|
||||
/// byte[] buffer = new byte[WORKING_BUFFER_SIZE];
|
||||
/// int n;
|
||||
/// while ((n= input.Read(buffer, 0, buffer.Length)) != 0)
|
||||
/// {
|
||||
/// compressor.Write(buffer, 0, n);
|
||||
/// }
|
||||
/// }
|
||||
/// }
|
||||
/// }
|
||||
/// </code>
|
||||
/// <code lang="VB">
|
||||
/// Using input As Stream = File.Open(fileToCompress, FileMode.Open, FileAccess.Read, FileShare.ReadWrite)
|
||||
/// Using raw As FileStream = File.Create(fileToCompress & ".zlib")
|
||||
/// Using compressor As Stream = New ZlibStream(raw, CompressionMode.Compress)
|
||||
/// Dim buffer As Byte() = New Byte(4096) {}
|
||||
/// Dim n As Integer = -1
|
||||
/// Do While (n <> 0)
|
||||
/// If (n > 0) Then
|
||||
/// compressor.Write(buffer, 0, n)
|
||||
/// End If
|
||||
/// n = input.Read(buffer, 0, buffer.Length)
|
||||
/// Loop
|
||||
/// End Using
|
||||
/// End Using
|
||||
/// End Using
|
||||
/// </code>
|
||||
/// </example>
|
||||
///
|
||||
/// <param name="stream">The stream which will be read or written.</param>
|
||||
/// <param name="mode">Indicates whether the ZlibStream will compress or decompress.</param>
|
||||
public ZlibStream(System.IO.Stream stream, CompressionMode mode)
|
||||
: this(stream, mode, CompressionLevel.Default, false)
|
||||
{
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Create a <c>ZlibStream</c> using the specified <c>CompressionMode</c> and
|
||||
/// the specified <c>CompressionLevel</c>.
|
||||
/// </summary>
|
||||
///
|
||||
/// <remarks>
|
||||
///
|
||||
/// <para>
|
||||
/// When mode is <c>CompressionMode.Decompress</c>, the level parameter is ignored.
|
||||
/// The "captive" stream will be closed when the <c>ZlibStream</c> is closed.
|
||||
/// </para>
|
||||
///
|
||||
/// </remarks>
|
||||
///
|
||||
/// <example>
|
||||
/// This example uses a <c>ZlibStream</c> to compress data from a file, and writes the
|
||||
/// compressed data to another file.
|
||||
///
|
||||
/// <code>
|
||||
/// using (System.IO.Stream input = System.IO.File.Open(fileToCompress, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
|
||||
/// {
|
||||
/// using (var raw = System.IO.File.Create(fileToCompress + ".zlib"))
|
||||
/// {
|
||||
/// using (Stream compressor = new ZlibStream(raw,
|
||||
/// CompressionMode.Compress,
|
||||
/// CompressionLevel.BestCompression))
|
||||
/// {
|
||||
/// byte[] buffer = new byte[WORKING_BUFFER_SIZE];
|
||||
/// int n;
|
||||
/// while ((n= input.Read(buffer, 0, buffer.Length)) != 0)
|
||||
/// {
|
||||
/// compressor.Write(buffer, 0, n);
|
||||
/// }
|
||||
/// }
|
||||
/// }
|
||||
/// }
|
||||
/// </code>
|
||||
///
|
||||
/// <code lang="VB">
|
||||
/// Using input As Stream = File.Open(fileToCompress, FileMode.Open, FileAccess.Read, FileShare.ReadWrite)
|
||||
/// Using raw As FileStream = File.Create(fileToCompress & ".zlib")
|
||||
/// Using compressor As Stream = New ZlibStream(raw, CompressionMode.Compress, CompressionLevel.BestCompression)
|
||||
/// Dim buffer As Byte() = New Byte(4096) {}
|
||||
/// Dim n As Integer = -1
|
||||
/// Do While (n <> 0)
|
||||
/// If (n > 0) Then
|
||||
/// compressor.Write(buffer, 0, n)
|
||||
/// End If
|
||||
/// n = input.Read(buffer, 0, buffer.Length)
|
||||
/// Loop
|
||||
/// End Using
|
||||
/// End Using
|
||||
/// End Using
|
||||
/// </code>
|
||||
/// </example>
|
||||
///
|
||||
/// <param name="stream">The stream to be read or written while deflating or inflating.</param>
|
||||
/// <param name="mode">Indicates whether the ZlibStream will compress or decompress.</param>
|
||||
/// <param name="level">A tuning knob to trade speed for effectiveness.</param>
|
||||
public ZlibStream(System.IO.Stream stream, CompressionMode mode, CompressionLevel level)
|
||||
: this(stream, mode, level, false)
|
||||
{
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Create a <c>ZlibStream</c> using the specified <c>CompressionMode</c>, and
|
||||
/// explicitly specify whether the captive stream should be left open after
|
||||
/// Deflation or Inflation.
|
||||
/// </summary>
|
||||
///
|
||||
/// <remarks>
|
||||
///
|
||||
/// <para>
|
||||
/// When mode is <c>CompressionMode.Compress</c>, the <c>ZlibStream</c> will use
|
||||
/// the default compression level.
|
||||
/// </para>
|
||||
///
|
||||
/// <para>
|
||||
/// This constructor allows the application to request that the captive stream
|
||||
/// remain open after the deflation or inflation occurs. By default, after
|
||||
/// <c>Close()</c> is called on the stream, the captive stream is also
|
||||
/// closed. In some cases this is not desired, for example if the stream is a
|
||||
/// <see cref="System.IO.MemoryStream"/> that will be re-read after
|
||||
/// compression. Specify true for the <paramref name="leaveOpen"/> parameter to leave the stream
|
||||
/// open.
|
||||
/// </para>
|
||||
///
|
||||
/// <para>
|
||||
/// See the other overloads of this constructor for example code.
|
||||
/// </para>
|
||||
///
|
||||
/// </remarks>
|
||||
///
|
||||
/// <param name="stream">The stream which will be read or written. This is called the
|
||||
/// "captive" stream in other places in this documentation.</param>
|
||||
/// <param name="mode">Indicates whether the ZlibStream will compress or decompress.</param>
|
||||
/// <param name="leaveOpen">true if the application would like the stream to remain
|
||||
/// open after inflation/deflation.</param>
|
||||
public ZlibStream(System.IO.Stream stream, CompressionMode mode, bool leaveOpen)
|
||||
: this(stream, mode, CompressionLevel.Default, leaveOpen)
|
||||
{
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Create a <c>ZlibStream</c> using the specified <c>CompressionMode</c>
|
||||
/// and the specified <c>CompressionLevel</c>, and explicitly specify
|
||||
/// whether the stream should be left open after Deflation or Inflation.
|
||||
/// </summary>
|
||||
///
|
||||
/// <remarks>
|
||||
///
|
||||
/// <para>
|
||||
/// This constructor allows the application to request that the captive
|
||||
/// stream remain open after the deflation or inflation occurs. By
|
||||
/// default, after <c>Close()</c> is called on the stream, the captive
|
||||
/// stream is also closed. In some cases this is not desired, for example
|
||||
/// if the stream is a <see cref="System.IO.MemoryStream"/> that will be
|
||||
/// re-read after compression. Specify true for the <paramref
|
||||
/// name="leaveOpen"/> parameter to leave the stream open.
|
||||
/// </para>
|
||||
///
|
||||
/// <para>
|
||||
/// When mode is <c>CompressionMode.Decompress</c>, the level parameter is
|
||||
/// ignored.
|
||||
/// </para>
|
||||
///
|
||||
/// </remarks>
|
||||
///
|
||||
/// <example>
|
||||
///
|
||||
/// This example shows how to use a ZlibStream to compress the data from a file,
|
||||
/// and store the result into another file. The filestream remains open to allow
|
||||
/// additional data to be written to it.
|
||||
///
|
||||
/// <code>
|
||||
/// using (var output = System.IO.File.Create(fileToCompress + ".zlib"))
|
||||
/// {
|
||||
/// using (System.IO.Stream input = System.IO.File.Open(fileToCompress, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
|
||||
/// {
|
||||
/// using (Stream compressor = new ZlibStream(output, CompressionMode.Compress, CompressionLevel.BestCompression, true))
|
||||
/// {
|
||||
/// byte[] buffer = new byte[WORKING_BUFFER_SIZE];
|
||||
/// int n;
|
||||
/// while ((n= input.Read(buffer, 0, buffer.Length)) != 0)
|
||||
/// {
|
||||
/// compressor.Write(buffer, 0, n);
|
||||
/// }
|
||||
/// }
|
||||
/// }
|
||||
/// // can write additional data to the output stream here
|
||||
/// }
|
||||
/// </code>
|
||||
/// <code lang="VB">
|
||||
/// Using output As FileStream = File.Create(fileToCompress & ".zlib")
|
||||
/// Using input As Stream = File.Open(fileToCompress, FileMode.Open, FileAccess.Read, FileShare.ReadWrite)
|
||||
/// Using compressor As Stream = New ZlibStream(output, CompressionMode.Compress, CompressionLevel.BestCompression, True)
|
||||
/// Dim buffer As Byte() = New Byte(4096) {}
|
||||
/// Dim n As Integer = -1
|
||||
/// Do While (n <> 0)
|
||||
/// If (n > 0) Then
|
||||
/// compressor.Write(buffer, 0, n)
|
||||
/// End If
|
||||
/// n = input.Read(buffer, 0, buffer.Length)
|
||||
/// Loop
|
||||
/// End Using
|
||||
/// End Using
|
||||
/// ' can write additional data to the output stream here.
|
||||
/// End Using
|
||||
/// </code>
|
||||
/// </example>
|
||||
///
|
||||
/// <param name="stream">The stream which will be read or written.</param>
|
||||
///
|
||||
/// <param name="mode">Indicates whether the ZlibStream will compress or decompress.</param>
|
||||
///
|
||||
/// <param name="leaveOpen">
|
||||
/// true if the application would like the stream to remain open after
|
||||
/// inflation/deflation.
|
||||
/// </param>
|
||||
///
|
||||
/// <param name="level">
|
||||
/// A tuning knob to trade speed for effectiveness. This parameter is
|
||||
/// effective only when mode is <c>CompressionMode.Compress</c>.
|
||||
/// </param>
|
||||
public ZlibStream(System.IO.Stream stream, CompressionMode mode, CompressionLevel level, bool leaveOpen)
|
||||
{
|
||||
_baseStream = new ZlibBaseStream(stream, mode, level, ZlibStreamFlavor.ZLIB, leaveOpen);
|
||||
}
|
||||
|
||||
#region Zlib properties
|
||||
|
||||
/// <summary>
|
||||
/// This property sets the flush behavior on the stream.
|
||||
/// Sorry, though, not sure exactly how to describe all the various settings.
|
||||
/// </summary>
|
||||
virtual public FlushType FlushMode
|
||||
{
|
||||
get { return (this._baseStream._flushMode); }
|
||||
set
|
||||
{
|
||||
if (_disposed)
|
||||
{
|
||||
throw new ObjectDisposedException("ZlibStream");
|
||||
}
|
||||
this._baseStream._flushMode = value;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// The size of the working buffer for the compression codec.
|
||||
/// </summary>
|
||||
///
|
||||
/// <remarks>
|
||||
/// <para>
|
||||
/// The working buffer is used for all stream operations. The default size is
|
||||
/// 1024 bytes. The minimum size is 128 bytes. You may get better performance
|
||||
/// with a larger buffer. Then again, you might not. You would have to test
|
||||
/// it.
|
||||
/// </para>
|
||||
///
|
||||
/// <para>
|
||||
/// Set this before the first call to <c>Read()</c> or <c>Write()</c> on the
|
||||
/// stream. If you try to set it afterwards, it will throw.
|
||||
/// </para>
|
||||
/// </remarks>
|
||||
public int BufferSize
|
||||
{
|
||||
get
|
||||
{
|
||||
return this._baseStream._bufferSize;
|
||||
}
|
||||
set
|
||||
{
|
||||
if (_disposed)
|
||||
{
|
||||
throw new ObjectDisposedException("ZlibStream");
|
||||
}
|
||||
if (this._baseStream._workingBuffer != null)
|
||||
{
|
||||
throw new ZlibException("The working buffer is already set.");
|
||||
}
|
||||
if (value < ZlibConstants.WorkingBufferSizeMin)
|
||||
{
|
||||
throw new ZlibException(String.Format("Don't be silly. {0} bytes?? Use a bigger buffer, at least {1}.", value, ZlibConstants.WorkingBufferSizeMin));
|
||||
}
|
||||
this._baseStream._bufferSize = value;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary> Returns the total number of bytes input so far.</summary>
|
||||
virtual public long TotalIn
|
||||
{
|
||||
get { return this._baseStream._z.TotalBytesIn; }
|
||||
}
|
||||
|
||||
/// <summary> Returns the total number of bytes output so far.</summary>
|
||||
virtual public long TotalOut
|
||||
{
|
||||
get { return this._baseStream._z.TotalBytesOut; }
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region System.IO.Stream methods
|
||||
|
||||
/// <summary>
|
||||
/// Dispose the stream.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// <para>
|
||||
/// This may or may not result in a <c>Close()</c> call on the captive
|
||||
/// stream. See the constructors that have a <c>leaveOpen</c> parameter
|
||||
/// for more information.
|
||||
/// </para>
|
||||
/// <para>
|
||||
/// This method may be invoked in two distinct scenarios. If disposing
|
||||
/// == true, the method has been called directly or indirectly by a
|
||||
/// user's code, for example via the public Dispose() method. In this
|
||||
/// case, both managed and unmanaged resources can be referenced and
|
||||
/// disposed. If disposing == false, the method has been called by the
|
||||
/// runtime from inside the object finalizer and this method should not
|
||||
/// reference other objects; in that case only unmanaged resources must
|
||||
/// be referenced or disposed.
|
||||
/// </para>
|
||||
/// </remarks>
|
||||
/// <param name="disposing">
|
||||
/// indicates whether the Dispose method was invoked by user code.
|
||||
/// </param>
|
||||
protected override void Dispose(bool disposing)
|
||||
{
|
||||
try
|
||||
{
|
||||
if (!_disposed)
|
||||
{
|
||||
if (disposing && (this._baseStream != null))
|
||||
{
|
||||
this._baseStream.Close();
|
||||
}
|
||||
_disposed = true;
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
base.Dispose(disposing);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Indicates whether the stream can be read.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// The return value depends on whether the captive stream supports reading.
|
||||
/// </remarks>
|
||||
public override bool CanRead
|
||||
{
|
||||
get
|
||||
{
|
||||
if (_disposed)
|
||||
{
|
||||
throw new ObjectDisposedException("ZlibStream");
|
||||
}
|
||||
return _baseStream._stream.CanRead;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Indicates whether the stream supports Seek operations.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// Always returns false.
|
||||
/// </remarks>
|
||||
public override bool CanSeek
|
||||
{
|
||||
get { return false; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Indicates whether the stream can be written.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// The return value depends on whether the captive stream supports writing.
|
||||
/// </remarks>
|
||||
public override bool CanWrite
|
||||
{
|
||||
get
|
||||
{
|
||||
if (_disposed)
|
||||
{
|
||||
throw new ObjectDisposedException("ZlibStream");
|
||||
}
|
||||
return _baseStream._stream.CanWrite;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Flush the stream.
|
||||
/// </summary>
|
||||
public override void Flush()
|
||||
{
|
||||
if (_disposed)
|
||||
{
|
||||
throw new ObjectDisposedException("ZlibStream");
|
||||
}
|
||||
_baseStream.Flush();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Reading this property always throws a <see cref="NotSupportedException"/>.
|
||||
/// </summary>
|
||||
public override long Length
|
||||
{
|
||||
get { throw new NotSupportedException(); }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// The position of the stream pointer.
|
||||
/// </summary>
|
||||
///
|
||||
/// <remarks>
|
||||
/// Setting this property always throws a <see
|
||||
/// cref="NotSupportedException"/>. Reading will return the total bytes
|
||||
/// written out, if used in writing, or the total bytes read in, if used in
|
||||
/// reading. The count may refer to compressed bytes or uncompressed bytes,
|
||||
/// depending on how you've used the stream.
|
||||
/// </remarks>
|
||||
public override long Position
|
||||
{
|
||||
get
|
||||
{
|
||||
if (this._baseStream._streamMode == ZlibBaseStream.StreamMode.Writer)
|
||||
{
|
||||
return this._baseStream._z.TotalBytesOut;
|
||||
}
|
||||
if (this._baseStream._streamMode == ZlibBaseStream.StreamMode.Reader)
|
||||
{
|
||||
return this._baseStream._z.TotalBytesIn;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
set { throw new NotSupportedException(); }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Read data from the stream.
|
||||
/// </summary>
|
||||
///
|
||||
/// <remarks>
|
||||
///
|
||||
/// <para>
|
||||
/// If you wish to use the <c>ZlibStream</c> to compress data while reading,
|
||||
/// you can create a <c>ZlibStream</c> with <c>CompressionMode.Compress</c>,
|
||||
/// providing an uncompressed data stream. Then call <c>Read()</c> on that
|
||||
/// <c>ZlibStream</c>, and the data read will be compressed. If you wish to
|
||||
/// use the <c>ZlibStream</c> to decompress data while reading, you can create
|
||||
/// a <c>ZlibStream</c> with <c>CompressionMode.Decompress</c>, providing a
|
||||
/// readable compressed data stream. Then call <c>Read()</c> on that
|
||||
/// <c>ZlibStream</c>, and the data will be decompressed as it is read.
|
||||
/// </para>
|
||||
///
|
||||
/// <para>
|
||||
/// A <c>ZlibStream</c> can be used for <c>Read()</c> or <c>Write()</c>, but
|
||||
/// not both.
|
||||
/// </para>
|
||||
///
|
||||
/// </remarks>
|
||||
///
|
||||
/// <param name="buffer">
|
||||
/// The buffer into which the read data should be placed.</param>
|
||||
///
|
||||
/// <param name="offset">
|
||||
/// the offset within that data array to put the first byte read.</param>
|
||||
///
|
||||
/// <param name="count">the number of bytes to read.</param>
|
||||
///
|
||||
/// <returns>the number of bytes read</returns>
|
||||
public override int Read(byte[] buffer, int offset, int count)
|
||||
{
|
||||
if (_disposed)
|
||||
{
|
||||
throw new ObjectDisposedException("ZlibStream");
|
||||
}
|
||||
return _baseStream.Read(buffer, offset, count);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Calling this method always throws a <see cref="NotSupportedException"/>.
|
||||
/// </summary>
|
||||
/// <param name="offset">
|
||||
/// The offset to seek to....
|
||||
/// IF THIS METHOD ACTUALLY DID ANYTHING.
|
||||
/// </param>
|
||||
/// <param name="origin">
|
||||
/// The reference specifying how to apply the offset.... IF
|
||||
/// THIS METHOD ACTUALLY DID ANYTHING.
|
||||
/// </param>
|
||||
///
|
||||
/// <returns>nothing. This method always throws.</returns>
|
||||
public override long Seek(long offset, System.IO.SeekOrigin origin)
|
||||
{
|
||||
throw new NotSupportedException();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Calling this method always throws a <see cref="NotSupportedException"/>.
|
||||
/// </summary>
|
||||
/// <param name="value">
|
||||
/// The new value for the stream length.... IF
|
||||
/// THIS METHOD ACTUALLY DID ANYTHING.
|
||||
/// </param>
|
||||
public override void SetLength(long value)
|
||||
{
|
||||
throw new NotSupportedException();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Write data to the stream.
|
||||
/// </summary>
|
||||
///
|
||||
/// <remarks>
|
||||
///
|
||||
/// <para>
|
||||
/// If you wish to use the <c>ZlibStream</c> to compress data while writing,
|
||||
/// you can create a <c>ZlibStream</c> with <c>CompressionMode.Compress</c>,
|
||||
/// and a writable output stream. Then call <c>Write()</c> on that
|
||||
/// <c>ZlibStream</c>, providing uncompressed data as input. The data sent to
|
||||
/// the output stream will be the compressed form of the data written. If you
|
||||
/// wish to use the <c>ZlibStream</c> to decompress data while writing, you
|
||||
/// can create a <c>ZlibStream</c> with <c>CompressionMode.Decompress</c>, and a
|
||||
/// writable output stream. Then call <c>Write()</c> on that stream,
|
||||
/// providing previously compressed data. The data sent to the output stream
|
||||
/// will be the decompressed form of the data written.
|
||||
/// </para>
|
||||
///
|
||||
/// <para>
|
||||
/// A <c>ZlibStream</c> can be used for <c>Read()</c> or <c>Write()</c>, but not both.
|
||||
/// </para>
|
||||
/// </remarks>
|
||||
/// <param name="buffer">The buffer holding data to write to the stream.</param>
|
||||
/// <param name="offset">the offset within that data array to find the first byte to write.</param>
|
||||
/// <param name="count">the number of bytes to write.</param>
|
||||
public override void Write(byte[] buffer, int offset, int count)
|
||||
{
|
||||
if (_disposed)
|
||||
{
|
||||
throw new ObjectDisposedException("ZlibStream");
|
||||
}
|
||||
_baseStream.Write(buffer, offset, count);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
/// <summary>
|
||||
/// Compress a string into a byte array using ZLIB.
|
||||
/// </summary>
|
||||
///
|
||||
/// <remarks>
|
||||
/// Uncompress it with <see cref="ZlibStream.UncompressString(byte[])"/>.
|
||||
/// </remarks>
|
||||
///
|
||||
/// <seealso cref="ZlibStream.UncompressString(byte[])"/>
|
||||
/// <seealso cref="ZlibStream.CompressBuffer(byte[])"/>
|
||||
/// <seealso cref="GZipStream.CompressString(string)"/>
|
||||
///
|
||||
/// <param name="s">
|
||||
/// A string to compress. The string will first be encoded
|
||||
/// using UTF8, then compressed.
|
||||
/// </param>
|
||||
///
|
||||
/// <returns>The string in compressed form</returns>
|
||||
public static byte[] CompressString(String s)
|
||||
{
|
||||
using (var ms = new MemoryStream())
|
||||
{
|
||||
Stream compressor =
|
||||
new ZlibStream(ms, CompressionMode.Compress, CompressionLevel.BestCompression);
|
||||
ZlibBaseStream.CompressString(s, compressor);
|
||||
return ms.ToArray();
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Compress a byte array into a new byte array using ZLIB.
|
||||
/// </summary>
|
||||
///
|
||||
/// <remarks>
|
||||
/// Uncompress it with <see cref="ZlibStream.UncompressBuffer(byte[])"/>.
|
||||
/// </remarks>
|
||||
///
|
||||
/// <seealso cref="ZlibStream.CompressString(string)"/>
|
||||
/// <seealso cref="ZlibStream.UncompressBuffer(byte[])"/>
|
||||
///
|
||||
/// <param name="b">
|
||||
/// A buffer to compress.
|
||||
/// </param>
|
||||
///
|
||||
/// <returns>The data in compressed form</returns>
|
||||
public static byte[] CompressBuffer(byte[] b)
|
||||
{
|
||||
using (var ms = new MemoryStream())
|
||||
{
|
||||
Stream compressor =
|
||||
new ZlibStream( ms, CompressionMode.Compress, CompressionLevel.BestCompression );
|
||||
|
||||
ZlibBaseStream.CompressBuffer(b, compressor);
|
||||
return ms.ToArray();
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Uncompress a ZLIB-compressed byte array into a single string.
|
||||
/// </summary>
|
||||
///
|
||||
/// <seealso cref="ZlibStream.CompressString(String)"/>
|
||||
/// <seealso cref="ZlibStream.UncompressBuffer(byte[])"/>
|
||||
///
|
||||
/// <param name="compressed">
|
||||
/// A buffer containing ZLIB-compressed data.
|
||||
/// </param>
|
||||
///
|
||||
/// <returns>The uncompressed string</returns>
|
||||
public static String UncompressString(byte[] compressed)
|
||||
{
|
||||
using (var input = new MemoryStream(compressed))
|
||||
{
|
||||
Stream decompressor =
|
||||
new ZlibStream(input, CompressionMode.Decompress);
|
||||
|
||||
return ZlibBaseStream.UncompressString(compressed, decompressor);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Uncompress a ZLIB-compressed byte array into a byte array.
|
||||
/// </summary>
|
||||
///
|
||||
/// <seealso cref="ZlibStream.CompressBuffer(byte[])"/>
|
||||
/// <seealso cref="ZlibStream.UncompressString(byte[])"/>
|
||||
///
|
||||
/// <param name="compressed">
|
||||
/// A buffer containing ZLIB-compressed data.
|
||||
/// </param>
|
||||
///
|
||||
/// <returns>The data in uncompressed form</returns>
|
||||
public static byte[] UncompressBuffer(byte[] compressed)
|
||||
{
|
||||
using (var input = new MemoryStream(compressed))
|
||||
{
|
||||
Stream decompressor =
|
||||
new ZlibStream( input, CompressionMode.Decompress );
|
||||
|
||||
return ZlibBaseStream.UncompressBuffer(compressed, decompressor);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
255
SabreTools.Library/External/xxHash/xxHash.cs
vendored
Normal file
255
SabreTools.Library/External/xxHash/xxHash.cs
vendored
Normal file
@@ -0,0 +1,255 @@
|
||||
/*
|
||||
xxHashSharp - A pure C# implementation of xxhash
|
||||
Copyright (C) 2014, Seok-Ju, Yun. (https://github.com/noricube/xxHashSharp)
|
||||
Original C Implementation Copyright (C) 2012-2014, Yann Collet. (https://code.google.com/p/xxhash/)
|
||||
BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials provided
|
||||
with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace SabreTools.Helper.External
|
||||
{
|
||||
class xxHash
|
||||
{
|
||||
public struct XXH_State
|
||||
{
|
||||
public ulong total_len;
|
||||
public uint seed;
|
||||
public uint v1;
|
||||
public uint v2;
|
||||
public uint v3;
|
||||
public uint v4;
|
||||
public int memsize;
|
||||
public byte[] memory;
|
||||
};
|
||||
|
||||
const uint PRIME32_1 = 2654435761U;
|
||||
const uint PRIME32_2 = 2246822519U;
|
||||
const uint PRIME32_3 = 3266489917U;
|
||||
const uint PRIME32_4 = 668265263U;
|
||||
const uint PRIME32_5 = 374761393U;
|
||||
|
||||
protected XXH_State _state;
|
||||
public xxHash()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
public static uint CalculateHash(byte[] buf, int len = -1, uint seed = 0)
|
||||
{
|
||||
uint h32;
|
||||
int index = 0;
|
||||
if (len == -1)
|
||||
{
|
||||
len = buf.Length;
|
||||
}
|
||||
|
||||
|
||||
if (len >= 16)
|
||||
{
|
||||
int limit = len - 16;
|
||||
uint v1 = seed + PRIME32_1 + PRIME32_2;
|
||||
uint v2 = seed + PRIME32_2;
|
||||
uint v3 = seed + 0;
|
||||
uint v4 = seed - PRIME32_1;
|
||||
|
||||
do
|
||||
{
|
||||
v1 = CalcSubHash(v1, buf, index);
|
||||
index += 4;
|
||||
v2 = CalcSubHash(v2, buf, index);
|
||||
index += 4;
|
||||
v3 = CalcSubHash(v3, buf, index);
|
||||
index += 4;
|
||||
v4 = CalcSubHash(v4, buf, index);
|
||||
index += 4;
|
||||
} while (index <= limit);
|
||||
|
||||
h32 = RotateLeft(v1, 1) + RotateLeft(v2, 7) + RotateLeft(v3, 12) + RotateLeft(v4, 18);
|
||||
}
|
||||
else
|
||||
{
|
||||
h32 = seed + PRIME32_5;
|
||||
}
|
||||
|
||||
h32 += (uint)len;
|
||||
|
||||
while (index <= len - 4)
|
||||
{
|
||||
h32 += BitConverter.ToUInt32(buf, index) * PRIME32_3;
|
||||
h32 = RotateLeft(h32, 17) * PRIME32_4;
|
||||
index += 4;
|
||||
}
|
||||
|
||||
while (index < len)
|
||||
{
|
||||
h32 += buf[index] * PRIME32_5;
|
||||
h32 = RotateLeft(h32, 11) * PRIME32_1;
|
||||
index++;
|
||||
}
|
||||
|
||||
h32 ^= h32 >> 15;
|
||||
h32 *= PRIME32_2;
|
||||
h32 ^= h32 >> 13;
|
||||
h32 *= PRIME32_3;
|
||||
h32 ^= h32 >> 16;
|
||||
|
||||
return h32;
|
||||
}
|
||||
|
||||
public void Init(uint seed = 0)
|
||||
{
|
||||
_state.seed = seed;
|
||||
_state.v1 = seed + PRIME32_1 + PRIME32_2;
|
||||
_state.v2 = seed + PRIME32_2;
|
||||
_state.v3 = seed + 0;
|
||||
_state.v4 = seed - PRIME32_1;
|
||||
_state.total_len = 0;
|
||||
_state.memsize = 0;
|
||||
_state.memory = new byte[16];
|
||||
}
|
||||
|
||||
public bool Update(byte[] input, int len)
|
||||
{
|
||||
int index = 0;
|
||||
|
||||
_state.total_len += (uint)len;
|
||||
|
||||
if (_state.memsize + len < 16) // 버퍼 + 입력길이가 16바이트 이하일경우 버퍼에 저장만 해둔다
|
||||
{
|
||||
Array.Copy(input, 0, _state.memory, _state.memsize, len);
|
||||
_state.memsize += len;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
if (_state.memsize > 0) // 이전데이터가 남아있을경우 먼저 처리한다.
|
||||
{
|
||||
Array.Copy(input, 0, _state.memory, _state.memsize, 16 - _state.memsize);
|
||||
|
||||
_state.v1 = CalcSubHash(_state.v1, _state.memory, index);
|
||||
index += 4;
|
||||
_state.v2 = CalcSubHash(_state.v2, _state.memory, index);
|
||||
index += 4;
|
||||
_state.v3 = CalcSubHash(_state.v3, _state.memory, index);
|
||||
index += 4;
|
||||
_state.v4 = CalcSubHash(_state.v4, _state.memory, index);
|
||||
index += 4;
|
||||
|
||||
index = 0;
|
||||
_state.memsize = 0;
|
||||
}
|
||||
|
||||
if (index <= len - 16)
|
||||
{
|
||||
int limit = len - 16;
|
||||
uint v1 = _state.v1;
|
||||
uint v2 = _state.v2;
|
||||
uint v3 = _state.v3;
|
||||
uint v4 = _state.v4;
|
||||
|
||||
do
|
||||
{
|
||||
v1 = CalcSubHash(v1, input, index);
|
||||
index += 4;
|
||||
v2 = CalcSubHash(v2, input, index);
|
||||
index += 4;
|
||||
v3 = CalcSubHash(v3, input, index);
|
||||
index += 4;
|
||||
v4 = CalcSubHash(v4, input, index);
|
||||
index += 4;
|
||||
} while (index <= limit);
|
||||
|
||||
_state.v1 = v1;
|
||||
_state.v2 = v2;
|
||||
_state.v3 = v3;
|
||||
_state.v4 = v4;
|
||||
}
|
||||
|
||||
if (index < len)
|
||||
{
|
||||
Array.Copy(input, index, _state.memory, 0, len - index);
|
||||
_state.memsize = len - index;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
public uint Digest()
|
||||
{
|
||||
uint h32;
|
||||
int index = 0;
|
||||
if (_state.total_len >= 16)
|
||||
{
|
||||
h32 = RotateLeft(_state.v1, 1) + RotateLeft(_state.v2, 7) + RotateLeft(_state.v3, 12) + RotateLeft(_state.v4, 18);
|
||||
}
|
||||
else
|
||||
{
|
||||
h32 = _state.seed + PRIME32_5;
|
||||
}
|
||||
|
||||
h32 += (UInt32)_state.total_len;
|
||||
|
||||
while (index <= _state.memsize - 4)
|
||||
{
|
||||
h32 += BitConverter.ToUInt32(_state.memory, index) * PRIME32_3;
|
||||
h32 = RotateLeft(h32, 17) * PRIME32_4;
|
||||
index += 4;
|
||||
}
|
||||
|
||||
while (index < _state.memsize)
|
||||
{
|
||||
h32 += _state.memory[index] * PRIME32_5;
|
||||
h32 = RotateLeft(h32, 11) * PRIME32_1;
|
||||
index++;
|
||||
}
|
||||
|
||||
h32 ^= h32 >> 15;
|
||||
h32 *= PRIME32_2;
|
||||
h32 ^= h32 >> 13;
|
||||
h32 *= PRIME32_3;
|
||||
h32 ^= h32 >> 16;
|
||||
|
||||
return h32;
|
||||
}
|
||||
private static uint CalcSubHash(uint value, byte[] buf, int index)
|
||||
{
|
||||
uint read_value = BitConverter.ToUInt32(buf, index);
|
||||
value += read_value * PRIME32_2;
|
||||
value = RotateLeft(value, 13);
|
||||
value *= PRIME32_1;
|
||||
return value;
|
||||
}
|
||||
|
||||
private static uint RotateLeft(uint value, int count)
|
||||
{
|
||||
return (value << count) | (value >> (32 - count));
|
||||
}
|
||||
}
|
||||
}
|
||||
386
SabreTools.Library/Help/Feature.cs
Normal file
386
SabreTools.Library/Help/Feature.cs
Normal file
@@ -0,0 +1,386 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
|
||||
using SabreTools.Helper.Data;
|
||||
|
||||
namespace SabreTools.Helper.Help
|
||||
{
|
||||
public class Feature
|
||||
{
|
||||
#region Private variables
|
||||
|
||||
private List<string> _flags;
|
||||
private string _description;
|
||||
private FeatureType _featureType;
|
||||
private Dictionary<string, Feature> _features;
|
||||
private List<string> _additionalNotes;
|
||||
private bool _foundOnce = false;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Constructors
|
||||
|
||||
public Feature()
|
||||
{
|
||||
_flags = new List<string>();
|
||||
_description = null;
|
||||
_featureType = FeatureType.Flag;
|
||||
_features = new Dictionary<string, Feature>();
|
||||
_additionalNotes = new List<string>();
|
||||
}
|
||||
|
||||
public Feature(string flag, string description, FeatureType featureType, List<string> additionalNotes)
|
||||
{
|
||||
List<string> flags = new List<string>();
|
||||
flags.Add(flag);
|
||||
_flags = flags;
|
||||
_description = description;
|
||||
_featureType = featureType;
|
||||
_features = new Dictionary<string, Feature>();
|
||||
_additionalNotes = additionalNotes;
|
||||
}
|
||||
|
||||
public Feature(List<string> flags, string description, FeatureType featureType, List<string> additionalNotes)
|
||||
{
|
||||
_flags = flags;
|
||||
_description = description;
|
||||
_featureType = featureType;
|
||||
_features = new Dictionary<string, Feature>();
|
||||
_additionalNotes = additionalNotes;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Accessors
|
||||
|
||||
/// <summary>
|
||||
/// Directly address a given subfeature
|
||||
/// </summary>
|
||||
public Feature this[string name]
|
||||
{
|
||||
get { return _features[name]; }
|
||||
set { _features[name] = value; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Add a new feature for this feature
|
||||
/// </summary>
|
||||
/// <param name="name">Name of the feature to add</param>
|
||||
/// <param name="feature"></param>
|
||||
public void AddFeature(string name, Feature feature)
|
||||
{
|
||||
if (_features == null)
|
||||
{
|
||||
_features = new Dictionary<string, Feature>();
|
||||
}
|
||||
|
||||
lock(_features)
|
||||
{
|
||||
if (!_features.ContainsKey(name))
|
||||
{
|
||||
_features.Add(name, feature);
|
||||
}
|
||||
else
|
||||
{
|
||||
_features[name] = feature;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Add a new flag for this feature
|
||||
/// </summary>
|
||||
/// <param name="flag">Flag to add for this feature</param>
|
||||
public void AddFlag(string flag)
|
||||
{
|
||||
if (_flags == null)
|
||||
{
|
||||
_flags = new List<string>();
|
||||
}
|
||||
|
||||
lock (_flags)
|
||||
{
|
||||
_flags.Add(flag);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Add a set of new flags for this feature
|
||||
/// </summary>
|
||||
/// <param name="flags">List of flags to add to this feature</param>
|
||||
public void AddFlags(List<string> flags)
|
||||
{
|
||||
if (_flags == null)
|
||||
{
|
||||
_flags = new List<string>();
|
||||
}
|
||||
|
||||
lock (_flags)
|
||||
{
|
||||
_flags.AddRange(flags);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Add a new additional note to this feature
|
||||
/// </summary>
|
||||
/// <param name="note">Note to add for this feature</param>
|
||||
public void AddNote(string note)
|
||||
{
|
||||
if (_additionalNotes == null)
|
||||
{
|
||||
_additionalNotes = new List<string>();
|
||||
}
|
||||
|
||||
lock (_additionalNotes)
|
||||
{
|
||||
_additionalNotes.Add(note);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Add a set of new notes for this feature
|
||||
/// </summary>
|
||||
/// <param name="notes">List of notes to add to this feature</param>
|
||||
public void AddNotes(List<string> notes)
|
||||
{
|
||||
if (_additionalNotes == null)
|
||||
{
|
||||
_additionalNotes = new List<string>();
|
||||
}
|
||||
|
||||
lock (_additionalNotes)
|
||||
{
|
||||
_additionalNotes.AddRange(notes);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Returns if a flag exists for the current feature
|
||||
/// </summary>
|
||||
/// <param name="name">Name of the flag to check</param>
|
||||
/// <returns>True if the flag was found, false otherwise</returns>
|
||||
public bool ContainsFlag(string name)
|
||||
{
|
||||
bool success = false;
|
||||
|
||||
// Loop through the flags
|
||||
foreach (string flag in _flags)
|
||||
{
|
||||
if (flag == name)
|
||||
{
|
||||
success = true;
|
||||
break;
|
||||
}
|
||||
else if (flag.TrimStart('-') == name)
|
||||
{
|
||||
success = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return success;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Returns if the feature contains a flag that starts with the given character
|
||||
/// </summary>
|
||||
/// <param name="c">Character to check against</param>
|
||||
/// <returns>True if the flag was found, false otherwise</returns>
|
||||
public bool StartsWith(char c)
|
||||
{
|
||||
bool success = false;
|
||||
|
||||
// Loop through the flags
|
||||
foreach (string flag in _flags)
|
||||
{
|
||||
if (flag.TrimStart('-').ToLowerInvariant()[0] == c)
|
||||
{
|
||||
success = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return success;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Instance Methods
|
||||
|
||||
/// <summary>
|
||||
/// Output this feature only
|
||||
/// </summary>
|
||||
/// <param name="pre">Positive number representing number of spaces to put in front of the feature</param>
|
||||
/// <param name="midpoint">Positive number representing the column where the description should start</param>
|
||||
public List<string> Output(int pre = 0, int midpoint = 0)
|
||||
{
|
||||
// Create the output list
|
||||
List<string> outputList = new List<string>();
|
||||
|
||||
// Build the output string first
|
||||
string output = "";
|
||||
|
||||
// Add the pre-space first
|
||||
string prespace = "";
|
||||
for (int i = 0; i < pre; i++)
|
||||
{
|
||||
prespace += " ";
|
||||
}
|
||||
output += prespace;
|
||||
|
||||
// Now add all flags
|
||||
output += String.Join(", ", _flags);
|
||||
|
||||
// If we have a midpoint set, check to see if the string needs padding
|
||||
if (midpoint > 0 && output.Length < midpoint)
|
||||
{
|
||||
while (output.Length < midpoint)
|
||||
{
|
||||
output += " ";
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
output += " ";
|
||||
}
|
||||
|
||||
// Append the description
|
||||
output += _description;
|
||||
|
||||
// Now append it to the list
|
||||
outputList.Add(output);
|
||||
|
||||
return outputList;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Output this feature and all subfeatures
|
||||
/// </summary>
|
||||
/// <param name="tabLevel">Level of indentation for this feature</param>
|
||||
/// <param name="pre">Positive number representing number of spaces to put in front of the feature</param>
|
||||
/// <param name="midpoint">Positive number representing the column where the description should start</param>
|
||||
public List<string> OutputRecursive(int tabLevel, int pre = 0, int midpoint = 0)
|
||||
{
|
||||
// Create the output list
|
||||
List<string> outputList = new List<string>();
|
||||
|
||||
// Build the output string first
|
||||
string output = "";
|
||||
|
||||
// Normalize based on the tab level
|
||||
int preAdjusted = pre;
|
||||
int midpointAdjusted = midpoint;
|
||||
if (tabLevel > 0)
|
||||
{
|
||||
preAdjusted += 4 * tabLevel;
|
||||
midpointAdjusted += 4 * tabLevel;
|
||||
}
|
||||
|
||||
// Add the pre-space first
|
||||
string prespace = "";
|
||||
for (int i = 0; i < preAdjusted; i++)
|
||||
{
|
||||
prespace += " ";
|
||||
}
|
||||
output += prespace;
|
||||
|
||||
// Now add all flags
|
||||
output += String.Join(", ", _flags);
|
||||
|
||||
// If we have a midpoint set, check to see if the string needs padding
|
||||
if (midpoint > 0 && output.Length < midpointAdjusted)
|
||||
{
|
||||
while (output.Length < midpointAdjusted)
|
||||
{
|
||||
output += " ";
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
output += " ";
|
||||
}
|
||||
|
||||
// Append the description
|
||||
output += _description;
|
||||
|
||||
// Now append it to the list
|
||||
outputList.Add(output);
|
||||
|
||||
// Now let's append all subfeatures
|
||||
foreach (string feature in _features.Keys)
|
||||
{
|
||||
outputList.AddRange(_features[feature].OutputRecursive(tabLevel + 1, pre, midpoint));
|
||||
}
|
||||
|
||||
// Finally, let's append all additional notes
|
||||
if (_additionalNotes != null && _additionalNotes.Count > 0)
|
||||
{
|
||||
foreach (string note in _additionalNotes)
|
||||
{
|
||||
outputList.Add(prespace + note);
|
||||
}
|
||||
}
|
||||
|
||||
return outputList;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Validate whether a flag is valid for this feature or not
|
||||
/// </summary>
|
||||
/// <param name="input">Input to check against</param>
|
||||
/// <param name="exact">True if just this feature should be checked, false if all subfeatures are checked as well</param>
|
||||
/// <returns>True if the flag was valid, false otherwise</returns>
|
||||
public bool ValidateInput(string input, bool exact = false)
|
||||
{
|
||||
bool valid = false;
|
||||
|
||||
// Determine what we should be looking for
|
||||
switch (_featureType)
|
||||
{
|
||||
// If we have a flag, make sure it doesn't have an equal sign in it
|
||||
case FeatureType.Flag:
|
||||
valid = !input.Contains("=") && _flags.Contains(input);
|
||||
break;
|
||||
|
||||
// If we have an input, make sure it has an equals sign in it
|
||||
case FeatureType.List:
|
||||
case FeatureType.String:
|
||||
valid = input.Contains("=") && _flags.Contains(input.Split('=')[0]);
|
||||
break;
|
||||
}
|
||||
|
||||
// If we haven't found a valid flag and we're not looking for just this feature, check to see if any of the subfeatures are valid
|
||||
if (!valid && !exact)
|
||||
{
|
||||
foreach (string feature in _features.Keys)
|
||||
{
|
||||
valid = _features[feature].ValidateInput(input);
|
||||
|
||||
// If we've found a valid feature, we break out
|
||||
if (valid)
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If we've already found this flag before and we don't allow duplicates, set valid to false
|
||||
if (valid && _foundOnce && _featureType != FeatureType.List)
|
||||
{
|
||||
valid = false;
|
||||
}
|
||||
|
||||
// If we're not valid at this point, we want to check if this flag is a file or a folder
|
||||
if (!valid)
|
||||
{
|
||||
valid = File.Exists(input) || Directory.Exists(input);
|
||||
}
|
||||
|
||||
return valid;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
300
SabreTools.Library/Help/Help.cs
Normal file
300
SabreTools.Library/Help/Help.cs
Normal file
@@ -0,0 +1,300 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace SabreTools.Helper.Help
|
||||
{
|
||||
public class Help
|
||||
{
|
||||
#region Private variables
|
||||
|
||||
private List<string> _header;
|
||||
private Dictionary<string, Feature> _features;
|
||||
private static string _barrier = "-----------------------------------------";
|
||||
|
||||
#endregion
|
||||
|
||||
#region Constructors
|
||||
|
||||
public Help()
|
||||
{
|
||||
_header = new List<string>();
|
||||
_features = new Dictionary<string, Feature>();
|
||||
}
|
||||
|
||||
public Help(List<string> header)
|
||||
{
|
||||
_header = header;
|
||||
_features = new Dictionary<string, Feature>();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Accessors
|
||||
|
||||
public Feature this[string name]
|
||||
{
|
||||
get
|
||||
{
|
||||
if (_features == null)
|
||||
{
|
||||
_features = new Dictionary<string, Feature>();
|
||||
}
|
||||
|
||||
if (!_features.ContainsKey(name))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
return _features[name];
|
||||
}
|
||||
set
|
||||
{
|
||||
if (_features == null)
|
||||
{
|
||||
_features = new Dictionary<string, Feature>();
|
||||
}
|
||||
|
||||
if (_features.ContainsKey(name))
|
||||
{
|
||||
_features[name] = value;
|
||||
}
|
||||
else
|
||||
{
|
||||
_features.Add(name, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Add a new feature to the help
|
||||
/// </summary>
|
||||
/// <param name="name">Name of the feature to add</param>
|
||||
/// <param name="feature">Feature object to map to</param>
|
||||
public void Add(string name, Feature feature)
|
||||
{
|
||||
if (_features == null)
|
||||
{
|
||||
_features = new Dictionary<string, Feature>();
|
||||
}
|
||||
|
||||
lock (_features)
|
||||
{
|
||||
_features.Add(name, feature);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Instance Methods
|
||||
|
||||
/// <summary>
|
||||
/// Get the feature name for a given flag or short name
|
||||
/// </summary>
|
||||
/// <returns>Feature name</returns>
|
||||
public string GetFeatureName(string name)
|
||||
{
|
||||
string feature = "";
|
||||
|
||||
// Loop through the features
|
||||
foreach (string featureName in _features.Keys)
|
||||
{
|
||||
if (_features[featureName].ValidateInput(name, exact: true))
|
||||
{
|
||||
feature = featureName;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return feature;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Output top-level features only
|
||||
/// </summary>
|
||||
public void OutputGenericHelp()
|
||||
{
|
||||
// Start building the output list
|
||||
List<string> output = new List<string>();
|
||||
|
||||
// Append the header first
|
||||
output.AddRange(_header);
|
||||
|
||||
// Now append all available top-level flags
|
||||
output.Add("Available options:");
|
||||
foreach (string feature in _features.Keys)
|
||||
{
|
||||
output.AddRange(_features[feature].Output(pre: 2, midpoint: 25));
|
||||
}
|
||||
|
||||
// And append the generic ending
|
||||
output.Add("");
|
||||
output.Add("For information on available flags, put the option name after help");
|
||||
|
||||
// Now write out everything in a staged manner
|
||||
WriteOutWithPauses(output);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Output all features recursively
|
||||
/// </summary>
|
||||
public void OutputAllHelp()
|
||||
{
|
||||
// Start building the output list
|
||||
List<string> output = new List<string>();
|
||||
|
||||
// Append the header first
|
||||
output.AddRange(_header);
|
||||
|
||||
// Now append all available flags recursively
|
||||
output.Add("Available options:");
|
||||
foreach (string feature in _features.Keys)
|
||||
{
|
||||
output.AddRange(_features[feature].OutputRecursive(0, pre: 2, midpoint: 25));
|
||||
}
|
||||
|
||||
// Now write out everything in a staged manner
|
||||
WriteOutWithPauses(output);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Output the SabreTools suite credits
|
||||
/// </summary>
|
||||
public void OutputCredits()
|
||||
{
|
||||
List<string> credits = new List<string>();
|
||||
credits.Add(_barrier);
|
||||
credits.Add("Credits");
|
||||
credits.Add(_barrier);
|
||||
credits.Add("");
|
||||
credits.Add("Programmer / Lead: Matt Nadareski (darksabre76)");
|
||||
credits.Add("Additional code: emuLOAD, @tractivo, motoschifo");
|
||||
credits.Add("Testing: emuLOAD, @tractivo, Kludge, Obiwantje, edc");
|
||||
credits.Add("Suggestions: edc, AcidX, Amiga12, EliUmniCk");
|
||||
credits.Add("Based on work by: The Wizard of DATz");
|
||||
WriteOutWithPauses(credits);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Output a single feature recursively
|
||||
/// </summary>
|
||||
public void OutputIndividualFeature(string featurename)
|
||||
{
|
||||
// Start building the output list
|
||||
List<string> output = new List<string>();
|
||||
|
||||
// Now try to find the feature that has the name included
|
||||
string realname = null;
|
||||
List<string> startsWith = new List<string>();
|
||||
foreach (string feature in _features.Keys)
|
||||
{
|
||||
// If we have a match to the feature name somehow
|
||||
if (feature == featurename)
|
||||
{
|
||||
realname = feature;
|
||||
break;
|
||||
}
|
||||
|
||||
// If we have a match within the flags
|
||||
else if (_features[feature].ContainsFlag(featurename))
|
||||
{
|
||||
realname = feature;
|
||||
break;
|
||||
}
|
||||
|
||||
// Otherwise, we want to get features with the same start
|
||||
else if (_features[feature].StartsWith(featurename[0]))
|
||||
{
|
||||
startsWith.Add(feature);
|
||||
}
|
||||
}
|
||||
|
||||
// If we have a real name found, append all available subflags recursively
|
||||
if (realname != null)
|
||||
{
|
||||
output.Add("Available options for " + realname + ":");
|
||||
output.AddRange(_features[realname].OutputRecursive(0, pre: 2, midpoint: 25));
|
||||
}
|
||||
|
||||
// If no name was found but we have possible matches, show them
|
||||
else if (startsWith.Count > 0)
|
||||
{
|
||||
output.Add("\"" + featurename + "\" not found. Did you mean:");
|
||||
foreach (string possible in startsWith)
|
||||
{
|
||||
output.AddRange(_features[possible].Output(pre: 2, midpoint: 25));
|
||||
}
|
||||
}
|
||||
|
||||
// Now write out everything in a staged manner
|
||||
WriteOutWithPauses(output);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Check if a flag is a top-level (main application) flag
|
||||
/// </summary>
|
||||
/// <param name="flag">Name of the flag to check</param>
|
||||
/// <returns>True if the feature was found, false otherwise</returns>
|
||||
public bool TopLevelFlag(string flag)
|
||||
{
|
||||
bool success = false;
|
||||
|
||||
// Loop through the features and check
|
||||
foreach (string feature in _features.Keys)
|
||||
{
|
||||
if (_features[feature].ValidateInput(flag, exact: true))
|
||||
{
|
||||
success = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return success;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Write out the help text with pauses, if needed
|
||||
/// </summary>
|
||||
/// <param name="helptext"></param>
|
||||
private void WriteOutWithPauses(List<string> helptext)
|
||||
{
|
||||
// Now output based on the size of the screen
|
||||
int i = 0;
|
||||
for (int line = 0; line < helptext.Count; line++)
|
||||
{
|
||||
string help = helptext[line];
|
||||
|
||||
Console.WriteLine(help);
|
||||
i++;
|
||||
|
||||
// If we're not being redirected and we reached the size of the screen, pause
|
||||
if (i == Console.WindowHeight - 3 && line != helptext.Count - 1)
|
||||
{
|
||||
i = 0;
|
||||
Pause();
|
||||
}
|
||||
}
|
||||
Pause();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Pause on console output
|
||||
/// </summary>
|
||||
private static void Pause()
|
||||
{
|
||||
if (!Console.IsOutputRedirected)
|
||||
{
|
||||
Console.WriteLine();
|
||||
Console.WriteLine("Press enter to continue...");
|
||||
Console.ReadLine();
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* Here is a non-comprehensive list of things we want a help method to do:
|
||||
* - Parse and return flags from arguments
|
||||
* - Perform partial matching to find potentially similar features
|
||||
*/
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
674
SabreTools.Library/Licenses/LICENSE
Normal file
674
SabreTools.Library/Licenses/LICENSE
Normal file
@@ -0,0 +1,674 @@
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The GNU General Public License is a free, copyleft license for
|
||||
software and other kinds of works.
|
||||
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
the GNU General Public License is intended to guarantee your freedom to
|
||||
share and change all versions of a program--to make sure it remains free
|
||||
software for all its users. We, the Free Software Foundation, use the
|
||||
GNU General Public License for most of our software; it applies also to
|
||||
any other work released this way by its authors. You can apply it to
|
||||
your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to prevent others from denying you
|
||||
these rights or asking you to surrender the rights. Therefore, you have
|
||||
certain responsibilities if you distribute copies of the software, or if
|
||||
you modify it: responsibilities to respect the freedom of others.
|
||||
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must pass on to the recipients the same
|
||||
freedoms that you received. You must make sure that they, too, receive
|
||||
or can get the source code. And you must show them these terms so they
|
||||
know their rights.
|
||||
|
||||
Developers that use the GNU GPL protect your rights with two steps:
|
||||
(1) assert copyright on the software, and (2) offer you this License
|
||||
giving you legal permission to copy, distribute and/or modify it.
|
||||
|
||||
For the developers' and authors' protection, the GPL clearly explains
|
||||
that there is no warranty for this free software. For both users' and
|
||||
authors' sake, the GPL requires that modified versions be marked as
|
||||
changed, so that their problems will not be attributed erroneously to
|
||||
authors of previous versions.
|
||||
|
||||
Some devices are designed to deny users access to install or run
|
||||
modified versions of the software inside them, although the manufacturer
|
||||
can do so. This is fundamentally incompatible with the aim of
|
||||
protecting users' freedom to change the software. The systematic
|
||||
pattern of such abuse occurs in the area of products for individuals to
|
||||
use, which is precisely where it is most unacceptable. Therefore, we
|
||||
have designed this version of the GPL to prohibit the practice for those
|
||||
products. If such problems arise substantially in other domains, we
|
||||
stand ready to extend this provision to those domains in future versions
|
||||
of the GPL, as needed to protect the freedom of users.
|
||||
|
||||
Finally, every program is threatened constantly by software patents.
|
||||
States should not allow patents to restrict development and use of
|
||||
software on general-purpose computers, but in those that do, we wish to
|
||||
avoid the special danger that patents applied to a free program could
|
||||
make it effectively proprietary. To prevent this, the GPL assures that
|
||||
patents cannot be used to render the program non-free.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
TERMS AND CONDITIONS
|
||||
|
||||
0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||
works, such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
License. Each licensee is addressed as "you". "Licensees" and
|
||||
"recipients" may be individuals or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
in a fashion requiring copyright permission, other than the making of an
|
||||
exact copy. The resulting work is called a "modified version" of the
|
||||
earlier work or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
on the Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification), making available to the
|
||||
public, and in some countries other activities as well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
parties to make or receive copies. Mere interaction with a user through
|
||||
a computer network, with no transfer of a copy, is not conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices"
|
||||
to the extent that it includes a convenient and prominently visible
|
||||
feature that (1) displays an appropriate copyright notice, and (2)
|
||||
tells the user that there is no warranty for the work (except to the
|
||||
extent that warranties are provided), that licensees may convey the
|
||||
work under this License, and how to view a copy of this License. If
|
||||
the interface presents a list of user commands or options, such as a
|
||||
menu, a prominent item in the list meets this criterion.
|
||||
|
||||
1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work
|
||||
for making modifications to it. "Object code" means any non-source
|
||||
form of a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
standard defined by a recognized standards body, or, in the case of
|
||||
interfaces specified for a particular programming language, one that
|
||||
is widely used among developers working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
than the work as a whole, that (a) is included in the normal form of
|
||||
packaging a Major Component, but which is not part of that Major
|
||||
Component, and (b) serves only to enable use of the work with that
|
||||
Major Component, or to implement a Standard Interface for which an
|
||||
implementation is available to the public in source code form. A
|
||||
"Major Component", in this context, means a major essential component
|
||||
(kernel, window system, and so on) of the specific operating system
|
||||
(if any) on which the executable work runs, or a compiler used to
|
||||
produce the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
the source code needed to generate, install, and (for an executable
|
||||
work) run the object code and to modify the work, including scripts to
|
||||
control those activities. However, it does not include the work's
|
||||
System Libraries, or general-purpose tools or generally available free
|
||||
programs which are used unmodified in performing those activities but
|
||||
which are not part of the work. For example, Corresponding Source
|
||||
includes interface definition files associated with source files for
|
||||
the work, and the source code for shared libraries and dynamically
|
||||
linked subprograms that the work is specifically designed to require,
|
||||
such as by intimate data communication or control flow between those
|
||||
subprograms and other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users
|
||||
can regenerate automatically from other parts of the Corresponding
|
||||
Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that
|
||||
same work.
|
||||
|
||||
2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of
|
||||
copyright on the Program, and are irrevocable provided the stated
|
||||
conditions are met. This License explicitly affirms your unlimited
|
||||
permission to run the unmodified Program. The output from running a
|
||||
covered work is covered by this License only if the output, given its
|
||||
content, constitutes a covered work. This License acknowledges your
|
||||
rights of fair use or other equivalent, as provided by copyright law.
|
||||
|
||||
You may make, run and propagate covered works that you do not
|
||||
convey, without conditions so long as your license otherwise remains
|
||||
in force. You may convey covered works to others for the sole purpose
|
||||
of having them make modifications exclusively for you, or provide you
|
||||
with facilities for running those works, provided that you comply with
|
||||
the terms of this License in conveying all material for which you do
|
||||
not control copyright. Those thus making or running the covered works
|
||||
for you must do so exclusively on your behalf, under your direction
|
||||
and control, on terms that prohibit them from making any copies of
|
||||
your copyrighted material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under
|
||||
the conditions stated below. Sublicensing is not allowed; section 10
|
||||
makes it unnecessary.
|
||||
|
||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological
|
||||
measure under any applicable law fulfilling obligations under article
|
||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||
similar laws prohibiting or restricting circumvention of such
|
||||
measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
circumvention of technological measures to the extent such circumvention
|
||||
is effected by exercising rights under this License with respect to
|
||||
the covered work, and you disclaim any intention to limit operation or
|
||||
modification of the work as a means of enforcing, against the work's
|
||||
users, your or third parties' legal rights to forbid circumvention of
|
||||
technological measures.
|
||||
|
||||
4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice;
|
||||
keep intact all notices stating that this License and any
|
||||
non-permissive terms added in accord with section 7 apply to the code;
|
||||
keep intact all notices of the absence of any warranty; and give all
|
||||
recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
and you may offer support or warranty protection for a fee.
|
||||
|
||||
5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
produce it from the Program, in the form of source code under the
|
||||
terms of section 4, provided that you also meet all of these conditions:
|
||||
|
||||
a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
|
||||
b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under section
|
||||
7. This requirement modifies the requirement in section 4 to
|
||||
"keep intact all notices".
|
||||
|
||||
c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
|
||||
d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent
|
||||
works, which are not by their nature extensions of the covered work,
|
||||
and which are not combined with it such as to form a larger program,
|
||||
in or on a volume of a storage or distribution medium, is called an
|
||||
"aggregate" if the compilation and its resulting copyright are not
|
||||
used to limit the access or legal rights of the compilation's users
|
||||
beyond what the individual works permit. Inclusion of a covered work
|
||||
in an aggregate does not cause this License to apply to the other
|
||||
parts of the aggregate.
|
||||
|
||||
6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms
|
||||
of sections 4 and 5, provided that you also convey the
|
||||
machine-readable Corresponding Source under the terms of this License,
|
||||
in one of these ways:
|
||||
|
||||
a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
|
||||
b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the
|
||||
Corresponding Source from a network server at no charge.
|
||||
|
||||
c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
|
||||
d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
|
||||
e) Convey the object code using peer-to-peer transmission, provided
|
||||
you inform other peers where the object code and Corresponding
|
||||
Source of the work are being offered to the general public at no
|
||||
charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal, family,
|
||||
or household purposes, or (2) anything designed or sold for incorporation
|
||||
into a dwelling. In determining whether a product is a consumer product,
|
||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||
product received by a particular user, "normally used" refers to a
|
||||
typical or common use of that class of product, regardless of the status
|
||||
of the particular user or of the way in which the particular user
|
||||
actually uses, or expects or is expected to use, the product. A product
|
||||
is a consumer product regardless of whether the product has substantial
|
||||
commercial, industrial or non-consumer uses, unless such uses represent
|
||||
the only significant mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to install
|
||||
and execute modified versions of a covered work in that User Product from
|
||||
a modified version of its Corresponding Source. The information must
|
||||
suffice to ensure that the continued functioning of the modified object
|
||||
code is in no case prevented or interfered with solely because
|
||||
modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
fixed term (regardless of how the transaction is characterized), the
|
||||
Corresponding Source conveyed under this section must be accompanied
|
||||
by the Installation Information. But this requirement does not apply
|
||||
if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or updates
|
||||
for a work that has been modified or installed by the recipient, or for
|
||||
the User Product in which it has been modified or installed. Access to a
|
||||
network may be denied when the modification itself materially and
|
||||
adversely affects the operation of the network or violates the rules and
|
||||
protocols for communication across the network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
that they are valid under applicable law. If additional permissions
|
||||
apply only to part of the Program, that part may be used separately
|
||||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders of
|
||||
that material) supplement the terms of this License with terms:
|
||||
|
||||
a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
|
||||
b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
|
||||
c) Prohibiting misrepresentation of the origin of that material, or
|
||||
requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
|
||||
d) Limiting the use for publicity purposes of names of licensors or
|
||||
authors of the material; or
|
||||
|
||||
e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
|
||||
f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions of
|
||||
it) with contractual assumptions of liability to the recipient, for
|
||||
any liability that these contractual assumptions directly impose on
|
||||
those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions;
|
||||
the above requirements apply either way.
|
||||
|
||||
8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your
|
||||
license from a particular copyright holder is reinstated (a)
|
||||
provisionally, unless and until the copyright holder explicitly and
|
||||
finally terminates your license, and (b) permanently, if the copyright
|
||||
holder fails to notify you of the violation by some reasonable means
|
||||
prior to 60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or
|
||||
run a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
owned or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
the scope of its coverage, prohibits the exercise of, or is
|
||||
conditioned on the non-exercise of one or more of the rights that are
|
||||
specifically granted under this License. You may not convey a covered
|
||||
work if you are a party to an arrangement with a third party that is
|
||||
in the business of distributing software, under which you make payment
|
||||
to the third party based on the extent of your activity of conveying
|
||||
the work, and under which the third party grants, to any of the
|
||||
parties who would receive the covered work from you, a discriminatory
|
||||
patent license (a) in connection with copies of the covered work
|
||||
conveyed by you (or copies made from those copies), or (b) primarily
|
||||
for and in connection with specific products or compilations that
|
||||
contain the covered work, unless you entered into that arrangement,
|
||||
or that patent license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you may
|
||||
not convey it at all. For example, if you agree to terms that obligate you
|
||||
to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Use with the GNU Affero General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU Affero General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the special requirements of the GNU Affero General Public License,
|
||||
section 13, concerning interaction through a network will apply to the
|
||||
combination as such.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If the program does terminal interaction, make it output a short
|
||||
notice like this when it starts in an interactive mode:
|
||||
|
||||
<program> Copyright (C) <year> <name of author>
|
||||
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, your program's commands
|
||||
might be different; for a GUI interface, you would use an "about box".
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU GPL, see
|
||||
<http://www.gnu.org/licenses/>.
|
||||
|
||||
The GNU General Public License does not permit incorporating your program
|
||||
into proprietary programs. If your program is a subroutine library, you
|
||||
may consider it more useful to permit linking proprietary applications with
|
||||
the library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License. But first, please read
|
||||
<http://www.gnu.org/philosophy/why-not-lgpl.html>.
|
||||
19
SabreTools.Library/Licenses/LICENSE.alphafs.txt
Normal file
19
SabreTools.Library/Licenses/LICENSE.alphafs.txt
Normal file
@@ -0,0 +1,19 @@
|
||||
The MIT License (MIT)
|
||||
=====================
|
||||
|
||||
Copyright (c) 2008-2016 Peter Palotas, Jeffrey Jangli, Alexandr Normuradov
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this
|
||||
software and associated documentation files (the "Software"), to deal in the Software
|
||||
without restriction, including without limitation the rights to use, copy, modify, merge,
|
||||
publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to
|
||||
whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or
|
||||
substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
|
||||
BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
32
SabreTools.Library/Licenses/LICENSE.jzlib.txt
Normal file
32
SabreTools.Library/Licenses/LICENSE.jzlib.txt
Normal file
@@ -0,0 +1,32 @@
|
||||
The ZLIB library, available as Ionic.Zlib.dll or as part of DotNetZip,
|
||||
is a ported-then-modified version of jzlib. The following applies to jzlib:
|
||||
|
||||
JZlib 0.0.* were released under the GNU LGPL license. Later, we have switched
|
||||
over to a BSD-style license.
|
||||
|
||||
------------------------------------------------------------------------------
|
||||
Copyright (c) 2000,2001,2002,2003 ymnk, JCraft,Inc. All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright notice,
|
||||
this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in
|
||||
the documentation and/or other materials provided with the distribution.
|
||||
|
||||
3. The names of the authors may not be used to endorse or promote products
|
||||
derived from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES,
|
||||
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JCRAFT,
|
||||
INC. OR ANY CONTRIBUTORS TO THIS SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
|
||||
OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
|
||||
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
165
SabreTools.Library/Licenses/LICENSE.sevenzipsharp.txt
Normal file
165
SabreTools.Library/Licenses/LICENSE.sevenzipsharp.txt
Normal file
@@ -0,0 +1,165 @@
|
||||
GNU LESSER GENERAL PUBLIC LICENSE
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
|
||||
This version of the GNU Lesser General Public License incorporates
|
||||
the terms and conditions of version 3 of the GNU General Public
|
||||
License, supplemented by the additional permissions listed below.
|
||||
|
||||
0. Additional Definitions.
|
||||
|
||||
As used herein, "this License" refers to version 3 of the GNU Lesser
|
||||
General Public License, and the "GNU GPL" refers to version 3 of the GNU
|
||||
General Public License.
|
||||
|
||||
"The Library" refers to a covered work governed by this License,
|
||||
other than an Application or a Combined Work as defined below.
|
||||
|
||||
An "Application" is any work that makes use of an interface provided
|
||||
by the Library, but which is not otherwise based on the Library.
|
||||
Defining a subclass of a class defined by the Library is deemed a mode
|
||||
of using an interface provided by the Library.
|
||||
|
||||
A "Combined Work" is a work produced by combining or linking an
|
||||
Application with the Library. The particular version of the Library
|
||||
with which the Combined Work was made is also called the "Linked
|
||||
Version".
|
||||
|
||||
The "Minimal Corresponding Source" for a Combined Work means the
|
||||
Corresponding Source for the Combined Work, excluding any source code
|
||||
for portions of the Combined Work that, considered in isolation, are
|
||||
based on the Application, and not on the Linked Version.
|
||||
|
||||
The "Corresponding Application Code" for a Combined Work means the
|
||||
object code and/or source code for the Application, including any data
|
||||
and utility programs needed for reproducing the Combined Work from the
|
||||
Application, but excluding the System Libraries of the Combined Work.
|
||||
|
||||
1. Exception to Section 3 of the GNU GPL.
|
||||
|
||||
You may convey a covered work under sections 3 and 4 of this License
|
||||
without being bound by section 3 of the GNU GPL.
|
||||
|
||||
2. Conveying Modified Versions.
|
||||
|
||||
If you modify a copy of the Library, and, in your modifications, a
|
||||
facility refers to a function or data to be supplied by an Application
|
||||
that uses the facility (other than as an argument passed when the
|
||||
facility is invoked), then you may convey a copy of the modified
|
||||
version:
|
||||
|
||||
a) under this License, provided that you make a good faith effort to
|
||||
ensure that, in the event an Application does not supply the
|
||||
function or data, the facility still operates, and performs
|
||||
whatever part of its purpose remains meaningful, or
|
||||
|
||||
b) under the GNU GPL, with none of the additional permissions of
|
||||
this License applicable to that copy.
|
||||
|
||||
3. Object Code Incorporating Material from Library Header Files.
|
||||
|
||||
The object code form of an Application may incorporate material from
|
||||
a header file that is part of the Library. You may convey such object
|
||||
code under terms of your choice, provided that, if the incorporated
|
||||
material is not limited to numerical parameters, data structure
|
||||
layouts and accessors, or small macros, inline functions and templates
|
||||
(ten or fewer lines in length), you do both of the following:
|
||||
|
||||
a) Give prominent notice with each copy of the object code that the
|
||||
Library is used in it and that the Library and its use are
|
||||
covered by this License.
|
||||
|
||||
b) Accompany the object code with a copy of the GNU GPL and this license
|
||||
document.
|
||||
|
||||
4. Combined Works.
|
||||
|
||||
You may convey a Combined Work under terms of your choice that,
|
||||
taken together, effectively do not restrict modification of the
|
||||
portions of the Library contained in the Combined Work and reverse
|
||||
engineering for debugging such modifications, if you also do each of
|
||||
the following:
|
||||
|
||||
a) Give prominent notice with each copy of the Combined Work that
|
||||
the Library is used in it and that the Library and its use are
|
||||
covered by this License.
|
||||
|
||||
b) Accompany the Combined Work with a copy of the GNU GPL and this license
|
||||
document.
|
||||
|
||||
c) For a Combined Work that displays copyright notices during
|
||||
execution, include the copyright notice for the Library among
|
||||
these notices, as well as a reference directing the user to the
|
||||
copies of the GNU GPL and this license document.
|
||||
|
||||
d) Do one of the following:
|
||||
|
||||
0) Convey the Minimal Corresponding Source under the terms of this
|
||||
License, and the Corresponding Application Code in a form
|
||||
suitable for, and under terms that permit, the user to
|
||||
recombine or relink the Application with a modified version of
|
||||
the Linked Version to produce a modified Combined Work, in the
|
||||
manner specified by section 6 of the GNU GPL for conveying
|
||||
Corresponding Source.
|
||||
|
||||
1) Use a suitable shared library mechanism for linking with the
|
||||
Library. A suitable mechanism is one that (a) uses at run time
|
||||
a copy of the Library already present on the user's computer
|
||||
system, and (b) will operate properly with a modified version
|
||||
of the Library that is interface-compatible with the Linked
|
||||
Version.
|
||||
|
||||
e) Provide Installation Information, but only if you would otherwise
|
||||
be required to provide such information under section 6 of the
|
||||
GNU GPL, and only to the extent that such information is
|
||||
necessary to install and execute a modified version of the
|
||||
Combined Work produced by recombining or relinking the
|
||||
Application with a modified version of the Linked Version. (If
|
||||
you use option 4d0, the Installation Information must accompany
|
||||
the Minimal Corresponding Source and Corresponding Application
|
||||
Code. If you use option 4d1, you must provide the Installation
|
||||
Information in the manner specified by section 6 of the GNU GPL
|
||||
for conveying Corresponding Source.)
|
||||
|
||||
5. Combined Libraries.
|
||||
|
||||
You may place library facilities that are a work based on the
|
||||
Library side by side in a single library together with other library
|
||||
facilities that are not Applications and are not covered by this
|
||||
License, and convey such a combined library under terms of your
|
||||
choice, if you do both of the following:
|
||||
|
||||
a) Accompany the combined library with a copy of the same work based
|
||||
on the Library, uncombined with any other library facilities,
|
||||
conveyed under the terms of this License.
|
||||
|
||||
b) Give prominent notice with the combined library that part of it
|
||||
is a work based on the Library, and explaining where to find the
|
||||
accompanying uncombined form of the same work.
|
||||
|
||||
6. Revised Versions of the GNU Lesser General Public License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions
|
||||
of the GNU Lesser General Public License from time to time. Such new
|
||||
versions will be similar in spirit to the present version, but may
|
||||
differ in detail to address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Library as you received it specifies that a certain numbered version
|
||||
of the GNU Lesser General Public License "or any later version"
|
||||
applies to it, you have the option of following the terms and
|
||||
conditions either of that published version or of any later version
|
||||
published by the Free Software Foundation. If the Library as you
|
||||
received it does not specify a version number of the GNU Lesser
|
||||
General Public License, you may choose any version of the GNU Lesser
|
||||
General Public License ever published by the Free Software Foundation.
|
||||
|
||||
If the Library as you received it specifies that a proxy can decide
|
||||
whether future versions of the GNU Lesser General Public License shall
|
||||
apply, that proxy's public statement of acceptance of any version is
|
||||
permanent authorization for you to choose that version for the
|
||||
Library.
|
||||
21
SabreTools.Library/Licenses/LICENSE.sharpcompress.txt
Normal file
21
SabreTools.Library/Licenses/LICENSE.sharpcompress.txt
Normal file
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014 Adam Hathcock
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
31
SabreTools.Library/Licenses/LICENSE.zlib.txt
Normal file
31
SabreTools.Library/Licenses/LICENSE.zlib.txt
Normal file
@@ -0,0 +1,31 @@
|
||||
The ZLIB library, available as Ionic.Zlib.dll or as part of DotNetZip,
|
||||
is a ported-then-modified version of jzlib, which itself is based on
|
||||
zlib-1.1.3, the well-known C-language compression library.
|
||||
|
||||
The following notice applies to zlib:
|
||||
|
||||
-----------------------------------------------------------------------
|
||||
|
||||
Copyright (C) 1995-2004 Jean-loup Gailly and Mark Adler
|
||||
|
||||
The ZLIB software is provided 'as-is', without any express or implied
|
||||
warranty. In no event will the authors be held liable for any damages
|
||||
arising from the use of this software.
|
||||
|
||||
Permission is granted to anyone to use this software for any purpose,
|
||||
including commercial applications, and to alter it and redistribute it
|
||||
freely, subject to the following restrictions:
|
||||
|
||||
1. The origin of this software must not be misrepresented; you must not
|
||||
claim that you wrote the original software. If you use this software
|
||||
in a product, an acknowledgment in the product documentation would be
|
||||
appreciated but is not required.
|
||||
2. Altered source versions must be plainly marked as such, and must not be
|
||||
misrepresented as being the original software.
|
||||
3. This notice may not be removed or altered from any source distribution.
|
||||
|
||||
Jean-loup Gailly jloup@gzip.org
|
||||
Mark Adler madler@alumni.caltech.edu
|
||||
|
||||
|
||||
-----------------------------------------------------------------------
|
||||
308
SabreTools.Library/Logger.cs
Normal file
308
SabreTools.Library/Logger.cs
Normal file
@@ -0,0 +1,308 @@
using System;
using System.Text;

using SabreTools.Helper.Data;
using SabreTools.Helper.Tools;

#if MONO
using System.IO;
#else
using Alphaleonis.Win32.Filesystem;

using FileStream = System.IO.FileStream;
using StreamWriter = System.IO.StreamWriter;
#endif

namespace SabreTools.Helper
{
    /// <summary>
    /// Log either to file or to the console
    /// </summary>
    /// <remarks>
    /// TODO: Allow for "triggerable" logging done on an interval (async)
    /// </remarks>
    public class Logger
    {
        // Private instance variables
        private bool _tofile;
        private bool _warnings;
        private bool _errors;
        private string _filename;
        private LogLevel _filter;
        private DateTime _start;
        private StreamWriter _log;

        // Private required variables
        private string _basepath = Path.Combine(Globals.ExeDir, "logs") + Path.DirectorySeparatorChar;

        /// <summary>
        /// Initialize a console-only logger object
        /// </summary>
        public Logger()
        {
            _tofile = false;
            _warnings = false;
            _errors = false;
            _filename = null;
            _filter = LogLevel.VERBOSE;

            Start();
        }

        /// <summary>
        /// Initialize a Logger object with the given information
        /// </summary>
        /// <param name="tofile">True if file should be written to instead of console</param>
        /// <param name="filename">Filename representing log location</param>
        /// <param name="filter">Highest filtering level to be kept, default VERBOSE</param>
        public Logger(bool tofile, string filename, LogLevel filter = LogLevel.VERBOSE)
        {
            _tofile = tofile;
            _warnings = false;
            _errors = false;
            _filename = Path.GetFileNameWithoutExtension(filename) + " (" + DateTime.Now.ToString("yyyy-MM-dd HH-mm-ss") + ")" + Path.GetExtension(filename);
            _filter = filter;

            if (!Directory.Exists(_basepath))
            {
                Directory.CreateDirectory(_basepath);
            }

            Start();
        }

        /// <summary>
        /// Start logging by opening output file (if necessary)
        /// </summary>
        /// <returns>True if the logging was started correctly, false otherwise</returns>
        public bool Start()
        {
            _start = DateTime.Now;
            if (!_tofile)
            {
                return true;
            }

            try
            {
                FileStream logfile = FileTools.TryCreate(Path.Combine(_basepath, _filename));
                _log = new StreamWriter(logfile, Encoding.UTF8, (int)(4 * Constants.KibiByte), true);
                _log.AutoFlush = true;

                _log.WriteLine("Logging started " + DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss"));
                _log.WriteLine(string.Format("Command run: {0}", Globals.CommandLineArgs));
            }
            catch
            {
                return false;
            }

            return true;
        }

        /// <summary>
        /// End logging by closing output file (if necessary)
        /// </summary>
        /// <param name="suppress">True if all ending output is to be suppressed, false otherwise (default)</param>
        /// <returns>True if the logging was ended correctly, false otherwise</returns>
        public bool Close(bool suppress = false)
        {
            if (!suppress)
            {
                if (_warnings)
                {
                    Console.WriteLine("There were warnings in the last run! Check the log for more details");
                }
                if (_errors)
                {
                    Console.WriteLine("There were errors in the last run! Check the log for more details");
                }

                TimeSpan span = DateTime.Now.Subtract(_start);

                // Special case for multi-day runs
                string total = "";
                if (span >= TimeSpan.FromDays(1))
                {
                    total = span.ToString(@"d\:hh\:mm\:ss");
                }
                else
                {
                    total = span.ToString(@"hh\:mm\:ss");
                }

                if (!_tofile)
                {
                    Console.WriteLine("Total runtime: " + total);
                    return true;
                }

                try
                {
                    _log.WriteLine("Logging ended " + DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss"));
                    _log.WriteLine("Total runtime: " + total);
                    Console.WriteLine("Total runtime: " + total);
                    _log.Close();
                }
                catch
                {
                    return false;
                }
            }
            else
            {
                try
                {
                    _log.Close();
                }
                catch
                {
                    return false;
                }
            }

            return true;
        }

        /// <summary>
        /// Write the given string to the log output
        /// </summary>
        /// <param name="output">String to be written to the log</param>
        /// <param name="loglevel">Severity of the information being logged</param>
        /// <param name="appendPrefix">True if the level and datetime should be prepended to each statement, false otherwise</param>
        /// <returns>True if the output could be written, false otherwise</returns>
        private bool Log(string output, LogLevel loglevel, bool appendPrefix)
        {
            // If the log level is less than the filter level, we skip it but claim we didn't
            if (loglevel < _filter)
            {
                return true;
            }

            // USER and ERROR write to the console
            if (loglevel == LogLevel.USER || loglevel == LogLevel.ERROR)
            {
                Console.WriteLine((loglevel == LogLevel.ERROR && appendPrefix ? loglevel.ToString() + " " : "") + output);
            }

            // If we're writing to file, use the existing stream
            if (_tofile)
            {
                try
                {
                    _log.WriteLine((appendPrefix ? loglevel.ToString() + " - " + DateTime.Now + " - " : "") + output);
                }
                catch (Exception ex)
                {
                    Console.WriteLine(ex);
                    Console.WriteLine("Could not write to log file!");
                    return false;
                }
            }

            return true;
        }

        /// <summary>
        /// Write the given exact string to the log output
        /// </summary>
        /// <param name="output">String to be written to the log</param>
        /// <param name="line">Line number to write out to</param>
        /// <param name="column">Column number to write out to</param>
        /// <returns>True if the output could be written, false otherwise</returns>
        public bool WriteExact(string output, int line, int column)
        {
            // Set the cursor position (if not being redirected)
            if (!Console.IsOutputRedirected)
            {
                Console.CursorTop = line;
                Console.CursorLeft = column;
            }

            // Write out to the console
            Console.Write(output);

            // If we're writing to file, use the existing stream
            if (_tofile)
            {
                try
                {
                    _log.Write(DateTime.Now + " - " + output);
                }
                catch
                {
                    Console.WriteLine("Could not write to log file!");
                    return false;
                }
            }

            return true;
        }

        /// <summary>
        /// Write the given string as a verbose message to the log output
        /// </summary>
        /// <param name="output">String to be written to the log</param>
        /// <param name="appendPrefix">True if the level and datetime should be prepended to each statement (default), false otherwise</param>
        /// <returns>True if the output could be written, false otherwise</returns>
        public bool Verbose(string output, bool appendPrefix = true)
        {
            return Log(output, LogLevel.VERBOSE, appendPrefix);
        }

        /// <summary>
        /// Write the given string as a user message to the log output
        /// </summary>
        /// <param name="output">String to be written to the log</param>
        /// <param name="appendPrefix">True if the level and datetime should be prepended to each statement (default), false otherwise</param>
        /// <returns>True if the output could be written, false otherwise</returns>
        public bool User(string output, bool appendPrefix = true)
        {
            return Log(output, LogLevel.USER, appendPrefix);
        }

        /// <summary>
        /// Write the given string as a warning to the log output
        /// </summary>
        /// <param name="output">String to be written to the log</param>
        /// <param name="appendPrefix">True if the level and datetime should be prepended to each statement (default), false otherwise</param>
        /// <returns>True if the output could be written, false otherwise</returns>
        public bool Warning(string output, bool appendPrefix = true)
        {
            _warnings = true;
            return Log(output, LogLevel.WARNING, appendPrefix);
        }

        /// <summary>
        /// Writes the given string as an error in the log
        /// </summary>
        /// <param name="output">String to be written to the log</param>
        /// <param name="appendPrefix">True if the level and datetime should be prepended to each statement (default), false otherwise</param>
        /// <returns>True if the output could be written, false otherwise</returns>
        public bool Error(string output, bool appendPrefix = true)
        {
            _errors = true;
            return Log(output, LogLevel.ERROR, appendPrefix);
        }

        /// <summary>
        /// Clear lines beneath the given line in the console
        /// </summary>
        /// <param name="line">Line number to clear beneath</param>
        /// <returns>True</returns>
        public bool ClearBeneath(int line)
        {
            if (!Console.IsOutputRedirected)
            {
                for (int i = line; i < Console.WindowHeight; i++)
                {
                    // http://stackoverflow.com/questions/8946808/can-console-clear-be-used-to-only-clear-a-line-instead-of-whole-console
                    Console.SetCursorPosition(0, Console.CursorTop);
                    Console.Write(new string(' ', Console.WindowWidth));
                    Console.SetCursorPosition(0, i);
                }
            }
            return true;
        }
    }
}
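For reference, a minimal usage sketch of the Logger class above. The namespaces and LogLevel values are assumed to be the ones visible elsewhere in this commit (SabreTools.Helper, SabreTools.Helper.Data); the class name in the sketch is only illustrative.

    using SabreTools.Helper;
    using SabreTools.Helper.Data;

    public static class LoggerUsageSketch
    {
        public static void Run()
        {
            // Log to console and file; the constructor appends a timestamp to the file name
            Logger logger = new Logger(true, "example.log", LogLevel.VERBOSE);

            logger.User("Starting an example run");        // USER level is echoed to the console
            logger.Verbose("Detail kept only in the log"); // filtered against the LogLevel passed above
            logger.Warning("Something looked odd");        // marks the run so Close() reports warnings
            logger.Error("Something actually failed");     // ERROR is echoed to the console as well

            // Prints the total runtime plus any warning/error notices, then closes the log file
            logger.Close();
        }
    }
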
36
SabreTools.Library/Properties/AssemblyInfo.cs
Normal file
36
SabreTools.Library/Properties/AssemblyInfo.cs
Normal file
@@ -0,0 +1,36 @@
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("SabreHelper")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("SabreHelper")]
[assembly: AssemblyCopyright("Copyright © 2016")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]

// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]

// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("225a1afd-0890-44e8-b779-7502665c23a5")]

// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
1243
SabreTools.Library/README.1ST
Normal file
1243
SabreTools.Library/README.1ST
Normal file
File diff suppressed because it is too large
660
SabreTools.Library/README.DEPRECIATED
Normal file
660
SabreTools.Library/README.DEPRECIATED
Normal file
@@ -0,0 +1,660 @@
Removed Commands

This section lists commands that have been retired and will no longer work. Note that not
all commands have a full description; many are copied directly from the original help text
of the program. Some of them were also used in conjunction with other flags that may still
exist and are therefore not listed below.

Below are originally from DATabaseTwo (Standalone) -

  -h, -?, --help           Show this help dialog
  -g, --generate           Start tool in generate mode
  -ga, --generate-all      Start tool in generate all mode
  -i, --ignore             Don't prompt for new sources
  -lsy, --list-systems     List all systems (id <= name)
  -nr, --no-rename         Don't auto-rename games by source/system
  -o, --old                Output DAT in CMP format instead of XML
  -sys=, --system=         System ID to generate from

Below are originally from DATFromDir (Standalone) -

  -h, -?, --help           Show this help dialog
  -m, --noMD5              Don't include MD5 in output
  -nm, --noMD5             Don't include MD5 in output
  -ns, --noSHA1            Don't include SHA1 in output
  -b, --bare               Don't include date in file name
  -u, --unzip              Force unzipping in created DAT
  -f, --files              Treat archives as files
  -o, --old                Output DAT in CMP format instead of XML
  -gz, --gz-files          Allow reading of GZIP files as archives
  -ro, --romba             Read files from a Romba input
  -n=, --name=             Set the internal name of the DAT
  -d=, --desc=             Set the filename and description of the DAT
  -c=, --cat=              Set the category of the DAT
  -v=, --version=          Set the version of the DAT
  -au=, --author=          Set the author of the DAT
  -sd, --superdat          Enable SuperDAT creation
  -t=, --temp=             Set the temporary directory to use

Below are originally from DatToMiss (Standalone) -

  -h, -?, --help           Show this help dialog
  -l, --log                Enable log to file
  -r, --roms               Output roms to miss instead of sets
  -pre=, --prefix=         Set prefix to be printed in front of all lines
  -post=, --postfix=       Set postfix to be printed behind all lines

Below are originally from Filter (Standalone) -

  -h, -?, --help           Show this help dialog
  -out=, --out=            Output directory
  -gn=, --game-name=       Game name to be filtered on
  -rn=, --rom-name=        Rom name to be filtered on
  -rt=, --rom-type=        Rom type to be filtered on
  -sgt=, --greater=        Size greater than or equal to
  -slt=, --less=           Size less than or equal to
  -seq=, --equal=          Size equal to
  -crc=, --crc=            CRC to be filtered on
  -md5=, --md5=            MD5 to be filtered on
  -sha1=, --sha1=          SHA-1 to be filtered on
  -nd, --nodump            Only match nodump roms
  -nnd, --not-nodump       Exclude all nodump roms

Game name, Rom name, CRC, MD5, SHA-1 can do partial matches using asterisks as
follows (case insensitive):
  *00  means ends with '00'
  00*  means starts with '00'
  *00* means contains '00'
  00   means exactly equals '00'

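Purely as an illustration of those matching rules (this is not the retired Filter code itself), the asterisk convention can be interpreted along these lines in C#:

    using System;

    public static class WildcardFilterSketch
    {
        // "*00" ends with, "00*" starts with, "*00*" contains, "00" exact match (case insensitive)
        public static bool Matches(string value, string pattern)
        {
            StringComparison cmp = StringComparison.OrdinalIgnoreCase;
            bool leading = pattern.StartsWith("*");
            bool trailing = pattern.EndsWith("*");
            string core = pattern.Trim('*');

            if (leading && trailing)
            {
                return value.IndexOf(core, cmp) >= 0;
            }
            if (leading)
            {
                return value.EndsWith(core, cmp);
            }
            if (trailing)
            {
                return value.StartsWith(core, cmp);
            }
            return value.Equals(core, cmp);
        }
    }
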
Below are originally from HashSplit (Standalone) -

  -h, -?, --help           Show this help dialog
  -out=                    Output directory

Below are originally from Headerer (Standalone) -

  -?, -h, -help            Show the built-in help text
      Built-in to most of the programs is a basic help text

  -e, -extract             Enable detect and remove mode
      This mode allows the user to detect, store, and remove copier headers from a file
      or folder of files. The headers are backed up and collated by the hash of the un-
      headered file. Files are then output without the detected copier header alongside
      the originals with the suffix .new. No input files are altered in the process.

  -r, -restore             Restore headers to file(s)
      This mode uses stored copier headers and reapplies them to files if they match the
      included hash. More than one header can be applied to a file, so they will be out-
      put to new files, suffixed with .newX, where X is a number. No input files are
      altered in the process.

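A rough sketch of the restore step as described above. The header store keyed by the hash of the headerless file is a hypothetical stand-in here, not the actual Headerer implementation:

    using System;
    using System.Collections.Generic;
    using System.IO;
    using System.Security.Cryptography;

    public static class HeaderRestoreSketch
    {
        // Reapply every stored header that matches the hash of the headerless file,
        // writing each result to <file>.newX and leaving the input untouched.
        public static void Restore(string path, IDictionary<string, List<byte[]>> headersByHash)
        {
            byte[] contents = File.ReadAllBytes(path);
            string sha1;
            using (SHA1 hasher = SHA1.Create())
            {
                sha1 = BitConverter.ToString(hasher.ComputeHash(contents)).Replace("-", "").ToLowerInvariant();
            }

            if (!headersByHash.TryGetValue(sha1, out List<byte[]> headers))
            {
                return; // no stored header matches this file
            }

            for (int i = 0; i < headers.Count; i++)
            {
                string outPath = path + ".new" + i;
                using (FileStream output = File.Create(outPath))
                {
                    output.Write(headers[i], 0, headers[i].Length);
                    output.Write(contents, 0, contents.Length);
                }
            }
        }
    }
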
Below are originally from MergeDAT / DiffDat (Standalone) -

-h, -?, --help Show this help dialog
-l, --log Enable log to file
-d, --diff Switch to diffdat mode
-di, --diff Switch to diffdat mode
-m, --merge Enable deduping in the created DAT
-dd, --dedup Enable deduping in the created DAT
-b, --bare Don't include date in file name
-u, --unzip Force unzipping in created DAT
-o, --old Output DAT in CMP format instead of XML
-n=, --name= Set the name of the DAT
-d=, --desc= Set the description of the DAT
-c=, --cat= Set the category of the DAT
-v=, --version= Set the version of the DAT
-a=, --author= Set the author of the DAT

Below are originally from OfflineMerge (Standalone) -

-h, -?, --help Show this help dialog
-f, --fake Replace all hashes and sizes by the default

-Inputs:
-com= Complete current DAT
-fix= Complete current Missing
-new= New Complete DAT
com= Complete current DAT
fix= Complete current Missing
new= New Complete DAT

This program will output the following DATs:
(a) Net New - (NewComplete)-(Complete)
(b) Unneeded - (Complete)-(NewComplete)
(c) New Missing - (Net New)+(Missing-(Unneeded))
(d) Have - (NewComplete)-(New Missing)
OR (Complete or NewComplete)-(Missing) if one is missing

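The four outputs are plain set operations over the three inputs. A compact sketch of
the arithmetic, operating on sets of item keys (e.g. SHA-1 hashes) rather than real
DAT objects; the method and parameter names are invented for the example:

    // Requires: using System.Collections.Generic;
    public static void SplitOffline(ISet<string> complete, ISet<string> missing, ISet<string> newComplete,
        out HashSet<string> netNew, out HashSet<string> unneeded, out HashSet<string> newMissing, out HashSet<string> have)
    {
        netNew = new HashSet<string>(newComplete);   netNew.ExceptWith(complete);      // (a) Net New
        unneeded = new HashSet<string>(complete);    unneeded.ExceptWith(newComplete); // (b) Unneeded
        newMissing = new HashSet<string>(missing);   newMissing.ExceptWith(unneeded);  // (c) New Missing
        newMissing.UnionWith(netNew);
        have = new HashSet<string>(newComplete);     have.ExceptWith(newMissing);      // (d) Have
    }
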
Below are originally from SabreTools / DATabase -

-a, --add Add a new system or source to the database
Add a new system or source to the DAT database, including additional information.

manu= Manufacturer name (system only)
system= System name (system only)
source= Source name (source only)
url= URL (source only)

-manu= Manufacturer name
Used only when adding a system to the database

-system= System name
Used only when adding a system to the database

-source= Source name
Used only when adding a source to the database

-url= Source URL
Used only when adding a source to the database

-ad, --all-diff Enable output of all diff variants

-c=, --cat= Set the category of the DAT

-cc, --convert-cmp Convert any DAT to CMP
-clean Clean game names according to WoD standards
-out= Output directory
out= Output directory

-cv, --convert Enable conversion of input files to unarchived folders
Using a folder or set of folders, rebuild to another folder.

-dat= Name of the DAT to be used as a filter
A supplied DAT file to be used as a filter in conversion. If a file is found in the
DAT, it will be skipped on output. This allows convert to act like an anti-sort,
useful for finding useless files in an input folder.

-out= Set the name of the output directory
This sets an output folder to be used when the files are created. If a path
is not defined, the application directory is used instead.

-t=, --temp= Set the name of the temporary directory
Optionally, a temp folder can be supplied in the case the default temp directory
(inside the running folder) is not preferred. This is used for any operations
that require an archive to be extracted.

-del, --delete Delete input files
This is a WIP flag that allows for deletion of input files once they have been
rebuilt. It is not recommended for normal use because it does not discriminate
whether or not the input files were rebuilt before deletion.

-t7z Enable Torrent 7zip output [NOT IMPLEMENTED]
Instead of outputting the files to folder, files will be rebuilt to Torrent7Zip (T7Z)
files. This format is based on the LZMA container format 7zip, but with custom header
information. This is currently unused by any major application.

-tar Enable Tape ARchive output [NOT IMPLEMENTED]
Instead of outputting the files to folder, files will be rebuilt to Tape ARchive (TAR)
files. This format is a standardized storage archive without any compression, usually
used with other compression formats around it. It is widely used in backup applications
and source code archives.

-tgz Enable Torrent GZ output
Instead of outputting the files to folder, files will be rebuilt to TorrentGZ (TGZ)
files. This format is based on the GZip archive format, but with custom header
information and a file name replaced by the SHA-1 of the file inside. This is
primarily used by external tool Romba (https://github.com/uwedeportivo/romba), but
may be used more widely in the future.

-r, --romba Enable Romba depot directory output
As an extension of the parent flag, this outputs the TGZ files into directories
based on the structure used by Romba. This uses nested folders using the first
4 bytes of the SHA-1, 1 byte for each layer of the directory name. It also
includes two auxiliary files, .romba_size and .romba_size.backup, that have the
compressed size of the folder inside for use with Romba.

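The depot layout is derived entirely from the SHA-1 of the stored file: the first
four bytes become four nested directory levels and the full hash (plus .gz) is the
file name. A sketch of the path construction, with an invented helper name:

    // Sketch: build a Romba-style depot path from a 40-character SHA-1 hex string,
    // e.g. "00112233..." -> depot/00/11/22/33/00112233....gz
    // Requires: using System.IO;
    public static string DepotPath(string depotRoot, string sha1Hex)
    {
        sha1Hex = sha1Hex.ToLowerInvariant();
        return Path.Combine(depotRoot,
            sha1Hex.Substring(0, 2),
            sha1Hex.Substring(2, 2),
            sha1Hex.Substring(4, 2),
            sha1Hex.Substring(6, 2),
            sha1Hex + ".gz");
    }
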
-tlrz Enable Torrent Long-Range Zip output [NOT IMPLEMENTED]
Instead of outputting the files to folder, files will be rebuilt to Torrent Long-Range
Zip (TLRZ) files. This format is based on the LRZip file format as defined at
https://github.com/ckolivas/lrzip but with custom header information. This is currently
unused by any major application.

-trar Enable Torrent RAR output [NOT IMPLEMENTED]
Instead of outputting files to folder, files will be rebuilt to Torrent RAR (TRAR)
files. This format is based on the RAR proprietary format but with custom header
information. This is currently unused by any major application.

-txz Enable Torrent XZ output [NOT IMPLEMENTED]
Instead of outputting files to folder, files will be rebuilt to Torrent XZ (TXZ) files.
This format is based on the LZMA container format XZ, but with custom header
information. This is currently unused by any major application.

-tzip Enable Torrent Zip output
Instead of outputting files to folder, files will be rebuilt to TorrentZip (TZ) files.
This format is based on the ZIP archive format, but with custom header information.
This is primarily used by external tool RomVault (http://www.romvault.com/) and is
already widely used.

-7z={0} Set scanning level for 7z archives
-gz={2} Set scanning level for GZip archives
-rar={2} Set scanning level for RAR archives
-zip={0} Set scanning level for ZIP archives
For each of the major archive types recognized by the libraries used by this
program, scan the archive in one of the following ways:
0 Hash both archive and its contents
1 Only hash contents of the archive
2 Only hash archive itself (treat like a regular file)

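Each value therefore selects one of three hashing strategies per archive type. A small
sketch of how such a level could be interpreted; the enum and delegates here are
illustrative, not the library's actual types:

    // Illustrative only: 0 = archive and contents, 1 = contents only, 2 = archive as a plain file.
    // Requires: using System;
    public enum ArchiveScanLevel { Both = 0, ContentsOnly = 1, ArchiveOnly = 2 }

    // The two delegates stand in for the real hashing routines.
    public static void ScanArchive(string path, ArchiveScanLevel level,
        Action<string> hashAsFile, Action<string> hashContents)
    {
        if (level == ArchiveScanLevel.Both || level == ArchiveScanLevel.ArchiveOnly)
            hashAsFile(path);      // treat the archive itself as a regular file
        if (level == ArchiveScanLevel.Both || level == ArchiveScanLevel.ContentsOnly)
            hashContents(path);    // hash each entry inside the archive
    }
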
-cm, --convert-miss Convert from DAT to miss
-r, --roms Output roms to miss instead of sets
-gp, --game-prefix Add game name as a prefix to each item
-pre=, --prefix= Set prefix to be printed in front of all lines
-post=, --postfix= Set postfix to be printed behind all lines
-q, --quotes Put double-quotes around each item
-ae=, --add-ext= Add an extension to each item
-re=, --rep-ext= Replace all extensions with specified
-ro, --romba Output roms in Romba format (requires SHA-1)
-tsv, --tsv Output roms in Tab-Separated Value format
-cs, --convert-sd Convert any DAT to SabreDAT
-clean Clean game names according to WoD standards
-out= Output directory
-cr, --convert-rc Convert any DAT to RomCenter
-clean Clean game names according to WoD standards
-out= Output directory
-cr, --convert-rv Convert an XML DAT to RV
out= Output directory
-cx, --convert-xml Convert a RV DAT to XML
out= Output directory
-cx, --convert-xml Convert any DAT to XML
-clean Clean game names according to WoD standards
-out= Output directory
out= Output directory

-d=, --desc= Set the description of the DAT
Set the description for the output DAT(s) [default is the folder name]

-dp, -dfdp Create a DAT from each input directory using parallel code
Create a DAT file from an input directory or set of files. By default, this will
output a DAT named based on the input directory and the current date. It will also
treat all archives as possible games and add all three hashes for each file.

-nm, -noMD5 Don't include MD5 in output
This allows the user to skip calculating the MD5 for each of the files which will
speed up the creation of the DAT.

-ns, -noSHA1 Don't include SHA1 in output
This allows the user to skip calculating the SHA-1 for each of the files which will
speed up the creation of the DAT.

-b, -bare Don't include date in file name
Normally, the DAT will be created with the date in the file name. This flag removes
that but keeps the date tag intact.

-u, -unzip Force unzipping in created DAT
This sets the 'forcepacking="unzip"' flag in the outputted DAT. When used with a
file manager that allows for it, this will force the outputted files to be in
subdirectories instead of archives.

-f, -files Treat archives as files
Instead of trying to enumerate the files within archives, treat the archives as
files themselves. This is good for uncompressed sets that include archives that
should be read as-is.

-oc, --output-cmp Output in CMP format
Add outputting the created DAT to clrmamepro format

-om, --output-miss Output in Missfile format
Add outputting the created DAT to GoodTools miss format

-omd5, --output-md5 Output in MD5 format
Add outputting the created DAT to MD5 format

-or, --output-rc Output in RomCenter format
Add outputting the created DAT to RomCenter format

-os, --output-sd Output in SabreDAT format
Add outputting the created DAT to SabreDAT XML format

-osfv, --output-sfv Output in SFV format
Add outputting the created DAT to SFV format

-osha1, -output-sha1 Output in SHA-1 format
Add outputting the created DAT to SHA1 format

-ox, -output-xml Output in Logiqx XML format (default)
Add outputting the created DAT to Logiqx XML format

-gz, -gz-files Allow reading of GZIP files as archives
Since GZip files are not commonly used for file storage, this flag allows for
any GZip archives to have their contents hashed instead.

-ro, -romba Read files from a Romba input
Allow for reading of GZipped files as if they were from a Romba depot. This
implies that the files will be in the TorrentGZ format as well, including
naming convention.

-f=, -filename= Set the external name of the DAT
Set the base filename for the output DAT(s) [default is folder name plus date]

-n=, -name= Set the internal name of the DAT
Set the internal name for the output DAT(s) [default is folder name plus date]

-de=, -desc= Set the description of the DAT
Set the description for the output DAT(s) [default is the folder name]

-c=, -cat= Set the category of the DAT
Set the category for the output DAT(s) [default is blank]

-v=, -version= Set the version of the DAT
Set the version for the output DAT(s) [default is blank]

-au=, -author= Set the author of the DAT
Set the author for the output DAT(s) [default is blank]

-sd, -superdat Enable SuperDAT creation
Set the type flag to "SuperDAT" for the output DAT as well as preserving the
directory structure of the inputted folder, if applicable

-t=, -temp= Set the name of the temporary directory
Optionally, a temp folder can be supplied in the case the default temp directory
(inside the running folder) is not preferred. This is used for any operations
that require an archive to be extracted.

-mt={4} Amount of threads to use
Optionally, set the number of threads to use for the multithreaded operations.
The default is 4 threads; -1 means unlimited threads created.

exta= First extension to split by
extb= Second extension to split by

-f, --filter Filter DATs by inputted criteria
-out=, --out= Output directory
-gn=, --game-name= Game name to be filtered on
-rn=, --rom-name= Rom name to be filtered on
-rt=, --rom-type= Rom type to be filtered on
-sgt=, --greater= Size greater than or equal to
-slt=, --less= Size less than or equal to
-seq=, --equal= Size equal to
-crc=, --crc= CRC to be filtered on
-md5=, --md5= MD5 to be filtered on
-sha1=, --sha1= SHA-1 to be filtered on
-nd, --nodump Only match nodump roms
-nnd, --not-nodump Exclude all nodump roms

-g, -generate Start tool in generate mode
This starts the tool in DATabase generate mode. This will allow for creation of
managed DATs based on the inputted systems and sources as defined by other flags.

-system= Comma-separated list of system IDs
-source= Comma-separated list of source IDs
-out= Output directory

-system= System ID to generate from
Set the system ID to be used to create an output DAT

-nr, -no-rename Don't auto-rename games
By default, games are automatically renamed with the source (for system-derived
DATs), system (for source-derived DATs), or both (for the complete merged DAT).
This flag disables the automatic renaming and uses the game names as they are.

-o, -old Output DAT in CMP format instead of XML
As a holdover from only two output formats, this tool defaults to Logiqx XML
DAT outputs. If this flag is enabled, a clrmamepro DAT will be created instead.

-ga, -generate-all Start tool in generate all mode
This starts the tool in DATabase generate all mode. This will allow for creation of
managed DATs based on the entire DAT folder.

-nr, -no-rename Don't auto-rename games
By default, games are automatically renamed with the source (for system-derived
DATs), system (for source-derived DATs), or both (for the complete merged DAT).
This flag disables the automatic renaming and uses the game names as they are.

-o, -old Output DAT in CMP format instead of XML
As a holdover from only two output formats, this tool defaults to Logiqx XML
DAT outputs. If this flag is enabled, a clrmamepro DAT will be created instead.

-old, --romvault Produce a DAT in RV format

-hd, --headerer Backup or restore copier headers from a variety of file types
Headerer is meant as an intermediary between header skipper files (which, a bit
apart from their name, do not just show how to skip copier headers) and rom managers
that do not use them.

By default, this will detect, store, and remove copier headers from a file or folder
of files. The headers are backed up and collated by the hash of the unheadered file.
Files are then output without the detected copier header alongside the originals with
the suffix .new. No input files are altered in the process.

The following systems have headers that this program can work with:
- Atari 7800
- Atari Lynx
- Commodore PSID Music
- NEC PC-Engine / TurboGrafx 16
- Nintendo Famicom / Nintendo Entertainment System
- Nintendo Famicom Disk System
- Nintendo Super Famicom / Super Nintendo Entertainment System
- Nintendo Super Famicom / Super Nintendo Entertainment System SPC Music

-re, --restore Restore headers to file(s)
Instead of the default extraction, this flag enables use of stored copier headers
to reapply them to files if they match the included hash. More than one header can
be applied to a file, so they will be output to new files, suffixed with .newX,
where X is a number. No input files are altered in the process.

-out= Set the name of the output directory
This sets an output folder to be used when the files are created. If a path
is not defined, the application directory is used instead.

-i, -import Start tool in import mode
This starts the tool in DATabase import mode. This will allow for hashing of new
DAT files in the dats folder. If a source for the DAT cannot be automatically
determined, the user will be prompted to select a source or enter a new one.

-ig, -ignore Don't prompt for new sources
If a source cannot be determined, then use the "Default" source instead of
asking the user.

-l, --log Enable logging of program output

-lso, -list-sources List all sources (id <= name)
List all sources in the database, ordered by the internal ID and mapped to the name

-lsy, -list-systems List all systems (id <= name)
List all systems in the database, ordered by the internal ID and mapped to the name

-nd, --nodump Only match nodump roms
-nnd, --not-nodump Exclude all nodump roms
These flags allow for filtering based on the nodump status in the source DAT(s)

-ol, --offmerge Update DATS for offline arrays
This is a power user tool for dealing with offline arrays, specifically creating
have and miss lists without having to reconnect drives.

This option will output the following DATs based on the inputs as defined below:
(a) Net New = (New Complete)-(Current Complete)
(b) Unneeded = (Current Complete)-(New Complete)
(c) New Missing = (Net New)+(Current Missing-(Unneeded))
(d) Have = (New Complete)-(New Missing)
OR (Complete or NewComplete)-(Missing) if one is missing

-com= Complete current DAT
This is a merged and deduped DAT that includes all DATs that should be checked

-fix= Complete current Missing
This is a merged and deduped DAT that includes all fixdats representing missing
files

-new= New Complete DAT
This is a merged and deduped DAT that includes all current and updated DATs that
should be checked

-fk, -fake Replace all hashes and sizes by the default
For further offline management, this option will replace all hashes in the output
DATs to be their 0-byte equivalents. This allows for file managers to be used
to still deal with the DATs without reconnecting the drives

out= Output directory

-rm, -remove Remove a system or source from the database
Remove a system or source from the DAT database so it can no longer be used

-system= System ID
Internal ID of the system to be removed

-source= Source ID
Internal ID of the source to be removed

system= Comma-separated list of system IDs
source= Comma-separated list of source IDs

-tm, --trim-merge Consolidate DAT into a single game and trim entries
In the cases where files will have too long a name, this allows for trimming
the name of the files to the NTFS maximum length at most

-rd=, --root-dir= Set the root directory for trimming calculation
In the case that the files will not be stored from the root directory,
a new root can be set for path length calculations

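As a rough illustration of the trimming described for -tm/-rd= (the length limit and
the helper name are assumptions for the example, not code from this commit):

    // Sketch: trim a rom name so root + separator + name stays within a maximum path length.
    // Requires: using System; using System.IO;
    public static string TrimName(string romName, string rootDir, int maxPath = 255)
    {
        int available = maxPath - rootDir.Length - 1;   // room left after the root and separator
        if (available <= 0 || romName.Length <= available)
            return romName;
        string ext = Path.GetExtension(romName);        // keep the extension intact
        int keep = Math.Max(1, available - ext.Length);
        return romName.Substring(0, keep) + ext;
    }
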
-nr, --no-rename Keep game names instead of using '!'
By default, this will rename games to a single "!" character. This flag enables
the original game names to be kept

-df, --disable-force Disable forceunzipping
By default, this will set the forcepacking="unzip" flag on the output DAT. This
flag disables this output

-u, --unzip Force unzipping in created DAT
This sets the 'forcepacking="unzip"' flag in the outputted DAT. When used with a
file manager that allows for it, this will force the outputted files to be in
subdirectories instead of archives.

Below are originally from SimpleSort (Standalone) -

-?, -h, --help Show the built-in help text
Built-in to most of the programs is a basic help text

-dat= Name of the DAT to be used for the various options
This DAT file is required for everything that SimpleSort currently does, so not
supplying one will result in an error. Depending on the additional flags that are
supplied, the DAT will be used differently, as described below. By default, the
DAT is used to check which files need to be rebuilt.

-out= Set the name of the output directory
This sets an output folder to be used by various parts of the program. As with the
DAT file, this input is used differently based on the flags that are supplied. By
default, the output folder is used as the target to rebuild files to.

-t=, --temp= Set the name of the temporary directory
Optionally, a temp folder can be supplied in the case the default temp directory
(inside the running folder) is not preferred. This is used for any operations that
require an archive to be extracted.

-d, --delete Enable deletion of the input files
Optionally, the input files, once processed, can be deleted. This can be useful
when the original file structure is no longer needed or if there is limited space
on the source drive.

-qs, --quick Enable quick scanning of archives
For all archives, if this flag is enabled, it will only use the header information
to get the archive entries' file information. The upside to this is that it is much
quicker than extracting all files to the temp folder. On the downside, it can only
get the CRC and size from most archive formats, leading to possible issues.

-ad, --add-date Write dates for each file parsed, if available
If this flag is set, the date in the DAT will be used for the output file
instead of the standard date and time for TorrentZip. This will technically
invalidate the output files as proper TorrentZip files because the date will not
match the standard.

-v, --verify Enable verification of output directory
This overrides the default rebuilding and only requires the DAT and the output folder.
Here, the DAT is used to verify the output directory directly and then output a
simple FixDAT. This can be misleading, currently, because it only checks for exact
matches.

-c, --convert Enable filtering by input DAT
This overrides the default rebuilding by using the DAT as a filter of what not to
output. If no DAT is supplied, the entire input folder will be rebuilt file-by-file
to the output folder.

-tgz Enable Torrent GZ output
Instead of outputting the files to ZIP archives, files will be rebuilt to TorrentGZ
(TGZ) files. This format is based on the GZip archive format, but with custom header
information and a file name replaced by the SHA-1 of the file inside. This is
primarily used by external tool Romba (https://github.com/uwedeportivo/romba), but
may be used more widely in the future.

-r, --romba Enable Romba depot directory output
As an extension of the parent flag, this outputs the TGZ files into directories
based on the structure used by Romba. This uses nested folders using the first
4 bytes of the SHA-1, 1 byte for each layer of the directory name. It also
includes two auxiliary files, .romba_size and .romba_size.backup, that have the
compressed size of the folder inside for use with Romba.

-tzip Enable TorrentZip output
Instead of outputting the files to ZIP archives, files will be rebuilt to TorrentZip
(TZip) files. This format is based on the ZIP archive format, but with custom header
information. This is primarily used by external tool RomVault (http://www.romvault.com/)
and is already widely used.

-do, --directory Enable outputting files uncompressed
Instead of outputting the files to ZIP archives, files will be rebuilt to named
subdirectories within the output folder. This is useful for when the DAT does not
already have the flag specified.

-h=, --header= Remove headers from hash calculations
If this is set, then all files that have copier headers that are detected will
have them removed from the hash calculation. This will allow for a headered collection
to be hashed without possibly variant information. If a particular header skipper is
defined, and that skipper exists, then it will be used instead of trying to find one
that matches.

-7z={0} Set scanning level for 7z archives
-gz={2} Set scanning level for GZip archives
-rar={2} Set scanning level for RAR archives
-zip={0} Set scanning level for ZIP archives
For each of the major archive types recognized by the libraries used by this
program, scan the archive in one of the following ways:
0 Hash both archive and its contents
1 Only hash contents of the archive
2 Only hash archive itself (treat like a regular file)

-ud, --update-dat Output updated DAT (rebuild only)
Once the files that were able to be rebuilt are taken care of, a DAT of the files
that could not be matched will be output to the program directory.

Below are originally from SingleGame (Standalone) -

-r=rootdir Set the directory name for path size
-rd=, --root-dir= Set the directory name for path size
-l, --log Enable logging to file
-nr, --no-rename Disable single-game mode
-df, --disable-force Disable forceunzipping

Below are originally from TGZConvert / TGZTest (Standalone) -

-?, -h, -help Show the built-in help text
Built-in to most of the programs is a basic help text

-out= Set the name of the output directory
This sets an output folder to be used when files are rebuilt. It also serves
as the base folder if Romba mode is enabled. See -romba for more details

-t=, -temp= Set the name of the temporary directory
Optionally, a temp folder can be supplied in the case the default temp directory
(inside the running folder) is not preferred. This is used for any operations that
require an archive to be extracted.

-d, -delete Enable deletion of the input files
Optionally, the input files, once processed, can be deleted. This can be useful
when the original file structure is no longer needed or if there is limited space
on the source drive.

-r, -romba Enable Romba depot directory output
Optionally, this outputs the TGZ files into directories based on the structure
used by Romba. This uses nested folders using the first 4 bytes of the SHA-1,
1 byte for each layer of the directory name. It also includes two auxiliary
files, .romba_size and .romba_size.backup, that have the compressed size of the
folder inside for use with Romba.

-7z={0} Set scanning level for 7z archives
-gz={2} Set scanning level for GZip archives
-rar={2} Set scanning level for RAR archives
-zip={0} Set scanning level for ZIP archives
For each of the major archive types recognized by the libraries used by this
program, scan the archive in one of the following ways:
0 Hash both archive and its contents
1 Only hash contents of the archive
2 Only hash archive itself (treat like a regular file)

223 SabreTools.Library/SabreTools.Library.csproj Normal file
@@ -0,0 +1,223 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project ToolsVersion="14.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
|
||||
<PropertyGroup>
|
||||
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
|
||||
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
|
||||
<ProjectGuid>{225A1AFD-0890-44E8-B779-7502665C23A5}</ProjectGuid>
|
||||
<OutputType>Library</OutputType>
|
||||
<AppDesignerFolder>Properties</AppDesignerFolder>
|
||||
<RootNamespace>SabreTools.Helper</RootNamespace>
|
||||
<AssemblyName>SabreTools.Helper</AssemblyName>
|
||||
<TargetFrameworkVersion>v4.5.2</TargetFrameworkVersion>
|
||||
<FileAlignment>512</FileAlignment>
|
||||
<NuGetPackageImportStamp>
|
||||
</NuGetPackageImportStamp>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
|
||||
<DebugSymbols>true</DebugSymbols>
|
||||
<DebugType>full</DebugType>
|
||||
<Optimize>false</Optimize>
|
||||
<OutputPath>..\..\Debug\</OutputPath>
|
||||
<DefineConstants>DEBUG;TRACE</DefineConstants>
|
||||
<ErrorReport>prompt</ErrorReport>
|
||||
<WarningLevel>4</WarningLevel>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
|
||||
<DebugType>pdbonly</DebugType>
|
||||
<Optimize>true</Optimize>
|
||||
<OutputPath>..\..\Release\</OutputPath>
|
||||
<DefineConstants>TRACE</DefineConstants>
|
||||
<ErrorReport>prompt</ErrorReport>
|
||||
<WarningLevel>4</WarningLevel>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Debug|x64'">
|
||||
<DebugSymbols>true</DebugSymbols>
|
||||
<OutputPath>..\..\Debug-x64</OutputPath>
|
||||
<DefineConstants>DEBUG;TRACE</DefineConstants>
|
||||
<DebugType>full</DebugType>
|
||||
<PlatformTarget>x64</PlatformTarget>
|
||||
<ErrorReport>prompt</ErrorReport>
|
||||
<CodeAnalysisRuleSet>MinimumRecommendedRules.ruleset</CodeAnalysisRuleSet>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Release|x64'">
|
||||
<OutputPath>..\..\Release-x64</OutputPath>
|
||||
<DefineConstants>TRACE</DefineConstants>
|
||||
<Optimize>true</Optimize>
|
||||
<DebugType>
|
||||
</DebugType>
|
||||
<PlatformTarget>x64</PlatformTarget>
|
||||
<ErrorReport>prompt</ErrorReport>
|
||||
<CodeAnalysisRuleSet>MinimumRecommendedRules.ruleset</CodeAnalysisRuleSet>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Mono|AnyCPU'">
|
||||
<DebugSymbols>true</DebugSymbols>
|
||||
<OutputPath>..\..\Debug-mono</OutputPath>
|
||||
<DefineConstants>TRACE;DEBUG;MONO</DefineConstants>
|
||||
<DebugType>full</DebugType>
|
||||
<PlatformTarget>x64</PlatformTarget>
|
||||
<ErrorReport>prompt</ErrorReport>
|
||||
<CodeAnalysisRuleSet>MinimumRecommendedRules.ruleset</CodeAnalysisRuleSet>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Mono|x64'">
|
||||
<DebugSymbols>true</DebugSymbols>
|
||||
<OutputPath>..\..\Debug-mono-x64</OutputPath>
|
||||
<DefineConstants>TRACE;DEBUG;MONO</DefineConstants>
|
||||
<DebugType>full</DebugType>
|
||||
<PlatformTarget>x64</PlatformTarget>
|
||||
<ErrorReport>prompt</ErrorReport>
|
||||
<CodeAnalysisRuleSet>MinimumRecommendedRules.ruleset</CodeAnalysisRuleSet>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup>
|
||||
<RunPostBuildEvent>OnBuildSuccess</RunPostBuildEvent>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<Reference Include="Mono.Data.Sqlite, Version=2.0.0.0, Culture=neutral, PublicKeyToken=0738eb9f132ed756, processorArchitecture=MSIL">
|
||||
<HintPath>..\packages\Mono.Data.Sqlite.Portable.1.0.3.5\lib\net4\Mono.Data.Sqlite.dll</HintPath>
|
||||
<Private>True</Private>
|
||||
</Reference>
|
||||
<Reference Include="SevenZipSharp, Version=0.64.3890.29348, Culture=neutral, PublicKeyToken=20de82c62b055c88, processorArchitecture=MSIL">
|
||||
<HintPath>..\packages\SevenZipSharp.0.64\lib\SevenZipSharp.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="SharpCompress, Version=0.15.2.0, Culture=neutral, PublicKeyToken=afb0a02973931d96, processorArchitecture=MSIL">
|
||||
<HintPath>..\packages\SharpCompress.0.15.2\lib\net45\SharpCompress.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System" />
|
||||
<Reference Include="System.ComponentModel.DataAnnotations" />
|
||||
<Reference Include="System.Core" />
|
||||
<Reference Include="System.Data.Portable, Version=4.0.0.0, Culture=neutral, PublicKeyToken=59e704a76bc4613a, processorArchitecture=MSIL">
|
||||
<HintPath>..\packages\Mono.Data.Sqlite.Portable.1.0.3.5\lib\net4\System.Data.Portable.dll</HintPath>
|
||||
<Private>True</Private>
|
||||
</Reference>
|
||||
<Reference Include="System.IO.Compression" />
|
||||
<Reference Include="System.IO.Compression.FileSystem" />
|
||||
<Reference Include="System.Transactions" />
|
||||
<Reference Include="System.Transactions.Portable, Version=4.0.0.0, Culture=neutral, PublicKeyToken=59e704a76bc4613a, processorArchitecture=MSIL">
|
||||
<HintPath>..\packages\Mono.Data.Sqlite.Portable.1.0.3.5\lib\net4\System.Transactions.Portable.dll</HintPath>
|
||||
<Private>True</Private>
|
||||
</Reference>
|
||||
<Reference Include="System.ValueTuple, Version=4.0.1.0, Culture=neutral, PublicKeyToken=cc7b13ffcd2ddd51, processorArchitecture=MSIL">
|
||||
<HintPath>..\packages\System.ValueTuple.4.3.0\lib\netstandard1.0\System.ValueTuple.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.Web" />
|
||||
<Reference Include="System.Xml.Linq" />
|
||||
<Reference Include="System.Data.DataSetExtensions" />
|
||||
<Reference Include="Microsoft.CSharp" />
|
||||
<Reference Include="System.Data" />
|
||||
<Reference Include="System.Net.Http" />
|
||||
<Reference Include="System.Xml" />
|
||||
<Reference Include="AlphaFS">
|
||||
<HintPath>..\packages\AlphaFS.2.1.2\lib\net452\AlphaFS.dll</HintPath>
|
||||
</Reference>
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<Compile Include="Data\Constants.cs" />
|
||||
<Compile Include="Data\Flags.cs" />
|
||||
<Compile Include="Data\Globals.cs" />
|
||||
<Compile Include="Dats\Partials\DatFile.Manipulate.cs" />
|
||||
<Compile Include="Dats\Partials\DatFile.ConvertUpdate.cs" />
|
||||
<Compile Include="Dats\Partials\DatFile.DFD.cs" />
|
||||
<Compile Include="Dats\Partials\DatFile.Parsers.cs" />
|
||||
<Compile Include="Dats\Filter.cs" />
|
||||
<Compile Include="Dats\Partials\DatFile.Rebuild.cs" />
|
||||
<Compile Include="Dats\Partials\DatFile.Splitters.cs" />
|
||||
<Compile Include="Dats\Partials\DatFile.Statistics.cs" />
|
||||
<Compile Include="Dats\Partials\DatFile.Writers.cs" />
|
||||
<Compile Include="External\CoreRarArchive.cs" />
|
||||
<Compile Include="External\NaturalSort\NaturalComparer.cs" />
|
||||
<Compile Include="External\NaturalSort\NaturalReversedComparer.cs" />
|
||||
<Compile Include="External\OptimizedCRC.cs" />
|
||||
<Compile Include="External\Traverse.cs" />
|
||||
<Compile Include="External\xxHash\xxHash.cs" />
|
||||
<Compile Include="External\Zlib\CRC32.cs" />
|
||||
<Compile Include="External\Zlib\Deflate.cs" />
|
||||
<Compile Include="External\Zlib\DeflateStream.cs" />
|
||||
<Compile Include="External\Zlib\GZipStream.cs" />
|
||||
<Compile Include="External\Zlib\Inflate.cs" />
|
||||
<Compile Include="External\Zlib\InfTree.cs" />
|
||||
<Compile Include="External\Zlib\ParallelDeflateOutputStream.cs" />
|
||||
<Compile Include="External\Zlib\Tree.cs" />
|
||||
<Compile Include="External\Zlib\Zlib.cs" />
|
||||
<Compile Include="External\Zlib\ZlibBaseStream.cs" />
|
||||
<Compile Include="External\Zlib\ZlibCodec.cs" />
|
||||
<Compile Include="External\Zlib\ZlibConstants.cs" />
|
||||
<Compile Include="External\Zlib\ZlibStream.cs" />
|
||||
<Compile Include="Dats\Archive.cs" />
|
||||
<Compile Include="Dats\BiosSet.cs" />
|
||||
<Compile Include="Dats\DatFile.cs" />
|
||||
<Compile Include="Dats\DatItem.cs" />
|
||||
<Compile Include="Dats\Disk.cs" />
|
||||
<Compile Include="Dats\Machine.cs" />
|
||||
<Compile Include="Dats\Release.cs" />
|
||||
<Compile Include="Dats\Sample.cs" />
|
||||
<Compile Include="Dats\Rom.cs" />
|
||||
<Compile Include="External\SupportedFiles\ZipFileEntry.cs" />
|
||||
<Compile Include="External\SupportedFiles\ZipFile.cs" />
|
||||
<Compile Include="Help\Feature.cs" />
|
||||
<Compile Include="Help\Help.cs" />
|
||||
<Compile Include="Skippers\Skipper.cs" />
|
||||
<Compile Include="Skippers\SkipperRule.cs" />
|
||||
<Compile Include="Tools\ArchiveTools.cs" />
|
||||
<Compile Include="Tools\FileTools.cs" />
|
||||
<Compile Include="Tools\DatabaseTools.cs" />
|
||||
<Compile Include="Data\Enums.cs" />
|
||||
<Compile Include="Logger.cs" />
|
||||
<Compile Include="Properties\AssemblyInfo.cs" />
|
||||
<Compile Include="Tools\Style.cs" />
|
||||
<Compile Include="Data\Build.cs" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<None Include="Licenses\LICENSE">
|
||||
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
|
||||
</None>
|
||||
<None Include="packages.config" />
|
||||
<None Include="README.1ST">
|
||||
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
|
||||
</None>
|
||||
<None Include="README.DEPRECIATED" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<None Include="Licenses\LICENSE.alphafs.txt">
|
||||
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
|
||||
</None>
|
||||
<None Include="Licenses\LICENSE.jzlib.txt">
|
||||
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
|
||||
</None>
|
||||
<None Include="Licenses\LICENSE.sevenzipsharp.txt">
|
||||
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
|
||||
</None>
|
||||
<None Include="Licenses\LICENSE.sharpcompress.txt">
|
||||
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
|
||||
</None>
|
||||
<None Include="Licenses\LICENSE.zlib.txt">
|
||||
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
|
||||
</None>
|
||||
<Content Include="Skippers\*.xml">
|
||||
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
|
||||
</Content>
|
||||
<Content Include="*.dll">
|
||||
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
|
||||
</Content>
|
||||
</ItemGroup>
|
||||
<ItemGroup />
|
||||
<Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
|
||||
<Import Project="..\packages\Mono.Data.Sqlite.Portable.1.0.3.5\tools\Mono.Data.Sqlite.Portable.targets" Condition="Exists('..\packages\Mono.Data.Sqlite.Portable.1.0.3.5\tools\Mono.Data.Sqlite.Portable.targets')" />
|
||||
<Target Name="EnsureMonoDataSqlitePortableImported" BeforeTargets="BeforeBuild" Condition="'$(MonoDataSqlitePortableImported)' == ''">
|
||||
<Error Condition="!Exists('..\packages\Mono.Data.Sqlite.Portable.1.0.3.5\tools\Mono.Data.Sqlite.Portable.targets')" Text="This project references NuGet package(s) that are missing on this computer. Enable NuGet Package Restore to download them." />
|
||||
<Error Condition="Exists('..\packages\Mono.Data.Sqlite.Portable.1.0.3.5\tools\Mono.Data.Sqlite.Portable.targets')" Text="The build restored NuGet packages. Build the project again to include these packages in the build." />
|
||||
</Target>
|
||||
<PropertyGroup>
|
||||
<PostBuildEvent>
|
||||
</PostBuildEvent>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup>
|
||||
<PreBuildEvent>copy "$(SolutionDir)$(ProjectName)\$(PlatformName)\*.dll" "$(SolutionDir)$(ProjectName)" || cp "$(SolutionDir)$(ProjectName)\$(PlatformName)\*.dll" "$(SolutionDir)$(ProjectName)"</PreBuildEvent>
|
||||
</PropertyGroup>
|
||||
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
|
||||
Other similar extension points exist, see Microsoft.Common.targets.
|
||||
<Target Name="BeforeBuild">
|
||||
</Target>
|
||||
<Target Name="AfterBuild">
|
||||
</Target>
|
||||
-->
|
||||
</Project>
|
||||
546 SabreTools.Library/Skippers/Skipper.cs Normal file
@@ -0,0 +1,546 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.Xml;
|
||||
|
||||
using SabreTools.Helper.Data;
|
||||
using SabreTools.Helper.Tools;
|
||||
|
||||
#if MONO
|
||||
using System.IO;
|
||||
#else
|
||||
using Alphaleonis.Win32.Filesystem;
|
||||
|
||||
using BinaryReader = System.IO.BinaryReader;
|
||||
using SearchOption = System.IO.SearchOption;
|
||||
using SeekOrigin = System.IO.SeekOrigin;
|
||||
using Stream = System.IO.Stream;
|
||||
#endif
|
||||
|
||||
namespace SabreTools.Helper.Skippers
|
||||
{
|
||||
public class Skipper
|
||||
{
|
||||
#region Fields
|
||||
|
||||
public string Name;
|
||||
public string Author;
|
||||
public string Version;
|
||||
public List<SkipperRule> Rules;
|
||||
public string SourceFile;
|
||||
|
||||
// Local paths
|
||||
public static string LocalPath = Path.Combine(Globals.ExeDir, "Skippers") + Path.DirectorySeparatorChar;
|
||||
|
||||
// Header skippers represented by a list of skipper objects
|
||||
private static List<Skipper> _list;
|
||||
public static List<Skipper> List
|
||||
{
|
||||
get
|
||||
{
|
||||
if (_list == null || _list.Count == 0)
|
||||
{
|
||||
PopulateSkippers();
|
||||
}
|
||||
return _list;
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Constructors
|
||||
|
||||
/// <summary>
|
||||
/// Create an empty Skipper object
|
||||
/// </summary>
|
||||
public Skipper()
|
||||
{
|
||||
Name = "";
|
||||
Author = "";
|
||||
Version = "";
|
||||
Rules = new List<SkipperRule>();
|
||||
SourceFile = "";
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Create a Skipper object parsed from an input file
|
||||
/// </summary>
|
||||
/// <param name="filename">Name of the file to parse</param>
|
||||
public Skipper(string filename)
|
||||
{
|
||||
Rules = new List<SkipperRule>();
|
||||
SourceFile = Path.GetFileNameWithoutExtension(filename);
|
||||
|
||||
Logger logger = new Logger();
|
||||
XmlReader xtr = FileTools.GetXmlTextReader(filename);
|
||||
|
||||
if (xtr == null)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
bool valid = false;
|
||||
xtr.MoveToContent();
|
||||
while (!xtr.EOF)
|
||||
{
|
||||
if (xtr.NodeType != XmlNodeType.Element)
|
||||
{
|
||||
xtr.Read();
|
||||
}
|
||||
|
||||
switch (xtr.Name.ToLowerInvariant())
|
||||
{
|
||||
case "detector":
|
||||
valid = true;
|
||||
xtr.Read();
|
||||
break;
|
||||
case "name":
|
||||
Name = xtr.ReadElementContentAsString();
|
||||
break;
|
||||
case "author":
|
||||
Author = xtr.ReadElementContentAsString();
|
||||
break;
|
||||
case "version":
|
||||
Version = xtr.ReadElementContentAsString();
|
||||
break;
|
||||
case "rule":
|
||||
// Get the information from the rule first
|
||||
SkipperRule rule = new SkipperRule
|
||||
{
|
||||
StartOffset = 0,
|
||||
EndOffset = 0,
|
||||
Operation = HeaderSkipOperation.None,
|
||||
Tests = new List<SkipperTest>(),
|
||||
SourceFile = Path.GetFileNameWithoutExtension(filename),
|
||||
};
|
||||
|
||||
if (xtr.GetAttribute("start_offset") != null)
|
||||
{
|
||||
string offset = xtr.GetAttribute("start_offset");
|
||||
if (offset.ToLowerInvariant() == "eof")
|
||||
{
|
||||
rule.StartOffset = null;
|
||||
}
|
||||
else
|
||||
{
|
||||
rule.StartOffset = Convert.ToInt64(offset, 16);
|
||||
}
|
||||
}
|
||||
if (xtr.GetAttribute("end_offset") != null)
|
||||
{
|
||||
string offset = xtr.GetAttribute("end_offset");
|
||||
if (offset.ToLowerInvariant() == "eof")
|
||||
{
|
||||
rule.EndOffset = null;
|
||||
}
|
||||
else
|
||||
{
|
||||
rule.EndOffset = Convert.ToInt64(offset, 16);
|
||||
}
|
||||
}
|
||||
if (xtr.GetAttribute("operation") != null)
|
||||
{
|
||||
string operation = xtr.GetAttribute("operation");
|
||||
switch (operation.ToLowerInvariant())
|
||||
{
|
||||
case "bitswap":
|
||||
rule.Operation = HeaderSkipOperation.Bitswap;
|
||||
break;
|
||||
case "byteswap":
|
||||
rule.Operation = HeaderSkipOperation.Byteswap;
|
||||
break;
|
||||
case "wordswap":
|
||||
rule.Operation = HeaderSkipOperation.Wordswap;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Now read the individual tests into the Rule
|
||||
XmlReader subreader = xtr.ReadSubtree();
|
||||
|
||||
if (subreader != null)
|
||||
{
|
||||
while (!subreader.EOF)
|
||||
{
|
||||
if (subreader.NodeType != XmlNodeType.Element)
|
||||
{
|
||||
subreader.Read();
|
||||
}
|
||||
|
||||
// Get the test type
|
||||
SkipperTest test = new SkipperTest
|
||||
{
|
||||
Offset = 0,
|
||||
Value = new byte[0],
|
||||
Result = true,
|
||||
Mask = new byte[0],
|
||||
Size = 0,
|
||||
Operator = HeaderSkipTestFileOperator.Equal,
|
||||
};
|
||||
switch (subreader.Name.ToLowerInvariant())
|
||||
{
|
||||
case "data":
|
||||
test.Type = HeaderSkipTest.Data;
|
||||
break;
|
||||
case "or":
|
||||
test.Type = HeaderSkipTest.Or;
|
||||
break;
|
||||
case "xor":
|
||||
test.Type = HeaderSkipTest.Xor;
|
||||
break;
|
||||
case "and":
|
||||
test.Type = HeaderSkipTest.And;
|
||||
break;
|
||||
case "file":
|
||||
test.Type = HeaderSkipTest.File;
|
||||
break;
|
||||
default:
|
||||
subreader.Read();
|
||||
break;
|
||||
}
|
||||
|
||||
// Now populate all the parts that we can
|
||||
if (subreader.GetAttribute("offset") != null)
|
||||
{
|
||||
string offset = subreader.GetAttribute("offset");
|
||||
if (offset.ToLowerInvariant() == "eof")
|
||||
{
|
||||
test.Offset = null;
|
||||
}
|
||||
else
|
||||
{
|
||||
test.Offset = Convert.ToInt64(offset, 16);
|
||||
}
|
||||
}
|
||||
if (subreader.GetAttribute("value") != null)
|
||||
{
|
||||
string value = subreader.GetAttribute("value");
|
||||
|
||||
// http://stackoverflow.com/questions/321370/how-can-i-convert-a-hex-string-to-a-byte-array
|
||||
test.Value = new byte[value.Length / 2];
|
||||
for (int index = 0; index < test.Value.Length; index++)
|
||||
{
|
||||
string byteValue = value.Substring(index * 2, 2);
|
||||
test.Value[index] = byte.Parse(byteValue, NumberStyles.HexNumber, CultureInfo.InvariantCulture);
|
||||
}
|
||||
}
|
||||
if (subreader.GetAttribute("result") != null)
|
||||
{
|
||||
string result = subreader.GetAttribute("result");
|
||||
switch (result.ToLowerInvariant())
|
||||
{
|
||||
case "false":
|
||||
test.Result = false;
|
||||
break;
|
||||
case "true":
|
||||
default:
|
||||
test.Result = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (subreader.GetAttribute("mask") != null)
|
||||
{
|
||||
string mask = subreader.GetAttribute("mask");
|
||||
|
||||
// http://stackoverflow.com/questions/321370/how-can-i-convert-a-hex-string-to-a-byte-array
|
||||
test.Mask = new byte[mask.Length / 2];
|
||||
for (int index = 0; index < test.Mask.Length; index++)
|
||||
{
|
||||
string byteValue = mask.Substring(index * 2, 2);
|
||||
test.Mask[index] = byte.Parse(byteValue, NumberStyles.HexNumber, CultureInfo.InvariantCulture);
|
||||
}
|
||||
}
|
||||
if (subreader.GetAttribute("size") != null)
|
||||
{
|
||||
string size = subreader.GetAttribute("size");
|
||||
if (size.ToLowerInvariant() == "po2")
|
||||
{
|
||||
test.Size = null;
|
||||
}
|
||||
else
|
||||
{
|
||||
test.Size = Convert.ToInt64(size, 16);
|
||||
}
|
||||
}
|
||||
if (subreader.GetAttribute("operator") != null)
|
||||
{
|
||||
string oper = subreader.GetAttribute("operator");
|
||||
switch (oper.ToLowerInvariant())
|
||||
{
|
||||
case "less":
|
||||
test.Operator = HeaderSkipTestFileOperator.Less;
|
||||
break;
|
||||
case "greater":
|
||||
test.Operator = HeaderSkipTestFileOperator.Greater;
|
||||
break;
|
||||
case "equal":
|
||||
default:
|
||||
test.Operator = HeaderSkipTestFileOperator.Equal;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Add the created test to the rule
|
||||
rule.Tests.Add(test);
|
||||
subreader.Read();
|
||||
}
|
||||
}
|
||||
|
||||
// Add the created rule to the skipper
|
||||
Rules.Add(rule);
|
||||
xtr.Skip();
|
||||
break;
|
||||
default:
|
||||
xtr.Read();
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// If we somehow have an invalid file, zero out the fields
|
||||
if (!valid)
|
||||
{
|
||||
Name = null;
|
||||
Author = null;
|
||||
Version = null;
|
||||
Rules = null;
|
||||
SourceFile = null;
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Static Methods
|
||||
|
||||
/// <summary>
|
||||
/// Populate the entire list of header Skippers
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// http://mamedev.emulab.it/clrmamepro/docs/xmlheaders.txt
|
||||
/// http://www.emulab.it/forum/index.php?topic=127.0
|
||||
/// </remarks>
|
||||
private static void PopulateSkippers()
|
||||
{
|
||||
if (_list == null)
|
||||
{
|
||||
_list = new List<Skipper>();
|
||||
}
|
||||
|
||||
foreach (string skipperFile in Directory.EnumerateFiles(LocalPath, "*", SearchOption.AllDirectories))
|
||||
{
|
||||
_list.Add(new Skipper(Path.GetFullPath(skipperFile)));
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Get the SkipperRule associated with a given file
|
||||
/// </summary>
|
||||
/// <param name="input">Name of the file to be checked</param>
|
||||
/// <param name="skipperName">Name of the skipper to be used, blank to find a matching skipper</param>
|
||||
/// <param name="logger">Logger object for file and console output</param>
|
||||
/// <returns>The SkipperRule that matched the file</returns>
|
||||
public static SkipperRule GetMatchingRule(string input, string skipperName)
|
||||
{
|
||||
// If the file doesn't exist, return a blank skipper rule
|
||||
if (!File.Exists(input))
|
||||
{
|
||||
Globals.Logger.Error("The file '" + input + "' does not exist so it cannot be tested");
|
||||
return new SkipperRule();
|
||||
}
|
||||
|
||||
return GetMatchingRule(FileTools.TryOpenRead(input), skipperName);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Get the SkipperRule associated with a given stream
|
||||
/// </summary>
|
||||
/// <param name="input">Name of the file to be checked</param>
|
||||
/// <param name="skipperName">Name of the skipper to be used, blank to find a matching skipper</param>
|
||||
/// <param name="logger">Logger object for file and console output</param>
|
||||
/// <param name="keepOpen">True if the underlying stream should be kept open, false otherwise</param>
|
||||
/// <returns>The SkipperRule that matched the file</returns>
|
||||
public static SkipperRule GetMatchingRule(Stream input, string skipperName, bool keepOpen = false)
|
||||
{
|
||||
SkipperRule skipperRule = new SkipperRule();
|
||||
|
||||
// If we have a null skipper name, we return since we're not matching skippers
|
||||
if (skipperName == null)
|
||||
{
|
||||
return skipperRule;
|
||||
}
|
||||
|
||||
// Loop through and find a Skipper that has the right name
|
||||
Globals.Logger.Verbose("Beginning search for matching header skip rules");
|
||||
List<Skipper> tempList = new List<Skipper>();
|
||||
tempList.AddRange(List);
|
||||
|
||||
foreach (Skipper skipper in tempList)
|
||||
{
|
||||
// If we're searching for the skipper OR we have a match to an inputted one
|
||||
if (String.IsNullOrEmpty(skipperName)
|
||||
|| (!String.IsNullOrEmpty(skipper.Name) && skipperName.ToLowerInvariant() == skipper.Name.ToLowerInvariant())
|
||||
|| (!String.IsNullOrEmpty(skipper.Name) && skipperName.ToLowerInvariant() == skipper.SourceFile.ToLowerInvariant()))
|
||||
{
|
||||
// Loop through the rules until one is found that works
|
||||
BinaryReader br = new BinaryReader(input);
|
||||
|
||||
foreach (SkipperRule rule in skipper.Rules)
|
||||
{
|
||||
// Always reset the stream back to the original place
|
||||
input.Seek(0, SeekOrigin.Begin);
|
||||
|
||||
// For each rule, make sure it passes each test
|
||||
bool success = true;
|
||||
foreach (SkipperTest test in rule.Tests)
|
||||
{
|
||||
bool result = true;
|
||||
switch (test.Type)
|
||||
{
|
||||
case HeaderSkipTest.Data:
|
||||
// First seek to the correct position
|
||||
if (test.Offset == null)
|
||||
{
|
||||
input.Seek(0, SeekOrigin.End);
|
||||
}
|
||||
else if (test.Offset > 0 && test.Offset <= input.Length)
|
||||
{
|
||||
input.Seek((long)test.Offset, SeekOrigin.Begin);
|
||||
}
|
||||
else if (test.Offset < 0 && Math.Abs((long)test.Offset) <= input.Length)
|
||||
{
|
||||
input.Seek((long)test.Offset, SeekOrigin.End);
|
||||
}
|
||||
|
||||
// Then read and compare bytewise
|
||||
result = true;
|
||||
for (int i = 0; i < test.Value.Length; i++)
|
||||
{
|
||||
try
|
||||
{
|
||||
if (br.ReadByte() != test.Value[i])
|
||||
{
|
||||
result = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
result = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Return if the expected and actual results match
|
||||
success &= (result == test.Result);
|
||||
break;
|
||||
case HeaderSkipTest.Or:
|
||||
case HeaderSkipTest.Xor:
|
||||
case HeaderSkipTest.And:
|
||||
// First seek to the correct position
|
||||
if (test.Offset == null)
|
||||
{
|
||||
input.Seek(0, SeekOrigin.End);
|
||||
}
|
||||
else if (test.Offset > 0 && test.Offset <= input.Length)
|
||||
{
|
||||
input.Seek((long)test.Offset, SeekOrigin.Begin);
|
||||
}
|
||||
else if (test.Offset < 0 && Math.Abs((long)test.Offset) <= input.Length)
|
||||
{
|
||||
input.Seek((long)test.Offset, SeekOrigin.End);
|
||||
}
|
||||
|
||||
result = true;
|
||||
try
|
||||
{
|
||||
// Then apply the mask if it exists
|
||||
byte[] read = br.ReadBytes(test.Mask.Length);
|
||||
byte[] masked = new byte[test.Mask.Length];
|
||||
for (int i = 0; i < read.Length; i++)
|
||||
{
|
||||
masked[i] = (byte)(test.Type == HeaderSkipTest.And ? read[i] & test.Mask[i] :
|
||||
(test.Type == HeaderSkipTest.Or ? read[i] | test.Mask[i] : read[i] ^ test.Mask[i])
|
||||
);
|
||||
}
|
||||
|
||||
// Finally, compare it against the value
|
||||
for (int i = 0; i < test.Value.Length; i++)
|
||||
{
|
||||
if (masked[i] != test.Value[i])
|
||||
{
|
||||
result = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
result = false;
|
||||
}
|
||||
|
||||
// Return if the expected and actual results match
|
||||
success &= (result == test.Result);
|
||||
break;
|
||||
case HeaderSkipTest.File:
|
||||
// First get the file size from stream
|
||||
long size = input.Length;
|
||||
|
||||
// If we have a null size, check that the size is a power of 2
|
||||
result = true;
|
||||
if (test.Size == null)
|
||||
{
|
||||
// http://stackoverflow.com/questions/600293/how-to-check-if-a-number-is-a-power-of-2
|
||||
result = (((ulong)size & ((ulong)size - 1)) == 0);
|
||||
}
|
||||
else if (test.Operator == HeaderSkipTestFileOperator.Less)
|
||||
{
|
||||
result = (size < test.Size);
|
||||
}
|
||||
else if (test.Operator == HeaderSkipTestFileOperator.Greater)
|
||||
{
|
||||
result = (size > test.Size);
|
||||
}
|
||||
else if (test.Operator == HeaderSkipTestFileOperator.Equal)
|
||||
{
|
||||
result = (size == test.Size);
|
||||
}
|
||||
|
||||
// Return if the expected and actual results match
|
||||
success &= (result == test.Result);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// If we still have a success, then return this rule
|
||||
if (success)
|
||||
{
|
||||
// If we're not keeping the stream open, dispose of the binary reader
|
||||
if (!keepOpen)
|
||||
{
|
||||
input.Dispose();
|
||||
}
|
||||
|
||||
Globals.Logger.User(" Matching rule found!");
|
||||
return rule;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If we're not keeping the stream open, dispose of the binary reader
|
||||
if (!keepOpen)
|
||||
{
|
||||
input.Dispose();
|
||||
}
|
||||
|
||||
// If we have a blank rule, inform the user
|
||||
if (skipperRule.Tests == null)
|
||||
{
|
||||
Globals.Logger.Verbose("No matching rule found!");
|
||||
}
|
||||
|
||||
return skipperRule;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
215 SabreTools.Library/Skippers/SkipperRule.cs Normal file
@@ -0,0 +1,215 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
|
||||
using SabreTools.Helper.Data;
|
||||
using SabreTools.Helper.Tools;
|
||||
|
||||
#if MONO
|
||||
using System.IO;
|
||||
#else
|
||||
using Alphaleonis.Win32.Filesystem;
|
||||
|
||||
using BinaryReader = System.IO.BinaryReader;
|
||||
using BinaryWriter = System.IO.BinaryWriter;
|
||||
using SeekOrigin = System.IO.SeekOrigin;
|
||||
using Stream = System.IO.Stream;
|
||||
#endif
|
||||
|
||||
namespace SabreTools.Helper.Skippers
|
||||
{
|
||||
public class SkipperRule
|
||||
{
|
||||
// Public variables
|
||||
public long? StartOffset; // null is EOF
|
||||
public long? EndOffset; // null is EOF
|
||||
public HeaderSkipOperation Operation;
|
||||
public List<SkipperTest> Tests;
|
||||
public string SourceFile;
|
||||
|
||||
/// <summary>
|
||||
/// Transform an input file using the given rule
|
||||
/// </summary>
|
||||
/// <param name="input">Input file name</param>
|
||||
/// <param name="output">Output file name</param>
|
||||
/// <returns>True if the file was transformed properly, false otherwise</returns>
|
||||
public bool TransformFile(string input, string output)
|
||||
{
|
||||
bool success = true;
|
||||
|
||||
// If the input file doesn't exist, fail
|
||||
if (!File.Exists(input))
|
||||
{
|
||||
Globals.Logger.Error("I'm sorry but '" + input + "' doesn't exist!");
|
||||
return false;
|
||||
}
|
||||
|
||||
// Create the output directory if it doesn't already exist
|
||||
if (!Directory.Exists(Path.GetDirectoryName(output)))
|
||||
{
|
||||
Directory.CreateDirectory(Path.GetDirectoryName(output));
|
||||
}
|
||||
|
||||
Globals.Logger.User("Attempting to apply rule to '" + input + "'");
|
||||
success = TransformStream(FileTools.TryOpenRead(input), FileTools.TryCreate(output));
|
||||
|
||||
// If the output file has size 0, delete it
|
||||
if (new FileInfo(output).Length == 0)
|
||||
{
|
||||
FileTools.TryDeleteFile(output);
|
||||
success = false;
|
||||
}
|
||||
|
||||
return success;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Transform an input stream using the given rule
|
||||
/// </summary>
|
||||
/// <param name="input">Input stream</param>
|
||||
/// <param name="output">Output stream</param>
|
||||
/// <param name="keepReadOpen">True if the underlying read stream should be kept open, false otherwise</param>
|
||||
/// <param name="keepWriteOpen">True if the underlying write stream should be kept open, false otherwise</param>
|
||||
/// <returns>True if the file was transformed properly, false otherwise</returns>
|
||||
public bool TransformStream(Stream input, Stream output, bool keepReadOpen = false, bool keepWriteOpen = false)
|
||||
{
|
||||
bool success = true;
|
||||
|
||||
// If the sizes are wrong for the values, fail
|
||||
long extsize = input.Length;
|
||||
if ((Operation > HeaderSkipOperation.Bitswap && (extsize % 2) != 0)
|
||||
|| (Operation > HeaderSkipOperation.Byteswap && (extsize % 4) != 0)
|
||||
|| (Operation > HeaderSkipOperation.Bitswap && (StartOffset == null || StartOffset % 2 == 0)))
|
||||
{
|
||||
Globals.Logger.Error("The stream did not have the correct size to be transformed!");
|
||||
return false;
|
||||
}
|
||||
|
||||
// Now read the proper part of the file and apply the rule
|
||||
BinaryWriter bw = null;
|
||||
BinaryReader br = null;
|
||||
try
|
||||
{
|
||||
Globals.Logger.User("Applying found rule to input stream");
|
||||
bw = new BinaryWriter(output);
|
||||
br = new BinaryReader(input);
|
||||
|
||||
// Seek to the beginning offset
|
||||
if (StartOffset == null)
|
||||
{
|
||||
success = false;
|
||||
}
|
||||
else if (Math.Abs((long)StartOffset) > input.Length)
|
||||
{
|
||||
success = false;
|
||||
}
|
||||
else if (StartOffset > 0)
|
||||
{
|
||||
input.Seek((long)StartOffset, SeekOrigin.Begin);
|
||||
}
|
||||
else if (StartOffset < 0)
|
||||
{
|
||||
input.Seek((long)StartOffset, SeekOrigin.End);
|
||||
}
|
||||
|
||||
// Then read and apply the operation as you go
|
||||
if (success)
|
||||
{
|
||||
byte[] buffer = new byte[4];
|
||||
int pos = 0;
|
||||
while (input.Position < (EndOffset ?? input.Length)
|
||||
&& input.Position < input.Length)
|
||||
{
|
||||
byte b = br.ReadByte();
|
||||
switch (Operation)
|
||||
{
|
||||
case HeaderSkipOperation.Bitswap:
|
||||
// http://stackoverflow.com/questions/3587826/is-there-a-built-in-function-to-reverse-bit-order
|
||||
uint r = b;
|
||||
int s = 7;
|
||||
for (b >>= 1; b != 0; b >>= 1)
|
||||
{
|
||||
r <<= 1;
|
||||
r |= (byte)(b & 1);
|
||||
s--;
|
||||
}
|
||||
r <<= s;
|
||||
buffer[pos] = (byte)r;
|
||||
break;
|
||||
case HeaderSkipOperation.Byteswap:
|
||||
if (pos % 2 == 1)
|
||||
{
|
||||
buffer[pos - 1] = b;
|
||||
}
|
||||
if (pos % 2 == 0)
|
||||
{
|
||||
buffer[pos + 1] = b;
|
||||
}
|
||||
break;
|
||||
case HeaderSkipOperation.Wordswap:
|
||||
buffer[3 - pos] = b;
|
||||
break;
|
||||
case HeaderSkipOperation.WordByteswap:
|
||||
buffer[(pos + 2) % 4] = b;
|
||||
break;
|
||||
case HeaderSkipOperation.None:
|
||||
default:
|
||||
buffer[pos] = b;
|
||||
break;
|
||||
}
|
||||
|
||||
// Advance the buffer position for the next write
|
||||
pos = (pos + 1) % 4;
|
||||
|
||||
// If we filled a buffer, flush to the stream
|
||||
if (pos == 0)
|
||||
{
|
||||
bw.Write(buffer);
|
||||
bw.Flush();
|
||||
buffer = new byte[4];
|
||||
}
|
||||
}
|
||||
// If there's anything left in the buffer, write only those bytes
|
||||
for (int i = 0; i < pos; i++)
|
||||
{
|
||||
bw.Write(buffer[i]);
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Globals.Logger.Error(ex.ToString());
|
||||
return false;
|
||||
}
|
||||
finally
|
||||
{
|
||||
// If we're not keeping the read stream open, dispose of the binary reader
|
||||
if (!keepReadOpen)
|
||||
{
|
||||
br?.Dispose();
|
||||
}
|
||||
|
||||
// If we're not keeping the write stream open, dispose of the binary writer
|
||||
if (!keepWriteOpen)
|
||||
{
|
||||
bw?.Dispose();
|
||||
}
|
||||
}
|
||||
|
||||
return success;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Intermediate class for storing Skipper Test information
|
||||
/// </summary>
|
||||
public struct SkipperTest
|
||||
{
|
||||
public HeaderSkipTest Type;
|
||||
public long? Offset; // null is EOF
|
||||
public byte[] Value;
|
||||
public bool Result;
|
||||
public byte[] Mask;
|
||||
public long? Size; // null is PO2, "power of 2" filesize
|
||||
public HeaderSkipTestFileOperator Operator;
|
||||
}
|
||||
}
|
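A hedged usage sketch for the SkipperRule class above, assuming a rule has already been matched (the paths are hypothetical):

using SabreTools.Helper.Skippers;

SkipperRule rule = Skipper.GetMatchingRule(@"C:\roms\headered\game.nes", "");
if (rule.Tests != null && rule.Tests.Count > 0)
{
    // Writes a transformed copy of the input according to the rule's offsets
    // and operation; returns false if the transform failed or produced an empty file
    bool ok = rule.TransformFile(@"C:\roms\headered\game.nes",
        @"C:\roms\deheadered\game.nes");
}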
||||
17
SabreTools.Library/Skippers/a7800.xml
Normal file
@@ -0,0 +1,17 @@
|
||||
<?xml version="1.0"?>
|
||||
|
||||
<detector>
|
||||
|
||||
<name>Atari 7800</name>
|
||||
<author>Roman Scherzer</author>
|
||||
<version>1.0</version>
|
||||
|
||||
<rule start_offset="80" end_offset="EOF" operation="none">
|
||||
<data offset="1" value="415441524937383030" result="true"/>
|
||||
</rule>
|
||||
|
||||
<rule start_offset="80" end_offset="EOF" operation="none">
|
||||
<data offset="64" value="41435455414C20434152542044415441205354415254532048455245" result="true"/>
|
||||
</rule>
|
||||
|
||||
</detector>
|
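The data values in these detector XML files are hex-encoded byte strings; as a quick illustration (a standalone sketch, not part of the library), the Atari 7800 signature above decodes to plain ASCII:

using System;
using System.Text;

class DecodeSignature
{
    static void Main()
    {
        // Value taken from the first rule above
        string hex = "415441524937383030";

        // Convert each pair of hex digits into a byte
        byte[] bytes = new byte[hex.Length / 2];
        for (int i = 0; i < hex.Length; i += 2)
        {
            bytes[i / 2] = Convert.ToByte(hex.Substring(i, 2), 16);
        }

        // Prints "ATARI7800" -- the magic string the detector looks for at offset 1
        Console.WriteLine(Encoding.ASCII.GetString(bytes));
    }
}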
||||
25
SabreTools.Library/Skippers/fds.xml
Normal file
@@ -0,0 +1,25 @@
|
||||
<?xml version="1.0"?>
|
||||
|
||||
<detector>
|
||||
|
||||
<name>fds</name>
|
||||
<author>Yori Yoshizuki</author>
|
||||
<version>1.0</version>
|
||||
|
||||
<rule start_offset="10">
|
||||
<data offset="0" value="4644531A010000000000000000000000"/>
|
||||
</rule>
|
||||
|
||||
<rule start_offset="10">
|
||||
<data offset="0" value="4644531A020000000000000000000000"/>
|
||||
</rule>
|
||||
|
||||
<rule start_offset="10">
|
||||
<data offset="0" value="4644531A030000000000000000000000"/>
|
||||
</rule>
|
||||
|
||||
<rule start_offset="10">
|
||||
<data offset="0" value="4644531A040000000000000000000000"/>
|
||||
</rule>
|
||||
|
||||
</detector>
|
||||
17
SabreTools.Library/Skippers/lynx.xml
Normal file
@@ -0,0 +1,17 @@
|
||||
<?xml version="1.0"?>
|
||||
|
||||
<detector>
|
||||
|
||||
<name>Atari Lynx</name>
|
||||
<author>Roman Scherzer</author>
|
||||
<version>1.0</version>
|
||||
|
||||
<rule start_offset="40" end_offset="EOF" operation="none">
|
||||
<data offset="0" value="4C594E58" result="true"/>
|
||||
</rule>
|
||||
|
||||
<rule start_offset="40" end_offset="EOF" operation="none">
|
||||
<data offset="6" value="425339" result="true"/>
|
||||
</rule>
|
||||
|
||||
</detector>
|
||||
24
SabreTools.Library/Skippers/n64.xml
Normal file
@@ -0,0 +1,24 @@
|
||||
<?xml version="1.0"?>
|
||||
|
||||
<detector>
|
||||
|
||||
<name>Nintendo 64 - ABCD</name>
|
||||
<author>CUE</author>
|
||||
<version>1.1</version>
|
||||
|
||||
<!-- V64 format -->
|
||||
<rule start_offset="0" end_offset="EOF" operation="none">
|
||||
<data offset="0" value="80371240" result="true"/>
|
||||
</rule>
|
||||
|
||||
<!-- Z64 format -->
|
||||
<rule start_offset="0" end_offset="EOF" operation="byteswap">
|
||||
<data offset="0" value="37804012" result="true"/>
|
||||
</rule>
|
||||
|
||||
<!-- N64 format? -->
|
||||
<rule start_offset="0" end_offset="EOF" operation="wordswap">
|
||||
<data offset="0" value="40123780" result="true"/>
|
||||
</rule>
|
||||
|
||||
</detector>
|
||||
13
SabreTools.Library/Skippers/nes.xml
Normal file
@@ -0,0 +1,13 @@
|
||||
<?xml version="1.0"?>
|
||||
|
||||
<detector>
|
||||
|
||||
<name>Nintendo Famicom/NES</name>
|
||||
<author>Roman Scherzer</author>
|
||||
<version>1.1</version>
|
||||
|
||||
<rule start_offset="10" end_offset="EOF" operation="none">
|
||||
<data offset="0" value="4E45531A" result="true"/>
|
||||
</rule>
|
||||
|
||||
</detector>
|
||||
13
SabreTools.Library/Skippers/pce.xml
Normal file
@@ -0,0 +1,13 @@
|
||||
<?xml version="1.0"?>
|
||||
|
||||
<detector>
|
||||
|
||||
<name>NEC TurboGrafx-16/PC-Engine</name>
|
||||
<author>Matt Nadareski (darksabre76)</author>
|
||||
<version>1.0</version>
|
||||
|
||||
<rule start_offset="200">
|
||||
<data offset="0" value="4000000000000000AABB02"/>
|
||||
</rule>
|
||||
|
||||
</detector>
|
||||
29
SabreTools.Library/Skippers/psid.xml
Normal file
@@ -0,0 +1,29 @@
|
||||
<?xml version="1.0"?>
|
||||
|
||||
<detector>
|
||||
|
||||
<name>psid</name>
|
||||
<author>Yori Yoshizuki</author>
|
||||
<version>1.2</version>
|
||||
|
||||
<rule start_offset="76" end_offset="EOF" operation="none">
|
||||
<data offset="0" value="5053494400010076" result="true"/>
|
||||
</rule>
|
||||
|
||||
<rule start_offset="76" end_offset="EOF" operation="none">
|
||||
<data offset="0" value="505349440003007c" result="true"/>
|
||||
</rule>
|
||||
|
||||
<rule start_offset="7c" end_offset="EOF" operation="none">
|
||||
<data offset="0" value="505349440002007c" result="true"/>
|
||||
</rule>
|
||||
|
||||
<rule start_offset="7c" end_offset="EOF" operation="none">
|
||||
<data offset="0" value="505349440001007c" result="true"/>
|
||||
</rule>
|
||||
|
||||
<rule start_offset="7c" end_offset="EOF" operation="none">
|
||||
<data offset="0" value="525349440002007c" result="true"/>
|
||||
</rule>
|
||||
|
||||
</detector>
|
||||
24
SabreTools.Library/Skippers/snes.xml
Normal file
@@ -0,0 +1,24 @@
|
||||
<?xml version="1.0"?>
|
||||
|
||||
<detector>
|
||||
|
||||
<name>Nintendo Super Famicom/SNES</name>
|
||||
<author>Matt Nadareski (darksabre76)</author>
|
||||
<version>1.0</version>
|
||||
|
||||
<!-- fig header -->
|
||||
<rule start_offset="200">
|
||||
<data offset="16" value="0000000000000000"/>
|
||||
</rule>
|
||||
|
||||
<!-- smc header -->
|
||||
<rule start_offset="200">
|
||||
<data offset="16" value="AABB040000000000"/>
|
||||
</rule>
|
||||
|
||||
<!-- ufo header -->
|
||||
<rule start_offset="200">
|
||||
<data offset="16" value="535550455255464F"/>
|
||||
</rule>
|
||||
|
||||
</detector>
|
||||
13
SabreTools.Library/Skippers/spc.xml
Normal file
@@ -0,0 +1,13 @@
|
||||
<?xml version="1.0"?>
|
||||
|
||||
<detector>
|
||||
|
||||
<name>Nintendo Super Famicom SPC</name>
|
||||
<author>Yori Yoshizuki</author>
|
||||
<version>1.0</version>
|
||||
|
||||
<rule start_offset="00100" end_offset="EOF" operation="none">
|
||||
<data offset="0" value="534E45532D535043" result="true"/>
|
||||
</rule>
|
||||
|
||||
</detector>
|
||||
3255
SabreTools.Library/Tools/ArchiveTools.cs
Normal file
File diff suppressed because it is too large
202
SabreTools.Library/Tools/DatabaseTools.cs
Normal file
@@ -0,0 +1,202 @@
|
||||
using Mono.Data.Sqlite;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
|
||||
using SabreTools.Helper.Data;
|
||||
|
||||
#if MONO
|
||||
using System.IO;
|
||||
#else
|
||||
using Alphaleonis.Win32.Filesystem;
|
||||
#endif
|
||||
|
||||
namespace SabreTools.Helper.Tools
|
||||
{
|
||||
/// <summary>
|
||||
/// All general database operations
|
||||
/// </summary>
|
||||
public static class DatabaseTools
|
||||
{
|
||||
/// <summary>
|
||||
/// Add a header to the database
|
||||
/// </summary>
|
||||
/// <param name="header">String representing the header bytes</param>
|
||||
/// <param name="SHA1">SHA-1 of the deheadered file</param>
|
||||
/// <param name="type">Name of the source skipper file</param>
|
||||
public static void AddHeaderToDatabase(string header, string SHA1, string source)
|
||||
{
|
||||
bool exists = false;
|
||||
|
||||
// Ensure the database exists
|
||||
EnsureDatabase(Constants.HeadererDbSchema, Constants.HeadererFileName, Constants.HeadererConnectionString);
|
||||
|
||||
// Open the database connection
|
||||
SqliteConnection dbc = new SqliteConnection(Constants.HeadererConnectionString);
|
||||
dbc.Open();
|
||||
|
||||
string query = @"SELECT * FROM data WHERE sha1='" + SHA1 + "' AND header='" + header + "'";
|
||||
SqliteCommand slc = new SqliteCommand(query, dbc);
|
||||
SqliteDataReader sldr = slc.ExecuteReader();
|
||||
exists = sldr.HasRows;
|
||||
|
||||
if (!exists)
|
||||
{
|
||||
query = @"INSERT INTO data (sha1, header, type) VALUES ('" +
|
||||
SHA1 + "', " +
|
||||
"'" + header + "', " +
|
||||
"'" + source + "')";
|
||||
slc = new SqliteCommand(query, dbc);
|
||||
Globals.Logger.Verbose("Result of inserting header: " + slc.ExecuteNonQuery());
|
||||
}
|
||||
|
||||
// Dispose of database objects
|
||||
slc.Dispose();
|
||||
sldr.Dispose();
|
||||
dbc.Dispose();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Ensure that the database exists and has the proper schema
|
||||
/// </summary>
|
||||
/// <param name="type">Schema type to use</param>
|
||||
/// <param name="db">Name of the databse</param>
|
||||
/// <param name="connectionString">Connection string for SQLite</param>
|
||||
public static void EnsureDatabase(string type, string db, string connectionString)
|
||||
{
|
||||
// Set the type to lowercase
|
||||
type = type.ToLowerInvariant();
|
||||
|
||||
// Make sure the file exists
|
||||
if (!File.Exists(db))
|
||||
{
|
||||
SqliteConnection.CreateFile(db);
|
||||
}
|
||||
|
||||
// Open the database connection
|
||||
SqliteConnection dbc = new SqliteConnection(connectionString);
|
||||
dbc.Open();
|
||||
|
||||
// Make sure the database has the correct schema
|
||||
try
|
||||
{
|
||||
if (type == "rombasharp")
|
||||
{
|
||||
string query = @"
|
||||
CREATE TABLE IF NOT EXISTS crc (
|
||||
'crc' TEXT NOT NULL,
|
||||
PRIMARY KEY (crc)
|
||||
)";
|
||||
SqliteCommand slc = new SqliteCommand(query, dbc);
|
||||
slc.ExecuteNonQuery();
|
||||
|
||||
query = @"
|
||||
CREATE TABLE IF NOT EXISTS md5 (
|
||||
'md5' TEXT NOT NULL,
|
||||
PRIMARY KEY (md5)
|
||||
)";
|
||||
slc = new SqliteCommand(query, dbc);
|
||||
slc.ExecuteNonQuery();
|
||||
|
||||
query = @"
|
||||
CREATE TABLE IF NOT EXISTS sha1 (
|
||||
'sha1' TEXT NOT NULL,
|
||||
'depot' TEXT,
|
||||
PRIMARY KEY (sha1)
|
||||
)";
|
||||
slc = new SqliteCommand(query, dbc);
|
||||
slc.ExecuteNonQuery();
|
||||
|
||||
query = @"
|
||||
CREATE TABLE IF NOT EXISTS crcsha1 (
|
||||
'crc' TEXT NOT NULL,
|
||||
'sha1' TEXT NOT NULL,
|
||||
PRIMARY KEY (crc, sha1)
|
||||
)";
|
||||
slc = new SqliteCommand(query, dbc);
|
||||
slc.ExecuteNonQuery();
|
||||
|
||||
query = @"
|
||||
CREATE TABLE IF NOT EXISTS md5sha1 (
|
||||
'md5' TEXT NOT NULL,
|
||||
'sha1' TEXT NOT NULL,
|
||||
PRIMARY KEY (md5, sha1)
|
||||
)";
|
||||
slc = new SqliteCommand(query, dbc);
|
||||
slc.ExecuteNonQuery();
|
||||
|
||||
query = @"
|
||||
CREATE TABLE IF NOT EXISTS dat (
|
||||
'hash' TEXT NOT NULL,
|
||||
PRIMARY KEY (hash)
|
||||
)";
|
||||
slc = new SqliteCommand(query, dbc);
|
||||
slc.ExecuteNonQuery();
|
||||
slc.Dispose();
|
||||
}
|
||||
else if (type == "headerer")
|
||||
{
|
||||
string query = @"
|
||||
CREATE TABLE IF NOT EXISTS data (
|
||||
'sha1' TEXT NOT NULL,
|
||||
'header' TEXT NOT NULL,
|
||||
'type' TEXT NOT NULL,
|
||||
PRIMARY KEY (sha1, header, type)
|
||||
)";
|
||||
SqliteCommand slc = new SqliteCommand(query, dbc);
|
||||
slc.ExecuteNonQuery();
|
||||
slc.Dispose();
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Console.WriteLine(ex);
|
||||
}
|
||||
finally
|
||||
{
|
||||
dbc.Dispose();
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Retrieve headers from the database
|
||||
/// </summary>
|
||||
/// <param name="SHA1">SHA-1 of the deheadered file</param>
|
||||
/// <returns>List of strings representing the headers to add</returns>
|
||||
public static List<string> RetrieveHeadersFromDatabase(string SHA1)
|
||||
{
|
||||
// Ensure the database exists
|
||||
EnsureDatabase(Constants.HeadererDbSchema, Constants.HeadererFileName, Constants.HeadererConnectionString);
|
||||
|
||||
// Open the database connection
|
||||
SqliteConnection dbc = new SqliteConnection(Constants.HeadererConnectionString);
|
||||
dbc.Open();
|
||||
|
||||
// Create the output list of headers
|
||||
List<string> headers = new List<string>();
|
||||
|
||||
string query = @"SELECT header, type FROM data WHERE sha1='" + SHA1 + "'";
|
||||
SqliteCommand slc = new SqliteCommand(query, dbc);
|
||||
SqliteDataReader sldr = slc.ExecuteReader();
|
||||
|
||||
if (sldr.HasRows)
|
||||
{
|
||||
while (sldr.Read())
|
||||
{
|
||||
Globals.Logger.Verbose("Found match with rom type " + sldr.GetString(1));
|
||||
headers.Add(sldr.GetString(0));
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
Globals.Logger.Warning("No matching header could be found!");
|
||||
}
|
||||
|
||||
// Dispose of database objects
|
||||
slc.Dispose();
|
||||
sldr.Dispose();
|
||||
dbc.Dispose();
|
||||
|
||||
return headers;
|
||||
}
|
||||
}
|
||||
}
|
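A minimal, hedged sketch of the round trip through the two helpers above (the header and SHA-1 values are hypothetical placeholders):

using System.Collections.Generic;
using SabreTools.Helper.Tools;

// Hypothetical values: the stripped copier header as a hex string and
// the SHA-1 of the file after that header was removed
string header = "4E45531A010100000000000000000000";
string sha1 = "0000000000000000000000000000000000000000";

// Record the pairing, then look up every header known for that hash
DatabaseTools.AddHeaderToDatabase(header, sha1, "nes.xml");
List<string> headers = DatabaseTools.RetrieveHeadersFromDatabase(sha1);

Note that the queries above build SQL by string concatenation, so callers should only pass trusted, locally computed values.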
||||
899
SabreTools.Library/Tools/FileTools.cs
Normal file
@@ -0,0 +1,899 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Security.Cryptography;
|
||||
using System.Xml;
|
||||
using System.Xml.Schema;
|
||||
|
||||
using SabreTools.Helper.Data;
|
||||
using SabreTools.Helper.Dats;
|
||||
using SabreTools.Helper.External;
|
||||
using SabreTools.Helper.Skippers;
|
||||
|
||||
#if MONO
|
||||
using System.IO;
|
||||
#else
|
||||
using Alphaleonis.Win32.Filesystem;
|
||||
|
||||
using BinaryReader = System.IO.BinaryReader;
|
||||
using BinaryWriter = System.IO.BinaryWriter;
|
||||
using FileAccess = System.IO.FileAccess;
|
||||
using FileMode = System.IO.FileMode;
|
||||
using FileShare = System.IO.FileShare;
|
||||
using FileStream = System.IO.FileStream;
|
||||
using IOException = System.IO.IOException;
|
||||
using MemoryStream = System.IO.MemoryStream;
|
||||
using PathTooLongException = System.IO.PathTooLongException;
|
||||
using SearchOption = System.IO.SearchOption;
|
||||
using SeekOrigin = System.IO.SeekOrigin;
|
||||
using Stream = System.IO.Stream;
|
||||
using StreamReader = System.IO.StreamReader;
|
||||
#endif
|
||||
using NaturalSort;
|
||||
using OCRC;
|
||||
|
||||
namespace SabreTools.Helper.Tools
|
||||
{
|
||||
public static class FileTools
|
||||
{
|
||||
#region File Information
|
||||
|
||||
/// <summary>
|
||||
/// Get what type of DAT the input file is
|
||||
/// </summary>
|
||||
/// <param name="filename">Name of the file to be parsed</param>
|
||||
/// <returns>The DatFormat corresponding to the DAT</returns>
|
||||
/// <remarks>There is currently no differentiation between XML and SabreDAT here</remarks>
|
||||
public static DatFormat GetDatFormat(string filename)
|
||||
{
|
||||
// Limit the output formats based on extension
|
||||
string ext = Path.GetExtension(filename).ToLowerInvariant();
|
||||
if (ext.StartsWith("."))
|
||||
{
|
||||
ext = ext.Substring(1);
|
||||
}
|
||||
if (ext != "csv" && ext != "dat" && ext != "md5" && ext != "sfv" && ext != "sha1"
|
||||
&& ext != "sha384" && ext != "sha512" && ext != "tsv" && ext != "txt" && ext != "xml")
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Read the input file, if possible
|
||||
Globals.Logger.Verbose("Attempting to read file to get format: \"" + filename + "\"");
|
||||
|
||||
// Check if file exists
|
||||
if (!File.Exists(filename))
|
||||
{
|
||||
Globals.Logger.Warning("File '" + filename + "' could not read from!");
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Some formats only require the extension to know
|
||||
if (ext == "md5")
|
||||
{
|
||||
return DatFormat.RedumpMD5;
|
||||
}
|
||||
if (ext == "sfv")
|
||||
{
|
||||
return DatFormat.RedumpSFV;
|
||||
}
|
||||
if (ext == "sha1")
|
||||
{
|
||||
return DatFormat.RedumpSHA1;
|
||||
}
|
||||
if (ext == "sha256")
|
||||
{
|
||||
return DatFormat.RedumpSHA256;
|
||||
}
|
||||
if (ext == "sha384")
|
||||
{
|
||||
return DatFormat.RedumpSHA384;
|
||||
}
|
||||
if (ext == "sha512")
|
||||
{
|
||||
return DatFormat.RedumpSHA512;
|
||||
}
|
||||
|
||||
// For everything else, we need to read it
|
||||
try
|
||||
{
|
||||
// Get the first two lines to check
|
||||
StreamReader sr = File.OpenText(filename);
|
||||
string first = sr.ReadLine().ToLowerInvariant();
|
||||
string second = sr.ReadLine().ToLowerInvariant();
|
||||
sr.Dispose();
|
||||
|
||||
// If we have an XML-based DAT
|
||||
if (first.Contains("<?xml") && first.Contains("?>"))
|
||||
{
|
||||
if (second.StartsWith("<!doctype datafile"))
|
||||
{
|
||||
return DatFormat.Logiqx;
|
||||
}
|
||||
else if (second.StartsWith("<!doctype softwarelist"))
|
||||
{
|
||||
return DatFormat.SoftwareList;
|
||||
}
|
||||
else if (second.StartsWith("<!doctype sabredat"))
|
||||
{
|
||||
return DatFormat.SabreDat;
|
||||
}
|
||||
else if (second.StartsWith("<dat") && !second.StartsWith("<datafile"))
|
||||
{
|
||||
return DatFormat.OfflineList;
|
||||
}
|
||||
// Older and non-compliant DATs
|
||||
else
|
||||
{
|
||||
return DatFormat.Logiqx;
|
||||
}
|
||||
}
|
||||
|
||||
// If we have an INI-based DAT
|
||||
else if (first.Contains("[") && first.Contains("]"))
|
||||
{
|
||||
return DatFormat.RomCenter;
|
||||
}
|
||||
|
||||
// If we have a CMP-based DAT
|
||||
else if (first.Contains("clrmamepro"))
|
||||
{
|
||||
return DatFormat.ClrMamePro;
|
||||
}
|
||||
else if (first.Contains("romvault"))
|
||||
{
|
||||
return DatFormat.ClrMamePro;
|
||||
}
|
||||
else if (first.Contains("doscenter"))
|
||||
{
|
||||
return DatFormat.DOSCenter;
|
||||
}
|
||||
else if (first.Contains("#Name;Title;Emulator;CloneOf;Year;Manufacturer;Category;Players;Rotation;Control;Status;DisplayCount;DisplayType;AltRomname;AltTitle;Extra"))
|
||||
{
|
||||
return DatFormat.AttractMode;
|
||||
}
|
||||
else if (first.Contains("\"File Name\",\"Internal Name\",\"Description\""))
|
||||
{
|
||||
return DatFormat.CSV;
|
||||
}
|
||||
else if (first.Contains("\"File Name\"\t\"Internal Name\"\t\"Description\""))
|
||||
{
|
||||
return DatFormat.TSV;
|
||||
}
|
||||
else
|
||||
{
|
||||
return DatFormat.ClrMamePro;
|
||||
}
|
||||
}
|
||||
catch (Exception)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
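A short, hedged example of calling the format sniffing above (the path is hypothetical; a return of 0 means the format could not be determined):

using SabreTools.Helper.Data;
using SabreTools.Helper.Tools;

DatFormat format = FileTools.GetDatFormat(@"C:\dats\example.xml");
if (format == 0)
{
    // Unknown extension, unreadable file, or parse error
}
else if (format == DatFormat.Logiqx)
{
    // Treat as a Logiqx-style XML DAT
}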
||||
/// <summary>
|
||||
/// Get all empty folders within a root folder
|
||||
/// </summary>
|
||||
/// <param name="root">Root directory to parse</param>
|
||||
/// <returns>IEnumerable containing all directories that are empty, an empty enumerable if the root is empty, null otherwise</returns>
|
||||
public static IEnumerable<string> GetEmptyDirectories(string root)
|
||||
{
|
||||
// Check if the root exists first
|
||||
if (!Directory.Exists(root))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
// If it does and it is empty, return a blank enumerable
|
||||
if (Directory.EnumerateFileSystemEntries(root, "*", SearchOption.AllDirectories).Count() == 0)
|
||||
{
|
||||
return new List<string>();
|
||||
}
|
||||
|
||||
// Otherwise, get the complete list
|
||||
return Directory.EnumerateDirectories(root, "*", SearchOption.AllDirectories)
|
||||
.Where(dir => Directory.EnumerateFileSystemEntries(dir, "*", SearchOption.AllDirectories).Count() == 0);
|
||||
}
|
||||
|
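A brief usage sketch for the helper above, assuming the caller wants to prune the empty folders it finds (the root path is hypothetical):

using System.Collections.Generic;
using SabreTools.Helper.Tools;

// GetEmptyDirectories returns null when the root itself is missing
foreach (string dir in FileTools.GetEmptyDirectories(@"C:\roms") ?? new List<string>())
{
    FileTools.TryDeleteDirectory(dir);
}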
||||
/// <summary>
|
||||
/// Retrieve file information for a single file
|
||||
/// </summary>
|
||||
/// <param name="input">Filename to get information from</param>
|
||||
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated (defaults to none)</param>
|
||||
/// <param name="offset">Set a >0 number for getting hash for part of the file, 0 otherwise (default)</param>
|
||||
/// <param name="date">True if the file Date should be included, false otherwise (default)</param>
|
||||
/// <param name="header">Populated string representing the name of the skipper to use, a blank string to use the first available checker, null otherwise</param>
|
||||
/// <returns>Populated RomData object if success, empty one on error</returns>
|
||||
public static Rom GetFileInfo(string input, Hash omitFromScan = 0x0,
|
||||
long offset = 0, bool date = false, string header = null)
|
||||
{
|
||||
// Add safeguard if file doesn't exist
|
||||
if (!File.Exists(input))
|
||||
{
|
||||
return new Rom();
|
||||
}
|
||||
|
||||
// Get the information from the file stream
|
||||
Rom rom = new Rom();
|
||||
if (header != null)
|
||||
{
|
||||
SkipperRule rule = Skipper.GetMatchingRule(input, Path.GetFileNameWithoutExtension(header));
|
||||
|
||||
// If there's a match, get the new information from the stream
|
||||
if (rule.Tests != null && rule.Tests.Count != 0)
|
||||
{
|
||||
// Create the input and output streams
|
||||
MemoryStream outputStream = new MemoryStream();
|
||||
FileStream inputStream = FileTools.TryOpenRead(input);
|
||||
|
||||
// Transform the stream and get the information from it
|
||||
rule.TransformStream(inputStream, outputStream, keepReadOpen: false, keepWriteOpen: true);
|
||||
rom = GetStreamInfo(outputStream, outputStream.Length, omitFromScan: omitFromScan, keepReadOpen: false);
|
||||
|
||||
// Dispose of the streams
|
||||
outputStream.Dispose();
|
||||
inputStream.Dispose();
|
||||
}
|
||||
// Otherwise, just get the info
|
||||
else
|
||||
{
|
||||
long length = new FileInfo(input).Length;
|
||||
rom = GetStreamInfo(TryOpenRead(input), length, omitFromScan, offset, false);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
long length = new FileInfo(input).Length;
|
||||
rom = GetStreamInfo(TryOpenRead(input), length, omitFromScan, offset, false);
|
||||
}
|
||||
|
||||
// Add unique data from the file
|
||||
rom.Name = Path.GetFileName(input);
|
||||
rom.Date = (date ? new FileInfo(input).LastWriteTime.ToString("yyyy/MM/dd HH:mm:ss") : "");
|
||||
|
||||
return rom;
|
||||
}
|
||||
|
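A hedged example of the hashing helper above; passing a skipper name asks it to hash the file as if its copier header were removed (the path and skipper name are illustrative):

using SabreTools.Helper.Dats;
using SabreTools.Helper.Tools;

// Hash the file as-is, including the file's last-write date
Rom raw = FileTools.GetFileInfo(@"C:\roms\game.nes", date: true);

// Hash the same file with its copier header skipped, if the "nes" skipper matches
Rom deheadered = FileTools.GetFileInfo(@"C:\roms\game.nes", header: "nes");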
||||
/// <summary>
|
||||
/// Retrieve a list of files from a directory recursively in proper order
|
||||
/// </summary>
|
||||
/// <param name="directory">Directory to parse</param>
|
||||
/// <param name="infiles">List representing existing files</param>
|
||||
/// <returns>List with all new files</returns>
|
||||
public static List<string> RetrieveFiles(string directory, List<string> infiles)
|
||||
{
|
||||
// Take care of the files in the top directory
|
||||
List<string> toadd = Directory.EnumerateFiles(directory, "*", SearchOption.TopDirectoryOnly).ToList();
|
||||
toadd.Sort(new NaturalComparer());
|
||||
infiles.AddRange(toadd);
|
||||
|
||||
// Then recurse through and add from the directories
|
||||
List<string> dirs = Directory.EnumerateDirectories(directory, "*", SearchOption.TopDirectoryOnly).ToList();
|
||||
dirs = Style.OrderByAlphaNumeric(dirs, s => s).ToList();
|
||||
foreach (string dir in dirs)
|
||||
{
|
||||
infiles = RetrieveFiles(dir, infiles);
|
||||
}
|
||||
|
||||
// Return the new list
|
||||
return infiles;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region File Manipulation
|
||||
|
||||
/// <summary>
|
||||
/// Add an arbitrary number of bytes to the inputted file
|
||||
/// </summary>
|
||||
/// <param name="input">File to be appended to</param>
|
||||
/// <param name="output">Outputted file</param>
|
||||
/// <param name="bytesToAddToHead">String representing bytes to be added to head of file</param>
|
||||
/// <param name="bytesToAddToTail">String representing bytes to be added to tail of file</param>
|
||||
public static void AppendBytesToFile(string input, string output, string bytesToAddToHead, string bytesToAddToTail)
|
||||
{
|
||||
// Source: http://stackoverflow.com/questions/311165/how-do-you-convert-byte-array-to-hexadecimal-string-and-vice-versa
|
||||
byte[] bytesToAddToHeadArray = new byte[bytesToAddToHead.Length / 2];
|
||||
for (int i = 0; i < bytesToAddToHead.Length; i += 2)
|
||||
{
|
||||
bytesToAddToHeadArray[i / 2] = Convert.ToByte(bytesToAddToHead.Substring(i, 2), 16);
|
||||
}
|
||||
byte[] bytesToAddToTailArray = new byte[bytesToAddToTail.Length / 2];
|
||||
for (int i = 0; i < bytesToAddToTail.Length; i += 2)
|
||||
{
|
||||
bytesToAddToTailArray[i / 2] = Convert.ToByte(bytesToAddToTail.Substring(i, 2), 16);
|
||||
}
|
||||
|
||||
AppendBytesToFile(input, output, bytesToAddToHeadArray, bytesToAddToTailArray);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Add an arbitrary number of bytes to the inputted file
|
||||
/// </summary>
|
||||
/// <param name="input">File to be appended to</param>
|
||||
/// <param name="output">Outputted file</param>
|
||||
/// <param name="bytesToAddToHead">Bytes to be added to head of file</param>
|
||||
/// <param name="bytesToAddToTail">Bytes to be added to tail of file</param>
|
||||
public static void AppendBytesToFile(string input, string output, byte[] bytesToAddToHead, byte[] bytesToAddToTail)
|
||||
{
|
||||
// If any of the inputs are invalid, skip
|
||||
if (!File.Exists(input))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
FileStream fsr = TryOpenRead(input);
|
||||
FileStream fsw = TryOpenWrite(output);
|
||||
|
||||
AppendBytesToStream(fsr, fsw, bytesToAddToHead, bytesToAddToTail);
|
||||
|
||||
fsr.Dispose();
|
||||
fsw.Dispose();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Cleans out the temporary directory
|
||||
/// </summary>
|
||||
/// <param name="dirname">Name of the directory to clean out</param>
|
||||
public static void CleanDirectory(string dirname)
|
||||
{
|
||||
foreach (string file in Directory.EnumerateFiles(dirname, "*", SearchOption.TopDirectoryOnly))
|
||||
{
|
||||
TryDeleteFile(file);
|
||||
}
|
||||
foreach (string dir in Directory.EnumerateDirectories(dirname, "*", SearchOption.TopDirectoryOnly))
|
||||
{
|
||||
TryDeleteDirectory(dir);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Detect header skipper compliance and create an output file
|
||||
/// </summary>
|
||||
/// <param name="file">Name of the file to be parsed</param>
|
||||
/// <param name="outDir">Output directory to write the file to, empty means the same directory as the input file</param>
|
||||
/// <returns>True if the output file was created, false otherwise</returns>
|
||||
public static bool DetectSkipperAndTransform(string file, string outDir)
|
||||
{
|
||||
// Create the output directory if it doesn't exist
|
||||
if (outDir != "" && !Directory.Exists(outDir))
|
||||
{
|
||||
Directory.CreateDirectory(outDir);
|
||||
}
|
||||
|
||||
Globals.Logger.User("\nGetting skipper information for '" + file + "'");
|
||||
|
||||
// Get the skipper rule that matches the file, if any
|
||||
SkipperRule rule = Skipper.GetMatchingRule(file, "");
|
||||
|
||||
// If we have an empty rule, return false
|
||||
if (rule.Tests == null || rule.Tests.Count == 0 || rule.Operation != HeaderSkipOperation.None)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
Globals.Logger.User("File has a valid copier header");
|
||||
|
||||
// Get the header bytes from the file first
|
||||
string hstr = string.Empty;
|
||||
BinaryReader br = new BinaryReader(TryOpenRead(file));
|
||||
|
||||
// Extract the header as a string for the database
|
||||
byte[] hbin = br.ReadBytes((int)rule.StartOffset);
|
||||
for (int i = 0; i < (int)rule.StartOffset; i++)
|
||||
{
|
||||
hstr += BitConverter.ToString(new byte[] { hbin[i] });
|
||||
}
|
||||
br.Dispose();
|
||||
|
||||
// Apply the rule to the file
|
||||
string newfile = (outDir == "" ? Path.GetFullPath(file) + ".new" : Path.Combine(outDir, Path.GetFileName(file)));
|
||||
rule.TransformFile(file, newfile);
|
||||
|
||||
// If the output file doesn't exist, return false
|
||||
if (!File.Exists(newfile))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// Now add the information to the database if it's not already there
|
||||
Rom rom = GetFileInfo(newfile);
|
||||
DatabaseTools.AddHeaderToDatabase(hstr, rom.SHA1, rule.SourceFile);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Retrieve a list of just files from inputs
|
||||
/// </summary>
|
||||
/// <param name="inputs">List of strings representing directories and files</param>
|
||||
/// <param name="appendparent">True if the parent name should be appended after the special character "¬", false otherwise</param>
|
||||
/// <returns>List of strings representing just files from the inputs</returns>
|
||||
public static List<string> GetOnlyFilesFromInputs(List<string> inputs, bool appendparent = false)
|
||||
{
|
||||
List<string> outputs = new List<string>();
|
||||
foreach (string input in inputs)
|
||||
{
|
||||
if (Directory.Exists(input))
|
||||
{
|
||||
List<string> files = FileTools.RetrieveFiles(input, new List<string>());
|
||||
|
||||
// Make sure the files in the directory are ordered correctly
|
||||
files = Style.OrderByAlphaNumeric(files, s => s).ToList();
|
||||
foreach (string file in files)
|
||||
{
|
||||
try
|
||||
{
|
||||
outputs.Add(Path.GetFullPath(file) + (appendparent ? "¬" + Path.GetFullPath(input) : ""));
|
||||
}
|
||||
catch (PathTooLongException)
|
||||
{
|
||||
Globals.Logger.Warning("The path for " + file + " was too long");
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Globals.Logger.Error(ex.ToString());
|
||||
}
|
||||
}
|
||||
}
|
||||
else if (File.Exists(input))
|
||||
{
|
||||
try
|
||||
{
|
||||
outputs.Add(Path.GetFullPath(input) + (appendparent ? "¬" + Path.GetFullPath(input) : ""));
|
||||
}
|
||||
catch (PathTooLongException)
|
||||
{
|
||||
Globals.Logger.Warning("The path for " + input + " was too long");
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Globals.Logger.Error(ex.ToString());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return outputs;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Get the XmlTextReader associated with a file, if possible
|
||||
/// </summary>
|
||||
/// <param name="filename">Name of the file to be parsed</param>
|
||||
/// <returns>The XmlTextReader representing the (possibly converted) file, null otherwise</returns>
|
||||
public static XmlReader GetXmlTextReader(string filename)
|
||||
{
|
||||
Globals.Logger.Verbose("Attempting to read file: \"" + filename + "\"");
|
||||
|
||||
// Check if file exists
|
||||
if (!File.Exists(filename))
|
||||
{
|
||||
Globals.Logger.Warning("File '" + filename + "' could not read from!");
|
||||
return null;
|
||||
}
|
||||
|
||||
XmlReader xtr = XmlReader.Create(filename, new XmlReaderSettings
|
||||
{
|
||||
CheckCharacters = false,
|
||||
DtdProcessing = DtdProcessing.Ignore,
|
||||
IgnoreComments = true,
|
||||
IgnoreWhitespace = true,
|
||||
ValidationFlags = XmlSchemaValidationFlags.None,
|
||||
ValidationType = ValidationType.None,
|
||||
});
|
||||
return xtr;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Detect and replace header(s) to the given file
|
||||
/// </summary>
|
||||
/// <param name="file">Name of the file to be parsed</param>
|
||||
/// <param name="outDir">Output directory to write the file to, empty means the same directory as the input file</param>
|
||||
/// <returns>True if a header was found and appended, false otherwise</returns>
|
||||
public static bool RestoreHeader(string file, string outDir)
|
||||
{
|
||||
// Create the output directory if it doesn't exist
|
||||
if (outDir != "" && !Directory.Exists(outDir))
|
||||
{
|
||||
Directory.CreateDirectory(outDir);
|
||||
}
|
||||
|
||||
// First, get the SHA-1 hash of the file
|
||||
Rom rom = GetFileInfo(file);
|
||||
|
||||
// Retrieve a list of all related headers from the database
|
||||
List<string> headers = DatabaseTools.RetrieveHeadersFromDatabase(rom.SHA1);
|
||||
|
||||
// If we have nothing retrieved, we return false
|
||||
if (headers.Count == 0)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// Now loop through and create the reheadered files, if possible
|
||||
for (int i = 0; i < headers.Count; i++)
|
||||
{
|
||||
Globals.Logger.User("Creating reheadered file: " +
|
||||
(outDir == "" ? Path.GetFullPath(file) + ".new" : Path.Combine(outDir, Path.GetFileName(file))) + i);
|
||||
AppendBytesToFile(file,
|
||||
(outDir == "" ? Path.GetFullPath(file) + ".new" : Path.Combine(outDir, Path.GetFileName(file))) + i, headers[i], string.Empty);
|
||||
Globals.Logger.User("Reheadered file created!");
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
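Putting DetectSkipperAndTransform and RestoreHeader together, a hedged sketch of the dehead/rehead round trip (paths hypothetical):

using SabreTools.Helper.Tools;

// Strip a detected copier header and record it in the headerer database
bool stripped = FileTools.DetectSkipperAndTransform(@"C:\roms\game.nes", @"C:\deheadered");

// Later, write out one reheadered copy per header known for the deheadered file
bool restored = FileTools.RestoreHeader(@"C:\deheadered\game.nes", @"C:\reheadered");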
||||
/// <summary>
|
||||
/// Try to create a file for write, optionally throwing the error
|
||||
/// </summary>
|
||||
/// <param name="file">Name of the file to create</param>
|
||||
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
|
||||
/// <returns>An opened stream representing the file on success, null otherwise</returns>
|
||||
public static FileStream TryCreate(string file, bool throwOnError = false)
|
||||
{
|
||||
// Now wrap opening the file
|
||||
try
|
||||
{
|
||||
return File.Open(file, FileMode.Create, FileAccess.Write, FileShare.ReadWrite);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
if (throwOnError)
|
||||
{
|
||||
throw ex;
|
||||
}
|
||||
else
|
||||
{
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Try to safely delete a directory, optionally throwing the error
|
||||
/// </summary>
|
||||
/// <param name="file">Name of the directory to delete</param>
|
||||
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
|
||||
/// <returns>True if the file didn't exist or could be deleted, false otherwise</returns>
|
||||
public static bool TryDeleteDirectory(string file, bool throwOnError = false)
|
||||
{
|
||||
// Check if the file exists first
|
||||
if (!Directory.Exists(file))
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
// Now wrap deleting the file
|
||||
try
|
||||
{
|
||||
Directory.Delete(file, true);
|
||||
return true;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
if (throwOnError)
|
||||
{
|
||||
throw ex;
|
||||
}
|
||||
else
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Try to safely delete a file, optionally throwing the error
|
||||
/// </summary>
|
||||
/// <param name="file">Name of the file to delete</param>
|
||||
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
|
||||
/// <returns>True if the file didn't exist or could be deleted, false otherwise</returns>
|
||||
public static bool TryDeleteFile(string file, bool throwOnError = false)
|
||||
{
|
||||
// Check if the file exists first
|
||||
if (!File.Exists(file))
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
// Now wrap deleting the file
|
||||
try
|
||||
{
|
||||
File.Delete(file);
|
||||
return true;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
if (throwOnError)
|
||||
{
|
||||
throw ex;
|
||||
}
|
||||
else
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Try to open a file for read, optionally throwing the error
|
||||
/// </summary>
|
||||
/// <param name="file">Name of the file to open</param>
|
||||
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
|
||||
/// <returns>An opened stream representing the file on success, null otherwise</returns>
|
||||
public static FileStream TryOpenRead(string file, bool throwOnError = false)
|
||||
{
|
||||
// Check if the file exists first
|
||||
if (!File.Exists(file))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
// Now wrap opening the file
|
||||
try
|
||||
{
|
||||
return File.Open(file, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
if (throwOnError)
|
||||
{
|
||||
throw ex;
|
||||
}
|
||||
else
|
||||
{
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Try to open a file for read/write, optionally throwing the error
|
||||
/// </summary>
|
||||
/// <param name="file">Name of the file to open</param>
|
||||
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
|
||||
/// <returns>An opened stream representing the file on success, null otherwise</returns>
|
||||
public static FileStream TryOpenReadWrite(string file, bool throwOnError = false)
|
||||
{
|
||||
// Check if the file exists first
|
||||
if (!File.Exists(file))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
// Now wrap opening the file
|
||||
try
|
||||
{
|
||||
return File.Open(file, FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
if (throwOnError)
|
||||
{
|
||||
throw ex;
|
||||
}
|
||||
else
|
||||
{
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Try to open a file for write, optionally throwing the error
|
||||
/// </summary>
|
||||
/// <param name="file">Name of the file to open</param>
|
||||
/// <param name="throwOnError">True if the error that is thrown should be thrown back to the caller, false otherwise</param>
|
||||
/// <returns>An opened stream representing the file on success, null otherwise</returns>
|
||||
public static FileStream TryOpenWrite(string file, bool throwOnError = false)
|
||||
{
|
||||
// Check if the file exists first
|
||||
if (!File.Exists(file))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
// Now wrap opening the file
|
||||
try
|
||||
{
|
||||
return File.Open(file, FileMode.Open, FileAccess.Write, FileShare.ReadWrite);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
if (throwOnError)
|
||||
{
|
||||
throw ex;
|
||||
}
|
||||
else
|
||||
{
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Stream Information
|
||||
|
||||
/// <summary>
|
||||
/// Retrieve file information for a single stream
|
||||
/// </summary>
|
||||
/// <param name="input">Filename to get information from</param>
|
||||
/// <param name="size">Size of the input stream</param>
|
||||
/// <param name="omitFromScan">Hash flag saying what hashes should not be calculated (defaults to none)</param>
|
||||
/// <param name="offset">Set a >0 number for getting hash for part of the file, 0 otherwise (default)</param>
|
||||
/// <param name="keepReadOpen">True if the underlying read stream should be kept open, false otherwise</param>
|
||||
/// <returns>Populated RomData object if success, empty one on error</returns>
|
||||
public static Rom GetStreamInfo(Stream input, long size, Hash omitFromScan = 0x0,
|
||||
long offset = 0, bool keepReadOpen = false)
|
||||
{
|
||||
Rom rom = new Rom
|
||||
{
|
||||
Type = ItemType.Rom,
|
||||
Size = size,
|
||||
CRC = string.Empty,
|
||||
MD5 = string.Empty,
|
||||
SHA1 = string.Empty,
|
||||
SHA256 = string.Empty,
|
||||
SHA384 = string.Empty,
|
||||
SHA512 = string.Empty,
|
||||
};
|
||||
|
||||
try
|
||||
{
|
||||
// Initialize the hashers
|
||||
OptimizedCRC crc = new OptimizedCRC();
|
||||
MD5 md5 = MD5.Create();
|
||||
SHA1 sha1 = SHA1.Create();
|
||||
SHA256 sha256 = SHA256.Create();
|
||||
SHA384 sha384 = SHA384.Create();
|
||||
SHA512 sha512 = SHA512.Create();
|
||||
xxHash xxHash = new xxHash();
|
||||
xxHash.Init();
|
||||
|
||||
// Seek to the starting position, if one is set
|
||||
if (offset < 0)
|
||||
{
|
||||
input.Seek(offset, SeekOrigin.End);
|
||||
}
|
||||
else
|
||||
{
|
||||
input.Seek(offset, SeekOrigin.Begin);
|
||||
}
|
||||
|
||||
byte[] buffer = new byte[8 * 1024];
|
||||
int read;
|
||||
while ((read = input.Read(buffer, 0, buffer.Length)) > 0)
|
||||
{
|
||||
crc.Update(buffer, 0, read);
|
||||
if ((omitFromScan & Hash.MD5) == 0)
|
||||
{
|
||||
md5.TransformBlock(buffer, 0, read, buffer, 0);
|
||||
}
|
||||
if ((omitFromScan & Hash.SHA1) == 0)
|
||||
{
|
||||
sha1.TransformBlock(buffer, 0, read, buffer, 0);
|
||||
}
|
||||
if ((omitFromScan & Hash.SHA256) == 0)
|
||||
{
|
||||
sha256.TransformBlock(buffer, 0, read, buffer, 0);
|
||||
}
|
||||
if ((omitFromScan & Hash.SHA384) == 0)
|
||||
{
|
||||
sha384.TransformBlock(buffer, 0, read, buffer, 0);
|
||||
}
|
||||
if ((omitFromScan & Hash.SHA512) == 0)
|
||||
{
|
||||
sha512.TransformBlock(buffer, 0, read, buffer, 0);
|
||||
}
|
||||
if ((omitFromScan & Hash.xxHash) == 0)
|
||||
{
|
||||
xxHash.Update(buffer, read);
|
||||
}
|
||||
}
|
||||
|
||||
crc.Update(buffer, 0, 0);
|
||||
rom.CRC = crc.Value.ToString("X8").ToLowerInvariant();
|
||||
|
||||
if ((omitFromScan & Hash.MD5) == 0)
|
||||
{
|
||||
md5.TransformFinalBlock(buffer, 0, 0);
|
||||
rom.MD5 = BitConverter.ToString(md5.Hash).Replace("-", "").ToLowerInvariant();
|
||||
}
|
||||
if ((omitFromScan & Hash.SHA1) == 0)
|
||||
{
|
||||
sha1.TransformFinalBlock(buffer, 0, 0);
|
||||
rom.SHA1 = BitConverter.ToString(sha1.Hash).Replace("-", "").ToLowerInvariant();
|
||||
}
|
||||
if ((omitFromScan & Hash.SHA256) == 0)
|
||||
{
|
||||
sha256.TransformFinalBlock(buffer, 0, 0);
|
||||
rom.SHA256 = BitConverter.ToString(sha256.Hash).Replace("-", "").ToLowerInvariant();
|
||||
}
|
||||
if ((omitFromScan & Hash.SHA384) == 0)
|
||||
{
|
||||
sha384.TransformFinalBlock(buffer, 0, 0);
|
||||
rom.SHA384 = BitConverter.ToString(sha384.Hash).Replace("-", "").ToLowerInvariant();
|
||||
}
|
||||
if ((omitFromScan & Hash.SHA512) == 0)
|
||||
{
|
||||
sha512.TransformFinalBlock(buffer, 0, 0);
|
||||
rom.SHA512 = BitConverter.ToString(sha512.Hash).Replace("-", "").ToLowerInvariant();
|
||||
}
|
||||
if ((omitFromScan & Hash.xxHash) == 0)
|
||||
{
|
||||
//rom.xxHash = xxHash.Digest().ToString("X8").ToLowerInvariant();
|
||||
}
|
||||
|
||||
// Dispose of the hashers
|
||||
crc.Dispose();
|
||||
md5.Dispose();
|
||||
sha1.Dispose();
|
||||
sha256.Dispose();
|
||||
sha384.Dispose();
|
||||
sha512.Dispose();
|
||||
}
|
||||
catch (IOException)
|
||||
{
|
||||
return new Rom();
|
||||
}
|
||||
finally
|
||||
{
|
||||
// Seek to the beginning of the stream
|
||||
input.Seek(0, SeekOrigin.Begin);
|
||||
|
||||
if (!keepReadOpen)
|
||||
{
|
||||
input.Dispose();
|
||||
}
|
||||
}
|
||||
|
||||
return rom;
|
||||
}
|
||||
|
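A small, hedged example of hashing an in-memory stream with the slower hashes skipped via omitFromScan (Hash is assumed to be a flags enum, as its use above implies):

using System.IO;
using System.Text;
using SabreTools.Helper.Data;
using SabreTools.Helper.Dats;
using SabreTools.Helper.Tools;

byte[] data = Encoding.ASCII.GetBytes("hello world");
MemoryStream ms = new MemoryStream(data);

// Skip SHA-384 and SHA-512; keep the stream open so it can be reused afterwards
Rom rom = FileTools.GetStreamInfo(ms, ms.Length,
    omitFromScan: Hash.SHA384 | Hash.SHA512, keepReadOpen: true);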
||||
#endregion
|
||||
|
||||
#region Stream Manipulation
|
||||
|
||||
/// <summary>
|
||||
/// Add an arbitrary number of bytes to the inputted stream
|
||||
/// </summary>
|
||||
/// <param name="input">Stream to be appended to</param>
|
||||
/// <param name="output">Outputted stream</param>
|
||||
/// <param name="bytesToAddToHead">Bytes to be added to head of stream</param>
|
||||
/// <param name="bytesToAddToTail">Bytes to be added to tail of stream</param>
|
||||
public static void AppendBytesToStream(Stream input, Stream output, byte[] bytesToAddToHead, byte[] bytesToAddToTail)
|
||||
{
|
||||
BinaryReader br = new BinaryReader(input);
|
||||
BinaryWriter bw = new BinaryWriter(output);
|
||||
|
||||
if (bytesToAddToHead.Count() > 0)
|
||||
{
|
||||
bw.Write(bytesToAddToHead);
|
||||
}
|
||||
|
||||
int bufferSize = 1024;
|
||||
|
||||
// Now read the file in chunks and write out
|
||||
byte[] buffer = new byte[bufferSize];
|
||||
while (br.BaseStream.Position <= (br.BaseStream.Length - bufferSize))
|
||||
{
|
||||
buffer = br.ReadBytes(bufferSize);
|
||||
bw.Write(buffer);
|
||||
}
|
||||
|
||||
// For the final chunk, if any, write out only that number of bytes
|
||||
int length = (int)(br.BaseStream.Length - br.BaseStream.Position);
|
||||
buffer = new byte[length];
|
||||
buffer = br.ReadBytes(length);
|
||||
bw.Write(buffer);
|
||||
|
||||
if (bytesToAddToTail.Count() > 0)
|
||||
{
|
||||
bw.Write(bytesToAddToTail);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
970
SabreTools.Library/Tools/Style.cs
Normal file
@@ -0,0 +1,970 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Reflection;
|
||||
using System.Text;
|
||||
using System.Text.RegularExpressions;
|
||||
using System.Web;
|
||||
|
||||
using SabreTools.Helper.Data;
|
||||
using SabreTools.Helper.Dats;
|
||||
|
||||
#if MONO
|
||||
using System.IO;
|
||||
#else
|
||||
using Alphaleonis.Win32.Filesystem;
|
||||
|
||||
using FileStream = System.IO.FileStream;
|
||||
#endif
|
||||
|
||||
namespace SabreTools.Helper.Tools
|
||||
{
|
||||
/// <summary>
|
||||
/// Include character normalization and replacement mappings
|
||||
/// </summary>
|
||||
public static class Style
|
||||
{
|
||||
#region DAT Cleaning
|
||||
|
||||
/// <summary>
|
||||
/// Clean a game (or rom) name to the WoD standard
|
||||
/// </summary>
|
||||
/// <param name="game">Name of the game to be cleaned</param>
|
||||
/// <returns>The cleaned name</returns>
|
||||
public static string CleanGameName(string game)
|
||||
{
|
||||
// Run the name through the filters to make sure that it's correct
|
||||
game = NormalizeChars(game);
|
||||
game = RussianToLatin(game);
|
||||
game = SearchPattern(game);
|
||||
|
||||
game = new Regex(@"(([[(].*[\)\]] )?([^([]+))").Match(game).Groups[1].Value;
|
||||
game = game.TrimStart().TrimEnd();
|
||||
return game;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Clean a game (or rom) name to the WoD standard
|
||||
/// </summary>
|
||||
/// <param name="game">Array representing the path to be cleaned</param>
|
||||
/// <returns>The cleaned name</returns>
|
||||
public static string CleanGameName(string[] game)
|
||||
{
|
||||
game[game.Length - 1] = CleanGameName(game[game.Length - 1]);
|
||||
string outgame = String.Join(Path.DirectorySeparatorChar.ToString(), game);
|
||||
outgame = outgame.TrimStart().TrimEnd();
|
||||
return outgame;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Clean a hash string and pad to the correct size
|
||||
/// </summary>
|
||||
/// <param name="hash">Hash string to sanitize</param>
|
||||
/// <param name="padding">Amount of characters to pad to</param>
|
||||
/// <returns>Cleaned string</returns>
|
||||
public static string CleanHashData(string hash, int padding)
|
||||
{
|
||||
// If we have a known blank hash, return blank
|
||||
if (string.IsNullOrEmpty(hash) || hash == "-" || hash == "_")
|
||||
{
|
||||
return "";
|
||||
}
|
||||
|
||||
// Check to see if it's a "hex" hash
|
||||
hash = hash.Trim().Replace("0x", "");
|
||||
|
||||
// If we have a blank hash now, return blank
|
||||
if (string.IsNullOrEmpty(hash))
|
||||
{
|
||||
return "";
|
||||
}
|
||||
|
||||
// If the hash is shorter than the required length, pad it
|
||||
if (hash.Length < padding)
|
||||
{
|
||||
hash = hash.PadLeft(padding, '0');
|
||||
}
|
||||
// If the hash is longer than the required length, it's invalid
|
||||
else if (hash.Length > padding)
|
||||
{
|
||||
return "";
|
||||
}
|
||||
|
||||
// Now normalize the hash
|
||||
hash = hash.ToLowerInvariant();
|
||||
|
||||
// Otherwise, make sure that every character is a proper match
|
||||
for (int i = 0; i < hash.Length; i++)
|
||||
{
|
||||
if ((hash[i] < '0' || hash[i] > '9') && (hash[i] < 'a' || hash[i] > 'f'))
|
||||
{
|
||||
hash = "";
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return hash;
|
||||
}
|
||||
|
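Two worked examples of the cleaning rules above (illustrative only):

using SabreTools.Helper.Tools;

// "0x" is stripped, the value is left-padded to 8 hex digits and lowercased
string crc = Style.CleanHashData("0x3E1", 8);   // "000003e1"

// Non-hex characters (or a value longer than the padding) yield an empty string
string bad = Style.CleanHashData("xyz", 8);     // ""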
||||
/// <summary>
|
||||
/// Generate a proper outfile name based on a DAT and output directory
|
||||
/// </summary>
|
||||
/// <param name="outDir">Output directory</param>
|
||||
/// <param name="datdata">DAT information</param>
|
||||
/// <param name="overwrite">True if we ignore existing files (default), false otherwise</param>
|
||||
/// <returns>Dictionary of output formats mapped to file names</returns>
|
||||
public static Dictionary<DatFormat, string> CreateOutfileNames(string outDir, DatFile datdata, bool overwrite = true)
|
		{
			// Create the output dictionary
			Dictionary<DatFormat, string> outfileNames = new Dictionary<DatFormat, string>();

			// Double check the outDir for the end delim
			if (!outDir.EndsWith(Path.DirectorySeparatorChar.ToString()))
			{
				outDir += Path.DirectorySeparatorChar;
			}

			// Get the extensions from the output type

			// AttractMode
			if ((datdata.DatFormat & DatFormat.AttractMode) != 0)
			{
				outfileNames.Add(DatFormat.AttractMode, CreateOutfileNamesHelper(outDir, ".txt", datdata, overwrite));
			}

			// ClrMamePro
			if ((datdata.DatFormat & DatFormat.ClrMamePro) != 0)
			{
				outfileNames.Add(DatFormat.ClrMamePro, CreateOutfileNamesHelper(outDir, ".dat", datdata, overwrite));
			}

			// CSV
			if ((datdata.DatFormat & DatFormat.CSV) != 0)
			{
				outfileNames.Add(DatFormat.CSV, CreateOutfileNamesHelper(outDir, ".csv", datdata, overwrite));
			}

			// DOSCenter
			if ((datdata.DatFormat & DatFormat.DOSCenter) != 0
				&& (datdata.DatFormat & DatFormat.ClrMamePro) == 0
				&& (datdata.DatFormat & DatFormat.RomCenter) == 0)
			{
				outfileNames.Add(DatFormat.DOSCenter, CreateOutfileNamesHelper(outDir, ".dat", datdata, overwrite));
			}
			if ((datdata.DatFormat & DatFormat.DOSCenter) != 0
				&& ((datdata.DatFormat & DatFormat.ClrMamePro) != 0
					|| (datdata.DatFormat & DatFormat.RomCenter) != 0))
			{
				outfileNames.Add(DatFormat.DOSCenter, CreateOutfileNamesHelper(outDir, ".dc.dat", datdata, overwrite));
			}

			// Logiqx XML
			if ((datdata.DatFormat & DatFormat.Logiqx) != 0)
			{
				outfileNames.Add(DatFormat.Logiqx, CreateOutfileNamesHelper(outDir, ".xml", datdata, overwrite));
			}

			// Missfile
			if ((datdata.DatFormat & DatFormat.MissFile) != 0
				&& (datdata.DatFormat & DatFormat.AttractMode) == 0)
			{
				outfileNames.Add(DatFormat.MissFile, CreateOutfileNamesHelper(outDir, ".txt", datdata, overwrite));
			}
			if ((datdata.DatFormat & DatFormat.MissFile) != 0
				&& (datdata.DatFormat & DatFormat.AttractMode) != 0)
			{
				outfileNames.Add(DatFormat.MissFile, CreateOutfileNamesHelper(outDir, ".miss.txt", datdata, overwrite));
			}

			// OfflineList
			if (((datdata.DatFormat & DatFormat.OfflineList) != 0)
				&& (datdata.DatFormat & DatFormat.Logiqx) == 0
				&& (datdata.DatFormat & DatFormat.SabreDat) == 0
				&& (datdata.DatFormat & DatFormat.SoftwareList) == 0)
			{
				outfileNames.Add(DatFormat.OfflineList, CreateOutfileNamesHelper(outDir, ".xml", datdata, overwrite));
			}
			if (((datdata.DatFormat & DatFormat.OfflineList) != 0
				&& ((datdata.DatFormat & DatFormat.Logiqx) != 0
					|| (datdata.DatFormat & DatFormat.SabreDat) != 0
					|| (datdata.DatFormat & DatFormat.SoftwareList) != 0)))
			{
				outfileNames.Add(DatFormat.OfflineList, CreateOutfileNamesHelper(outDir, ".ol.xml", datdata, overwrite));
			}

			// Redump MD5
			if ((datdata.DatFormat & DatFormat.RedumpMD5) != 0)
			{
				outfileNames.Add(DatFormat.RedumpMD5, CreateOutfileNamesHelper(outDir, ".md5", datdata, overwrite));
			}

			// Redump SFV
			if ((datdata.DatFormat & DatFormat.RedumpSFV) != 0)
			{
				outfileNames.Add(DatFormat.RedumpSFV, CreateOutfileNamesHelper(outDir, ".sfv", datdata, overwrite));
			}

			// Redump SHA-1
			if ((datdata.DatFormat & DatFormat.RedumpSHA1) != 0)
			{
				outfileNames.Add(DatFormat.RedumpSHA1, CreateOutfileNamesHelper(outDir, ".sha1", datdata, overwrite));
			}

			// Redump SHA-256
			if ((datdata.DatFormat & DatFormat.RedumpSHA256) != 0)
			{
				outfileNames.Add(DatFormat.RedumpSHA256, CreateOutfileNamesHelper(outDir, ".sha256", datdata, overwrite));
			}

			// RomCenter
			if ((datdata.DatFormat & DatFormat.RomCenter) != 0
				&& (datdata.DatFormat & DatFormat.ClrMamePro) == 0)
			{
				outfileNames.Add(DatFormat.RomCenter, CreateOutfileNamesHelper(outDir, ".dat", datdata, overwrite));
			}
			if ((datdata.DatFormat & DatFormat.RomCenter) != 0
				&& (datdata.DatFormat & DatFormat.ClrMamePro) != 0)
			{
				outfileNames.Add(DatFormat.RomCenter, CreateOutfileNamesHelper(outDir, ".rc.dat", datdata, overwrite));
			}

			// SabreDAT
			if ((datdata.DatFormat & DatFormat.SabreDat) != 0 && (datdata.DatFormat & DatFormat.Logiqx) == 0)
			{
				outfileNames.Add(DatFormat.SabreDat, CreateOutfileNamesHelper(outDir, ".xml", datdata, overwrite));
			}
			if ((datdata.DatFormat & DatFormat.SabreDat) != 0 && (datdata.DatFormat & DatFormat.Logiqx) != 0)
			{
				outfileNames.Add(DatFormat.SabreDat, CreateOutfileNamesHelper(outDir, ".sd.xml", datdata, overwrite));
			}

			// Software List
			if ((datdata.DatFormat & DatFormat.SoftwareList) != 0
				&& (datdata.DatFormat & DatFormat.Logiqx) == 0
				&& (datdata.DatFormat & DatFormat.SabreDat) == 0)
			{
				outfileNames.Add(DatFormat.SoftwareList, CreateOutfileNamesHelper(outDir, ".xml", datdata, overwrite));
			}
			if ((datdata.DatFormat & DatFormat.SoftwareList) != 0
				&& ((datdata.DatFormat & DatFormat.Logiqx) != 0
					|| (datdata.DatFormat & DatFormat.SabreDat) != 0))
			{
				outfileNames.Add(DatFormat.SoftwareList, CreateOutfileNamesHelper(outDir, ".sl.xml", datdata, overwrite));
			}

			// TSV
			if ((datdata.DatFormat & DatFormat.TSV) != 0)
			{
				outfileNames.Add(DatFormat.TSV, CreateOutfileNamesHelper(outDir, ".tsv", datdata, overwrite));
			}

			return outfileNames;
		}
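
		A brief usage sketch (illustrative only; the DatFile initialization and the exact CreateOutfileNames parameter order are assumed here, since only the method body appears above): when two requested formats would collide on the same extension, the suffixed fallbacks keep the names distinct.

		// DatFile datdata = new DatFile { FileName = "example", DatFormat = DatFormat.Logiqx | DatFormat.SabreDat };
		// var names = CreateOutfileNames(@"dats\", datdata, overwrite: true);
		// names[DatFormat.Logiqx]   -> @"dats\example.xml"
		// names[DatFormat.SabreDat] -> @"dats\example.sd.xml"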

		/// <summary>
		/// Helper for generating the outfile name
		/// </summary>
		/// <param name="outDir">Output directory</param>
		/// <param name="extension">Extension to use for the file</param>
		/// <param name="datdata">DAT information</param>
		/// <param name="overwrite">True if we ignore existing files, false otherwise</param>
		/// <returns>String containing the new filename</returns>
		private static string CreateOutfileNamesHelper(string outDir, string extension, DatFile datdata, bool overwrite)
		{
			string filename = (String.IsNullOrEmpty(datdata.FileName) ? datdata.Description : datdata.FileName);
			string outfile = outDir + filename + extension;
			outfile = (outfile.Contains(Path.DirectorySeparatorChar.ToString() + Path.DirectorySeparatorChar.ToString()) ?
				outfile.Replace(Path.DirectorySeparatorChar.ToString() + Path.DirectorySeparatorChar.ToString(), Path.DirectorySeparatorChar.ToString()) :
				outfile);
			if (!overwrite)
			{
				int i = 1;
				while (File.Exists(outfile))
				{
					outfile = outDir + filename + "_" + i + extension;
					outfile = (outfile.Contains(Path.DirectorySeparatorChar.ToString() + Path.DirectorySeparatorChar.ToString()) ?
						outfile.Replace(Path.DirectorySeparatorChar.ToString() + Path.DirectorySeparatorChar.ToString(), Path.DirectorySeparatorChar.ToString()) :
						outfile);
					i++;
				}
			}

			return outfile;
		}
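
		When overwrite is false, the helper probes for a free name by appending "_1", "_2", and so on; a hypothetical case (the on-disk files and datdata values are assumed for illustration):

		// With @"out\example.dat" and @"out\example_1.dat" already on disk,
		// CreateOutfileNamesHelper(@"out\", ".dat", datdata, false) -> @"out\example_2.dat"
		// (assuming datdata.FileName is "example")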

		#endregion

		#region String Manipulation

		/// <summary>
		/// Compare strings as numeric
		/// </summary>
		/// <param name="s1">First string to compare</param>
		/// <param name="s2">Second string to compare</param>
		/// <returns>-1 if s1 comes before s2, 0 if s1 and s2 are equal, 1 if s1 comes after s2</returns>
		/// <remarks>I want to be able to handle paths properly with no issue, can I do a recursive call based on separated by path separator?</remarks>
		public static int CompareNumeric(string s1, string s2)
		{
			// If one or both are null, sort nulls first (checked before normalizing to avoid a NullReferenceException)
			if (s1 == null && s2 == null)
			{
				return 0;
			}
			if (s1 == null)
			{
				return -1;
			}
			if (s2 == null)
			{
				return 1;
			}

			// We want to normalize the strings, so we set both to lower case
			s1 = s1.ToLowerInvariant();
			s2 = s2.ToLowerInvariant();

			// If the strings are the same exactly, return
			if (s1 == s2)
			{
				return 0;
			}

			// Now split into path parts after converting AltDirSeparator to DirSeparator
			s1 = s1.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar);
			s2 = s2.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar);
			string[] s1parts = s1.Split(Path.DirectorySeparatorChar);
			string[] s2parts = s2.Split(Path.DirectorySeparatorChar);

			// Then compare each part in turn
			for (int j = 0; j < s1parts.Length && j < s2parts.Length; j++)
			{
				int compared = CompareNumericPart(s1parts[j], s2parts[j]);
				if (compared != 0)
				{
					return compared;
				}
			}

			// If we got out here, then it looped through at least one of the strings
			if (s1parts.Length > s2parts.Length)
			{
				return 1;
			}
			if (s1parts.Length < s2parts.Length)
			{
				return -1;
			}

			return 0;
		}

		/// <summary>
		/// Helper for CompareNumeric
		/// </summary>
		/// <param name="s1">First string to compare</param>
		/// <param name="s2">Second string to compare</param>
		/// <returns>-1 if s1 comes before s2, 0 if s1 and s2 are equal, 1 if s1 comes after s2</returns>
		private static int CompareNumericPart(string s1, string s2)
		{
			// Loop through character by character until we have an answer
			for (int i = 0; i < s1.Length && i < s2.Length; i++)
			{
				int s1c = s1[i];
				int s2c = s2[i];

				// If the characters are the same, continue
				if (s1c == s2c)
				{
					continue;
				}

				// If they're different, check which one was larger
				if (s1c > s2c)
				{
					return 1;
				}
				if (s1c < s2c)
				{
					return -1;
				}
			}

			// If we got out here, then it looped through at least one of the strings
			if (s1.Length > s2.Length)
			{
				return 1;
			}
			if (s1.Length < s2.Length)
			{
				return -1;
			}

			return 0;
		}
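
		A few concrete cases that follow directly from the comparison above: segments are compared case-insensitively character by character, shorter inputs sort first on ties, and (despite the name) digits are compared as characters rather than as numbers.

		// CompareNumeric("roms/a", "roms/a/b") == -1   (fewer path parts sorts first)
		// CompareNumeric("ROMS/a", "roms/b")   == -1   ('a' < 'b' after lower-casing)
		// CompareNumeric("disk10", "disk2")    == -1   ('1' < '2' ordinally, not 10 vs. 2)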

		/// <summary>
		/// Convert all characters that are not considered XML-safe
		/// </summary>
		/// <param name="s">Input string to clean</param>
		/// <returns>Cleaned string</returns>
		public static string ConvertXMLUnsafeCharacters(string s)
		{
			return new String(s.Select(c =>
				(c == 0x9
					|| c == 0xA
					|| c == 0xD
					|| (c >= 0x20 && c <= 0xD77F)
					|| (c >= 0xE000 && c <= 0xFFFD)
					|| (c >= 0x10000 && c <= 0x10FFFF)
						? c
						: HttpUtility.HtmlEncode(c)[0]))
				.ToArray());
		}

		/// <summary>
		/// Get a proper romba sub path
		/// </summary>
		/// <param name="hash">SHA-1 hash to get the path for</param>
		/// <returns>Subfolder path for the given hash</returns>
		public static string GetRombaPath(string hash)
		{
			// If the hash isn't the right size, then we return null
			if (hash.Length != Constants.SHA1Length) // TODO: When updating to SHA-256, this needs to update to Constants.SHA256Length
			{
				return null;
			}

			return Path.Combine(hash.Substring(0, 2), hash.Substring(2, 2), hash.Substring(4, 2), hash.Substring(6, 2), hash + ".gz");
		}
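
		The resulting depot layout nests the gzipped file four directories deep using the first eight hex digits of its SHA-1 (assuming Constants.SHA1Length is the 40-character hex length):

		// GetRombaPath("da39a3ee5e6b4b0d3255bfef95601890afd80709")
		//   -> "da/39/a3/ee/da39a3ee5e6b4b0d3255bfef95601890afd80709.gz"
		//      (with Path.DirectorySeparatorChar as the actual separator on the host OS)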

		/// <summary>
		/// Get the multiplier to be used with the size given
		/// </summary>
		/// <param name="sizestring">String with possible size with extension</param>
		/// <returns>Size in bytes after applying the multiplier, or -1 if the string could not be parsed</returns>
		public static long GetSizeFromString(string sizestring)
		{
			long size = 0;

			// Make sure the string is in lower case
			sizestring = sizestring.ToLowerInvariant();

			// Get any trailing size identifiers
			long multiplier = 1;
			if (sizestring.EndsWith("k") || sizestring.EndsWith("kb"))
			{
				multiplier = Constants.KiloByte;
			}
			else if (sizestring.EndsWith("ki") || sizestring.EndsWith("kib"))
			{
				multiplier = Constants.KibiByte;
			}
			else if (sizestring.EndsWith("m") || sizestring.EndsWith("mb"))
			{
				multiplier = Constants.MegaByte;
			}
			else if (sizestring.EndsWith("mi") || sizestring.EndsWith("mib"))
			{
				multiplier = Constants.MibiByte;
			}
			else if (sizestring.EndsWith("g") || sizestring.EndsWith("gb"))
			{
				multiplier = Constants.GigaByte;
			}
			else if (sizestring.EndsWith("gi") || sizestring.EndsWith("gib"))
			{
				multiplier = Constants.GibiByte;
			}
			else if (sizestring.EndsWith("t") || sizestring.EndsWith("tb"))
			{
				multiplier = Constants.TeraByte;
			}
			else if (sizestring.EndsWith("ti") || sizestring.EndsWith("tib"))
			{
				multiplier = Constants.TibiByte;
			}
			else if (sizestring.EndsWith("p") || sizestring.EndsWith("pb"))
			{
				multiplier = Constants.PetaByte;
			}
			else if (sizestring.EndsWith("pi") || sizestring.EndsWith("pib"))
			{
				multiplier = Constants.PibiByte;
			}

			// Remove any trailing identifiers
			sizestring = sizestring.TrimEnd(new char[] { 'k', 'm', 'g', 't', 'p', 'i', 'b', ' ' });

			// Now try to get the size from the string
			if (!Int64.TryParse(sizestring, out size))
			{
				size = -1;
			}
			else
			{
				size *= multiplier;
			}

			return size;
		}
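
		A few illustrative calls; the concrete byte counts depend on how the Constants multipliers are defined, which is outside this excerpt:

		// GetSizeFromString("4 MiB") -> 4 * Constants.MibiByte
		// GetSizeFromString("512kb") -> 512 * Constants.KiloByte
		// GetSizeFromString("lots")  -> -1 (not parseable as a number)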

		/// <summary>
		/// Get if a string contains Unicode characters
		/// </summary>
		/// <param name="s">Input string to test</param>
		/// <returns>True if the string contains at least one Unicode character, false otherwise</returns>
		public static bool IsUnicode(string s)
		{
			return (s.Any(c => c > 255));
		}

		/// <summary>
		/// Remove all chars that are considered path unsafe
		/// </summary>
		/// <param name="s">Input string to clean</param>
		/// <returns>Cleaned string</returns>
		public static string RemovePathUnsafeCharacters(string s)
		{
			List<char> invalidPath = Path.GetInvalidPathChars().ToList();
			return new string(s.Where(c => !invalidPath.Contains(c)).ToArray());
		}

		/// <summary>
		/// Remove all unicode-specific chars from a string
		/// </summary>
		/// <param name="s">Input string to clean</param>
		/// <returns>Cleaned string</returns>
		public static string RemoveUnicodeCharacters(string s)
		{
			return new string(s.Where(c => c <= 255).ToArray());
		}

		/// <summary>
		/// Split a line as if it were a CMP rom line
		/// </summary>
		/// <param name="s">Line to split</param>
		/// <returns>Line split</returns>
		/// <remarks>Uses code from http://stackoverflow.com/questions/554013/regular-expression-to-split-on-spaces-unless-in-quotes</remarks>
		public static string[] SplitLineAsCMP(string s)
		{
			// Preprocess the string
			s = s.Trim();
			s = Regex.Replace(s, @"^\S* \(", ""); // Remove item identifier and opening brace
			s = Regex.Replace(s, @"\)\S*#.*$", ""); // Remove trailing comments
			s = s.TrimEnd(')'); // Remove closing brace
			s = s.Trim(); // Remove leading and trailing whitespace

			// Now we get each string, divided up as cleanly as possible
			string[] matches = Regex
				//.Matches(s, @"([^\s]*""[^""]+""[^\s]*)|[^""]?\w+[^""]?")
				.Matches(s, @"[^\s""]+|""[^""]*""")
				.Cast<Match>()
				.Select(m => m.Groups[0].Value)
				.ToArray();

			return matches;
		}
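
		For example, a typical ClrMamePro rom line reduces to its key/value tokens, with quoted values kept intact (quotes included):

		// SplitLineAsCMP("rom ( name \"Some Game.bin\" size 1024 crc deadbeef )")
		//   -> "name", "\"Some Game.bin\"", "size", "1024", "crc", "deadbeef"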

		#endregion

		#region System.IO.Path Replacements

		/// <summary>
		/// Replacement for System.IO.Path.GetDirectoryName
		/// </summary>
		/// <param name="s">Path to get directory name out of</param>
		/// <returns>Directory name from path</returns>
		/// <see cref="System.IO.Path.GetDirectoryName(string)"/>
		public static string GetDirectoryName(string s)
		{
			if (s == null)
			{
				return "";
			}

			if (s.Contains("/"))
			{
				string[] tempkey = s.Split('/');
				return String.Join("/", tempkey.Take(tempkey.Length - 1));
			}
			else if (s.Contains("\\"))
			{
				string[] tempkey = s.Split('\\');
				return String.Join("\\", tempkey.Take(tempkey.Length - 1));
			}

			return "";
		}

		/// <summary>
		/// Replacement for System.IO.Path.GetFileName
		/// </summary>
		/// <param name="s">Path to get file name out of</param>
		/// <returns>File name from path</returns>
		/// <see cref="System.IO.Path.GetFileName(string)"/>
		public static string GetFileName(string s)
		{
			if (s == null)
			{
				return "";
			}

			if (s.Contains("/"))
			{
				string[] tempkey = s.Split('/');
				return tempkey.Last();
			}
			else if (s.Contains("\\"))
			{
				string[] tempkey = s.Split('\\');
				return tempkey.Last();
			}

			return s;
		}

		/// <summary>
		/// Replacement for System.IO.Path.GetFileNameWithoutExtension
		/// </summary>
		/// <param name="s">Path to get file name out of</param>
		/// <returns>File name without extension from path</returns>
		/// <see cref="System.IO.Path.GetFileNameWithoutExtension(string)"/>
		public static string GetFileNameWithoutExtension(string s)
		{
			s = GetFileName(s);
			string[] tempkey = s.Split('.');
			if (tempkey.Count() == 1)
			{
				return s;
			}
			return String.Join(".", tempkey.Take(tempkey.Length - 1));
		}
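
		These replacements operate purely on whichever separator appears in the string, so a few cases that follow directly from the code above:

		// GetDirectoryName("a/b/c.bin")                -> "a/b"
		// GetDirectoryName("c.bin")                    -> ""      (no separator present)
		// GetFileName(@"a\b\c.bin")                    -> "c.bin"
		// GetFileNameWithoutExtension("a/b/c.tar.gz")  -> "c.tar" (only the final extension is dropped)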

		#endregion

		#region WoD-based String Cleaning

		/// <summary>
		/// Replace accented characters
		/// </summary>
		/// <param name="input">String to be parsed</param>
		/// <returns>String with characters replaced</returns>
		public static string NormalizeChars(string input)
		{
			string[,] charmap = {
				{ "Á", "A" }, { "á", "a" },
				{ "À", "A" }, { "à", "a" },
				{ "Â", "A" }, { "â", "a" },
				{ "Ä", "Ae" }, { "ä", "ae" },
				{ "Ã", "A" }, { "ã", "a" },
				{ "Å", "A" }, { "å", "a" },
				{ "Æ", "Ae" }, { "æ", "ae" },
				{ "Ç", "C" }, { "ç", "c" },
				{ "Ð", "D" }, { "ð", "d" },
				{ "É", "E" }, { "é", "e" },
				{ "È", "E" }, { "è", "e" },
				{ "Ê", "E" }, { "ê", "e" },
				{ "Ë", "E" }, { "ë", "e" },
				{ "ƒ", "f" },
				{ "Í", "I" }, { "í", "i" },
				{ "Ì", "I" }, { "ì", "i" },
				{ "Î", "I" }, { "î", "i" },
				{ "Ï", "I" }, { "ï", "i" },
				{ "Ñ", "N" }, { "ñ", "n" },
				{ "Ó", "O" }, { "ó", "o" },
				{ "Ò", "O" }, { "ò", "o" },
				{ "Ô", "O" }, { "ô", "o" },
				{ "Ö", "Oe" }, { "ö", "oe" },
				{ "Õ", "O" }, { "õ", "o" },
				{ "Ø", "O" }, { "ø", "o" },
				{ "Š", "S" }, { "š", "s" },
				{ "ß", "ss" },
				{ "Þ", "B" }, { "þ", "b" },
				{ "Ú", "U" }, { "ú", "u" },
				{ "Ù", "U" }, { "ù", "u" },
				{ "Û", "U" }, { "û", "u" },
				{ "Ü", "Ue" }, { "ü", "ue" },
				{ "ÿ", "y" },
				{ "Ý", "Y" }, { "ý", "y" },
				{ "Ž", "Z" }, { "ž", "z" },
			};

			for (int i = 0; i < charmap.GetLength(0); i++)
			{
				input = input.Replace(charmap[i, 0], charmap[i, 1]);
			}

			return input;
		}

		/// <summary>
		/// Convert Cyrillic lettering to Latin lettering
		/// </summary>
		/// <param name="input">String to be parsed</param>
		/// <returns>String with characters replaced</returns>
		public static string RussianToLatin(string input)
		{
			string[,] charmap = {
				{ "А", "A" }, { "Б", "B" }, { "В", "V" }, { "Г", "G" }, { "Д", "D" },
				{ "Е", "E" }, { "Ё", "Yo" }, { "Ж", "Zh" }, { "З", "Z" }, { "И", "I" },
				{ "Й", "J" }, { "К", "K" }, { "Л", "L" }, { "М", "M" }, { "Н", "N" },
				{ "О", "O" }, { "П", "P" }, { "Р", "R" }, { "С", "S" }, { "Т", "T" },
				{ "У", "U" }, { "Ф", "f" }, { "Х", "Kh" }, { "Ц", "Ts" }, { "Ч", "Ch" },
				{ "Ш", "Sh" }, { "Щ", "Sch" }, { "Ъ", "" }, { "Ы", "y" }, { "Ь", "" },
				{ "Э", "e" }, { "Ю", "yu" }, { "Я", "ya" }, { "а", "a" }, { "б", "b" },
				{ "в", "v" }, { "г", "g" }, { "д", "d" }, { "е", "e" }, { "ё", "yo" },
				{ "ж", "zh" }, { "з", "z" }, { "и", "i" }, { "й", "j" }, { "к", "k" },
				{ "л", "l" }, { "м", "m" }, { "н", "n" }, { "о", "o" }, { "п", "p" },
				{ "р", "r" }, { "с", "s" }, { "т", "t" }, { "у", "u" }, { "ф", "f" },
				{ "х", "kh" }, { "ц", "ts" }, { "ч", "ch" }, { "ш", "sh" }, { "щ", "sch" },
				{ "ъ", "" }, { "ы", "y" }, { "ь", "" }, { "э", "e" }, { "ю", "yu" },
				{ "я", "ya" },
			};

			for (int i = 0; i < charmap.GetLength(0); i++)
			{
				input = input.Replace(charmap[i, 0], charmap[i, 1]);
			}

			return input;
		}

		/// <summary>
		/// Replace special characters and patterns
		/// </summary>
		/// <param name="input">String to be parsed</param>
		/// <returns>String with characters replaced</returns>
		public static string SearchPattern(string input)
		{
			string[,] charmap = {
				{ @"~", " - " },
				{ @"_", " " },
				{ @":", " " },
				{ @">", ")" },
				{ @"<", "(" },
				{ @"\|", "-" },
				{ "\"", "'" },
				{ @"\*", "." },
				{ @"\\", "-" },
				{ @"/", "-" },
				{ @"\?", " " },
				{ @"\(([^)(]*)\(([^)]*)\)([^)(]*)\)", " " },
				{ @"\(([^)]+)\)", " " },
				{ @"\[([^]]+)\]", " " },
				{ @"\{([^}]+)\}", " " },
				{ @"(ZZZJUNK|ZZZ-UNK-|ZZZ-UNK |zzz unknow |zzz unk |Copy of |[.][a-z]{3}[.][a-z]{3}[.]|[.][a-z]{3}[.])", " " },
				{ @" (r|rev|v|ver)\s*[\d\.]+[^\s]*", " " },
				{ @"(( )|(\A))(\d{6}|\d{8})(( )|(\Z))", " " },
				{ @"(( )|(\A))(\d{1,2})-(\d{1,2})-(\d{4}|\d{2})", " " },
				{ @"(( )|(\A))(\d{4}|\d{2})-(\d{1,2})-(\d{1,2})", " " },
				{ @"[-]+", "-" },
				{ @"\A\s*\)", " " },
				{ @"\A\s*(,|-)", " " },
				{ @"\s+", " " },
				{ @"\s+,", "," },
				{ @"\s*(,|-)\s*\Z", " " },
			};

			for (int i = 0; i < charmap.GetLength(0); i++)
			{
				input = Regex.Replace(input, charmap[i, 0], charmap[i, 1]);
			}

			return input;
		}
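
		Taken together, the three cleaners transliterate and strip noise from scene-style names; for instance:

		// NormalizeChars("Übergröße")      -> "Uebergroesse"
		// RussianToLatin("Тетрис")         -> "Tetris"
		// SearchPattern("Game_(USA).bin")  -> "Game .bin"  (underscore replaced, parenthesized region dropped)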

		#endregion

		#region Externally sourced methods

		/// <summary>
		/// Returns the human-readable file size for an arbitrary, 64-bit file size
		/// The default format is "0.### XB", e.g. "4.2 KB" or "1.434 GB"
		/// </summary>
		/// <param name="input">Size in bytes</param>
		/// <returns>Human-readable file size</returns>
		/// <link>http://www.somacon.com/p576.php</link>
		public static string GetBytesReadable(long input)
		{
			// Get absolute value
			long absolute_i = (input < 0 ? -input : input);
			// Determine the suffix and readable value
			string suffix;
			double readable;
			if (absolute_i >= 0x1000000000000000) // Exabyte
			{
				suffix = "EB";
				readable = (input >> 50);
			}
			else if (absolute_i >= 0x4000000000000) // Petabyte
			{
				suffix = "PB";
				readable = (input >> 40);
			}
			else if (absolute_i >= 0x10000000000) // Terabyte
			{
				suffix = "TB";
				readable = (input >> 30);
			}
			else if (absolute_i >= 0x40000000) // Gigabyte
			{
				suffix = "GB";
				readable = (input >> 20);
			}
			else if (absolute_i >= 0x100000) // Megabyte
			{
				suffix = "MB";
				readable = (input >> 10);
			}
			else if (absolute_i >= 0x400) // Kilobyte
			{
				suffix = "KB";
				readable = input;
			}
			else
			{
				return input.ToString("0 B"); // Byte
			}
			// Divide by 1024 to get fractional value
			readable = (readable / 1024);
			// Return formatted number with suffix
			return readable.ToString("0.### ") + suffix;
		}
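
		Sample outputs that follow the shift-then-divide arithmetic above:

		// GetBytesReadable(500)     -> "500 B"
		// GetBytesReadable(1536)    -> "1.5 KB"   (1536 / 1024)
		// GetBytesReadable(3 << 20) -> "3 MB"     ((3145728 >> 10) / 1024)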

		/// <summary>
		/// http://stackoverflow.com/questions/311165/how-do-you-convert-byte-array-to-hexadecimal-string-and-vice-versa
		/// </summary>
		public static byte[] StringToByteArray(string hex)
		{
			int NumberChars = hex.Length;
			byte[] bytes = new byte[NumberChars / 2];
			for (int i = 0; i < NumberChars; i += 2)
				bytes[i / 2] = Convert.ToByte(hex.Substring(i, 2), 16);
			return bytes;
		}

		/// <summary>
		/// http://stackoverflow.com/questions/5613279/c-sharp-hex-to-ascii
		/// </summary>
		public static string ConvertHexToAscii(string hexString)
		{
			if (hexString.Contains("-"))
			{
				hexString = hexString.Replace("-", "");
			}

			StringBuilder sb = new StringBuilder();

			for (int i = 0; i < hexString.Length; i += 2)
			{
				String hs = hexString.Substring(i, 2);
				sb.Append(Convert.ToChar(Convert.ToUInt32(hs, 16)));
			}

			return sb.ToString();
		}

		/// <summary>
		/// http://stackoverflow.com/questions/15920741/convert-from-string-ascii-to-string-hex
		/// </summary>
		public static string ConvertAsciiToHex(string asciiString)
		{
			string hexOutput = "";
			foreach (char _eachChar in asciiString.ToCharArray())
			{
				// Get the integral value of the character.
				int value = Convert.ToInt32(_eachChar);
				// Convert the decimal value to a two-digit hexadecimal value in string form.
				hexOutput += String.Format("{0:X2}", value);
			}

			return hexOutput;
		}
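
		With ConvertAsciiToHex emitting two hex digits per character, the helpers round-trip plain ASCII text:

		// ConvertAsciiToHex("SNES")     -> "534E4553"
		// ConvertHexToAscii("534E4553") -> "SNES"
		// StringToByteArray("534E4553") -> { 0x53, 0x4E, 0x45, 0x53 }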

		/// <summary>
		/// Adapted from 7-zip Source Code: CPP/Windows/TimeUtils.cpp:FileTimeToDosTime
		/// </summary>
		public static uint ConvertDateTimeToMsDosTimeFormat(DateTime dateTime)
		{
			uint year = (uint)((dateTime.Year - 1980) % 128);
			uint mon = (uint)dateTime.Month;
			uint day = (uint)dateTime.Day;
			uint hour = (uint)dateTime.Hour;
			uint min = (uint)dateTime.Minute;
			uint sec = (uint)dateTime.Second;

			return (year << 25) | (mon << 21) | (day << 16) | (hour << 11) | (min << 5) | (sec >> 1);
		}

		/// <summary>
		/// Adapted from 7-zip Source Code: CPP/Windows/TimeUtils.cpp:DosTimeToFileTime
		/// </summary>
		public static DateTime ConvertMsDosTimeFormatToDateTime(uint msDosDateTime)
		{
			return new DateTime((int)(1980 + (msDosDateTime >> 25)), (int)((msDosDateTime >> 21) & 0xF), (int)((msDosDateTime >> 16) & 0x1F),
				(int)((msDosDateTime >> 11) & 0x1F), (int)((msDosDateTime >> 5) & 0x3F), (int)((msDosDateTime & 0x1F) * 2));
		}
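
		The packed MS-DOS format keeps seconds at two-second resolution, so even seconds survive a round trip and odd seconds round down:

		// uint packed = ConvertDateTimeToMsDosTimeFormat(new DateTime(2017, 6, 15, 12, 30, 10));
		// ConvertMsDosTimeFormatToDateTime(packed) -> 2017-06-15 12:30:10
		// (an input of 12:30:11 would come back as 12:30:10)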

		/// <summary>
		/// Determines a text file's encoding by analyzing its byte order mark (BOM).
		/// Falls back to the system default encoding when no recognizable BOM is present.
		/// http://stackoverflow.com/questions/3825390/effective-way-to-find-any-files-encoding
		/// </summary>
		/// <param name="filename">The text file to analyze.</param>
		/// <returns>The detected encoding.</returns>
		public static Encoding GetEncoding(string filename)
		{
			// Read the BOM
			var bom = new byte[4];
			FileStream file = FileTools.TryOpenRead(filename);
			file.Read(bom, 0, 4);
			file.Dispose();

			// Analyze the BOM
			if (bom[0] == 0x2b && bom[1] == 0x2f && bom[2] == 0x76) return Encoding.UTF7;
			if (bom[0] == 0xef && bom[1] == 0xbb && bom[2] == 0xbf) return Encoding.UTF8;
			if (bom[0] == 0xff && bom[1] == 0xfe) return Encoding.Unicode; //UTF-16LE
			if (bom[0] == 0xfe && bom[1] == 0xff) return Encoding.BigEndianUnicode; //UTF-16BE
			if (bom[0] == 0 && bom[1] == 0 && bom[2] == 0xfe && bom[3] == 0xff) return Encoding.UTF32;
			return Encoding.Default;
		}

		/// <summary>
		/// http://stackoverflow.com/questions/1600962/displaying-the-build-date
		/// </summary>
		public static DateTime GetLinkerTime(this Assembly assembly, TimeZoneInfo target = null)
		{
			var filePath = assembly.Location;
			const int c_PeHeaderOffset = 60;
			const int c_LinkerTimestampOffset = 8;

			var buffer = new byte[2048];

			using (var stream = FileTools.TryOpenRead(filePath))
				stream.Read(buffer, 0, 2048);

			var offset = BitConverter.ToInt32(buffer, c_PeHeaderOffset);
			var secondsSince1970 = BitConverter.ToInt32(buffer, offset + c_LinkerTimestampOffset);
			var epoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);

			var linkTimeUtc = epoch.AddSeconds(secondsSince1970);

			var tz = target ?? TimeZoneInfo.Local;
			var localTime = TimeZoneInfo.ConvertTimeFromUtc(linkTimeUtc, tz);

			return localTime;
		}

		/// <summary>
		/// http://stackoverflow.com/questions/248603/natural-sort-order-in-c-sharp
		/// </summary>
		public static IEnumerable<T> OrderByAlphaNumeric<T>(this IEnumerable<T> source, Func<T, string> selector)
		{
			int max = source
				.SelectMany(i => Regex.Matches(selector(i), @"\d+").Cast<Match>().Select(m => (int?)m.Value.Length))
				.Max() ?? 0;

			return source.OrderBy(i => Regex.Replace(selector(i), @"\d+", m => m.Value.PadLeft(max, '0')));
		}
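
		Padding every digit run to the longest one found gives a natural sort, so numeric segments order by value rather than character by character:

		// new[] { "disk10", "disk2", "disk1" }.OrderByAlphaNumeric(s => s)
		//   -> "disk1", "disk2", "disk10"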

		#endregion
	}
}
1
SabreTools.Library/obj/x64/Debug/CoreCompileInputs.cache
Normal file
1
SabreTools.Library/obj/x64/Debug/CoreCompileInputs.cache
Normal file
@@ -0,0 +1 @@
9270cadb14162c6fe0aac5fc1b1481ee5619bc57
Binary file not shown.
@@ -0,0 +1,23 @@
C:\Users\Matt\GitHub\Debug-x64\Skippers\a7800.xml
C:\Users\Matt\GitHub\Debug-x64\Skippers\fds.xml
C:\Users\Matt\GitHub\Debug-x64\Skippers\lynx.xml
C:\Users\Matt\GitHub\Debug-x64\Skippers\n64.xml
C:\Users\Matt\GitHub\Debug-x64\Skippers\nes.xml
C:\Users\Matt\GitHub\Debug-x64\Skippers\pce.xml
C:\Users\Matt\GitHub\Debug-x64\Skippers\psid.xml
C:\Users\Matt\GitHub\Debug-x64\Skippers\snes.xml
C:\Users\Matt\GitHub\Debug-x64\Skippers\spc.xml
C:\Users\Matt\GitHub\Debug-x64\7za.dll
C:\Users\Matt\GitHub\Debug-x64\sqlite3.dll
C:\Users\Matt\GitHub\Debug-x64\Licenses\LICENSE
C:\Users\Matt\GitHub\Debug-x64\README.1ST
C:\Users\Matt\GitHub\Debug-x64\Licenses\LICENSE.alphafs.txt
C:\Users\Matt\GitHub\Debug-x64\Licenses\LICENSE.jzlib.txt
C:\Users\Matt\GitHub\Debug-x64\Licenses\LICENSE.sevenzipsharp.txt
C:\Users\Matt\GitHub\Debug-x64\Licenses\LICENSE.sharpcompress.txt
C:\Users\Matt\GitHub\Debug-x64\Licenses\LICENSE.zlib.txt
C:\Users\Matt\GitHub\Debug-x64\SabreTools.Helper.dll
C:\Users\Matt\GitHub\Debug-x64\SabreTools.Helper.pdb
C:\Users\Matt\GitHub\SabreTools\SabreTools.Helper\obj\x64\Debug\SabreTools.Helper.csprojResolveAssemblyReference.cache
C:\Users\Matt\GitHub\SabreTools\SabreTools.Helper\obj\x64\Debug\SabreTools.Helper.dll
C:\Users\Matt\GitHub\SabreTools\SabreTools.Helper\obj\x64\Debug\SabreTools.Helper.pdb
Binary file not shown.
BIN
SabreTools.Library/obj/x64/Debug/SabreTools.Helper.dll
Normal file
BIN
SabreTools.Library/obj/x64/Debug/SabreTools.Helper.dll
Normal file
Binary file not shown.
BIN
SabreTools.Library/obj/x64/Debug/SabreTools.Helper.pdb
Normal file
BIN
SabreTools.Library/obj/x64/Debug/SabreTools.Helper.pdb
Normal file
Binary file not shown.
1
SabreTools.Library/obj/x64/Mono/CoreCompileInputs.cache
Normal file
1
SabreTools.Library/obj/x64/Mono/CoreCompileInputs.cache
Normal file
@@ -0,0 +1 @@
1fc2a811ebcbaa0043ca3a6244f1e4b04d4932a8
Binary file not shown.
@@ -0,0 +1,23 @@
C:\Users\Matt\GitHub\Debug-mono-x64\Skippers\a7800.xml
C:\Users\Matt\GitHub\Debug-mono-x64\Skippers\fds.xml
C:\Users\Matt\GitHub\Debug-mono-x64\Skippers\lynx.xml
C:\Users\Matt\GitHub\Debug-mono-x64\Skippers\n64.xml
C:\Users\Matt\GitHub\Debug-mono-x64\Skippers\nes.xml
C:\Users\Matt\GitHub\Debug-mono-x64\Skippers\pce.xml
C:\Users\Matt\GitHub\Debug-mono-x64\Skippers\psid.xml
C:\Users\Matt\GitHub\Debug-mono-x64\Skippers\snes.xml
C:\Users\Matt\GitHub\Debug-mono-x64\Skippers\spc.xml
C:\Users\Matt\GitHub\Debug-mono-x64\7za.dll
C:\Users\Matt\GitHub\Debug-mono-x64\sqlite3.dll
C:\Users\Matt\GitHub\Debug-mono-x64\Licenses\LICENSE
C:\Users\Matt\GitHub\Debug-mono-x64\README.1ST
C:\Users\Matt\GitHub\Debug-mono-x64\Licenses\LICENSE.alphafs.txt
C:\Users\Matt\GitHub\Debug-mono-x64\Licenses\LICENSE.jzlib.txt
C:\Users\Matt\GitHub\Debug-mono-x64\Licenses\LICENSE.sevenzipsharp.txt
C:\Users\Matt\GitHub\Debug-mono-x64\Licenses\LICENSE.sharpcompress.txt
C:\Users\Matt\GitHub\Debug-mono-x64\Licenses\LICENSE.zlib.txt
C:\Users\Matt\GitHub\Debug-mono-x64\SabreTools.Helper.dll
C:\Users\Matt\GitHub\Debug-mono-x64\SabreTools.Helper.pdb
C:\Users\Matt\GitHub\SabreTools\SabreTools.Helper\obj\x64\Mono\SabreTools.Helper.csprojResolveAssemblyReference.cache
C:\Users\Matt\GitHub\SabreTools\SabreTools.Helper\obj\x64\Mono\SabreTools.Helper.dll
C:\Users\Matt\GitHub\SabreTools\SabreTools.Helper\obj\x64\Mono\SabreTools.Helper.pdb
Binary file not shown.
BIN
SabreTools.Library/obj/x64/Mono/SabreTools.Helper.dll
Normal file
BIN
SabreTools.Library/obj/x64/Mono/SabreTools.Helper.dll
Normal file
Binary file not shown.
BIN
SabreTools.Library/obj/x64/Mono/SabreTools.Helper.pdb
Normal file
BIN
SabreTools.Library/obj/x64/Mono/SabreTools.Helper.pdb
Normal file
Binary file not shown.
@@ -0,0 +1,25 @@
C:\Users\Matt\GitHub\SabreTools\SabreTools.Helper\obj\x64\Mono\SabreTools.Helper.dll
C:\Users\Matt\GitHub\SabreTools\SabreTools.Helper\obj\x64\Mono\SabreTools.Helper.pdb
C:\Users\Matt\GitHub\Debug-mono-x64\Skippers\a7800.xml
C:\Users\Matt\GitHub\Debug-mono-x64\Skippers\fds.xml
C:\Users\Matt\GitHub\Debug-mono-x64\Skippers\lynx.xml
C:\Users\Matt\GitHub\Debug-mono-x64\Skippers\n64.xml
C:\Users\Matt\GitHub\Debug-mono-x64\Skippers\nes.xml
C:\Users\Matt\GitHub\Debug-mono-x64\Skippers\pce.xml
C:\Users\Matt\GitHub\Debug-mono-x64\Skippers\psid.xml
C:\Users\Matt\GitHub\Debug-mono-x64\Skippers\snes.xml
C:\Users\Matt\GitHub\Debug-mono-x64\Skippers\spc.xml
C:\Users\Matt\GitHub\Debug-mono-x64\7za.dll
C:\Users\Matt\GitHub\Debug-mono-x64\sqlite3.dll
C:\Users\Matt\GitHub\Debug-mono-x64\Licenses\LICENSE
C:\Users\Matt\GitHub\Debug-mono-x64\README.1ST
C:\Users\Matt\GitHub\Debug-mono-x64\Licenses\LICENSE.alphafs.txt
C:\Users\Matt\GitHub\Debug-mono-x64\Licenses\LICENSE.jzlib.txt
C:\Users\Matt\GitHub\Debug-mono-x64\Licenses\LICENSE.sevenzipsharp.txt
C:\Users\Matt\GitHub\Debug-mono-x64\Licenses\LICENSE.sharpcompress.txt
C:\Users\Matt\GitHub\Debug-mono-x64\Licenses\LICENSE.zlib.txt
C:\Users\Matt\GitHub\Debug-mono-x64\SabreTools.Helper.dll
C:\Users\Matt\GitHub\Debug-mono-x64\SabreTools.Helper.pdb
C:\Users\Matt\GitHub\SabreTools\SabreTools.Library\obj\x64\Mono\SabreTools.Library.csprojResolveAssemblyReference.cache
C:\Users\Matt\GitHub\SabreTools\SabreTools.Library\obj\x64\Mono\SabreTools.Helper.dll
C:\Users\Matt\GitHub\SabreTools\SabreTools.Library\obj\x64\Mono\SabreTools.Helper.pdb
Binary file not shown.
@@ -0,0 +1 @@
9270cadb14162c6fe0aac5fc1b1481ee5619bc57
Binary file not shown.
@@ -0,0 +1,21 @@
C:\Users\Matt\GitHub\Release-x64\Skippers\a7800.xml
C:\Users\Matt\GitHub\Release-x64\Skippers\fds.xml
C:\Users\Matt\GitHub\Release-x64\Skippers\lynx.xml
C:\Users\Matt\GitHub\Release-x64\Skippers\n64.xml
C:\Users\Matt\GitHub\Release-x64\Skippers\nes.xml
C:\Users\Matt\GitHub\Release-x64\Skippers\pce.xml
C:\Users\Matt\GitHub\Release-x64\Skippers\psid.xml
C:\Users\Matt\GitHub\Release-x64\Skippers\snes.xml
C:\Users\Matt\GitHub\Release-x64\Skippers\spc.xml
C:\Users\Matt\GitHub\Release-x64\7za.dll
C:\Users\Matt\GitHub\Release-x64\sqlite3.dll
C:\Users\Matt\GitHub\Release-x64\Licenses\LICENSE
C:\Users\Matt\GitHub\Release-x64\README.1ST
C:\Users\Matt\GitHub\Release-x64\Licenses\LICENSE.alphafs.txt
C:\Users\Matt\GitHub\Release-x64\Licenses\LICENSE.jzlib.txt
C:\Users\Matt\GitHub\Release-x64\Licenses\LICENSE.sevenzipsharp.txt
C:\Users\Matt\GitHub\Release-x64\Licenses\LICENSE.sharpcompress.txt
C:\Users\Matt\GitHub\Release-x64\Licenses\LICENSE.zlib.txt
C:\Users\Matt\GitHub\Release-x64\SabreTools.Helper.dll
C:\Users\Matt\GitHub\SabreTools\SabreTools.Helper\obj\x64\Release\SabreTools.Helper.csprojResolveAssemblyReference.cache
C:\Users\Matt\GitHub\SabreTools\SabreTools.Helper\obj\x64\Release\SabreTools.Helper.dll
Binary file not shown.