Compare commits


21 Commits
1.6.0 ... 1.6.3

Author SHA1 Message Date
Matt Nadareski 886825af11 Bump version 2024-05-07 05:17:06 -04:00
Matt Nadareski 198de925aa Update IO 2024-05-07 05:13:30 -04:00
Matt Nadareski 3f7b71e9a5 Bump version 2024-05-06 22:23:45 -04:00
Matt Nadareski 95baaf8603 Update SabreTools.IO 2024-05-06 22:12:14 -04:00
Matt Nadareski 3673264bab Bump version 2024-04-28 19:37:10 -04:00
Matt Nadareski 64fb5a6b63 Update SabreTools.IO 2024-04-28 19:32:06 -04:00
Matt Nadareski e9c959ccdb Update SabreTools.IO 2024-04-28 17:39:30 -04:00
Matt Nadareski 4b7487e92e More rudimentary ZIP64 fixes 2024-04-28 00:24:35 -04:00
Matt Nadareski 52dbcffd8e Add shortcut if any other valid PKZIP blocks found 2024-04-27 23:57:32 -04:00
Matt Nadareski 24ae354bc2 Fix an indicator for ZIP64 2024-04-27 23:50:03 -04:00
Matt Nadareski b30b91fd91 Remove redunant fix in StringBuilderExtensions 2024-04-27 23:48:55 -04:00
Matt Nadareski efb63afc74 Fix PKZIP data printing 2024-04-27 23:45:33 -04:00
Matt Nadareski 16706f7169 Force writing values with proper width 2024-04-27 23:42:37 -04:00
Matt Nadareski d7c32676b5 Add PKZIP printer implementation 2024-04-27 23:40:02 -04:00
Matt Nadareski c8c45446bc Add PKZIP archive extra data record parsing 2024-04-27 23:01:50 -04:00
Matt Nadareski f4de2e27d7 Notes cleanup 2024-04-27 22:49:09 -04:00
Matt Nadareski 970fcbd93b Add PKZIP shell wrapper 2024-04-27 22:45:49 -04:00
Matt Nadareski 57d1cd7f1e Initial code for PKZIP deserialization 2024-04-27 22:41:22 -04:00
Matt Nadareski 522fc372fa Fix instance of wrong extension 2024-04-27 22:23:33 -04:00
Matt Nadareski 7141690fcb Add override for compression handling 2024-04-27 22:04:52 -04:00
Matt Nadareski c7d9177e68 Allow decompression to be skipped 2024-04-27 22:04:38 -04:00
12 changed files with 1090 additions and 20 deletions

View File

@@ -28,8 +28,8 @@
</PackageReference>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.9.0" />
<PackageReference Include="SabreTools.Models" Version="1.4.5" />
<PackageReference Include="xunit" Version="2.7.1" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.5.8">
<PackageReference Include="xunit" Version="2.8.0" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.0">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>

View File

@@ -17,6 +17,11 @@ namespace SabreTools.Serialization.Deserializers
IFileDeserializer<TModel>,
IStreamDeserializer<TModel>
{
/// <summary>
/// Indicates if decompression of compressed input files should be skipped before processing
/// </summary>
protected virtual bool SkipCompression => false;
#region IByteDeserializer
/// <inheritdoc/>
@@ -42,7 +47,7 @@ namespace SabreTools.Serialization.Deserializers
/// <inheritdoc/>
public virtual TModel? Deserialize(string? path)
{
using var stream = PathProcessor.OpenStream(path);
using var stream = PathProcessor.OpenStream(path, SkipCompression);
return DeserializeStream(stream);
}
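The hunk above adds a virtual SkipCompression switch to BaseBinaryDeserializer and threads it into PathProcessor.OpenStream, so a deserializer that handles a compressed container format itself can receive the raw file bytes. A minimal sketch of a derived type using the hook, assuming it sits alongside the other deserializers in SabreTools.Serialization.Deserializers; ExampleModel and ExampleDeserializer are illustrative placeholders, not part of this change:

using System.IO;

// Placeholder model type for illustration only.
public class ExampleModel { }

// Sketch only: a deserializer for a container format that manages its own
// compression, so the base class should not pre-decompress .gz input first.
public class ExampleDeserializer : BaseBinaryDeserializer<ExampleModel>
{
    /// <inheritdoc/>
    protected override bool SkipCompression => true;

    /// <inheritdoc/>
    public override ExampleModel? Deserialize(Stream? data)
    {
        // Format-specific parsing of the raw (still compressed) stream goes here
        return data == null ? null : new ExampleModel();
    }
}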

View File

@@ -0,0 +1,697 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.PKZIP;
using static SabreTools.Models.PKZIP.Constants;
namespace SabreTools.Serialization.Deserializers
{
public class PKZIP : BaseBinaryDeserializer<Archive>
{
/// <inheritdoc/>
protected override bool SkipCompression => true;
/// <inheritdoc/>
public override Archive? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
var archive = new Archive();
#region End of Central Directory Record
// Find the end of central directory record
long eocdrOffset = SearchForEndOfCentralDirectoryRecord(data);
if (eocdrOffset < 0 || eocdrOffset >= data.Length)
return null;
// Seek to the end of central directory record
data.Seek(eocdrOffset, SeekOrigin.Begin);
// Read the end of central directory record
var eocdr = ParseEndOfCentralDirectoryRecord(data);
if (eocdr == null)
return null;
// Assign the end of central directory record
archive.EndOfCentralDirectoryRecord = eocdr;
#endregion
#region ZIP64 End of Central Directory Locator and Record
// Set a flag for ZIP64 not found by default
bool zip64 = false;
// Process ZIP64 if any fields are set to max value
if (eocdr.DiskNumber == 0xFFFF
|| eocdr.StartDiskNumber == 0xFFFF
|| eocdr.TotalEntriesOnDisk == 0xFFFF
|| eocdr.TotalEntries == 0xFFFF
|| eocdr.CentralDirectorySize == 0xFFFFFFFF
|| eocdr.CentralDirectoryOffset == 0xFFFFFFFF)
{
// Set the ZIP64 flag
zip64 = true;
// Find the ZIP64 end of central directory locator
long eocdlOffset = SearchForZIP64EndOfCentralDirectoryLocator(data);
if (eocdlOffset < 0 || eocdlOffset >= data.Length)
return null;
// Seek to the ZIP64 end of central directory locator
data.Seek(eocdlOffset, SeekOrigin.Begin);
// Read the ZIP64 end of central directory locator
var eocdl64 = ParseEndOfCentralDirectoryLocator64(data);
if (eocdl64 == null)
return null;
// Assign the ZIP64 end of central directory record
archive.ZIP64EndOfCentralDirectoryLocator = eocdl64;
// Try to get the ZIP64 end of central directory record offset
if ((long)eocdl64.CentralDirectoryOffset < 0 || (long)eocdl64.CentralDirectoryOffset >= data.Length)
return null;
// Seek to the ZIP64 end of central directory record
data.Seek((long)eocdl64.CentralDirectoryOffset, SeekOrigin.Begin);
// Read the ZIP64 end of central directory record
var eocdr64 = ParseEndOfCentralDirectoryRecord64(data);
if (eocdr64 == null)
return null;
// Assign the ZIP64 end of central directory record
archive.ZIP64EndOfCentralDirectoryRecord = eocdr64;
}
#endregion
#region Central Directory Records
// Try to get the central directory record offset
long cdrOffset, cdrSize;
if (zip64 && archive.ZIP64EndOfCentralDirectoryRecord != null)
{
cdrOffset = (long)archive.ZIP64EndOfCentralDirectoryRecord.CentralDirectoryOffset;
cdrSize = (long)archive.ZIP64EndOfCentralDirectoryRecord.CentralDirectorySize;
}
else if (archive.EndOfCentralDirectoryRecord != null)
{
cdrOffset = archive.EndOfCentralDirectoryRecord.CentralDirectoryOffset;
cdrSize = archive.EndOfCentralDirectoryRecord.CentralDirectorySize;
}
else
{
return null;
}
// Try to get the central directory record offset
if (cdrOffset < 0 || cdrOffset >= data.Length)
return null;
// Seek to the first central directory record
data.Seek(cdrOffset, SeekOrigin.Begin);
// Cache the current offset
long currentOffset = data.Position;
// Read the central directory records
var cdrs = new List<CentralDirectoryFileHeader>();
while (data.Position < currentOffset + cdrSize)
{
// Read the central directory record
var cdr = ParseCentralDirectoryFileHeader(data);
if (cdr == null)
return null;
// Add the central directory record
cdrs.Add(cdr);
}
// Assign the central directory records
archive.CentralDirectoryHeaders = [.. cdrs];
#endregion
// TODO: Handle digital signature -- immediately following central directory records
#region Archive Extra Data Record
// Find the archive extra data record
long aedrOffset = SearchForArchiveExtraDataRecord(data, cdrOffset);
if (aedrOffset >= 0 && aedrOffset < data.Length)
{
// Seek to the archive extra data record
data.Seek(aedrOffset, SeekOrigin.Begin);
// Read the archive extra data record
var aedr = ParseArchiveExtraDataRecord(data);
if (aedr == null)
return null;
// Assign the archive extra data record
archive.ArchiveExtraDataRecord = aedr;
}
#endregion
#region Local File
// Setup all of the collections
var localFileHeaders = new List<LocalFileHeader?>();
var encryptionHeaders = new List<byte[]?>();
var fileData = new List<byte[]>(); // TODO: Should this data be read here?
var dataDescriptors = new List<DataDescriptor?>();
var zip64DataDescriptors = new List<DataDescriptor64?>();
// Read the local file headers
for (int i = 0; i < archive.CentralDirectoryHeaders.Length; i++)
{
var header = archive.CentralDirectoryHeaders[i];
// Get the local file header offset
long headerOffset = header.RelativeOffsetOfLocalHeader;
if (headerOffset == 0xFFFFFFFF && header.ExtraField != null)
{
// TODO: Parse into a proper structure instead of this
byte[] extraData = header.ExtraField;
if (BitConverter.ToUInt16(extraData, 0) == 0x0001)
headerOffset = BitConverter.ToInt64(extraData, 4);
}
if (headerOffset < 0 || headerOffset >= data.Length)
return null;
// Seek to the local file header
data.Seek(headerOffset, SeekOrigin.Begin);
// Try to parse the local header
var localFileHeader = ParseLocalFileHeader(data);
if (localFileHeader == null)
{
// Add a placeholder null item
localFileHeaders.Add(null);
encryptionHeaders.Add(null);
fileData.Add([]);
dataDescriptors.Add(null);
zip64DataDescriptors.Add(null);
continue;
}
// Add the local file header
localFileHeaders.Add(localFileHeader);
// Only read the encryption header if necessary
#if NET20 || NET35
if ((header.Flags & GeneralPurposeBitFlags.FileEncrypted) != 0)
#else
if (header.Flags.HasFlag(GeneralPurposeBitFlags.FileEncrypted))
#endif
{
// Try to read the encryption header data -- TODO: Verify amount to read
byte[] encryptionHeader = data.ReadBytes(12);
if (encryptionHeader.Length != 12)
return null;
// Add the encryption header
encryptionHeaders.Add(encryptionHeader);
}
else
{
// Add the null encryption header
encryptionHeaders.Add(null);
}
// Try to read the file data
byte[] fileDatum = data.ReadBytes((int)header.CompressedSize);
if (fileDatum.Length < header.CompressedSize)
return null;
// Add the file data
fileData.Add(fileDatum);
// Only read the data descriptor if necessary
#if NET20 || NET35
if ((header.Flags & GeneralPurposeBitFlags.NoCRC) != 0)
#else
if (header.Flags.HasFlag(GeneralPurposeBitFlags.NoCRC))
#endif
{
// Select the data descriptor that is being used
if (zip64)
{
// Try to parse the data descriptor
var dataDescriptor64 = ParseDataDescriptor64(data);
if (dataDescriptor64 == null)
return null;
// Add the data descriptor
dataDescriptors.Add(null);
zip64DataDescriptors.Add(dataDescriptor64);
}
else
{
// Try to parse the data descriptor
var dataDescriptor = ParseDataDescriptor(data);
if (dataDescriptor == null)
return null;
// Add the data descriptor
dataDescriptors.Add(dataDescriptor);
zip64DataDescriptors.Add(null);
}
}
else
{
// Add the null data descriptor
dataDescriptors.Add(null);
zip64DataDescriptors.Add(null);
}
}
// Assign the local file headers
archive.LocalFileHeaders = [.. localFileHeaders];
// Assign the encryption headers
archive.EncryptionHeaders = [.. encryptionHeaders];
// Assign the file data
archive.FileData = [.. fileData];
// Assign the data descriptors
archive.DataDescriptors = [.. dataDescriptors];
archive.ZIP64DataDescriptors = [.. zip64DataDescriptors];
#endregion
// TODO: Handle archive decryption header
return archive;
}
/// <summary>
/// Search for the end of central directory record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Position of the end of central directory record, -1 on error</returns>
public static long SearchForEndOfCentralDirectoryRecord(Stream data)
{
// Cache the current offset
long current = data.Position;
// Seek to the minimum size of the record from the end
data.Seek(-22, SeekOrigin.End);
// Attempt to find the end of central directory signature
while (data.Position > 0)
{
// Read the potential signature
uint possibleSignature = data.ReadUInt32();
if (possibleSignature == EndOfCentralDirectoryRecordSignature)
{
long signaturePosition = data.Position - 4;
data.Seek(current, SeekOrigin.Begin);
return signaturePosition;
}
// If we find any other signature
switch (possibleSignature)
{
case ArchiveExtraDataRecordSignature:
case CentralDirectoryFileHeaderSignature:
case DataDescriptorSignature:
case DigitalSignatureSignature:
case EndOfCentralDirectoryLocator64Signature:
case EndOfCentralDirectoryRecord64Signature:
case LocalFileHeaderSignature:
data.Seek(current, SeekOrigin.Begin);
return -1;
}
// Seek backward 5 bytes, if possible
data.Seek(-5, SeekOrigin.Current);
}
// No signature was found
data.Seek(current, SeekOrigin.Begin);
return -1;
}
/// <summary>
/// Parse a Stream into an end of central directory record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled end of central directory record on success, null on error</returns>
public static EndOfCentralDirectoryRecord? ParseEndOfCentralDirectoryRecord(Stream data)
{
// TODO: Use marshalling here instead of building
var record = new EndOfCentralDirectoryRecord();
record.Signature = data.ReadUInt32();
if (record.Signature != EndOfCentralDirectoryRecordSignature)
return null;
record.DiskNumber = data.ReadUInt16();
record.StartDiskNumber = data.ReadUInt16();
record.TotalEntriesOnDisk = data.ReadUInt16();
record.TotalEntries = data.ReadUInt16();
record.CentralDirectorySize = data.ReadUInt32();
record.CentralDirectoryOffset = data.ReadUInt32();
record.FileCommentLength = data.ReadUInt16();
if (record.FileCommentLength > 0)
{
byte[] commentBytes = data.ReadBytes(record.FileCommentLength);
if (commentBytes.Length != record.FileCommentLength)
return null;
record.FileComment = Encoding.ASCII.GetString(commentBytes);
}
return record;
}
/// <summary>
/// Search for the ZIP64 end of central directory locator
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Position of the ZIP64 end of central directory locator, -1 on error</returns>
public static long SearchForZIP64EndOfCentralDirectoryLocator(Stream data)
{
// Cache the current offset
long current = data.Position;
// Seek to the minimum size of the record from the minimum start
// of the end of the central directory record
data.Seek(-22 + -20, SeekOrigin.Current);
// Attempt to find the ZIP64 end of central directory locator signature
while (data.Position > 0)
{
// Read the potential signature
uint possibleSignature = data.ReadUInt32();
if (possibleSignature == EndOfCentralDirectoryLocator64Signature)
{
long signaturePosition = data.Position - 4;
data.Seek(current, SeekOrigin.Begin);
return signaturePosition;
}
// If we find any other signature
switch (possibleSignature)
{
case ArchiveExtraDataRecordSignature:
case CentralDirectoryFileHeaderSignature:
case DataDescriptorSignature:
case DigitalSignatureSignature:
case EndOfCentralDirectoryRecordSignature:
case EndOfCentralDirectoryRecord64Signature:
case LocalFileHeaderSignature:
data.Seek(current, SeekOrigin.Begin);
return -1;
}
// Seek backward 5 bytes, if possible
data.Seek(-5, SeekOrigin.Current);
}
// No signature was found
data.Seek(current, SeekOrigin.Begin);
return -1;
}
/// <summary>
/// Parse a Stream into a ZIP64 end of central directory locator
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ZIP64 end of central directory locator on success, null on error</returns>
public static EndOfCentralDirectoryLocator64? ParseEndOfCentralDirectoryLocator64(Stream data)
{
return data.ReadType<EndOfCentralDirectoryLocator64>();
}
/// <summary>
/// Parse a Stream into a ZIP64 end of central directory record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ZIP64 end of central directory record on success, null on error</returns>
public static EndOfCentralDirectoryRecord64? ParseEndOfCentralDirectoryRecord64(Stream data)
{
// TODO: Use marshalling here instead of building
var record = new EndOfCentralDirectoryRecord64();
record.Signature = data.ReadUInt32();
if (record.Signature != EndOfCentralDirectoryRecord64Signature)
return null;
record.DirectoryRecordSize = data.ReadUInt64();
record.HostSystem = (HostSystem)data.ReadByteValue();
record.VersionMadeBy = data.ReadByteValue();
record.VersionNeededToExtract = data.ReadUInt16();
record.DiskNumber = data.ReadUInt32();
record.StartDiskNumber = data.ReadUInt32();
record.TotalEntriesOnDisk = data.ReadUInt64();
record.TotalEntries = data.ReadUInt64();
record.CentralDirectorySize = data.ReadUInt64();
record.CentralDirectoryOffset = data.ReadUInt64();
// TODO: Handle the ExtensibleDataSector -- How to detect if exists?
return record;
}
/// <summary>
/// Parse a Stream into a central directory file header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled central directory file header on success, null on error</returns>
public static CentralDirectoryFileHeader? ParseCentralDirectoryFileHeader(Stream data)
{
// TODO: Use marshalling here instead of building
var header = new CentralDirectoryFileHeader();
header.Signature = data.ReadUInt32();
if (header.Signature != CentralDirectoryFileHeaderSignature)
return null;
header.HostSystem = (HostSystem)data.ReadByteValue();
header.VersionMadeBy = data.ReadByteValue();
header.VersionNeededToExtract = data.ReadUInt16();
header.Flags = (GeneralPurposeBitFlags)data.ReadUInt16();
header.CompressionMethod = (CompressionMethod)data.ReadUInt16();
header.LastModifedFileTime = data.ReadUInt16();
header.LastModifiedFileDate = data.ReadUInt16();
header.CRC32 = data.ReadUInt32();
header.CompressedSize = data.ReadUInt32();
header.UncompressedSize = data.ReadUInt32();
header.FileNameLength = data.ReadUInt16();
header.ExtraFieldLength = data.ReadUInt16();
header.FileCommentLength = data.ReadUInt16();
header.DiskNumberStart = data.ReadUInt16();
header.InternalFileAttributes = (InternalFileAttributes)data.ReadUInt16();
header.ExternalFileAttributes = data.ReadUInt32();
header.RelativeOffsetOfLocalHeader = data.ReadUInt32();
if (header.FileNameLength > 0)
{
byte[] filenameBytes = data.ReadBytes(header.FileNameLength);
if (filenameBytes.Length != header.FileNameLength)
return null;
header.FileName = Encoding.ASCII.GetString(filenameBytes);
}
if (header.ExtraFieldLength > 0)
{
byte[] extraBytes = data.ReadBytes(header.ExtraFieldLength);
if (extraBytes.Length != header.ExtraFieldLength)
return null;
header.ExtraField = extraBytes;
}
if (header.FileCommentLength > 0)
{
byte[] commentBytes = data.ReadBytes(header.FileCommentLength);
if (commentBytes.Length != header.FileCommentLength)
return null;
header.FileComment = Encoding.ASCII.GetString(commentBytes);
}
return header;
}
/// <summary>
/// Search for the archive extra data record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="centralDirectoryoffset">Offset to the first central directory record</param>
/// <returns>Position of the archive extra data record, -1 on error</returns>
public static long SearchForArchiveExtraDataRecord(Stream data, long centralDirectoryOffset)
{
// Cache the current offset
long current = data.Position;
// Seek to the minimum size of the record from the central directory
data.Seek(centralDirectoryOffset - 8, SeekOrigin.Begin);
// Attempt to find the archive extra data record signature
while (data.Position > 0)
{
// Read the potential signature
uint possibleSignature = data.ReadUInt32();
if (possibleSignature == ArchiveExtraDataRecordSignature)
{
long signaturePosition = data.Position - 4;
data.Seek(current, SeekOrigin.Begin);
return signaturePosition;
}
// If we find any other signature
switch (possibleSignature)
{
case CentralDirectoryFileHeaderSignature:
case DataDescriptorSignature:
case DigitalSignatureSignature:
case EndOfCentralDirectoryLocator64Signature:
case EndOfCentralDirectoryRecordSignature:
case EndOfCentralDirectoryRecord64Signature:
case LocalFileHeaderSignature:
data.Seek(current, SeekOrigin.Begin);
return -1;
}
// Seek backward 5 bytes, if possible
data.Seek(-5, SeekOrigin.Current);
}
// No signature was found
data.Seek(current, SeekOrigin.Begin);
return -1;
}
/// <summary>
/// Parse a Stream into an archive extra data record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled archive extra data record on success, null on error</returns>
public static ArchiveExtraDataRecord? ParseArchiveExtraDataRecord(Stream data)
{
// TODO: Use marshalling here instead of building
var record = new ArchiveExtraDataRecord();
record.Signature = data.ReadUInt32();
if (record.Signature != ArchiveExtraDataRecordSignature)
return null;
record.ExtraFieldLength = data.ReadUInt32();
if (record.ExtraFieldLength > 0)
{
byte[] extraBytes = data.ReadBytes((int)record.ExtraFieldLength);
if (extraBytes.Length != record.ExtraFieldLength)
return null;
record.ExtraFieldData = extraBytes;
}
return record;
}
/// <summary>
/// Parse a Stream into a local file header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled local file header on success, null on error</returns>
public static LocalFileHeader? ParseLocalFileHeader(Stream data)
{
// TODO: Use marshalling here instead of building
var header = new LocalFileHeader();
header.Signature = data.ReadUInt32();
if (header.Signature != LocalFileHeaderSignature)
return null;
header.Version = data.ReadUInt16();
header.Flags = (GeneralPurposeBitFlags)data.ReadUInt16();
header.CompressionMethod = (CompressionMethod)data.ReadUInt16();
header.LastModifedFileTime = data.ReadUInt16();
header.LastModifiedFileDate = data.ReadUInt16();
header.CRC32 = data.ReadUInt32();
header.CompressedSize = data.ReadUInt32();
header.UncompressedSize = data.ReadUInt32();
header.FileNameLength = data.ReadUInt16();
header.ExtraFieldLength = data.ReadUInt16();
if (header.FileNameLength > 0)
{
byte[] filenameBytes = data.ReadBytes(header.FileNameLength);
if (filenameBytes.Length != header.FileNameLength)
return null;
header.FileName = Encoding.ASCII.GetString(filenameBytes);
}
if (header.ExtraFieldLength > 0)
{
byte[] extraBytes = data.ReadBytes(header.ExtraFieldLength);
if (extraBytes.Length != header.ExtraFieldLength)
return null;
header.ExtraField = extraBytes;
}
return header;
}
/// <summary>
/// Parse a Stream into a data descriptor
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled data descriptor on success, null on error</returns>
public static DataDescriptor? ParseDataDescriptor(Stream data)
{
// TODO: Use marshalling here instead of building
var dataDescriptor = new DataDescriptor();
// Signatures are expected but not required
dataDescriptor.Signature = data.ReadUInt32();
if (dataDescriptor.Signature != DataDescriptorSignature)
data.Seek(-4, SeekOrigin.Current);
dataDescriptor.CRC32 = data.ReadUInt32();
dataDescriptor.CompressedSize = data.ReadUInt32();
dataDescriptor.UncompressedSize = data.ReadUInt32();
return dataDescriptor;
}
/// <summary>
/// Parse a Stream into a ZIP64 data descriptor
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ZIP64 data descriptor on success, null on error</returns>
public static DataDescriptor64? ParseDataDescriptor64(Stream data)
{
// TODO: Use marshalling here instead of building
var zip64DataDescriptor = new DataDescriptor64();
// Signatures are expected but not required
zip64DataDescriptor.Signature = data.ReadUInt32();
if (zip64DataDescriptor.Signature != DataDescriptorSignature)
data.Seek(-4, SeekOrigin.Current);
zip64DataDescriptor.CRC32 = data.ReadUInt32();
zip64DataDescriptor.CompressedSize = data.ReadUInt64();
zip64DataDescriptor.UncompressedSize = data.ReadUInt64();
return zip64DataDescriptor;
}
}
}
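For context, a minimal usage sketch of the deserializer added above; the file name is a placeholder and error handling is omitted:

using System;
using System.IO;
using SabreTools.Serialization.Deserializers;

// "example.zip" is a placeholder path, not part of this change.
using var stream = File.OpenRead("example.zip");
var archive = new PKZIP().Deserialize(stream);
if (archive?.CentralDirectoryHeaders != null)
{
    foreach (var header in archive.CentralDirectoryHeaders)
        Console.WriteLine($"{header?.FileName}: {header?.CompressedSize} -> {header?.UncompressedSize} bytes");
}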

View File

@@ -495,7 +495,7 @@ namespace SabreTools.Serialization.Deserializers
entry.Value = data.ReadUInt32();
entry.SectionNumber = data.ReadUInt16();
entry.SymbolType = (SymbolType)data.ReadUInt16();
entry.StorageClass = (StorageClass)data.ReadByte();
entry.StorageClass = (StorageClass)data.ReadByteValue();
entry.NumberOfAuxSymbols = data.ReadByteValue();
coffSymbolTable[i] = entry;

View File

@@ -11,7 +11,7 @@ namespace SabreTools.Serialization
/// </summary>
/// <param name="path">Path to open as a stream</param>
/// <returns>Stream representing the file, null on error</returns>
public static Stream? OpenStream(string? path)
public static Stream? OpenStream(string? path, bool skipCompression = false)
{
try
{
@@ -26,11 +26,11 @@ namespace SabreTools.Serialization
string ext = Path.GetExtension(path).TrimStart('.');
// Determine what we do based on the extension
if (string.Equals(ext, "gz", StringComparison.OrdinalIgnoreCase))
if (!skipCompression && string.Equals(ext, "gz", StringComparison.OrdinalIgnoreCase))
{
return new GZipStream(stream, CompressionMode.Decompress);
}
else if (string.Equals(ext, "zip", StringComparison.OrdinalIgnoreCase))
else if (!skipCompression && string.Equals(ext, "zip", StringComparison.OrdinalIgnoreCase))
{
// TODO: Support zip-compressed files
return null;
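The effect of the new flag in short, assuming PathProcessor is reachable from the caller; the paths below are placeholders:

// Default behavior still wraps .gz input for decompression ...
var decompressed = PathProcessor.OpenStream("data.bin.gz");
// ... while callers that parse the compressed container themselves
// (such as the PKZIP deserializer via SkipCompression) get the raw file stream.
var raw = PathProcessor.OpenStream("data.bin.gz", skipCompression: true);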

View File

@@ -54,6 +54,7 @@ namespace SabreTools.Serialization
Wrapper.PAK item => item.PrettyPrint(),
Wrapper.PFF item => item.PrettyPrint(),
Wrapper.PIC item => item.PrettyPrint(),
Wrapper.PKZIP item => item.PrettyPrint(),
Wrapper.PlayJAudioFile item => item.PrettyPrint(),
Wrapper.PlayJPlaylist item => item.PrettyPrint(),
Wrapper.PortableExecutable item => item.PrettyPrint(),
@@ -97,6 +98,7 @@ namespace SabreTools.Serialization
Wrapper.PAK item => item.ExportJSON(),
Wrapper.PFF item => item.ExportJSON(),
Wrapper.PIC item => item.ExportJSON(),
Wrapper.PKZIP item => item.ExportJSON(),
Wrapper.PlayJAudioFile item => item.ExportJSON(),
Wrapper.PlayJPlaylist item => item.ExportJSON(),
Wrapper.PortableExecutable item => item.ExportJSON(),
@@ -314,6 +316,16 @@ namespace SabreTools.Serialization
PIC.Print(builder, item.Model);
return builder;
}
/// <summary>
/// Export the item information as pretty-printed text
/// </summary>
private static StringBuilder PrettyPrint(this Wrapper.PKZIP item)
{
var builder = new StringBuilder();
PKZIP.Print(builder, item.Model);
return builder;
}
/// <summary>
/// Export the item information as pretty-printed text

View File

@@ -0,0 +1,279 @@
using System.Text;
using SabreTools.Models.PKZIP;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Printers
{
public class PKZIP : IPrinter<Archive>
{
/// <inheritdoc/>
public void PrintInformation(StringBuilder builder, Archive model)
=> Print(builder, model);
public static void Print(StringBuilder builder, Archive archive)
{
builder.AppendLine("PKZIP Archive (or Derived Format) Information:");
builder.AppendLine("-------------------------");
builder.AppendLine();
Print(builder, archive.EndOfCentralDirectoryRecord);
Print(builder, archive.ZIP64EndOfCentralDirectoryLocator);
Print(builder, archive.ZIP64EndOfCentralDirectoryRecord);
Print(builder, archive.CentralDirectoryHeaders);
Print(builder, archive.ArchiveExtraDataRecord);
Print(builder,
archive.LocalFileHeaders,
archive.EncryptionHeaders,
archive.FileData,
archive.DataDescriptors,
archive.ZIP64DataDescriptors);
}
private static void Print(StringBuilder builder, EndOfCentralDirectoryRecord? record)
{
builder.AppendLine(" End of Central Directory Record Information:");
builder.AppendLine(" -------------------------");
if (record == null)
{
builder.AppendLine(" No end of central directory record");
builder.AppendLine();
return;
}
builder.AppendLine(record.Signature, " Signature");
builder.AppendLine(record.DiskNumber, " Disk number");
builder.AppendLine(record.StartDiskNumber, " Start disk number");
builder.AppendLine(record.TotalEntriesOnDisk, " Total entries on disk");
builder.AppendLine(record.TotalEntries, " Total entries");
builder.AppendLine(record.CentralDirectorySize, " Central directory size");
builder.AppendLine(record.CentralDirectoryOffset, " Central directory offset");
builder.AppendLine(record.FileCommentLength, " File comment length");
builder.AppendLine(record.FileComment, " File comment");
builder.AppendLine();
}
private static void Print(StringBuilder builder, EndOfCentralDirectoryLocator64? locator)
{
builder.AppendLine(" ZIP64 End of Central Directory Locator Information:");
builder.AppendLine(" -------------------------");
if (locator == null)
{
builder.AppendLine(" No ZIP64 end of central directory locator");
builder.AppendLine();
return;
}
builder.AppendLine(locator.Signature, " Signature");
builder.AppendLine(locator.StartDiskNumber, " Start disk number");
builder.AppendLine(locator.CentralDirectoryOffset, " Central directory offset");
builder.AppendLine(locator.TotalDisks, " Total disks");
builder.AppendLine();
}
private static void Print(StringBuilder builder, EndOfCentralDirectoryRecord64? record)
{
builder.AppendLine(" ZIP64 End of Central Directory Record Information:");
builder.AppendLine(" -------------------------");
if (record == null)
{
builder.AppendLine(" No ZIP64 end of central directory record");
builder.AppendLine();
return;
}
builder.AppendLine(record.Signature, " Signature");
builder.AppendLine(record.DirectoryRecordSize, " Directory record size");
builder.AppendLine($" Host system: {record.HostSystem} (0x{record.HostSystem:X})");
builder.AppendLine(record.VersionMadeBy, " Version made by");
builder.AppendLine(record.VersionNeededToExtract, " Version needed to extract");
builder.AppendLine(record.DiskNumber, " Disk number");
builder.AppendLine(record.StartDiskNumber, " Start disk number");
builder.AppendLine(record.TotalEntriesOnDisk, " Total entries on disk");
builder.AppendLine(record.TotalEntries, " Total entries");
builder.AppendLine(record.CentralDirectorySize, " Central directory size");
builder.AppendLine(record.CentralDirectoryOffset, " Central directory offset");
//builder.AppendLine(record.ExtensibleDataSector, " Extensible data sector");
builder.AppendLine();
}
private static void Print(StringBuilder builder, CentralDirectoryFileHeader?[]? headers)
{
builder.AppendLine(" Central Directory File Headers Information:");
builder.AppendLine(" -------------------------");
if (headers == null || headers.Length == 0)
{
builder.AppendLine(" No central directory file headers");
builder.AppendLine();
return;
}
for (int i = 0; i < headers.Length; i++)
{
var record = headers[i];
Print(builder, record, i);
}
builder.AppendLine();
}
private static void Print(StringBuilder builder, CentralDirectoryFileHeader? header, int index)
{
builder.AppendLine($" Central Directory File Header Entry {index}");
if (header == null)
{
builder.AppendLine(" [NULL]");
return;
}
builder.AppendLine(header.Signature, " Signature");
builder.AppendLine($" Host system: {header.HostSystem} (0x{header.HostSystem:X})");
builder.AppendLine(header.VersionMadeBy, " Version made by");
builder.AppendLine(header.VersionNeededToExtract, " Version needed to extract");
builder.AppendLine($" Flags: {header.Flags} (0x{header.Flags:X})");
builder.AppendLine($" Compression method: {header.CompressionMethod} (0x{header.CompressionMethod:X})");
builder.AppendLine(header.LastModifedFileTime, " Last modified file time"); // TODO: Parse from MS-DOS
builder.AppendLine(header.LastModifiedFileDate, " Last modified file date"); // TODO: Parse from MS-DOS
builder.AppendLine(header.CRC32, " CRC-32");
builder.AppendLine(header.CompressedSize, " Compressed size");
builder.AppendLine(header.UncompressedSize, " Uncompressed size");
builder.AppendLine(header.FileNameLength, " File name length");
builder.AppendLine(header.ExtraFieldLength, " Extra field length");
builder.AppendLine(header.FileCommentLength, " File comment length");
builder.AppendLine(header.DiskNumberStart, " Disk number start");
builder.AppendLine($" Internal file attributes: {header.InternalFileAttributes} (0x{header.InternalFileAttributes:X})");
builder.AppendLine(header.ExternalFileAttributes, " External file attributes");
builder.AppendLine(header.RelativeOffsetOfLocalHeader, " Relative offset of local header");
builder.AppendLine(header.FileName, " File name");
builder.AppendLine(header.ExtraField, " Extra field");
builder.AppendLine(header.FileComment, " File comment");
}
private static void Print(StringBuilder builder, ArchiveExtraDataRecord? record)
{
builder.AppendLine(" Archive Extra Data Record Information:");
builder.AppendLine(" -------------------------");
if (record == null)
{
builder.AppendLine(" No archive extra data record");
builder.AppendLine();
return;
}
builder.AppendLine(record.Signature, " Signature");
builder.AppendLine(record.ExtraFieldLength, " Extra field length");
builder.AppendLine(record.ExtraFieldData, " Extra field data");
builder.AppendLine();
}
private static void Print(StringBuilder builder,
LocalFileHeader[]? localFileHeaders,
byte[]?[]? encryptionHeaders,
byte[][]? fileData,
DataDescriptor?[]? dataDescriptors,
DataDescriptor64?[]? zip64DataDescriptors)
{
builder.AppendLine(" Local File Information:");
builder.AppendLine(" -------------------------");
if (localFileHeaders == null || localFileHeaders.Length == 0)
{
builder.AppendLine(" No local files");
builder.AppendLine();
return;
}
if (encryptionHeaders == null || localFileHeaders.Length > encryptionHeaders.Length
|| fileData == null || localFileHeaders.Length > fileData.Length
|| dataDescriptors == null || localFileHeaders.Length > dataDescriptors.Length
|| zip64DataDescriptors == null || localFileHeaders.Length > zip64DataDescriptors.Length)
{
builder.AppendLine(" Mismatch in local file array values");
builder.AppendLine();
}
for (int i = 0; i < localFileHeaders.Length; i++)
{
var localFileHeader = localFileHeaders[i];
var encryptionHeader = encryptionHeaders != null && i < encryptionHeaders.Length ? encryptionHeaders[i] : null;
var fileDatum = fileData != null && i < fileData.Length ? fileData[i] : null;
var dataDescriptor = dataDescriptors != null && i < dataDescriptors.Length ? dataDescriptors[i] : null;
var zip64DataDescriptor = zip64DataDescriptors != null && i < zip64DataDescriptors.Length ? zip64DataDescriptors[i] : null;
Print(builder, localFileHeader, encryptionHeader, fileDatum, dataDescriptor, zip64DataDescriptor, i);
}
builder.AppendLine();
}
private static void Print(StringBuilder builder,
LocalFileHeader localFileHeader,
byte[]? encryptionHeader,
byte[]? fileData,
DataDescriptor? dataDescriptor,
DataDescriptor64? zip64DataDescriptor,
int index)
{
builder.AppendLine($" Local File Entry {index}");
if (localFileHeader == null)
{
builder.AppendLine(" [NULL]");
return;
}
builder.AppendLine(localFileHeader.Signature, " [Local File Header] Signature");
builder.AppendLine(localFileHeader.Version, " [Local File Header] Version");
builder.AppendLine($" [Local File Header] Flags: {localFileHeader.Flags} (0x{localFileHeader.Flags:X})");
builder.AppendLine($" [Local File Header] Compression method: {localFileHeader.CompressionMethod} (0x{localFileHeader.CompressionMethod:X})");
builder.AppendLine(localFileHeader.LastModifedFileTime, " [Local File Header] Last modified file time"); // TODO: Parse from MS-DOS
builder.AppendLine(localFileHeader.LastModifiedFileDate, " [Local File Header] Last modified file date"); // TODO: Parse from MS-DOS
builder.AppendLine(localFileHeader.CRC32, " [Local File Header] CRC-32");
builder.AppendLine(localFileHeader.CompressedSize, " [Local File Header] Compressed size");
builder.AppendLine(localFileHeader.UncompressedSize, " [Local File Header] Uncompressed size");
builder.AppendLine(localFileHeader.FileNameLength, " [Local File Header] File name length");
builder.AppendLine(localFileHeader.ExtraFieldLength, " [Local File Header] Extra field length");
builder.AppendLine(localFileHeader.FileName, " [Local File Header] File name");
builder.AppendLine(localFileHeader.ExtraField, " [Local File Header] Extra field");
if (encryptionHeader == null)
{
builder.AppendLine(" [Encryption Header]: [NULL]");
}
else
{
builder.AppendLine(encryptionHeader.Length, " [Encryption Header] Length");
builder.AppendLine(encryptionHeader, " [Encryption Header] Data");
}
if (fileData == null)
{
builder.AppendLine(" [File Data]: [NULL]");
}
else
{
builder.AppendLine(fileData.Length, " [File Data] Length");
//builder.AppendLine(fileData, " [File Data] Data");
}
if (dataDescriptor == null)
{
builder.AppendLine(" [Data Descriptor]: [NULL]");
}
else
{
builder.AppendLine(dataDescriptor.Signature, " [Data Descriptor] Signature");
builder.AppendLine(dataDescriptor.CRC32, " [Data Descriptor] CRC-32");
builder.AppendLine(dataDescriptor.CompressedSize, " [Data Descriptor] Compressed size");
builder.AppendLine(dataDescriptor.UncompressedSize, " [Data Descriptor] Uncompressed size");
}
if (zip64DataDescriptor == null)
{
builder.AppendLine(" [ZIP64 Data Descriptor]: [NULL]");
}
else
{
builder.AppendLine(zip64DataDescriptor.Signature, " [ZIP64 Data Descriptor] Signature");
builder.AppendLine(zip64DataDescriptor.CRC32, " [ZIP64 Data Descriptor] CRC-32");
builder.AppendLine(zip64DataDescriptor.CompressedSize, " [ZIP64 Data Descriptor] Compressed size");
builder.AppendLine(zip64DataDescriptor.UncompressedSize, " [ZIP64 Data Descriptor] Uncompressed size");
}
}
}
}

View File

@@ -8,7 +8,7 @@
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.6.0</Version>
<Version>1.6.3</Version>
<!-- Package Properties -->
<Authors>Matt Nadareski</Authors>
@@ -30,7 +30,7 @@
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.ASN1" Version="1.3.2" />
<PackageReference Include="SabreTools.Hashing" Version="1.2.0" />
<PackageReference Include="SabreTools.IO" Version="1.4.5" />
<PackageReference Include="SabreTools.IO" Version="1.4.10" />
<PackageReference Include="SabreTools.Models" Version="1.4.5" />
</ItemGroup>

View File

@@ -31,7 +31,7 @@ namespace SabreTools.Serialization
public static StringBuilder AppendLine(this StringBuilder sb, sbyte? value, string prefixString)
{
value ??= 0;
string valueString = $"{value} (0x{value:X})";
string valueString = $"{value} (0x{value:X2})";
return sb.AppendLine($"{prefixString}: {valueString}");
}
@@ -41,7 +41,7 @@ namespace SabreTools.Serialization
public static StringBuilder AppendLine(this StringBuilder sb, byte? value, string prefixString)
{
value ??= 0;
string valueString = $"{value} (0x{value:X})";
string valueString = $"{value} (0x{value:X2})";
return sb.AppendLine($"{prefixString}: {valueString}");
}
@@ -51,7 +51,7 @@ namespace SabreTools.Serialization
public static StringBuilder AppendLine(this StringBuilder sb, short? value, string prefixString)
{
value ??= 0;
string valueString = $"{value} (0x{value:X})";
string valueString = $"{value} (0x{value:X4})";
return sb.AppendLine($"{prefixString}: {valueString}");
}
@@ -61,7 +61,7 @@ namespace SabreTools.Serialization
public static StringBuilder AppendLine(this StringBuilder sb, ushort? value, string prefixString)
{
value ??= 0;
string valueString = $"{value} (0x{value:X})";
string valueString = $"{value} (0x{value:X4})";
return sb.AppendLine($"{prefixString}: {valueString}");
}
@@ -71,7 +71,7 @@ namespace SabreTools.Serialization
public static StringBuilder AppendLine(this StringBuilder sb, int? value, string prefixString)
{
value ??= 0;
string valueString = $"{value} (0x{value:X})";
string valueString = $"{value} (0x{value:X8})";
return sb.AppendLine($"{prefixString}: {valueString}");
}
@@ -81,7 +81,7 @@ namespace SabreTools.Serialization
public static StringBuilder AppendLine(this StringBuilder sb, uint? value, string prefixString)
{
value ??= 0;
string valueString = $"{value} (0x{value:X})";
string valueString = $"{value} (0x{value:X8})";
return sb.AppendLine($"{prefixString}: {valueString}");
}
@@ -91,7 +91,7 @@ namespace SabreTools.Serialization
public static StringBuilder AppendLine(this StringBuilder sb, long? value, string prefixString)
{
value ??= 0;
string valueString = $"{value} (0x{value:X})";
string valueString = $"{value} (0x{value:X16})";
return sb.AppendLine($"{prefixString}: {valueString}");
}
@@ -101,7 +101,7 @@ namespace SabreTools.Serialization
public static StringBuilder AppendLine(this StringBuilder sb, ulong? value, string prefixString)
{
value ??= 0;
string valueString = $"{value} (0x{value:X})";
string valueString = $"{value} (0x{value:X16})";
return sb.AppendLine($"{prefixString}: {valueString}");
}
@@ -110,7 +110,6 @@ namespace SabreTools.Serialization
/// </summary>
public static StringBuilder AppendLine(this StringBuilder sb, string? value, string prefixString)
{
value ??= string.Empty;
string valueString = value ?? "[NULL]";
return sb.AppendLine($"{prefixString}: {valueString}");
}
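The width specifiers pad the hexadecimal part to the natural size of each integer type, and removing the null-coalescing line lets null strings print as [NULL]. A small illustration, assuming the extension methods are in scope via the SabreTools.Serialization namespace; the expected output is shown in comments:

var sb = new System.Text.StringBuilder();
sb.AppendLine((ushort?)31, "    Disk number");             // "    Disk number: 31 (0x001F)"
sb.AppendLine((uint?)1234, "    Central directory size");  // "    Central directory size: 1234 (0x000004D2)"
sb.AppendLine((string?)null, "    File comment");          // "    File comment: [NULL]"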

View File

@@ -0,0 +1,79 @@
using System.IO;
using SabreTools.Models.PKZIP;
namespace SabreTools.Serialization.Wrappers
{
public class PKZIP : WrapperBase<Archive>
{
#region Descriptive Properties
/// <inheritdoc/>
public override string DescriptionString => "PKZIP Archive (or Derived Format)";
#endregion
#region Constructors
/// <inheritdoc/>
public PKZIP(Archive? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
/// <inheritdoc/>
public PKZIP(Archive? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
}
/// <summary>
/// Create a PKZIP archive (or derived format) from a byte array and offset
/// </summary>
/// <param name="data">Byte array representing the archive</param>
/// <param name="offset">Offset within the array to parse</param>
/// <returns>A PKZIP wrapper on success, null on failure</returns>
public static PKZIP? Create(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and use that
var dataStream = new MemoryStream(data, offset, data.Length - offset);
return Create(dataStream);
}
/// <summary>
/// Create a PKZIP archive (or derived format) from a Stream
/// </summary>
/// <param name="data">Stream representing the archive</param>
/// <returns>A PKZIP wrapper on success, null on failure</returns>
public static PKZIP? Create(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
var archive = Deserializers.PKZIP.DeserializeStream(data);
if (archive == null)
return null;
try
{
return new PKZIP(archive, data);
}
catch
{
return null;
}
}
#endregion
}
}
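A brief usage sketch for the new wrapper; the path is a placeholder, and direct access to the wrapper's Model property is assumed to be available to callers:

using System;
using System.IO;
using SabreTools.Serialization.Wrappers;

// "example.zip" is a placeholder path.
using var stream = File.OpenRead("example.zip");
var zip = PKZIP.Create(stream);
if (zip != null)
{
    Console.WriteLine(zip.DescriptionString);
    Console.WriteLine($"Entries: {zip.Model.CentralDirectoryHeaders?.Length ?? 0}");
}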

View File

@@ -38,7 +38,7 @@ namespace SabreTools.Serialization.Wrappers
case WrapperType.PAK: return PAK.Create(data);
case WrapperType.PFF: return PFF.Create(data);
case WrapperType.PIC: return PIC.Create(data);
case WrapperType.PKZIP: return null; // TODO: Implement wrapper
case WrapperType.PKZIP: return PKZIP.Create(data);
case WrapperType.PlayJAudioFile: return PlayJAudioFile.Create(data);
case WrapperType.PlayJPlaylist: return PlayJPlaylist.Create(data);
case WrapperType.Quantum: return Quantum.Create(data);

View File

@@ -136,7 +136,6 @@ namespace SabreTools.Serialization.Wrappers
/// <summary>
/// PKWARE ZIP archive and derivatives
/// </summary>
/// <remarks>Currently has no IWrapper implementation</remarks>
PKZIP,
/// <summary>