Mirror of https://github.com/SabreTools/SabreTools.Serialization.git (synced 2026-02-04 05:36:12 +00:00)
Update Nuget packages
@@ -66,7 +66,7 @@
   </ItemGroup>
 
   <ItemGroup>
-    <PackageReference Include="SabreTools.IO" Version="1.7.1" />
+    <PackageReference Include="SabreTools.IO" Version="1.7.2" />
     <PackageReference Include="System.Text.Encoding.CodePages" Version="9.0.8" Condition="!$(TargetFramework.StartsWith(`net2`)) AND !$(TargetFramework.StartsWith(`net3`)) AND !$(TargetFramework.StartsWith(`net40`)) AND !$(TargetFramework.StartsWith(`net452`))" />
   </ItemGroup>
 
@@ -32,7 +32,7 @@
   </ItemGroup>
 
   <ItemGroup>
-    <PackageReference Include="SabreTools.IO" Version="1.7.1" />
+    <PackageReference Include="SabreTools.IO" Version="1.7.2" />
     <PackageReference Include="SabreTools.Hashing" Version="1.5.0" />
   </ItemGroup>
 
@@ -28,7 +28,7 @@
     </PackageReference>
     <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" />
     <PackageReference Include="SabreTools.Hashing" Version="1.5.0" />
-    <PackageReference Include="SabreTools.Models" Version="1.7.0" />
+    <PackageReference Include="SabreTools.Models" Version="1.7.1" />
     <PackageReference Include="xunit" Version="2.9.3" />
     <PackageReference Include="xunit.runner.visualstudio" Version="3.1.4">
       <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
@@ -1,40 +0,0 @@
-using System.Collections.Generic;
-using System.Text;
-using SabreTools.IO.Extensions;
-
-namespace SabreTools.Serialization
-{
-    // TODO: Move this to IO
-    public static class ByteArrayExtensions
-    {
-        /// <summary>
-        /// Read string data from the source
-        /// </summary>
-        /// <param name="charLimit">Number of characters needed to be a valid string, default 5</param>
-        /// <returns>String list containing the requested data, null on error</returns>
-        public static List<string>? ReadStringsFrom(this byte[]? input, int charLimit = 5)
-        {
-            // Validate the data
-            if (input == null)
-                return null;
-
-            // Check for ASCII strings
-            var asciiStrings = input.ReadStringsWithEncoding(charLimit, Encoding.ASCII);
-
-            // Check for UTF-8 strings
-            // We are limiting the check for Unicode characters with a second byte of 0x00 for now
-            var utf8Strings = input.ReadStringsWithEncoding(charLimit, Encoding.UTF8);
-
-            // Check for Unicode strings
-            // We are limiting the check for Unicode characters with a second byte of 0x00 for now
-            var unicodeStrings = input.ReadStringsWithEncoding(charLimit, Encoding.Unicode);
-
-            // Ignore duplicate strings across encodings
-            List<string> sourceStrings = [.. asciiStrings, .. utf8Strings, .. unicodeStrings];
-
-            // Sort the strings and return
-            sourceStrings.Sort();
-            return sourceStrings;
-        }
-    }
-}
@@ -45,7 +45,7 @@ namespace SabreTools.Serialization.Deserializers
             {
                 // Central Directory File Header
                 case CentralDirectoryFileHeaderSignature:
-                    var cdr = ParseCentralDirectoryFileHeader(data, out _);
+                    var cdr = ParseCentralDirectoryFileHeader(data);
                     if (cdr == null)
                         return null;
 
@@ -170,10 +170,9 @@ namespace SabreTools.Serialization.Deserializers
         /// </summary>
         /// <param name="data">Stream to parse</param>
         /// <returns>Filled central directory file header on success, null on error</returns>
-        public static CentralDirectoryFileHeader? ParseCentralDirectoryFileHeader(Stream data, out ExtensibleDataField[]? extraFields)
+        public static CentralDirectoryFileHeader? ParseCentralDirectoryFileHeader(Stream data)
         {
             var obj = new CentralDirectoryFileHeader();
-            extraFields = null;
 
             obj.Signature = data.ReadUInt32LittleEndian();
             if (obj.Signature != CentralDirectoryFileHeaderSignature)
@@ -220,8 +219,7 @@ namespace SabreTools.Serialization.Deserializers
                 if (extraBytes.Length != obj.ExtraFieldLength)
                     return null;
 
-                // TODO: This should live on the model instead of the byte representation
-                extraFields = ParseExtraFields(obj, extraBytes);
+                obj.ExtraFields = ParseExtraFields(obj, extraBytes);
             }
             if (obj.FileCommentLength > 0 && data.Position + obj.FileCommentLength <= data.Length)
             {
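With the changes above, ParseCentralDirectoryFileHeader no longer returns extra-field data through an out parameter; the parsed entries are stored on the returned header's ExtraFields property instead. A minimal sketch of the post-change call pattern (the wrapper class and helper method below are hypothetical; only PKZIP.ParseCentralDirectoryFileHeader and ExtraFields appear in this diff):

    using System.IO;
    using SabreTools.Serialization.Deserializers;

    internal static class CentralDirectoryExample
    {
        // Hypothetical helper for illustration only; `data` must already be
        // positioned at a central directory file header record.
        public static int CountExtraFields(Stream data)
        {
            // New single-parameter overload introduced by this commit
            var cdr = PKZIP.ParseCentralDirectoryFileHeader(data);
            if (cdr?.ExtraFields == null)
                return 0;

            // Extra fields now live on the parsed model instead of an `out` parameter
            int count = 0;
            foreach (var field in cdr.ExtraFields)
                count++;

            return count;
        }
    }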
@@ -416,7 +414,7 @@ namespace SabreTools.Serialization.Deserializers
             #region Local File Header
 
             // Try to read the header
-            var localFileHeader = ParseLocalFileHeader(data, out var extraFields);
+            var localFileHeader = ParseLocalFileHeader(data);
             if (localFileHeader == null)
                 return null;
 
@@ -424,9 +422,9 @@ namespace SabreTools.Serialization.Deserializers
             obj.LocalFileHeader = localFileHeader;
 
             ulong compressedSize = localFileHeader.CompressedSize;
-            if (extraFields != null)
+            if (localFileHeader.ExtraFields != null)
             {
-                foreach (var field in extraFields)
+                foreach (var field in localFileHeader.ExtraFields)
                 {
                     if (field is not Zip64ExtendedInformationExtraField infoField)
                         continue;
@@ -532,10 +530,9 @@ namespace SabreTools.Serialization.Deserializers
         /// </summary>
         /// <param name="data">Stream to parse</param>
         /// <returns>Filled local file header on success, null on error</returns>
-        public static LocalFileHeader? ParseLocalFileHeader(Stream data, out ExtensibleDataField[]? extraFields)
+        public static LocalFileHeader? ParseLocalFileHeader(Stream data)
         {
             var obj = new LocalFileHeader();
-            extraFields = null;
 
             obj.Signature = data.ReadUInt32LittleEndian();
             if (obj.Signature != LocalFileHeaderSignature)
@@ -575,8 +572,7 @@ namespace SabreTools.Serialization.Deserializers
                 if (extraBytes.Length != obj.ExtraFieldLength)
                     return null;
 
-                // TODO: This should live on the model instead of the byte representation
-                extraFields = ParseExtraFields(obj, extraBytes);
+                obj.ExtraFields = ParseExtraFields(obj, extraBytes);
             }
 
             return obj;
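ParseLocalFileHeader gets the same treatment, and the Zip64 size lookup above now iterates localFileHeader.ExtraFields rather than a separate out value. A rough sketch under the same assumptions (the models namespace import and the helper itself are guesses, not part of this diff, and the Zip64 field's own property names are not shown here either):

    using System.IO;
    using SabreTools.Models.PKZIP; // assumed namespace of the Zip64 extra-field model
    using SabreTools.Serialization.Deserializers;

    internal static class LocalHeaderExample
    {
        // Hypothetical helper for illustration only; `data` must already be
        // positioned at a local file header record.
        public static ulong? GetCompressedSize(Stream data)
        {
            // New single-parameter overload introduced by this commit
            var localFileHeader = PKZIP.ParseLocalFileHeader(data);
            if (localFileHeader == null)
                return null;

            ulong compressedSize = localFileHeader.CompressedSize;
            if (localFileHeader.ExtraFields == null)
                return compressedSize;

            foreach (var field in localFileHeader.ExtraFields)
            {
                // Only the Zip64 extended-information field matters for sizing
                if (field is not Zip64ExtendedInformationExtraField infoField)
                    continue;

                // The Zip64 field carries the real 64-bit sizes; its property names
                // are not part of this diff, so the actual read is elided here.
                _ = infoField;
            }

            return compressedSize;
        }
    }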
@@ -381,10 +381,10 @@ namespace SabreTools.Serialization.Deserializers
         {
             var obj = new CLRTokenDefinition();
 
-            obj.AuxFormat6AuxType = data.ReadByteValue();
-            obj.AuxFormat6Reserved1 = data.ReadByteValue();
-            obj.AuxFormat6SymbolTableIndex = data.ReadUInt32LittleEndian();
-            obj.AuxFormat6Reserved2 = data.ReadBytes(12);
+            obj.AuxType = data.ReadByteValue();
+            obj.Reserved1 = data.ReadByteValue();
+            obj.SymbolTableIndex = data.ReadUInt32LittleEndian();
+            obj.Reserved2 = data.ReadBytes(12);
 
             return obj;
         }
@@ -69,10 +69,7 @@ namespace SabreTools.Serialization.Printers
             builder.AppendLine(localFileHeader.FileNameLength, " [Local File Header] File name length");
             builder.AppendLine(localFileHeader.ExtraFieldLength, " [Local File Header] Extra field length");
             builder.AppendLine(localFileHeader.FileName, " [Local File Header] File name");
-
-            // TODO: Reenable this when models are fixed
-            // var extraFields = Deserializers.PKZIP.ParseExtraFields(localFileHeader, localFileHeader.ExtraField);
-            // Print(builder, " [Local File Header] Extra Fields", extraFields);
+            Print(builder, " [Local File Header] Extra Fields", localFileHeader.ExtraFields);
         }
 
         #endregion
@@ -241,10 +238,7 @@ namespace SabreTools.Serialization.Printers
                 builder.AppendLine(entry.RelativeOffsetOfLocalHeader, " Relative offset of local header");
                 builder.AppendLine(entry.FileName, " File name");
                 builder.AppendLine(entry.FileComment, " File comment");
-
-                // TODO: Reenable this when models are fixed
-                // var extraFields = Deserializers.PKZIP.ParseExtraFields(entry, entry.ExtraField);
-                // Print(builder, " Extra Fields", extraFields);
+                Print(builder, " Extra Fields", entry.ExtraFields);
             }
 
             builder.AppendLine();
@@ -410,10 +410,10 @@ namespace SabreTools.Serialization.Printers
         private static void Print(StringBuilder builder, CLRTokenDefinition entry, int i)
         {
             builder.AppendLine($" COFF Symbol Table Entry {i} (CLR Token Defintion)");
-            builder.AppendLine(entry.AuxFormat6AuxType, " Aux type");
-            builder.AppendLine(entry.AuxFormat6Reserved1, " Reserved");
-            builder.AppendLine(entry.AuxFormat6SymbolTableIndex, " Symbol table index");
-            builder.AppendLine(entry.AuxFormat6Reserved2, " Reserved");
+            builder.AppendLine(entry.AuxType, " Aux type");
+            builder.AppendLine(entry.Reserved1, " Reserved");
+            builder.AppendLine(entry.SymbolTableIndex, " Symbol table index");
+            builder.AppendLine(entry.Reserved2, " Reserved");
         }
 
         private static void Print(StringBuilder builder, COFFStringTable? stringTable)
@@ -63,10 +63,10 @@
 
   <ItemGroup>
     <PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
-    <PackageReference Include="SabreTools.ASN1" Version="1.6.0" />
+    <PackageReference Include="SabreTools.ASN1" Version="1.6.2" />
     <PackageReference Include="SabreTools.Hashing" Version="1.5.0" />
-    <PackageReference Include="SabreTools.IO" Version="1.7.1" />
-    <PackageReference Include="SabreTools.Models" Version="1.7.0" />
+    <PackageReference Include="SabreTools.IO" Version="1.7.2" />
+    <PackageReference Include="SabreTools.Models" Version="1.7.1" />
     <PackageReference Include="SabreTools.Matching" Version="1.6.0" />
     <PackageReference Include="SharpCompress" Version="0.40.0" Condition="!$(TargetFramework.StartsWith(`net2`)) AND !$(TargetFramework.StartsWith(`net3`)) AND !$(TargetFramework.StartsWith(`net40`)) AND !$(TargetFramework.StartsWith(`net452`))" />
   </ItemGroup>