Compare commits

...

38 Commits
1.8.1 ... 1.8.6

Author SHA1 Message Date
Matt Nadareski
f0f997fadd Bump version 2024-12-30 22:51:13 -05:00
Matt Nadareski
0ce3c9892d Remove attempt at caching version info strings 2024-12-30 22:40:52 -05:00
Matt Nadareski
9743565285 Update copyright 2024-12-30 21:39:36 -05:00
Matt Nadareski
fcfe9e4790 Remove unnecessary action step 2024-12-30 21:39:30 -05:00
Matt Nadareski
be36432296 Update packages 2024-12-30 21:28:13 -05:00
Matt Nadareski
fb725bff19 Bump version 2024-12-30 20:55:30 -05:00
Matt Nadareski
2384cf9f9f Add source data lock on cached values 2024-12-30 20:47:12 -05:00
Matt Nadareski
1261930fd9 Ensure .NET versions are installed for testing 2024-12-19 10:53:19 -05:00
Matt Nadareski
120de4e49f Allow symbols to be packed 2024-12-18 08:05:16 -05:00
Matt Nadareski
354a51769b Bump version 2024-12-18 08:01:07 -05:00
Matt Nadareski
a9f937baa3 Find remaining non-explicit endianness calls 2024-12-17 22:38:57 -05:00
Matt Nadareski
1790d82a6e Cap overlay checks to 16 MiB 2024-12-17 15:50:02 -05:00
Matt Nadareski
261c20e95a Get rid of erroneous double semicolons 2024-12-17 15:43:22 -05:00
Matt Nadareski
ed6556b1f0 Cache version info strings 2024-12-17 01:05:41 -05:00
Matt Nadareski
a86af8c32a Expand printed detections 2024-12-17 00:52:24 -05:00
Matt Nadareski
1670ab45a0 Fix SFO deserialization 2024-12-17 00:27:30 -05:00
Deterous
7dc4750f3b Add Deserializer for AppPkgHeader (#14)
* Add Deserializer for AppPkgHeader

* Fix typo

* add using System.Text

* Fix typo
2024-12-17 00:10:38 -05:00
Matt Nadareski
b5f366680d Explicit endianness in extensions 2024-12-17 00:00:28 -05:00
Matt Nadareski
fa9e9a0b2b Be explicit about endianness 2024-12-16 23:08:45 -05:00
Matt Nadareski
2239b82a4b Update packages 2024-12-16 15:00:44 -05:00
Matt Nadareski
3b631678f5 Port Quantum extraction (nw) 2024-12-13 14:30:38 -05:00
Matt Nadareski
2b111a9688 Port BFPK extraction 2024-12-13 14:25:32 -05:00
Matt Nadareski
0bda1f4f88 Bump version 2024-12-13 10:44:00 -05:00
Matt Nadareski
7d50e0e1c5 Fix filename map 2024-12-11 21:17:39 -05:00
Matt Nadareski
224a4caab0 Add secondary link for ISAv3 2024-12-11 14:42:00 -05:00
Matt Nadareski
b4689da404 Add reference from UnshieldSharp 2024-12-11 14:39:27 -05:00
Matt Nadareski
af66657399 Slightly safer indexing 2024-12-11 14:34:11 -05:00
Matt Nadareski
0f3e2d8275 Add 2 more extension properties 2024-12-11 14:25:46 -05:00
Matt Nadareski
d664b6defc Use const for data offset 2024-12-11 14:18:32 -05:00
Matt Nadareski
adbf74a6e0 Add ISAv3 extraction 2024-12-11 14:17:35 -05:00
Matt Nadareski
7eb401efed Port obvious things from UnshieldSharp 2024-12-11 14:04:29 -05:00
Matt Nadareski
ba97381b99 Add more ISAv3 stuff 2024-12-11 13:56:01 -05:00
Matt Nadareski
3de92de225 Add Compression package 2024-12-11 13:31:54 -05:00
Matt Nadareski
01a195c8aa Bump version 2024-12-10 15:44:48 -05:00
Matt Nadareski
12d43ef68a Update Models to 1.5.6 2024-12-10 15:41:12 -05:00
Matt Nadareski
0df806a6d1 Display debug in selected options 2024-12-06 11:46:28 -05:00
Matt Nadareski
f8c713b260 Use publish script and update README 2024-12-06 11:42:22 -05:00
Matt Nadareski
4d0122f97c Trust nobody, not even yourself 2024-12-03 02:12:39 -05:00
81 changed files with 7682 additions and 3254 deletions

View File

@@ -1,4 +1,4 @@
name: Nuget Pack
name: Build and Test
on:
push:
@@ -16,31 +16,22 @@ jobs:
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
dotnet-version: |
6.0.x
8.0.x
9.0.x
- name: Restore dependencies
run: dotnet restore
- name: Build library
run: dotnet build
- name: Run tests
run: dotnet test
- name: Pack
run: dotnet pack
- name: Upload build
uses: actions/upload-artifact@v4
with:
name: 'Nuget Package'
path: 'SabreTools.Serialization/bin/Release/*.nupkg'
- name: Run publish script
run: ./publish-nix.sh -d
- name: Upload to rolling
uses: ncipollo/release-action@v1.14.0
with:
allowUpdates: True
artifacts: 'SabreTools.Serialization/bin/Release/*.nupkg'
artifacts: "*.nupkg,*.snupkg,*.zip"
body: 'Last built commit: ${{ github.sha }}'
name: 'Rolling Release'
prerelease: True

View File

@@ -1,55 +0,0 @@
name: Build InfoPrint
on:
push:
branches: [ "main" ]
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
project: [InfoPrint]
runtime: [win-x86, win-x64, win-arm64, linux-x64, linux-arm64, osx-x64]
framework: [net9.0] #[net20, net35, net40, net452, net472, net48, netcoreapp3.1, net5.0, net6.0, net7.0, net8.0, net9.0]
conf: [Debug] #[Release, Debug]
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
- name: Restore dependencies
run: dotnet restore
- name: Build
run: dotnet publish ${{ matrix.project }}/${{ matrix.project }}.csproj -f ${{ matrix.framework }} -r ${{ matrix.runtime }} -c ${{ matrix.conf == 'Release' && 'Release -p:DebugType=None -p:DebugSymbols=false' || 'Debug'}} --self-contained true --version-suffix ${{ github.sha }} ${{ (startsWith(matrix.framework, 'net5') || startsWith(matrix.framework, 'net6') || startsWith(matrix.framework, 'net7') || startsWith(matrix.framework, 'net8') || startsWith(matrix.framework, 'net9')) && '-p:PublishSingleFile=true' || ''}}
- name: Archive build
run: |
cd ${{ matrix.project }}/bin/Debug/${{ matrix.framework }}/${{ matrix.runtime }}/publish/
zip -r ${{ github.workspace }}/${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip ./
- name: Upload build
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}
path: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip
- name: Upload to rolling
uses: ncipollo/release-action@v1.14.0
with:
allowUpdates: True
artifacts: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip
body: 'Last built commit: ${{ github.sha }}'
name: 'Rolling Release'
prerelease: True
replacesArtifacts: True
tag: "rolling"
updateOnlyUnreleased: True

View File

@@ -11,10 +11,13 @@ jobs:
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
dotnet-version: |
6.0.x
8.0.x
9.0.x
- name: Build
run: dotnet build
- name: Run tests
run: dotnet test
run: dotnet test

View File

@@ -9,7 +9,7 @@
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.8.1</Version>
<Version>1.8.6</Version>
</PropertyGroup>
<!-- Support All Frameworks -->
@@ -31,7 +31,7 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="SabreTools.IO" Version="1.6.1" />
<PackageReference Include="SabreTools.IO" Version="1.6.2" />
</ItemGroup>
</Project>

View File

@@ -1,5 +1,7 @@
# SabreTools.Serialization
[![Build and Test](https://github.com/SabreTools/SabreTools.Serialization/actions/workflows/build_and_test.yml/badge.svg)](https://github.com/SabreTools/SabreTools.Serialization/actions/workflows/build_and_test.yml)
This library consists of serializers that both read and write from files and streams to the dedicated models as well as convert to and from the common internal models. This library is partially used by the current parsing and writing code but none of the internal model serialization is used.
Find the link to the Nuget package [here](https://www.nuget.org/packages/SabreTools.Serialization).

View File

@@ -49,6 +49,7 @@ namespace SabreTools.Serialization.Test.CrossModel
Name = "XXXXXX",
Size = "XXXXXX",
CRC = "XXXXXX",
SHA1 = "XXXXXX",
Date = "XXXXXX",
};
@@ -102,6 +103,7 @@ namespace SabreTools.Serialization.Test.CrossModel
Assert.Equal("XXXXXX", rom.Name);
Assert.Equal("XXXXXX", rom.Size);
Assert.Equal("XXXXXX", rom.CRC);
Assert.Equal("XXXXXX", rom.SHA1);
Assert.Equal("XXXXXX", rom.Date);
}
}

View File

@@ -0,0 +1,73 @@
using System.IO;
using System.Linq;
using SabreTools.Serialization.Deserializers;
using Xunit;
namespace SabreTools.Serialization.Test.Deserializers
{
public class AppPkgHeaderTests
{
[Fact]
public void NullArray_Null()
{
byte[]? data = null;
int offset = 0;
var deserializer = new AppPkgHeader();
var actual = deserializer.Deserialize(data, offset);
Assert.Null(actual);
}
[Fact]
public void EmptyArray_Null()
{
byte[]? data = [];
int offset = 0;
var deserializer = new AppPkgHeader();
var actual = deserializer.Deserialize(data, offset);
Assert.Null(actual);
}
[Fact]
public void InvalidArray_Null()
{
byte[]? data = [.. Enumerable.Repeat<byte>(0xFF, 1024)];
int offset = 0;
var deserializer = new AppPkgHeader();
var actual = deserializer.Deserialize(data, offset);
Assert.Null(actual);
}
[Fact]
public void NullStream_Null()
{
Stream? data = null;
var deserializer = new AppPkgHeader();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
[Fact]
public void EmptyStream_Null()
{
Stream? data = new MemoryStream([]);
var deserializer = new AppPkgHeader();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
[Fact]
public void InvalidStream_Null()
{
Stream? data = new MemoryStream([.. Enumerable.Repeat<byte>(0xFF, 1024)]);
var deserializer = new AppPkgHeader();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
}
}

View File

@@ -117,6 +117,7 @@ namespace SabreTools.Serialization.Test.Deserializers
Name = "XXXXXX",
Size = "XXXXXX",
CRC = "XXXXXX",
SHA1 = "XXXXXX",
Date = "XXXXXX XXXXXX",
};
@@ -170,6 +171,7 @@ namespace SabreTools.Serialization.Test.Deserializers
Assert.Equal("XXXXXX", rom.Name);
Assert.Equal("XXXXXX", rom.Size);
Assert.Equal("XXXXXX", rom.CRC);
Assert.Equal("XXXXXX", rom.SHA1);
Assert.Equal("XXXXXX XXXXXX", rom.Date);
}
}

View File

@@ -0,0 +1,73 @@
using System.IO;
using System.Linq;
using SabreTools.Serialization.Deserializers;
using Xunit;
namespace SabreTools.Serialization.Test.Deserializers
{
public class LZKWAJTests
{
[Fact]
public void NullArray_Null()
{
byte[]? data = null;
int offset = 0;
var deserializer = new LZKWAJ();
var actual = deserializer.Deserialize(data, offset);
Assert.Null(actual);
}
[Fact]
public void EmptyArray_Null()
{
byte[]? data = [];
int offset = 0;
var deserializer = new LZKWAJ();
var actual = deserializer.Deserialize(data, offset);
Assert.Null(actual);
}
[Fact]
public void InvalidArray_Null()
{
byte[]? data = [.. Enumerable.Repeat<byte>(0xFF, 1024)];
int offset = 0;
var deserializer = new LZKWAJ();
var actual = deserializer.Deserialize(data, offset);
Assert.Null(actual);
}
[Fact]
public void NullStream_Null()
{
Stream? data = null;
var deserializer = new LZKWAJ();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
[Fact]
public void EmptyStream_Null()
{
Stream? data = new MemoryStream([]);
var deserializer = new LZKWAJ();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
[Fact]
public void InvalidStream_Null()
{
Stream? data = new MemoryStream([.. Enumerable.Repeat<byte>(0xFF, 1024)]);
var deserializer = new LZKWAJ();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
}
}

View File

@@ -0,0 +1,74 @@
using System.IO;
using System.Linq;
using SabreTools.Serialization.Deserializers;
using Xunit;
namespace SabreTools.Serialization.Test.Deserializers
{
public class LZQBasicTests
{
[Fact]
public void NullArray_Null()
{
byte[]? data = null;
int offset = 0;
var deserializer = new LZQBasic();
var actual = deserializer.Deserialize(data, offset);
Assert.Null(actual);
}
[Fact]
public void EmptyArray_Null()
{
byte[]? data = [];
int offset = 0;
var deserializer = new LZQBasic();
var actual = deserializer.Deserialize(data, offset);
Assert.Null(actual);
}
[Fact]
public void InvalidArray_Null()
{
byte[]? data = [.. Enumerable.Repeat<byte>(0xFF, 1024)];
int offset = 0;
var deserializer = new LZQBasic();
var actual = deserializer.Deserialize(data, offset);
Assert.Null(actual);
}
[Fact]
public void NullStream_Null()
{
Stream? data = null;
var deserializer = new LZQBasic();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
[Fact]
public void EmptyStream_Null()
{
Stream? data = new MemoryStream([]);
var deserializer = new LZQBasic();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
[Fact]
public void InvalidStream_Null()
{
Stream? data = new MemoryStream([.. Enumerable.Repeat<byte>(0xFF, 1024)]);
var deserializer = new LZQBasic();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
}
}

View File

@@ -0,0 +1,73 @@
using System.IO;
using System.Linq;
using SabreTools.Serialization.Deserializers;
using Xunit;
namespace SabreTools.Serialization.Test.Deserializers
{
public class LZSZDDTests
{
[Fact]
public void NullArray_Null()
{
byte[]? data = null;
int offset = 0;
var deserializer = new LZSZDD();
var actual = deserializer.Deserialize(data, offset);
Assert.Null(actual);
}
[Fact]
public void EmptyArray_Null()
{
byte[]? data = [];
int offset = 0;
var deserializer = new LZSZDD();
var actual = deserializer.Deserialize(data, offset);
Assert.Null(actual);
}
[Fact]
public void InvalidArray_Null()
{
byte[]? data = [.. Enumerable.Repeat<byte>(0xFF, 1024)];
int offset = 0;
var deserializer = new LZSZDD();
var actual = deserializer.Deserialize(data, offset);
Assert.Null(actual);
}
[Fact]
public void NullStream_Null()
{
Stream? data = null;
var deserializer = new LZSZDD();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
[Fact]
public void EmptyStream_Null()
{
Stream? data = new MemoryStream([]);
var deserializer = new LZSZDD();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
[Fact]
public void InvalidStream_Null()
{
Stream? data = new MemoryStream([.. Enumerable.Repeat<byte>(0xFF, 1024)]);
var deserializer = new LZSZDD();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
}
}

View File

@@ -22,15 +22,15 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="coverlet.collector" Version="6.0.2">
<PackageReference Include="coverlet.collector" Version="6.0.3">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.12.0" />
<PackageReference Include="SabreTools.Hashing" Version="1.4.0" />
<PackageReference Include="SabreTools.Models" Version="1.5.5" />
<PackageReference Include="SabreTools.Hashing" Version="1.4.1" />
<PackageReference Include="SabreTools.Models" Version="1.5.8" />
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
<PackageReference Include="xunit.runner.visualstudio" Version="3.0.0">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>

View File

@@ -0,0 +1,61 @@
using System.IO;
using System.Linq;
using SabreTools.Serialization.Wrappers;
using Xunit;
namespace SabreTools.Serialization.Test.Wrappers
{
public class InstallShieldArchiveV3Tests
{
[Fact]
public void NullArray_Null()
{
byte[]? data = null;
int offset = 0;
var actual = InstallShieldArchiveV3.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void EmptyArray_Null()
{
byte[]? data = [];
int offset = 0;
var actual = InstallShieldArchiveV3.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void InvalidArray_Null()
{
byte[]? data = [.. Enumerable.Repeat<byte>(0xFF, 1024)];
int offset = 0;
var actual = InstallShieldArchiveV3.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void NullStream_Null()
{
Stream? data = null;
var actual = InstallShieldArchiveV3.Create(data);
Assert.Null(actual);
}
[Fact]
public void EmptyStream_Null()
{
Stream? data = new MemoryStream([]);
var actual = InstallShieldArchiveV3.Create(data);
Assert.Null(actual);
}
[Fact]
public void InvalidStream_Null()
{
Stream? data = new MemoryStream([.. Enumerable.Repeat<byte>(0xFF, 1024)]);
var actual = InstallShieldArchiveV3.Create(data);
Assert.Null(actual);
}
}
}

View File

@@ -0,0 +1,61 @@
using System.IO;
using System.Linq;
using SabreTools.Serialization.Wrappers;
using Xunit;
namespace SabreTools.Serialization.Test.Wrappers
{
public class LZKWAJTests
{
[Fact]
public void NullArray_Null()
{
byte[]? data = null;
int offset = 0;
var actual = LZKWAJ.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void EmptyArray_Null()
{
byte[]? data = [];
int offset = 0;
var actual = LZKWAJ.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void InvalidArray_Null()
{
byte[]? data = [.. Enumerable.Repeat<byte>(0xFF, 1024)];
int offset = 0;
var actual = LZKWAJ.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void NullStream_Null()
{
Stream? data = null;
var actual = LZKWAJ.Create(data);
Assert.Null(actual);
}
[Fact]
public void EmptyStream_Null()
{
Stream? data = new MemoryStream([]);
var actual = LZKWAJ.Create(data);
Assert.Null(actual);
}
[Fact]
public void InvalidStream_Null()
{
Stream? data = new MemoryStream([.. Enumerable.Repeat<byte>(0xFF, 1024)]);
var actual = LZKWAJ.Create(data);
Assert.Null(actual);
}
}
}

View File

@@ -0,0 +1,61 @@
using System.IO;
using System.Linq;
using SabreTools.Serialization.Wrappers;
using Xunit;
namespace SabreTools.Serialization.Test.Wrappers
{
public class LZQBasicTests
{
[Fact]
public void NullArray_Null()
{
byte[]? data = null;
int offset = 0;
var actual = LZQBasic.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void EmptyArray_Null()
{
byte[]? data = [];
int offset = 0;
var actual = LZQBasic.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void InvalidArray_Null()
{
byte[]? data = [.. Enumerable.Repeat<byte>(0xFF, 1024)];
int offset = 0;
var actual = LZQBasic.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void NullStream_Null()
{
Stream? data = null;
var actual = LZQBasic.Create(data);
Assert.Null(actual);
}
[Fact]
public void EmptyStream_Null()
{
Stream? data = new MemoryStream([]);
var actual = LZQBasic.Create(data);
Assert.Null(actual);
}
[Fact]
public void InvalidStream_Null()
{
Stream? data = new MemoryStream([.. Enumerable.Repeat<byte>(0xFF, 1024)]);
var actual = LZQBasic.Create(data);
Assert.Null(actual);
}
}
}

View File

@@ -0,0 +1,61 @@
using System.IO;
using System.Linq;
using SabreTools.Serialization.Wrappers;
using Xunit;
namespace SabreTools.Serialization.Test.Wrappers
{
public class LZSZDDTests
{
[Fact]
public void NullArray_Null()
{
byte[]? data = null;
int offset = 0;
var actual = LZSZDD.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void EmptyArray_Null()
{
byte[]? data = [];
int offset = 0;
var actual = LZSZDD.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void InvalidArray_Null()
{
byte[]? data = [.. Enumerable.Repeat<byte>(0xFF, 1024)];
int offset = 0;
var actual = LZSZDD.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void NullStream_Null()
{
Stream? data = null;
var actual = LZSZDD.Create(data);
Assert.Null(actual);
}
[Fact]
public void EmptyStream_Null()
{
Stream? data = new MemoryStream([]);
var actual = LZSZDD.Create(data);
Assert.Null(actual);
}
[Fact]
public void InvalidStream_Null()
{
Stream? data = new MemoryStream([.. Enumerable.Repeat<byte>(0xFF, 1024)]);
var actual = LZSZDD.Create(data);
Assert.Null(actual);
}
}
}

View File

@@ -70,6 +70,7 @@ namespace SabreTools.Serialization.CrossModel
Name = item.ReadString(Models.Metadata.Rom.NameKey),
Size = item.ReadString(Models.Metadata.Rom.SizeKey),
CRC = item.ReadString(Models.Metadata.Rom.CRCKey),
SHA1 = item.ReadString(Models.Metadata.Rom.SHA1Key),
Date = item.ReadString(Models.Metadata.Rom.DateKey),
};
return file;

View File

@@ -73,6 +73,7 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.Rom.NameKey] = item.Name,
[Models.Metadata.Rom.SizeKey] = item.Size,
[Models.Metadata.Rom.CRCKey] = item.CRC,
[Models.Metadata.Rom.SHA1Key] = item.SHA1,
[Models.Metadata.Rom.DateKey] = item.Date,
};
return rom;

View File

@@ -103,7 +103,7 @@ namespace SabreTools.Serialization.CrossModel
var romSizes = Array.ConvertAll(roms, r => r.ReadLong(Models.Metadata.Rom.SizeKey) ?? -1);
game.RomSize = Array.Find(romSizes, s => s > -1).ToString();
var romCRCs = Array.ConvertAll(roms, ConvertFromInternalModel);;
var romCRCs = Array.ConvertAll(roms, ConvertFromInternalModel);
game.Files = new Files { RomCRC = romCRCs };
}

View File

@@ -17,9 +17,6 @@ namespace SabreTools.Serialization.Deserializers
try
{
// Create a new media key block to fill
var mediaKeyBlock = new MediaKeyBlock();
#region Records
// Create the records list
@@ -49,7 +46,7 @@ namespace SabreTools.Serialization.Deserializers
// Set the records
if (records.Count > 0)
return new MediaKeyBlock { Records = [.. records ]};
return new MediaKeyBlock { Records = [.. records] };
return null;
}
@@ -61,29 +58,29 @@ namespace SabreTools.Serialization.Deserializers
}
/// <summary>
/// Parse a Stream into a record
/// Parse a Stream into a Record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled record on success, null on error</returns>
/// <returns>Filled Record on success, null on error</returns>
private static Record? ParseRecord(Stream data)
{
// The first 4 bytes make up the type and length
// The first byte is the type
RecordType type = (RecordType)data.ReadByteValue();
uint length = data.ReadUInt24();
data.Seek(-1, SeekOrigin.Current);
// Create a record based on the type
return type switch
{
// Known record types
RecordType.EndOfMediaKeyBlock => ParseEndOfMediaKeyBlockRecord(data, type, length),
RecordType.ExplicitSubsetDifference => ParseExplicitSubsetDifferenceRecord(data, type, length),
RecordType.MediaKeyData => ParseMediaKeyDataRecord(data, type, length),
RecordType.SubsetDifferenceIndex => ParseSubsetDifferenceIndexRecord(data, type, length),
RecordType.TypeAndVersion => ParseTypeAndVersionRecord(data, type, length),
RecordType.DriveRevocationList => ParseDriveRevocationListRecord(data, type, length),
RecordType.HostRevocationList => ParseHostRevocationListRecord(data, type, length),
RecordType.VerifyMediaKey => ParseVerifyMediaKeyRecord(data, type, length),
RecordType.Copyright => ParseCopyrightRecord(data, type, length),
RecordType.EndOfMediaKeyBlock => ParseEndOfMediaKeyBlockRecord(data),
RecordType.ExplicitSubsetDifference => ParseExplicitSubsetDifferenceRecord(data),
RecordType.MediaKeyData => ParseMediaKeyDataRecord(data),
RecordType.SubsetDifferenceIndex => ParseSubsetDifferenceIndexRecord(data),
RecordType.TypeAndVersion => ParseTypeAndVersionRecord(data),
RecordType.DriveRevocationList => ParseDriveRevocationListRecord(data),
RecordType.HostRevocationList => ParseHostRevocationListRecord(data),
RecordType.VerifyMediaKey => ParseVerifyMediaKeyRecord(data),
RecordType.Copyright => ParseCopyrightRecord(data),
// Unknown record type
_ => null,
@@ -91,324 +88,327 @@ namespace SabreTools.Serialization.Deserializers
}
/// <summary>
/// Parse a Stream into an end of media key block record
/// Parse a Stream into a CopyrightRecord
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled end of media key block record on success, null on error</returns>
private static EndOfMediaKeyBlockRecord? ParseEndOfMediaKeyBlockRecord(Stream data, RecordType type, uint length)
/// <returns>Filled CopyrightRecord on success, null on error</returns>
public static CopyrightRecord ParseCopyrightRecord(Stream data)
{
// Verify we're calling the right parser
if (type != RecordType.EndOfMediaKeyBlock)
return null;
var obj = new CopyrightRecord();
var record = new EndOfMediaKeyBlockRecord();
obj.RecordType = (RecordType)data.ReadByteValue();
obj.RecordLength = data.ReadUInt24LittleEndian();
if (obj.RecordLength > 4)
{
byte[] copyright = data.ReadBytes((int)(obj.RecordLength - 4));
obj.Copyright = Encoding.ASCII.GetString(copyright).TrimEnd('\0');
}
record.RecordType = type;
record.RecordLength = length;
if (length > 4)
record.SignatureData = data.ReadBytes((int)(length - 4));
return obj;
}
/// <summary>
/// Parse a Stream into a DriveRevocationListEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled DriveRevocationListEntry on success, null on error</returns>
public static DriveRevocationListEntry ParseDriveRevocationListEntry(Stream data)
{
var obj = new DriveRevocationListEntry();
return record;
obj.Range = data.ReadUInt16BigEndian();
obj.DriveID = data.ReadBytes(6);
return obj;
}
/// <summary>
/// Parse a Stream into an explicit subset-difference record
/// Parse a Stream into a DriveRevocationListRecord
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled explicit subset-difference record on success, null on error</returns>
private static ExplicitSubsetDifferenceRecord? ParseExplicitSubsetDifferenceRecord(Stream data, RecordType type, uint length)
/// <returns>Filled DriveRevocationListRecord on success, null on error</returns>
public static DriveRevocationListRecord ParseDriveRevocationListRecord(Stream data)
{
// Verify we're calling the right parser
if (type != RecordType.ExplicitSubsetDifference)
return null;
var record = new ExplicitSubsetDifferenceRecord();
record.RecordType = type;
record.RecordLength = length;
// Cache the current offset
long initialOffset = data.Position - 4;
long initialOffset = data.Position;
// Create the subset difference list
var subsetDifferences = new List<SubsetDifference>();
var obj = new DriveRevocationListRecord();
obj.RecordType = (RecordType)data.ReadByteValue();
obj.RecordLength = data.ReadUInt24LittleEndian();
obj.TotalNumberOfEntries = data.ReadUInt32BigEndian();
// Try to parse the signature blocks
var blocks = new List<DriveRevocationSignatureBlock>();
uint entryCount = 0;
while (entryCount < obj.TotalNumberOfEntries && data.Position < initialOffset + obj.RecordLength)
{
var block = ParseDriveRevocationSignatureBlock(data);
entryCount += block.NumberOfEntries;
blocks.Add(block);
// If we have an empty block
if (block.NumberOfEntries == 0)
break;
}
// Set the signature blocks
obj.SignatureBlocks = [.. blocks];
// If there's any data left, discard it
if (data.Position < initialOffset + obj.RecordLength)
_ = data.ReadBytes((int)(initialOffset + obj.RecordLength - data.Position));
return obj;
}
/// <summary>
/// Parse a Stream into a DriveRevocationSignatureBlock
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled DriveRevocationSignatureBlock on success, null on error</returns>
public static DriveRevocationSignatureBlock ParseDriveRevocationSignatureBlock(Stream data)
{
var obj = new DriveRevocationSignatureBlock();
obj.NumberOfEntries = data.ReadUInt32BigEndian();
obj.EntryFields = new DriveRevocationListEntry[obj.NumberOfEntries];
for (int i = 0; i < obj.EntryFields.Length; i++)
{
obj.EntryFields[i] = ParseDriveRevocationListEntry(data);
}
return obj;
}
/// <summary>
/// Parse a Stream into a EndOfMediaKeyBlockRecord
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled EndOfMediaKeyBlockRecord on success, null on error</returns>
public static EndOfMediaKeyBlockRecord ParseEndOfMediaKeyBlockRecord(Stream data)
{
var obj = new EndOfMediaKeyBlockRecord();
obj.RecordType = (RecordType)data.ReadByteValue();
obj.RecordLength = data.ReadUInt24LittleEndian();
if (obj.RecordLength > 4)
obj.SignatureData = data.ReadBytes((int)(obj.RecordLength - 4));
return obj;
}
/// <summary>
/// Parse a Stream into a ExplicitSubsetDifferenceRecord
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ExplicitSubsetDifferenceRecord on success, null on error</returns>
public static ExplicitSubsetDifferenceRecord ParseExplicitSubsetDifferenceRecord(Stream data)
{
// Cache the current offset
long initialOffset = data.Position;
var obj = new ExplicitSubsetDifferenceRecord();
obj.RecordType = (RecordType)data.ReadByteValue();
obj.RecordLength = data.ReadUInt24LittleEndian();
// Try to parse the subset differences
while (data.Position < initialOffset + length - 5)
var subsetDifferences = new List<SubsetDifference>();
while (data.Position < initialOffset + obj.RecordLength - 5)
{
var subsetDifference = new SubsetDifference();
subsetDifference.Mask = data.ReadByteValue();
subsetDifference.Number = data.ReadUInt32BigEndian();
var subsetDifference = ParseSubsetDifference(data);
subsetDifferences.Add(subsetDifference);
}
// Set the subset differences
record.SubsetDifferences = [.. subsetDifferences];
obj.SubsetDifferences = [.. subsetDifferences];
// If there's any data left, discard it
if (data.Position < initialOffset + length)
_ = data.ReadBytes((int)(initialOffset + length - data.Position));
if (data.Position < initialOffset + obj.RecordLength)
_ = data.ReadBytes((int)(initialOffset + obj.RecordLength - data.Position));
return record;
return obj;
}
/// <summary>
/// Parse a Stream into a media key data record
/// Parse a Stream into a HostRevocationListEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled media key data record on success, null on error</returns>
private static MediaKeyDataRecord? ParseMediaKeyDataRecord(Stream data, RecordType type, uint length)
/// <returns>Filled HostRevocationListEntry on success, null on error</returns>
public static HostRevocationListEntry ParseHostRevocationListEntry(Stream data)
{
// Verify we're calling the right parser
if (type != RecordType.MediaKeyData)
return null;
var obj = new HostRevocationListEntry();
var record = new MediaKeyDataRecord();
obj.Range = data.ReadUInt16BigEndian();
obj.HostID = data.ReadBytes(6);
record.RecordType = type;
record.RecordLength = length;
return obj;
}
/// <summary>
/// Parse a Stream into a HostRevocationListRecord
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled HostRevocationListRecord on success, null on error</returns>
public static HostRevocationListRecord ParseHostRevocationListRecord(Stream data)
{
// Cache the current offset
long initialOffset = data.Position - 4;
long initialOffset = data.Position;
// Create the media key list
var mediaKeys = new List<byte[]>();
var obj = new HostRevocationListRecord();
obj.RecordType = (RecordType)data.ReadByteValue();
obj.RecordLength = data.ReadUInt24LittleEndian();
obj.TotalNumberOfEntries = data.ReadUInt32BigEndian();
// Try to parse the signature blocks
var blocks = new List<HostRevocationSignatureBlock>();
for (uint entryCount = 0; entryCount < obj.TotalNumberOfEntries && data.Position < initialOffset + obj.RecordLength;)
{
var block = ParseHostRevocationSignatureBlock(data);
entryCount += block.NumberOfEntries;
blocks.Add(block);
// If we have an empty block
if (block.NumberOfEntries == 0)
break;
}
// Set the signature blocks
obj.SignatureBlocks = [.. blocks];
// If there's any data left, discard it
if (data.Position < initialOffset + obj.RecordLength)
_ = data.ReadBytes((int)(initialOffset + obj.RecordLength - data.Position));
return obj;
}
/// <summary>
/// Parse a Stream into a HostRevocationSignatureBlock
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled HostRevocationSignatureBlock on success, null on error</returns>
public static HostRevocationSignatureBlock ParseHostRevocationSignatureBlock(Stream data)
{
var obj = new HostRevocationSignatureBlock();
obj.NumberOfEntries = data.ReadUInt32BigEndian();
obj.EntryFields = new HostRevocationListEntry[obj.NumberOfEntries];
for (int i = 0; i < obj.EntryFields.Length; i++)
{
obj.EntryFields[i] = ParseHostRevocationListEntry(data);
}
return obj;
}
/// <summary>
/// Parse a Stream into a MediaKeyDataRecord
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled MediaKeyDataRecord on success, null on error</returns>
public static MediaKeyDataRecord ParseMediaKeyDataRecord(Stream data)
{
    // Cache the current offset so RecordLength can bound the reads below
    long initialOffset = data.Position;

    var obj = new MediaKeyDataRecord();

    obj.RecordType = (RecordType)data.ReadByteValue();
    obj.RecordLength = data.ReadUInt24LittleEndian();

    // Media keys are fixed 16-byte values packed until the record ends.
    // (Stray interleaved lines referencing undefined `length`/`record`
    // from an earlier revision have been removed.)
    var mediaKeys = new List<byte[]>();
    while (data.Position < initialOffset + obj.RecordLength)
    {
        byte[] mediaKey = data.ReadBytes(0x10);
        mediaKeys.Add(mediaKey);
    }

    obj.MediaKeyData = [.. mediaKeys];
    return obj;
}
/// <summary>
/// Parse a Stream into a SubsetDifference
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SubsetDifference on success, null on error</returns>
public static SubsetDifference ParseSubsetDifference(Stream data)
{
    // A subset-difference is a fixed 5-byte structure: 1-byte mask + 4-byte number.
    // (Removed interleaved lines from the superseded type/length-checked parser.)
    var obj = new SubsetDifference();

    obj.Mask = data.ReadByteValue();
    obj.Number = data.ReadUInt32BigEndian();

    return obj;
}
/// <summary>
/// Parse a Stream into a SubsetDifferenceIndexRecord
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SubsetDifferenceIndexRecord on success, null on error</returns>
public static SubsetDifferenceIndexRecord ParseSubsetDifferenceIndexRecord(Stream data)
{
    // Cache the current offset so RecordLength can bound the reads below
    long initialOffset = data.Position;

    var obj = new SubsetDifferenceIndexRecord();

    obj.RecordType = (RecordType)data.ReadByteValue();
    obj.RecordLength = data.ReadUInt24LittleEndian();
    obj.Span = data.ReadUInt32BigEndian();

    // Offsets are packed 32-bit values until the record ends.
    // (Removed stray interleaved lines referencing undefined `record`/`length`.)
    var offsets = new List<uint>();
    while (data.Position < initialOffset + obj.RecordLength)
    {
        uint offset = data.ReadUInt32BigEndian();
        offsets.Add(offset);
    }

    obj.Offsets = [.. offsets];
    return obj;
}
/// <summary>
/// Parse a Stream into a TypeAndVersionRecord
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled TypeAndVersionRecord on success, null on error</returns>
public static TypeAndVersionRecord ParseTypeAndVersionRecord(Stream data)
{
    // (Removed interleaved `record.…` lines from the superseded
    // type/length-parameter version of this parser.)
    var obj = new TypeAndVersionRecord();

    obj.RecordType = (RecordType)data.ReadByteValue();
    obj.RecordLength = data.ReadUInt24LittleEndian();
    obj.MediaKeyBlockType = (MediaKeyBlockType)data.ReadUInt32BigEndian();
    obj.VersionNumber = data.ReadUInt32BigEndian();

    return obj;
}
/// <summary>
/// Parse a Stream into a VerifyMediaKeyRecord
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled VerifyMediaKeyRecord on success, null on error</returns>
public static VerifyMediaKeyRecord ParseVerifyMediaKeyRecord(Stream data)
{
    // A verify-media-key record is the 4-byte type/length prefix followed by
    // a fixed 16-byte ciphertext value. (Removed the interleaved body of the
    // superseded DriveRevocationListRecord parser that made this span
    // uncompilable; the drive-revocation parser lives elsewhere.)
    var obj = new VerifyMediaKeyRecord();

    obj.RecordType = (RecordType)data.ReadByteValue();
    obj.RecordLength = data.ReadUInt24LittleEndian();
    obj.CiphertextValue = data.ReadBytes(0x10);

    return obj;
}
/// <summary>
/// Parse a Stream into a host revocation list record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="type">Record type already read by the caller; must be HostRevocationList</param>
/// <param name="length">Record length already read by the caller, in bytes, including the 4-byte type/length prefix</param>
/// <returns>Filled host revocation list record on success, null on error</returns>
private static HostRevocationListRecord? ParseHostRevocationListRecord(Stream data, RecordType type, uint length)
{
// Verify we're calling the right parser
if (type != RecordType.HostRevocationList)
return null;
var record = new HostRevocationListRecord();
record.RecordType = type;
record.RecordLength = length;
// Cache the current offset
// NOTE(review): backs up 4 bytes because the caller already consumed the
// 1-byte type + 3-byte length prefix, so `initialOffset + length` is the
// record end — confirm against the caller's read pattern.
long initialOffset = data.Position - 4;
record.TotalNumberOfEntries = data.ReadUInt32BigEndian();
// Create the signature blocks list
var blocks = new List<HostRevocationSignatureBlock>();
// Try to parse the signature blocks
// entryCount accumulates entries across all blocks; parsing stops when the
// declared total is reached or the record boundary would be crossed.
int entryCount = 0;
while (entryCount < record.TotalNumberOfEntries && data.Position < initialOffset + length)
{
var block = new HostRevocationSignatureBlock();
block.NumberOfEntries = data.ReadUInt32BigEndian();
block.EntryFields = new HostRevocationListEntry[block.NumberOfEntries];
for (int i = 0; i < block.EntryFields.Length; i++)
{
// Each entry is an 8-byte pair: 2-byte range + 6-byte host ID
var entry = new HostRevocationListEntry();
entry.Range = data.ReadUInt16BigEndian();
entry.HostID = data.ReadBytes(6);
block.EntryFields[i] = entry;
entryCount++;
}
blocks.Add(block);
// If we have an empty block
// An all-zero block terminates the list even if the declared total
// has not been reached.
if (block.NumberOfEntries == 0)
break;
}
// Set the signature blocks
record.SignatureBlocks = [.. blocks];
// If there's any data left, discard it
if (data.Position < initialOffset + length)
_ = data.ReadBytes((int)(initialOffset + length - data.Position));
return record;
}
/// <summary>
/// Parse a Stream into a verify media key record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="type">Record type already read by the caller</param>
/// <param name="length">Record length already read by the caller</param>
/// <returns>Filled verify media key record on success, null on error</returns>
private static VerifyMediaKeyRecord? ParseVerifyMediaKeyRecord(Stream data, RecordType type, uint length)
{
    // Records of any other type cannot be handled by this parser
    if (type != RecordType.VerifyMediaKey)
        return null;

    // The payload is a single fixed 16-byte ciphertext value
    return new VerifyMediaKeyRecord
    {
        RecordType = type,
        RecordLength = length,
        CiphertextValue = data.ReadBytes(0x10),
    };
}
/// <summary>
/// Parse a Stream into a copyright record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="type">Record type already read by the caller</param>
/// <param name="length">Record length already read by the caller, including the 4-byte prefix</param>
/// <returns>Filled copyright record on success, null on error</returns>
private static CopyrightRecord? ParseCopyrightRecord(Stream data, RecordType type, uint length)
{
    // Verify we're calling the right parser
    if (type != RecordType.Copyright)
        return null;

    var record = new CopyrightRecord();
    record.RecordType = type;
    record.RecordLength = length;

    // The copyright text occupies whatever remains after the 4-byte prefix
    if (length > 4)
    {
        byte[] copyright = data.ReadBytes((int)(length - 4));
        record.Copyright = Encoding.ASCII.GetString(copyright).TrimEnd('\0');
    }

    // Removed an unreachable stray `return obj;` (obj was never defined here)
    return record;
}
}
}

View File

@@ -0,0 +1,79 @@
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using static SabreTools.Models.PlayStation4.Constants;
namespace SabreTools.Serialization.Deserializers
{
/// <summary>
/// Deserializer for the fixed-layout header of a PlayStation 4 app.pkg file.
/// All multi-byte fields are read big-endian; field order below is the wire order.
/// </summary>
public class AppPkgHeader : BaseBinaryDeserializer<Models.PlayStation4.AppPkgHeader>
{
/// <inheritdoc/>
public override Models.PlayStation4.AppPkgHeader? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || !data.CanRead)
return null;
try
{
// Create a new app.pkg header to fill
var appPkgHeader = new Models.PlayStation4.AppPkgHeader();
// Magic is validated immediately; anything else is not an app.pkg
appPkgHeader.Magic = data.ReadUInt32BigEndian();
if (appPkgHeader.Magic != AppPkgMagic)
return null;
// Entry table layout
appPkgHeader.Type = data.ReadUInt32BigEndian();
appPkgHeader.PKGUnknown = data.ReadUInt32BigEndian();
appPkgHeader.FileCount = data.ReadUInt32BigEndian();
appPkgHeader.EntryCount = data.ReadUInt32BigEndian();
appPkgHeader.SCEntryCount = data.ReadUInt16BigEndian();
appPkgHeader.EntryCount2 = data.ReadUInt16BigEndian();
appPkgHeader.TableOffset = data.ReadUInt32BigEndian();
appPkgHeader.EntryDataSize = data.ReadUInt32BigEndian();
appPkgHeader.BodyOffset = data.ReadUInt64BigEndian();
appPkgHeader.BodySize = data.ReadUInt64BigEndian();
appPkgHeader.ContentOffset = data.ReadUInt64BigEndian();
appPkgHeader.ContentSize = data.ReadUInt64BigEndian();
// Content ID: fixed 0x24-byte NUL-padded ASCII field
byte[] contentID = data.ReadBytes(0x24);
appPkgHeader.ContentID = Encoding.ASCII.GetString(contentID).TrimEnd('\0');
appPkgHeader.ContentZeroes = data.ReadBytes(0xC);
// Content metadata
appPkgHeader.DRMType = data.ReadUInt32BigEndian();
appPkgHeader.ContentType = data.ReadUInt32BigEndian();
appPkgHeader.ContentFlags = data.ReadUInt32BigEndian();
appPkgHeader.PromoteSize = data.ReadUInt32BigEndian();
appPkgHeader.VersionDate = data.ReadUInt32BigEndian();
appPkgHeader.VersionHash = data.ReadUInt32BigEndian();
appPkgHeader.Zeroes1 = data.ReadBytes(0x78);
// SHA-256 digests (0x20 bytes each)
appPkgHeader.MainEntry1SHA256 = data.ReadBytes(0x20);
appPkgHeader.MainEntry2SHA256 = data.ReadBytes(0x20);
appPkgHeader.DigestTableSHA256 = data.ReadBytes(0x20);
appPkgHeader.MainTableSHA256 = data.ReadBytes(0x20);
appPkgHeader.Zeroes2 = data.ReadBytes(0x280);
// PFS image description
appPkgHeader.PFSUnknown = data.ReadUInt32BigEndian();
appPkgHeader.PFSImageCount = data.ReadUInt32BigEndian();
appPkgHeader.PFSImageFlags = data.ReadUInt64BigEndian();
appPkgHeader.PFSImageOffset = data.ReadUInt64BigEndian();
appPkgHeader.PFSImageSize = data.ReadUInt64BigEndian();
appPkgHeader.MountImageOffset = data.ReadUInt64BigEndian();
appPkgHeader.MountImageSize = data.ReadUInt64BigEndian();
appPkgHeader.PKGSize = data.ReadUInt64BigEndian();
appPkgHeader.PKGSignedSize = data.ReadUInt32BigEndian();
appPkgHeader.PKGCacheSize = data.ReadUInt32BigEndian();
appPkgHeader.PFSImageSHA256 = data.ReadBytes(0x20);
appPkgHeader.PFSSignedSHA256 = data.ReadBytes(0x20);
appPkgHeader.PFSSplitSize0 = data.ReadUInt64BigEndian();
appPkgHeader.PFSSplitSize1 = data.ReadUInt64BigEndian();
appPkgHeader.Zeroes3 = data.ReadBytes(0xB50);
appPkgHeader.PKGSHA256 = data.ReadBytes(0x20);
return appPkgHeader;
}
catch
{
// Ignore the actual error
// Any read past end-of-stream or similar failure yields null rather than throwing
return null;
}
}
}
}

View File

@@ -35,8 +35,8 @@ namespace SabreTools.Serialization.Deserializers
if (svm.Day < 1 || svm.Day > 31)
return null;
svm.Unknown2 = data.ReadUInt32();
svm.Length = data.ReadUInt32();
svm.Unknown2 = data.ReadUInt32LittleEndian();
svm.Length = data.ReadUInt32LittleEndian();
if (svm.Length > 0)
svm.Data = data.ReadBytes((int)svm.Length);

View File

@@ -23,8 +23,8 @@ namespace SabreTools.Serialization.Deserializers
#region Header
// Try to parse the header
var header = data.ReadType<Header>();
if (header?.Magic != SignatureString)
var header = ParseHeader(data);
if (header.Magic != SignatureString)
return null;
// Set the archive header
@@ -58,32 +58,49 @@ namespace SabreTools.Serialization.Deserializers
}
/// <summary>
/// Parse a Stream into a FileEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled FileEntry on success, null on error</returns>
public static FileEntry ParseFileEntry(Stream data)
{
    // (Deinterleaved: removed duplicated old-revision signature and
    // non-explicit ReadInt32() lines.)
    var fileEntry = new FileEntry();

    fileEntry.NameSize = data.ReadInt32LittleEndian();
    if (fileEntry.NameSize > 0)
    {
        byte[] name = data.ReadBytes(fileEntry.NameSize);
        fileEntry.Name = Encoding.ASCII.GetString(name);
    }

    fileEntry.UncompressedSize = data.ReadInt32LittleEndian();
    fileEntry.Offset = data.ReadInt32LittleEndian();

    // The compressed size lives at the entry's data offset; seek there,
    // read it, then restore the table position for the next entry.
    if (fileEntry.Offset > 0)
    {
        long currentOffset = data.Position;
        data.Seek(fileEntry.Offset, SeekOrigin.Begin);
        fileEntry.CompressedSize = data.ReadInt32LittleEndian();
        data.Seek(currentOffset, SeekOrigin.Begin);
    }

    return fileEntry;
}
/// <summary>
/// Parse a Stream into a Header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Header on success, null on error</returns>
public static Header ParseHeader(Stream data)
{
    var header = new Header();

    // The magic is stored as 4 raw ASCII bytes
    byte[] magicBytes = data.ReadBytes(4);
    header.Magic = Encoding.ASCII.GetString(magicBytes);

    header.Version = data.ReadInt32LittleEndian();
    header.Files = data.ReadInt32LittleEndian();

    return header;
}
}
}

View File

@@ -25,9 +25,7 @@ namespace SabreTools.Serialization.Deserializers
#region Header
// Try to parse the header
var header = data.ReadType<BspHeader>();
if (header?.Lumps == null || header.Lumps.Length != BSP_HEADER_LUMPS)
return null;
var header = ParseBspHeader(data);
if (header.Version < 29 || header.Version > 30)
return null;
@@ -41,7 +39,7 @@ namespace SabreTools.Serialization.Deserializers
for (int l = 0; l < BSP_HEADER_LUMPS; l++)
{
// Get the next lump entry
var lumpEntry = header.Lumps[l];
var lumpEntry = header.Lumps![l];
if (lumpEntry == null)
continue;
if (lumpEntry.Offset == 0 || lumpEntry.Length == 0)
@@ -115,6 +113,272 @@ namespace SabreTools.Serialization.Deserializers
}
}
/// <summary>
/// Parse a Stream into BspFace
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled BspFace on success, null on error</returns>
public static BspFace ParseBspFace(Stream data)
{
    // Fields are consumed in declaration order; initializer expressions
    // evaluate left-to-right, matching the on-disk layout.
    return new BspFace
    {
        PlaneIndex = data.ReadUInt16LittleEndian(),
        PlaneSideCount = data.ReadUInt16LittleEndian(),
        FirstEdgeIndex = data.ReadUInt32LittleEndian(),
        NumberOfEdges = data.ReadUInt16LittleEndian(),
        TextureInfoIndex = data.ReadUInt16LittleEndian(),
        LightingStyles = data.ReadBytes(4),
        LightmapOffset = data.ReadInt32LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into BspHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled BspHeader on success, null on error</returns>
public static BspHeader ParseBspHeader(Stream data)
{
    var header = new BspHeader();

    header.Version = data.ReadInt32LittleEndian();

    // The header carries a fixed-size directory of lump entries
    var lumps = new BspLumpEntry[BSP_HEADER_LUMPS];
    for (int lumpIndex = 0; lumpIndex < lumps.Length; lumpIndex++)
    {
        lumps[lumpIndex] = ParseBspLumpEntry(data);
    }
    header.Lumps = lumps;

    return header;
}
/// <summary>
/// Parse a Stream into BspLeaf
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled BspLeaf on success, null on error</returns>
public static BspLeaf ParseBspLeaf(Stream data)
{
    // Local helper: read a fixed-length run of signed shorts
    short[] ReadShorts(int count)
    {
        var values = new short[count];
        for (int i = 0; i < count; i++)
        {
            values[i] = data.ReadInt16LittleEndian();
        }
        return values;
    }

    var leaf = new BspLeaf();

    leaf.Contents = (BspContents)data.ReadInt32LittleEndian();
    leaf.VisOffset = data.ReadInt32LittleEndian();

    // Bounding box corners, three components each
    leaf.Mins = ReadShorts(3);
    leaf.Maxs = ReadShorts(3);

    leaf.FirstMarkSurfaceIndex = data.ReadUInt16LittleEndian();
    leaf.MarkSurfacesCount = data.ReadUInt16LittleEndian();
    leaf.AmbientLevels = data.ReadBytes(4);

    return leaf;
}
/// <summary>
/// Parse a Stream into BspLumpEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled BspLumpEntry on success, null on error</returns>
public static BspLumpEntry ParseBspLumpEntry(Stream data)
{
    // A lump entry is just an (offset, length) pair; reads occur in order.
    return new BspLumpEntry
    {
        Offset = data.ReadInt32LittleEndian(),
        Length = data.ReadInt32LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into BspModel
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled BspModel on success, null on error</returns>
public static BspModel ParseBspModel(Stream data)
{
    var model = new BspModel();

    // Bounding box and origin come first as three packed vectors
    model.Mins = ParseVector3D(data);
    model.Maxs = ParseVector3D(data);
    model.OriginVector = ParseVector3D(data);

    // One headnode index per hull
    var headnodes = new int[MAX_MAP_HULLS];
    for (int hull = 0; hull < headnodes.Length; hull++)
    {
        headnodes[hull] = data.ReadInt32LittleEndian();
    }
    model.HeadnodesIndex = headnodes;

    model.VisLeafsCount = data.ReadInt32LittleEndian();
    model.FirstFaceIndex = data.ReadInt32LittleEndian();
    model.FacesCount = data.ReadInt32LittleEndian();

    return model;
}
/// <summary>
/// Parse a Stream into BspNode
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled BspNode on success, null on error</returns>
public static BspNode ParseBspNode(Stream data)
{
    // Local helper: read a fixed-length run of unsigned shorts
    ushort[] ReadUShorts(int count)
    {
        var values = new ushort[count];
        for (int i = 0; i < count; i++)
        {
            values[i] = data.ReadUInt16LittleEndian();
        }
        return values;
    }

    var node = new BspNode();

    node.PlaneIndex = data.ReadUInt32LittleEndian();
    node.Children = ReadUShorts(2);

    // Bounding box corners, three components each
    node.Mins = ReadUShorts(3);
    node.Maxs = ReadUShorts(3);

    node.FirstFace = data.ReadUInt16LittleEndian();
    node.FaceCount = data.ReadUInt16LittleEndian();

    return node;
}
/// <summary>
/// Parse a Stream into BspTexinfo
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled BspTexinfo on success, null on error</returns>
public static BspTexinfo ParseBspTexinfo(Stream data)
{
    // S/T texture axes each consist of a vector plus a shift scalar;
    // initializer expressions evaluate left-to-right, preserving read order.
    return new BspTexinfo
    {
        SVector = ParseVector3D(data),
        TextureSShift = data.ReadSingle(),
        TVector = ParseVector3D(data),
        TextureTShift = data.ReadSingle(),
        MiptexIndex = data.ReadUInt32LittleEndian(),
        Flags = (TextureFlag)data.ReadUInt32LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into Clipnode
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Clipnode on success, null on error</returns>
public static Clipnode ParseClipnode(Stream data)
{
    var clipnode = new Clipnode();

    clipnode.PlaneIndex = data.ReadInt32LittleEndian();

    // Exactly two child indices follow the plane index
    clipnode.ChildrenIndices =
    [
        data.ReadInt16LittleEndian(),
        data.ReadInt16LittleEndian(),
    ];

    return clipnode;
}
/// <summary>
/// Parse a Stream into Edge
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Edge on success, null on error</returns>
public static Edge ParseEdge(Stream data)
{
    // An edge is just a pair of vertex indices; reads occur left-to-right.
    return new Edge
    {
        VertexIndices =
        [
            data.ReadUInt16LittleEndian(),
            data.ReadUInt16LittleEndian(),
        ],
    };
}
/// <summary>
/// Parse a Stream into MipTexture
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled MipTexture on success, null on error</returns>
public static MipTexture ParseMipTexture(Stream data)
{
    var texture = new MipTexture();

    // Texture names are fixed-width, NUL-padded ASCII
    byte[] rawName = data.ReadBytes(MAXTEXTURENAME);
    texture.Name = Encoding.ASCII.GetString(rawName).TrimEnd('\0');

    texture.Width = data.ReadUInt32LittleEndian();
    texture.Height = data.ReadUInt32LittleEndian();

    // One data offset per mip level
    var mipOffsets = new uint[MIPLEVELS];
    for (int level = 0; level < mipOffsets.Length; level++)
    {
        mipOffsets[level] = data.ReadUInt32LittleEndian();
    }
    texture.Offsets = mipOffsets;

    return texture;
}
/// <summary>
/// Parse a Stream into Plane
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Plane on success, null on error</returns>
public static Plane ParsePlane(Stream data)
{
    // Normal, distance, then type — initializer evaluation preserves order
    return new Plane
    {
        NormalVector = ParseVector3D(data),
        Distance = data.ReadSingle(),
        PlaneType = (PlaneType)data.ReadInt32LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into a TextureHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled TextureHeader on success, null on error</returns>
public static TextureHeader ParseTextureHeader(Stream data)
{
    var obj = new TextureHeader();

    obj.MipTextureCount = data.ReadUInt32LittleEndian();

    // BUG FIX: miptex offsets are 32-bit values (Offsets is int[] and the
    // previous implementation read Int32); reading 16 bits here both
    // truncated each offset and desynchronized the stream.
    obj.Offsets = new int[obj.MipTextureCount];
    for (int i = 0; i < obj.Offsets.Length; i++)
    {
        obj.Offsets[i] = data.ReadInt32LittleEndian();
    }

    return obj;
}
/// <summary>
/// Parse a Stream into Vector3D
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Vector3D on success, null on error</returns>
public static Vector3D ParseVector3D(Stream data)
{
    // Three packed floats in X, Y, Z order
    return new Vector3D
    {
        X = data.ReadSingle(),
        Y = data.ReadSingle(),
        Z = data.ReadSingle(),
    };
}
/// <summary>
/// Parse a Stream into LUMP_ENTITIES
/// </summary>
@@ -167,9 +431,8 @@ namespace SabreTools.Serialization.Deserializers
var planes = new List<Plane>();
while (data.Position < offset + length)
{
var plane = data.ReadType<Plane>();
if (plane != null)
planes.Add(plane);
var plane = ParsePlane(data);
planes.Add(plane);
}
return new PlanesLump { Planes = [.. planes] };
@@ -188,34 +451,14 @@ namespace SabreTools.Serialization.Deserializers
var textures = new List<MipTexture>();
while (data.Position < offset + length)
{
var texture = data.ReadType<MipTexture>();
if (texture != null)
textures.Add(texture);
var texture = ParseMipTexture(data);
textures.Add(texture);
}
lump.Textures = [.. textures];
return lump;
}
/// <summary>
/// Parse a Stream into a Half-Life Level texture header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Level texture header on success, null on error</returns>
private static TextureHeader ParseTextureHeader(Stream data)
{
var textureHeader = new TextureHeader();
textureHeader.MipTextureCount = data.ReadUInt32();
textureHeader.Offsets = new int[textureHeader.MipTextureCount];
for (int i = 0; i < textureHeader.Offsets.Length; i++)
{
textureHeader.Offsets[i] = data.ReadInt32();
}
return textureHeader;
}
/// <summary>
/// Parse a Stream into LUMP_VERTICES
/// </summary>
@@ -226,10 +469,7 @@ namespace SabreTools.Serialization.Deserializers
var vertices = new List<Vector3D>();
while (data.Position < offset + length)
{
var vertex = data.ReadType<Vector3D>();
if (vertex == null)
break;
var vertex = ParseVector3D(data);
vertices.Add(vertex);
}
@@ -245,7 +485,7 @@ namespace SabreTools.Serialization.Deserializers
{
var lump = new VisibilityLump();
lump.NumClusters = data.ReadInt32();
lump.NumClusters = data.ReadInt32LittleEndian();
// BSP29 has an incompatible value here
int bytesNeeded = lump.NumClusters * 8;
@@ -258,7 +498,7 @@ namespace SabreTools.Serialization.Deserializers
lump.ByteOffsets[i] = new int[2];
for (int j = 0; j < 2; j++)
{
lump.ByteOffsets[i][j] = data.ReadInt32();
lump.ByteOffsets[i][j] = data.ReadInt32LittleEndian();
}
}
@@ -275,9 +515,8 @@ namespace SabreTools.Serialization.Deserializers
var nodes = new List<BspNode>();
while (data.Position < offset + length)
{
var node = data.ReadType<BspNode>();
if (node != null)
nodes.Add(node);
var node = ParseBspNode(data);
nodes.Add(node);
}
return new BspNodesLump { Nodes = [.. nodes] };
@@ -293,9 +532,8 @@ namespace SabreTools.Serialization.Deserializers
var texinfos = new List<BspTexinfo>();
while (data.Position < offset + length)
{
var texinfo = data.ReadType<BspTexinfo>();
if (texinfo != null)
texinfos.Add(texinfo);
var texinfo = ParseBspTexinfo(data);
texinfos.Add(texinfo);
}
return new BspTexinfoLump { Texinfos = [.. texinfos] };
@@ -311,9 +549,8 @@ namespace SabreTools.Serialization.Deserializers
var faces = new List<BspFace>();
while (data.Position < offset + length)
{
var face = data.ReadType<BspFace>();
if (face != null)
faces.Add(face);
var face = ParseBspFace(data);
faces.Add(face);
}
return new BspFacesLump { Faces = [.. faces] };
@@ -347,9 +584,8 @@ namespace SabreTools.Serialization.Deserializers
var clipnodes = new List<Clipnode>();
while (data.Position < offset + length)
{
var clipnode = data.ReadType<Clipnode>();
if (clipnode != null)
clipnodes.Add(clipnode);
var clipnode = ParseClipnode(data);
clipnodes.Add(clipnode);
}
return new ClipnodesLump { Clipnodes = [.. clipnodes] };
@@ -365,9 +601,8 @@ namespace SabreTools.Serialization.Deserializers
var leaves = new List<BspLeaf>();
while (data.Position < offset + length)
{
var leaf = data.ReadType<BspLeaf>();
if (leaf != null)
leaves.Add(leaf);
var leaf = ParseBspLeaf(data);
leaves.Add(leaf);
}
return new BspLeavesLump { Leaves = [.. leaves] };
@@ -383,7 +618,7 @@ namespace SabreTools.Serialization.Deserializers
var marksurfaces = new List<ushort>();
while (data.Position < offset + length)
{
marksurfaces.Add(data.ReadUInt16());
marksurfaces.Add(data.ReadUInt16LittleEndian());
}
return new MarksurfacesLump { Marksurfaces = [.. marksurfaces] };
@@ -399,9 +634,8 @@ namespace SabreTools.Serialization.Deserializers
var edges = new List<Edge>();
while (data.Position < offset + length)
{
var edge = data.ReadType<Edge>();
if (edge != null)
edges.Add(edge);
var edge = ParseEdge(data);
edges.Add(edge);
}
return new EdgesLump { Edges = [.. edges] };
@@ -417,7 +651,7 @@ namespace SabreTools.Serialization.Deserializers
var surfedges = new List<int>();
while (data.Position < offset + length)
{
surfedges.Add(data.ReadInt32());
surfedges.Add(data.ReadInt32LittleEndian());
}
return new SurfedgesLump { Surfedges = [.. surfedges] };
@@ -433,9 +667,8 @@ namespace SabreTools.Serialization.Deserializers
var models = new List<BspModel>();
while (data.Position < offset + length)
{
var model = data.ReadType<BspModel>();
if (model != null)
models.Add(model);
var model = ParseBspModel(data);
models.Add(model);
}
return new BspModelsLump { Models = [.. models] };

View File

@@ -2,6 +2,7 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.InteropServices;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.CFB;
using static SabreTools.Models.CFB.Constants;
@@ -26,7 +27,17 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the file header
var fileHeader = ParseFileHeader(data);
if (fileHeader == null)
if (fileHeader?.Signature != SignatureUInt64)
return null;
if (fileHeader.ByteOrder != 0xFFFE)
return null;
if (fileHeader.MajorVersion == 3 && fileHeader.SectorShift != 0x0009)
return null;
else if (fileHeader.MajorVersion == 4 && fileHeader.SectorShift != 0x000C)
return null;
if (fileHeader.MajorVersion == 3 && fileHeader.NumberOfDirectorySectors != 0)
return null;
if (fileHeader.MiniStreamCutoffSize != 0x00001000)
return null;
// Set the file header
@@ -228,32 +239,69 @@ namespace SabreTools.Serialization.Deserializers
}
/// <summary>
/// Parse a Stream into a DirectoryEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled DirectoryEntry on success, null on error</returns>
public static DirectoryEntry ParseDirectoryEntry(Stream data)
{
    // (Deinterleaved: removed the stray `data.ReadType<FileHeader>()` line and
    // the removed-revision header checks that made this span uncompilable.)
    var obj = new DirectoryEntry();

    // Per MS-CFB §2.6.1, the name is a fixed 64-byte UTF-16LE field; the
    // previous 32-byte ASCII read desynchronized the 128-byte entry layout.
    byte[] name = data.ReadBytes(64);
    obj.Name = Encoding.Unicode.GetString(name).TrimEnd('\0');

    obj.NameLength = data.ReadUInt16LittleEndian();
    obj.ObjectType = (ObjectType)data.ReadByteValue();
    obj.ColorFlag = (ColorFlag)data.ReadByteValue();
    obj.LeftSiblingID = (StreamID)data.ReadUInt32LittleEndian();
    obj.RightSiblingID = (StreamID)data.ReadUInt32LittleEndian();
    obj.ChildID = (StreamID)data.ReadUInt32LittleEndian();
    obj.CLSID = data.ReadGuid();
    obj.StateBits = data.ReadUInt32LittleEndian();
    obj.CreationTime = data.ReadUInt64LittleEndian();
    obj.ModifiedTime = data.ReadUInt64LittleEndian();
    obj.StartingSectorLocation = data.ReadUInt32LittleEndian();
    obj.StreamSize = data.ReadUInt64LittleEndian();

    return obj;
}
/// <summary>
/// Parse a Stream into a FileHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled FileHeader on success, null on error</returns>
public static FileHeader ParseFileHeader(Stream data)
{
    // (Deinterleaved: removed stray old-revision lines `if (header.MajorVersion == 4)`
    // and `return header;` that referenced an undefined `header` variable.)
    var obj = new FileHeader();

    obj.Signature = data.ReadUInt64LittleEndian();
    obj.CLSID = data.ReadGuid();
    obj.MinorVersion = data.ReadUInt16LittleEndian();
    obj.MajorVersion = data.ReadUInt16LittleEndian();
    obj.ByteOrder = data.ReadUInt16LittleEndian();
    obj.SectorShift = data.ReadUInt16LittleEndian();
    obj.MiniSectorShift = data.ReadUInt16LittleEndian();
    obj.Reserved = data.ReadBytes(6);
    obj.NumberOfDirectorySectors = data.ReadUInt32LittleEndian();
    obj.NumberOfFATSectors = data.ReadUInt32LittleEndian();
    obj.FirstDirectorySectorLocation = data.ReadUInt32LittleEndian();
    obj.TransactionSignatureNumber = data.ReadUInt32LittleEndian();
    obj.MiniStreamCutoffSize = data.ReadUInt32LittleEndian();
    obj.FirstMiniFATSectorLocation = data.ReadUInt32LittleEndian();
    obj.NumberOfMiniFATSectors = data.ReadUInt32LittleEndian();
    obj.FirstDIFATSectorLocation = data.ReadUInt32LittleEndian();
    obj.NumberOfDIFATSectors = data.ReadUInt32LittleEndian();

    // The header always carries 109 DIFAT sector numbers
    obj.DIFAT = new SectorNumber[109];
    for (int i = 0; i < 109; i++)
    {
        obj.DIFAT[i] = (SectorNumber)data.ReadUInt32LittleEndian();
    }

    // Skip rest of sector for version 4 (4096-byte sectors, 512-byte header)
    if (obj.MajorVersion == 4)
        _ = data.ReadBytes(3584);

    return obj;
}
/// <summary>
@@ -269,7 +317,7 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < sectorNumbers.Length; i++)
{
sectorNumbers[i] = (SectorNumber)data.ReadUInt32();
sectorNumbers[i] = (SectorNumber)data.ReadUInt32LittleEndian();
}
return sectorNumbers;
@@ -294,9 +342,7 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < directoryEntries.Length; i++)
{
var directoryEntry = data.ReadType<DirectoryEntry>();
if (directoryEntry == null)
return null;
var directoryEntry = ParseDirectoryEntry(data);
// Handle version 3 entries
if (majorVersion == 3)

View File

@@ -22,15 +22,70 @@ namespace SabreTools.Serialization.Deserializers
uint version = GetVersion(data);
// Read and return the current CHD
return version switch
switch (version)
{
1 => ParseHeaderV1(data),
2 => ParseHeaderV2(data),
3 => ParseHeaderV3(data),
4 => ParseHeaderV4(data),
5 => ParseHeaderV5(data),
_ => null,
};
case 1:
var headerV1 = ParseHeaderV1(data);
if (headerV1.Tag != Constants.SignatureString)
return null;
if (headerV1.Length != Constants.HeaderV1Size)
return null;
if (headerV1.Compression > CompressionType.CHDCOMPRESSION_ZLIB)
return null;
return headerV1;
case 2:
var headerV2 = ParseHeaderV2(data);
if (headerV2.Tag != Constants.SignatureString)
return null;
if (headerV2.Length != Constants.HeaderV2Size)
return null;
if (headerV2.Compression > CompressionType.CHDCOMPRESSION_ZLIB)
return null;
return headerV2;
case 3:
var headerV3 = ParseHeaderV3(data);
if (headerV3.Tag != Constants.SignatureString)
return null;
if (headerV3.Length != Constants.HeaderV3Size)
return null;
if (headerV3.Compression > CompressionType.CHDCOMPRESSION_ZLIB_PLUS)
return null;
return headerV3;
case 4:
var headerV4 = ParseHeaderV1(data);
if (headerV4.Tag != Constants.SignatureString)
return null;
if (headerV4.Length != Constants.HeaderV4Size)
return null;
if (headerV4.Compression > CompressionType.CHDCOMPRESSION_AV)
return null;
return headerV4;
case 5:
var headerV5 = ParseHeaderV1(data);
if (headerV5.Tag != Constants.SignatureString)
return null;
if (headerV5.Length != Constants.HeaderV5Size)
return null;
return headerV5;
default:
return null;
}
}
catch
{
@@ -83,81 +138,129 @@ namespace SabreTools.Serialization.Deserializers
}
/// <summary>
/// Parse a Stream into a V1 header
/// Parse a Stream into a HeaderV1
/// </summary>
private static HeaderV1? ParseHeaderV1(Stream data)
public static HeaderV1 ParseHeaderV1(Stream data)
{
var header = data.ReadType<HeaderV1>();
if (header?.Tag != Constants.SignatureString)
return null;
if (header.Length != Constants.HeaderV1Size)
return null;
if (header.Compression > CompressionType.CHDCOMPRESSION_ZLIB)
return null;
var obj = new HeaderV1();
return header;
byte[] tag = data.ReadBytes(8);
obj.Tag = Encoding.ASCII.GetString(tag);
obj.Length = data.ReadUInt32LittleEndian();
obj.Version = data.ReadUInt32LittleEndian();
obj.Flags = (Flags)data.ReadUInt32LittleEndian();
obj.Compression = (CompressionType)data.ReadUInt32LittleEndian();
obj.HunkSize = data.ReadUInt32LittleEndian();
obj.TotalHunks = data.ReadUInt32LittleEndian();
obj.Cylinders = data.ReadUInt32LittleEndian();
obj.Heads = data.ReadUInt32LittleEndian();
obj.Sectors = data.ReadUInt32LittleEndian();
obj.MD5 = data.ReadBytes(16);
obj.ParentMD5 = data.ReadBytes(16);
return obj;
}
/// <summary>
/// Parse a Stream into a V2 header
/// </summary>
private static HeaderV2? ParseHeaderV2(Stream data)
public static HeaderV2 ParseHeaderV2(Stream data)
{
var header = data.ReadType<HeaderV2>();
if (header?.Tag != Constants.SignatureString)
return null;
if (header.Length != Constants.HeaderV2Size)
return null;
if (header.Compression > CompressionType.CHDCOMPRESSION_ZLIB)
return null;
var obj = new HeaderV2();
return header;
byte[] tag = data.ReadBytes(8);
obj.Tag = Encoding.ASCII.GetString(tag);
obj.Length = data.ReadUInt32LittleEndian();
obj.Version = data.ReadUInt32LittleEndian();
obj.Flags = (Flags)data.ReadUInt32LittleEndian();
obj.Compression = (CompressionType)data.ReadUInt32LittleEndian();
obj.HunkSize = data.ReadUInt32LittleEndian();
obj.TotalHunks = data.ReadUInt32LittleEndian();
obj.Cylinders = data.ReadUInt32LittleEndian();
obj.Heads = data.ReadUInt32LittleEndian();
obj.Sectors = data.ReadUInt32LittleEndian();
obj.MD5 = data.ReadBytes(16);
obj.ParentMD5 = data.ReadBytes(16);
obj.BytesPerSector = data.ReadUInt32LittleEndian();
return obj;
}
/// <summary>
/// Parse a Stream into a V3 header
/// </summary>
private static HeaderV3? ParseHeaderV3(Stream data)
public static HeaderV3 ParseHeaderV3(Stream data)
{
var header = data.ReadType<HeaderV3>();
if (header?.Tag != Constants.SignatureString)
return null;
if (header.Length != Constants.HeaderV3Size)
return null;
if (header.Compression > CompressionType.CHDCOMPRESSION_ZLIB_PLUS)
return null;
var obj = new HeaderV3();
return header;
byte[] tag = data.ReadBytes(8);
obj.Tag = Encoding.ASCII.GetString(tag);
obj.Length = data.ReadUInt32LittleEndian();
obj.Version = data.ReadUInt32LittleEndian();
obj.Flags = (Flags)data.ReadUInt32LittleEndian();
obj.Compression = (CompressionType)data.ReadUInt32LittleEndian();
obj.TotalHunks = data.ReadUInt32LittleEndian();
obj.LogicalBytes = data.ReadUInt64LittleEndian();
obj.MetaOffset = data.ReadUInt64LittleEndian();
obj.MD5 = data.ReadBytes(16);
obj.ParentMD5 = data.ReadBytes(16);
obj.HunkBytes = data.ReadUInt32LittleEndian();
obj.SHA1 = data.ReadBytes(20);
obj.ParentSHA1 = data.ReadBytes(20);
return obj;
}
/// <summary>
/// Parse a Stream into a V4 header
/// </summary>
private static HeaderV4? ParseHeaderV4(Stream data)
public static HeaderV4? ParseHeaderV4(Stream data)
{
var header = data.ReadType<HeaderV4>();
if (header?.Tag != Constants.SignatureString)
return null;
if (header.Length != Constants.HeaderV4Size)
return null;
if (header.Compression > CompressionType.CHDCOMPRESSION_AV)
return null;
var obj = new HeaderV4();
return header;
byte[] tag = data.ReadBytes(8);
obj.Tag = Encoding.ASCII.GetString(tag);
obj.Length = data.ReadUInt32LittleEndian();
obj.Version = data.ReadUInt32LittleEndian();
obj.Flags = (Flags)data.ReadUInt32LittleEndian();
obj.Compression = (CompressionType)data.ReadUInt32LittleEndian();
obj.TotalHunks = data.ReadUInt32LittleEndian();
obj.LogicalBytes = data.ReadUInt64LittleEndian();
obj.MetaOffset = data.ReadUInt64LittleEndian();
obj.HunkBytes = data.ReadUInt32LittleEndian();
obj.SHA1 = data.ReadBytes(20);
obj.ParentSHA1 = data.ReadBytes(20);
obj.RawSHA1 = data.ReadBytes(20);
return obj;
}
/// <summary>
/// Parse a Stream into a V5 header
/// </summary>
private static HeaderV5? ParseHeaderV5(Stream data)
public static HeaderV5 ParseHeaderV5(Stream data)
{
var header = data.ReadType<HeaderV5>();
if (header?.Tag != Constants.SignatureString)
return null;
if (header.Length != Constants.HeaderV5Size)
return null;
var obj = new HeaderV5();
return header;
byte[] tag = data.ReadBytes(8);
obj.Tag = Encoding.ASCII.GetString(tag);
obj.Length = data.ReadUInt32LittleEndian();
obj.Version = data.ReadUInt32LittleEndian();
obj.Compressors = new CodecType[4];
for (int i = 0; i < 4; i++)
{
obj.Compressors[i] = (CodecType)data.ReadUInt32LittleEndian();
}
obj.LogicalBytes = data.ReadUInt64LittleEndian();
obj.MapOffset = data.ReadUInt64LittleEndian();
obj.MetaOffset = data.ReadUInt64LittleEndian();
obj.HunkBytes = data.ReadUInt32LittleEndian();
obj.UnitBytes = data.ReadUInt32LittleEndian();
obj.RawSHA1 = data.ReadBytes(20);
obj.SHA1 = data.ReadBytes(20);
obj.ParentSHA1 = data.ReadBytes(20);
return obj;
}
}
}

View File

@@ -22,9 +22,7 @@ namespace SabreTools.Serialization.Deserializers
#region CIA Header
// Try to parse the header
var header = data.ReadType<CIAHeader>();
if (header == null)
return null;
var header = ParseCIAHeader(data);
if (header.CertificateChainSize > data.Length)
return null;
if (header.TicketSize > data.Length)
@@ -119,15 +117,7 @@ namespace SabreTools.Serialization.Deserializers
// If we have a meta data
if (header.MetaSize > 0)
{
// Try to parse the meta
var meta = data.ReadType<MetaData>();
if (meta == null)
return null;
// Set the meta
cia.MetaData = meta;
}
cia.MetaData = ParseMetaData(data);
#endregion
@@ -141,161 +131,235 @@ namespace SabreTools.Serialization.Deserializers
}
/// <summary>
/// Parse a Stream into a certificate
/// Parse a Stream into a Certificate
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled certificate on success, null on error</returns>
/// <returns>Filled Certificate on success, null on error</returns>
public static Certificate? ParseCertificate(Stream data)
{
var certificate = new Certificate();
var obj = new Certificate();
certificate.SignatureType = (SignatureType)data.ReadUInt32();
switch (certificate.SignatureType)
obj.SignatureType = (SignatureType)data.ReadUInt32LittleEndian();
switch (obj.SignatureType)
{
case SignatureType.RSA_4096_SHA1:
case SignatureType.RSA_4096_SHA256:
certificate.SignatureSize = 0x200;
certificate.PaddingSize = 0x3C;
obj.SignatureSize = 0x200;
obj.PaddingSize = 0x3C;
break;
case SignatureType.RSA_2048_SHA1:
case SignatureType.RSA_2048_SHA256:
certificate.SignatureSize = 0x100;
certificate.PaddingSize = 0x3C;
obj.SignatureSize = 0x100;
obj.PaddingSize = 0x3C;
break;
case SignatureType.ECDSA_SHA1:
case SignatureType.ECDSA_SHA256:
certificate.SignatureSize = 0x3C;
certificate.PaddingSize = 0x40;
obj.SignatureSize = 0x3C;
obj.PaddingSize = 0x40;
break;
default:
return null;
}
certificate.Signature = data.ReadBytes(certificate.SignatureSize);
certificate.Padding = data.ReadBytes(certificate.PaddingSize);
obj.Signature = data.ReadBytes(obj.SignatureSize);
obj.Padding = data.ReadBytes(obj.PaddingSize);
byte[] issuer = data.ReadBytes(0x40);
certificate.Issuer = Encoding.ASCII.GetString(issuer).TrimEnd('\0');
certificate.KeyType = (PublicKeyType)data.ReadUInt32();
obj.Issuer = Encoding.ASCII.GetString(issuer).TrimEnd('\0');
obj.KeyType = (PublicKeyType)data.ReadUInt32LittleEndian();
byte[] name = data.ReadBytes(0x40);
certificate.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
certificate.ExpirationTime = data.ReadUInt32();
obj.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
obj.ExpirationTime = data.ReadUInt32LittleEndian();
switch (certificate.KeyType)
switch (obj.KeyType)
{
case PublicKeyType.RSA_4096:
certificate.RSAModulus = data.ReadBytes(0x200);
certificate.RSAPublicExponent = data.ReadUInt32();
certificate.RSAPadding = data.ReadBytes(0x34);
obj.RSAModulus = data.ReadBytes(0x200);
obj.RSAPublicExponent = data.ReadUInt32LittleEndian();
obj.RSAPadding = data.ReadBytes(0x34);
break;
case PublicKeyType.RSA_2048:
certificate.RSAModulus = data.ReadBytes(0x100);
certificate.RSAPublicExponent = data.ReadUInt32();
certificate.RSAPadding = data.ReadBytes(0x34);
obj.RSAModulus = data.ReadBytes(0x100);
obj.RSAPublicExponent = data.ReadUInt32LittleEndian();
obj.RSAPadding = data.ReadBytes(0x34);
break;
case PublicKeyType.EllipticCurve:
certificate.ECCPublicKey = data.ReadBytes(0x3C);
certificate.ECCPadding = data.ReadBytes(0x3C);
obj.ECCPublicKey = data.ReadBytes(0x3C);
obj.ECCPadding = data.ReadBytes(0x3C);
break;
default:
return null;
}
return certificate;
return obj;
}
/// <summary>
/// Parse a Stream into a ticket
/// Parse a Stream into a CIAHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled CIAHeader on success, null on error</returns>
public static CIAHeader ParseCIAHeader(Stream data)
{
var obj = new CIAHeader();
obj.HeaderSize = data.ReadUInt32LittleEndian();
obj.Type = data.ReadUInt16LittleEndian();
obj.Version = data.ReadUInt16LittleEndian();
obj.CertificateChainSize = data.ReadUInt32LittleEndian();
obj.TicketSize = data.ReadUInt32LittleEndian();
obj.TMDFileSize = data.ReadUInt32LittleEndian();
obj.MetaSize = data.ReadUInt32LittleEndian();
obj.ContentSize = data.ReadUInt64LittleEndian();
obj.ContentIndex = data.ReadBytes(0x2000);
return obj;
}
/// <summary>
/// Parse a Stream into a ContentChunkRecord
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ContentChunkRecord on success, null on error</returns>
public static ContentChunkRecord ParseContentChunkRecord(Stream data)
{
var obj = new ContentChunkRecord();
obj.ContentId = data.ReadUInt32LittleEndian();
obj.ContentIndex = (ContentIndex)data.ReadUInt16LittleEndian();
obj.ContentType = (TMDContentType)data.ReadUInt16LittleEndian();
obj.ContentSize = data.ReadUInt64LittleEndian();
obj.SHA256Hash = data.ReadBytes(0x20);
return obj;
}
/// <summary>
/// Parse a Stream into a ContentInfoRecord
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ContentInfoRecord on success, null on error</returns>
public static ContentInfoRecord ParseContentInfoRecord(Stream data)
{
var obj = new ContentInfoRecord();
obj.ContentIndexOffset = data.ReadUInt16LittleEndian();
obj.ContentCommandCount = data.ReadUInt16LittleEndian();
obj.UnhashedContentRecordsSHA256Hash = data.ReadBytes(0x20);
return obj;
}
/// <summary>
/// Parse a Stream into a MetaData
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled MetaData on success, null on error</returns>
public static MetaData ParseMetaData(Stream data)
{
var obj = new MetaData();
obj.TitleIDDependencyList = data.ReadBytes(0x180);
obj.Reserved1 = data.ReadBytes(0x180);
obj.CoreVersion = data.ReadUInt32LittleEndian();
obj.Reserved2 = data.ReadBytes(0xFC);
obj.IconData = data.ReadBytes(0x36C0);
return obj;
}
/// <summary>
/// Parse a Stream into a Ticket
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="fromCdn">Indicates if the ticket is from CDN</param>
/// <returns>Filled ticket on success, null on error</returns>
/// <returns>Filled Ticket on success, null on error</returns>
public static Ticket? ParseTicket(Stream data, bool fromCdn = false)
{
var ticket = new Ticket();
var obj = new Ticket();
ticket.SignatureType = (SignatureType)data.ReadUInt32();
switch (ticket.SignatureType)
obj.SignatureType = (SignatureType)data.ReadUInt32LittleEndian();
switch (obj.SignatureType)
{
case SignatureType.RSA_4096_SHA1:
case SignatureType.RSA_4096_SHA256:
ticket.SignatureSize = 0x200;
ticket.PaddingSize = 0x3C;
obj.SignatureSize = 0x200;
obj.PaddingSize = 0x3C;
break;
case SignatureType.RSA_2048_SHA1:
case SignatureType.RSA_2048_SHA256:
ticket.SignatureSize = 0x100;
ticket.PaddingSize = 0x3C;
obj.SignatureSize = 0x100;
obj.PaddingSize = 0x3C;
break;
case SignatureType.ECDSA_SHA1:
case SignatureType.ECDSA_SHA256:
ticket.SignatureSize = 0x3C;
ticket.PaddingSize = 0x40;
obj.SignatureSize = 0x3C;
obj.PaddingSize = 0x40;
break;
default:
return null;
}
ticket.Signature = data.ReadBytes(ticket.SignatureSize);
ticket.Padding = data.ReadBytes(ticket.PaddingSize);
obj.Signature = data.ReadBytes(obj.SignatureSize);
obj.Padding = data.ReadBytes(obj.PaddingSize);
byte[] issuer = data.ReadBytes(0x40);
ticket.Issuer = Encoding.ASCII.GetString(issuer).TrimEnd('\0');
ticket.ECCPublicKey = data.ReadBytes(0x3C);
ticket.Version = data.ReadByteValue();
ticket.CaCrlVersion = data.ReadByteValue();
ticket.SignerCrlVersion = data.ReadByteValue();
ticket.TitleKey = data.ReadBytes(0x10);
ticket.Reserved1 = data.ReadByteValue();
ticket.TicketID = data.ReadUInt64();
ticket.ConsoleID = data.ReadUInt32();
ticket.TitleID = data.ReadUInt64();
ticket.Reserved2 = data.ReadBytes(2);
ticket.TicketTitleVersion = data.ReadUInt16();
ticket.Reserved3 = data.ReadBytes(8);
ticket.LicenseType = data.ReadByteValue();
ticket.CommonKeyYIndex = data.ReadByteValue();
ticket.Reserved4 = data.ReadBytes(0x2A);
ticket.eShopAccountID = data.ReadUInt32();
ticket.Reserved5 = data.ReadByteValue();
ticket.Audit = data.ReadByteValue();
ticket.Reserved6 = data.ReadBytes(0x42);
ticket.Limits = new uint[0x10];
for (int i = 0; i < ticket.Limits.Length; i++)
obj.Issuer = Encoding.ASCII.GetString(issuer).TrimEnd('\0');
obj.ECCPublicKey = data.ReadBytes(0x3C);
obj.Version = data.ReadByteValue();
obj.CaCrlVersion = data.ReadByteValue();
obj.SignerCrlVersion = data.ReadByteValue();
obj.TitleKey = data.ReadBytes(0x10);
obj.Reserved1 = data.ReadByteValue();
obj.TicketID = data.ReadUInt64LittleEndian();
obj.ConsoleID = data.ReadUInt32LittleEndian();
obj.TitleID = data.ReadUInt64LittleEndian();
obj.Reserved2 = data.ReadBytes(2);
obj.TicketTitleVersion = data.ReadUInt16LittleEndian();
obj.Reserved3 = data.ReadBytes(8);
obj.LicenseType = data.ReadByteValue();
obj.CommonKeyYIndex = data.ReadByteValue();
obj.Reserved4 = data.ReadBytes(0x2A);
obj.eShopAccountID = data.ReadUInt32LittleEndian();
obj.Reserved5 = data.ReadByteValue();
obj.Audit = data.ReadByteValue();
obj.Reserved6 = data.ReadBytes(0x42);
obj.Limits = new uint[0x10];
for (int i = 0; i < obj.Limits.Length; i++)
{
ticket.Limits[i] = data.ReadUInt32();
obj.Limits[i] = data.ReadUInt32LittleEndian();
}
// Seek to the content index size
data.Seek(4, SeekOrigin.Current);
// Read the size (big-endian)
ticket.ContentIndexSize = data.ReadUInt32BigEndian();
obj.ContentIndexSize = data.ReadUInt32BigEndian();
// Seek back to the start of the content index
data.Seek(-8, SeekOrigin.Current);
ticket.ContentIndex = data.ReadBytes((int)ticket.ContentIndexSize);
obj.ContentIndex = data.ReadBytes((int)obj.ContentIndexSize);
// Certificates only exist in standalone CETK files
if (fromCdn)
{
ticket.CertificateChain = new Certificate[2];
obj.CertificateChain = new Certificate[2];
for (int i = 0; i < 2; i++)
{
var certificate = ParseCertificate(data);
if (certificate == null)
return null;
ticket.CertificateChain[i] = certificate;
obj.CertificateChain[i] = certificate;
}
}
return ticket;
return obj;
}
/// <summary>
@@ -306,90 +370,84 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled title metadata on success, null on error</returns>
public static TitleMetadata? ParseTitleMetadata(Stream data, bool fromCdn = false)
{
var titleMetadata = new TitleMetadata();
var obj = new TitleMetadata();
titleMetadata.SignatureType = (SignatureType)data.ReadUInt32();
switch (titleMetadata.SignatureType)
obj.SignatureType = (SignatureType)data.ReadUInt32LittleEndian();
switch (obj.SignatureType)
{
case SignatureType.RSA_4096_SHA1:
case SignatureType.RSA_4096_SHA256:
titleMetadata.SignatureSize = 0x200;
titleMetadata.PaddingSize = 0x3C;
obj.SignatureSize = 0x200;
obj.PaddingSize = 0x3C;
break;
case SignatureType.RSA_2048_SHA1:
case SignatureType.RSA_2048_SHA256:
titleMetadata.SignatureSize = 0x100;
titleMetadata.PaddingSize = 0x3C;
obj.SignatureSize = 0x100;
obj.PaddingSize = 0x3C;
break;
case SignatureType.ECDSA_SHA1:
case SignatureType.ECDSA_SHA256:
titleMetadata.SignatureSize = 0x3C;
titleMetadata.PaddingSize = 0x40;
obj.SignatureSize = 0x3C;
obj.PaddingSize = 0x40;
break;
default:
return null;
}
titleMetadata.Signature = data.ReadBytes(titleMetadata.SignatureSize);
titleMetadata.Padding1 = data.ReadBytes(titleMetadata.PaddingSize);
obj.Signature = data.ReadBytes(obj.SignatureSize);
obj.Padding1 = data.ReadBytes(obj.PaddingSize);
byte[] issuer = data.ReadBytes(0x40);
titleMetadata.Issuer = Encoding.ASCII.GetString(issuer).TrimEnd('\0');
titleMetadata.Version = data.ReadByteValue();
titleMetadata.CaCrlVersion = data.ReadByteValue();
titleMetadata.SignerCrlVersion = data.ReadByteValue();
titleMetadata.Reserved1 = data.ReadByteValue();
titleMetadata.SystemVersion = data.ReadUInt64();
titleMetadata.TitleID = data.ReadUInt64();
titleMetadata.TitleType = data.ReadUInt32();
titleMetadata.GroupID = data.ReadUInt16();
titleMetadata.SaveDataSize = data.ReadUInt32();
titleMetadata.SRLPrivateSaveDataSize = data.ReadUInt32();
titleMetadata.Reserved2 = data.ReadBytes(4);
titleMetadata.SRLFlag = data.ReadByteValue();
titleMetadata.Reserved3 = data.ReadBytes(0x31);
titleMetadata.AccessRights = data.ReadUInt32();
titleMetadata.TitleVersion = data.ReadUInt16();
titleMetadata.ContentCount = data.ReadUInt16BigEndian();
titleMetadata.BootContent = data.ReadUInt16();
titleMetadata.Padding2 = data.ReadBytes(2);
titleMetadata.SHA256HashContentInfoRecords = data.ReadBytes(0x20);
titleMetadata.ContentInfoRecords = new ContentInfoRecord[64];
obj.Issuer = Encoding.ASCII.GetString(issuer).TrimEnd('\0');
obj.Version = data.ReadByteValue();
obj.CaCrlVersion = data.ReadByteValue();
obj.SignerCrlVersion = data.ReadByteValue();
obj.Reserved1 = data.ReadByteValue();
obj.SystemVersion = data.ReadUInt64LittleEndian();
obj.TitleID = data.ReadUInt64LittleEndian();
obj.TitleType = data.ReadUInt32LittleEndian();
obj.GroupID = data.ReadUInt16LittleEndian();
obj.SaveDataSize = data.ReadUInt32LittleEndian();
obj.SRLPrivateSaveDataSize = data.ReadUInt32LittleEndian();
obj.Reserved2 = data.ReadBytes(4);
obj.SRLFlag = data.ReadByteValue();
obj.Reserved3 = data.ReadBytes(0x31);
obj.AccessRights = data.ReadUInt32LittleEndian();
obj.TitleVersion = data.ReadUInt16LittleEndian();
obj.ContentCount = data.ReadUInt16BigEndian();
obj.BootContent = data.ReadUInt16LittleEndian();
obj.Padding2 = data.ReadBytes(2);
obj.SHA256HashContentInfoRecords = data.ReadBytes(0x20);
obj.ContentInfoRecords = new ContentInfoRecord[64];
for (int i = 0; i < 64; i++)
{
var contentInfoRecord = data.ReadType<ContentInfoRecord>();
if (contentInfoRecord == null)
return null;
titleMetadata.ContentInfoRecords[i] = contentInfoRecord;
var contentInfoRecord = ParseContentInfoRecord(data);
obj.ContentInfoRecords[i] = contentInfoRecord;
}
titleMetadata.ContentChunkRecords = new ContentChunkRecord[titleMetadata.ContentCount];
for (int i = 0; i < titleMetadata.ContentCount; i++)
obj.ContentChunkRecords = new ContentChunkRecord[obj.ContentCount];
for (int i = 0; i < obj.ContentCount; i++)
{
var contentChunkRecord = data.ReadType<ContentChunkRecord>();
if (contentChunkRecord == null)
return null;
titleMetadata.ContentChunkRecords[i] = contentChunkRecord;
var contentChunkRecord = ParseContentChunkRecord(data);
obj.ContentChunkRecords[i] = contentChunkRecord;
}
// Certificates only exist in standalone TMD files
if (fromCdn)
{
titleMetadata.CertificateChain = new Certificate[2];
obj.CertificateChain = new Certificate[2];
for (int i = 0; i < 2; i++)
{
var certificate = ParseCertificate(data);
if (certificate == null)
return null;
titleMetadata.CertificateChain[i] = certificate;
obj.CertificateChain[i] = certificate;
}
}
return titleMetadata;
return obj;
}
}
}

View File

@@ -179,7 +179,7 @@ namespace SabreTools.Serialization.Deserializers
}
}
// If we're in the doscenter block
// If we're in the clrmamepro block
else if (reader.TopLevel == "clrmamepro"
&& reader.RowType == CmpRowType.Standalone)
{

View File

@@ -172,6 +172,9 @@ namespace SabreTools.Serialization.Deserializers
case "crc":
file.CRC = kvp.Value;
break;
case "sha1":
file.SHA1 = kvp.Value;
break;
case "date":
file.Date = kvp.Value;
break;

View File

@@ -22,10 +22,10 @@ namespace SabreTools.Serialization.Deserializers
#region Header
// Try to parse the header
var header = data.ReadType<Header>();
if (header?.Dummy0 != 0x00000001)
var header = ParseHeader(data);
if (header.Dummy0 != 0x00000001)
return null;
if (header?.MajorVersion != 0x00000001)
if (header.MajorVersion != 0x00000001)
return null;
if (header.MinorVersion != 3 && header.MinorVersion != 5 && header.MinorVersion != 6)
return null;
@@ -37,74 +37,49 @@ namespace SabreTools.Serialization.Deserializers
#region Block Entry Header
// Try to parse the block entry header
var blockEntryHeader = data.ReadType<BlockEntryHeader>();
if (blockEntryHeader == null)
return null;
// Set the game cache block entry header
file.BlockEntryHeader = blockEntryHeader;
file.BlockEntryHeader = ParseBlockEntryHeader(data);
#endregion
#region Block Entries
// Create the block entry array
file.BlockEntries = new BlockEntry[blockEntryHeader.BlockCount];
file.BlockEntries = new BlockEntry[file.BlockEntryHeader.BlockCount];
// Try to parse the block entries
for (int i = 0; i < blockEntryHeader.BlockCount; i++)
for (int i = 0; i < file.BlockEntryHeader.BlockCount; i++)
{
var blockEntry = data.ReadType<BlockEntry>();
if (blockEntry == null)
return null;
file.BlockEntries[i] = blockEntry;
file.BlockEntries[i] = ParseBlockEntry(data);
}
#endregion
#region Fragmentation Map Header
// Try to parse the fragmentation map header
var fragmentationMapHeader = data.ReadType<FragmentationMapHeader>();
if (fragmentationMapHeader == null)
return null;
// Set the game cache fragmentation map header
file.FragmentationMapHeader = fragmentationMapHeader;
file.FragmentationMapHeader = ParseFragmentationMapHeader(data);
#endregion
#region Fragmentation Maps
// Create the fragmentation map array
file.FragmentationMaps = new FragmentationMap[fragmentationMapHeader.BlockCount];
file.FragmentationMaps = new FragmentationMap[file.FragmentationMapHeader.BlockCount];
// Try to parse the fragmentation maps
for (int i = 0; i < fragmentationMapHeader.BlockCount; i++)
for (int i = 0; i < file.FragmentationMapHeader.BlockCount; i++)
{
var fragmentationMap = data.ReadType<FragmentationMap>();
if (fragmentationMap == null)
return null;
file.FragmentationMaps[i] = fragmentationMap;
file.FragmentationMaps[i] = ParseFragmentationMap(data);
}
#endregion
#region Block Entry Map Header
// Set the game cache block entry map header
if (header.MinorVersion < 6)
{
// Try to parse the block entry map header
var blockEntryMapHeader = data.ReadType<BlockEntryMapHeader>();
if (blockEntryMapHeader == null)
return null;
// Set the game cache block entry map header
file.BlockEntryMapHeader = blockEntryMapHeader;
}
file.BlockEntryMapHeader = ParseBlockEntryMapHeader(data);
#endregion
@@ -118,11 +93,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the block entry maps
for (int i = 0; i < file.BlockEntryMapHeader.BlockCount; i++)
{
var blockEntryMap = data.ReadType<BlockEntryMap>();
if (blockEntryMap == null)
return null;
file.BlockEntryMaps[i] = blockEntryMap;
file.BlockEntryMaps[i] = ParseBlockEntryMap(data);
}
}
@@ -133,42 +104,33 @@ namespace SabreTools.Serialization.Deserializers
#region Directory Header
// Try to parse the directory header
var directoryHeader = data.ReadType<DirectoryHeader>();
if (directoryHeader == null)
return null;
// Set the game cache directory header
file.DirectoryHeader = directoryHeader;
file.DirectoryHeader = ParseDirectoryHeader(data);
#endregion
#region Directory Entries
// Create the directory entry array
file.DirectoryEntries = new DirectoryEntry[directoryHeader.ItemCount];
file.DirectoryEntries = new DirectoryEntry[file.DirectoryHeader.ItemCount];
// Try to parse the directory entries
for (int i = 0; i < directoryHeader.ItemCount; i++)
for (int i = 0; i < file.DirectoryHeader.ItemCount; i++)
{
var directoryEntry = data.ReadType<DirectoryEntry>();
if (directoryEntry == null)
return null;
file.DirectoryEntries[i] = directoryEntry;
file.DirectoryEntries[i] = ParseDirectoryEntry(data);
}
#endregion
#region Directory Names
if (directoryHeader.NameSize > 0)
if (file.DirectoryHeader.NameSize > 0)
{
// Get the current offset for adjustment
long directoryNamesStart = data.Position;
// Get the ending offset
long directoryNamesEnd = data.Position + directoryHeader.NameSize;
long directoryNamesEnd = data.Position + file.DirectoryHeader.NameSize;
// Create the string dictionary
file.DirectoryNames = [];
@@ -194,16 +156,12 @@ namespace SabreTools.Serialization.Deserializers
#region Directory Info 1 Entries
// Create the directory info 1 entry array
file.DirectoryInfo1Entries = new DirectoryInfo1Entry[directoryHeader.Info1Count];
file.DirectoryInfo1Entries = new DirectoryInfo1Entry[file.DirectoryHeader.Info1Count];
// Try to parse the directory info 1 entries
for (int i = 0; i < directoryHeader.Info1Count; i++)
for (int i = 0; i < file.DirectoryHeader.Info1Count; i++)
{
var directoryInfo1Entry = data.ReadType<DirectoryInfo1Entry>();
if (directoryInfo1Entry == null)
return null;
file.DirectoryInfo1Entries[i] = directoryInfo1Entry;
file.DirectoryInfo1Entries[i] = ParseDirectoryInfo1Entry(data);
}
#endregion
@@ -211,16 +169,12 @@ namespace SabreTools.Serialization.Deserializers
#region Directory Info 2 Entries
// Create the directory info 2 entry array
file.DirectoryInfo2Entries = new DirectoryInfo2Entry[directoryHeader.ItemCount];
file.DirectoryInfo2Entries = new DirectoryInfo2Entry[file.DirectoryHeader.ItemCount];
// Try to parse the directory info 2 entries
for (int i = 0; i < directoryHeader.ItemCount; i++)
for (int i = 0; i < file.DirectoryHeader.ItemCount; i++)
{
var directoryInfo2Entry = data.ReadType<DirectoryInfo2Entry>();
if (directoryInfo2Entry == null)
return null;
file.DirectoryInfo2Entries[i] = directoryInfo2Entry;
file.DirectoryInfo2Entries[i] = ParseDirectoryInfo2Entry(data);
}
#endregion
@@ -228,16 +182,12 @@ namespace SabreTools.Serialization.Deserializers
#region Directory Copy Entries
// Create the directory copy entry array
file.DirectoryCopyEntries = new DirectoryCopyEntry[directoryHeader.CopyCount];
file.DirectoryCopyEntries = new DirectoryCopyEntry[file.DirectoryHeader.CopyCount];
// Try to parse the directory copy entries
for (int i = 0; i < directoryHeader.CopyCount; i++)
for (int i = 0; i < file.DirectoryHeader.CopyCount; i++)
{
var directoryCopyEntry = data.ReadType<DirectoryCopyEntry>();
if (directoryCopyEntry == null)
return null;
file.DirectoryCopyEntries[i] = directoryCopyEntry;
file.DirectoryCopyEntries[i] = ParseDirectoryCopyEntry(data);
}
#endregion
@@ -245,32 +195,28 @@ namespace SabreTools.Serialization.Deserializers
#region Directory Local Entries
// Create the directory local entry array
file.DirectoryLocalEntries = new DirectoryLocalEntry[directoryHeader.LocalCount];
file.DirectoryLocalEntries = new DirectoryLocalEntry[file.DirectoryHeader.LocalCount];
// Try to parse the directory local entries
for (int i = 0; i < directoryHeader.LocalCount; i++)
for (int i = 0; i < file.DirectoryHeader.LocalCount; i++)
{
var directoryLocalEntry = data.ReadType<DirectoryLocalEntry>();
if (directoryLocalEntry == null)
return null;
file.DirectoryLocalEntries[i] = directoryLocalEntry;
file.DirectoryLocalEntries[i] = ParseDirectoryLocalEntry(data);
}
#endregion
// Seek to end of directory section, just in case
data.Seek(initialOffset + directoryHeader.DirectorySize, SeekOrigin.Begin);
data.Seek(initialOffset + file.DirectoryHeader.DirectorySize, SeekOrigin.Begin);
#region Directory Map Header
if (header.MinorVersion >= 5)
{
// Try to parse the directory map header
var directoryMapHeader = data.ReadType<DirectoryMapHeader>();
if (directoryMapHeader?.Dummy0 != 0x00000001)
var directoryMapHeader = ParseDirectoryMapHeader(data);
if (directoryMapHeader.Dummy0 != 0x00000001)
return null;
if (directoryMapHeader?.Dummy1 != 0x00000000)
if (directoryMapHeader.Dummy1 != 0x00000000)
return null;
// Set the game cache directory map header
@@ -282,16 +228,12 @@ namespace SabreTools.Serialization.Deserializers
#region Directory Map Entries
// Create the directory map entry array
file.DirectoryMapEntries = new DirectoryMapEntry[directoryHeader.ItemCount];
file.DirectoryMapEntries = new DirectoryMapEntry[file.DirectoryHeader.ItemCount];
// Try to parse the directory map entries
for (int i = 0; i < directoryHeader.ItemCount; i++)
for (int i = 0; i < file.DirectoryHeader.ItemCount; i++)
{
var directoryMapEntry = data.ReadType<DirectoryMapEntry>();
if (directoryMapEntry == null)
return null;
file.DirectoryMapEntries[i] = directoryMapEntry;
file.DirectoryMapEntries[i] = ParseDirectoryMapEntry(data);
}
#endregion
@@ -299,7 +241,7 @@ namespace SabreTools.Serialization.Deserializers
#region Checksum Header
// Try to parse the checksum header
var checksumHeader = data.ReadType<ChecksumHeader>();
var checksumHeader = ParseChecksumHeader(data);
if (checksumHeader?.Dummy0 != 0x00000001)
return null;
@@ -314,7 +256,7 @@ namespace SabreTools.Serialization.Deserializers
#region Checksum Map Header
// Try to parse the checksum map header
var checksumMapHeader = data.ReadType<ChecksumMapHeader>();
var checksumMapHeader = ParseChecksumMapHeader(data);
if (checksumMapHeader?.Dummy0 != 0x14893721)
return null;
if (checksumMapHeader?.Dummy1 != 0x00000001)
@@ -333,11 +275,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the checksum map entries
for (int i = 0; i < checksumMapHeader.ItemCount; i++)
{
var checksumMapEntry = data.ReadType<ChecksumMapEntry>();
if (checksumMapEntry == null)
return null;
file.ChecksumMapEntries[i] = checksumMapEntry;
file.ChecksumMapEntries[i] = ParseChecksumMapEntry(data);
}
#endregion
@@ -350,11 +288,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the checksum entries
for (int i = 0; i < checksumMapHeader.ChecksumCount; i++)
{
var checksumEntry = data.ReadType<ChecksumEntry>();
if (checksumEntry == null)
return null;
file.ChecksumEntries[i] = checksumEntry;
file.ChecksumEntries[i] = ParseChecksumEntry(data);
}
#endregion
@@ -384,26 +318,348 @@ namespace SabreTools.Serialization.Deserializers
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache data block header
/// Parse a Stream into a BlockEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled BlockEntry on success, null on error</returns>
public static BlockEntry ParseBlockEntry(Stream data)
{
var obj = new BlockEntry();
obj.EntryFlags = data.ReadUInt32LittleEndian();
obj.FileDataOffset = data.ReadUInt32LittleEndian();
obj.FileDataSize = data.ReadUInt32LittleEndian();
obj.FirstDataBlockIndex = data.ReadUInt32LittleEndian();
obj.NextBlockEntryIndex = data.ReadUInt32LittleEndian();
obj.PreviousBlockEntryIndex = data.ReadUInt32LittleEndian();
obj.DirectoryIndex = data.ReadUInt32LittleEndian();
return obj;
}
/// <summary>
/// Parse a Stream into a BlockEntryHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled BlockEntryHeader on success, null on error</returns>
public static BlockEntryHeader ParseBlockEntryHeader(Stream data)
{
var obj = new BlockEntryHeader();
obj.BlockCount = data.ReadUInt32LittleEndian();
obj.BlocksUsed = data.ReadUInt32LittleEndian();
obj.Dummy0 = data.ReadUInt32LittleEndian();
obj.Dummy1 = data.ReadUInt32LittleEndian();
obj.Dummy2 = data.ReadUInt32LittleEndian();
obj.Dummy3 = data.ReadUInt32LittleEndian();
obj.Dummy4 = data.ReadUInt32LittleEndian();
obj.Checksum = data.ReadUInt32LittleEndian();
return obj;
}
/// <summary>
/// Parse a Stream into a BlockEntryMap
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled BlockEntryMap on success, null on error</returns>
public static BlockEntryMap ParseBlockEntryMap(Stream data)
{
    // A map entry is a pair of little-endian 32-bit block indices
    return new BlockEntryMap
    {
        PreviousBlockEntryIndex = data.ReadUInt32LittleEndian(),
        NextBlockEntryIndex = data.ReadUInt32LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into a BlockEntryMapHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled BlockEntryMapHeader on success, null on error</returns>
public static BlockEntryMapHeader ParseBlockEntryMapHeader(Stream data)
{
    // Five consecutive little-endian 32-bit fields, in on-disk order
    return new BlockEntryMapHeader
    {
        BlockCount = data.ReadUInt32LittleEndian(),
        FirstBlockEntryIndex = data.ReadUInt32LittleEndian(),
        LastBlockEntryIndex = data.ReadUInt32LittleEndian(),
        Dummy0 = data.ReadUInt32LittleEndian(),
        Checksum = data.ReadUInt32LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into a ChecksumEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ChecksumEntry on success, null on error</returns>
public static ChecksumEntry ParseChecksumEntry(Stream data)
{
    // An entry is a single little-endian 32-bit checksum value
    return new ChecksumEntry { Checksum = data.ReadUInt32LittleEndian() };
}
/// <summary>
/// Parse a Stream into a ChecksumHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ChecksumHeader on success, null on error</returns>
public static ChecksumHeader ParseChecksumHeader(Stream data)
{
    // Two consecutive little-endian 32-bit fields, in on-disk order
    return new ChecksumHeader
    {
        Dummy0 = data.ReadUInt32LittleEndian(),
        ChecksumSize = data.ReadUInt32LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into a ChecksumMapEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ChecksumMapEntry on success, null on error</returns>
public static ChecksumMapEntry ParseChecksumMapEntry(Stream data)
{
    // Two consecutive little-endian 32-bit fields, in on-disk order
    return new ChecksumMapEntry
    {
        ChecksumCount = data.ReadUInt32LittleEndian(),
        FirstChecksumIndex = data.ReadUInt32LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into a ChecksumMapHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ChecksumMapHeader on success, null on error</returns>
public static ChecksumMapHeader ParseChecksumMapHeader(Stream data)
{
    // Four consecutive little-endian 32-bit fields, in on-disk order
    return new ChecksumMapHeader
    {
        Dummy0 = data.ReadUInt32LittleEndian(),
        Dummy1 = data.ReadUInt32LittleEndian(),
        ItemCount = data.ReadUInt32LittleEndian(),
        ChecksumCount = data.ReadUInt32LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into a DataBlockHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="minorVersion">Minor version field from the header</param>
/// <returns>Filled DataBlockHeader on success, null on error</returns>
public static DataBlockHeader ParseDataBlockHeader(Stream data, uint minorVersion)
{
    var obj = new DataBlockHeader();

    // In version 3 the DataBlockHeader is missing the LastVersionPlayed field.
    if (minorVersion >= 5)
        obj.LastVersionPlayed = data.ReadUInt32LittleEndian();

    // Remaining fields are consecutive little-endian 32-bit values
    obj.BlockCount = data.ReadUInt32LittleEndian();
    obj.BlockSize = data.ReadUInt32LittleEndian();
    obj.FirstBlockOffset = data.ReadUInt32LittleEndian();
    obj.BlocksUsed = data.ReadUInt32LittleEndian();
    obj.Checksum = data.ReadUInt32LittleEndian();

    return obj;
}
/// <summary>
/// Parse a Stream into a DirectoryCopyEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled DirectoryCopyEntry on success, null on error</returns>
public static DirectoryCopyEntry ParseDirectoryCopyEntry(Stream data)
{
    // An entry is a single little-endian 32-bit directory index
    return new DirectoryCopyEntry { DirectoryIndex = data.ReadUInt32LittleEndian() };
}
/// <summary>
/// Parse a Stream into a DirectoryEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled DirectoryEntry on success, null on error</returns>
public static DirectoryEntry ParseDirectoryEntry(Stream data)
{
    // Seven consecutive little-endian 32-bit fields; DirectoryFlags is
    // stored as a raw value and reinterpreted as HL_GCF_FLAG.
    return new DirectoryEntry
    {
        NameOffset = data.ReadUInt32LittleEndian(),
        ItemSize = data.ReadUInt32LittleEndian(),
        ChecksumIndex = data.ReadUInt32LittleEndian(),
        DirectoryFlags = (HL_GCF_FLAG)data.ReadUInt32LittleEndian(),
        ParentIndex = data.ReadUInt32LittleEndian(),
        NextIndex = data.ReadUInt32LittleEndian(),
        FirstIndex = data.ReadUInt32LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into a DirectoryHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled DirectoryHeader on success, null on error</returns>
public static DirectoryHeader ParseDirectoryHeader(Stream data)
{
    // Fourteen consecutive little-endian 32-bit fields, in on-disk order
    return new DirectoryHeader
    {
        Dummy0 = data.ReadUInt32LittleEndian(),
        CacheID = data.ReadUInt32LittleEndian(),
        LastVersionPlayed = data.ReadUInt32LittleEndian(),
        ItemCount = data.ReadUInt32LittleEndian(),
        FileCount = data.ReadUInt32LittleEndian(),
        Dummy1 = data.ReadUInt32LittleEndian(),
        DirectorySize = data.ReadUInt32LittleEndian(),
        NameSize = data.ReadUInt32LittleEndian(),
        Info1Count = data.ReadUInt32LittleEndian(),
        CopyCount = data.ReadUInt32LittleEndian(),
        LocalCount = data.ReadUInt32LittleEndian(),
        Dummy2 = data.ReadUInt32LittleEndian(),
        Dummy3 = data.ReadUInt32LittleEndian(),
        Checksum = data.ReadUInt32LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into a DirectoryInfo1Entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled DirectoryInfo1Entry on success, null on error</returns>
public static DirectoryInfo1Entry ParseDirectoryInfo1Entry(Stream data)
{
    // An entry is a single little-endian 32-bit value
    return new DirectoryInfo1Entry { Dummy0 = data.ReadUInt32LittleEndian() };
}
/// <summary>
/// Parse a Stream into a DirectoryInfo2Entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled DirectoryInfo2Entry on success, null on error</returns>
public static DirectoryInfo2Entry ParseDirectoryInfo2Entry(Stream data)
{
    // An entry is a single little-endian 32-bit value
    return new DirectoryInfo2Entry { Dummy0 = data.ReadUInt32LittleEndian() };
}
/// <summary>
/// Parse a Stream into a DirectoryLocalEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled DirectoryLocalEntry on success, null on error</returns>
public static DirectoryLocalEntry ParseDirectoryLocalEntry(Stream data)
{
    // An entry is a single little-endian 32-bit directory index
    return new DirectoryLocalEntry { DirectoryIndex = data.ReadUInt32LittleEndian() };
}
/// <summary>
/// Parse a Stream into a DirectoryMapEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled DirectoryMapEntry on success, null on error</returns>
public static DirectoryMapEntry ParseDirectoryMapEntry(Stream data)
{
    // An entry is a single little-endian 32-bit block index
    return new DirectoryMapEntry { FirstBlockIndex = data.ReadUInt32LittleEndian() };
}
/// <summary>
/// Parse a Stream into a DirectoryMapHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled DirectoryMapHeader on success, null on error</returns>
public static DirectoryMapHeader ParseDirectoryMapHeader(Stream data)
{
    // Two consecutive little-endian 32-bit fields, in on-disk order
    return new DirectoryMapHeader
    {
        Dummy0 = data.ReadUInt32LittleEndian(),
        Dummy1 = data.ReadUInt32LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into a FragmentationMap
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled FragmentationMap on success, null on error</returns>
public static FragmentationMap ParseFragmentationMap(Stream data)
{
    // An entry is a single little-endian 32-bit data block index
    return new FragmentationMap { NextDataBlockIndex = data.ReadUInt32LittleEndian() };
}
/// <summary>
/// Parse a Stream into a FragmentationMapHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled FragmentationMapHeader on success, null on error</returns>
public static FragmentationMapHeader ParseFragmentationMapHeader(Stream data)
{
    // Four consecutive little-endian 32-bit fields, in on-disk order
    return new FragmentationMapHeader
    {
        BlockCount = data.ReadUInt32LittleEndian(),
        FirstUnusedEntry = data.ReadUInt32LittleEndian(),
        Terminator = data.ReadUInt32LittleEndian(),
        Checksum = data.ReadUInt32LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into a Header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Header on success, null on error</returns>
public static Header ParseHeader(Stream data)
{
    // Eleven consecutive little-endian 32-bit fields, in on-disk order
    return new Header
    {
        Dummy0 = data.ReadUInt32LittleEndian(),
        MajorVersion = data.ReadUInt32LittleEndian(),
        MinorVersion = data.ReadUInt32LittleEndian(),
        CacheID = data.ReadUInt32LittleEndian(),
        LastVersionPlayed = data.ReadUInt32LittleEndian(),
        Dummy1 = data.ReadUInt32LittleEndian(),
        Dummy2 = data.ReadUInt32LittleEndian(),
        FileSize = data.ReadUInt32LittleEndian(),
        BlockSize = data.ReadUInt32LittleEndian(),
        BlockCount = data.ReadUInt32LittleEndian(),
        Dummy3 = data.ReadUInt32LittleEndian(),
    };
}
}
}

View File

@@ -15,7 +15,7 @@ namespace SabreTools.Serialization.Deserializers
try
{
// Create a new IRD to fill
// Deserialize the IRD
var ird = new Models.IRD.File();
ird.Magic = data.ReadBytes(4);
@@ -44,7 +44,7 @@ namespace SabreTools.Serialization.Deserializers
ird.AppVersion = Encoding.ASCII.GetString(appVersion);
if (ird.Version == 7)
ird.UID = data.ReadUInt32();
ird.UID = data.ReadUInt32LittleEndian();
ird.HeaderLength = data.ReadByteValue();
ird.Header = data.ReadBytes((int)ird.HeaderLength);
@@ -63,12 +63,12 @@ namespace SabreTools.Serialization.Deserializers
ird.FileHashes = new byte[ird.FileCount][];
for (int i = 0; i < ird.FileCount; i++)
{
ird.FileKeys[i] = data.ReadUInt64();
ird.FileKeys[i] = data.ReadUInt64LittleEndian();
ird.FileHashes[i] = data.ReadBytes(16) ?? [];
}
ird.ExtraConfig = data.ReadUInt16();
ird.Attachments = data.ReadUInt16();
ird.ExtraConfig = data.ReadUInt16LittleEndian();
ird.Attachments = data.ReadUInt16LittleEndian();
if (ird.Version >= 9)
ird.PIC = data.ReadBytes(115);
@@ -80,9 +80,9 @@ namespace SabreTools.Serialization.Deserializers
ird.PIC = data.ReadBytes(115);
if (ird.Version > 7)
ird.UID = data.ReadUInt32();
ird.UID = data.ReadUInt32LittleEndian();
ird.CRC = data.ReadUInt32();
ird.CRC = data.ReadUInt32LittleEndian();
return ird;
}

View File

@@ -22,8 +22,8 @@ namespace SabreTools.Serialization.Deserializers
#region Header
// Try to parse the header
var header = data.ReadType<Header>();
if (header?.Signature1 != Constants.HeaderSignature)
var header = ParseHeader(data);
if (header.Signature1 != Constants.HeaderSignature)
return null;
if (header.TocAddress >= data.Length)
return null;
@@ -48,11 +48,8 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < header.DirCount; i++)
{
var directory = ParseDirectory(data);
if (directory?.Name == null)
return null;
directories.Add(directory);
data.Seek(directory.ChunkSize - directory.Name.Length - 6, SeekOrigin.Current);
data.Seek(directory.ChunkSize - directory.Name!.Length - 6, SeekOrigin.Current);
}
// Set the directories
@@ -69,12 +66,9 @@ namespace SabreTools.Serialization.Deserializers
var directory = archive.Directories[i];
for (int j = 0; j < directory.FileCount; j++)
{
var file = data.ReadType<Models.InstallShieldArchiveV3.File>();
if (file?.Name == null)
return null;
var file = ParseFile(data);
files.Add(file);
data.Seek(file.ChunkSize - file.Name.Length - 30, SeekOrigin.Current);
data.Seek(file.ChunkSize - file.Name!.Length - 30, SeekOrigin.Current);
}
}
@@ -93,22 +87,80 @@ namespace SabreTools.Serialization.Deserializers
}
/// <summary>
/// Parse a Stream into a Directory
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Directory on success, null on error</returns>
public static Models.InstallShieldArchiveV3.Directory ParseDirectory(Stream data)
{
    var obj = new Models.InstallShieldArchiveV3.Directory();

    // Fixed little-endian 16-bit fields
    obj.FileCount = data.ReadUInt16LittleEndian();
    obj.ChunkSize = data.ReadUInt16LittleEndian();

    // The name is a 16-bit-length-prefixed ANSI string
    ushort nameLength = data.ReadUInt16LittleEndian();
    byte[] nameBytes = data.ReadBytes(nameLength);
    obj.Name = Encoding.ASCII.GetString(nameBytes);

    return obj;
}
/// <summary>
/// Parse a Stream into a File
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled File on success, null on error</returns>
public static Models.InstallShieldArchiveV3.File ParseFile(Stream data)
{
    // Fixed-size fields in on-disk order (little-endian where multi-byte),
    // followed by a length-prefixed ANSI name.
    return new Models.InstallShieldArchiveV3.File
    {
        VolumeEnd = data.ReadByteValue(),
        Index = data.ReadUInt16LittleEndian(),
        UncompressedSize = data.ReadUInt32LittleEndian(),
        CompressedSize = data.ReadUInt32LittleEndian(),
        Offset = data.ReadUInt32LittleEndian(),
        DateTime = data.ReadUInt32LittleEndian(),
        Reserved0 = data.ReadUInt32LittleEndian(),
        ChunkSize = data.ReadUInt16LittleEndian(),
        Attrib = (Attributes)data.ReadByteValue(),
        IsSplit = data.ReadByteValue(),
        Reserved1 = data.ReadByteValue(),
        VolumeStart = data.ReadByteValue(),
        Name = data.ReadPrefixedAnsiString(),
    };
}
/// <summary>
/// Parse a Stream into a Header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Header on success, null on error</returns>
public static Header ParseHeader(Stream data)
{
    // Fixed-size fields in on-disk order (little-endian where multi-byte)
    return new Header
    {
        Signature1 = data.ReadUInt32LittleEndian(),
        Signature2 = data.ReadUInt32LittleEndian(),
        Reserved0 = data.ReadUInt16LittleEndian(),
        IsMultivolume = data.ReadUInt16LittleEndian(),
        FileCount = data.ReadUInt16LittleEndian(),
        DateTime = data.ReadUInt32LittleEndian(),
        CompressedSize = data.ReadUInt32LittleEndian(),
        UncompressedSize = data.ReadUInt32LittleEndian(),
        Reserved1 = data.ReadUInt32LittleEndian(),
        VolumeTotal = data.ReadByteValue(),
        VolumeNumber = data.ReadByteValue(),
        Reserved2 = data.ReadByteValue(),
        SplitBeginAddress = data.ReadUInt32LittleEndian(),
        SplitEndAddress = data.ReadUInt32LittleEndian(),
        TocAddress = data.ReadUInt32LittleEndian(),
        Reserved3 = data.ReadUInt32LittleEndian(),
        DirCount = data.ReadUInt16LittleEndian(),
        Reserved4 = data.ReadUInt32LittleEndian(),
        Reserved5 = data.ReadUInt32LittleEndian(),
    };
}
}
}

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.InstallShieldCabinet;
using static SabreTools.Models.InstallShieldCabinet.Constants;
@@ -25,8 +26,8 @@ namespace SabreTools.Serialization.Deserializers
#region Common Header
// Try to parse the cabinet header
var commonHeader = data.ReadType<CommonHeader>();
if (commonHeader?.Signature != SignatureString)
var commonHeader = ParseCommonHeader(data);
if (commonHeader.Signature != SignatureString)
return null;
// Set the cabinet header
@@ -34,15 +35,13 @@ namespace SabreTools.Serialization.Deserializers
#endregion
// Get the major version
int majorVersion = GetMajorVersion(commonHeader);
#region Volume Header
// Try to parse the volume header
var volumeHeader = ParseVolumeHeader(data, GetMajorVersion(commonHeader));
if (volumeHeader == null)
return null;
// Set the volume header
cabinet.VolumeHeader = volumeHeader;
cabinet.VolumeHeader = ParseVolumeHeader(data, majorVersion);
#endregion
@@ -56,20 +55,15 @@ namespace SabreTools.Serialization.Deserializers
// Seek to the descriptor
data.Seek(descriptorOffset, SeekOrigin.Begin);
// Try to parse the descriptor
var descriptor = data.ReadType<Descriptor>();
if (descriptor == null)
return null;
// Set the descriptor
cabinet.Descriptor = descriptor;
cabinet.Descriptor = ParseDescriptor(data);
#endregion
#region File Descriptor Offsets
// Get the file table offset
uint fileTableOffset = commonHeader.DescriptorOffset + descriptor.FileTableOffset;
uint fileTableOffset = commonHeader.DescriptorOffset + cabinet.Descriptor.FileTableOffset;
if (fileTableOffset < 0 || fileTableOffset >= data.Length)
return null;
@@ -78,16 +72,16 @@ namespace SabreTools.Serialization.Deserializers
// Get the number of file table items
uint fileTableItems;
if (GetMajorVersion(commonHeader) <= 5)
fileTableItems = descriptor.DirectoryCount + descriptor.FileCount;
if (majorVersion <= 5)
fileTableItems = cabinet.Descriptor.DirectoryCount + cabinet.Descriptor.FileCount;
else
fileTableItems = descriptor.DirectoryCount;
fileTableItems = cabinet.Descriptor.DirectoryCount;
// Create and fill the file table
cabinet.FileDescriptorOffsets = new uint[fileTableItems];
for (int i = 0; i < cabinet.FileDescriptorOffsets.Length; i++)
{
cabinet.FileDescriptorOffsets[i] = data.ReadUInt32();
cabinet.FileDescriptorOffsets[i] = data.ReadUInt32LittleEndian();
}
#endregion
@@ -95,12 +89,12 @@ namespace SabreTools.Serialization.Deserializers
#region Directory Descriptors
// Create and fill the directory descriptors
cabinet.DirectoryNames = new string[descriptor.DirectoryCount];
for (int i = 0; i < descriptor.DirectoryCount; i++)
cabinet.DirectoryNames = new string[cabinet.Descriptor.DirectoryCount];
for (int i = 0; i < cabinet.Descriptor.DirectoryCount; i++)
{
// Get the directory descriptor offset
uint offset = descriptorOffset
+ descriptor.FileTableOffset
+ cabinet.Descriptor.FileTableOffset
+ cabinet.FileDescriptorOffsets[i];
// If we have an invalid offset
@@ -111,7 +105,7 @@ namespace SabreTools.Serialization.Deserializers
data.Seek(offset, SeekOrigin.Begin);
// Create and add the file descriptor
string? directoryName = ParseDirectoryName(data, GetMajorVersion(commonHeader));
string? directoryName = ParseDirectoryName(data, majorVersion);
if (directoryName != null)
cabinet.DirectoryNames[i] = directoryName;
}
@@ -121,22 +115,22 @@ namespace SabreTools.Serialization.Deserializers
#region File Descriptors
// Create and fill the file descriptors
cabinet.FileDescriptors = new FileDescriptor[descriptor.FileCount];
for (int i = 0; i < descriptor.FileCount; i++)
cabinet.FileDescriptors = new FileDescriptor[cabinet.Descriptor.FileCount];
for (int i = 0; i < cabinet.Descriptor.FileCount; i++)
{
// Get the file descriptor offset
uint offset;
if (GetMajorVersion(commonHeader) <= 5)
if (majorVersion <= 5)
{
offset = descriptorOffset
+ descriptor.FileTableOffset
+ cabinet.FileDescriptorOffsets[descriptor.DirectoryCount + i];
+ cabinet.Descriptor.FileTableOffset
+ cabinet.FileDescriptorOffsets[cabinet.Descriptor.DirectoryCount + i];
}
else
{
offset = descriptorOffset
+ descriptor.FileTableOffset
+ descriptor.FileTableOffset2
+ cabinet.Descriptor.FileTableOffset
+ cabinet.Descriptor.FileTableOffset2
+ (uint)(i * 0x57);
}
@@ -148,8 +142,9 @@ namespace SabreTools.Serialization.Deserializers
data.Seek(offset, SeekOrigin.Begin);
// Create and add the file descriptor
FileDescriptor fileDescriptor = ParseFileDescriptor(data, GetMajorVersion(commonHeader), descriptorOffset + descriptor.FileTableOffset);
cabinet.FileDescriptors[i] = fileDescriptor;
cabinet.FileDescriptors[i] = ParseFileDescriptor(data,
majorVersion,
descriptorOffset + cabinet.Descriptor.FileTableOffset);
}
#endregion
@@ -158,10 +153,10 @@ namespace SabreTools.Serialization.Deserializers
// Create and fill the file group offsets
cabinet.FileGroupOffsets = new Dictionary<long, OffsetList?>();
for (int i = 0; i < (descriptor.FileGroupOffsets?.Length ?? 0); i++)
for (int i = 0; i < (cabinet.Descriptor.FileGroupOffsets?.Length ?? 0); i++)
{
// Get the file group offset
uint offset = descriptor.FileGroupOffsets![i];
uint offset = cabinet.Descriptor.FileGroupOffsets![i];
if (offset == 0)
continue;
@@ -174,8 +169,8 @@ namespace SabreTools.Serialization.Deserializers
data.Seek(offset, SeekOrigin.Begin);
// Create and add the offset
OffsetList offsetList = ParseOffsetList(data, GetMajorVersion(commonHeader), descriptorOffset);
cabinet.FileGroupOffsets[descriptor.FileGroupOffsets[i]] = offsetList;
OffsetList offsetList = ParseOffsetList(data, majorVersion, descriptorOffset);
cabinet.FileGroupOffsets[offset] = offsetList;
// If we have a nonzero next offset
uint nextOffset = offsetList.NextOffset;
@@ -188,7 +183,7 @@ namespace SabreTools.Serialization.Deserializers
data.Seek(internalOffset, SeekOrigin.Begin);
// Create and add the offset
offsetList = ParseOffsetList(data, GetMajorVersion(commonHeader), descriptorOffset);
offsetList = ParseOffsetList(data, majorVersion, descriptorOffset);
cabinet.FileGroupOffsets[nextOffset] = offsetList;
// Set the next offset
@@ -225,13 +220,8 @@ namespace SabreTools.Serialization.Deserializers
/// Seek to the file group
data.Seek(list.DescriptorOffset + descriptorOffset, SeekOrigin.Begin);
// Try to parse the file group
var fileGroup = ParseFileGroup(data, GetMajorVersion(commonHeader), descriptorOffset);
if (fileGroup == null)
return null;
// Add the file group
cabinet.FileGroups[fileGroupId++] = fileGroup;
cabinet.FileGroups[fileGroupId++] = ParseFileGroup(data, majorVersion, descriptorOffset);
}
#endregion
@@ -240,10 +230,10 @@ namespace SabreTools.Serialization.Deserializers
// Create and fill the component offsets
cabinet.ComponentOffsets = new Dictionary<long, OffsetList?>();
for (int i = 0; i < (descriptor.ComponentOffsets?.Length ?? 0); i++)
for (int i = 0; i < (cabinet.Descriptor.ComponentOffsets?.Length ?? 0); i++)
{
// Get the component offset
uint offset = descriptor.ComponentOffsets![i];
uint offset = cabinet.Descriptor.ComponentOffsets![i];
if (offset == 0)
continue;
@@ -256,8 +246,8 @@ namespace SabreTools.Serialization.Deserializers
data.Seek(offset, SeekOrigin.Begin);
// Create and add the offset
OffsetList offsetList = ParseOffsetList(data, GetMajorVersion(commonHeader), descriptorOffset);
cabinet.ComponentOffsets[descriptor.ComponentOffsets[i]] = offsetList;
OffsetList offsetList = ParseOffsetList(data, majorVersion, descriptorOffset);
cabinet.ComponentOffsets[cabinet.Descriptor.ComponentOffsets[i]] = offsetList;
// If we have a nonzero next offset
uint nextOffset = offsetList.NextOffset;
@@ -270,7 +260,7 @@ namespace SabreTools.Serialization.Deserializers
data.Seek(internalOffset, SeekOrigin.Begin);
// Create and add the offset
offsetList = ParseOffsetList(data, GetMajorVersion(commonHeader), descriptorOffset);
offsetList = ParseOffsetList(data, majorVersion, descriptorOffset);
cabinet.ComponentOffsets[nextOffset] = offsetList;
// Set the next offset
@@ -307,13 +297,8 @@ namespace SabreTools.Serialization.Deserializers
// Seek to the component
data.Seek(list.DescriptorOffset + descriptorOffset, SeekOrigin.Begin);
// Try to parse the component
var component = ParseComponent(data, GetMajorVersion(commonHeader), descriptorOffset);
if (component == null)
return null;
// Add the component
cabinet.Components[componentId++] = component;
cabinet.Components[componentId++] = ParseComponent(data, majorVersion, descriptorOffset);
}
#endregion
@@ -330,251 +315,132 @@ namespace SabreTools.Serialization.Deserializers
}
/// <summary>
/// Parse a Stream into a CommonHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled CommonHeader on success, null on error</returns>
public static CommonHeader ParseCommonHeader(Stream data)
{
    var obj = new CommonHeader();

    // 4-byte ASCII signature, then four little-endian 32-bit fields
    byte[] signature = data.ReadBytes(4);
    obj.Signature = Encoding.ASCII.GetString(signature);
    obj.Version = data.ReadUInt32LittleEndian();
    obj.VolumeInfo = data.ReadUInt32LittleEndian();
    obj.DescriptorOffset = data.ReadUInt32LittleEndian();
    obj.DescriptorSize = data.ReadUInt32LittleEndian();

    return obj;
}
/// <summary>
/// Parse a Stream into an OffsetList
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled OffsetList on success, null on error</returns>
public static OffsetList ParseOffsetList(Stream data, int majorVersion, uint descriptorOffset)
{
    var offsetList = new OffsetList();

    // Three consecutive little-endian 32-bit fields
    offsetList.NameOffset = data.ReadUInt32LittleEndian();
    offsetList.DescriptorOffset = data.ReadUInt32LittleEndian();
    offsetList.NextOffset = data.ReadUInt32LittleEndian();

    // Cache the current offset so it can be restored after reading the name
    long currentOffset = data.Position;

    // Seek to the name offset
    data.Seek(offsetList.NameOffset + descriptorOffset, SeekOrigin.Begin);

    // Read the string; V17+ cabinets use Unicode strings, older use ANSI
    if (majorVersion >= 17)
        offsetList.Name = data.ReadNullTerminatedUnicodeString();
    else
        offsetList.Name = data.ReadNullTerminatedAnsiString();

    // Seek back to the correct offset
    data.Seek(currentOffset, SeekOrigin.Begin);

    return offsetList;
}
/// <summary>
/// Parse a Stream into a FileGroup
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled FileGroup on success, null on error</returns>
public static FileGroup ParseFileGroup(Stream data, int majorVersion, uint descriptorOffset)
{
    var fileGroup = new FileGroup();

    fileGroup.NameOffset = data.ReadUInt32LittleEndian();
    fileGroup.ExpandedSize = data.ReadUInt32LittleEndian();
    fileGroup.CompressedSize = data.ReadUInt32LittleEndian();
    fileGroup.Attributes = (FileGroupAttributes)data.ReadUInt16LittleEndian();

    // TODO: Figure out what data lives in this area for V5 and below
    if (majorVersion <= 5)
        data.Seek(0x36, SeekOrigin.Current);

    fileGroup.FirstFile = data.ReadUInt32LittleEndian();
    fileGroup.LastFile = data.ReadUInt32LittleEndian();
    fileGroup.UnknownStringOffset = data.ReadUInt32LittleEndian();
    fileGroup.OperatingSystemOffset = data.ReadUInt32LittleEndian();
    fileGroup.LanguageOffset = data.ReadUInt32LittleEndian();
    fileGroup.HTTPLocationOffset = data.ReadUInt32LittleEndian();
    fileGroup.FTPLocationOffset = data.ReadUInt32LittleEndian();
    fileGroup.MiscOffset = data.ReadUInt32LittleEndian();
    fileGroup.TargetDirectoryOffset = data.ReadUInt32LittleEndian();
    fileGroup.OverwriteFlags = (FileGroupFlags)data.ReadUInt32LittleEndian();

    fileGroup.Reserved = new uint[4];
    for (int i = 0; i < fileGroup.Reserved.Length; i++)
    {
        fileGroup.Reserved[i] = data.ReadUInt32LittleEndian();
    }

    // Cache the current position so it can be restored after reading the name
    long currentPosition = data.Position;

    // Read the name, if possible
    if (fileGroup.NameOffset != 0)
    {
        // Seek to the name
        data.Seek(fileGroup.NameOffset + descriptorOffset, SeekOrigin.Begin);

        // Read the string; V17+ cabinets use Unicode strings, older use ANSI
        if (majorVersion >= 17)
            fileGroup.Name = data.ReadNullTerminatedUnicodeString();
        else
            fileGroup.Name = data.ReadNullTerminatedAnsiString();
    }

    // Seek back to the correct offset
    data.Seek(currentPosition, SeekOrigin.Begin);

    return fileGroup;
}
/// <summary>
/// Parse a Stream into a component
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled component on success, null on error</returns>
/// <returns>Filled Component on success, null on error</returns>
public static Component ParseComponent(Stream data, int majorVersion, uint descriptorOffset)
{
var component = new Component();
var obj = new Component();
component.IdentifierOffset = data.ReadUInt32();
component.DescriptorOffset = data.ReadUInt32();
component.DisplayNameOffset = data.ReadUInt32();
component.Status = (ComponentStatus)data.ReadUInt16();
component.PasswordOffset = data.ReadUInt32();
component.MiscOffset = data.ReadUInt32();
component.ComponentIndex = data.ReadUInt16();
component.NameOffset = data.ReadUInt32();
component.CDRomFolderOffset = data.ReadUInt32();
component.HTTPLocationOffset = data.ReadUInt32();
component.FTPLocationOffset = data.ReadUInt32();
component.Guid = new Guid[2];
for (int i = 0; i < component.Guid.Length; i++)
obj.IdentifierOffset = data.ReadUInt32LittleEndian();
obj.DescriptorOffset = data.ReadUInt32LittleEndian();
obj.DisplayNameOffset = data.ReadUInt32LittleEndian();
obj.Status = (ComponentStatus)data.ReadUInt16LittleEndian();
obj.PasswordOffset = data.ReadUInt32LittleEndian();
obj.MiscOffset = data.ReadUInt32LittleEndian();
obj.ComponentIndex = data.ReadUInt16LittleEndian();
obj.NameOffset = data.ReadUInt32LittleEndian();
obj.CDRomFolderOffset = data.ReadUInt32LittleEndian();
obj.HTTPLocationOffset = data.ReadUInt32LittleEndian();
obj.FTPLocationOffset = data.ReadUInt32LittleEndian();
obj.Guid = new Guid[2];
for (int i = 0; i < obj.Guid.Length; i++)
{
component.Guid[i] = data.ReadGuid();
obj.Guid[i] = data.ReadGuid();
}
component.CLSIDOffset = data.ReadUInt32();
component.Reserved2 = data.ReadBytes(28);
component.Reserved3 = data.ReadBytes(majorVersion <= 5 ? 2 : 1);
component.DependsCount = data.ReadUInt16();
component.DependsOffset = data.ReadUInt32();
component.FileGroupCount = data.ReadUInt16();
component.FileGroupNamesOffset = data.ReadUInt32();
component.X3Count = data.ReadUInt16();
component.X3Offset = data.ReadUInt32();
component.SubComponentsCount = data.ReadUInt16();
component.SubComponentsOffset = data.ReadUInt32();
component.NextComponentOffset = data.ReadUInt32();
component.OnInstallingOffset = data.ReadUInt32();
component.OnInstalledOffset = data.ReadUInt32();
component.OnUninstallingOffset = data.ReadUInt32();
component.OnUninstalledOffset = data.ReadUInt32();
obj.CLSIDOffset = data.ReadUInt32LittleEndian();
obj.Reserved2 = data.ReadBytes(28);
obj.Reserved3 = data.ReadBytes(majorVersion <= 5 ? 2 : 1);
obj.DependsCount = data.ReadUInt16LittleEndian();
obj.DependsOffset = data.ReadUInt32LittleEndian();
obj.FileGroupCount = data.ReadUInt16LittleEndian();
obj.FileGroupNamesOffset = data.ReadUInt32LittleEndian();
obj.X3Count = data.ReadUInt16LittleEndian();
obj.X3Offset = data.ReadUInt32LittleEndian();
obj.SubComponentsCount = data.ReadUInt16LittleEndian();
obj.SubComponentsOffset = data.ReadUInt32LittleEndian();
obj.NextComponentOffset = data.ReadUInt32LittleEndian();
obj.OnInstallingOffset = data.ReadUInt32LittleEndian();
obj.OnInstalledOffset = data.ReadUInt32LittleEndian();
obj.OnUninstallingOffset = data.ReadUInt32LittleEndian();
obj.OnUninstalledOffset = data.ReadUInt32LittleEndian();
// Cache the current position
long currentPosition = data.Position;
// Read the identifier, if possible
if (component.IdentifierOffset != 0)
if (obj.IdentifierOffset != 0)
{
// Seek to the identifier
data.Seek(component.IdentifierOffset + descriptorOffset, SeekOrigin.Begin);
data.Seek(obj.IdentifierOffset + descriptorOffset, SeekOrigin.Begin);
// Read the string
if (majorVersion >= 17)
component.Identifier = data.ReadNullTerminatedUnicodeString();
obj.Identifier = data.ReadNullTerminatedUnicodeString();
else
component.Identifier = data.ReadNullTerminatedAnsiString();
obj.Identifier = data.ReadNullTerminatedAnsiString();
}
// Read the display name, if possible
if (component.DisplayNameOffset != 0)
if (obj.DisplayNameOffset != 0)
{
// Seek to the name
data.Seek(component.DisplayNameOffset + descriptorOffset, SeekOrigin.Begin);
data.Seek(obj.DisplayNameOffset + descriptorOffset, SeekOrigin.Begin);
// Read the string
if (majorVersion >= 17)
component.DisplayName = data.ReadNullTerminatedUnicodeString();
obj.DisplayName = data.ReadNullTerminatedUnicodeString();
else
component.DisplayName = data.ReadNullTerminatedAnsiString();
obj.DisplayName = data.ReadNullTerminatedAnsiString();
}
// Read the name, if possible
if (component.NameOffset != 0)
if (obj.NameOffset != 0)
{
// Seek to the name
data.Seek(component.NameOffset + descriptorOffset, SeekOrigin.Begin);
data.Seek(obj.NameOffset + descriptorOffset, SeekOrigin.Begin);
// Read the string
if (majorVersion >= 17)
component.Name = data.ReadNullTerminatedUnicodeString();
obj.Name = data.ReadNullTerminatedUnicodeString();
else
component.Name = data.ReadNullTerminatedAnsiString();
obj.Name = data.ReadNullTerminatedAnsiString();
}
// Read the CLSID, if possible
if (component.CLSIDOffset != 0)
if (obj.CLSIDOffset != 0)
{
// Seek to the CLSID
data.Seek(component.CLSIDOffset + descriptorOffset, SeekOrigin.Begin);
data.Seek(obj.CLSIDOffset + descriptorOffset, SeekOrigin.Begin);
// Read the GUID
component.CLSID = data.ReadGuid();
obj.CLSID = data.ReadGuid();
}
// Read the file group names, if possible
if (component.FileGroupCount != 0 && component.FileGroupNamesOffset != 0)
if (obj.FileGroupCount != 0 && obj.FileGroupNamesOffset != 0)
{
// Seek to the file group table offset
data.Seek(component.FileGroupNamesOffset + descriptorOffset, SeekOrigin.Begin);
data.Seek(obj.FileGroupNamesOffset + descriptorOffset, SeekOrigin.Begin);
// Read the file group names table
component.FileGroupNames = new string[component.FileGroupCount];
for (int j = 0; j < component.FileGroupCount; j++)
obj.FileGroupNames = new string[obj.FileGroupCount];
for (int j = 0; j < obj.FileGroupCount; j++)
{
// Get the name offset
uint nameOffset = data.ReadUInt32();
uint nameOffset = data.ReadUInt32LittleEndian();
// Cache the current offset
long preNameOffset = data.Position;
@@ -583,9 +449,9 @@ namespace SabreTools.Serialization.Deserializers
data.Seek(nameOffset + descriptorOffset, SeekOrigin.Begin);
if (majorVersion >= 17)
component.FileGroupNames[j] = data.ReadNullTerminatedUnicodeString() ?? string.Empty;
obj.FileGroupNames[j] = data.ReadNullTerminatedUnicodeString() ?? string.Empty;
else
component.FileGroupNames[j] = data.ReadNullTerminatedAnsiString() ?? string.Empty;
obj.FileGroupNames[j] = data.ReadNullTerminatedAnsiString() ?? string.Empty;
// Seek back to the original position
data.Seek(preNameOffset, SeekOrigin.Begin);
@@ -595,7 +461,51 @@ namespace SabreTools.Serialization.Deserializers
// Seek back to the correct offset
data.Seek(currentPosition, SeekOrigin.Begin);
return component;
return obj;
}
/// <summary>
/// Parse a Stream into a Descriptor
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Descriptor on success, null on error</returns>
public static Descriptor ParseDescriptor(Stream data)
{
    var descriptor = new Descriptor();

    // Fixed-size header fields, all stored little-endian
    descriptor.StringsOffset = data.ReadUInt32LittleEndian();
    descriptor.Reserved0 = data.ReadUInt32LittleEndian();
    descriptor.ComponentListOffset = data.ReadUInt32LittleEndian();
    descriptor.FileTableOffset = data.ReadUInt32LittleEndian();
    descriptor.Reserved1 = data.ReadUInt32LittleEndian();
    descriptor.FileTableSize = data.ReadUInt32LittleEndian();
    descriptor.FileTableSize2 = data.ReadUInt32LittleEndian();
    descriptor.DirectoryCount = data.ReadUInt16LittleEndian();
    descriptor.Reserved2 = data.ReadUInt32LittleEndian();
    descriptor.Reserved3 = data.ReadUInt16LittleEndian();
    descriptor.Reserved4 = data.ReadUInt32LittleEndian();
    descriptor.FileCount = data.ReadUInt32LittleEndian();
    descriptor.FileTableOffset2 = data.ReadUInt32LittleEndian();
    descriptor.ComponentTableInfoCount = data.ReadUInt16LittleEndian();
    descriptor.ComponentTableOffset = data.ReadUInt32LittleEndian();
    descriptor.Reserved5 = data.ReadUInt32LittleEndian();
    descriptor.Reserved6 = data.ReadUInt32LittleEndian();

    // Two parallel 71-entry offset tables follow the fixed header
    descriptor.FileGroupOffsets = new uint[71];
    for (int index = 0; index < descriptor.FileGroupOffsets.Length; index++)
    {
        descriptor.FileGroupOffsets[index] = data.ReadUInt32LittleEndian();
    }

    descriptor.ComponentOffsets = new uint[71];
    for (int index = 0; index < descriptor.ComponentOffsets.Length; index++)
    {
        descriptor.ComponentOffsets[index] = data.ReadUInt32LittleEndian();
    }

    // Trailing table offsets and reserved values
    descriptor.SetupTypesOffset = data.ReadUInt32LittleEndian();
    descriptor.SetupTableOffset = data.ReadUInt32LittleEndian();
    descriptor.Reserved7 = data.ReadUInt32LittleEndian();
    descriptor.Reserved8 = data.ReadUInt32LittleEndian();

    return descriptor;
}
/// <summary>
@@ -614,68 +524,206 @@ namespace SabreTools.Serialization.Deserializers
}
/// <summary>
/// Parse a Stream into a file descriptor
/// Parse a Stream into a FileDescriptor
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled file descriptor on success, null on error</returns>
/// <returns>Filled FileDescriptor on success, null on error</returns>
public static FileDescriptor ParseFileDescriptor(Stream data, int majorVersion, uint descriptorOffset)
{
var fileDescriptor = new FileDescriptor();
var obj = new FileDescriptor();
// Read the descriptor based on version
if (majorVersion <= 5)
{
fileDescriptor.Volume = 0xFFFF; // Set by the header index
fileDescriptor.NameOffset = data.ReadUInt32();
fileDescriptor.DirectoryIndex = data.ReadUInt32();
fileDescriptor.Flags = (FileFlags)data.ReadUInt16();
fileDescriptor.ExpandedSize = data.ReadUInt32();
fileDescriptor.CompressedSize = data.ReadUInt32();
obj.Volume = 0xFFFF; // Set by the header index
obj.NameOffset = data.ReadUInt32LittleEndian();
obj.DirectoryIndex = data.ReadUInt32LittleEndian();
obj.Flags = (FileFlags)data.ReadUInt16LittleEndian();
obj.ExpandedSize = data.ReadUInt32LittleEndian();
obj.CompressedSize = data.ReadUInt32LittleEndian();
_ = data.ReadBytes(0x14); // Skip 0x14 bytes, unknown data?
fileDescriptor.DataOffset = data.ReadUInt32();
obj.DataOffset = data.ReadUInt32LittleEndian();
if (majorVersion == 5)
fileDescriptor.MD5 = data.ReadBytes(0x10);
obj.MD5 = data.ReadBytes(0x10);
}
else
{
fileDescriptor.Flags = (FileFlags)data.ReadUInt16();
fileDescriptor.ExpandedSize = data.ReadUInt64();
fileDescriptor.CompressedSize = data.ReadUInt64();
fileDescriptor.DataOffset = data.ReadUInt64();
fileDescriptor.MD5 = data.ReadBytes(0x10);
obj.Flags = (FileFlags)data.ReadUInt16LittleEndian();
obj.ExpandedSize = data.ReadUInt64LittleEndian();
obj.CompressedSize = data.ReadUInt64LittleEndian();
obj.DataOffset = data.ReadUInt64LittleEndian();
obj.MD5 = data.ReadBytes(0x10);
_ = data.ReadBytes(0x10); // Skip 0x10 bytes, unknown data?
fileDescriptor.NameOffset = data.ReadUInt32();
fileDescriptor.DirectoryIndex = data.ReadUInt16();
obj.NameOffset = data.ReadUInt32LittleEndian();
obj.DirectoryIndex = data.ReadUInt16LittleEndian();
_ = data.ReadBytes(0x0C); // Skip 0x0C bytes, unknown data?
fileDescriptor.LinkPrevious = data.ReadUInt32();
fileDescriptor.LinkNext = data.ReadUInt32();
fileDescriptor.LinkFlags = (LinkFlags)data.ReadByteValue();
fileDescriptor.Volume = data.ReadUInt16();
obj.LinkPrevious = data.ReadUInt32LittleEndian();
obj.LinkNext = data.ReadUInt32LittleEndian();
obj.LinkFlags = (LinkFlags)data.ReadByteValue();
obj.Volume = data.ReadUInt16LittleEndian();
}
// Cache the current position
long currentPosition = data.Position;
// Read the name, if possible
if (fileDescriptor.NameOffset != 0)
if (obj.NameOffset != 0)
{
// Seek to the name
data.Seek(fileDescriptor.NameOffset + descriptorOffset, SeekOrigin.Begin);
data.Seek(obj.NameOffset + descriptorOffset, SeekOrigin.Begin);
// Read the string
if (majorVersion >= 17)
fileDescriptor.Name = data.ReadNullTerminatedUnicodeString();
obj.Name = data.ReadNullTerminatedUnicodeString();
else
fileDescriptor.Name = data.ReadNullTerminatedAnsiString();
obj.Name = data.ReadNullTerminatedAnsiString();
}
// Seek back to the correct offset
data.Seek(currentPosition, SeekOrigin.Begin);
return fileDescriptor;
return obj;
}
/// <summary>
/// Parse a Stream into a FileGroup
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled FileGroup on success, null on error</returns>
public static FileGroup ParseFileGroup(Stream data, int majorVersion, uint descriptorOffset)
{
    var fileGroup = new FileGroup();

    fileGroup.NameOffset = data.ReadUInt32LittleEndian();
    fileGroup.ExpandedSize = data.ReadUInt32LittleEndian();
    fileGroup.CompressedSize = data.ReadUInt32LittleEndian();
    fileGroup.Attributes = (FileGroupAttributes)data.ReadUInt16LittleEndian();

    // TODO: Figure out what data lives in this area for V5 and below
    if (majorVersion <= 5)
        data.Seek(0x36, SeekOrigin.Current);

    fileGroup.FirstFile = data.ReadUInt32LittleEndian();
    fileGroup.LastFile = data.ReadUInt32LittleEndian();
    fileGroup.UnknownStringOffset = data.ReadUInt32LittleEndian();
    fileGroup.OperatingSystemOffset = data.ReadUInt32LittleEndian();
    fileGroup.LanguageOffset = data.ReadUInt32LittleEndian();
    fileGroup.HTTPLocationOffset = data.ReadUInt32LittleEndian();
    fileGroup.FTPLocationOffset = data.ReadUInt32LittleEndian();
    fileGroup.MiscOffset = data.ReadUInt32LittleEndian();
    fileGroup.TargetDirectoryOffset = data.ReadUInt32LittleEndian();
    fileGroup.OverwriteFlags = (FileGroupFlags)data.ReadUInt32LittleEndian();

    fileGroup.Reserved = new uint[4];
    for (int index = 0; index < fileGroup.Reserved.Length; index++)
    {
        fileGroup.Reserved[index] = data.ReadUInt32LittleEndian();
    }

    // Remember where the fixed-size record ended before chasing the name offset
    long savedPosition = data.Position;

    // Read the name, if possible; a zero offset means no name is stored
    if (fileGroup.NameOffset != 0)
    {
        // Seek to the name
        data.Seek(fileGroup.NameOffset + descriptorOffset, SeekOrigin.Begin);

        // V17+ cabinets store strings as Unicode; older ones use ANSI
        if (majorVersion >= 17)
            fileGroup.Name = data.ReadNullTerminatedUnicodeString();
        else
            fileGroup.Name = data.ReadNullTerminatedAnsiString();
    }

    // Restore the position so the caller can continue reading sequentially
    data.Seek(savedPosition, SeekOrigin.Begin);

    return fileGroup;
}
/// <summary>
/// Parse a Stream into an OffsetList
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled OffsetList on success, null on error</returns>
public static OffsetList ParseOffsetList(Stream data, int majorVersion, uint descriptorOffset)
{
    var obj = new OffsetList();

    obj.NameOffset = data.ReadUInt32LittleEndian();
    obj.DescriptorOffset = data.ReadUInt32LittleEndian();
    obj.NextOffset = data.ReadUInt32LittleEndian();

    // Cache the current offset
    long currentOffset = data.Position;

    // Read the name, if possible. A zero offset indicates no name is stored;
    // guard against it like the other parsers (file descriptors, file groups)
    // do, instead of seeking to descriptorOffset + 0 and reading garbage.
    if (obj.NameOffset != 0)
    {
        // Seek to the name offset
        data.Seek(obj.NameOffset + descriptorOffset, SeekOrigin.Begin);

        // Read the string (Unicode for V17+, ANSI for older versions)
        if (majorVersion >= 17)
            obj.Name = data.ReadNullTerminatedUnicodeString();
        else
            obj.Name = data.ReadNullTerminatedAnsiString();
    }

    // Seek back to the correct offset
    data.Seek(currentOffset, SeekOrigin.Begin);

    return obj;
}
/// <summary>
/// Parse a Stream into a VolumeHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <returns>Filled VolumeHeader on success, null on error</returns>
public static VolumeHeader ParseVolumeHeader(Stream data, int majorVersion)
{
    var header = new VolumeHeader();

    // The layout differs by version: V5 and earlier use 32-bit fields only,
    // later versions carry an additional high DWORD for each 64-bit value
    if (majorVersion <= 5)
    {
        header.DataOffset = data.ReadUInt32LittleEndian();
        _ = data.ReadBytes(0x04); // Skip 0x04 bytes, unknown data?
        header.FirstFileIndex = data.ReadUInt32LittleEndian();
        header.LastFileIndex = data.ReadUInt32LittleEndian();
        header.FirstFileOffset = data.ReadUInt32LittleEndian();
        header.FirstFileSizeExpanded = data.ReadUInt32LittleEndian();
        header.FirstFileSizeCompressed = data.ReadUInt32LittleEndian();
        header.LastFileOffset = data.ReadUInt32LittleEndian();
        header.LastFileSizeExpanded = data.ReadUInt32LittleEndian();
        header.LastFileSizeCompressed = data.ReadUInt32LittleEndian();
    }
    else
    {
        header.DataOffset = data.ReadUInt32LittleEndian();
        header.DataOffsetHigh = data.ReadUInt32LittleEndian();
        header.FirstFileIndex = data.ReadUInt32LittleEndian();
        header.LastFileIndex = data.ReadUInt32LittleEndian();
        header.FirstFileOffset = data.ReadUInt32LittleEndian();
        header.FirstFileOffsetHigh = data.ReadUInt32LittleEndian();
        header.FirstFileSizeExpanded = data.ReadUInt32LittleEndian();
        header.FirstFileSizeExpandedHigh = data.ReadUInt32LittleEndian();
        header.FirstFileSizeCompressed = data.ReadUInt32LittleEndian();
        header.FirstFileSizeCompressedHigh = data.ReadUInt32LittleEndian();
        header.LastFileOffset = data.ReadUInt32LittleEndian();
        header.LastFileOffsetHigh = data.ReadUInt32LittleEndian();
        header.LastFileSizeExpanded = data.ReadUInt32LittleEndian();
        header.LastFileSizeExpandedHigh = data.ReadUInt32LittleEndian();
        header.LastFileSizeCompressed = data.ReadUInt32LittleEndian();
        header.LastFileSizeCompressedHigh = data.ReadUInt32LittleEndian();
    }

    return header;
}
#region Helpers

View File

@@ -0,0 +1,121 @@
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.LZ;
using static SabreTools.Models.LZ.Constants;
namespace SabreTools.Serialization.Deserializers
{
public class LZKWAJ : BaseBinaryDeserializer<KWAJFile>
{
    /// <inheritdoc/>
    /// <remarks>
    /// Reads a KWAJ-compressed file: fixed header first, then the optional
    /// extended header fields gated by <c>HeaderFlags</c>. Returns null on
    /// unreadable input, bad magic, or any parse exception.
    /// </remarks>
    public override KWAJFile? Deserialize(Stream? data)
    {
        // If the data is invalid
        if (data == null || !data.CanRead)
            return null;

        try
        {
            // Create a new file to fill
            var file = new KWAJFile();

            #region File Header

            // Try to parse the header
            var header = ParseHeader(data);
            if (header == null)
                return null;

            // Set the header
            file.Header = header;

            #endregion

            #region Extended Header

            // Optional fields follow the fixed header, one per set flag bit,
            // in the order they are declared below
            if (header.HeaderFlags != 0)
            {
                var extensions = new KWAJHeaderExtensions();
#if NET20 || NET35
                if ((header.HeaderFlags & KWAJHeaderFlags.HasDecompressedLength) != 0)
                    extensions.DecompressedLength = data.ReadUInt32LittleEndian();
                if ((header.HeaderFlags & KWAJHeaderFlags.HasUnknownFlag) != 0)
                    extensions.UnknownPurpose = data.ReadUInt16LittleEndian();
                if ((header.HeaderFlags & KWAJHeaderFlags.HasPrefixedData) != 0)
                {
                    extensions.UnknownDataLength = data.ReadUInt16LittleEndian();
                    extensions.UnknownData = data.ReadBytes((int)extensions.UnknownDataLength);
                }
                if ((header.HeaderFlags & KWAJHeaderFlags.HasFileName) != 0)
                    extensions.FileName = data.ReadNullTerminatedAnsiString();
                if ((header.HeaderFlags & KWAJHeaderFlags.HasFileExtension) != 0)
                    extensions.FileExtension = data.ReadNullTerminatedAnsiString();
                // NOTE(review): HasPrefixedData is tested a second time here to gate
                // the arbitrary-text fields; presumably a distinct "has arbitrary
                // text" flag was intended -- TODO confirm against the KWAJ format spec
                if ((header.HeaderFlags & KWAJHeaderFlags.HasPrefixedData) != 0)
                {
                    extensions.ArbitraryTextLength = data.ReadUInt16LittleEndian();
                    extensions.ArbitraryText = data.ReadBytes((int)extensions.ArbitraryTextLength);
                }
#else
                if (header.HeaderFlags.HasFlag(KWAJHeaderFlags.HasDecompressedLength))
                    extensions.DecompressedLength = data.ReadUInt32LittleEndian();
                if (header.HeaderFlags.HasFlag(KWAJHeaderFlags.HasUnknownFlag))
                    extensions.UnknownPurpose = data.ReadUInt16LittleEndian();
                if (header.HeaderFlags.HasFlag(KWAJHeaderFlags.HasPrefixedData))
                {
                    extensions.UnknownDataLength = data.ReadUInt16LittleEndian();
                    extensions.UnknownData = data.ReadBytes((int)extensions.UnknownDataLength);
                }
                if (header.HeaderFlags.HasFlag(KWAJHeaderFlags.HasFileName))
                    extensions.FileName = data.ReadNullTerminatedAnsiString();
                if (header.HeaderFlags.HasFlag(KWAJHeaderFlags.HasFileExtension))
                    extensions.FileExtension = data.ReadNullTerminatedAnsiString();
                // NOTE(review): HasPrefixedData is tested a second time here to gate
                // the arbitrary-text fields; presumably a distinct "has arbitrary
                // text" flag was intended -- TODO confirm against the KWAJ format spec
                if (header.HeaderFlags.HasFlag(KWAJHeaderFlags.HasPrefixedData))
                {
                    extensions.ArbitraryTextLength = data.ReadUInt16LittleEndian();
                    extensions.ArbitraryText = data.ReadBytes((int)extensions.ArbitraryTextLength);
                }
#endif
                file.HeaderExtensions = extensions;
            }

            #endregion

            return file;
        }
        catch
        {
            // Ignore the actual error
            return null;
        }
    }

    /// <summary>
    /// Parse a Stream into a header
    /// </summary>
    /// <param name="data">Stream to parse</param>
    /// <returns>Filled header on success, null on error</returns>
    private static KWAJHeader? ParseHeader(Stream data)
    {
        var header = new KWAJHeader();

        // The 8-byte magic must match the KWAJ signature exactly
        header.Magic = data.ReadBytes(8);
        if (Encoding.ASCII.GetString(header.Magic) != Encoding.ASCII.GetString(KWAJSignatureBytes))
            return null;

        // Reject compression types beyond the known maximum (MSZIP)
        header.CompressionType = (KWAJCompressionType)data.ReadUInt16LittleEndian();
        if (header.CompressionType > KWAJCompressionType.MSZIP)
            return null;

        header.DataOffset = data.ReadUInt16LittleEndian();
        header.HeaderFlags = (KWAJHeaderFlags)data.ReadUInt16LittleEndian();

        return header;
    }
}
}

View File

@@ -0,0 +1,65 @@
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.LZ;
using static SabreTools.Models.LZ.Constants;
namespace SabreTools.Serialization.Deserializers
{
public class LZQBasic : BaseBinaryDeserializer<QBasicFile>
{
    /// <inheritdoc/>
    /// <remarks>
    /// Reads a QBasic-variant LZ-compressed file, which consists of only a
    /// fixed header. Returns null on unreadable input, bad magic, or any
    /// parse exception.
    /// </remarks>
    public override QBasicFile? Deserialize(Stream? data)
    {
        // If the data is invalid
        if (data == null || !data.CanRead)
            return null;

        try
        {
            // Create a new file to fill
            var file = new QBasicFile();

            #region File Header

            // Try to parse the header
            var header = ParseHeader(data);
            if (header == null)
                return null;

            // Set the header
            file.Header = header;

            #endregion

            return file;
        }
        catch
        {
            // Ignore the actual error
            return null;
        }
    }

    /// <summary>
    /// Parse a Stream into a header
    /// </summary>
    /// <param name="data">Stream to parse</param>
    /// <returns>Filled header on success, null on error</returns>
    private static QBasicHeader? ParseHeader(Stream data)
    {
        var header = new QBasicHeader();

        // The 8-byte magic must match the QBasic signature exactly
        header.Magic = data.ReadBytes(8);
        if (Encoding.ASCII.GetString(header.Magic) != Encoding.ASCII.GetString(QBasicSignatureBytes))
            return null;

        header.RealLength = data.ReadUInt32LittleEndian();

        return header;
    }
}
}

View File

@@ -0,0 +1,70 @@
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.LZ;
using static SabreTools.Models.LZ.Constants;
namespace SabreTools.Serialization.Deserializers
{
public class LZSZDD : BaseBinaryDeserializer<SZDDFile>
{
    /// <inheritdoc/>
    /// <remarks>
    /// Reads an SZDD (EXPAND-style) compressed file, which consists of only
    /// a fixed header. Returns null on unreadable input, bad magic, an
    /// unsupported compression mode, or any parse exception.
    /// </remarks>
    public override SZDDFile? Deserialize(Stream? data)
    {
        // If the data is invalid
        if (data == null || !data.CanRead)
            return null;

        try
        {
            // Create a new file to fill
            var file = new SZDDFile();

            #region File Header

            // Try to parse the header
            var header = ParseHeader(data);
            if (header == null)
                return null;

            // Set the header
            file.Header = header;

            #endregion

            return file;
        }
        catch
        {
            // Ignore the actual error
            return null;
        }
    }

    /// <summary>
    /// Parse a Stream into a header
    /// </summary>
    /// <param name="data">Stream to parse</param>
    /// <returns>Filled header on success, null on error</returns>
    private static SZDDHeader? ParseHeader(Stream data)
    {
        var header = new SZDDHeader();

        // The 8-byte magic must match the SZDD signature exactly
        header.Magic = data.ReadBytes(8);
        if (Encoding.ASCII.GetString(header.Magic) != Encoding.ASCII.GetString(SZDDSignatureBytes))
            return null;

        // Only compression mode 'A' is supported by this format
        header.CompressionType = (ExpandCompressionType)data.ReadByteValue();
        if (header.CompressionType != ExpandCompressionType.A)
            return null;

        // The last character of the original (uncompressed) filename
        header.LastChar = (char)data.ReadByteValue();
        header.RealLength = data.ReadUInt32LittleEndian();

        return header;
    }
}
}

View File

@@ -40,8 +40,8 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the executable header
data.Seek(initialOffset + stub.Header.NewExeHeaderAddr, SeekOrigin.Begin);
var informationBlock = data.ReadType<InformationBlock>();
if (informationBlock?.Signature != LESignatureString && informationBlock?.Signature != LXSignatureString)
var informationBlock = ParseInformationBlock(data);
if (informationBlock.Signature != LESignatureString && informationBlock.Signature != LXSignatureString)
return null;
// Set the executable header
@@ -64,11 +64,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the object table
for (int i = 0; i < executable.ObjectTable.Length; i++)
{
var entry = data.ReadType<ObjectTableEntry>();
if (entry == null)
return null;
executable.ObjectTable[i] = entry;
executable.ObjectTable[i] = ParseObjectTableEntry(data);
}
}
@@ -89,11 +85,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the object page map
for (int i = 0; i < executable.ObjectPageMap.Length; i++)
{
var entry = data.ReadType<ObjectPageMapEntry>();
if (entry == null)
return null;
executable.ObjectPageMap[i] = entry;
executable.ObjectPageMap[i] = ParseObjectPageMapEntry(data);
}
}
@@ -129,11 +121,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the resource table
for (int i = 0; i < executable.ResourceTable.Length; i++)
{
var entry = data.ReadType<ResourceTableEntry>();
if (entry == null)
return null;
executable.ResourceTable[i] = entry;
executable.ResourceTable[i] = ParseResourceTableEntry(data);
}
}
@@ -213,11 +201,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the module format directives table
for (int i = 0; i < executable.ModuleFormatDirectivesTable.Length; i++)
{
var entry = data.ReadType<ModuleFormatDirectivesTableEntry>();
if (entry == null)
return null;
executable.ModuleFormatDirectivesTable[i] = entry;
executable.ModuleFormatDirectivesTable[i] = ParseModuleFormatDirectivesTableEntry(data);
}
}
@@ -246,11 +230,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the fix-up page table
for (int i = 0; i < executable.FixupPageTable.Length; i++)
{
var entry = data.ReadType<FixupPageTableEntry>();
if (entry == null)
return null;
executable.FixupPageTable[i] = entry;
executable.FixupPageTable[i] = ParseFixupPageTableEntry(data);
}
}
@@ -296,11 +276,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the imported module name table
for (int i = 0; i < executable.ImportModuleNameTable.Length; i++)
{
var entry = ParseImportModuleNameTableEntry(data);
if (entry == null)
return null;
executable.ImportModuleNameTable[i] = entry;
executable.ImportModuleNameTable[i] = ParseImportModuleNameTableEntry(data);
}
}
@@ -327,9 +303,6 @@ namespace SabreTools.Serialization.Deserializers
while (data.Position < offset + tableSize)
{
var entry = ParseImportModuleProcedureNameTableEntry(data);
if (entry == null)
return null;
importModuleProcedureNameTable.Add(entry);
}
@@ -354,11 +327,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the per-page checksum name table
for (int i = 0; i < executable.PerPageChecksumTable.Length; i++)
{
var entry = data.ReadType<PerPageChecksumTableEntry>();
if (entry == null)
return null;
executable.PerPageChecksumTable[i] = entry;
executable.PerPageChecksumTable[i] = ParsePerPageChecksumTableEntry(data);
}
}
@@ -404,7 +373,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the debug information
var debugInformation = ParseDebugInformation(data, informationBlock.DebugInformationLength);
if (debugInformation == null)
if (debugInformation.Signature != DebugInformationSignatureString)
return null;
// Set the debug information
@@ -423,276 +392,288 @@ namespace SabreTools.Serialization.Deserializers
}
/// <summary>
/// Parse a Stream into a resident names table entry
/// Parse a Stream into a DebugInformation
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled resident names table entry on success, null on error</returns>
public static ResidentNamesTableEntry ParseResidentNamesTableEntry(Stream data)
/// <param name="size">Total size of the debug information</param>
/// <returns>Filled DebugInformation on success, null on error</returns>
public static DebugInformation ParseDebugInformation(Stream data, long size)
{
var entry = new ResidentNamesTableEntry();
var obj = new DebugInformation();
entry.Length = data.ReadByteValue();
if (entry.Length > 0)
{
byte[] name = data.ReadBytes(entry.Length);
entry.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
}
byte[] signature = data.ReadBytes(3);
obj.Signature = Encoding.ASCII.GetString(signature);
obj.FormatType = (DebugFormatType)data.ReadByteValue();
obj.DebuggerData = data.ReadBytes((int)(size - 4));
entry.OrdinalNumber = data.ReadUInt16();
return entry;
return obj;
}
/// <summary>
/// Parse a Stream into an entry table bundle
/// Parse a Stream into an EntryTableBundle
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled entry table bundle on success, null on error</returns>
/// <returns>Filled EntryTableBundle on success, null on error</returns>
public static EntryTableBundle? ParseEntryTableBundle(Stream data)
{
var bundle = new EntryTableBundle();
var obj = new EntryTableBundle();
bundle.Entries = data.ReadByteValue();
if (bundle.Entries == 0)
return bundle;
obj.Entries = data.ReadByteValue();
if (obj.Entries == 0)
return obj;
bundle.BundleType = (BundleType)data.ReadByteValue();
bundle.TableEntries = new EntryTableEntry[bundle.Entries];
for (int i = 0; i < bundle.Entries; i++)
obj.BundleType = (BundleType)data.ReadByteValue();
obj.TableEntries = new EntryTableEntry[obj.Entries];
for (int i = 0; i < obj.Entries; i++)
{
var entry = new EntryTableEntry();
switch (bundle.BundleType & ~BundleType.ParameterTypingInformationPresent)
switch (obj.BundleType & ~BundleType.ParameterTypingInformationPresent)
{
case BundleType.UnusedEntry:
// Empty entry with no information
break;
case BundleType.SixteenBitEntry:
entry.SixteenBitObjectNumber = data.ReadUInt16();
entry.SixteenBitObjectNumber = data.ReadUInt16LittleEndian();
entry.SixteenBitEntryFlags = (EntryFlags)data.ReadByteValue();
entry.SixteenBitOffset = data.ReadUInt16();
entry.SixteenBitOffset = data.ReadUInt16LittleEndian();
break;
case BundleType.TwoEightySixCallGateEntry:
entry.TwoEightySixObjectNumber = data.ReadUInt16();
entry.TwoEightySixObjectNumber = data.ReadUInt16LittleEndian();
entry.TwoEightySixEntryFlags = (EntryFlags)data.ReadByteValue();
entry.TwoEightySixOffset = data.ReadUInt16();
entry.TwoEightySixCallgate = data.ReadUInt16();
entry.TwoEightySixOffset = data.ReadUInt16LittleEndian();
entry.TwoEightySixCallgate = data.ReadUInt16LittleEndian();
break;
case BundleType.ThirtyTwoBitEntry:
entry.ThirtyTwoBitObjectNumber = data.ReadUInt16();
entry.ThirtyTwoBitObjectNumber = data.ReadUInt16LittleEndian();
entry.ThirtyTwoBitEntryFlags = (EntryFlags)data.ReadByteValue();
entry.ThirtyTwoBitOffset = data.ReadUInt32();
entry.ThirtyTwoBitOffset = data.ReadUInt32LittleEndian();
break;
case BundleType.ForwarderEntry:
entry.ForwarderReserved = data.ReadUInt16();
entry.ForwarderReserved = data.ReadUInt16LittleEndian();
entry.ForwarderFlags = (ForwarderFlags)data.ReadByteValue();
entry.ForwarderModuleOrdinalNumber = data.ReadUInt16();
entry.ProcedureNameOffset = data.ReadUInt32();
entry.ImportOrdinalNumber = data.ReadUInt32();
entry.ForwarderModuleOrdinalNumber = data.ReadUInt16LittleEndian();
entry.ProcedureNameOffset = data.ReadUInt32LittleEndian();
entry.ImportOrdinalNumber = data.ReadUInt32LittleEndian();
break;
default:
return null;
}
bundle.TableEntries[i] = entry;
obj.TableEntries[i] = entry;
}
return bundle;
return obj;
}
/// <summary>
/// Parse a Stream into a fix-up record table entry
/// Parse a Stream into an FixupPageTableEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled fix-up record table entry on success, null on error</returns>
/// <returns>Filled FixupPageTableEntry on success, null on error</returns>
public static FixupPageTableEntry ParseFixupPageTableEntry(Stream data)
{
    // A fix-up page table entry is just a single little-endian 32-bit offset
    var entry = new FixupPageTableEntry();
    entry.Offset = data.ReadUInt32LittleEndian();
    return entry;
}
/// <summary>
/// Parse a Stream into a FixupRecordTableEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled FixupRecordTableEntry on success, null on error</returns>
public static FixupRecordTableEntry? ParseFixupRecordTableEntry(Stream data)
{
var entry = new FixupRecordTableEntry();
var obj = new FixupRecordTableEntry();
entry.SourceType = (FixupRecordSourceType)data.ReadByteValue();
entry.TargetFlags = (FixupRecordTargetFlags)data.ReadByteValue();
obj.SourceType = (FixupRecordSourceType)data.ReadByteValue();
obj.TargetFlags = (FixupRecordTargetFlags)data.ReadByteValue();
// Source list flag
#if NET20 || NET35
if ((entry.SourceType & FixupRecordSourceType.SourceListFlag) != 0)
if ((obj.SourceType & FixupRecordSourceType.SourceListFlag) != 0)
#else
if (entry.SourceType.HasFlag(FixupRecordSourceType.SourceListFlag))
if (obj.SourceType.HasFlag(FixupRecordSourceType.SourceListFlag))
#endif
entry.SourceOffsetListCount = data.ReadByteValue();
obj.SourceOffsetListCount = data.ReadByteValue();
else
entry.SourceOffset = data.ReadUInt16();
obj.SourceOffset = data.ReadUInt16LittleEndian();
// OBJECT / TRGOFF
#if NET20 || NET35
if ((entry.TargetFlags & FixupRecordTargetFlags.InternalReference) != 0)
if ((obj.TargetFlags & FixupRecordTargetFlags.InternalReference) != 0)
#else
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.InternalReference))
if (obj.TargetFlags.HasFlag(FixupRecordTargetFlags.InternalReference))
#endif
{
// 16-bit Object Number/Module Ordinal Flag
#if NET20 || NET35
if ((entry.TargetFlags & FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag) != 0)
if ((obj.TargetFlags & FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag) != 0)
#else
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag))
if (obj.TargetFlags.HasFlag(FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag))
#endif
entry.TargetObjectNumberWORD = data.ReadUInt16();
obj.TargetObjectNumberWORD = data.ReadUInt16LittleEndian();
else
entry.TargetObjectNumberByte = data.ReadByteValue();
obj.TargetObjectNumberByte = data.ReadByteValue();
// 16-bit Selector fixup
#if NET20 || NET35
if ((entry.SourceType & FixupRecordSourceType.SixteenBitSelectorFixup) == 0)
if ((obj.SourceType & FixupRecordSourceType.SixteenBitSelectorFixup) == 0)
#else
if (!entry.SourceType.HasFlag(FixupRecordSourceType.SixteenBitSelectorFixup))
if (!obj.SourceType.HasFlag(FixupRecordSourceType.SixteenBitSelectorFixup))
#endif
{
// 32-bit Target Offset Flag
#if NET20 || NET35
if ((entry.TargetFlags & FixupRecordTargetFlags.ThirtyTwoBitTargetOffsetFlag) != 0)
if ((obj.TargetFlags & FixupRecordTargetFlags.ThirtyTwoBitTargetOffsetFlag) != 0)
#else
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitTargetOffsetFlag))
if (obj.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitTargetOffsetFlag))
#endif
entry.TargetOffsetDWORD = data.ReadUInt32();
obj.TargetOffsetDWORD = data.ReadUInt32LittleEndian();
else
entry.TargetOffsetWORD = data.ReadUInt16();
obj.TargetOffsetWORD = data.ReadUInt16LittleEndian();
}
}
// MOD ORD# / IMPORT ORD / ADDITIVE
#if NET20 || NET35
else if ((entry.TargetFlags & FixupRecordTargetFlags.ImportedReferenceByOrdinal) != 0)
else if ((obj.TargetFlags & FixupRecordTargetFlags.ImportedReferenceByOrdinal) != 0)
#else
else if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ImportedReferenceByOrdinal))
else if (obj.TargetFlags.HasFlag(FixupRecordTargetFlags.ImportedReferenceByOrdinal))
#endif
{
// 16-bit Object Number/Module Ordinal Flag
#if NET20 || NET35
if ((entry.TargetFlags & FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag) != 0)
if ((obj.TargetFlags & FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag) != 0)
#else
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag))
if (obj.TargetFlags.HasFlag(FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag))
#endif
entry.OrdinalIndexImportModuleNameTableWORD = data.ReadUInt16();
obj.OrdinalIndexImportModuleNameTableWORD = data.ReadUInt16LittleEndian();
else
entry.OrdinalIndexImportModuleNameTableByte = data.ReadByteValue();
obj.OrdinalIndexImportModuleNameTableByte = data.ReadByteValue();
// 8-bit Ordinal Flag & 32-bit Target Offset Flag
#if NET20 || NET35
if ((entry.TargetFlags & FixupRecordTargetFlags.EightBitOrdinalFlag) != 0)
if ((obj.TargetFlags & FixupRecordTargetFlags.EightBitOrdinalFlag) != 0)
#else
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.EightBitOrdinalFlag))
if (obj.TargetFlags.HasFlag(FixupRecordTargetFlags.EightBitOrdinalFlag))
#endif
entry.ImportedOrdinalNumberByte = data.ReadByteValue();
obj.ImportedOrdinalNumberByte = data.ReadByteValue();
#if NET20 || NET35
else if ((entry.TargetFlags & FixupRecordTargetFlags.ThirtyTwoBitTargetOffsetFlag) != 0)
else if ((obj.TargetFlags & FixupRecordTargetFlags.ThirtyTwoBitTargetOffsetFlag) != 0)
#else
else if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitTargetOffsetFlag))
else if (obj.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitTargetOffsetFlag))
#endif
entry.ImportedOrdinalNumberDWORD = data.ReadUInt32();
obj.ImportedOrdinalNumberDWORD = data.ReadUInt32LittleEndian();
else
entry.ImportedOrdinalNumberWORD = data.ReadUInt16();
obj.ImportedOrdinalNumberWORD = data.ReadUInt16LittleEndian();
// Additive Fixup Flag
#if NET20 || NET35
if ((entry.TargetFlags & FixupRecordTargetFlags.AdditiveFixupFlag) != 0)
if ((obj.TargetFlags & FixupRecordTargetFlags.AdditiveFixupFlag) != 0)
#else
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.AdditiveFixupFlag))
if (obj.TargetFlags.HasFlag(FixupRecordTargetFlags.AdditiveFixupFlag))
#endif
{
// 32-bit Additive Flag
#if NET20 || NET35
if ((entry.TargetFlags & FixupRecordTargetFlags.ThirtyTwoBitAdditiveFixupFlag) != 0)
if ((obj.TargetFlags & FixupRecordTargetFlags.ThirtyTwoBitAdditiveFixupFlag) != 0)
#else
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitAdditiveFixupFlag))
if (obj.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitAdditiveFixupFlag))
#endif
entry.AdditiveFixupValueDWORD = data.ReadUInt32();
obj.AdditiveFixupValueDWORD = data.ReadUInt32LittleEndian();
else
entry.AdditiveFixupValueWORD = data.ReadUInt16();
obj.AdditiveFixupValueWORD = data.ReadUInt16LittleEndian();
}
}
// MOD ORD# / PROCEDURE NAME OFFSET / ADDITIVE
#if NET20 || NET35
else if ((entry.TargetFlags & FixupRecordTargetFlags.ImportedReferenceByName) != 0)
else if ((obj.TargetFlags & FixupRecordTargetFlags.ImportedReferenceByName) != 0)
#else
else if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ImportedReferenceByName))
else if (obj.TargetFlags.HasFlag(FixupRecordTargetFlags.ImportedReferenceByName))
#endif
{
// 16-bit Object Number/Module Ordinal Flag
#if NET20 || NET35
if ((entry.TargetFlags & FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag) != 0)
if ((obj.TargetFlags & FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag) != 0)
#else
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag))
if (obj.TargetFlags.HasFlag(FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag))
#endif
entry.OrdinalIndexImportModuleNameTableWORD = data.ReadUInt16();
obj.OrdinalIndexImportModuleNameTableWORD = data.ReadUInt16LittleEndian();
else
entry.OrdinalIndexImportModuleNameTableByte = data.ReadByteValue();
obj.OrdinalIndexImportModuleNameTableByte = data.ReadByteValue();
// 32-bit Target Offset Flag
#if NET20 || NET35
if ((entry.TargetFlags & FixupRecordTargetFlags.ThirtyTwoBitTargetOffsetFlag) != 0)
if ((obj.TargetFlags & FixupRecordTargetFlags.ThirtyTwoBitTargetOffsetFlag) != 0)
#else
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitTargetOffsetFlag))
if (obj.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitTargetOffsetFlag))
#endif
entry.OffsetImportProcedureNameTableDWORD = data.ReadUInt32();
obj.OffsetImportProcedureNameTableDWORD = data.ReadUInt32LittleEndian();
else
entry.OffsetImportProcedureNameTableWORD = data.ReadUInt16();
obj.OffsetImportProcedureNameTableWORD = data.ReadUInt16LittleEndian();
// Additive Fixup Flag
#if NET20 || NET35
if ((entry.TargetFlags & FixupRecordTargetFlags.AdditiveFixupFlag) != 0)
if ((obj.TargetFlags & FixupRecordTargetFlags.AdditiveFixupFlag) != 0)
#else
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.AdditiveFixupFlag))
if (obj.TargetFlags.HasFlag(FixupRecordTargetFlags.AdditiveFixupFlag))
#endif
{
// 32-bit Additive Flag
#if NET20 || NET35
if ((entry.TargetFlags & FixupRecordTargetFlags.ThirtyTwoBitAdditiveFixupFlag) != 0)
if ((obj.TargetFlags & FixupRecordTargetFlags.ThirtyTwoBitAdditiveFixupFlag) != 0)
#else
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitAdditiveFixupFlag))
if (obj.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitAdditiveFixupFlag))
#endif
entry.AdditiveFixupValueDWORD = data.ReadUInt32();
obj.AdditiveFixupValueDWORD = data.ReadUInt32LittleEndian();
else
entry.AdditiveFixupValueWORD = data.ReadUInt16();
obj.AdditiveFixupValueWORD = data.ReadUInt16LittleEndian();
}
}
// ORD # / ADDITIVE
#if NET20 || NET35
else if ((entry.TargetFlags & FixupRecordTargetFlags.InternalReferenceViaEntryTable) != 0)
else if ((obj.TargetFlags & FixupRecordTargetFlags.InternalReferenceViaEntryTable) != 0)
#else
else if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.InternalReferenceViaEntryTable))
else if (obj.TargetFlags.HasFlag(FixupRecordTargetFlags.InternalReferenceViaEntryTable))
#endif
{
// 16-bit Object Number/Module Ordinal Flag
#if NET20 || NET35
if ((entry.TargetFlags & FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag) != 0)
if ((obj.TargetFlags & FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag) != 0)
#else
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag))
if (obj.TargetFlags.HasFlag(FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag))
#endif
entry.OrdinalIndexImportModuleNameTableWORD = data.ReadUInt16();
obj.OrdinalIndexImportModuleNameTableWORD = data.ReadUInt16LittleEndian();
else
entry.OrdinalIndexImportModuleNameTableByte = data.ReadByteValue();
obj.OrdinalIndexImportModuleNameTableByte = data.ReadByteValue();
// Additive Fixup Flag
#if NET20 || NET35
if ((entry.TargetFlags & FixupRecordTargetFlags.AdditiveFixupFlag) != 0)
if ((obj.TargetFlags & FixupRecordTargetFlags.AdditiveFixupFlag) != 0)
#else
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.AdditiveFixupFlag))
if (obj.TargetFlags.HasFlag(FixupRecordTargetFlags.AdditiveFixupFlag))
#endif
{
// 32-bit Additive Flag
#if NET20 || NET35
if ((entry.TargetFlags & FixupRecordTargetFlags.ThirtyTwoBitAdditiveFixupFlag) != 0)
if ((obj.TargetFlags & FixupRecordTargetFlags.ThirtyTwoBitAdditiveFixupFlag) != 0)
#else
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitAdditiveFixupFlag))
if (obj.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitAdditiveFixupFlag))
#endif
entry.AdditiveFixupValueDWORD = data.ReadUInt32();
obj.AdditiveFixupValueDWORD = data.ReadUInt32LittleEndian();
else
entry.AdditiveFixupValueWORD = data.ReadUInt16();
obj.AdditiveFixupValueWORD = data.ReadUInt16LittleEndian();
}
}
@@ -705,59 +686,135 @@ namespace SabreTools.Serialization.Deserializers
#region SCROFFn
#if NET20 || NET35
if ((entry.SourceType & FixupRecordSourceType.SourceListFlag) != 0)
if ((obj.SourceType & FixupRecordSourceType.SourceListFlag) != 0)
#else
if (entry.SourceType.HasFlag(FixupRecordSourceType.SourceListFlag))
if (obj.SourceType.HasFlag(FixupRecordSourceType.SourceListFlag))
#endif
{
entry.SourceOffsetList = new ushort[entry.SourceOffsetListCount];
for (int i = 0; i < entry.SourceOffsetList.Length; i++)
obj.SourceOffsetList = new ushort[obj.SourceOffsetListCount];
for (int i = 0; i < obj.SourceOffsetList.Length; i++)
{
entry.SourceOffsetList[i] = data.ReadUInt16();
obj.SourceOffsetList[i] = data.ReadUInt16LittleEndian();
}
}
#endregion
return entry;
return obj;
}
/// <summary>
/// Parse a Stream into a import module name table entry
/// Parse a Stream into an ImportModuleNameTableEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled import module name table entry on success, null on error</returns>
/// <returns>Filled ImportModuleNameTableEntry on success, null on error</returns>
public static ImportModuleNameTableEntry ParseImportModuleNameTableEntry(Stream data)
{
var entry = new ImportModuleNameTableEntry();
var obj = new ImportModuleNameTableEntry();
entry.Length = data.ReadByteValue();
if (entry.Length > 0)
obj.Length = data.ReadByteValue();
if (obj.Length > 0)
{
byte[] name = data.ReadBytes(entry.Length);
entry.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
byte[] name = data.ReadBytes(obj.Length);
obj.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
}
return entry;
return obj;
}
/// <summary>
/// Parse a Stream into a import module name table entry
/// Parse a Stream into an ImportModuleProcedureNameTableEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled import module name table entry on success, null on error</returns>
/// <returns>Filled ImportModuleProcedureNameTableEntry on success, null on error</returns>
public static ImportModuleProcedureNameTableEntry ParseImportModuleProcedureNameTableEntry(Stream data)
{
var entry = new ImportModuleProcedureNameTableEntry();
var obj = new ImportModuleProcedureNameTableEntry();
entry.Length = data.ReadByteValue();
if (entry.Length > 0)
obj.Length = data.ReadByteValue();
if (obj.Length > 0)
{
byte[] name = data.ReadBytes(entry.Length);
entry.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
byte[] name = data.ReadBytes(obj.Length);
obj.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
}
return entry;
return obj;
}
/// <summary>
/// Parse a Stream into a InformationBlock
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled InformationBlock on success, null on error</returns>
public static InformationBlock ParseInformationBlock(Stream data)
{
var obj = new InformationBlock();
byte[] signature = data.ReadBytes(2);
obj.Signature = Encoding.ASCII.GetString(signature);
obj.ByteOrder = (ByteOrder)data.ReadByteValue();
obj.WordOrder = (WordOrder)data.ReadByteValue();
obj.ExecutableFormatLevel = data.ReadUInt32LittleEndian();
obj.CPUType = (CPUType)data.ReadUInt16LittleEndian();
obj.ModuleOS = (OperatingSystem)data.ReadUInt16LittleEndian();
obj.ModuleVersion = data.ReadUInt32LittleEndian();
obj.ModuleTypeFlags = (ModuleFlags)data.ReadUInt32LittleEndian();
obj.ModuleNumberPages = data.ReadUInt32LittleEndian();
obj.InitialObjectCS = data.ReadUInt32LittleEndian();
obj.InitialEIP = data.ReadUInt32LittleEndian();
obj.InitialObjectSS = data.ReadUInt32LittleEndian();
obj.InitialESP = data.ReadUInt32LittleEndian();
obj.MemoryPageSize = data.ReadUInt32LittleEndian();
obj.BytesOnLastPage = data.ReadUInt32LittleEndian();
obj.FixupSectionSize = data.ReadUInt32LittleEndian();
obj.FixupSectionChecksum = data.ReadUInt32LittleEndian();
obj.LoaderSectionSize = data.ReadUInt32LittleEndian();
obj.LoaderSectionChecksum = data.ReadUInt32LittleEndian();
obj.ObjectTableOffset = data.ReadUInt32LittleEndian();
obj.ObjectTableCount = data.ReadUInt32LittleEndian();
obj.ObjectPageMapOffset = data.ReadUInt32LittleEndian();
obj.ObjectIterateDataMapOffset = data.ReadUInt32LittleEndian();
obj.ResourceTableOffset = data.ReadUInt32LittleEndian();
obj.ResourceTableCount = data.ReadUInt32LittleEndian();
obj.ResidentNamesTableOffset = data.ReadUInt32LittleEndian();
obj.EntryTableOffset = data.ReadUInt32LittleEndian();
obj.ModuleDirectivesTableOffset = data.ReadUInt32LittleEndian();
obj.ModuleDirectivesCount = data.ReadUInt32LittleEndian();
obj.FixupPageTableOffset = data.ReadUInt32LittleEndian();
obj.FixupRecordTableOffset = data.ReadUInt32LittleEndian();
obj.ImportedModulesNameTableOffset = data.ReadUInt32LittleEndian();
obj.ImportedModulesCount = data.ReadUInt32LittleEndian();
obj.ImportProcedureNameTableOffset = data.ReadUInt32LittleEndian();
obj.PerPageChecksumTableOffset = data.ReadUInt32LittleEndian();
obj.DataPagesOffset = data.ReadUInt32LittleEndian();
obj.PreloadPageCount = data.ReadUInt32LittleEndian();
obj.NonResidentNamesTableOffset = data.ReadUInt32LittleEndian();
obj.NonResidentNamesTableLength = data.ReadUInt32LittleEndian();
obj.NonResidentNamesTableChecksum = data.ReadUInt32LittleEndian();
obj.AutomaticDataObject = data.ReadUInt32LittleEndian();
obj.DebugInformationOffset = data.ReadUInt32LittleEndian();
obj.DebugInformationLength = data.ReadUInt32LittleEndian();
obj.PreloadInstancePagesNumber = data.ReadUInt32LittleEndian();
obj.DemandInstancePagesNumber = data.ReadUInt32LittleEndian();
obj.ExtraHeapAllocation = data.ReadUInt32LittleEndian();
return obj;
}
/// <summary>
/// Parse a Stream into a ModuleFormatDirectivesTableEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ModuleFormatDirectivesTableEntry on success, null on error</returns>
public static ModuleFormatDirectivesTableEntry ParseModuleFormatDirectivesTableEntry(Stream data)
{
var obj = new ModuleFormatDirectivesTableEntry();
obj.DirectiveNumber = (DirectiveNumber)data.ReadUInt16LittleEndian();
obj.DirectiveDataLength = data.ReadUInt16LittleEndian();
obj.DirectiveDataOffset = data.ReadUInt32LittleEndian();
return obj;
}
/// <summary>
@@ -767,38 +824,105 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled non-resident names table entry on success, null on error</returns>
public static NonResidentNamesTableEntry ParseNonResidentNameTableEntry(Stream data)
{
var entry = new NonResidentNamesTableEntry();
var obj = new NonResidentNamesTableEntry();
entry.Length = data.ReadByteValue();
if (entry.Length > 0)
obj.Length = data.ReadByteValue();
if (obj.Length > 0)
{
byte[] name = data.ReadBytes(entry.Length);
entry.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
byte[] name = data.ReadBytes(obj.Length);
obj.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
}
entry.OrdinalNumber = data.ReadUInt16();
return entry;
obj.OrdinalNumber = data.ReadUInt16LittleEndian();
return obj;
}
/// <summary>
/// Parse a Stream into a debug information
/// Parse a Stream into a ObjectPageMapEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="size">Total size of the debug information</param>
/// <returns>Filled debug information on success, null on error</returns>
public static DebugInformation? ParseDebugInformation(Stream data, long size)
/// <returns>Filled ObjectPageMapEntry on success, null on error</returns>
public static ObjectPageMapEntry ParseObjectPageMapEntry(Stream data)
{
var debugInformation = new DebugInformation();
var obj = new ObjectPageMapEntry();
byte[] signature = data.ReadBytes(3);
debugInformation.Signature = Encoding.ASCII.GetString(signature);
if (debugInformation.Signature != DebugInformationSignatureString)
return null;
obj.PageDataOffset = data.ReadUInt32LittleEndian();
obj.DataSize = data.ReadUInt16LittleEndian();
obj.Flags = (ObjectPageFlags)data.ReadUInt16LittleEndian();
debugInformation.FormatType = (DebugFormatType)data.ReadByteValue();
debugInformation.DebuggerData = data.ReadBytes((int)(size - 4));
return obj;
}
return debugInformation;
/// <summary>
/// Parse a Stream into a ObjectTableEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ObjectTableEntry on success, null on error</returns>
public static ObjectTableEntry ParseObjectTableEntry(Stream data)
{
var obj = new ObjectTableEntry();
obj.RelocationBaseAddress = data.ReadUInt32LittleEndian();
obj.ObjectFlags = (ObjectFlags)data.ReadUInt16LittleEndian();
obj.PageTableIndex = data.ReadUInt32LittleEndian();
obj.PageTableEntries = data.ReadUInt32LittleEndian();
obj.Reserved = data.ReadUInt32LittleEndian();
return obj;
}
/// <summary>
/// Parse a Stream into a PerPageChecksumTableEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled PerPageChecksumTableEntry on success, null on error</returns>
public static PerPageChecksumTableEntry ParsePerPageChecksumTableEntry(Stream data)
{
var obj = new PerPageChecksumTableEntry();
obj.Checksum = data.ReadUInt32LittleEndian();
return obj;
}
/// <summary>
/// Parse a Stream into a ResourceTableEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ResourceTableEntry on success, null on error</returns>
public static ResourceTableEntry ParseResourceTableEntry(Stream data)
{
var obj = new ResourceTableEntry();
obj.TypeID = (ResourceTableEntryType)data.ReadUInt32LittleEndian();
obj.NameID = data.ReadUInt16LittleEndian();
obj.ResourceSize = data.ReadUInt32LittleEndian();
obj.ObjectNumber = data.ReadUInt16LittleEndian();
obj.Offset = data.ReadUInt32LittleEndian();
return obj;
}
/// <summary>
/// Parse a Stream into a ResidentNamesTableEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ResidentNamesTableEntry on success, null on error</returns>
public static ResidentNamesTableEntry ParseResidentNamesTableEntry(Stream data)
{
var obj = new ResidentNamesTableEntry();
obj.Length = data.ReadByteValue();
if (obj.Length > 0)
{
byte[] name = data.ReadBytes(obj.Length);
obj.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
}
obj.OrdinalNumber = data.ReadUInt16LittleEndian();
return obj;
}
}
}

View File

@@ -27,7 +27,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the executable header
var executableHeader = ParseExecutableHeader(data);
if (executableHeader == null)
if (executableHeader.Magic != SignatureString)
return null;
// Set the executable header
@@ -44,12 +44,13 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the relocation table
data.Seek(tableAddress, SeekOrigin.Begin);
var relocationTable = ParseRelocationTable(data, executableHeader.RelocationItems);
if (relocationTable == null)
return null;
// Set the relocation table
executable.RelocationTable = relocationTable;
executable.RelocationTable = new RelocationEntry[executableHeader.RelocationItems];
for (int i = 0; i < executableHeader.RelocationItems; i++)
{
executable.RelocationTable[i] = ParseRelocationEntry(data);
}
#endregion
@@ -64,82 +65,72 @@ namespace SabreTools.Serialization.Deserializers
}
/// <summary>
/// Parse a Stream into an MS-DOS executable header
/// Parse a Stream into an ExecutableHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled executable header on success, null on error</returns>
private static ExecutableHeader? ParseExecutableHeader(Stream data)
/// <returns>Filled ExecutableHeader on success, null on error</returns>
public static ExecutableHeader ParseExecutableHeader(Stream data)
{
var header = new ExecutableHeader();
var obj = new ExecutableHeader();
#region Standard Fields
byte[] magic = data.ReadBytes(2);
header.Magic = Encoding.ASCII.GetString(magic);
if (header.Magic != SignatureString)
return null;
header.LastPageBytes = data.ReadUInt16();
header.Pages = data.ReadUInt16();
header.RelocationItems = data.ReadUInt16();
header.HeaderParagraphSize = data.ReadUInt16();
header.MinimumExtraParagraphs = data.ReadUInt16();
header.MaximumExtraParagraphs = data.ReadUInt16();
header.InitialSSValue = data.ReadUInt16();
header.InitialSPValue = data.ReadUInt16();
header.Checksum = data.ReadUInt16();
header.InitialIPValue = data.ReadUInt16();
header.InitialCSValue = data.ReadUInt16();
header.RelocationTableAddr = data.ReadUInt16();
header.OverlayNumber = data.ReadUInt16();
obj.Magic = Encoding.ASCII.GetString(magic);
obj.LastPageBytes = data.ReadUInt16LittleEndian();
obj.Pages = data.ReadUInt16LittleEndian();
obj.RelocationItems = data.ReadUInt16LittleEndian();
obj.HeaderParagraphSize = data.ReadUInt16LittleEndian();
obj.MinimumExtraParagraphs = data.ReadUInt16LittleEndian();
obj.MaximumExtraParagraphs = data.ReadUInt16LittleEndian();
obj.InitialSSValue = data.ReadUInt16LittleEndian();
obj.InitialSPValue = data.ReadUInt16LittleEndian();
obj.Checksum = data.ReadUInt16LittleEndian();
obj.InitialIPValue = data.ReadUInt16LittleEndian();
obj.InitialCSValue = data.ReadUInt16LittleEndian();
obj.RelocationTableAddr = data.ReadUInt16LittleEndian();
obj.OverlayNumber = data.ReadUInt16LittleEndian();
#endregion
// If we don't have enough data for PE extensions
if (data.Position >= data.Length || data.Length - data.Position < 36)
return header;
return obj;
#region PE Extensions
header.Reserved1 = new ushort[4];
for (int i = 0; i < header.Reserved1.Length; i++)
obj.Reserved1 = new ushort[4];
for (int i = 0; i < obj.Reserved1.Length; i++)
{
header.Reserved1[i] = data.ReadUInt16();
obj.Reserved1[i] = data.ReadUInt16LittleEndian();
}
header.OEMIdentifier = data.ReadUInt16();
header.OEMInformation = data.ReadUInt16();
header.Reserved2 = new ushort[10];
for (int i = 0; i < header.Reserved2.Length; i++)
obj.OEMIdentifier = data.ReadUInt16LittleEndian();
obj.OEMInformation = data.ReadUInt16LittleEndian();
obj.Reserved2 = new ushort[10];
for (int i = 0; i < obj.Reserved2.Length; i++)
{
header.Reserved2[i] = data.ReadUInt16();
obj.Reserved2[i] = data.ReadUInt16LittleEndian();
}
header.NewExeHeaderAddr = data.ReadUInt32();
obj.NewExeHeaderAddr = data.ReadUInt32LittleEndian();
#endregion
return header;
return obj;
}
/// <summary>
/// Parse a Stream into a relocation table
/// Parse a Stream into an ExecutableHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="count">Number of relocation table entries to read</param>
/// <returns>Filled relocation table on success, null on error</returns>
private static RelocationEntry[]? ParseRelocationTable(Stream data, int count)
/// <returns>Filled ExecutableHeader on success, null on error</returns>
public static RelocationEntry ParseRelocationEntry(Stream data)
{
var relocationTable = new RelocationEntry[count];
var obj = new RelocationEntry();
for (int i = 0; i < count; i++)
{
var entry = data.ReadType<RelocationEntry>();
if (entry == null)
return null;
obj.Offset = data.ReadUInt16LittleEndian();
obj.Segment = data.ReadUInt16LittleEndian();
relocationTable[i] = entry;
}
return relocationTable;
return obj;
}
}
}

View File

@@ -103,18 +103,18 @@ namespace SabreTools.Serialization.Deserializers
if (header.Signature != SignatureString)
return null;
header.Reserved1 = data.ReadUInt32();
header.CabinetSize = data.ReadUInt32();
header.Reserved2 = data.ReadUInt32();
header.FilesOffset = data.ReadUInt32();
header.Reserved3 = data.ReadUInt32();
header.Reserved1 = data.ReadUInt32LittleEndian();
header.CabinetSize = data.ReadUInt32LittleEndian();
header.Reserved2 = data.ReadUInt32LittleEndian();
header.FilesOffset = data.ReadUInt32LittleEndian();
header.Reserved3 = data.ReadUInt32LittleEndian();
header.VersionMinor = data.ReadByteValue();
header.VersionMajor = data.ReadByteValue();
header.FolderCount = data.ReadUInt16();
header.FileCount = data.ReadUInt16();
header.Flags = (HeaderFlags)data.ReadUInt16();
header.SetID = data.ReadUInt16();
header.CabinetIndex = data.ReadUInt16();
header.FolderCount = data.ReadUInt16LittleEndian();
header.FileCount = data.ReadUInt16LittleEndian();
header.Flags = (HeaderFlags)data.ReadUInt16LittleEndian();
header.SetID = data.ReadUInt16LittleEndian();
header.CabinetIndex = data.ReadUInt16LittleEndian();
#if NET20 || NET35
if ((header.Flags & HeaderFlags.RESERVE_PRESENT) != 0)
@@ -122,7 +122,7 @@ namespace SabreTools.Serialization.Deserializers
if (header.Flags.HasFlag(HeaderFlags.RESERVE_PRESENT))
#endif
{
header.HeaderReservedSize = data.ReadUInt16();
header.HeaderReservedSize = data.ReadUInt16LittleEndian();
if (header.HeaderReservedSize > 60_000)
return null;
@@ -166,9 +166,9 @@ namespace SabreTools.Serialization.Deserializers
{
var folder = new CFFOLDER();
folder.CabStartOffset = data.ReadUInt32();
folder.DataCount = data.ReadUInt16();
folder.CompressionType = (CompressionType)data.ReadUInt16();
folder.CabStartOffset = data.ReadUInt32LittleEndian();
folder.DataCount = data.ReadUInt16LittleEndian();
folder.CompressionType = (CompressionType)data.ReadUInt16LittleEndian();
if (header.FolderReservedSize > 0)
folder.ReservedData = data.ReadBytes(header.FolderReservedSize);
@@ -201,9 +201,9 @@ namespace SabreTools.Serialization.Deserializers
{
var dataBlock = new CFDATA();
dataBlock.Checksum = data.ReadUInt32();
dataBlock.CompressedSize = data.ReadUInt16();
dataBlock.UncompressedSize = data.ReadUInt16();
dataBlock.Checksum = data.ReadUInt32LittleEndian();
dataBlock.CompressedSize = data.ReadUInt16LittleEndian();
dataBlock.UncompressedSize = data.ReadUInt16LittleEndian();
if (dataReservedSize > 0)
dataBlock.ReservedData = data.ReadBytes(dataReservedSize);
@@ -223,12 +223,12 @@ namespace SabreTools.Serialization.Deserializers
{
var file = new CFFILE();
file.FileSize = data.ReadUInt32();
file.FolderStartOffset = data.ReadUInt32();
file.FolderIndex = (FolderIndex)data.ReadUInt16();
file.Date = data.ReadUInt16();
file.Time = data.ReadUInt16();
file.Attributes = (Models.MicrosoftCabinet.FileAttributes)data.ReadUInt16();
file.FileSize = data.ReadUInt32LittleEndian();
file.FolderStartOffset = data.ReadUInt32LittleEndian();
file.FolderIndex = (FolderIndex)data.ReadUInt16LittleEndian();
file.Date = data.ReadUInt16LittleEndian();
file.Time = data.ReadUInt16LittleEndian();
file.Attributes = (Models.MicrosoftCabinet.FileAttributes)data.ReadUInt16LittleEndian();
#if NET20 || NET35
if ((file.Attributes & Models.MicrosoftCabinet.FileAttributes.NAME_IS_UTF) != 0)

View File

@@ -25,7 +25,7 @@ namespace SabreTools.Serialization.Deserializers
#region User Data
// Check for User Data
uint possibleSignature = data.ReadUInt32();
uint possibleSignature = data.ReadUInt32LittleEndian();
data.Seek(-4, SeekOrigin.Current);
if (possibleSignature == UserDataSignatureUInt32)
{
@@ -33,7 +33,7 @@ namespace SabreTools.Serialization.Deserializers
long basePtr = data.Position;
// Deserialize the user data, returning null if invalid
var userData = data.ReadType<UserData>();
var userData = ParseUserData(data);
if (userData?.Signature != UserDataSignatureString)
return null;
@@ -49,13 +49,13 @@ namespace SabreTools.Serialization.Deserializers
#region Archive Header
// Check for the Header
possibleSignature = data.ReadUInt32();
possibleSignature = data.ReadUInt32LittleEndian();
data.Seek(-4, SeekOrigin.Current);
if (possibleSignature == ArchiveHeaderSignatureUInt32)
{
// Try to parse the archive header
var archiveHeader = ParseArchiveHeader(data);
if (archiveHeader == null)
if (archiveHeader.Signature != ArchiveHeaderSignatureString)
return null;
// Set the archive header
@@ -90,10 +90,7 @@ namespace SabreTools.Serialization.Deserializers
while (data.Position < hashTableEnd)
{
var hashEntry = data.ReadType<HashEntry>();
if (hashEntry == null)
return null;
var hashEntry = ParseHashEntry(data);
hashTable.Add(hashEntry);
}
@@ -120,10 +117,7 @@ namespace SabreTools.Serialization.Deserializers
while (data.Position < hashTableEnd)
{
var hashEntry = data.ReadType<HashEntry>();
if (hashEntry == null)
return null;
var hashEntry = ParseHashEntry(data);
hashTable.Add(hashEntry);
}
@@ -149,10 +143,7 @@ namespace SabreTools.Serialization.Deserializers
while (data.Position < hashTableEnd)
{
var hashEntry = data.ReadType<HashEntry>();
if (hashEntry == null)
return null;
var hashEntry = ParseHashEntry(data);
hashTable.Add(hashEntry);
}
@@ -182,10 +173,7 @@ namespace SabreTools.Serialization.Deserializers
while (data.Position < blockTableEnd)
{
var blockEntry = data.ReadType<BlockEntry>();
if (blockEntry == null)
return null;
var blockEntry = ParseBlockEntry(data);
blockTable.Add(blockEntry);
}
@@ -212,10 +200,7 @@ namespace SabreTools.Serialization.Deserializers
while (data.Position < blockTableEnd)
{
var blockEntry = data.ReadType<BlockEntry>();
if (blockEntry == null)
return null;
var blockEntry = ParseBlockEntry(data);
blockTable.Add(blockEntry);
}
@@ -241,10 +226,7 @@ namespace SabreTools.Serialization.Deserializers
while (data.Position < blockTableEnd)
{
var blockEntry = data.ReadType<BlockEntry>();
if (blockEntry == null)
return null;
var blockEntry = ParseBlockEntry(data);
blockTable.Add(blockEntry);
}
@@ -271,7 +253,7 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < (archive.BlockTable?.Length ?? 0); i++)
{
short hiBlockEntry = data.ReadInt16();
short hiBlockEntry = data.ReadInt16LittleEndian();
hiBlockTable.Add(hiBlockEntry);
}
@@ -295,7 +277,7 @@ namespace SabreTools.Serialization.Deserializers
// Read in the BET table
var betTable = ParseBetTable(data);
if (betTable != null)
if (betTable.Signature != BetTableSignatureString)
return null;
archive.BetTable = betTable;
@@ -318,7 +300,7 @@ namespace SabreTools.Serialization.Deserializers
// Read in the HET table
var hetTable = ParseHetTable(data);
if (hetTable != null)
if (hetTable.Signature != HetTableSignatureString)
return null;
archive.HetTable = hetTable;
@@ -337,150 +319,195 @@ namespace SabreTools.Serialization.Deserializers
}
/// <summary>
/// Parse a Stream into a archive header
/// Parse a Stream into an ArchiveHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled archive header on success, null on error</returns>
private static ArchiveHeader? ParseArchiveHeader(Stream data)
/// <returns>Filled ArchiveHeader on success, null on error</returns>
public static ArchiveHeader ParseArchiveHeader(Stream data)
{
ArchiveHeader archiveHeader = new ArchiveHeader();
var obj = new ArchiveHeader();
// V1 - Common
byte[] signature = data.ReadBytes(4);
archiveHeader.Signature = Encoding.ASCII.GetString(signature);
if (archiveHeader.Signature != ArchiveHeaderSignatureString)
return null;
obj.Signature = Encoding.ASCII.GetString(signature);
archiveHeader.HeaderSize = data.ReadUInt32();
archiveHeader.ArchiveSize = data.ReadUInt32();
archiveHeader.FormatVersion = (FormatVersion)data.ReadUInt16();
archiveHeader.BlockSize = data.ReadUInt16();
archiveHeader.HashTablePosition = data.ReadUInt32();
archiveHeader.BlockTablePosition = data.ReadUInt32();
archiveHeader.HashTableSize = data.ReadUInt32();
archiveHeader.BlockTableSize = data.ReadUInt32();
obj.HeaderSize = data.ReadUInt32LittleEndian();
obj.ArchiveSize = data.ReadUInt32LittleEndian();
obj.FormatVersion = (FormatVersion)data.ReadUInt16LittleEndian();
obj.BlockSize = data.ReadUInt16LittleEndian();
obj.HashTablePosition = data.ReadUInt32LittleEndian();
obj.BlockTablePosition = data.ReadUInt32LittleEndian();
obj.HashTableSize = data.ReadUInt32LittleEndian();
obj.BlockTableSize = data.ReadUInt32LittleEndian();
// V2
if (archiveHeader.FormatVersion >= FormatVersion.Format2)
if (obj.FormatVersion >= FormatVersion.Format2)
{
archiveHeader.HiBlockTablePosition = data.ReadUInt64();
archiveHeader.HashTablePositionHi = data.ReadUInt16();
archiveHeader.BlockTablePositionHi = data.ReadUInt16();
obj.HiBlockTablePosition = data.ReadUInt64LittleEndian();
obj.HashTablePositionHi = data.ReadUInt16LittleEndian();
obj.BlockTablePositionHi = data.ReadUInt16LittleEndian();
}
// V3
if (archiveHeader.FormatVersion >= FormatVersion.Format3)
if (obj.FormatVersion >= FormatVersion.Format3)
{
archiveHeader.ArchiveSizeLong = data.ReadUInt64();
archiveHeader.BetTablePosition = data.ReadUInt64();
archiveHeader.HetTablePosition = data.ReadUInt64();
obj.ArchiveSizeLong = data.ReadUInt64LittleEndian();
obj.BetTablePosition = data.ReadUInt64LittleEndian();
obj.HetTablePosition = data.ReadUInt64LittleEndian();
}
// V4
if (archiveHeader.FormatVersion >= FormatVersion.Format4)
if (obj.FormatVersion >= FormatVersion.Format4)
{
archiveHeader.HashTableSizeLong = data.ReadUInt64();
archiveHeader.BlockTableSizeLong = data.ReadUInt64();
archiveHeader.HiBlockTableSize = data.ReadUInt64();
archiveHeader.HetTableSize = data.ReadUInt64();
archiveHeader.BetTablesize = data.ReadUInt64();
archiveHeader.RawChunkSize = data.ReadUInt32();
obj.HashTableSizeLong = data.ReadUInt64LittleEndian();
obj.BlockTableSizeLong = data.ReadUInt64LittleEndian();
obj.HiBlockTableSize = data.ReadUInt64LittleEndian();
obj.HetTableSize = data.ReadUInt64LittleEndian();
obj.BetTablesize = data.ReadUInt64LittleEndian();
obj.RawChunkSize = data.ReadUInt32LittleEndian();
archiveHeader.BlockTableMD5 = data.ReadBytes(0x10);
archiveHeader.HashTableMD5 = data.ReadBytes(0x10);
archiveHeader.HiBlockTableMD5 = data.ReadBytes(0x10);
archiveHeader.BetTableMD5 = data.ReadBytes(0x10);
archiveHeader.HetTableMD5 = data.ReadBytes(0x10);
archiveHeader.HetTableMD5 = data.ReadBytes(0x10);
obj.BlockTableMD5 = data.ReadBytes(0x10);
obj.HashTableMD5 = data.ReadBytes(0x10);
obj.HiBlockTableMD5 = data.ReadBytes(0x10);
obj.BetTableMD5 = data.ReadBytes(0x10);
obj.HetTableMD5 = data.ReadBytes(0x10);
obj.HetTableMD5 = data.ReadBytes(0x10);
}
return archiveHeader;
return obj;
}
/// <summary>
/// Parse a Stream into a BetTable
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled BetTable on success, null on error</returns>
/// <remarks>
/// The caller is expected to validate <see cref="BetTable.Signature"/>;
/// this helper only performs the raw little-endian reads.
/// </remarks>
public static BetTable ParseBetTable(Stream data)
{
    var obj = new BetTable();

    // Common Headers
    byte[] signature = data.ReadBytes(4);
    obj.Signature = Encoding.ASCII.GetString(signature);
    obj.Version = data.ReadUInt32LittleEndian();
    obj.DataSize = data.ReadUInt32LittleEndian();

    // BET-Specific
    obj.TableSize = data.ReadUInt32LittleEndian();
    obj.FileCount = data.ReadUInt32LittleEndian();
    obj.Unknown = data.ReadUInt32LittleEndian();
    obj.TableEntrySize = data.ReadUInt32LittleEndian();

    // Bit positions of each packed field within a table entry
    obj.FilePositionBitIndex = data.ReadUInt32LittleEndian();
    obj.FileSizeBitIndex = data.ReadUInt32LittleEndian();
    obj.CompressedSizeBitIndex = data.ReadUInt32LittleEndian();
    obj.FlagIndexBitIndex = data.ReadUInt32LittleEndian();
    obj.UnknownBitIndex = data.ReadUInt32LittleEndian();

    // Bit widths of each packed field within a table entry
    obj.FilePositionBitCount = data.ReadUInt32LittleEndian();
    obj.FileSizeBitCount = data.ReadUInt32LittleEndian();
    obj.CompressedSizeBitCount = data.ReadUInt32LittleEndian();
    obj.FlagIndexBitCount = data.ReadUInt32LittleEndian();
    obj.UnknownBitCount = data.ReadUInt32LittleEndian();

    // BET hash sizing information
    obj.TotalBetHashSize = data.ReadUInt32LittleEndian();
    obj.BetHashSizeExtra = data.ReadUInt32LittleEndian();
    obj.BetHashSize = data.ReadUInt32LittleEndian();
    obj.BetHashArraySize = data.ReadUInt32LittleEndian();
    obj.FlagCount = data.ReadUInt32LittleEndian();

    // Flags are stored as a raw little-endian uint array
    obj.FlagsArray = new uint[obj.FlagCount];
    byte[] flagsArray = data.ReadBytes((int)obj.FlagCount * 4);
    Buffer.BlockCopy(flagsArray, 0, obj.FlagsArray, 0, (int)obj.FlagCount * 4);

    // TODO: Populate the file table
    // TODO: Populate the hash table

    return obj;
}
/// <summary>
/// Parse a Stream into a BlockEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled BlockEntry on success, null on error</returns>
public static BlockEntry ParseBlockEntry(Stream data)
{
    // Fields appear in on-disk order; object initializers evaluate
    // left-to-right, so the read sequence is preserved.
    var entry = new BlockEntry
    {
        FilePosition = data.ReadUInt32LittleEndian(),
        CompressedSize = data.ReadUInt32LittleEndian(),
        UncompressedSize = data.ReadUInt32LittleEndian(),
        Flags = (FileFlags)data.ReadUInt32LittleEndian(),
    };
    return entry;
}
/// <summary>
/// Parse a Stream into a HashEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled HashEntry on success, null on error</returns>
public static HashEntry ParseHashEntry(Stream data)
{
    // Sequential little-endian reads in on-disk field order
    var entry = new HashEntry
    {
        NameHashPartA = data.ReadUInt32LittleEndian(),
        NameHashPartB = data.ReadUInt32LittleEndian(),
        Locale = (Locale)data.ReadInt16LittleEndian(),
        Platform = data.ReadUInt16LittleEndian(),
        BlockIndex = data.ReadUInt32LittleEndian(),
    };
    return entry;
}
/// <summary>
/// Parse a Stream into a HetTable
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled HetTable on success, null on error</returns>
public static HetTable ParseHetTable(Stream data)
{
    var table = new HetTable();

    // Common table headers (signature is validated by the caller)
    byte[] sig = data.ReadBytes(4);
    table.Signature = Encoding.ASCII.GetString(sig);
    table.Version = data.ReadUInt32LittleEndian();
    table.DataSize = data.ReadUInt32LittleEndian();

    // HET-specific fields
    table.TableSize = data.ReadUInt32LittleEndian();
    table.MaxFileCount = data.ReadUInt32LittleEndian();
    table.HashTableSize = data.ReadUInt32LittleEndian();
    table.TotalIndexSize = data.ReadUInt32LittleEndian();
    table.IndexSizeExtra = data.ReadUInt32LittleEndian();
    table.IndexSize = data.ReadUInt32LittleEndian();
    table.BlockTableSize = data.ReadUInt32LittleEndian();

    // Hash table is a raw byte blob sized by the header field above
    table.HashTable = data.ReadBytes((int)table.HashTableSize);

    // TODO: Populate the file indexes array
    table.FileIndexes = new byte[(int)table.HashTableSize][];

    return table;
}
/// <summary>
/// Parse a Stream into a UserData
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled UserData on success, null on error</returns>
public static UserData ParseUserData(Stream data)
{
    var userData = new UserData();

    // 4-byte ASCII signature followed by three little-endian sizes/offsets
    userData.Signature = Encoding.ASCII.GetString(data.ReadBytes(4));
    userData.UserDataSize = data.ReadUInt32LittleEndian();
    userData.HeaderOffset = data.ReadUInt32LittleEndian();
    userData.UserDataHeaderSize = data.ReadUInt32LittleEndian();

    return userData;
}
#region Helpers
@@ -488,7 +515,7 @@ namespace SabreTools.Serialization.Deserializers
/// <summary>
/// Buffer for encryption and decryption
/// </summary>
private uint[] _stormBuffer = new uint[STORM_BUFFER_SIZE];
private readonly uint[] _stormBuffer = new uint[STORM_BUFFER_SIZE];
/// <summary>
/// Prepare the encryption table

View File

@@ -25,7 +25,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the header
var header = ParseNCSDHeader(data);
if (header == null)
if (header.MagicNumber != NCSDMagicNumber)
return null;
// Set the cart image header
@@ -35,25 +35,15 @@ namespace SabreTools.Serialization.Deserializers
#region Card Info Header
// Try to parse the card info header
var cardInfoHeader = ParseCardInfoHeader(data);
if (cardInfoHeader == null)
return null;
// Set the card info header
cart.CardInfoHeader = cardInfoHeader;
cart.CardInfoHeader = ParseCardInfoHeader(data);
#endregion
#region Development Card Info Header
// Try to parse the development card info header
var developmentCardInfoHeader = data.ReadType<DevelopmentCardInfoHeader>();
if (developmentCardInfoHeader == null)
return null;
// Set the development card info header
cart.DevelopmentCardInfoHeader = developmentCardInfoHeader;
cart.DevelopmentCardInfoHeader = ParseDevelopmentCardInfoHeader(data);
#endregion
@@ -92,11 +82,7 @@ namespace SabreTools.Serialization.Deserializers
// Handle the extended header, if it exists
if (partition.ExtendedHeaderSizeInBytes > 0)
{
var extendedHeader = data.ReadType<NCCHExtendedHeader>();
if (extendedHeader != null)
cart.ExtendedHeaders[i] = extendedHeader;
}
cart.ExtendedHeaders[i] = ParseNCCHExtendedHeader(data);
// Handle the ExeFS, if it exists
if (partition.ExeFSSizeInMediaUnits > 0)
@@ -117,10 +103,10 @@ namespace SabreTools.Serialization.Deserializers
long offset = partition.RomFSOffsetInMediaUnits * mediaUnitSize;
data.Seek(partitionOffset + offset, SeekOrigin.Begin);
var romFsHeader = data.ReadType<RomFSHeader>();
if (romFsHeader?.MagicString != RomFSMagicNumber)
var romFsHeader = ParseRomFSHeader(data);
if (romFsHeader.MagicString != RomFSMagicNumber)
continue;
if (romFsHeader?.MagicNumber != RomFSSecondMagicNumber)
if (romFsHeader.MagicNumber != RomFSSecondMagicNumber)
continue;
cart.RomFSHeaders[i] = romFsHeader;
@@ -139,85 +125,193 @@ namespace SabreTools.Serialization.Deserializers
}
/// <summary>
/// Parse a Stream into a AccessControlInfo
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled AccessControlInfo on success, null on error</returns>
public static AccessControlInfo ParseAccessControlInfo(Stream data)
{
    var obj = new AccessControlInfo();

    // The three sub-structures are stored back-to-back in this order
    obj.ARM11LocalSystemCapabilities = ParseARM11LocalSystemCapabilities(data);
    obj.ARM11KernelCapabilities = ParseARM11KernelCapabilities(data);
    obj.ARM9AccessControl = ParseARM9AccessControl(data);

    return obj;
}
/// <summary>
/// Parse a Stream into a ARM9AccessControl
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ARM9AccessControl on success, null on error</returns>
public static ARM9AccessControl ParseARM9AccessControl(Stream data)
{
    var obj = new ARM9AccessControl();

    // 15 one-byte descriptors followed by a one-byte descriptor version
    obj.Descriptors = new ARM9AccessControlDescriptors[15];
    for (int i = 0; i < 15; i++)
    {
        obj.Descriptors[i] = (ARM9AccessControlDescriptors)data.ReadByteValue();
    }
    obj.DescriptorVersion = data.ReadByteValue();

    return obj;
}
/// <summary>
/// Parse a Stream into a ARM11KernelCapabilities
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ARM11KernelCapabilities on success, null on error</returns>
public static ARM11KernelCapabilities ParseARM11KernelCapabilities(Stream data)
{
    var caps = new ARM11KernelCapabilities();

    // Fixed table of 28 little-endian descriptor words
    caps.Descriptors = new uint[28];
    for (int idx = 0; idx < caps.Descriptors.Length; idx++)
        caps.Descriptors[idx] = data.ReadUInt32LittleEndian();

    caps.Reserved = data.ReadBytes(0x10);
    return caps;
}
/// <summary>
/// Parse a Stream into a ARM11LocalSystemCapabilities
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ARM11LocalSystemCapabilities on success, null on error</returns>
public static ARM11LocalSystemCapabilities ParseARM11LocalSystemCapabilities(Stream data)
{
    var caps = new ARM11LocalSystemCapabilities();

    caps.ProgramID = data.ReadUInt64LittleEndian();
    caps.CoreVersion = data.ReadUInt32LittleEndian();

    // Note: on-disk order is Flag1, Flag2, then Flag0
    caps.Flag1 = (ARM11LSCFlag1)data.ReadByteValue();
    caps.Flag2 = (ARM11LSCFlag2)data.ReadByteValue();
    caps.Flag0 = (ARM11LSCFlag0)data.ReadByteValue();
    caps.Priority = data.ReadByteValue();

    // 16 resource-limit descriptor words
    caps.ResourceLimitDescriptors = new ushort[16];
    for (int idx = 0; idx < caps.ResourceLimitDescriptors.Length; idx++)
        caps.ResourceLimitDescriptors[idx] = data.ReadUInt16LittleEndian();

    caps.StorageInfo = ParseStorageInfo(data);

    // 32 standard + 2 extended service access control entries
    caps.ServiceAccessControl = new ulong[32];
    for (int idx = 0; idx < caps.ServiceAccessControl.Length; idx++)
        caps.ServiceAccessControl[idx] = data.ReadUInt64LittleEndian();

    caps.ExtendedServiceAccessControl = new ulong[2];
    for (int idx = 0; idx < caps.ExtendedServiceAccessControl.Length; idx++)
        caps.ExtendedServiceAccessControl[idx] = data.ReadUInt64LittleEndian();

    caps.Reserved = data.ReadBytes(0x0F);
    caps.ResourceLimitCategory = (ResourceLimitCategory)data.ReadByteValue();

    return caps;
}
/// <summary>
/// Parse a Stream into a CardInfoHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled CardInfoHeader on success, null on error</returns>
public static CardInfoHeader ParseCardInfoHeader(Stream data)
{
    var header = new CardInfoHeader();

    header.WritableAddressMediaUnits = data.ReadUInt32LittleEndian();
    header.CardInfoBitmask = data.ReadUInt32LittleEndian();
    header.Reserved1 = data.ReadBytes(0xF8);
    header.FilledSize = data.ReadUInt32LittleEndian();
    header.Reserved2 = data.ReadBytes(0x0C);
    header.TitleVersion = data.ReadUInt16LittleEndian();
    header.CardRevision = data.ReadUInt16LittleEndian();
    header.Reserved3 = data.ReadBytes(0x0C);
    header.CVerTitleID = data.ReadBytes(0x08);
    header.CVerVersionNumber = data.ReadUInt16LittleEndian();
    header.Reserved4 = data.ReadBytes(0xCD6);

    // The initial data block immediately follows the fixed fields
    header.InitialData = ParseInitialData(data);

    return header;
}
/// <summary>
/// Parse a Stream into a CodeSetInfo
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled CodeSetInfo on success, null on error</returns>
public static CodeSetInfo ParseCodeSetInfo(Stream data)
{
    // Three consecutive little-endian words; initializer order preserves reads
    var info = new CodeSetInfo
    {
        Address = data.ReadUInt32LittleEndian(),
        PhysicalRegionSizeInPages = data.ReadUInt32LittleEndian(),
        SizeInBytes = data.ReadUInt32LittleEndian(),
    };
    return info;
}
/// <summary>
/// Parse a Stream into a DevelopmentCardInfoHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled DevelopmentCardInfoHeader on success, null on error</returns>
public static DevelopmentCardInfoHeader ParseDevelopmentCardInfoHeader(Stream data)
{
    // Reserved regions bracket the title key; test data trails the header
    var header = new DevelopmentCardInfoHeader
    {
        CardDeviceReserved1 = data.ReadBytes(0x200),
        TitleKey = data.ReadBytes(0x10),
        CardDeviceReserved2 = data.ReadBytes(0x1BF0),
        TestData = ParseTestData(data),
    };
    return header;
}
/// <summary>
/// Parse a Stream into a ExeFSFileHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ExeFSFileHeader on success, null on error</returns>
public static ExeFSFileHeader ParseExeFSFileHeader(Stream data)
{
    var header = new ExeFSFileHeader();

    // 8-byte fixed-width ASCII name, NUL-padded on disk
    header.FileName = Encoding.ASCII.GetString(data.ReadBytes(8)).TrimEnd('\0');
    header.FileOffset = data.ReadUInt32LittleEndian();
    header.FileSize = data.ReadUInt32LittleEndian();

    return header;
}
/// <summary>
/// Parse a Stream into an ExeFSHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ExeFSHeader on success, null on error</returns>
public static ExeFSHeader ParseExeFSHeader(Stream data)
{
    var header = new ExeFSHeader();

    // Fixed table of 10 file headers
    header.FileHeaders = new ExeFSFileHeader[10];
    for (int idx = 0; idx < header.FileHeaders.Length; idx++)
        header.FileHeaders[idx] = ParseExeFSFileHeader(data);

    header.Reserved = data.ReadBytes(0x20);

    // One 0x20-byte hash per file slot
    header.FileHashes = new byte[10][];
    for (int idx = 0; idx < header.FileHashes.Length; idx++)
        header.FileHashes[idx] = data.ReadBytes(0x20);

    return header;
}
/// <summary>
/// Parse a Stream into an InitialData
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled initial data on success, null on error</returns>
public static InitialData? ParseInitialData(Stream data)
{
    var obj = new InitialData();

    obj.CardSeedKeyY = data.ReadBytes(0x10);
    obj.EncryptedCardSeed = data.ReadBytes(0x10);
    obj.CardSeedAESMAC = data.ReadBytes(0x10);
    obj.CardSeedNonce = data.ReadBytes(0x0C);
    obj.Reserved = data.ReadBytes(0xC4);

    // The backup header repeats the NCCH header without its RSA signature
    obj.BackupHeader = ParseNCCHHeader(data, skipSignature: true);

    return obj;
}
/// <summary>
/// Parse a Stream into a NCCHExtendedHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled NCCHExtendedHeader on success, null on error</returns>
public static NCCHExtendedHeader ParseNCCHExtendedHeader(Stream data)
{
    // SCI, ACI, two 0x100-byte key/signature blobs, then the ACI limitations copy
    var header = new NCCHExtendedHeader
    {
        SCI = ParseSystemControlInfo(data),
        ACI = ParseAccessControlInfo(data),
        AccessDescSignature = data.ReadBytes(0x100),
        NCCHHDRPublicKey = data.ReadBytes(0x100),
        ACIForLimitations = ParseAccessControlInfo(data),
    };
    return header;
}
/// <summary>
/// Parse a Stream into an NCCHHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="skipSignature">Indicates if the signature should be skipped</param>
/// <returns>Filled NCCHHeader on success, null on error</returns>
public static NCCHHeader ParseNCCHHeader(Stream data, bool skipSignature = false)
{
    var obj = new NCCHHeader();

    // The backup header inside InitialData omits the leading RSA signature
    if (!skipSignature)
        obj.RSA2048Signature = data.ReadBytes(0x100);

    byte[] magicId = data.ReadBytes(4);
    obj.MagicID = Encoding.ASCII.GetString(magicId).TrimEnd('\0');
    obj.ContentSizeInMediaUnits = data.ReadUInt32LittleEndian();
    obj.PartitionId = data.ReadUInt64LittleEndian();
    obj.MakerCode = data.ReadUInt16LittleEndian();
    obj.Version = data.ReadUInt16LittleEndian();
    obj.VerificationHash = data.ReadUInt32LittleEndian();
    obj.ProgramId = data.ReadBytes(8);
    obj.Reserved1 = data.ReadBytes(0x10);
    obj.LogoRegionHash = data.ReadBytes(0x20);

    byte[] productCode = data.ReadBytes(0x10);
    obj.ProductCode = Encoding.ASCII.GetString(productCode).TrimEnd('\0');
    obj.ExtendedHeaderHash = data.ReadBytes(0x20);
    obj.ExtendedHeaderSizeInBytes = data.ReadUInt32LittleEndian();
    obj.Reserved2 = data.ReadUInt32LittleEndian();
    obj.Flags = ParseNCCHHeaderFlags(data);

    // Region offsets/sizes are all expressed in media units
    obj.PlainRegionOffsetInMediaUnits = data.ReadUInt32LittleEndian();
    obj.PlainRegionSizeInMediaUnits = data.ReadUInt32LittleEndian();
    obj.LogoRegionOffsetInMediaUnits = data.ReadUInt32LittleEndian();
    obj.LogoRegionSizeInMediaUnits = data.ReadUInt32LittleEndian();
    obj.ExeFSOffsetInMediaUnits = data.ReadUInt32LittleEndian();
    obj.ExeFSSizeInMediaUnits = data.ReadUInt32LittleEndian();
    obj.ExeFSHashRegionSizeInMediaUnits = data.ReadUInt32LittleEndian();
    obj.Reserved3 = data.ReadUInt32LittleEndian();
    obj.RomFSOffsetInMediaUnits = data.ReadUInt32LittleEndian();
    obj.RomFSSizeInMediaUnits = data.ReadUInt32LittleEndian();
    obj.RomFSHashRegionSizeInMediaUnits = data.ReadUInt32LittleEndian();
    obj.Reserved4 = data.ReadUInt32LittleEndian();
    obj.ExeFSSuperblockHash = data.ReadBytes(0x20);
    obj.RomFSSuperblockHash = data.ReadBytes(0x20);

    return obj;
}
/// <summary>
/// Parse a Stream into an NCCHHeaderFlags
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled NCCHHeaderFlags on success, null on error</returns>
public static NCCHHeaderFlags ParseNCCHHeaderFlags(Stream data)
{
    var obj = new NCCHHeaderFlags();

    // Eight single-byte flag fields in on-disk order
    obj.Reserved0 = data.ReadByteValue();
    obj.Reserved1 = data.ReadByteValue();
    obj.Reserved2 = data.ReadByteValue();
    obj.CryptoMethod = (CryptoMethod)data.ReadByteValue();
    obj.ContentPlatform = (ContentPlatform)data.ReadByteValue();
    obj.MediaPlatformIndex = (ContentType)data.ReadByteValue();
    obj.ContentUnitSize = data.ReadByteValue();
    obj.BitMasks = (BitMasks)data.ReadByteValue();

    return obj;
}
/// <summary>
/// Parse a Stream into an NCSDHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled NCSDHeader on success, null on error</returns>
/// <remarks>
/// The caller is expected to validate <see cref="NCSDHeader.MagicNumber"/>.
/// </remarks>
public static NCSDHeader ParseNCSDHeader(Stream data)
{
    var obj = new NCSDHeader();

    obj.RSA2048Signature = data.ReadBytes(0x100);
    byte[] magicNumber = data.ReadBytes(4);
    obj.MagicNumber = Encoding.ASCII.GetString(magicNumber).TrimEnd('\0');
    obj.ImageSizeInMediaUnits = data.ReadUInt32LittleEndian();
    obj.MediaId = data.ReadBytes(8);
    obj.PartitionsFSType = (FilesystemType)data.ReadUInt64LittleEndian();
    obj.PartitionsCryptType = data.ReadBytes(8);

    // Fixed table of 8 partition entries
    obj.PartitionsTable = new PartitionTableEntry[8];
    for (int i = 0; i < 8; i++)
    {
        obj.PartitionsTable[i] = ParsePartitionTableEntry(data);
    }

    // The tail of the header differs by filesystem type
    if (obj.PartitionsFSType == FilesystemType.Normal || obj.PartitionsFSType == FilesystemType.None)
    {
        obj.ExheaderHash = data.ReadBytes(0x20);
        obj.AdditionalHeaderSize = data.ReadUInt32LittleEndian();
        obj.SectorZeroOffset = data.ReadUInt32LittleEndian();
        obj.PartitionFlags = data.ReadBytes(8);
        obj.PartitionIdTable = new ulong[8];
        for (int i = 0; i < 8; i++)
        {
            obj.PartitionIdTable[i] = data.ReadUInt64LittleEndian();
        }
        obj.Reserved1 = data.ReadBytes(0x20);
        obj.Reserved2 = data.ReadBytes(0x0E);
        obj.FirmUpdateByte1 = data.ReadByteValue();
        obj.FirmUpdateByte2 = data.ReadByteValue();
    }
    else if (obj.PartitionsFSType == FilesystemType.FIRM)
    {
        obj.Unknown = data.ReadBytes(0x5E);
        obj.EncryptedMBR = data.ReadBytes(0x42);
    }

    return obj;
}
/// <summary>
/// Parse a Stream into a PartitionTableEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled PartitionTableEntry on success, null on error</returns>
public static PartitionTableEntry ParsePartitionTableEntry(Stream data)
{
    // Two consecutive little-endian words: offset then length
    var entry = new PartitionTableEntry
    {
        Offset = data.ReadUInt32LittleEndian(),
        Length = data.ReadUInt32LittleEndian(),
    };
    return entry;
}
/// <summary>
/// Parse a Stream into a RomFSHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled RomFSHeader on success, null on error</returns>
public static RomFSHeader ParseRomFSHeader(Stream data)
{
    var header = new RomFSHeader();

    // 4-byte ASCII magic plus secondary numeric magic (validated by caller)
    header.MagicString = Encoding.ASCII.GetString(data.ReadBytes(4));
    header.MagicNumber = data.ReadUInt32LittleEndian();
    header.MasterHashSize = data.ReadUInt32LittleEndian();

    // Level 1 hash region
    header.Level1LogicalOffset = data.ReadUInt64LittleEndian();
    header.Level1HashdataSize = data.ReadUInt64LittleEndian();
    header.Level1BlockSizeLog2 = data.ReadUInt32LittleEndian();
    header.Reserved1 = data.ReadUInt32LittleEndian();

    // Level 2 hash region
    header.Level2LogicalOffset = data.ReadUInt64LittleEndian();
    header.Level2HashdataSize = data.ReadUInt64LittleEndian();
    header.Level2BlockSizeLog2 = data.ReadUInt32LittleEndian();
    header.Reserved2 = data.ReadUInt32LittleEndian();

    // Level 3 hash region
    header.Level3LogicalOffset = data.ReadUInt64LittleEndian();
    header.Level3HashdataSize = data.ReadUInt64LittleEndian();
    header.Level3BlockSizeLog2 = data.ReadUInt32LittleEndian();
    header.Reserved3 = data.ReadUInt32LittleEndian();

    header.Reserved4 = data.ReadUInt32LittleEndian();
    header.OptionalInfoSize = data.ReadUInt32LittleEndian();

    return header;
}
/// <summary>
/// Parse a Stream into a StorageInfo
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled StorageInfo on success, null on error</returns>
public static StorageInfo ParseStorageInfo(Stream data)
{
    // Sequential reads in on-disk order; initializer preserves ordering
    var info = new StorageInfo
    {
        ExtdataID = data.ReadUInt64LittleEndian(),
        SystemSavedataIDs = data.ReadBytes(8),
        StorageAccessibleUniqueIDs = data.ReadBytes(8),
        FileSystemAccessInfo = data.ReadBytes(7),
        OtherAttributes = (StorageInfoOtherAttributes)data.ReadByteValue(),
    };
    return info;
}
/// <summary>
/// Parse a Stream into a SystemControlInfo
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SystemControlInfo on success, null on error</returns>
public static SystemControlInfo ParseSystemControlInfo(Stream data)
{
    var sci = new SystemControlInfo();

    // 8-byte fixed-width ASCII title, NUL-padded on disk
    sci.ApplicationTitle = Encoding.ASCII.GetString(data.ReadBytes(8)).TrimEnd('\0');
    sci.Reserved1 = data.ReadBytes(5);
    sci.Flag = data.ReadByteValue();
    sci.RemasterVersion = data.ReadUInt16LittleEndian();

    // Code set descriptors are interleaved with size fields on disk
    sci.TextCodeSetInfo = ParseCodeSetInfo(data);
    sci.StackSize = data.ReadUInt32LittleEndian();
    sci.ReadOnlyCodeSetInfo = ParseCodeSetInfo(data);
    sci.Reserved2 = data.ReadUInt32LittleEndian();
    sci.DataCodeSetInfo = ParseCodeSetInfo(data);
    sci.BSSSize = data.ReadUInt32LittleEndian();

    // Fixed table of 48 dependency module IDs
    sci.DependencyModuleList = new ulong[48];
    for (int idx = 0; idx < sci.DependencyModuleList.Length; idx++)
        sci.DependencyModuleList[idx] = data.ReadUInt64LittleEndian();

    sci.SystemInfo = ParseSystemInfo(data);
    return sci;
}
/// <summary>
/// Parse a Stream into a SystemInfo
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SystemInfo on success, null on error</returns>
public static SystemInfo ParseSystemInfo(Stream data)
{
    // Two little-endian quadwords followed by a reserved blob
    var info = new SystemInfo
    {
        SaveDataSize = data.ReadUInt64LittleEndian(),
        JumpID = data.ReadUInt64LittleEndian(),
        Reserved = data.ReadBytes(0x30),
    };
    return info;
}
/// <summary>
/// Parse a Stream into a TestData
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled TestData on success, null on error</returns>
public static TestData ParseTestData(Stream data)
{
    // Fixed-size pattern regions read in on-disk order; the final region
    // is 0x1FF bytes plus a separately-stored final byte.
    var testData = new TestData
    {
        Signature = data.ReadBytes(8),
        AscendingByteSequence = data.ReadBytes(0x1F8),
        DescendingByteSequence = data.ReadBytes(0x200),
        Filled00 = data.ReadBytes(0x200),
        FilledFF = data.ReadBytes(0x200),
        Filled0F = data.ReadBytes(0x200),
        FilledF0 = data.ReadBytes(0x200),
        Filled55 = data.ReadBytes(0x200),
        FilledAA = data.ReadBytes(0x1FF),
        FinalByte = data.ReadByteValue(),
    };
    return testData;
}
}
}

View File

@@ -23,12 +23,12 @@ namespace SabreTools.Serialization.Deserializers
#region Header
// Try to parse the header
var header = data.ReadType<Header>();
if (header?.Dummy0 != 0x00000001)
var header = ParseHeader(data);
if (header.Dummy0 != 0x00000001)
return null;
if (header?.MajorVersion != 0x00000002)
if (header.MajorVersion != 0x00000002)
return null;
if (header?.MinorVersion != 1)
if (header.MinorVersion != 1)
return null;
// Set the no cache header
@@ -42,8 +42,8 @@ namespace SabreTools.Serialization.Deserializers
#region Directory Header
// Try to parse the directory header
var directoryHeader = data.ReadType<DirectoryHeader>();
if (directoryHeader?.Dummy0 != 0x00000004)
var directoryHeader = ParseDirectoryHeader(data);
if (directoryHeader.Dummy0 != 0x00000004)
return null;
// Set the game cache directory header
@@ -59,11 +59,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the directory entries
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var directoryEntry = data.ReadType<DirectoryEntry>();
if (directoryEntry == null)
return null;
file.DirectoryEntries[i] = directoryEntry;
file.DirectoryEntries[i] = ParseDirectoryEntry(data);
}
#endregion
@@ -107,11 +103,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the directory info 1 entries
for (int i = 0; i < directoryHeader.Info1Count; i++)
{
var directoryInfo1Entry = data.ReadType<DirectoryInfo1Entry>();
if (directoryInfo1Entry == null)
return null;
file.DirectoryInfo1Entries[i] = directoryInfo1Entry;
file.DirectoryInfo1Entries[i] = ParseDirectoryInfo1Entry(data);
}
#endregion
@@ -124,11 +116,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the directory info 2 entries
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var directoryInfo2Entry = data.ReadType<DirectoryInfo2Entry>();
if (directoryInfo2Entry == null)
return null;
file.DirectoryInfo2Entries[i] = directoryInfo2Entry;
file.DirectoryInfo2Entries[i] = ParseDirectoryInfo2Entry(data);
}
#endregion
@@ -141,11 +129,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the directory copy entries
for (int i = 0; i < directoryHeader.CopyCount; i++)
{
var directoryCopyEntry = data.ReadType<DirectoryCopyEntry>();
if (directoryCopyEntry == null)
return null;
file.DirectoryCopyEntries[i] = directoryCopyEntry;
file.DirectoryCopyEntries[i] = ParseDirectoryCopyEntry(data);
}
#endregion
@@ -158,11 +142,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the directory local entries
for (int i = 0; i < directoryHeader.LocalCount; i++)
{
var directoryLocalEntry = data.ReadType<DirectoryLocalEntry>();
if (directoryLocalEntry == null)
return null;
file.DirectoryLocalEntries[i] = directoryLocalEntry;
file.DirectoryLocalEntries[i] = ParseDirectoryLocalEntry(data);
}
#endregion
@@ -173,10 +153,10 @@ namespace SabreTools.Serialization.Deserializers
#region Unknown Header
// Try to parse the unknown header
var unknownHeader = data.ReadType<UnknownHeader>();
if (unknownHeader?.Dummy0 != 0x00000001)
var unknownHeader = ParseUnknownHeader(data);
if (unknownHeader.Dummy0 != 0x00000001)
return null;
if (unknownHeader?.Dummy1 != 0x00000000)
if (unknownHeader.Dummy1 != 0x00000000)
return null;
// Set the game cache unknown header
@@ -192,11 +172,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the unknown entries
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var unknownEntry = data.ReadType<UnknownEntry>();
if (unknownEntry == null)
return null;
file.UnknownEntries[i] = unknownEntry;
file.UnknownEntries[i] = ParseUnknownEntry(data);
}
#endregion
@@ -204,8 +180,8 @@ namespace SabreTools.Serialization.Deserializers
#region Checksum Header
// Try to parse the checksum header
var checksumHeader = data.ReadType<ChecksumHeader>();
if (checksumHeader?.Dummy0 != 0x00000001)
var checksumHeader = ParseChecksumHeader(data);
if (checksumHeader.Dummy0 != 0x00000001)
return null;
// Set the game cache checksum header
@@ -219,10 +195,10 @@ namespace SabreTools.Serialization.Deserializers
#region Checksum Map Header
// Try to parse the checksum map header
var checksumMapHeader = data.ReadType<ChecksumMapHeader>();
if (checksumMapHeader?.Dummy0 != 0x14893721)
var checksumMapHeader = ParseChecksumMapHeader(data);
if (checksumMapHeader.Dummy0 != 0x14893721)
return null;
if (checksumMapHeader?.Dummy1 != 0x00000001)
if (checksumMapHeader.Dummy1 != 0x00000001)
return null;
// Set the game cache checksum map header
@@ -238,11 +214,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the checksum map entries
for (int i = 0; i < checksumMapHeader.ItemCount; i++)
{
var checksumMapEntry = data.ReadType<ChecksumMapEntry>();
if (checksumMapEntry == null)
return null;
file.ChecksumMapEntries[i] = checksumMapEntry;
file.ChecksumMapEntries[i] = ParseChecksumMapEntry(data);
}
#endregion
@@ -255,11 +227,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the checksum entries
for (int i = 0; i < checksumMapHeader.ChecksumCount; i++)
{
var checksumEntry = data.ReadType<ChecksumEntry>();
if (checksumEntry == null)
return null;
file.ChecksumEntries[i] = checksumEntry;
file.ChecksumEntries[i] = ParseChecksumEntry(data);
}
#endregion
@@ -275,5 +243,222 @@ namespace SabreTools.Serialization.Deserializers
return null;
}
}
/// <summary>
/// Parse a Stream into a ChecksumEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ChecksumEntry on success, null on error</returns>
public static ChecksumEntry ParseChecksumEntry(Stream data)
{
    // A checksum entry is a single little-endian 32-bit value
    return new ChecksumEntry
    {
        Checksum = data.ReadUInt32LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into a ChecksumHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ChecksumHeader on success, null on error</returns>
public static ChecksumHeader ParseChecksumHeader(Stream data)
{
    // Two consecutive little-endian 32-bit fields
    return new ChecksumHeader
    {
        Dummy0 = data.ReadUInt32LittleEndian(),
        ChecksumSize = data.ReadUInt32LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into a ChecksumMapEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ChecksumMapEntry on success, null on error</returns>
public static ChecksumMapEntry ParseChecksumMapEntry(Stream data)
{
    // Maps a run of checksums back into the checksum table
    return new ChecksumMapEntry
    {
        ChecksumCount = data.ReadUInt32LittleEndian(),
        FirstChecksumIndex = data.ReadUInt32LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into a ChecksumMapHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ChecksumMapHeader on success, null on error</returns>
public static ChecksumMapHeader ParseChecksumMapHeader(Stream data)
{
    // Four consecutive little-endian 32-bit fields
    return new ChecksumMapHeader
    {
        Dummy0 = data.ReadUInt32LittleEndian(),
        Dummy1 = data.ReadUInt32LittleEndian(),
        ItemCount = data.ReadUInt32LittleEndian(),
        ChecksumCount = data.ReadUInt32LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into a DirectoryCopyEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled DirectoryCopyEntry on success, null on error</returns>
public static DirectoryCopyEntry ParseDirectoryCopyEntry(Stream data)
{
    // Single little-endian index into the directory entries
    return new DirectoryCopyEntry
    {
        DirectoryIndex = data.ReadUInt32LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into a DirectoryEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled DirectoryEntry on success, null on error</returns>
public static DirectoryEntry ParseDirectoryEntry(Stream data)
{
    // Fields are consecutive little-endian 32-bit values; the flags
    // field is reinterpreted as the HL_NCF_FLAG enum.
    return new DirectoryEntry
    {
        NameOffset = data.ReadUInt32LittleEndian(),
        ItemSize = data.ReadUInt32LittleEndian(),
        ChecksumIndex = data.ReadUInt32LittleEndian(),
        DirectoryFlags = (HL_NCF_FLAG)data.ReadUInt32LittleEndian(),
        ParentIndex = data.ReadUInt32LittleEndian(),
        NextIndex = data.ReadUInt32LittleEndian(),
        FirstIndex = data.ReadUInt32LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into a DirectoryHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled DirectoryHeader on success, null on error</returns>
public static DirectoryHeader ParseDirectoryHeader(Stream data)
{
    // Fourteen consecutive little-endian 32-bit fields, read in
    // on-disk order via the object initializer.
    return new DirectoryHeader
    {
        Dummy0 = data.ReadUInt32LittleEndian(),
        CacheID = data.ReadUInt32LittleEndian(),
        LastVersionPlayed = data.ReadUInt32LittleEndian(),
        ItemCount = data.ReadUInt32LittleEndian(),
        FileCount = data.ReadUInt32LittleEndian(),
        ChecksumDataLength = data.ReadUInt32LittleEndian(),
        DirectorySize = data.ReadUInt32LittleEndian(),
        NameSize = data.ReadUInt32LittleEndian(),
        Info1Count = data.ReadUInt32LittleEndian(),
        CopyCount = data.ReadUInt32LittleEndian(),
        LocalCount = data.ReadUInt32LittleEndian(),
        Dummy1 = data.ReadUInt32LittleEndian(),
        Dummy2 = data.ReadUInt32LittleEndian(),
        Checksum = data.ReadUInt32LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into a DirectoryInfo1Entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled DirectoryInfo1Entry on success, null on error</returns>
public static DirectoryInfo1Entry ParseDirectoryInfo1Entry(Stream data)
{
    // Single little-endian 32-bit field of unknown purpose
    return new DirectoryInfo1Entry
    {
        Dummy0 = data.ReadUInt32LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into a DirectoryInfo2Entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled DirectoryInfo2Entry on success, null on error</returns>
public static DirectoryInfo2Entry ParseDirectoryInfo2Entry(Stream data)
{
    // Single little-endian 32-bit field of unknown purpose
    return new DirectoryInfo2Entry
    {
        Dummy0 = data.ReadUInt32LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into a DirectoryLocalEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled DirectoryLocalEntry on success, null on error</returns>
public static DirectoryLocalEntry ParseDirectoryLocalEntry(Stream data)
{
    // Single little-endian index into the directory entries
    return new DirectoryLocalEntry
    {
        DirectoryIndex = data.ReadUInt32LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into a Header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Header on success, null on error</returns>
public static Header ParseHeader(Stream data)
{
    // Eleven consecutive little-endian 32-bit fields, read in
    // on-disk order via the object initializer.
    return new Header
    {
        Dummy0 = data.ReadUInt32LittleEndian(),
        MajorVersion = data.ReadUInt32LittleEndian(),
        MinorVersion = data.ReadUInt32LittleEndian(),
        CacheID = data.ReadUInt32LittleEndian(),
        LastVersionPlayed = data.ReadUInt32LittleEndian(),
        Dummy1 = data.ReadUInt32LittleEndian(),
        Dummy2 = data.ReadUInt32LittleEndian(),
        FileSize = data.ReadUInt32LittleEndian(),
        BlockSize = data.ReadUInt32LittleEndian(),
        BlockCount = data.ReadUInt32LittleEndian(),
        Dummy3 = data.ReadUInt32LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into an UnknownEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled UnknownEntry on success, null on error</returns>
public static UnknownEntry ParseUnknownEntry(Stream data)
{
    // Single little-endian 32-bit field of unknown purpose
    return new UnknownEntry
    {
        Dummy0 = data.ReadUInt32LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into an UnknownHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled UnknownHeader on success, null on error</returns>
public static UnknownHeader ParseUnknownHeader(Stream data)
{
    // Two consecutive little-endian 32-bit fields
    return new UnknownHeader
    {
        Dummy0 = data.ReadUInt32LittleEndian(),
        Dummy1 = data.ReadUInt32LittleEndian(),
    };
}
}
}

View File

@@ -1,5 +1,6 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.NewExecutable;
using static SabreTools.Models.NewExecutable.Constants;
@@ -39,8 +40,8 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the executable header
data.Seek(initialOffset + stub.Header.NewExeHeaderAddr, SeekOrigin.Begin);
var header = data.ReadType<ExecutableHeader>();
if (header?.Magic != SignatureString)
var header = ParseExecutableHeader(data);
if (header.Magic != SignatureString)
return null;
// Set the executable header
@@ -57,14 +58,15 @@ namespace SabreTools.Serialization.Deserializers
if (tableAddress >= data.Length)
return executable;
// Try to parse the segment table
// Seek to the segment table
data.Seek(tableAddress, SeekOrigin.Begin);
var segmentTable = ParseSegmentTable(data, header.FileSegmentCount);
if (segmentTable == null)
return null;
// Set the segment table
executable.SegmentTable = segmentTable;
executable.SegmentTable = new SegmentTableEntry[header.FileSegmentCount];
for (int i = 0; i < header.FileSegmentCount; i++)
{
executable.SegmentTable[i] = ParseSegmentTableEntry(data);
}
#endregion
@@ -77,14 +79,11 @@ namespace SabreTools.Serialization.Deserializers
if (tableAddress >= data.Length)
return executable;
// Try to parse the resource table
// Seek to the resource table
data.Seek(tableAddress, SeekOrigin.Begin);
var resourceTable = ParseResourceTable(data, header.ResourceEntriesCount);
if (resourceTable == null)
return null;
// Set the resource table
executable.ResourceTable = resourceTable;
executable.ResourceTable = ParseResourceTable(data, header.ResourceEntriesCount);
#endregion
@@ -100,14 +99,11 @@ namespace SabreTools.Serialization.Deserializers
if (tableAddress >= data.Length)
return executable;
// Try to parse the resident-name table
// Seek to the resident-name table
data.Seek(tableAddress, SeekOrigin.Begin);
var residentNameTable = ParseResidentNameTable(data, endOffset);
if (residentNameTable == null)
return null;
// Set the resident-name table
executable.ResidentNameTable = residentNameTable;
executable.ResidentNameTable = ParseResidentNameTable(data, endOffset);
#endregion
@@ -120,14 +116,15 @@ namespace SabreTools.Serialization.Deserializers
if (tableAddress >= data.Length)
return executable;
// Try to parse the module-reference table
// Seek to the module-reference table
data.Seek(tableAddress, SeekOrigin.Begin);
var moduleReferenceTable = ParseModuleReferenceTable(data, header.ModuleReferenceTableSize);
if (moduleReferenceTable == null)
return null;
// Set the module-reference table
executable.ModuleReferenceTable = moduleReferenceTable;
executable.ModuleReferenceTable = new ModuleReferenceTableEntry[header.ModuleReferenceTableSize];
for (int i = 0; i < header.ModuleReferenceTableSize; i++)
{
executable.ModuleReferenceTable[i] = ParseModuleReferenceTableEntry(data);
}
#endregion
@@ -143,14 +140,11 @@ namespace SabreTools.Serialization.Deserializers
if (tableAddress >= data.Length)
return executable;
// Try to parse the imported-name table
// Seek to the imported-name table
data.Seek(tableAddress, SeekOrigin.Begin);
var importedNameTable = ParseImportedNameTable(data, endOffset);
if (importedNameTable == null)
return null;
// Set the imported-name table
executable.ImportedNameTable = importedNameTable;
executable.ImportedNameTable = ParseImportedNameTable(data, endOffset);
#endregion
@@ -167,14 +161,11 @@ namespace SabreTools.Serialization.Deserializers
if (tableAddress >= data.Length)
return executable;
// Try to parse the imported-name table
// Seek to the imported-name table
data.Seek(tableAddress, SeekOrigin.Begin);
var entryTable = ParseEntryTable(data, endOffset);
if (entryTable == null)
return null;
// Set the entry table
executable.EntryTable = entryTable;
executable.EntryTable = ParseEntryTable(data, endOffset);
#endregion
@@ -189,14 +180,11 @@ namespace SabreTools.Serialization.Deserializers
if (tableAddress >= data.Length)
return executable;
// Try to parse the nonresident-name table
// Seek to the nonresident-name table
data.Seek(tableAddress, SeekOrigin.Begin);
var nonResidentNameTable = ParseNonResidentNameTable(data, endOffset);
if (nonResidentNameTable == null)
return null;
// Set the nonresident-name table
executable.NonResidentNameTable = nonResidentNameTable;
executable.NonResidentNameTable = ParseNonResidentNameTable(data, endOffset);
#endregion
@@ -210,68 +198,229 @@ namespace SabreTools.Serialization.Deserializers
}
/// <summary>
/// Parse a Stream into a segment table
/// Parse a Stream into an entry table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="count">Number of segment table entries to read</param>
/// <returns>Filled segment table on success, null on error</returns>
public static SegmentTableEntry[]? ParseSegmentTable(Stream data, int count)
/// <param name="endOffset">First address not part of the entry table</param>
/// <returns>Filled entry table on success, null on error</returns>
public static EntryTableBundle[] ParseEntryTable(Stream data, int endOffset)
{
var segmentTable = new SegmentTableEntry[count];
var entryTable = new List<EntryTableBundle>();
for (int i = 0; i < count; i++)
while (data.Position < endOffset && data.Position < data.Length)
{
var entry = data.ReadType<SegmentTableEntry>();
if (entry == null)
return null;
var entry = new EntryTableBundle();
entry.EntryCount = data.ReadByteValue();
entry.SegmentIndicator = data.ReadByteValue();
switch (entry.GetEntryType())
{
case SegmentEntryType.Unused:
break;
segmentTable[i] = entry;
case SegmentEntryType.FixedSegment:
entry.FixedFlagWord = (FixedSegmentEntryFlag)data.ReadByteValue();
entry.FixedOffset = data.ReadUInt16LittleEndian();
break;
case SegmentEntryType.MoveableSegment:
entry.MoveableFlagWord = (MoveableSegmentEntryFlag)data.ReadByteValue();
entry.MoveableReserved = data.ReadUInt16LittleEndian();
entry.MoveableSegmentNumber = data.ReadByteValue();
entry.MoveableOffset = data.ReadUInt16LittleEndian();
break;
}
entryTable.Add(entry);
}
return segmentTable;
return [.. entryTable];
}
/// <summary>
/// Parse a Stream into a resource table
/// Parse a Stream into an ExecutableHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ExecutableHeader on success, null on error</returns>
/// <summary>
/// Parse a Stream into an ExecutableHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ExecutableHeader on success, null on error</returns>
public static ExecutableHeader ParseExecutableHeader(Stream data)
{
    // Members are read in on-disk order; the object initializer
    // evaluates top-to-bottom, so stream position stays correct.
    return new ExecutableHeader
    {
        // The magic is stored as two ASCII characters
        Magic = Encoding.ASCII.GetString(data.ReadBytes(2)),
        LinkerVersion = data.ReadByteValue(),
        LinkerRevision = data.ReadByteValue(),
        EntryTableOffset = data.ReadUInt16LittleEndian(),
        EntryTableSize = data.ReadUInt16LittleEndian(),
        CrcChecksum = data.ReadUInt32LittleEndian(),
        FlagWord = (HeaderFlag)data.ReadUInt16LittleEndian(),
        AutomaticDataSegmentNumber = data.ReadUInt16LittleEndian(),
        InitialHeapAlloc = data.ReadUInt16LittleEndian(),
        InitialStackAlloc = data.ReadUInt16LittleEndian(),
        InitialCSIPSetting = data.ReadUInt32LittleEndian(),
        InitialSSSPSetting = data.ReadUInt32LittleEndian(),
        FileSegmentCount = data.ReadUInt16LittleEndian(),
        ModuleReferenceTableSize = data.ReadUInt16LittleEndian(),
        NonResidentNameTableSize = data.ReadUInt16LittleEndian(),
        SegmentTableOffset = data.ReadUInt16LittleEndian(),
        ResourceTableOffset = data.ReadUInt16LittleEndian(),
        ResidentNameTableOffset = data.ReadUInt16LittleEndian(),
        ModuleReferenceTableOffset = data.ReadUInt16LittleEndian(),
        ImportedNamesTableOffset = data.ReadUInt16LittleEndian(),
        NonResidentNamesTableOffset = data.ReadUInt32LittleEndian(),
        MovableEntriesCount = data.ReadUInt16LittleEndian(),
        SegmentAlignmentShiftCount = data.ReadUInt16LittleEndian(),
        ResourceEntriesCount = data.ReadUInt16LittleEndian(),
        TargetOperatingSystem = (Models.NewExecutable.OperatingSystem)data.ReadByteValue(),
        AdditionalFlags = (OS2Flag)data.ReadByteValue(),
        ReturnThunkOffset = data.ReadUInt16LittleEndian(),
        SegmentReferenceThunkOffset = data.ReadUInt16LittleEndian(),
        MinCodeSwapAreaSize = data.ReadUInt16LittleEndian(),
        WindowsSDKRevision = data.ReadByteValue(),
        WindowsSDKVersion = data.ReadByteValue(),
    };
}
/// <summary>
/// Parse a Stream into an imported-name table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the imported-name table</param>
/// <returns>Filled imported-name table on success, null on error</returns>
public static Dictionary<ushort, ImportedNameTableEntry> ParseImportedNameTable(Stream data, int endOffset)
{
    var table = new Dictionary<ushort, ImportedNameTableEntry>();

    // Entries are keyed by their byte offset within the stream;
    // stop at the table end or at end-of-stream, whichever is first.
    while (data.Position < endOffset && data.Position < data.Length)
    {
        ushort entryOffset = (ushort)data.Position;
        table[entryOffset] = ParseImportedNameTableEntry(data);
    }

    return table;
}
/// <summary>
/// Parse a Stream into an ImportedNameTableEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ImportedNameTableEntry on success, null on error</returns>
public static ImportedNameTableEntry ParseImportedNameTableEntry(Stream data)
{
    // Pascal-style string: one length byte followed by that many bytes
    byte nameLength = data.ReadByteValue();
    return new ImportedNameTableEntry
    {
        Length = nameLength,
        NameString = data.ReadBytes(nameLength),
    };
}
/// <summary>
/// Parse a Stream into a ModuleReferenceTableEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ModuleReferenceTableEntry on success, null on error</returns>
public static ModuleReferenceTableEntry ParseModuleReferenceTableEntry(Stream data)
{
    // Single little-endian offset into the imported-name table
    return new ModuleReferenceTableEntry
    {
        Offset = data.ReadUInt16LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into a nonresident-name table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the nonresident-name table</param>
/// <returns>Filled nonresident-name table on success, null on error</returns>
public static NonResidentNameTableEntry[] ParseNonResidentNameTable(Stream data, int endOffset)
{
    var entries = new List<NonResidentNameTableEntry>();

    // Consume entries until the table end or end-of-stream
    while (data.Position < endOffset && data.Position < data.Length)
    {
        entries.Add(ParseNonResidentNameTableEntry(data));
    }

    return [.. entries];
}
/// <summary>
/// Parse a Stream into a NonResidentNameTableEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled NonResidentNameTableEntry on success, null on error</returns>
public static NonResidentNameTableEntry ParseNonResidentNameTableEntry(Stream data)
{
    // Pascal-style name string followed by a little-endian ordinal
    byte nameLength = data.ReadByteValue();
    return new NonResidentNameTableEntry
    {
        Length = nameLength,
        NameString = data.ReadBytes(nameLength),
        OrdinalNumber = data.ReadUInt16LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into a resident-name table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the resident-name table</param>
/// <returns>Filled resident-name table on success, null on error</returns>
public static ResidentNameTableEntry[] ParseResidentNameTable(Stream data, int endOffset)
{
    var entries = new List<ResidentNameTableEntry>();

    // Consume entries until the table end or end-of-stream
    while (data.Position < endOffset && data.Position < data.Length)
    {
        entries.Add(ParseResidentNameTableEntry(data));
    }

    return [.. entries];
}
/// <summary>
/// Parse a Stream into a ResidentNameTableEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ResidentNameTableEntry on success, null on error</returns>
public static ResidentNameTableEntry ParseResidentNameTableEntry(Stream data)
{
    // Pascal-style name string followed by a little-endian ordinal
    byte nameLength = data.ReadByteValue();
    return new ResidentNameTableEntry
    {
        Length = nameLength,
        NameString = data.ReadBytes(nameLength),
        OrdinalNumber = data.ReadUInt16LittleEndian(),
    };
}
/// <summary>
/// Parse a Stream into a ResourceTable
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="count">Number of resource table entries to read</param>
/// <returns>Filled resource table on success, null on error</returns>
public static ResourceTable? ParseResourceTable(Stream data, ushort count)
/// <returns>Filled ResourceTable on success, null on error</returns>
public static ResourceTable ParseResourceTable(Stream data, ushort count)
{
long initialOffset = data.Position;
var resourceTable = new ResourceTable();
resourceTable.AlignmentShiftCount = data.ReadUInt16();
resourceTable.AlignmentShiftCount = data.ReadUInt16LittleEndian();
var resourceTypes = new List<ResourceTypeInformationEntry>();
for (int i = 0; i < count; i++)
{
var entry = new ResourceTypeInformationEntry();
entry.TypeID = data.ReadUInt16();
entry.ResourceCount = data.ReadUInt16();
entry.Reserved = data.ReadUInt32();
var entry = ParseResourceTypeInformationEntry(data);
resourceTypes.Add(entry);
// A zero type ID marks the end of the resource type information blocks.
if (entry.TypeID == 0)
{
resourceTypes.Add(entry);
break;
}
entry.Resources = new ResourceTypeResourceEntry[entry.ResourceCount];
for (int j = 0; j < entry.ResourceCount; j++)
{
// TODO: Should we read and store the resource data?
var resource = data.ReadType<ResourceTypeResourceEntry>();
if (resource == null)
return null;
entry.Resources[j] = resource;
}
resourceTypes.Add(entry);
}
resourceTable.ResourceTypes = [.. resourceTypes];
@@ -314,9 +463,6 @@ namespace SabreTools.Serialization.Deserializers
data.Seek(stringOffset, SeekOrigin.Begin);
var str = ParseResourceTypeAndNameString(data);
if (str == null)
return null;
resourceTable.TypeAndNameStrings[stringOffsets[i]] = str;
}
@@ -324,192 +470,80 @@ namespace SabreTools.Serialization.Deserializers
}
/// <summary>
/// Parse a Stream into a resource type and name string
/// Parse a Stream into an ResourceTypeInformationEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled resource type and name string on success, null on error</returns>
public static ResourceTypeAndNameString? ParseResourceTypeAndNameString(Stream data)
/// <returns>Filled ResourceTypeInformationEntry on success, null on error</returns>
public static ResourceTypeInformationEntry ParseResourceTypeInformationEntry(Stream data)
{
var str = new ResourceTypeAndNameString();
var obj = new ResourceTypeInformationEntry();
str.Length = data.ReadByteValue();
str.Text = data.ReadBytes(str.Length);
obj.TypeID = data.ReadUInt16LittleEndian();
obj.ResourceCount = data.ReadUInt16LittleEndian();
obj.Reserved = data.ReadUInt32LittleEndian();
return str;
}
// A zero type ID marks the end of the resource type information blocks.
if (obj.TypeID == 0)
return obj;
/// <summary>
/// Parse a Stream into a resident-name table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the resident-name table</param>
/// <returns>Filled resident-name table on success, null on error</returns>
public static ResidentNameTableEntry[]? ParseResidentNameTable(Stream data, int endOffset)
{
var residentNameTable = new List<ResidentNameTableEntry>();
while (data.Position < endOffset && data.Position < data.Length)
obj.Resources = new ResourceTypeResourceEntry[obj.ResourceCount];
for (int i = 0; i < obj.ResourceCount; i++)
{
var entry = ParseResidentNameTableEntry(data);
if (entry == null)
return null;
residentNameTable.Add(entry);
// TODO: Should we read and store the resource data?
obj.Resources[i] = ParseResourceTypeResourceEntry(data);
}
return [.. residentNameTable];
return obj;
}
/// <summary>
/// Parse a Stream into a resident-name table entry
/// Parse a Stream into a ResourceTypeAndNameString
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled resident-name table entry on success, null on error</returns>
public static ResidentNameTableEntry? ParseResidentNameTableEntry(Stream data)
/// <returns>Filled ResourceTypeAndNameString on success, null on error</returns>
public static ResourceTypeAndNameString ParseResourceTypeAndNameString(Stream data)
{
var entry = new ResidentNameTableEntry();
var obj = new ResourceTypeAndNameString();
entry.Length = data.ReadByteValue();
entry.NameString = data.ReadBytes(entry.Length);
entry.OrdinalNumber = data.ReadUInt16();
obj.Length = data.ReadByteValue();
obj.Text = data.ReadBytes(obj.Length);
return entry;
return obj;
}
/// <summary>
/// Parse a Stream into a module-reference table
/// Parse a Stream into an ResourceTypeResourceEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="count">Number of module-reference table entries to read</param>
/// <returns>Filled module-reference table on success, null on error</returns>
public static ModuleReferenceTableEntry[]? ParseModuleReferenceTable(Stream data, int count)
/// <returns>Filled ResourceTypeResourceEntry on success, null on error</returns>
public static ResourceTypeResourceEntry ParseResourceTypeResourceEntry(Stream data)
{
var moduleReferenceTable = new ModuleReferenceTableEntry[count];
var obj = new ResourceTypeResourceEntry();
for (int i = 0; i < count; i++)
{
var entry = data.ReadType<ModuleReferenceTableEntry>();
if (entry == null)
return null;
obj.Offset = data.ReadUInt16LittleEndian();
obj.Length = data.ReadUInt16LittleEndian();
obj.FlagWord = (ResourceTypeResourceFlag)data.ReadUInt16LittleEndian();
obj.ResourceID = data.ReadUInt16LittleEndian();
obj.Reserved = data.ReadUInt32LittleEndian();
moduleReferenceTable[i] = entry;
}
return moduleReferenceTable;
return obj;
}
/// <summary>
/// Parse a Stream into an imported-name table
/// Parse a Stream into an SegmentTableEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the imported-name table</param>
/// <returns>Filled imported-name table on success, null on error</returns>
public static Dictionary<ushort, ImportedNameTableEntry>? ParseImportedNameTable(Stream data, int endOffset)
/// <returns>Filled SegmentTableEntry on success, null on error</returns>
public static SegmentTableEntry ParseSegmentTableEntry(Stream data)
{
var importedNameTable = new Dictionary<ushort, ImportedNameTableEntry>();
var obj = new SegmentTableEntry();
while (data.Position < endOffset && data.Position < data.Length)
{
ushort currentOffset = (ushort)data.Position;
var entry = ParseImportedNameTableEntry(data);
if (entry == null)
return null;
obj.Offset = data.ReadUInt16LittleEndian();
obj.Length = data.ReadUInt16LittleEndian();
obj.FlagWord = (SegmentTableEntryFlag)data.ReadUInt16LittleEndian();
obj.MinimumAllocationSize = data.ReadUInt16LittleEndian();
importedNameTable[currentOffset] = entry;
}
return importedNameTable;
}
/// <summary>
/// Parse a Stream into an imported-name table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled imported-name table entry on success, null on error</returns>
public static ImportedNameTableEntry? ParseImportedNameTableEntry(Stream data)
{
var entry = new ImportedNameTableEntry();
entry.Length = data.ReadByteValue();
entry.NameString = data.ReadBytes(entry.Length);
return entry;
}
/// <summary>
/// Parse a Stream into an entry table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the entry table</param>
/// <returns>Filled entry table on success, null on error</returns>
public static EntryTableBundle[] ParseEntryTable(Stream data, int endOffset)
{
var entryTable = new List<EntryTableBundle>();
while (data.Position < endOffset && data.Position < data.Length)
{
var entry = new EntryTableBundle();
entry.EntryCount = data.ReadByteValue();
entry.SegmentIndicator = data.ReadByteValue();
switch (entry.GetEntryType())
{
case SegmentEntryType.Unused:
break;
case SegmentEntryType.FixedSegment:
entry.FixedFlagWord = (FixedSegmentEntryFlag)data.ReadByteValue();
entry.FixedOffset = data.ReadUInt16();
break;
case SegmentEntryType.MoveableSegment:
entry.MoveableFlagWord = (MoveableSegmentEntryFlag)data.ReadByteValue();
entry.MoveableReserved = data.ReadUInt16();
entry.MoveableSegmentNumber = data.ReadByteValue();
entry.MoveableOffset = data.ReadUInt16();
break;
}
entryTable.Add(entry);
}
return [.. entryTable];
}
/// <summary>
/// Parse a Stream into a nonresident-name table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the nonresident-name table</param>
/// <returns>Filled nonresident-name table on success, null on error</returns>
public static NonResidentNameTableEntry[]? ParseNonResidentNameTable(Stream data, int endOffset)
{
var residentNameTable = new List<NonResidentNameTableEntry>();
while (data.Position < endOffset && data.Position < data.Length)
{
var entry = ParseNonResidentNameTableEntry(data);
if (entry == null)
return null;
residentNameTable.Add(entry);
}
return [.. residentNameTable];
}
/// <summary>
/// Parse a Stream into a nonresident-name table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled nonresident-name table entry on success, null on error</returns>
public static NonResidentNameTableEntry? ParseNonResidentNameTableEntry(Stream data)
{
var entry = new NonResidentNameTableEntry();
entry.Length = data.ReadByteValue();
entry.NameString = data.ReadBytes(entry.Length);
entry.OrdinalNumber = data.ReadUInt16();
return entry;
return obj;
}
}
}

View File

@@ -22,27 +22,16 @@ namespace SabreTools.Serialization.Deserializers
#region Header
// Try to parse the header
var header = data.ReadType<CommonHeader>();
if (header == null)
return null;
// Set the cart image header
cart.CommonHeader = header;
cart.CommonHeader = ParseCommonHeader(data);
#endregion
#region Extended DSi Header
// If we have a DSi-compatible cartridge
if (header.UnitCode == Unitcode.NDSPlusDSi || header.UnitCode == Unitcode.DSi)
{
var extendedDSiHeader = data.ReadType<ExtendedDSiHeader>();
if (extendedDSiHeader == null)
return null;
cart.ExtendedDSiHeader = extendedDSiHeader;
}
if (cart.CommonHeader.UnitCode == Unitcode.NDSPlusDSi || cart.CommonHeader.UnitCode == Unitcode.DSi)
cart.ExtendedDSiHeader = ParseExtendedDSiHeader(data);
#endregion
@@ -64,27 +53,22 @@ namespace SabreTools.Serialization.Deserializers
#region Name Table
// Try to get the name table offset
long nameTableOffset = header.FileNameTableOffset;
long nameTableOffset = cart.CommonHeader.FileNameTableOffset;
if (nameTableOffset < 0 || nameTableOffset > data.Length)
return null;
// Seek to the name table
data.Seek(nameTableOffset, SeekOrigin.Begin);
// Try to parse the name table
var nameTable = ParseNameTable(data);
if (nameTable == null)
return null;
// Set the name table
cart.NameTable = nameTable;
cart.NameTable = ParseNameTable(data);
#endregion
#region File Allocation Table
// Try to get the file allocation table offset
long fileAllocationTableOffset = header.FileAllocationTableOffset;
long fileAllocationTableOffset = cart.CommonHeader.FileAllocationTableOffset;
if (fileAllocationTableOffset < 0 || fileAllocationTableOffset > data.Length)
return null;
@@ -95,12 +79,9 @@ namespace SabreTools.Serialization.Deserializers
var fileAllocationTable = new List<FileAllocationTableEntry>();
// Try to parse the file allocation table
while (data.Position - fileAllocationTableOffset < header.FileAllocationTableLength)
while (data.Position - fileAllocationTableOffset < cart.CommonHeader.FileAllocationTableLength)
{
var entry = data.ReadType<FileAllocationTableEntry>();
if (entry == null)
return null;
var entry = ParseFileAllocationTableEntry(data);
fileAllocationTable.Add(entry);
}
@@ -121,12 +102,203 @@ namespace SabreTools.Serialization.Deserializers
}
}
/// <summary>
/// Parse a Stream into a CommonHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled CommonHeader on success, null on error</returns>
public static CommonHeader ParseCommonHeader(Stream data)
{
var obj = new CommonHeader();
byte[] gameTitle = data.ReadBytes(12);
obj.GameTitle = Encoding.ASCII.GetString(gameTitle).TrimEnd('\0');
obj.GameCode = data.ReadUInt32LittleEndian();
byte[] makerCode = data.ReadBytes(2);
obj.MakerCode = Encoding.ASCII.GetString(makerCode);
obj.UnitCode = (Unitcode)data.ReadByteValue();
obj.EncryptionSeedSelect = data.ReadByteValue();
obj.DeviceCapacity = data.ReadByteValue();
obj.Reserved1 = data.ReadBytes(7);
obj.GameRevision = data.ReadUInt16LittleEndian();
obj.RomVersion = data.ReadByteValue();
obj.InternalFlags = data.ReadByteValue();
obj.ARM9RomOffset = data.ReadUInt32LittleEndian();
obj.ARM9EntryAddress = data.ReadUInt32LittleEndian();
obj.ARM9LoadAddress = data.ReadUInt32LittleEndian();
obj.ARM9Size = data.ReadUInt32LittleEndian();
obj.ARM7RomOffset = data.ReadUInt32LittleEndian();
obj.ARM7EntryAddress = data.ReadUInt32LittleEndian();
obj.ARM7LoadAddress = data.ReadUInt32LittleEndian();
obj.ARM7Size = data.ReadUInt32LittleEndian();
obj.FileNameTableOffset = data.ReadUInt32LittleEndian();
obj.FileNameTableLength = data.ReadUInt32LittleEndian();
obj.FileAllocationTableOffset = data.ReadUInt32LittleEndian();
obj.FileAllocationTableLength = data.ReadUInt32LittleEndian();
obj.ARM9OverlayOffset = data.ReadUInt32LittleEndian();
obj.ARM9OverlayLength = data.ReadUInt32LittleEndian();
obj.ARM7OverlayOffset = data.ReadUInt32LittleEndian();
obj.ARM7OverlayLength = data.ReadUInt32LittleEndian();
obj.NormalCardControlRegisterSettings = data.ReadUInt32LittleEndian();
obj.SecureCardControlRegisterSettings = data.ReadUInt32LittleEndian();
obj.IconBannerOffset = data.ReadUInt32LittleEndian();
obj.SecureAreaCRC = data.ReadUInt16LittleEndian();
obj.SecureTransferTimeout = data.ReadUInt16LittleEndian();
obj.ARM9Autoload = data.ReadUInt32LittleEndian();
obj.ARM7Autoload = data.ReadUInt32LittleEndian();
obj.SecureDisable = data.ReadBytes(8);
obj.NTRRegionRomSize = data.ReadUInt32LittleEndian();
obj.HeaderSize = data.ReadUInt32LittleEndian();
obj.Reserved2 = data.ReadBytes(56);
obj.NintendoLogo = data.ReadBytes(156);
obj.NintendoLogoCRC = data.ReadUInt16LittleEndian();
obj.HeaderCRC = data.ReadUInt16LittleEndian();
obj.DebuggerReserved = data.ReadBytes(0x20);
return obj;
}
/// <summary>
/// Parse a Stream into a ExtendedDSiHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ExtendedDSiHeader on success, null on error</returns>
public static ExtendedDSiHeader ParseExtendedDSiHeader(Stream data)
{
var obj = new ExtendedDSiHeader();
obj.GlobalMBK15Settings = new uint[5];
for (int i = 0; i < 5; i++)
{
obj.GlobalMBK15Settings[i] = data.ReadUInt32LittleEndian();
}
obj.LocalMBK68SettingsARM9 = new uint[3];
for (int i = 0; i < 3; i++)
{
obj.LocalMBK68SettingsARM9[i] = data.ReadUInt32LittleEndian();
}
obj.LocalMBK68SettingsARM7 = new uint[3];
for (int i = 0; i < 3; i++)
{
obj.LocalMBK68SettingsARM7[i] = data.ReadUInt32LittleEndian();
}
obj.GlobalMBK9Setting = data.ReadUInt32LittleEndian();
obj.RegionFlags = data.ReadUInt32LittleEndian();
obj.AccessControl = data.ReadUInt32LittleEndian();
obj.ARM7SCFGEXTMask = data.ReadUInt32LittleEndian();
obj.ReservedFlags = data.ReadUInt32LittleEndian();
obj.ARM9iRomOffset = data.ReadUInt32LittleEndian();
obj.Reserved3 = data.ReadUInt32LittleEndian();
obj.ARM9iLoadAddress = data.ReadUInt32LittleEndian();
obj.ARM9iSize = data.ReadUInt32LittleEndian();
obj.ARM7iRomOffset = data.ReadUInt32LittleEndian();
obj.Reserved4 = data.ReadUInt32LittleEndian();
obj.ARM7iLoadAddress = data.ReadUInt32LittleEndian();
obj.ARM7iSize = data.ReadUInt32LittleEndian();
obj.DigestNTRRegionOffset = data.ReadUInt32LittleEndian();
obj.DigestNTRRegionLength = data.ReadUInt32LittleEndian();
obj.DigestTWLRegionOffset = data.ReadUInt32LittleEndian();
obj.DigestTWLRegionLength = data.ReadUInt32LittleEndian();
obj.DigestSectorHashtableRegionOffset = data.ReadUInt32LittleEndian();
obj.DigestSectorHashtableRegionLength = data.ReadUInt32LittleEndian();
obj.DigestBlockHashtableRegionOffset = data.ReadUInt32LittleEndian();
obj.DigestBlockHashtableRegionLength = data.ReadUInt32LittleEndian();
obj.DigestSectorSize = data.ReadUInt32LittleEndian();
obj.DigestBlockSectorCount = data.ReadUInt32LittleEndian();
obj.IconBannerSize = data.ReadUInt32LittleEndian();
obj.Unknown1 = data.ReadUInt32LittleEndian();
obj.NTRTWLRegionRomSize = data.ReadUInt32LittleEndian();
obj.Unknown2 = data.ReadBytes(12);
obj.ModcryptArea1Offset = data.ReadUInt32LittleEndian();
obj.ModcryptArea1Size = data.ReadUInt32LittleEndian();
obj.ModcryptArea2Offset = data.ReadUInt32LittleEndian();
obj.ModcryptArea2Size = data.ReadUInt32LittleEndian();
obj.TitleID = data.ReadBytes(8);
obj.DSiWarePublicSavSize = data.ReadUInt32LittleEndian();
obj.DSiWarePrivateSavSize = data.ReadUInt32LittleEndian();
obj.ReservedZero = data.ReadBytes(176);
obj.Unknown3 = data.ReadBytes(16);
obj.ARM9WithSecureAreaSHA1HMACHash = data.ReadBytes(20);
obj.ARM7SHA1HMACHash = data.ReadBytes(20);
obj.DigestMasterSHA1HMACHash = data.ReadBytes(20);
obj.BannerSHA1HMACHash = data.ReadBytes(20);
obj.ARM9iDecryptedSHA1HMACHash = data.ReadBytes(20);
obj.ARM7iDecryptedSHA1HMACHash = data.ReadBytes(20);
obj.Reserved5 = data.ReadBytes(40);
obj.ARM9NoSecureAreaSHA1HMACHash = data.ReadBytes(20);
obj.Reserved6 = data.ReadBytes(2636);
obj.ReservedAndUnchecked = data.ReadBytes(0x180);
obj.RSASignature = data.ReadBytes(0x80);
return obj;
}
/// <summary>
/// Parse a Stream into a FileAllocationTableEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled FileAllocationTableEntry on success, null on error</returns>
public static FileAllocationTableEntry ParseFileAllocationTableEntry(Stream data)
{
var obj = new FileAllocationTableEntry();
obj.StartOffset = data.ReadUInt32LittleEndian();
obj.EndOffset = data.ReadUInt32LittleEndian();
return obj;
}
/// <summary>
/// Parse a Stream into a FolderAllocationTableEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled FolderAllocationTableEntry on success, null on error</returns>
public static FolderAllocationTableEntry ParseFolderAllocationTableEntry(Stream data)
{
var obj = new FolderAllocationTableEntry();
obj.StartOffset = data.ReadUInt32LittleEndian();
obj.FirstFileIndex = data.ReadUInt16LittleEndian();
obj.ParentFolderIndex = data.ReadByteValue();
obj.Unknown = data.ReadByteValue();
return obj;
}
/// <summary>
/// Parse a Stream into a NameListEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled NameListEntry on success, null on error</returns>
public static NameListEntry? ParseNameListEntry(Stream data)
{
var entry = new NameListEntry();
byte flagAndSize = data.ReadByteValue();
if (flagAndSize == 0xFF)
return null;
entry.Folder = (flagAndSize & 0x80) != 0;
byte size = (byte)(flagAndSize & ~0x80);
if (size > 0)
{
byte[] name = data.ReadBytes(size);
entry.Name = Encoding.UTF8.GetString(name);
}
if (entry.Folder)
entry.Index = data.ReadUInt16LittleEndian();
return entry;
}
/// <summary>
/// Parse a Stream into a name table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled name table on success, null on error</returns>
private static NameTable? ParseNameTable(Stream data)
public static NameTable ParseNameTable(Stream data)
{
var nameTable = new NameTable();
@@ -135,10 +307,7 @@ namespace SabreTools.Serialization.Deserializers
int entryCount = int.MaxValue;
while (entryCount > 0)
{
var entry = data.ReadType<FolderAllocationTableEntry>();
if (entry == null)
return null;
var entry = ParseFolderAllocationTableEntry(data);
folderAllocationTable.Add(entry);
// If we have the root entry
@@ -168,33 +337,5 @@ namespace SabreTools.Serialization.Deserializers
return nameTable;
}
/// <summary>
/// Parse a Stream into a name list entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled name list entry on success, null on error</returns>
private static NameListEntry? ParseNameListEntry(Stream data)
{
var entry = new NameListEntry();
byte flagAndSize = data.ReadByteValue();
if (flagAndSize == 0xFF)
return null;
entry.Folder = (flagAndSize & 0x80) != 0;
byte size = (byte)(flagAndSize & ~0x80);
if (size > 0)
{
byte[] name = data.ReadBytes(size);
entry.Name = Encoding.UTF8.GetString(name);
}
if (entry.Folder)
entry.Index = data.ReadUInt16();
return entry;
}
}
}

View File

@@ -1,4 +1,5 @@
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.PAK;
using static SabreTools.Models.PAK.Constants;
@@ -22,8 +23,8 @@ namespace SabreTools.Serialization.Deserializers
#region Header
// Try to parse the header
var header = data.ReadType<Header>();
if (header?.Signature != SignatureString)
var header = ParseHeader(data);
if (header.Signature != SignatureString)
return null;
// Set the package header
@@ -47,11 +48,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the directory items
for (int i = 0; i < file.DirectoryItems.Length; i++)
{
var directoryItem = data.ReadType<DirectoryItem>();
if (directoryItem == null)
return null;
file.DirectoryItems[i] = directoryItem;
file.DirectoryItems[i] = ParseDirectoryItem(data);
}
#endregion
@@ -64,5 +61,39 @@ namespace SabreTools.Serialization.Deserializers
return null;
}
}
/// <summary>
/// Parse a Stream into a DirectoryItem
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled DirectoryItem on success, null on error</returns>
public static DirectoryItem ParseDirectoryItem(Stream data)
{
var obj = new DirectoryItem();
byte[] itemName = data.ReadBytes(56);
obj.ItemName = Encoding.ASCII.GetString(itemName).TrimEnd('\0');
obj.ItemOffset = data.ReadUInt32LittleEndian();
obj.ItemLength = data.ReadUInt32LittleEndian();
return obj;
}
/// <summary>
/// Parse a Stream into a Header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Header on success, null on error</returns>
public static Header ParseHeader(Stream data)
{
var obj = new Header();
byte[] signature = data.ReadBytes(4);
obj.Signature = Encoding.ASCII.GetString(signature);
obj.DirectoryOffset = data.ReadUInt32LittleEndian();
obj.DirectoryLength = data.ReadUInt32LittleEndian();
return obj;
}
}
}

View File

@@ -24,8 +24,33 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
if (header.Signature == Version0SignatureString)
{
if (header.FileSegmentSize != Version0HSegmentSize)
return null;
}
else if (header.Signature == Version2SignatureString)
{
if (header.FileSegmentSize != Version2SegmentSize)
return null;
}
else if (header.Signature == Version3SignatureString)
{
if (header.FileSegmentSize != Version2SegmentSize
&& header.FileSegmentSize != Version3SegmentSize)
{
return null;
}
}
else if (header.Signature == Version4SignatureString)
{
if (header.FileSegmentSize != Version4SegmentSize)
return null;
}
else
{
return null;
}
// Set the archive header
archive.Header = header;
@@ -48,11 +73,7 @@ namespace SabreTools.Serialization.Deserializers
// Read all segments in turn
for (int i = 0; i < header.NumberOfFiles; i++)
{
var file = ParseSegment(data, header.FileSegmentSize);
if (file == null)
continue;
archive.Segments[i] = file;
archive.Segments[i] = ParseSegment(data, header.FileSegmentSize);
}
#endregion
@@ -67,13 +88,8 @@ namespace SabreTools.Serialization.Deserializers
// Seek to the footer
data.Seek(offset, SeekOrigin.Begin);
// Try to parse the footer
var footer = data.ReadType<Footer>();
if (footer == null)
return null;
// Set the archive footer
archive.Footer = footer;
archive.Footer = ParseFooter(data);
#endregion
@@ -87,54 +103,63 @@ namespace SabreTools.Serialization.Deserializers
}
/// <summary>
/// Parse a Stream into a header
/// Parse a Stream into a Footer
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled header on success, null on error</returns>
private static Header? ParseHeader(Stream data)
/// <returns>Filled Footer on success, null on error</returns>
public static Footer ParseFooter(Stream data)
{
var header = data.ReadType<Header>();
return header?.Signature switch
{
Version0SignatureString when header.FileSegmentSize != Version0HSegmentSize => null,
Version0SignatureString => header,
var obj = new Footer();
Version2SignatureString when header.FileSegmentSize != Version2SegmentSize => null,
Version2SignatureString => header,
obj.SystemIP = data.ReadUInt32LittleEndian();
obj.Reserved = data.ReadUInt32LittleEndian();
byte[] kingTag = data.ReadBytes(4);
obj.KingTag = Encoding.ASCII.GetString(kingTag);
Version3SignatureString when header.FileSegmentSize != Version2SegmentSize
&& header.FileSegmentSize != Version3SegmentSize => null,
Version3SignatureString => header,
Version4SignatureString when header.FileSegmentSize != Version4SegmentSize => null,
Version4SignatureString => header,
_ => null,
};
return obj;
}
/// <summary>
/// Parse a Stream into a file entry
/// Parse a Stream into a Header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Header on success, null on error</returns>
public static Header ParseHeader(Stream data)
{
var obj = new Header();
obj.HeaderSize = data.ReadUInt32LittleEndian();
byte[] signature = data.ReadBytes(4);
obj.Signature = Encoding.ASCII.GetString(signature);
obj.NumberOfFiles = data.ReadUInt32LittleEndian();
obj.FileSegmentSize = data.ReadUInt32LittleEndian();
obj.FileListOffset = data.ReadUInt32LittleEndian();
return obj;
}
/// <summary>
/// Parse a Stream into a Segment
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="segmentSize">PFF segment size</param>
/// <returns>Filled file entry on success, null on error</returns>
private static Segment ParseSegment(Stream data, uint segmentSize)
/// <returns>Filled Segment on success, null on error</returns>
public static Segment ParseSegment(Stream data, uint segmentSize)
{
var segment = new Segment();
var obj = new Segment();
segment.Deleted = data.ReadUInt32();
segment.FileLocation = data.ReadUInt32();
segment.FileSize = data.ReadUInt32();
segment.PackedDate = data.ReadUInt32();
obj.Deleted = data.ReadUInt32LittleEndian();
obj.FileLocation = data.ReadUInt32LittleEndian();
obj.FileSize = data.ReadUInt32LittleEndian();
obj.PackedDate = data.ReadUInt32LittleEndian();
byte[] fileName = data.ReadBytes(0x10);
segment.FileName = Encoding.ASCII.GetString(fileName).TrimEnd('\0');
obj.FileName = Encoding.ASCII.GetString(fileName).TrimEnd('\0');
if (segmentSize > Version2SegmentSize)
segment.ModifiedDate = data.ReadUInt32();
obj.ModifiedDate = data.ReadUInt32LittleEndian();
if (segmentSize > Version3SegmentSize)
segment.CompressionLevel = data.ReadUInt32();
obj.CompressionLevel = data.ReadUInt32LittleEndian();
return segment;
return obj;
}
}
}

View File

@@ -9,8 +9,6 @@ namespace SabreTools.Serialization.Deserializers
{
public class PIC : BaseBinaryDeserializer<DiscInformation>
{
#region IStreamDeserializer
/// <inheritdoc/>
public override DiscInformation? Deserialize(Stream? data)
{
@@ -37,9 +35,6 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < 32; i++)
{
var unit = ParseDiscInformationUnit(data);
if (unit == null)
continue;
diUnits.Add(unit);
}
@@ -55,54 +50,39 @@ namespace SabreTools.Serialization.Deserializers
}
/// <summary>
/// Parse a Stream into a disc information unit
/// Parse a Stream into a DiscInformationUnit
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled disc information unit on success, null on error</returns>
private static DiscInformationUnit? ParseDiscInformationUnit(Stream data)
/// <returns>Filled DiscInformationUnit on success, null on error</returns>
public static DiscInformationUnit ParseDiscInformationUnit(Stream data)
{
var unit = new DiscInformationUnit();
var obj = new DiscInformationUnit();
#region Header
// We only accept Disc Information units, not Emergency Brake or other
var header = data.ReadType<DiscInformationUnitHeader>();
if (header?.DiscInformationIdentifier != "DI")
return null;
// Set the information unit header
unit.Header = header;
obj.Header = ParseDiscInformationUnitHeader(data);
if (obj.Header.DiscInformationIdentifier != "DI")
return obj;
#endregion
#region Body
// Try to parse the body
var body = ParseDiscInformationUnitBody(data);
if (body == null)
return null;
// Set the information unit body
unit.Body = body;
obj.Body = ParseDiscInformationUnitBody(data);
#endregion
#region Trailer
if (unit.Body.DiscTypeIdentifier == DiscTypeIdentifierReWritable || unit.Body.DiscTypeIdentifier == DiscTypeIdentifierRecordable)
{
// Try to parse the trailer
var trailer = data.ReadType<DiscInformationUnitTrailer>();
if (trailer == null)
return null;
// Set the information unit trailer
unit.Trailer = trailer;
}
// Set the information unit trailer
if (obj.Body.DiscTypeIdentifier == DiscTypeIdentifierReWritable || obj.Body.DiscTypeIdentifier == DiscTypeIdentifierRecordable)
obj.Trailer = ParseDiscInformationUnitTrailer(data);
#endregion
return unit;
return obj;
}
/// <summary>
@@ -110,29 +90,68 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled disc information unit body on success, null on error</returns>
private static DiscInformationUnitBody? ParseDiscInformationUnitBody(Stream data)
private static DiscInformationUnitBody ParseDiscInformationUnitBody(Stream data)
{
var body = new DiscInformationUnitBody();
var obj = new DiscInformationUnitBody();
byte[] dti = data.ReadBytes(3);
body.DiscTypeIdentifier = Encoding.ASCII.GetString(dti);
body.DiscSizeClassVersion = data.ReadByteValue();
switch (body.DiscTypeIdentifier)
obj.DiscTypeIdentifier = Encoding.ASCII.GetString(dti);
obj.DiscSizeClassVersion = data.ReadByteValue();
switch (obj.DiscTypeIdentifier)
{
case DiscTypeIdentifierROM:
case DiscTypeIdentifierROMUltra:
case DiscTypeIdentifierXGD4:
body.FormatDependentContents = data.ReadBytes(52);
obj.FormatDependentContents = data.ReadBytes(52);
break;
case DiscTypeIdentifierReWritable:
case DiscTypeIdentifierRecordable:
body.FormatDependentContents = data.ReadBytes(100);
obj.FormatDependentContents = data.ReadBytes(100);
break;
}
return body;
return obj;
}
#endregion
/// <summary>
/// Parse a Stream into a DiscInformationUnitHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>DiscInformationUnitHeader on success, null on error</returns>
public static DiscInformationUnitHeader ParseDiscInformationUnitHeader(Stream data)
{
var obj = new DiscInformationUnitHeader();
byte[] discInformationIdentifier = data.ReadBytes(2);
obj.DiscInformationIdentifier = Encoding.ASCII.GetString(discInformationIdentifier);
if (obj.DiscInformationIdentifier != "DI")
return obj;
obj.DiscInformationFormat = data.ReadByteValue();
obj.NumberOfUnitsInBlock = data.ReadByteValue();
obj.Reserved0 = data.ReadByteValue();
obj.SequenceNumber = data.ReadByteValue();
obj.BytesInUse = data.ReadByteValue();
obj.Reserved1 = data.ReadByteValue();
return obj;
}
/// <summary>
/// Parse a Stream into a DiscInformationUnitTrailer
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>DiscInformationUnitTrailer on success, null on error</returns>
public static DiscInformationUnitTrailer ParseDiscInformationUnitTrailer(Stream data)
{
var obj = new DiscInformationUnitTrailer();
obj.DiscManufacturerID = data.ReadBytes(6);
obj.MediaTypeID = data.ReadBytes(3);
obj.TimeStamp = data.ReadUInt16LittleEndian();
obj.ProductRevisionNumber = data.ReadByteValue();
return obj;
}
}
}

View File

@@ -6,6 +6,7 @@ using SabreTools.IO.Extensions;
using SabreTools.Models.PKZIP;
using static SabreTools.Models.PKZIP.Constants;
// TODO: Finish replacing ReadType
namespace SabreTools.Serialization.Deserializers
{
public class PKZIP : BaseBinaryDeserializer<Archive>
@@ -320,7 +321,7 @@ namespace SabreTools.Serialization.Deserializers
while (data.Position > 0)
{
// Read the potential signature
uint possibleSignature = data.ReadUInt32();
uint possibleSignature = data.ReadUInt32LittleEndian();
if (possibleSignature == EndOfCentralDirectoryRecordSignature)
{
long signaturePosition = data.Position - 4;
@@ -360,17 +361,17 @@ namespace SabreTools.Serialization.Deserializers
{
var record = new EndOfCentralDirectoryRecord();
record.Signature = data.ReadUInt32();
record.Signature = data.ReadUInt32LittleEndian();
if (record.Signature != EndOfCentralDirectoryRecordSignature)
return null;
record.DiskNumber = data.ReadUInt16();
record.StartDiskNumber = data.ReadUInt16();
record.TotalEntriesOnDisk = data.ReadUInt16();
record.TotalEntries = data.ReadUInt16();
record.CentralDirectorySize = data.ReadUInt32();
record.CentralDirectoryOffset = data.ReadUInt32();
record.FileCommentLength = data.ReadUInt16();
record.DiskNumber = data.ReadUInt16LittleEndian();
record.StartDiskNumber = data.ReadUInt16LittleEndian();
record.TotalEntriesOnDisk = data.ReadUInt16LittleEndian();
record.TotalEntries = data.ReadUInt16LittleEndian();
record.CentralDirectorySize = data.ReadUInt32LittleEndian();
record.CentralDirectoryOffset = data.ReadUInt32LittleEndian();
record.FileCommentLength = data.ReadUInt16LittleEndian();
if (record.FileCommentLength > 0 && data.Position + record.FileCommentLength <= data.Length)
{
byte[] commentBytes = data.ReadBytes(record.FileCommentLength);
@@ -401,7 +402,7 @@ namespace SabreTools.Serialization.Deserializers
while (data.Position > 0)
{
// Read the potential signature
uint possibleSignature = data.ReadUInt32();
uint possibleSignature = data.ReadUInt32LittleEndian();
if (possibleSignature == EndOfCentralDirectoryLocator64Signature)
{
long signaturePosition = data.Position - 4;
@@ -441,20 +442,20 @@ namespace SabreTools.Serialization.Deserializers
{
var record = new EndOfCentralDirectoryRecord64();
record.Signature = data.ReadUInt32();
record.Signature = data.ReadUInt32LittleEndian();
if (record.Signature != EndOfCentralDirectoryRecord64Signature)
return null;
record.DirectoryRecordSize = data.ReadUInt64();
record.DirectoryRecordSize = data.ReadUInt64LittleEndian();
record.HostSystem = (HostSystem)data.ReadByteValue();
record.VersionMadeBy = data.ReadByteValue();
record.VersionNeededToExtract = data.ReadUInt16();
record.DiskNumber = data.ReadUInt32();
record.StartDiskNumber = data.ReadUInt32();
record.TotalEntriesOnDisk = data.ReadUInt64();
record.TotalEntries = data.ReadUInt64();
record.CentralDirectorySize = data.ReadUInt64();
record.CentralDirectoryOffset = data.ReadUInt64();
record.VersionNeededToExtract = data.ReadUInt16LittleEndian();
record.DiskNumber = data.ReadUInt32LittleEndian();
record.StartDiskNumber = data.ReadUInt32LittleEndian();
record.TotalEntriesOnDisk = data.ReadUInt64LittleEndian();
record.TotalEntries = data.ReadUInt64LittleEndian();
record.CentralDirectorySize = data.ReadUInt64LittleEndian();
record.CentralDirectoryOffset = data.ReadUInt64LittleEndian();
// TODO: Handle the ExtensibleDataSector -- How to detect if exists?
@@ -470,27 +471,27 @@ namespace SabreTools.Serialization.Deserializers
{
var header = new CentralDirectoryFileHeader();
header.Signature = data.ReadUInt32();
header.Signature = data.ReadUInt32LittleEndian();
if (header.Signature != CentralDirectoryFileHeaderSignature)
return null;
header.HostSystem = (HostSystem)data.ReadByteValue();
header.VersionMadeBy = data.ReadByteValue();
header.VersionNeededToExtract = data.ReadUInt16();
header.Flags = (GeneralPurposeBitFlags)data.ReadUInt16();
header.CompressionMethod = (CompressionMethod)data.ReadUInt16();
header.LastModifedFileTime = data.ReadUInt16();
header.LastModifiedFileDate = data.ReadUInt16();
header.CRC32 = data.ReadUInt32();
header.CompressedSize = data.ReadUInt32();
header.UncompressedSize = data.ReadUInt32();
header.FileNameLength = data.ReadUInt16();
header.ExtraFieldLength = data.ReadUInt16();
header.FileCommentLength = data.ReadUInt16();
header.DiskNumberStart = data.ReadUInt16();
header.InternalFileAttributes = (InternalFileAttributes)data.ReadUInt16();
header.ExternalFileAttributes = data.ReadUInt32();
header.RelativeOffsetOfLocalHeader = data.ReadUInt32();
header.VersionNeededToExtract = data.ReadUInt16LittleEndian();
header.Flags = (GeneralPurposeBitFlags)data.ReadUInt16LittleEndian();
header.CompressionMethod = (CompressionMethod)data.ReadUInt16LittleEndian();
header.LastModifedFileTime = data.ReadUInt16LittleEndian();
header.LastModifiedFileDate = data.ReadUInt16LittleEndian();
header.CRC32 = data.ReadUInt32LittleEndian();
header.CompressedSize = data.ReadUInt32LittleEndian();
header.UncompressedSize = data.ReadUInt32LittleEndian();
header.FileNameLength = data.ReadUInt16LittleEndian();
header.ExtraFieldLength = data.ReadUInt16LittleEndian();
header.FileCommentLength = data.ReadUInt16LittleEndian();
header.DiskNumberStart = data.ReadUInt16LittleEndian();
header.InternalFileAttributes = (InternalFileAttributes)data.ReadUInt16LittleEndian();
header.ExternalFileAttributes = data.ReadUInt32LittleEndian();
header.RelativeOffsetOfLocalHeader = data.ReadUInt32LittleEndian();
if (header.FileNameLength > 0 && data.Position + header.FileNameLength <= data.Length)
{
@@ -538,7 +539,7 @@ namespace SabreTools.Serialization.Deserializers
while (data.Position > 0)
{
// Read the potential signature
uint possibleSignature = data.ReadUInt32();
uint possibleSignature = data.ReadUInt32LittleEndian();
if (possibleSignature == ArchiveExtraDataRecordSignature)
{
long signaturePosition = data.Position - 4;
@@ -578,11 +579,11 @@ namespace SabreTools.Serialization.Deserializers
{
var record = new ArchiveExtraDataRecord();
record.Signature = data.ReadUInt32();
record.Signature = data.ReadUInt32LittleEndian();
if (record.Signature != ArchiveExtraDataRecordSignature)
return null;
record.ExtraFieldLength = data.ReadUInt32();
record.ExtraFieldLength = data.ReadUInt32LittleEndian();
if (record.ExtraFieldLength > 0 && data.Position + record.ExtraFieldLength <= data.Length)
{
byte[] extraBytes = data.ReadBytes((int)record.ExtraFieldLength);
@@ -604,20 +605,20 @@ namespace SabreTools.Serialization.Deserializers
{
var header = new LocalFileHeader();
header.Signature = data.ReadUInt32();
header.Signature = data.ReadUInt32LittleEndian();
if (header.Signature != LocalFileHeaderSignature)
return null;
header.Version = data.ReadUInt16();
header.Flags = (GeneralPurposeBitFlags)data.ReadUInt16();
header.CompressionMethod = (CompressionMethod)data.ReadUInt16();
header.LastModifedFileTime = data.ReadUInt16();
header.LastModifiedFileDate = data.ReadUInt16();
header.CRC32 = data.ReadUInt32();
header.CompressedSize = data.ReadUInt32();
header.UncompressedSize = data.ReadUInt32();
header.FileNameLength = data.ReadUInt16();
header.ExtraFieldLength = data.ReadUInt16();
header.Version = data.ReadUInt16LittleEndian();
header.Flags = (GeneralPurposeBitFlags)data.ReadUInt16LittleEndian();
header.CompressionMethod = (CompressionMethod)data.ReadUInt16LittleEndian();
header.LastModifedFileTime = data.ReadUInt16LittleEndian();
header.LastModifiedFileDate = data.ReadUInt16LittleEndian();
header.CRC32 = data.ReadUInt32LittleEndian();
header.CompressedSize = data.ReadUInt32LittleEndian();
header.UncompressedSize = data.ReadUInt32LittleEndian();
header.FileNameLength = data.ReadUInt16LittleEndian();
header.ExtraFieldLength = data.ReadUInt16LittleEndian();
if (header.FileNameLength > 0 && data.Position + header.FileNameLength <= data.Length)
{
@@ -649,13 +650,13 @@ namespace SabreTools.Serialization.Deserializers
var dataDescriptor = new DataDescriptor();
// Signatures are expected but not required
dataDescriptor.Signature = data.ReadUInt32();
dataDescriptor.Signature = data.ReadUInt32LittleEndian();
if (dataDescriptor.Signature != DataDescriptorSignature)
data.Seek(-4, SeekOrigin.Current);
dataDescriptor.CRC32 = data.ReadUInt32();
dataDescriptor.CompressedSize = data.ReadUInt32();
dataDescriptor.UncompressedSize = data.ReadUInt32();
dataDescriptor.CRC32 = data.ReadUInt32LittleEndian();
dataDescriptor.CompressedSize = data.ReadUInt32LittleEndian();
dataDescriptor.UncompressedSize = data.ReadUInt32LittleEndian();
return dataDescriptor;
}
@@ -670,13 +671,13 @@ namespace SabreTools.Serialization.Deserializers
var zip64DataDescriptor = new DataDescriptor64();
// Signatures are expected but not required
zip64DataDescriptor.Signature = data.ReadUInt32();
zip64DataDescriptor.Signature = data.ReadUInt32LittleEndian();
if (zip64DataDescriptor.Signature != DataDescriptorSignature)
data.Seek(-4, SeekOrigin.Current);
zip64DataDescriptor.CRC32 = data.ReadUInt32();
zip64DataDescriptor.CompressedSize = data.ReadUInt64();
zip64DataDescriptor.UncompressedSize = data.ReadUInt64();
zip64DataDescriptor.CRC32 = data.ReadUInt32LittleEndian();
zip64DataDescriptor.CompressedSize = data.ReadUInt64LittleEndian();
zip64DataDescriptor.UncompressedSize = data.ReadUInt64LittleEndian();
return zip64DataDescriptor;
}

View File

@@ -95,7 +95,7 @@ namespace SabreTools.Serialization.Deserializers
}
// Set the unknown value 2
audioFile.UnknownValue2 = data.ReadUInt32();
audioFile.UnknownValue2 = data.ReadUInt32LittleEndian();
#endregion
@@ -137,7 +137,7 @@ namespace SabreTools.Serialization.Deserializers
#region Data Files Count
// Set the data files count
audioFile.DataFilesCount = data.ReadUInt32();
audioFile.DataFilesCount = data.ReadUInt32LittleEndian();
#endregion
@@ -183,11 +183,11 @@ namespace SabreTools.Serialization.Deserializers
AudioHeader audioHeader;
// Get the common header pieces
uint signature = data.ReadUInt32();
uint signature = data.ReadUInt32LittleEndian();
if (signature != SignatureUInt32)
return null;
uint version = data.ReadUInt32();
uint version = data.ReadUInt32LittleEndian();
// Build the header according to version
uint unknownOffset1;
@@ -199,16 +199,16 @@ namespace SabreTools.Serialization.Deserializers
v1.Signature = signature;
v1.Version = version;
v1.TrackID = data.ReadUInt32();
v1.UnknownOffset1 = data.ReadUInt32();
v1.UnknownOffset2 = data.ReadUInt32();
v1.UnknownOffset3 = data.ReadUInt32();
v1.Unknown1 = data.ReadUInt32();
v1.Unknown2 = data.ReadUInt32();
v1.Year = data.ReadUInt32();
v1.TrackID = data.ReadUInt32LittleEndian();
v1.UnknownOffset1 = data.ReadUInt32LittleEndian();
v1.UnknownOffset2 = data.ReadUInt32LittleEndian();
v1.UnknownOffset3 = data.ReadUInt32LittleEndian();
v1.Unknown1 = data.ReadUInt32LittleEndian();
v1.Unknown2 = data.ReadUInt32LittleEndian();
v1.Year = data.ReadUInt32LittleEndian();
v1.TrackNumber = data.ReadByteValue();
v1.Subgenre = (Subgenre)data.ReadByteValue();
v1.Duration = data.ReadUInt32();
v1.Duration = data.ReadUInt32LittleEndian();
audioHeader = v1;
unknownOffset1 = v1.UnknownOffset1;
@@ -220,29 +220,29 @@ namespace SabreTools.Serialization.Deserializers
v2.Signature = signature;
v2.Version = version;
v2.Unknown1 = data.ReadUInt32();
v2.Unknown2 = data.ReadUInt32();
v2.Unknown3 = data.ReadUInt32();
v2.Unknown4 = data.ReadUInt32();
v2.Unknown5 = data.ReadUInt32();
v2.Unknown6 = data.ReadUInt32();
v2.UnknownOffset1 = data.ReadUInt32();
v2.Unknown7 = data.ReadUInt32();
v2.Unknown8 = data.ReadUInt32();
v2.Unknown9 = data.ReadUInt32();
v2.UnknownOffset2 = data.ReadUInt32();
v2.Unknown10 = data.ReadUInt32();
v2.Unknown11 = data.ReadUInt32();
v2.Unknown12 = data.ReadUInt32();
v2.Unknown13 = data.ReadUInt32();
v2.Unknown14 = data.ReadUInt32();
v2.Unknown15 = data.ReadUInt32();
v2.Unknown16 = data.ReadUInt32();
v2.Unknown17 = data.ReadUInt32();
v2.TrackID = data.ReadUInt32();
v2.Year = data.ReadUInt32();
v2.TrackNumber = data.ReadUInt32();
v2.Unknown18 = data.ReadUInt32();
v2.Unknown1 = data.ReadUInt32LittleEndian();
v2.Unknown2 = data.ReadUInt32LittleEndian();
v2.Unknown3 = data.ReadUInt32LittleEndian();
v2.Unknown4 = data.ReadUInt32LittleEndian();
v2.Unknown5 = data.ReadUInt32LittleEndian();
v2.Unknown6 = data.ReadUInt32LittleEndian();
v2.UnknownOffset1 = data.ReadUInt32LittleEndian();
v2.Unknown7 = data.ReadUInt32LittleEndian();
v2.Unknown8 = data.ReadUInt32LittleEndian();
v2.Unknown9 = data.ReadUInt32LittleEndian();
v2.UnknownOffset2 = data.ReadUInt32LittleEndian();
v2.Unknown10 = data.ReadUInt32LittleEndian();
v2.Unknown11 = data.ReadUInt32LittleEndian();
v2.Unknown12 = data.ReadUInt32LittleEndian();
v2.Unknown13 = data.ReadUInt32LittleEndian();
v2.Unknown14 = data.ReadUInt32LittleEndian();
v2.Unknown15 = data.ReadUInt32LittleEndian();
v2.Unknown16 = data.ReadUInt32LittleEndian();
v2.Unknown17 = data.ReadUInt32LittleEndian();
v2.TrackID = data.ReadUInt32LittleEndian();
v2.Year = data.ReadUInt32LittleEndian();
v2.TrackNumber = data.ReadUInt32LittleEndian();
v2.Unknown18 = data.ReadUInt32LittleEndian();
audioHeader = v2;
unknownOffset1 = v2.UnknownOffset1 + 0x54;
@@ -253,33 +253,33 @@ namespace SabreTools.Serialization.Deserializers
return null;
}
audioHeader.TrackLength = data.ReadUInt16();
audioHeader.TrackLength = data.ReadUInt16LittleEndian();
byte[] track = data.ReadBytes(audioHeader.TrackLength);
audioHeader.Track = Encoding.ASCII.GetString(track);
audioHeader.ArtistLength = data.ReadUInt16();
audioHeader.ArtistLength = data.ReadUInt16LittleEndian();
byte[] artist = data.ReadBytes(audioHeader.ArtistLength);
audioHeader.Artist = Encoding.ASCII.GetString(artist);
audioHeader.AlbumLength = data.ReadUInt16();
audioHeader.AlbumLength = data.ReadUInt16LittleEndian();
byte[] album = data.ReadBytes(audioHeader.AlbumLength);
audioHeader.Album = Encoding.ASCII.GetString(album);
audioHeader.WriterLength = data.ReadUInt16();
audioHeader.WriterLength = data.ReadUInt16LittleEndian();
byte[] writer = data.ReadBytes(audioHeader.WriterLength);
audioHeader.Writer = Encoding.ASCII.GetString(writer);
audioHeader.PublisherLength = data.ReadUInt16();
audioHeader.PublisherLength = data.ReadUInt16LittleEndian();
byte[] publisher = data.ReadBytes(audioHeader.PublisherLength);
audioHeader.Publisher = Encoding.ASCII.GetString(publisher);
audioHeader.LabelLength = data.ReadUInt16();
audioHeader.LabelLength = data.ReadUInt16LittleEndian();
byte[] label = data.ReadBytes(audioHeader.LabelLength);
audioHeader.Label = Encoding.ASCII.GetString(label);
if (data.Position - initialOffset < unknownOffset1)
{
audioHeader.CommentsLength = data.ReadUInt16();
audioHeader.CommentsLength = data.ReadUInt16LittleEndian();
byte[] comments = data.ReadBytes(audioHeader.CommentsLength);
audioHeader.Comments = Encoding.ASCII.GetString(comments);
}
@@ -296,7 +296,7 @@ namespace SabreTools.Serialization.Deserializers
{
var unknownBlock1 = new UnknownBlock1();
unknownBlock1.Length = data.ReadUInt32();
unknownBlock1.Length = data.ReadUInt32LittleEndian();
unknownBlock1.Data = data.ReadBytes((int)unknownBlock1.Length);
return unknownBlock1;
@@ -325,10 +325,10 @@ namespace SabreTools.Serialization.Deserializers
{
var dataFile = new DataFile();
dataFile.FileNameLength = data.ReadUInt16();
dataFile.FileNameLength = data.ReadUInt16LittleEndian();
byte[] fileName = data.ReadBytes(dataFile.FileNameLength);
dataFile.FileName = Encoding.ASCII.GetString(fileName);
dataFile.DataLength = data.ReadUInt32();
dataFile.DataLength = data.ReadUInt32LittleEndian();
dataFile.Data = data.ReadBytes((int)dataFile.DataLength);
return dataFile;

View File

@@ -66,7 +66,7 @@ namespace SabreTools.Serialization.Deserializers
{
var playlistHeader = new PlaylistHeader();
playlistHeader.TrackCount = data.ReadUInt32();
playlistHeader.TrackCount = data.ReadUInt32LittleEndian();
if (playlistHeader.TrackCount > int.MaxValue)
return null;

File diff suppressed because it is too large Load Diff

View File

@@ -23,8 +23,8 @@ namespace SabreTools.Serialization.Deserializers
#region Header
// Try to parse the header
var header = data.ReadType<Header>();
if (header?.Signature != SignatureString)
var header = ParseHeader(data);
if (header.Signature != SignatureString)
return null;
// Set the archive header
@@ -40,11 +40,7 @@ namespace SabreTools.Serialization.Deserializers
// Read all entries in turn
for (int i = 0; i < header.FileCount; i++)
{
var file = ParseFileDescriptor(data, header.MinorVersion);
if (file == null)
return null;
fileDescriptors[i] = file;
fileDescriptors[i] = ParseFileDescriptor(data, header.MinorVersion);
}
// Set the file list
@@ -65,38 +61,58 @@ namespace SabreTools.Serialization.Deserializers
}
/// <summary>
/// Parse a Stream into a file descriptor
/// Parse a Stream into a FileDescriptor
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="minorVersion">Minor version of the archive</param>
/// <returns>Filled file descriptor on success, null on error</returns>
private static FileDescriptor ParseFileDescriptor(Stream data, byte minorVersion)
/// <returns>Filled FileDescriptor on success, null on error</returns>
public static FileDescriptor ParseFileDescriptor(Stream data, byte minorVersion)
{
var fileDescriptor = new FileDescriptor();
var obj = new FileDescriptor();
fileDescriptor.FileNameSize = ReadVariableLength(data);
if (fileDescriptor.FileNameSize > 0)
obj.FileNameSize = ReadVariableLength(data);
if (obj.FileNameSize > 0)
{
byte[] fileName = data.ReadBytes(fileDescriptor.FileNameSize);
fileDescriptor.FileName = Encoding.ASCII.GetString(fileName);
byte[] fileName = data.ReadBytes(obj.FileNameSize);
obj.FileName = Encoding.ASCII.GetString(fileName);
}
fileDescriptor.CommentFieldSize = ReadVariableLength(data);
if (fileDescriptor.CommentFieldSize > 0)
obj.CommentFieldSize = ReadVariableLength(data);
if (obj.CommentFieldSize > 0)
{
byte[] commentField = data.ReadBytes(fileDescriptor.CommentFieldSize);
fileDescriptor.CommentField = Encoding.ASCII.GetString(commentField);
byte[] commentField = data.ReadBytes(obj.CommentFieldSize);
obj.CommentField = Encoding.ASCII.GetString(commentField);
}
fileDescriptor.ExpandedFileSize = data.ReadUInt32();
fileDescriptor.FileTime = data.ReadUInt16();
fileDescriptor.FileDate = data.ReadUInt16();
obj.ExpandedFileSize = data.ReadUInt32LittleEndian();
obj.FileTime = data.ReadUInt16LittleEndian();
obj.FileDate = data.ReadUInt16LittleEndian();
// Hack for unknown format data
if (minorVersion == 22)
fileDescriptor.Unknown = data.ReadUInt16();
obj.Unknown = data.ReadUInt16LittleEndian();
return fileDescriptor;
return obj;
}
/// <summary>
/// Parse a Stream into a Header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Header on success, null on error</returns>
public static Header ParseHeader(Stream data)
{
var obj = new Header();
byte[] signature = data.ReadBytes(2);
obj.Signature = Encoding.ASCII.GetString(signature);
obj.MajorVersion = data.ReadByteValue();
obj.MinorVersion = data.ReadByteValue();
obj.FileCount = data.ReadUInt16LittleEndian();
obj.TableSize = data.ReadByteValue();
obj.CompressionFlags = data.ReadByteValue();
return obj;
}
/// <summary>

View File

@@ -1,6 +1,7 @@
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using static SabreTools.Models.PlayStation3.Constants;
namespace SabreTools.Serialization.Deserializers
{
@@ -16,13 +17,32 @@ namespace SabreTools.Serialization.Deserializers
try
{
// Deserialize the SFB
var sfb = data.ReadType<Models.PlayStation3.SFB>();
if (sfb?.Magic == null)
var sfb = new Models.PlayStation3.SFB();
sfb.Magic = data.ReadUInt32BigEndian();
if (sfb.Magic != SFBMagic)
return null;
string magic = Encoding.ASCII.GetString(sfb.Magic);
if (magic != ".SFB")
return null;
sfb.FileVersion = data.ReadUInt32BigEndian();
sfb.Reserved1 = data.ReadBytes(0x18);
byte[] flagsType = data.ReadBytes(0x10);
sfb.FlagsType = Encoding.ASCII.GetString(flagsType).TrimEnd('\0');
sfb.DiscContentDataOffset = data.ReadUInt32BigEndian();
sfb.DiscContentDataLength = data.ReadUInt32BigEndian();
sfb.Reserved2 = data.ReadBytes(0x08);
byte[] discTitleName = data.ReadBytes(0x08);
sfb.DiscTitleName = Encoding.ASCII.GetString(discTitleName).TrimEnd('\0');
sfb.Reserved3 = data.ReadBytes(0x08);
sfb.DiscVersionDataOffset = data.ReadUInt32BigEndian();
sfb.DiscVersionDataLength = data.ReadUInt32BigEndian();
sfb.Reserved4 = data.ReadBytes(0x188);
byte[] discContent = data.ReadBytes(0x20);
sfb.DiscContent = Encoding.ASCII.GetString(discContent).TrimEnd('\0');
byte[] discTitle = data.ReadBytes(0x10);
sfb.DiscTitle = Encoding.ASCII.GetString(discTitle).TrimEnd('\0');
byte[] discVersion = data.ReadBytes(0x10);
sfb.DiscVersion = Encoding.ASCII.GetString(discVersion).TrimEnd('\0');
sfb.Reserved5 = data.ReadBytes(0x3C0);
return sfb;
}

View File

@@ -1,6 +1,7 @@
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.PlayStation3;
using static SabreTools.Models.PlayStation3.Constants;
namespace SabreTools.Serialization.Deserializers
{
@@ -21,8 +22,8 @@ namespace SabreTools.Serialization.Deserializers
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
var header = ParseSFOHeader(data);
if (header.Magic != SFOMagic)
return null;
// Assign the header
@@ -61,31 +62,39 @@ namespace SabreTools.Serialization.Deserializers
}
/// <summary>
/// Parse a Stream into an SFO header
/// Parse a Stream into an SFOHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SFO header on success, null on error</returns>
public static Models.PlayStation3.SFOHeader? ParseHeader(Stream data)
/// <returns>Filled SFOHeader on success, null on error</returns>
public static SFOHeader ParseSFOHeader(Stream data)
{
var sfoHeader = data.ReadType<Models.PlayStation3.SFOHeader>();
if (sfoHeader == null)
return null;
var obj = new SFOHeader();
string magic = Encoding.ASCII.GetString(sfoHeader!.Magic!);
if (magic != "\0PSF")
return null;
obj.Magic = data.ReadUInt32BigEndian();
obj.Version = data.ReadUInt32LittleEndian();
obj.KeyTableStart = data.ReadUInt32LittleEndian();
obj.DataTableStart = data.ReadUInt32LittleEndian();
obj.TablesEntries = data.ReadUInt32LittleEndian();
return sfoHeader;
return obj;
}
/// <summary>
/// Parse a Stream into an SFO index table entry
/// Parse a Stream into an SFOIndexTableEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SFO index table entry on success, null on error</returns>
public static Models.PlayStation3.SFOIndexTableEntry? ParseIndexTableEntry(Stream data)
/// <returns>Filled SFOIndexTableEntry on success, null on error</returns>
public static SFOIndexTableEntry ParseIndexTableEntry(Stream data)
{
return data.ReadType<Models.PlayStation3.SFOIndexTableEntry>();
var obj = new SFOIndexTableEntry();
obj.KeyOffset = data.ReadUInt16LittleEndian();
obj.DataFormat = (DataFormat)data.ReadUInt16LittleEndian();
obj.DataLength = data.ReadUInt32LittleEndian();
obj.DataMaxLength = data.ReadUInt32LittleEndian();
obj.DataOffset = data.ReadUInt32LittleEndian();
return obj;
}
}
}

View File

@@ -66,8 +66,8 @@ namespace SabreTools.Serialization.Deserializers
if (signature != SignatureString)
return null;
ushort majorVersion = data.ReadUInt16();
ushort minorVersion = data.ReadUInt16();
ushort majorVersion = data.ReadUInt16LittleEndian();
ushort minorVersion = data.ReadUInt16LittleEndian();
if (minorVersion != 0)
return null;
@@ -85,9 +85,9 @@ namespace SabreTools.Serialization.Deserializers
byte[] header4Name = data.ReadBytes(count: 128);
header4.Name = Encoding.Unicode.GetString(header4Name).TrimEnd('\0');
header4.HeaderMD5 = data.ReadBytes(0x10);
header4.HeaderLength = data.ReadUInt32();
header4.FileDataOffset = data.ReadUInt32();
header4.Dummy0 = data.ReadUInt32();
header4.HeaderLength = data.ReadUInt32LittleEndian();
header4.FileDataOffset = data.ReadUInt32LittleEndian();
header4.Dummy0 = data.ReadUInt32LittleEndian();
return header4;
@@ -101,9 +101,9 @@ namespace SabreTools.Serialization.Deserializers
header6.MinorVersion = minorVersion;
byte[] header6Name = data.ReadBytes(count: 128);
header6.Name = Encoding.Unicode.GetString(header6Name).TrimEnd('\0');
header6.HeaderLength = data.ReadUInt32();
header6.FileDataOffset = data.ReadUInt32();
header6.Dummy0 = data.ReadUInt32();
header6.HeaderLength = data.ReadUInt32LittleEndian();
header6.FileDataOffset = data.ReadUInt32LittleEndian();
header6.Dummy0 = data.ReadUInt32LittleEndian();
return header6;
@@ -743,14 +743,14 @@ namespace SabreTools.Serialization.Deserializers
{
var directoryHeader4 = new DirectoryHeader4();
directoryHeader4.SectionOffset = data.ReadUInt32();
directoryHeader4.SectionCount = data.ReadUInt16();
directoryHeader4.FolderOffset = data.ReadUInt32();
directoryHeader4.FolderCount = data.ReadUInt16();
directoryHeader4.FileOffset = data.ReadUInt32();
directoryHeader4.FileCount = data.ReadUInt16();
directoryHeader4.StringTableOffset = data.ReadUInt32();
directoryHeader4.StringTableCount = data.ReadUInt16();
directoryHeader4.SectionOffset = data.ReadUInt32LittleEndian();
directoryHeader4.SectionCount = data.ReadUInt16LittleEndian();
directoryHeader4.FolderOffset = data.ReadUInt32LittleEndian();
directoryHeader4.FolderCount = data.ReadUInt16LittleEndian();
directoryHeader4.FileOffset = data.ReadUInt32LittleEndian();
directoryHeader4.FileCount = data.ReadUInt16LittleEndian();
directoryHeader4.StringTableOffset = data.ReadUInt32LittleEndian();
directoryHeader4.StringTableCount = data.ReadUInt16LittleEndian();
return directoryHeader4;
}
@@ -764,14 +764,14 @@ namespace SabreTools.Serialization.Deserializers
{
var directoryHeader5 = new DirectoryHeader5();
directoryHeader5.SectionOffset = data.ReadUInt32();
directoryHeader5.SectionCount = data.ReadUInt32();
directoryHeader5.FolderOffset = data.ReadUInt32();
directoryHeader5.FolderCount = data.ReadUInt32();
directoryHeader5.FileOffset = data.ReadUInt32();
directoryHeader5.FileCount = data.ReadUInt32();
directoryHeader5.StringTableOffset = data.ReadUInt32();
directoryHeader5.StringTableCount = data.ReadUInt32();
directoryHeader5.SectionOffset = data.ReadUInt32LittleEndian();
directoryHeader5.SectionCount = data.ReadUInt32LittleEndian();
directoryHeader5.FolderOffset = data.ReadUInt32LittleEndian();
directoryHeader5.FolderCount = data.ReadUInt32LittleEndian();
directoryHeader5.FileOffset = data.ReadUInt32LittleEndian();
directoryHeader5.FileCount = data.ReadUInt32LittleEndian();
directoryHeader5.StringTableOffset = data.ReadUInt32LittleEndian();
directoryHeader5.StringTableCount = data.ReadUInt32LittleEndian();
return directoryHeader5;
}
@@ -785,16 +785,16 @@ namespace SabreTools.Serialization.Deserializers
{
var directoryHeader7 = new DirectoryHeader7();
directoryHeader7.SectionOffset = data.ReadUInt32();
directoryHeader7.SectionCount = data.ReadUInt32();
directoryHeader7.FolderOffset = data.ReadUInt32();
directoryHeader7.FolderCount = data.ReadUInt32();
directoryHeader7.FileOffset = data.ReadUInt32();
directoryHeader7.FileCount = data.ReadUInt32();
directoryHeader7.StringTableOffset = data.ReadUInt32();
directoryHeader7.StringTableCount = data.ReadUInt32();
directoryHeader7.HashTableOffset = data.ReadUInt32();
directoryHeader7.BlockSize = data.ReadUInt32();
directoryHeader7.SectionOffset = data.ReadUInt32LittleEndian();
directoryHeader7.SectionCount = data.ReadUInt32LittleEndian();
directoryHeader7.FolderOffset = data.ReadUInt32LittleEndian();
directoryHeader7.FolderCount = data.ReadUInt32LittleEndian();
directoryHeader7.FileOffset = data.ReadUInt32LittleEndian();
directoryHeader7.FileCount = data.ReadUInt32LittleEndian();
directoryHeader7.StringTableOffset = data.ReadUInt32LittleEndian();
directoryHeader7.StringTableCount = data.ReadUInt32LittleEndian();
directoryHeader7.HashTableOffset = data.ReadUInt32LittleEndian();
directoryHeader7.BlockSize = data.ReadUInt32LittleEndian();
return directoryHeader7;
}
@@ -813,11 +813,11 @@ namespace SabreTools.Serialization.Deserializers
section4.Alias = Encoding.ASCII.GetString(section4Alias).TrimEnd('\0');
byte[] section4Name = data.ReadBytes(64);
section4.Name = Encoding.ASCII.GetString(section4Name).TrimEnd('\0');
section4.FolderStartIndex = data.ReadUInt16();
section4.FolderEndIndex = data.ReadUInt16();
section4.FileStartIndex = data.ReadUInt16();
section4.FileEndIndex = data.ReadUInt16();
section4.FolderRootIndex = data.ReadUInt16();
section4.FolderStartIndex = data.ReadUInt16LittleEndian();
section4.FolderEndIndex = data.ReadUInt16LittleEndian();
section4.FileStartIndex = data.ReadUInt16LittleEndian();
section4.FileEndIndex = data.ReadUInt16LittleEndian();
section4.FolderRootIndex = data.ReadUInt16LittleEndian();
return section4;
}
@@ -836,11 +836,11 @@ namespace SabreTools.Serialization.Deserializers
section5.Alias = Encoding.ASCII.GetString(section5Alias).TrimEnd('\0');
byte[] section5Name = data.ReadBytes(64);
section5.Name = Encoding.ASCII.GetString(section5Name).TrimEnd('\0');
section5.FolderStartIndex = data.ReadUInt32();
section5.FolderEndIndex = data.ReadUInt32();
section5.FileStartIndex = data.ReadUInt32();
section5.FileEndIndex = data.ReadUInt32();
section5.FolderRootIndex = data.ReadUInt32();
section5.FolderStartIndex = data.ReadUInt32LittleEndian();
section5.FolderEndIndex = data.ReadUInt32LittleEndian();
section5.FileStartIndex = data.ReadUInt32LittleEndian();
section5.FileEndIndex = data.ReadUInt32LittleEndian();
section5.FolderRootIndex = data.ReadUInt32LittleEndian();
return section5;
}
@@ -855,12 +855,12 @@ namespace SabreTools.Serialization.Deserializers
{
var folder4 = new Folder4();
folder4.NameOffset = data.ReadUInt32();
folder4.NameOffset = data.ReadUInt32LittleEndian();
folder4.Name = null; // Read from string table
folder4.FolderStartIndex = data.ReadUInt16();
folder4.FolderEndIndex = data.ReadUInt16();
folder4.FileStartIndex = data.ReadUInt16();
folder4.FileEndIndex = data.ReadUInt16();
folder4.FolderStartIndex = data.ReadUInt16LittleEndian();
folder4.FolderEndIndex = data.ReadUInt16LittleEndian();
folder4.FileStartIndex = data.ReadUInt16LittleEndian();
folder4.FileEndIndex = data.ReadUInt16LittleEndian();
return folder4;
}
@@ -875,12 +875,12 @@ namespace SabreTools.Serialization.Deserializers
{
var folder5 = new Folder5();
folder5.NameOffset = data.ReadUInt32();
folder5.NameOffset = data.ReadUInt32LittleEndian();
folder5.Name = null; // Read from string table
folder5.FolderStartIndex = data.ReadUInt32();
folder5.FolderEndIndex = data.ReadUInt32();
folder5.FileStartIndex = data.ReadUInt32();
folder5.FileEndIndex = data.ReadUInt32();
folder5.FolderStartIndex = data.ReadUInt32LittleEndian();
folder5.FolderEndIndex = data.ReadUInt32LittleEndian();
folder5.FileStartIndex = data.ReadUInt32LittleEndian();
folder5.FileEndIndex = data.ReadUInt32LittleEndian();
return folder5;
}
@@ -895,12 +895,12 @@ namespace SabreTools.Serialization.Deserializers
{
var file4 = new File4();
file4.NameOffset = data.ReadUInt32();
file4.NameOffset = data.ReadUInt32LittleEndian();
file4.Name = null; // Read from string table
file4.Offset = data.ReadUInt32();
file4.SizeOnDisk = data.ReadUInt32();
file4.Size = data.ReadUInt32();
file4.TimeModified = data.ReadUInt32();
file4.Offset = data.ReadUInt32LittleEndian();
file4.SizeOnDisk = data.ReadUInt32LittleEndian();
file4.Size = data.ReadUInt32LittleEndian();
file4.TimeModified = data.ReadUInt32LittleEndian();
file4.Dummy0 = data.ReadByteValue();
file4.Type = data.ReadByteValue();
@@ -917,15 +917,15 @@ namespace SabreTools.Serialization.Deserializers
{
var file6 = new File6();
file6.NameOffset = data.ReadUInt32();
file6.NameOffset = data.ReadUInt32LittleEndian();
file6.Name = null; // Read from string table
file6.Offset = data.ReadUInt32();
file6.SizeOnDisk = data.ReadUInt32();
file6.Size = data.ReadUInt32();
file6.TimeModified = data.ReadUInt32();
file6.Offset = data.ReadUInt32LittleEndian();
file6.SizeOnDisk = data.ReadUInt32LittleEndian();
file6.Size = data.ReadUInt32LittleEndian();
file6.TimeModified = data.ReadUInt32LittleEndian();
file6.Dummy0 = data.ReadByteValue();
file6.Type = data.ReadByteValue();
file6.CRC32 = data.ReadUInt32();
file6.CRC32 = data.ReadUInt32LittleEndian();
return file6;
}
@@ -940,16 +940,16 @@ namespace SabreTools.Serialization.Deserializers
{
var file7 = new File7();
file7.NameOffset = data.ReadUInt32();
file7.NameOffset = data.ReadUInt32LittleEndian();
file7.Name = null; // Read from string table
file7.Offset = data.ReadUInt32();
file7.SizeOnDisk = data.ReadUInt32();
file7.Size = data.ReadUInt32();
file7.TimeModified = data.ReadUInt32();
file7.Offset = data.ReadUInt32LittleEndian();
file7.SizeOnDisk = data.ReadUInt32LittleEndian();
file7.Size = data.ReadUInt32LittleEndian();
file7.TimeModified = data.ReadUInt32LittleEndian();
file7.Dummy0 = data.ReadByteValue();
file7.Type = data.ReadByteValue();
file7.CRC32 = data.ReadUInt32();
file7.HashOffset = data.ReadUInt32();
file7.CRC32 = data.ReadUInt32LittleEndian();
file7.HashOffset = data.ReadUInt32LittleEndian();
return file7;
}

View File

@@ -6,6 +6,7 @@ using SabreTools.IO.Extensions;
using SabreTools.Models.BSP;
using static SabreTools.Models.BSP.Constants;
// TODO: Finish replacing ReadType
namespace SabreTools.Serialization.Deserializers
{
public class VBSP : BaseBinaryDeserializer<VbspFile>
@@ -25,8 +26,8 @@ namespace SabreTools.Serialization.Deserializers
#region Header
// Try to parse the header
var header = data.ReadType<VbspHeader>();
if (header?.Signature != SignatureString)
var header = ParseVbspHeader(data);
if (header.Signature != SignatureString)
return null;
if (Array.IndexOf([17, 18, 19, 20, 21, 22, 23, 25, 27, 29, 0x00040014], header.Version) > -1)
return null;
@@ -265,6 +266,43 @@ namespace SabreTools.Serialization.Deserializers
}
}
/// <summary>
/// Parse a Stream into VbspHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled VbspHeader on success, null on error</returns>
public static VbspHeader ParseVbspHeader(Stream data)
{
var obj = new VbspHeader();
byte[] signature = data.ReadBytes(4);
obj.Signature = Encoding.ASCII.GetString(signature);
obj.Version = data.ReadInt32LittleEndian();
obj.Lumps = new VbspLumpEntry[VBSP_HEADER_LUMPS];
for (int i = 0; i < VBSP_HEADER_LUMPS; i++)
{
obj.Lumps[i] = ParseVbspLumpEntry(data);
}
obj.MapRevision = data.ReadInt32LittleEndian();
return obj;
}
/// <summary>
/// Parse a Stream into VbspLumpEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled VbspLumpEntry on success, null on error</returns>
public static VbspLumpEntry ParseVbspLumpEntry(Stream data)
{
var obj = new VbspLumpEntry();
obj.Version = data.ReadUInt32LittleEndian();
obj.FourCC = data.ReadBytes(4);
return obj;
}
/// <summary>
/// Parse a Stream into LUMP_ENTITIES
/// </summary>
@@ -372,14 +410,14 @@ namespace SabreTools.Serialization.Deserializers
{
var lump = new VisibilityLump();
lump.NumClusters = data.ReadInt32();
lump.NumClusters = data.ReadInt32LittleEndian();
lump.ByteOffsets = new int[lump.NumClusters][];
for (int i = 0; i < lump.NumClusters; i++)
{
lump.ByteOffsets[i] = new int[2];
for (int j = 0; j < 2; j++)
{
lump.ByteOffsets[i][j] = data.ReadInt32();
lump.ByteOffsets[i][j] = data.ReadInt32LittleEndian();
}
}
@@ -467,10 +505,10 @@ namespace SabreTools.Serialization.Deserializers
{
var model = new PhysModel();
model.ModelIndex = data.ReadInt32();
model.DataSize = data.ReadInt32();
model.KeydataSize = data.ReadInt32();
model.SolidCount = data.ReadInt32();
model.ModelIndex = data.ReadInt32LittleEndian();
model.DataSize = data.ReadInt32LittleEndian();
model.KeydataSize = data.ReadInt32LittleEndian();
model.SolidCount = data.ReadInt32LittleEndian();
model.Solids = new PhysSolid[model.SolidCount];
for (int i = 0; i < model.Solids.Length; i++)
{
@@ -491,7 +529,7 @@ namespace SabreTools.Serialization.Deserializers
{
var solid = new PhysSolid();
solid.Size = data.ReadInt32();
solid.Size = data.ReadInt32LittleEndian();
if (solid.Size > 0)
solid.CollisionData = data.ReadBytes(solid.Size);
@@ -525,7 +563,7 @@ namespace SabreTools.Serialization.Deserializers
{
var lump = new OcclusionLump();
lump.Count = data.ReadInt32();
lump.Count = data.ReadInt32LittleEndian();
lump.Data = new OccluderData[lump.Count];
for (int i = 0; i < lump.Count; i++)
{
@@ -533,7 +571,7 @@ namespace SabreTools.Serialization.Deserializers
if (occluderData != null)
lump.Data[i] = occluderData;
}
lump.PolyDataCount = data.ReadInt32();
lump.PolyDataCount = data.ReadInt32LittleEndian();
lump.PolyData = new OccluderPolyData[lump.Count];
for (int i = 0; i < lump.Count; i++)
{
@@ -541,11 +579,11 @@ namespace SabreTools.Serialization.Deserializers
if (polyData != null)
lump.PolyData[i] = polyData;
}
lump.VertexIndexCount = data.ReadInt32();
lump.VertexIndexCount = data.ReadInt32LittleEndian();
lump.VertexIndicies = new int[lump.VertexIndexCount];
for (int i = 0; i < lump.VertexIndexCount; i++)
{
lump.VertexIndicies[i] = data.ReadInt32();
lump.VertexIndicies[i] = data.ReadInt32LittleEndian();
}
return lump;
@@ -578,29 +616,29 @@ namespace SabreTools.Serialization.Deserializers
{
var leaf = new VbspLeaf();
leaf.Contents = (VbspContents)data.ReadUInt32();
leaf.Cluster = data.ReadInt16();
leaf.AreaFlags = data.ReadInt16();
leaf.Contents = (VbspContents)data.ReadUInt32LittleEndian();
leaf.Cluster = data.ReadInt16LittleEndian();
leaf.AreaFlags = data.ReadInt16LittleEndian();
leaf.Mins = new short[3];
for (int i = 0; i < leaf.Mins.Length; i++)
{
leaf.Mins[i] = data.ReadInt16();
leaf.Mins[i] = data.ReadInt16LittleEndian();
}
leaf.Maxs = new short[3];
for (int i = 0; i < leaf.Maxs.Length; i++)
{
leaf.Maxs[i] = data.ReadInt16();
leaf.Maxs[i] = data.ReadInt16LittleEndian();
}
leaf.FirstLeafFace = data.ReadUInt16();
leaf.NumLeafFaces = data.ReadUInt16();
leaf.FirstLeafBrush = data.ReadUInt16();
leaf.NumLeafBrushes = data.ReadUInt16();
leaf.LeafWaterDataID = data.ReadInt16();
leaf.FirstLeafFace = data.ReadUInt16LittleEndian();
leaf.NumLeafFaces = data.ReadUInt16LittleEndian();
leaf.FirstLeafBrush = data.ReadUInt16LittleEndian();
leaf.NumLeafBrushes = data.ReadUInt16LittleEndian();
leaf.LeafWaterDataID = data.ReadInt16LittleEndian();
if (version == 1)
leaf.AmbientLighting = data.ReadType<CompressedLightCube>();
else
leaf.Padding = data.ReadInt16();
leaf.Padding = data.ReadInt16LittleEndian();
return leaf;
}
@@ -615,7 +653,7 @@ namespace SabreTools.Serialization.Deserializers
var marksurfaces = new List<ushort>();
while (data.Position < offset + length)
{
marksurfaces.Add(data.ReadUInt16());
marksurfaces.Add(data.ReadUInt16LittleEndian());
}
return new MarksurfacesLump { Marksurfaces = [.. marksurfaces] };
@@ -649,7 +687,7 @@ namespace SabreTools.Serialization.Deserializers
var surfedges = new List<int>();
while (data.Position < offset + length)
{
surfedges.Add(data.ReadInt32());
surfedges.Add(data.ReadInt32LittleEndian());
}
return new SurfedgesLump { Surfedges = [.. surfedges] };
@@ -701,7 +739,7 @@ namespace SabreTools.Serialization.Deserializers
var map = new List<ushort>();
while (data.Position < offset + length)
{
map.Add(data.ReadUInt16());
map.Add(data.ReadUInt16LittleEndian());
}
return new LeafFacesLump { Map = [.. map] };
@@ -717,7 +755,7 @@ namespace SabreTools.Serialization.Deserializers
var map = new List<ushort>();
while (data.Position < offset + length)
{
map.Add(data.ReadUInt16());
map.Add(data.ReadUInt16LittleEndian());
}
return new LeafBrushesLump { Map = [.. map] };
@@ -804,7 +842,7 @@ namespace SabreTools.Serialization.Deserializers
{
var lump = new GameLump();
lump.LumpCount = data.ReadInt32();
lump.LumpCount = data.ReadInt32LittleEndian();
lump.Directories = new GameLumpDirectory[lump.LumpCount];
for (int i = 0; i < lump.LumpCount; i++)
{
@@ -876,7 +914,7 @@ namespace SabreTools.Serialization.Deserializers
var offsets = new List<int>();
while (data.Position < offset + length)
{
offsets.Add(data.ReadInt32());
offsets.Add(data.ReadInt32LittleEndian());
}
return new TexdataStringTable { Offsets = [.. offsets] };

View File

@@ -23,8 +23,8 @@ namespace SabreTools.Serialization.Deserializers
#region Header
// The original version had no signature.
var header = data.ReadType<Header>();
if (header?.Signature != SignatureUInt32)
var header = ParseHeader(data);
if (header.Signature != SignatureUInt32)
return null;
if (header.Version > 2)
return null;
@@ -36,28 +36,16 @@ namespace SabreTools.Serialization.Deserializers
#region Extended Header
// Set the package extended header
if (header.Version == 2)
{
// Try to parse the extended header
var extendedHeader = data.ReadType<ExtendedHeader>();
if (extendedHeader == null)
return null;
// Set the package extended header
file.ExtendedHeader = extendedHeader;
}
file.ExtendedHeader = ParseExtendedHeader(data);
#endregion
#region Directory Items
// Create the directory items tree
var directoryItems = ParseDirectoryItemTree(data);
if (directoryItems == null)
return null;
// Set the directory items
file.DirectoryItems = directoryItems;
file.DirectoryItems = ParseDirectoryItemTree(data);
#endregion
@@ -77,10 +65,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the directory items
while (data.Position < initialOffset + file.ExtendedHeader.ArchiveMD5SectionSize)
{
var archiveHash = data.ReadType<ArchiveHash>();
if (archiveHash == null)
return null;
var archiveHash = ParseArchiveHash(data);
archiveHashes.Add(archiveHash);
}
@@ -98,12 +83,104 @@ namespace SabreTools.Serialization.Deserializers
}
}
/// <summary>
/// Parse a Stream into a ArchiveHash
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ArchiveHash on success, null on error</returns>
public static ArchiveHash ParseArchiveHash(Stream data)
{
var obj = new ArchiveHash();
obj.ArchiveIndex = data.ReadUInt32LittleEndian();
obj.ArchiveOffset = data.ReadUInt32LittleEndian();
obj.Length = data.ReadUInt32LittleEndian();
obj.Hash = data.ReadBytes(0x10);
return obj;
}
/// <summary>
/// Parse a Stream into a DirectoryEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled DirectoryEntry on success, null on error</returns>
public static DirectoryEntry ParseDirectoryEntry(Stream data)
{
var obj = new DirectoryEntry();
obj.CRC = data.ReadUInt32LittleEndian();
obj.PreloadBytes = data.ReadUInt16LittleEndian();
obj.ArchiveIndex = data.ReadUInt16LittleEndian();
obj.EntryOffset = data.ReadUInt32LittleEndian();
obj.EntryLength = data.ReadUInt32LittleEndian();
obj.Dummy0 = data.ReadUInt16LittleEndian();
return obj;
}
/// <summary>
/// Parse a Stream into a Valve Package directory item
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package directory item on success, null on error</returns>
public static DirectoryItem ParseDirectoryItem(Stream data, string extension, string path, string name)
{
var obj = new DirectoryItem();
obj.Extension = extension;
obj.Path = path;
obj.Name = name;
// Set the directory entry
obj.DirectoryEntry = ParseDirectoryEntry(data);
// Get the preload data pointer
long preloadDataPointer = -1; int preloadDataLength = -1;
if (obj.DirectoryEntry.ArchiveIndex == HL_VPK_NO_ARCHIVE
&& obj.DirectoryEntry.EntryLength > 0
&& data.Position + obj.DirectoryEntry.EntryLength <= data.Length)
{
preloadDataPointer = obj.DirectoryEntry.EntryOffset;
preloadDataLength = (int)obj.DirectoryEntry.EntryLength;
}
else if (obj.DirectoryEntry.PreloadBytes > 0)
{
preloadDataPointer = data.Position;
preloadDataLength = obj.DirectoryEntry.PreloadBytes;
}
// If we had a valid preload data pointer
byte[]? preloadData = null;
if (preloadDataPointer >= 0
&& preloadDataLength > 0
&& data.Position + preloadDataLength <= data.Length)
{
// Cache the current offset
long initialOffset = data.Position;
// Seek to the preload data offset
data.Seek(preloadDataPointer, SeekOrigin.Begin);
// Read the preload data
preloadData = data.ReadBytes(preloadDataLength);
// Seek back to the original offset
data.Seek(initialOffset, SeekOrigin.Begin);
}
// Set the preload data
obj.PreloadData = preloadData;
return obj;
}
/// <summary>
/// Parse a Stream into a Valve Package directory item tree
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package directory item tree on success, null on error</returns>
private static DirectoryItem[]? ParseDirectoryItemTree(Stream data)
public static DirectoryItem[] ParseDirectoryItemTree(Stream data)
{
// Create the directory items list
var directoryItems = new List<DirectoryItem>();
@@ -149,8 +226,6 @@ namespace SabreTools.Serialization.Deserializers
// Get the directory item
var directoryItem = ParseDirectoryItem(data, extensionString!, pathString!, nameString!);
if (directoryItem == null)
return null;
// Add the directory item
directoryItems.Add(directoryItem);
@@ -162,64 +237,36 @@ namespace SabreTools.Serialization.Deserializers
}
/// <summary>
/// Parse a Stream into a Valve Package directory item
/// Parse a Stream into a ExtendedHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package directory item on success, null on error</returns>
private static DirectoryItem? ParseDirectoryItem(Stream data, string extension, string path, string name)
/// <returns>Filled ExtendedHeader on success, null on error</returns>
public static ExtendedHeader ParseExtendedHeader(Stream data)
{
var directoryItem = new DirectoryItem();
var obj = new ExtendedHeader();
directoryItem.Extension = extension;
directoryItem.Path = path;
directoryItem.Name = name;
obj.FileDataSectionSize = data.ReadUInt32LittleEndian();
obj.ArchiveMD5SectionSize = data.ReadUInt32LittleEndian();
obj.OtherMD5SectionSize = data.ReadUInt32LittleEndian();
obj.SignatureSectionSize = data.ReadUInt32LittleEndian();
// Get the directory entry
var directoryEntry = data.ReadType<DirectoryEntry>();
if (directoryEntry == null)
return null;
return obj;
}
// Set the directory entry
directoryItem.DirectoryEntry = directoryEntry;
/// <summary>
/// Parse a Stream into a Header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Header on success, null on error</returns>
public static Header ParseHeader(Stream data)
{
var obj = new Header();
// Get the preload data pointer
long preloadDataPointer = -1; int preloadDataLength = -1;
if (directoryEntry.ArchiveIndex == HL_VPK_NO_ARCHIVE
&& directoryEntry.EntryLength > 0
&& data.Position + directoryEntry.EntryLength <= data.Length)
{
preloadDataPointer = directoryEntry.EntryOffset;
preloadDataLength = (int)directoryEntry.EntryLength;
}
else if (directoryEntry.PreloadBytes > 0)
{
preloadDataPointer = data.Position;
preloadDataLength = directoryEntry.PreloadBytes;
}
obj.Signature = data.ReadUInt32LittleEndian();
obj.Version = data.ReadUInt32LittleEndian();
obj.TreeSize = data.ReadUInt32LittleEndian();
// If we had a valid preload data pointer
byte[]? preloadData = null;
if (preloadDataPointer >= 0
&& preloadDataLength > 0
&& data.Position + preloadDataLength <= data.Length)
{
// Cache the current offset
long initialOffset = data.Position;
// Seek to the preload data offset
data.Seek(preloadDataPointer, SeekOrigin.Begin);
// Read the preload data
preloadData = data.ReadBytes(preloadDataLength);
// Seek back to the original offset
data.Seek(initialOffset, SeekOrigin.Begin);
}
// Set the preload data
directoryItem.PreloadData = preloadData;
return directoryItem;
return obj;
}
}
}

View File

@@ -23,8 +23,8 @@ namespace SabreTools.Serialization.Deserializers
#region Header
// Try to parse the header
var header = data.ReadType<Header>();
if (header?.Signature != SignatureString)
var header = ParseHeader(data);
if (header.Signature != SignatureString)
return null;
// Set the package header
@@ -46,11 +46,7 @@ namespace SabreTools.Serialization.Deserializers
file.DirEntries = new DirEntry[header.NumDirs];
for (int i = 0; i < header.NumDirs; i++)
{
var lump = data.ReadType<DirEntry>();
if (lump == null)
return null;
file.DirEntries[i] = lump;
file.DirEntries[i] = ParseDirEntry(data);
}
#endregion
@@ -95,12 +91,48 @@ namespace SabreTools.Serialization.Deserializers
}
/// <summary>
/// Parse a Stream into a Half-Life Texture Package file entry
/// Parse a Stream into a CharInfo
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled CharInfo on success, null on error</returns>
public static CharInfo ParseCharInfo(Stream data)
{
var obj = new CharInfo();
obj.StartOffset = data.ReadUInt16LittleEndian();
obj.CharWidth = data.ReadUInt16LittleEndian();
return obj;
}
/// <summary>
/// Parse a Stream into a DirEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled DirEntry on success, null on error</returns>
public static DirEntry ParseDirEntry(Stream data)
{
var obj = new DirEntry();
obj.Offset = data.ReadUInt32LittleEndian();
obj.DiskLength = data.ReadUInt32LittleEndian();
obj.Length = data.ReadUInt32LittleEndian();
obj.Type = (FileType)data.ReadByteValue();
obj.Compression = data.ReadByteValue();
obj.Padding = data.ReadUInt16LittleEndian();
byte[] name = data.ReadBytes(16);
obj.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
return obj;
}
/// <summary>
/// Parse a Stream into a FileEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="type">File entry type</param>
/// <returns>Filled Half-Life Texture Package file entry on success, null on error</returns>
private static FileEntry? ParseFileEntry(Stream data, FileType type)
/// <returns>Filled FileEntry on success, null on error</returns>
public static FileEntry? ParseFileEntry(Stream data, FileType type)
{
return type switch
{
@@ -113,115 +145,130 @@ namespace SabreTools.Serialization.Deserializers
}
/// <summary>
/// Parse a Stream into a Half-Life Texture Package MipTex
/// Parse a Stream into a Font
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Texture Package MipTex on success, null on error</returns>
private static MipTex ParseMipTex(Stream data)
/// <returns>Filled Font on success, null on error</returns>
public static Font ParseFont(Stream data)
{
var miptex = new MipTex();
var obj = new Font();
byte[] nameBytes = data.ReadBytes(16);
miptex.Name = Encoding.ASCII.GetString(nameBytes).TrimEnd('\0');
miptex.Width = data.ReadUInt32();
miptex.Height = data.ReadUInt32();
miptex.MipOffsets = new uint[4];
for (int i = 0; i < miptex.MipOffsets.Length; i++)
obj.Width = data.ReadUInt32LittleEndian();
obj.Height = data.ReadUInt32LittleEndian();
obj.RowCount = data.ReadUInt32LittleEndian();
obj.RowHeight = data.ReadUInt32LittleEndian();
obj.FontInfo = new CharInfo[256];
for (int i = 0; i < obj.FontInfo.Length; i++)
{
miptex.MipOffsets[i] = data.ReadUInt32();
obj.FontInfo[i] = ParseCharInfo(data);
}
miptex.MipImages = new MipMap[4];
for (int i = 0; i < miptex.MipImages.Length; i++)
obj.Data = new byte[obj.Height][];
for (int i = 0; i < obj.Height; i++)
{
miptex.MipImages[i] = ParseMipMap(data, miptex.Width, miptex.Height);
obj.Data[i] = data.ReadBytes((int)obj.Width);
}
miptex.ColorsUsed = data.ReadUInt16();
miptex.Palette = new byte[miptex.ColorsUsed][];
for (int i = 0; i < miptex.ColorsUsed; i++)
obj.ColorsUsed = data.ReadUInt16LittleEndian();
obj.Palette = new byte[obj.ColorsUsed][];
for (int i = 0; i < obj.ColorsUsed; i++)
{
miptex.Palette[i] = data.ReadBytes(3);
obj.Palette[i] = data.ReadBytes(3);
}
return miptex;
return obj;
}
/// <summary>
/// Parse a Stream into a Half-Life Texture Package MipMap
/// Parse a Stream into a Header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Texture Package MipMap on success, null on error</returns>
private static MipMap ParseMipMap(Stream data, uint width, uint height)
/// <returns>Filled Header on success, null on error</returns>
public static Header ParseHeader(Stream data)
{
var mipmap = new MipMap();
var obj = new Header();
mipmap.Data = new byte[width][];
byte[] signature = data.ReadBytes(4);
obj.Signature = Encoding.ASCII.GetString(signature);
obj.NumDirs = data.ReadUInt32LittleEndian();
obj.DirOffset = data.ReadUInt32LittleEndian();
return obj;
}
/// <summary>
/// Parse a Stream into a MipMap
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled MipMap on success, null on error</returns>
public static MipMap ParseMipMap(Stream data, uint width, uint height)
{
var obj = new MipMap();
obj.Data = new byte[width][];
for (int i = 0; i < width; i++)
{
mipmap.Data[i] = data.ReadBytes((int)height);
obj.Data[i] = data.ReadBytes((int)height);
}
return mipmap;
return obj;
}
/// <summary>
/// Parse a Stream into a Half-Life Texture Package Qpic image
/// Parse a Stream into a MipTex
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Texture Package Qpic image on success, null on error</returns>
private static QpicImage ParseQpicImage(Stream data)
/// <returns>Filled MipTex on success, null on error</returns>
public static MipTex ParseMipTex(Stream data)
{
var qpic = new QpicImage();
var obj = new MipTex();
qpic.Width = data.ReadUInt32();
qpic.Height = data.ReadUInt32();
qpic.Data = new byte[qpic.Height][];
for (int i = 0; i < qpic.Height; i++)
byte[] nameBytes = data.ReadBytes(16);
obj.Name = Encoding.ASCII.GetString(nameBytes).TrimEnd('\0');
obj.Width = data.ReadUInt32LittleEndian();
obj.Height = data.ReadUInt32LittleEndian();
obj.MipOffsets = new uint[4];
for (int i = 0; i < obj.MipOffsets.Length; i++)
{
qpic.Data[i] = data.ReadBytes((int)qpic.Width);
obj.MipOffsets[i] = data.ReadUInt32LittleEndian();
}
qpic.ColorsUsed = data.ReadUInt16();
qpic.Palette = new byte[qpic.ColorsUsed][];
for (int i = 0; i < qpic.ColorsUsed; i++)
obj.MipImages = new MipMap[4];
for (int i = 0; i < obj.MipImages.Length; i++)
{
qpic.Palette[i] = data.ReadBytes(3);
obj.MipImages[i] = ParseMipMap(data, obj.Width, obj.Height);
}
obj.ColorsUsed = data.ReadUInt16LittleEndian();
obj.Palette = new byte[obj.ColorsUsed][];
for (int i = 0; i < obj.ColorsUsed; i++)
{
obj.Palette[i] = data.ReadBytes(3);
}
return qpic;
return obj;
}
/// <summary>
/// Parse a Stream into a Half-Life Texture Package font
/// Parse a Stream into a QpicImage
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Texture Package font on success, null on error</returns>
private static Font ParseFont(Stream data)
/// <returns>Filled QpicImage on success, null on error</returns>
public static QpicImage ParseQpicImage(Stream data)
{
var font = new Font();
var obj = new QpicImage();
font.Width = data.ReadUInt32();
font.Height = data.ReadUInt32();
font.RowCount = data.ReadUInt32();
font.RowHeight = data.ReadUInt32();
font.FontInfo = new CharInfo[256];
for (int i = 0; i < font.FontInfo.Length; i++)
obj.Width = data.ReadUInt32LittleEndian();
obj.Height = data.ReadUInt32LittleEndian();
obj.Data = new byte[obj.Height][];
for (int i = 0; i < obj.Height; i++)
{
var fontInfo = data.ReadType<CharInfo>();
if (fontInfo != null)
font.FontInfo[i] = fontInfo;
obj.Data[i] = data.ReadBytes((int)obj.Width);
}
font.Data = new byte[font.Height][];
for (int i = 0; i < font.Height; i++)
obj.ColorsUsed = data.ReadUInt16LittleEndian();
obj.Palette = new byte[obj.ColorsUsed][];
for (int i = 0; i < obj.ColorsUsed; i++)
{
font.Data[i] = data.ReadBytes((int)font.Width);
}
font.ColorsUsed = data.ReadUInt16();
font.Palette = new byte[font.ColorsUsed][];
for (int i = 0; i < font.ColorsUsed; i++)
{
font.Palette[i] = data.ReadBytes(3);
obj.Palette[i] = data.ReadBytes(3);
}
return font;
return obj;
}
}
}

View File

@@ -1,4 +1,5 @@
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.XZP;
using static SabreTools.Models.XZP.Constants;
@@ -22,8 +23,8 @@ namespace SabreTools.Serialization.Deserializers
#region Header
// Try to parse the header
var header = data.ReadType<Header>();
if (header?.Signature != HeaderSignatureString)
var header = ParseHeader(data);
if (header.Signature != HeaderSignatureString)
return null;
if (header.Version != 6)
return null;
@@ -41,11 +42,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the directory entries
for (int i = 0; i < file.DirectoryEntries.Length; i++)
{
var directoryEntry = data.ReadType<DirectoryEntry>();
if (directoryEntry == null)
continue;
file.DirectoryEntries[i] = directoryEntry;
file.DirectoryEntries[i] = ParseDirectoryEntry(data);
}
#endregion
@@ -60,11 +57,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the preload directory entries
for (int i = 0; i < file.PreloadDirectoryEntries.Length; i++)
{
var directoryEntry = data.ReadType<DirectoryEntry>();
if (directoryEntry == null)
continue;
file.PreloadDirectoryEntries[i] = directoryEntry;
file.PreloadDirectoryEntries[i] = ParseDirectoryEntry(data);
}
}
@@ -80,11 +73,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the preload directory mappings
for (int i = 0; i < file.PreloadDirectoryMappings.Length; i++)
{
var directoryMapping = data.ReadType<DirectoryMapping>();
if (directoryMapping == null)
continue;
file.PreloadDirectoryMappings[i] = directoryMapping;
file.PreloadDirectoryMappings[i] = ParseDirectoryMapping(data);
}
}
@@ -108,8 +97,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the directory items
for (int i = 0; i < file.DirectoryItems.Length; i++)
{
var directoryItem = ParseDirectoryItem(data);
file.DirectoryItems[i] = directoryItem;
file.DirectoryItems[i] = ParseDirectoryItem(data);
}
}
@@ -121,8 +109,8 @@ namespace SabreTools.Serialization.Deserializers
data.Seek(-8, SeekOrigin.End);
// Try to parse the footer
var footer = data.ReadType<Footer>();
if (footer?.Signature != FooterSignatureString)
var footer = ParseFooter(data);
if (footer.Signature != FooterSignatureString)
return null;
// Set the package footer
@@ -140,31 +128,100 @@ namespace SabreTools.Serialization.Deserializers
}
/// <summary>
/// Parse a Stream into a XBox Package File directory item
/// Parse a Stream into a DirectoryEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled XBox Package File directory item on success, null on error</returns>
private static DirectoryItem ParseDirectoryItem(Stream data)
/// <returns>Filled DirectoryEntry on success, null on error</returns>
public static DirectoryEntry ParseDirectoryEntry(Stream data)
{
var directoryItem = new DirectoryItem();
var obj = new DirectoryEntry();
directoryItem.FileNameCRC = data.ReadUInt32();
directoryItem.NameOffset = data.ReadUInt32();
directoryItem.TimeCreated = data.ReadUInt32();
obj.FileNameCRC = data.ReadUInt32LittleEndian();
obj.EntryLength = data.ReadUInt32LittleEndian();
obj.EntryOffset = data.ReadUInt32LittleEndian();
return obj;
}
/// <summary>
/// Parse a Stream into a DirectoryItem
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled DirectoryItem on success, null on error</returns>
public static DirectoryItem ParseDirectoryItem(Stream data)
{
var obj = new DirectoryItem();
obj.FileNameCRC = data.ReadUInt32LittleEndian();
obj.NameOffset = data.ReadUInt32LittleEndian();
obj.TimeCreated = data.ReadUInt32LittleEndian();
// Cache the current offset
long currentPosition = data.Position;
// Seek to the name offset
data.Seek(directoryItem.NameOffset, SeekOrigin.Begin);
data.Seek(obj.NameOffset, SeekOrigin.Begin);
// Read the name
directoryItem.Name = data.ReadNullTerminatedAnsiString();
obj.Name = data.ReadNullTerminatedAnsiString();
// Seek back to the right position
data.Seek(currentPosition, SeekOrigin.Begin);
return directoryItem;
return obj;
}
/// <summary>
/// Parse a Stream into a DirectoryMapping
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled DirectoryMapping on success, null on error</returns>
public static DirectoryMapping ParseDirectoryMapping(Stream data)
{
var obj = new DirectoryMapping();
obj.PreloadDirectoryEntryIndex = data.ReadUInt16LittleEndian();
return obj;
}
/// <summary>
/// Parse a Stream into a Footer
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Footer on success, null on error</returns>
public static Footer ParseFooter(Stream data)
{
var obj = new Footer();
obj.FileLength = data.ReadUInt32LittleEndian();
byte[] signature = data.ReadBytes(4);
obj.Signature = Encoding.ASCII.GetString(signature);
return obj;
}
/// <summary>
/// Parse a Stream into a Header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Header on success, null on error</returns>
public static Header ParseHeader(Stream data)
{
var obj = new Header();
byte[] signature = data.ReadBytes(4);
obj.Signature = Encoding.ASCII.GetString(signature);
obj.Version = data.ReadUInt32LittleEndian();
obj.PreloadDirectoryEntryCount = data.ReadUInt32LittleEndian();
obj.DirectoryEntryCount = data.ReadUInt32LittleEndian();
obj.PreloadBytes = data.ReadUInt32LittleEndian();
obj.HeaderLength = data.ReadUInt32LittleEndian();
obj.DirectoryItemCount = data.ReadUInt32LittleEndian();
obj.DirectoryItemOffset = data.ReadUInt32LittleEndian();
obj.DirectoryItemLength = data.ReadUInt32LittleEndian();
return obj;
}
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -42,9 +42,13 @@ namespace SabreTools.Serialization
Wrapper.CHD item => item.PrettyPrint(),
Wrapper.CIA item => item.PrettyPrint(),
Wrapper.GCF item => item.PrettyPrint(),
Wrapper.InstallShieldArchiveV3 item => item.PrettyPrint(),
Wrapper.InstallShieldCabinet item => item.PrettyPrint(),
Wrapper.IRD item => item.PrettyPrint(),
Wrapper.LinearExecutable item => item.PrettyPrint(),
Wrapper.LZKWAJ item => item.PrettyPrint(),
Wrapper.LZQBasic item => item.PrettyPrint(),
Wrapper.LZSZDD item => item.PrettyPrint(),
Wrapper.MicrosoftCabinet item => item.PrettyPrint(),
Wrapper.MoPaQ item => item.PrettyPrint(),
Wrapper.MSDOS item => item.PrettyPrint(),
@@ -87,9 +91,13 @@ namespace SabreTools.Serialization
Wrapper.CHD item => item.ExportJSON(),
Wrapper.CIA item => item.ExportJSON(),
Wrapper.GCF item => item.ExportJSON(),
Wrapper.InstallShieldArchiveV3 item => item.ExportJSON(),
Wrapper.InstallShieldCabinet item => item.ExportJSON(),
Wrapper.IRD item => item.ExportJSON(),
Wrapper.LinearExecutable item => item.ExportJSON(),
Wrapper.LZKWAJ item => item.ExportJSON(),
Wrapper.LZQBasic item => item.ExportJSON(),
Wrapper.LZSZDD item => item.ExportJSON(),
Wrapper.MicrosoftCabinet item => item.ExportJSON(),
Wrapper.MoPaQ item => item.ExportJSON(),
Wrapper.MSDOS item => item.ExportJSON(),
@@ -199,6 +207,16 @@ namespace SabreTools.Serialization
return builder;
}
/// <summary>
/// Export the item information as pretty-printed text
/// </summary>
private static StringBuilder PrettyPrint(this Wrapper.InstallShieldArchiveV3 item)
{
var builder = new StringBuilder();
InstallShieldArchiveV3.Print(builder, item.Model);
return builder;
}
/// <summary>
/// Export the item information as pretty-printed text
/// </summary>
@@ -229,6 +247,36 @@ namespace SabreTools.Serialization
return builder;
}
/// <summary>
/// Export the item information as pretty-printed text
/// </summary>
private static StringBuilder PrettyPrint(this Wrapper.LZKWAJ item)
{
var builder = new StringBuilder();
LZKWAJ.Print(builder, item.Model);
return builder;
}
/// <summary>
/// Export the item information as pretty-printed text
/// </summary>
private static StringBuilder PrettyPrint(this Wrapper.LZQBasic item)
{
var builder = new StringBuilder();
LZQBasic.Print(builder, item.Model);
return builder;
}
/// <summary>
/// Export the item information as pretty-printed text
/// </summary>
private static StringBuilder PrettyPrint(this Wrapper.LZSZDD item)
{
var builder = new StringBuilder();
LZSZDD.Print(builder, item.Model);
return builder;
}
/// <summary>
/// Export the item information as pretty-printed text
/// </summary>

View File

@@ -0,0 +1,114 @@
using System.Text;
using SabreTools.Models.InstallShieldArchiveV3;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Printers
{
public class InstallShieldArchiveV3 : IPrinter<Archive>
{
/// <inheritdoc/>
public void PrintInformation(StringBuilder builder, Archive model)
=> Print(builder, model);
public static void Print(StringBuilder builder, Archive archive)
{
builder.AppendLine("InstallShield Archive V3 Information:");
builder.AppendLine("-------------------------");
builder.AppendLine();
Print(builder, archive.Header);
Print(builder, archive.Directories);
Print(builder, archive.Files);
}
private static void Print(StringBuilder builder, Header? header)
{
builder.AppendLine(" Header Information:");
builder.AppendLine(" -------------------------");
if (header == null)
{
builder.AppendLine(" No header");
builder.AppendLine();
return;
}
builder.AppendLine(header.Signature1, " Signature 1");
builder.AppendLine(header.Signature2, " Signature 2");
builder.AppendLine(header.Reserved0, " Reserved 0");
builder.AppendLine(header.IsMultivolume, " Is multivolume");
builder.AppendLine(header.FileCount, " File count");
builder.AppendLine(header.DateTime, " Datetime");
builder.AppendLine(header.CompressedSize, " Compressed size");
builder.AppendLine(header.UncompressedSize, " Uncompressed size");
builder.AppendLine(header.Reserved1, " Reserved 1");
builder.AppendLine(header.VolumeTotal, " Volume total");
builder.AppendLine(header.VolumeNumber, " Volume number");
builder.AppendLine(header.Reserved2, " Reserved 2");
builder.AppendLine(header.SplitBeginAddress, " Split begin address");
builder.AppendLine(header.SplitEndAddress, " Split end address");
builder.AppendLine(header.TocAddress, " TOC address");
builder.AppendLine(header.Reserved3, " Reserved 3");
builder.AppendLine(header.DirCount, " Dir count");
builder.AppendLine(header.Reserved4, " Reserved 4");
builder.AppendLine(header.Reserved5, " Reserved 5");
builder.AppendLine();
}
private static void Print(StringBuilder builder, Directory[]? entries)
{
builder.AppendLine(" Directories:");
builder.AppendLine(" -------------------------");
if (entries == null || entries.Length == 0)
{
builder.AppendLine(" No directories");
builder.AppendLine();
return;
}
for (int i = 0; i < entries.Length; i++)
{
var entry = entries[i];
builder.AppendLine($" Directory {i}");
builder.AppendLine(entry.FileCount, " File count");
builder.AppendLine(entry.ChunkSize, " Chunk size");
builder.AppendLine(entry.NameLength, " Name length");
builder.AppendLine(entry.Name, " Name");
}
builder.AppendLine();
}
private static void Print(StringBuilder builder, File[]? entries)
{
builder.AppendLine(" Files:");
builder.AppendLine(" -------------------------");
if (entries == null || entries.Length == 0)
{
builder.AppendLine(" No files");
builder.AppendLine();
return;
}
for (int i = 0; i < entries.Length; i++)
{
var entry = entries[i];
builder.AppendLine($" File {i}");
builder.AppendLine(entry.VolumeEnd, " Volume end");
builder.AppendLine(entry.Index, " Index");
builder.AppendLine(entry.UncompressedSize, " Uncompressed size");
builder.AppendLine(entry.CompressedSize, " Compressed size");
builder.AppendLine(entry.Offset, " Offset");
builder.AppendLine(entry.DateTime, " Datetime");
builder.AppendLine(entry.Reserved0, " Reserved 0");
builder.AppendLine(entry.ChunkSize, " Chunk size");
builder.AppendLine($" Attrib: {entry.Attrib} (0x{entry.Attrib:X})");
builder.AppendLine(entry.IsSplit, " Is split");
builder.AppendLine(entry.Reserved1, " Reserved 1");
builder.AppendLine(entry.VolumeStart, " Volume start");
builder.AppendLine(entry.Name, " Name");
}
builder.AppendLine();
}
}
}

View File

@@ -0,0 +1,63 @@
using System.Text;
using SabreTools.Models.LZ;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Printers
{
public class LZKWAJ : IPrinter<KWAJFile>
{
/// <inheritdoc/>
public void PrintInformation(StringBuilder builder, KWAJFile model)
=> Print(builder, model);
public static void Print(StringBuilder builder, KWAJFile file)
{
builder.AppendLine("LZ-compressed File, KWAJ Variant Information:");
builder.AppendLine("-------------------------");
builder.AppendLine();
Print(builder, file.Header);
Print(builder, file.HeaderExtensions);
}
private static void Print(StringBuilder builder, KWAJHeader? header)
{
builder.AppendLine(" Header Information:");
builder.AppendLine(" -------------------------");
if (header == null)
{
builder.AppendLine(" No header");
builder.AppendLine();
return;
}
builder.AppendLine(header.Magic, " Magic number");
builder.AppendLine($" Compression type: {header.CompressionType} (0x{header.CompressionType:X})");
builder.AppendLine(header.DataOffset, " Data offset");
builder.AppendLine($" Header flags: {header.HeaderFlags} (0x{header.HeaderFlags:X})");
builder.AppendLine();
}
private static void Print(StringBuilder builder, KWAJHeaderExtensions? header)
{
builder.AppendLine(" Header Extensions Information:");
builder.AppendLine(" -------------------------");
if (header == null)
{
builder.AppendLine(" No header extensions");
builder.AppendLine();
return;
}
builder.AppendLine(header.DecompressedLength, " Decompressed length");
builder.AppendLine(header.UnknownPurpose, " Unknown purpose");
builder.AppendLine(header.UnknownDataLength, " Unknown data length");
builder.AppendLine(header.UnknownData, " Unknown data");
builder.AppendLine(header.FileName, " File name");
builder.AppendLine(header.FileExtension, " File extension");
builder.AppendLine(header.ArbitraryTextLength, " Arbitrary text length");
builder.AppendLine(header.ArbitraryText, " Arbitrary text");
builder.AppendLine();
}
}
}

View File

@@ -0,0 +1,38 @@
using System.Text;
using SabreTools.Models.LZ;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Printers
{
public class LZQBasic : IPrinter<QBasicFile>
{
/// <inheritdoc/>
public void PrintInformation(StringBuilder builder, QBasicFile model)
=> Print(builder, model);
public static void Print(StringBuilder builder, QBasicFile file)
{
builder.AppendLine("LZ-compressed File, QBasic Variant Information:");
builder.AppendLine("-------------------------");
builder.AppendLine();
Print(builder, file.Header);
}
private static void Print(StringBuilder builder, QBasicHeader? header)
{
builder.AppendLine(" Header Information:");
builder.AppendLine(" -------------------------");
if (header == null)
{
builder.AppendLine(" No header");
builder.AppendLine();
return;
}
builder.AppendLine(header.Magic, " Magic number");
builder.AppendLine(header.RealLength, " Real length");
builder.AppendLine();
}
}
}

View File

@@ -0,0 +1,40 @@
using System.Text;
using SabreTools.Models.LZ;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Printers
{
public class LZSZDD : IPrinter<SZDDFile>
{
/// <inheritdoc/>
public void PrintInformation(StringBuilder builder, SZDDFile model)
=> Print(builder, model);
public static void Print(StringBuilder builder, SZDDFile file)
{
builder.AppendLine("LZ-compressed File, SZDD Variant Information:");
builder.AppendLine("-------------------------");
builder.AppendLine();
Print(builder, file.Header);
}
private static void Print(StringBuilder builder, SZDDHeader? header)
{
builder.AppendLine(" Header Information:");
builder.AppendLine(" -------------------------");
if (header == null)
{
builder.AppendLine(" No header");
builder.AppendLine();
return;
}
builder.AppendLine(header.Magic, " Magic number");
builder.AppendLine($" Compression type: {header.CompressionType} (0x{header.CompressionType:X})");
builder.AppendLine(header.LastChar, " Last char");
builder.AppendLine(header.RealLength, " Real length");
builder.AppendLine();
}
}
}

View File

@@ -4,6 +4,7 @@ using System.Text;
using System.Xml;
using SabreTools.ASN1;
using SabreTools.IO.Extensions;
using SabreTools.Matching;
using SabreTools.Models.PortableExecutable;
using SabreTools.Serialization.Interfaces;
@@ -1346,32 +1347,36 @@ namespace SabreTools.Serialization.Printers
else
{
int offset = 0;
byte[]? magic = entry.Data.ReadBytes(ref offset, Math.Min(entry.Data.Length, 16));
byte[] magic = entry.Data.ReadBytes(ref offset, Math.Min(entry.Data.Length, 16));
if (magic == null)
{
// No-op
}
else if (magic[0] == 0x4D && magic[1] == 0x5A)
if (magic.StartsWith([0x4D, 0x5A]))
{
builder.AppendLine($"{padding}Data: [Embedded Executable File]"); // TODO: Parse this out and print separately
}
else if (magic[0] == 0x4D && magic[1] == 0x53 && magic[2] == 0x46 && magic[3] == 0x54)
else if (magic.StartsWith([0x4D, 0x53, 0x46, 0x54]))
{
builder.AppendLine($"{padding}Data: [Embedded OLE Library File]"); // TODO: Parse this out and print separately
}
else if (magic[0] == 0x50 && magic[1] == 0x4B && magic[2] == 0x03 && magic[3] == 0x04)
else if (magic.StartsWith([0x50, 0x4B, 0x03, 0x04]))
{
builder.AppendLine($"{padding}Data: [Embedded PKZIP file]"); // TODO: Parse this out and print separately
}
else if (magic[0] == 0x50 && magic[1] == 0x4B && magic[2] == 0x05 && magic[3] == 0x06)
else if (magic.StartsWith([0x50, 0x4B, 0x05, 0x06]))
{
builder.AppendLine($"{padding}Data: [Embedded empty PKZIP file]"); // TODO: Parse this out and print separately
}
else if (magic[0] == 0x50 && magic[1] == 0x4B && magic[2] == 0x07 && magic[3] == 0x08)
else if (magic.StartsWith([0x50, 0x4B, 0x07, 0x08]))
{
builder.AppendLine($"{padding}Data: [Embedded spanned PKZIP file]"); // TODO: Parse this out and print separately
}
else if (magic.StartsWith([0x52, 0x61, 0x72, 0x21, 0x1A, 0x07, 0x00]))
{
builder.AppendLine($"{padding}Data: [Embedded RAR file]"); // TODO: Parse this out and print separately
}
else if (magic.StartsWith([0x52, 0x61, 0x72, 0x21, 0x1A, 0x07, 0x01, 0x00]))
{
builder.AppendLine($"{padding}Data: [Embedded RAR5 file]"); // TODO: Parse this out and print separately
}
else
{
builder.AppendLine(magic, $"{padding}Data");
@@ -1830,26 +1835,34 @@ namespace SabreTools.Serialization.Printers
int offset = 0;
byte[] magic = entry.Data.ReadBytes(ref offset, Math.Min(entry.Data.Length, 16));
if (magic[0] == 0x4D && magic[1] == 0x5A)
if (magic.StartsWith([0x4D, 0x5A]))
{
builder.AppendLine($"{padding}Data: [Embedded Executable File]"); // TODO: Parse this out and print separately
}
else if (magic[0] == 0x4D && magic[1] == 0x53 && magic[2] == 0x46 && magic[3] == 0x54)
else if (magic.StartsWith([0x4D, 0x53, 0x46, 0x54]))
{
builder.AppendLine($"{padding}Data: [Embedded OLE Library File]"); // TODO: Parse this out and print separately
}
else if (magic[0] == 0x50 && magic[1] == 0x4B && magic[2] == 0x03 && magic[3] == 0x04)
else if (magic.StartsWith([0x50, 0x4B, 0x03, 0x04]))
{
builder.AppendLine($"{padding}Data: [Embedded PKZIP file]"); // TODO: Parse this out and print separately
}
else if (magic[0] == 0x50 && magic[1] == 0x4B && magic[2] == 0x05 && magic[3] == 0x06)
else if (magic.StartsWith([0x50, 0x4B, 0x05, 0x06]))
{
builder.AppendLine($"{padding}Data: [Embedded empty PKZIP file]"); // TODO: Parse this out and print separately
}
else if (magic[0] == 0x50 && magic[1] == 0x4B && magic[2] == 0x07 && magic[3] == 0x08)
else if (magic.StartsWith([0x50, 0x4B, 0x07, 0x08]))
{
builder.AppendLine($"{padding}Data: [Embedded spanned PKZIP file]"); // TODO: Parse this out and print separately
}
else if (magic.StartsWith([0x52, 0x61, 0x72, 0x21, 0x1A, 0x07, 0x00]))
{
builder.AppendLine($"{padding}Data: [Embedded RAR file]"); // TODO: Parse this out and print separately
}
else if (magic.StartsWith([0x52, 0x61, 0x72, 0x21, 0x1A, 0x07, 0x01, 0x00]))
{
builder.AppendLine($"{padding}Data: [Embedded RAR5 file]"); // TODO: Parse this out and print separately
}
else
{
builder.AppendLine(magic, $"{padding}Data");

View File

@@ -6,16 +6,18 @@
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<CheckEolTargetFramework>false</CheckEolTargetFramework>
<IncludeSourceRevisionInInformationalVersion>false</IncludeSourceRevisionInInformationalVersion>
<IncludeSymbols>true</IncludeSymbols>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<SymbolPackageFormat>snupkg</SymbolPackageFormat>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.8.1</Version>
<Version>1.8.6</Version>
<!-- Package Properties -->
<Authors>Matt Nadareski</Authors>
<Description>Serialization and deserialization helpers for various types</Description>
<Copyright>Copyright (c) Matt Nadareski 2019-2024</Copyright>
<Copyright>Copyright (c) Matt Nadareski 2019-2025</Copyright>
<PackageProjectUrl>https://github.com/SabreTools/</PackageProjectUrl>
<PackageReadmeFile>README.md</PackageReadmeFile>
<RepositoryUrl>https://github.com/SabreTools/SabreTools.Serialization</RepositoryUrl>
@@ -30,10 +32,11 @@
<ItemGroup>
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.ASN1" Version="1.5.0" />
<PackageReference Include="SabreTools.Hashing" Version="1.4.0" />
<PackageReference Include="SabreTools.IO" Version="1.6.1" />
<PackageReference Include="SabreTools.Models" Version="1.5.5" />
<PackageReference Include="SabreTools.ASN1" Version="1.5.1" />
<PackageReference Include="SabreTools.Compression" Version="0.6.3" />
<PackageReference Include="SabreTools.Hashing" Version="1.4.1" />
<PackageReference Include="SabreTools.IO" Version="1.6.2" />
<PackageReference Include="SabreTools.Models" Version="1.5.8" />
</ItemGroup>
</Project>

View File

@@ -117,6 +117,7 @@ namespace SabreTools.Serialization.Serializers
writer.WriteRequiredAttributeString("size", file.Size, throwOnError: true);
writer.WriteOptionalAttributeString("date", file.Date);
writer.WriteRequiredAttributeString("crc", file.CRC?.ToUpperInvariant(), throwOnError: true);
writer.WriteRequiredAttributeString("sha1", file.SHA1?.ToUpperInvariant());
writer.WriteEndElement(); // file
}

View File

@@ -1,8 +1,10 @@
using System.IO;
using SabreTools.Compression.Deflate;
using SabreTools.Models.BFPK;
namespace SabreTools.Serialization.Wrappers
{
public class BFPK : WrapperBase<Models.BFPK.Archive>
public class BFPK : WrapperBase<Archive>
{
#region Descriptive Properties
@@ -11,17 +13,24 @@ namespace SabreTools.Serialization.Wrappers
#endregion
#region Extension Properties
/// <inheritdoc cref="Archive.Files"/>
public FileEntry[] Files => Model.Files ?? [];
#endregion
#region Constructors
/// <inheritdoc/>
public BFPK(Models.BFPK.Archive? model, byte[]? data, int offset)
public BFPK(Archive? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
/// <inheritdoc/>
public BFPK(Models.BFPK.Archive? model, Stream? data)
public BFPK(Archive? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
@@ -74,5 +83,96 @@ namespace SabreTools.Serialization.Wrappers
}
#endregion
#region Extraction
/// <summary>
/// Extract every file in the BFPK archive to the given output directory
/// </summary>
/// <param name="outputDirectory">Output directory to write to</param>
/// <returns>True if all files extracted, false otherwise</returns>
public bool ExtractAll(string outputDirectory)
{
    // Nothing to extract when the archive reports no entries
    if (Files.Length == 0)
        return false;

    // Attempt every entry; a single failure flips the aggregate result
    bool success = true;
    for (int fileIndex = 0; fileIndex < Files.Length; fileIndex++)
        success &= ExtractFile(fileIndex, outputDirectory);

    return success;
}
/// <summary>
/// Extract a file from the BFPK to an output directory by index
/// </summary>
/// <param name="index">File index to extract</param>
/// <param name="outputDirectory">Output directory to write to</param>
/// <returns>True if the file extracted, false otherwise</returns>
public bool ExtractFile(int index, string outputDirectory)
{
    // If we have no files
    if (Files == null || Files.Length == 0)
        return false;

    // If we have an invalid index
    if (index < 0 || index >= Files.Length)
        return false;

    // Get the file information
    var file = Files[index];
    if (file == null)
        return false;

    // Get the read index and length (skip the 4-byte length prefix)
    int offset = file.Offset + 4;
    int compressedSize = file.CompressedSize;

    // Some files can lack the length prefix
    if (compressedSize > GetEndOfFile())
    {
        offset -= 4;
        compressedSize = file.UncompressedSize;
    }

    try
    {
        // Read the data block BEFORE touching the filesystem so a failed
        // read does not leave an empty output file behind
        var data = ReadFromDataSource(offset, compressedSize);
        if (data == null)
            return false;

        // Ensure the output directory exists
        Directory.CreateDirectory(outputDirectory);

        // Create the output path; File.Create truncates an existing file,
        // whereas File.OpenWrite would leave stale trailing bytes when
        // overwriting a longer file
        string filePath = Path.Combine(outputDirectory, file.Name ?? $"file{index}");
        using FileStream fs = File.Create(filePath);

        // Equal sizes mean the entry is stored uncompressed
        if (compressedSize == file.UncompressedSize)
        {
            fs.Write(data, 0, compressedSize);
        }
        else
        {
            // Inflate the zlib-compressed payload; dispose both streams
            using var ms = new MemoryStream(data);
            using var zs = new ZlibStream(ms, CompressionMode.Decompress);
            zs.CopyTo(fs);
        }

        return true;
    }
    catch
    {
        return false;
    }
}
#endregion
}
}

View File

@@ -0,0 +1,287 @@
using System.Collections.Generic;
using System.IO;
using SabreTools.Compression.Blast;
using SabreTools.Models.InstallShieldArchiveV3;
namespace SabreTools.Serialization.Wrappers
{
/// <remarks>
/// Reference (de)compressor: https://www.sac.sk/download/pack/icomp95.zip
/// </remarks>
/// <see href="https://github.com/wfr/unshieldv3"/>
public partial class InstallShieldArchiveV3 : WrapperBase<Archive>
{
#region Descriptive Properties
/// <inheritdoc/>
public override string DescriptionString => "InstallShield Archive V3";
#endregion
#region Extension Properties
/// <inheritdoc cref="Header.DirCount"/>
public ushort DirCount => Model.Header?.DirCount ?? 0;
/// <inheritdoc cref="Header.FileCount"/>
public ushort FileCount => Model.Header?.FileCount ?? 0;
/// <inheritdoc cref="Archive.Directories"/>
public Models.InstallShieldArchiveV3.Directory[] Directories => Model.Directories ?? [];
/// <inheritdoc cref="Archive.Files"/>
public Models.InstallShieldArchiveV3.File[] Files => Model.Files ?? [];
/// <summary>
/// Map of all files to their parent directories by index
/// </summary>
/// <remarks>
/// Built lazily on first access and cached in <c>_fileDirMap</c>.
/// File indices are assigned in directory order: each directory claims
/// the next <c>FileCount</c> sequential file ids, mirroring how the
/// archive lays out its file table.
/// </remarks>
public Dictionary<int, int> FileDirMap
{
get
{
// Return the prebuilt map
if (_fileDirMap != null)
return _fileDirMap;
// Build the file map
_fileDirMap = [];
int fileId = 0;
for (int i = 0; i < Directories.Length; i++)
{
var dir = Directories[i];
// Each directory owns the next dir.FileCount sequential file ids
for (int j = 0; j < dir.FileCount; j++)
{
_fileDirMap[fileId++] = i;
}
}
return _fileDirMap;
}
}
// Backing cache for FileDirMap; null until first access
private Dictionary<int, int>? _fileDirMap = null;
/// <summary>
/// Map of all files found in the archive
/// </summary>
public Dictionary<string, Models.InstallShieldArchiveV3.File> FileNameMap
{
get
{
// Return the prebuilt map
if (_fileNameMap != null)
return _fileNameMap;
// Build the file map
_fileNameMap = [];
for (int fileIndex = 0; fileIndex < Files.Length; fileIndex++)
{
// Get the current file
var file = Files[fileIndex];
// Get the parent directory
int dirIndex = FileDirMap[fileIndex];
if (dirIndex < 0 || dirIndex >= DirCount)
continue;
// Create the filename
string filename = Path.Combine(
Directories[dirIndex]?.Name ?? $"dir_{dirIndex}",
file.Name ?? $"file_{fileIndex}"
);
// Add to the map
_fileNameMap[filename] = file;
}
return _fileNameMap;
}
}
private Dictionary<string, Models.InstallShieldArchiveV3.File>? _fileNameMap = null;
/// <summary>
/// Data offset for all archives
/// </summary>
private const uint DataStart = 255;
#endregion
#region Constructors
/// <inheritdoc/>
public InstallShieldArchiveV3(Archive? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
/// <inheritdoc/>
public InstallShieldArchiveV3(Archive? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
}
/// <summary>
/// Create an InstallShield Archive V3 from a byte array and offset
/// </summary>
/// <param name="data">Byte array representing the archive</param>
/// <param name="offset">Offset within the array to parse</param>
/// <returns>A archive wrapper on success, null on failure</returns>
public static InstallShieldArchiveV3? Create(byte[]? data, int offset)
{
// If the data is invalid
if (data == null || data.Length == 0)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and use that
var dataStream = new MemoryStream(data, offset, data.Length - offset);
return Create(dataStream);
}
/// <summary>
/// Create a InstallShield Archive V3 from a Stream
/// </summary>
/// <param name="data">Stream representing the archive</param>
/// <returns>A archive wrapper on success, null on failure</returns>
public static InstallShieldArchiveV3? Create(Stream? data)
{
// If the data is invalid
if (data == null || !data.CanRead)
return null;
try
{
var archive = Deserializers.InstallShieldArchiveV3.DeserializeStream(data);
if (archive == null)
return null;
return new InstallShieldArchiveV3(archive, data);
}
catch
{
return null;
}
}
#endregion
#region Extraction
/// <summary>
/// Extract every file in the ISAv3 archive to the given output directory
/// </summary>
/// <param name="outputDirectory">Output directory to write to</param>
/// <returns>True if all files extracted, false otherwise</returns>
public bool ExtractAll(string outputDirectory)
{
    // An empty archive has nothing to extract
    int total = Files.Length;
    if (total == 0)
        return false;

    // Attempt every entry; any single failure flips the aggregate result
    bool success = true;
    for (int fileIndex = 0; fileIndex < total; fileIndex++)
        success &= ExtractFile(fileIndex, outputDirectory);

    return success;
}
/// <summary>
/// Extract a file from the ISAv3 to an output directory by index
/// </summary>
/// <param name="index">File index to extract</param>
/// <param name="outputDirectory">Output directory to write to</param>
/// <returns>True if the file extracted, false otherwise</returns>
public bool ExtractFile(int index, string outputDirectory)
{
    // If the files index is invalid
    if (index < 0 || index >= FileCount)
        return false;

    // Get the file
    var file = Files[index];
    if (file == null)
        return false;

    // Create the filename
    var filename = file.Name;
    if (filename == null)
        return false;

    // Get the directory index
    // BUGFIX: use >= so an index equal to DirCount is rejected
    // (matches the bounds check used when building FileNameMap)
    int dirIndex = FileDirMap[index];
    if (dirIndex < 0 || dirIndex >= DirCount)
        return false;

    // Prefix the parent directory name when present
    var dirName = Directories[dirIndex].Name;
    if (dirName != null)
        filename = Path.Combine(dirName, filename);

    // Get and adjust the file offset (payload starts after the fixed header)
    long fileOffset = file.Offset + DataStart;
    if (fileOffset < 0 || fileOffset >= Length)
        return false;

    // Get the file sizes
    long fileSize = file.CompressedSize;
    long outputFileSize = file.UncompressedSize;

    // Read the compressed data directly
    var compressedData = ReadFromDataSource((int)fileOffset, (int)fileSize);
    if (compressedData == null)
        return false;

    // Equal sizes mean the entry is stored uncompressed
    byte[] data;
    if (fileSize == outputFileSize)
    {
        data = compressedData;
    }
    else
    {
        // Decompress the data
        var decomp = Decompressor.Create();
        var outData = new MemoryStream();
        decomp.CopyTo(compressedData, outData);
        data = outData.ToArray();
    }

    // If we have an invalid output directory
    if (string.IsNullOrEmpty(outputDirectory))
        return false;

    // Create the full output path
    filename = Path.Combine(outputDirectory, filename);

    // Ensure the output directory is created
    var directoryName = Path.GetDirectoryName(filename);
    if (directoryName != null)
        System.IO.Directory.CreateDirectory(directoryName);

    // Try to write the data
    try
    {
        // File.Create truncates an existing file; File.OpenWrite would
        // leave stale trailing bytes when overwriting a longer file
        using Stream fs = System.IO.File.Create(filename);
        fs.Write(data, 0, data.Length);
    }
    catch
    {
        return false;
    }

    // BUGFIX: the write succeeded, so report success (was 'return false')
    return true;
}
#endregion
}
}

View File

@@ -0,0 +1,139 @@
using System.IO;
using SabreTools.Compression.SZDD;
using SabreTools.Models.LZ;
namespace SabreTools.Serialization.Wrappers
{
public class LZKWAJ : WrapperBase<KWAJFile>
{
#region Descriptive Properties
/// <inheritdoc/>
public override string DescriptionString => "LZ-compressed file, KWAJ variant";
#endregion
#region Constructors
/// <inheritdoc/>
public LZKWAJ(KWAJFile? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
/// <inheritdoc/>
public LZKWAJ(KWAJFile? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
}
/// <summary>
/// Create an LZ (KWAJ variant) from a byte array and offset
/// </summary>
/// <param name="data">Byte array representing the LZ (KWAJ variant)</param>
/// <param name="offset">Offset within the array to parse</param>
/// <returns>An LZ (KWAJ variant) wrapper on success, null on failure</returns>
public static LZKWAJ? Create(byte[]? data, int offset)
{
// If the data is invalid
if (data == null || data.Length == 0)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and use that
var dataStream = new MemoryStream(data, offset, data.Length - offset);
return Create(dataStream);
}
/// <summary>
/// Create a LZ (KWAJ variant) from a Stream
/// </summary>
/// <param name="data">Stream representing the LZ (KWAJ variant)</param>
/// <returns>An LZ (KWAJ variant) wrapper on success, null on failure</returns>
public static LZKWAJ? Create(Stream? data)
{
// If the data is invalid
if (data == null || !data.CanRead)
return null;
try
{
var file = Deserializers.LZKWAJ.DeserializeStream(data);
if (file == null)
return null;
return new LZKWAJ(file, data);
}
catch
{
return null;
}
}
#endregion
#region Extraction
/// <summary>
/// Extract the contents to an output directory
/// </summary>
/// <param name="outputDirectory">Output directory to write to</param>
/// <returns>True if the contents extracted, false otherwise</returns>
public bool Extract(string outputDirectory)
{
    // Get the length of the compressed data
    long compressedSize = Length - Model.Header!.DataOffset;
    // NOTE(review): this guard compares the payload size against the header
    // data offset rather than against zero; presumably it only needs to
    // reject empty/negative payloads — confirm the intended bound
    if (compressedSize < Model.Header.DataOffset)
        return false;

    // Read in the data as an array
    byte[]? contents = ReadFromDataSource(Model.Header.DataOffset, (int)compressedSize);
    if (contents == null)
        return false;

    // Get the decompressor for the declared KWAJ compression type
    var decompressor = Decompressor.CreateKWAJ(contents, Model.Header.CompressionType);
    if (decompressor == null)
        return false;

    // If we have an invalid output directory
    if (string.IsNullOrEmpty(outputDirectory))
        return false;

    // Build the output filename from the optional header extensions,
    // falling back to a generic placeholder name
    string filename = "tempfile";
    if (Model.HeaderExtensions?.FileName != null)
        filename = Model.HeaderExtensions.FileName;
    if (Model.HeaderExtensions?.FileExtension != null)
        filename += $".{Model.HeaderExtensions.FileExtension}";
    filename = Path.Combine(outputDirectory, filename);

    // Ensure the output directory is created
    var directoryName = Path.GetDirectoryName(filename);
    if (directoryName != null)
        Directory.CreateDirectory(directoryName);

    // Try to write the data
    try
    {
        // File.Create truncates an existing file; File.OpenWrite would
        // leave stale trailing bytes when overwriting a longer file
        using Stream fs = File.Create(filename);
        decompressor.CopyTo(fs);
    }
    catch
    {
        return false;
    }

    return true;
}
#endregion
}
}

View File

@@ -0,0 +1,134 @@
using System.IO;
using SabreTools.Compression.SZDD;
using SabreTools.Models.LZ;
namespace SabreTools.Serialization.Wrappers
{
public class LZQBasic : WrapperBase<QBasicFile>
{
#region Descriptive Properties
/// <inheritdoc/>
public override string DescriptionString => "LZ-compressed file, QBasic variant";
#endregion
#region Constructors
/// <inheritdoc/>
public LZQBasic(QBasicFile? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
/// <inheritdoc/>
public LZQBasic(QBasicFile? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
}
/// <summary>
/// Create an LZ (QBasic variant) from a byte array and offset
/// </summary>
/// <param name="data">Byte array representing the LZ (QBasic variant)</param>
/// <param name="offset">Offset within the array to parse</param>
/// <returns>An LZ (QBasic variant) wrapper on success, null on failure</returns>
public static LZQBasic? Create(byte[]? data, int offset)
{
// If the data is invalid
if (data == null || data.Length == 0)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and use that
var dataStream = new MemoryStream(data, offset, data.Length - offset);
return Create(dataStream);
}
/// <summary>
/// Create a LZ (QBasic variant) from a Stream
/// </summary>
/// <param name="data">Stream representing the LZ (QBasic variant)</param>
/// <returns>An LZ (QBasic variant) wrapper on success, null on failure</returns>
public static LZQBasic? Create(Stream? data)
{
// If the data is invalid
if (data == null || !data.CanRead)
return null;
try
{
var file = Deserializers.LZQBasic.DeserializeStream(data);
if (file == null)
return null;
return new LZQBasic(file, data);
}
catch
{
return null;
}
}
#endregion
#region Extraction
/// <summary>
/// Extract the contents to an output directory
/// </summary>
/// <param name="outputDirectory">Output directory to write to</param>
/// <returns>True if the contents extracted, false otherwise</returns>
/// <remarks>
/// The QBasic variant stores no original filename, so the output is
/// always written as "tempfile.bin". (Removed a stale XML doc entry for
/// a nonexistent 'filename' parameter.)
/// </remarks>
public bool Extract(string outputDirectory)
{
    // Get the length of the compressed data (payload starts after the
    // 12-byte QBasic header)
    long compressedSize = Length - 12;
    if (compressedSize < 12)
        return false;

    // Read in the data as an array
    byte[]? contents = ReadFromDataSource(12, (int)compressedSize);
    if (contents == null)
        return false;

    // Get the decompressor
    var decompressor = Decompressor.CreateQBasic(contents);
    if (decompressor == null)
        return false;

    // If we have an invalid output directory
    if (string.IsNullOrEmpty(outputDirectory))
        return false;

    // Create the full output path
    string filename = Path.Combine(outputDirectory, "tempfile.bin");

    // Ensure the output directory is created
    var directoryName = Path.GetDirectoryName(filename);
    if (directoryName != null)
        Directory.CreateDirectory(directoryName);

    // Try to write the data
    try
    {
        // File.Create truncates an existing file; File.OpenWrite would
        // leave stale trailing bytes when overwriting a longer file
        using Stream fs = File.Create(filename);
        decompressor.CopyTo(fs);
    }
    catch
    {
        return false;
    }

    return true;
}
#endregion
}
}

View File

@@ -0,0 +1,166 @@
using System.IO;
using SabreTools.Compression.SZDD;
using SabreTools.Models.LZ;
namespace SabreTools.Serialization.Wrappers
{
public class LZSZDD : WrapperBase<SZDDFile>
{
#region Descriptive Properties
/// <inheritdoc/>
public override string DescriptionString => "LZ-compressed file, SZDD variant";
#endregion
#region Constructors
/// <inheritdoc/>
public LZSZDD(SZDDFile? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
/// <inheritdoc/>
public LZSZDD(SZDDFile? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
}
/// <summary>
/// Create an LZ (SZDD variant) from a byte array and offset
/// </summary>
/// <param name="data">Byte array representing the LZ (SZDD variant)</param>
/// <param name="offset">Offset within the array to parse</param>
/// <returns>An LZ (SZDD variant) wrapper on success, null on failure</returns>
public static LZSZDD? Create(byte[]? data, int offset)
{
// If the data is invalid
if (data == null || data.Length == 0)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and use that
var dataStream = new MemoryStream(data, offset, data.Length - offset);
return Create(dataStream);
}
/// <summary>
/// Create a LZ (SZDD variant) from a Stream
/// </summary>
/// <param name="data">Stream representing the LZ (SZDD variant)</param>
/// <returns>An LZ (SZDD variant) wrapper on success, null on failure</returns>
public static LZSZDD? Create(Stream? data)
{
// If the data is invalid
if (data == null || !data.CanRead)
return null;
try
{
var file = Deserializers.LZSZDD.DeserializeStream(data);
if (file == null)
return null;
return new LZSZDD(file, data);
}
catch
{
return null;
}
}
#endregion
#region Extraction
/// <summary>
/// Extract the contents to an output directory
/// </summary>
/// <param name="filename">Original filename to use as a base</param>
/// <param name="outputDirectory">Output directory to write to</param>
/// <returns>True if the contents extracted, false otherwise</returns>
public bool Extract(string filename, string outputDirectory)
{
    // Get the length of the compressed data (payload starts after the
    // 14-byte SZDD header)
    long compressedSize = Length - 14;
    if (compressedSize < 14)
        return false;

    // Read in the data as an array
    byte[]? contents = ReadFromDataSource(14, (int)compressedSize);
    if (contents == null)
        return false;

    // Get the decompressor
    var decompressor = Decompressor.CreateSZDD(contents);
    if (decompressor == null)
        return false;

    // Recover the original filename (the compressor mangles the extension)
    filename = GetExpandedName(filename).TrimEnd('\0');

    // If we have an invalid output directory
    if (string.IsNullOrEmpty(outputDirectory))
        return false;

    // Create the full output path
    filename = Path.Combine(outputDirectory, filename);

    // Ensure the output directory is created
    var directoryName = Path.GetDirectoryName(filename);
    if (directoryName != null)
        Directory.CreateDirectory(directoryName);

    // Try to write the data
    try
    {
        // File.Create truncates an existing file; File.OpenWrite would
        // leave stale trailing bytes when overwriting a longer file
        using Stream fs = File.Create(filename);
        decompressor.CopyTo(fs);
    }
    catch
    {
        return false;
    }

    return true;
}
/// <summary>
/// Get the full name of the input file
/// </summary>
/// <remarks>
/// Reconstructs the original extension by replacing the trailing '_'
/// (inserted by the SZDD compressor) with the character stored in the
/// header's LastChar field. A bare "_" or "$" extension is replaced
/// entirely; extensions not ending in '_' are left off.
/// NOTE(review): the upper/lower-case choice keys off input[0] — the
/// first character of the whole input string, not of the extension —
/// confirm this is intended.
/// </remarks>
private string GetExpandedName(string input)
{
// If the extension is missing
string extension = Path.GetExtension(input).TrimStart('.');
if (string.IsNullOrEmpty(extension))
return Path.GetFileNameWithoutExtension(input);
// If the extension is a single character
if (extension.Length == 1)
{
// "_" or "$" placeholders are replaced by the stored last character
if (extension == "_" || extension == "$")
return $"{Path.GetFileNameWithoutExtension(input)}.{char.ToLower(Model.Header!.LastChar)}";
return Path.GetFileNameWithoutExtension(input);
}
// If the extension isn't formatted
if (!extension.EndsWith("_"))
return Path.GetFileNameWithoutExtension(input);
// Handle replacing characters: match the case of the restored character
// to the case of the input's first character
char c = (char.IsUpper(input[0]) ? char.ToLower(Model.Header!.LastChar) : char.ToUpper(Model.Header!.LastChar));
string text2 = extension.Substring(0, extension.Length - 1) + c;
return Path.GetFileNameWithoutExtension(input) + "." + text2;
}
#endregion
}
}

View File

@@ -376,8 +376,11 @@ namespace SabreTools.Serialization.Wrappers
return _overlayStrings;
}
// TODO: Revisit the 16 MiB limit
// Cap the check for overlay strings to 16 MiB (arbitrary)
int overlayLength = Math.Min(endOfFile - endOfSectionData, 16 * 1024 * 1024);
// Otherwise, cache and return the strings
int overlayLength = endOfFile - endOfSectionData;
_overlayStrings = ReadStringsFromDataSource(endOfSectionData, overlayLength, charLimit: 3);
return _overlayStrings;
}
@@ -1028,7 +1031,7 @@ namespace SabreTools.Serialization.Wrappers
{
// Read the signature
int offset = 0;
uint signature = entryData.ReadUInt32(ref offset);
uint signature = entryData.ReadUInt32LittleEndian(ref offset);
// Reset the offset
offset = 0;
@@ -1036,7 +1039,7 @@ namespace SabreTools.Serialization.Wrappers
// NB10
if (signature == 0x3031424E)
{
var nb10ProgramDatabase = entryData.AsNB10ProgramDatabase(ref offset);
var nb10ProgramDatabase = entryData.ParseNB10ProgramDatabase(ref offset);
if (nb10ProgramDatabase != null)
{
_debugData[i] = nb10ProgramDatabase;
@@ -1047,7 +1050,7 @@ namespace SabreTools.Serialization.Wrappers
// RSDS
else if (signature == 0x53445352)
{
var rsdsProgramDatabase = entryData.AsRSDSProgramDatabase(ref offset);
var rsdsProgramDatabase = entryData.ParseRSDSProgramDatabase(ref offset);
if (rsdsProgramDatabase != null)
{
_debugData[i] = rsdsProgramDatabase;

View File

@@ -1,8 +1,9 @@
using System.IO;
using SabreTools.Models.Quantum;
namespace SabreTools.Serialization.Wrappers
{
public class Quantum : WrapperBase<Models.Quantum.Archive>
public class Quantum : WrapperBase<Archive>
{
#region Descriptive Properties
@@ -14,14 +15,14 @@ namespace SabreTools.Serialization.Wrappers
#region Constructors
/// <inheritdoc/>
public Quantum(Models.Quantum.Archive? model, byte[]? data, int offset)
public Quantum(Archive? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
/// <inheritdoc/>
public Quantum(Models.Quantum.Archive? model, Stream? data)
public Quantum(Archive? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
@@ -74,5 +75,130 @@ namespace SabreTools.Serialization.Wrappers
}
#endregion
#region Extension Properties
/// <inheritdoc cref="Archive.CompressedDataOffset"/>
public long CompressedDataOffset => Model.CompressedDataOffset;
/// <inheritdoc cref="Header.FileCount"/>
public ushort FileCount => Model.Header?.FileCount ?? 0;
/// <inheritdoc cref="Archive.FileList"/>
public FileDescriptor[] FileList => Model.FileList ?? [];
#endregion
#region Extraction
/// <summary>
/// Extract every file in the Quantum archive to the given output directory
/// </summary>
/// <param name="outputDirectory">Output directory to write to</param>
/// <returns>True if all files extracted, false otherwise</returns>
public bool ExtractAll(string outputDirectory)
{
    // An empty file list means there is nothing to extract
    int total = FileList?.Length ?? 0;
    if (total == 0)
        return false;

    // Attempt every entry; a single failure flips the aggregate result
    bool success = true;
    for (int fileIndex = 0; fileIndex < total; fileIndex++)
        success &= ExtractFile(fileIndex, outputDirectory);

    return success;
}
/// <summary>
/// Extract a file from the Quantum archive to an output directory by index
/// </summary>
/// <param name="index">File index to extract</param>
/// <param name="outputDirectory">Output directory to write to</param>
/// <returns>True if the file extracted, false otherwise</returns>
/// <remarks>
/// Quantum decompression is not implemented yet, so this currently
/// ALWAYS returns false after validating its inputs. The commented-out
/// code below preserves two in-progress decompression approaches.
/// </remarks>
public bool ExtractFile(int index, string outputDirectory)
{
// If we have no files
if (Model.Header == null || FileCount == 0 || FileList == null || FileList.Length == 0)
return false;
// If we have an invalid index
if (index < 0 || index >= FileList.Length)
return false;
// Get the file information
var fileDescriptor = FileList[index];
// Read the entire compressed data
int compressedDataOffset = (int)CompressedDataOffset;
int compressedDataLength = GetEndOfFile() - compressedDataOffset;
var compressedData = ReadFromDataSource(compressedDataOffset, compressedDataLength);
// TODO: Figure out decompression
// - Single-file archives seem to work
// - Single-file archives with files that span a window boundary seem to work
// - The first files in each archive seem to work
return false;
// // Setup the decompression state
// State state = new State();
// Decompressor.InitState(state, TableSize, CompressionFlags);
// // Decompress the entire array
// int decompressedDataLength = (int)FileList.Sum(fd => fd.ExpandedFileSize);
// byte[] decompressedData = new byte[decompressedDataLength];
// Decompressor.Decompress(state, compressedData.Length, compressedData, decompressedData.Length, decompressedData);
// // Read the data
// int offset = (int)FileList.Take(index).Sum(fd => fd.ExpandedFileSize);
// byte[] data = new byte[fileDescriptor.ExpandedFileSize];
// Array.Copy(decompressedData, offset, data, 0, data.Length);
// // Loop through all files before the current
// for (int i = 0; i < index; i++)
// {
// // Decompress the next block of data
// byte[] tempData = new byte[FileList[i].ExpandedFileSize];
// int lastRead = Decompressor.Decompress(state, compressedData.Length, compressedData, tempData.Length, tempData);
// compressedData = new ReadOnlySpan<byte>(compressedData, (lastRead), compressedData.Length - (lastRead)).ToArray();
// }
// // Read the data
// byte[] data = new byte[fileDescriptor.ExpandedFileSize];
// _ = Decompressor.Decompress(state, compressedData.Length, compressedData, data.Length, data);
// // Create the filename
// string filename = fileDescriptor.FileName;
// // If we have an invalid output directory
// if (string.IsNullOrEmpty(outputDirectory))
// return false;
// // Create the full output path
// filename = Path.Combine(outputDirectory, filename);
// // Ensure the output directory is created
// Directory.CreateDirectory(Path.GetDirectoryName(filename));
// // Try to write the data
// try
// {
// // Open the output file for writing
// using (Stream fs = File.OpenWrite(filename))
// {
// fs.Write(data, 0, data.Length);
// }
// }
// catch
// {
// return false;
// }
// return true;
}
#endregion
}
}

View File

@@ -1,5 +1,7 @@
using System;
using System.Collections.Generic;
using System.IO;
using SabreTools.Compression.zlib;
using SabreTools.Models.SGA;
namespace SabreTools.Serialization.Wrappers
@@ -120,6 +122,148 @@ namespace SabreTools.Serialization.Wrappers
#endregion
#region Extraction
/// <summary>
/// Extract every file in the SGA to the given output directory
/// </summary>
/// <param name="outputDirectory">Output directory to write to</param>
/// <returns>True if all files extracted, false otherwise</returns>
public bool ExtractAll(string outputDirectory)
{
    // An empty archive has nothing to extract
    int total = FileCount;
    if (total == 0)
        return false;

    // Attempt every entry; a single failure flips the aggregate result
    bool success = true;
    for (int fileIndex = 0; fileIndex < total; fileIndex++)
        success &= ExtractFile(fileIndex, outputDirectory);

    return success;
}
/// <summary>
/// Extract a file from the SGA to an output directory by index
/// </summary>
/// <param name="index">File index to extract</param>
/// <param name="outputDirectory">Output directory to write to</param>
/// <returns>True if the file extracted, false otherwise</returns>
public bool ExtractFile(int index, string outputDirectory)
{
    // Get the file count
    int fileCount = FileCount;
    if (fileCount == 0)
        return false;

    // If the files index is invalid
    if (index < 0 || index >= fileCount)
        return false;

    // Create the filename
    var filename = GetFileName(index);
    if (filename == null)
        return false;

    // Loop through and get all parent directories
    var parentNames = new List<string> { filename };

    // Get the parent directory
    string? folderName = GetParentName(index);
    if (folderName != null)
        parentNames.Add(folderName);

    // TODO: Should the section name/alias be used in the path as well?

    // Reverse and assemble the filename
    parentNames.Reverse();
#if NET20 || NET35
    filename = parentNames[0];
    for (int i = 1; i < parentNames.Count; i++)
    {
        filename = Path.Combine(filename, parentNames[i]);
    }
#else
    filename = Path.Combine([.. parentNames]);
#endif

    // Get and adjust the file offset
    long fileOffset = GetFileOffset(index);
    fileOffset += FileDataOffset;
    if (fileOffset < 0)
        return false;

    // Get the file sizes
    long fileSize = GetCompressedSize(index);
    long outputFileSize = GetUncompressedSize(index);

    // Read the compressed data directly
    var compressedData = ReadFromDataSource((int)fileOffset, (int)fileSize);
    if (compressedData == null)
        return false;

    // Equal sizes mean the entry is stored uncompressed
    byte[] data;
    if (fileSize == outputFileSize)
    {
        data = compressedData;
    }
    else
    {
        // Inflate the data into the buffer
        var zstream = new ZLib.z_stream_s();
        data = new byte[outputFileSize];
        unsafe
        {
            fixed (byte* payloadPtr = compressedData)
            fixed (byte* dataPtr = data)
            {
                zstream.next_in = payloadPtr;
                zstream.avail_in = (uint)compressedData.Length;
                zstream.total_in = (uint)compressedData.Length;
                zstream.next_out = dataPtr;
                zstream.avail_out = (uint)data.Length;
                zstream.total_out = 0;

                ZLib.inflateInit_(zstream, ZLib.zlibVersion(), compressedData.Length);
                // NOTE(review): the inflate return code is ignored, so a
                // corrupt or truncated stream still "succeeds" — consider
                // failing when zret indicates an error
                int zret = ZLib.inflate(zstream, 1);
                ZLib.inflateEnd(zstream);
            }
        }
    }

    // If we have an invalid output directory
    if (string.IsNullOrEmpty(outputDirectory))
        return false;

    // Create the full output path
    filename = Path.Combine(outputDirectory, filename);

    // Ensure the output directory is created
    var directoryName = Path.GetDirectoryName(filename);
    if (directoryName != null)
        System.IO.Directory.CreateDirectory(directoryName);

    // Try to write the data
    try
    {
        // File.Create truncates an existing file; File.OpenWrite would
        // leave stale trailing bytes when overwriting a longer file
        using Stream fs = System.IO.File.Create(filename);
        fs.Write(data, 0, data.Length);
    }
    catch
    {
        return false;
    }

    // BUGFIX: the write succeeded, so report success (was 'return false')
    return true;
}
#endregion
#region File
/// <summary>

View File

@@ -45,6 +45,24 @@ namespace SabreTools.Serialization.Wrappers
/// </summary>
public T Model { get; private set; }
/// <summary>
/// Length of the underlying data
/// </summary>
public long Length
{
    get
    {
        // Report the size of whichever backing source is in use
        switch (_dataSource)
        {
            case DataSource.ByteArray:
                // The usable span starts at the configured offset
                return _byteArrayData!.Length - _byteArrayOffset;
            case DataSource.Stream:
                return _streamData!.Length;
            default:
                // Everything else is invalid
                return -1;
        }
    }
}
#endregion
#region Instance Variables
@@ -202,27 +220,36 @@ namespace SabreTools.Serialization.Wrappers
if (!SegmentValid(position, length))
return null;
// Read and return the data
byte[]? sectionData = null;
switch (_dataSource)
try
{
case DataSource.ByteArray:
sectionData = new byte[length];
Array.Copy(_byteArrayData!, _byteArrayOffset + position, sectionData, 0, length);
break;
case DataSource.Stream:
lock (_streamDataLock)
{
long currentLocation = _streamData!.Position;
_streamData.Seek(position, SeekOrigin.Begin);
sectionData = _streamData.ReadBytes(length);
_streamData.Seek(currentLocation, SeekOrigin.Begin);
// Read and return the data
byte[]? sectionData = null;
switch (_dataSource)
{
case DataSource.ByteArray:
sectionData = new byte[length];
Array.Copy(_byteArrayData!, _byteArrayOffset + position, sectionData, 0, length);
break;
}
}
return sectionData;
case DataSource.Stream:
lock (_streamDataLock)
{
long currentLocation = _streamData!.Position;
_streamData.Seek(position, SeekOrigin.Begin);
sectionData = _streamData.ReadBytes(length);
_streamData.Seek(currentLocation, SeekOrigin.Begin);
break;
}
}
return sectionData;
}
catch
{
// Absorb the error
return null;
}
}
/// <summary>
@@ -368,7 +395,7 @@ namespace SabreTools.Serialization.Wrappers
/// <summary>
/// Export the item information as JSON
/// </summary>
public override string ExportJSON() => System.Text.Json.JsonSerializer.Serialize(Model, _jsonSerializerOptions);
public override string ExportJSON() => System.Text.Json.JsonSerializer.Serialize(Model, _jsonSerializerOptions);
#endif
#endregion

View File

@@ -28,11 +28,13 @@ namespace SabreTools.Serialization.Wrappers
WrapperType.GCF => GCF.Create(data),
WrapperType.GZIP => null,// TODO: Implement wrapper
WrapperType.IniFile => null,// TODO: Implement wrapper
WrapperType.InstallShieldArchiveV3 => null,// TODO: Implement wrapper
WrapperType.InstallShieldArchiveV3 => InstallShieldArchiveV3.Create(data),
WrapperType.InstallShieldCAB => InstallShieldCabinet.Create(data),
WrapperType.LDSCRYPT => null,// TODO: Implement wrapper
WrapperType.LZKWAJ => LZKWAJ.Create(data),
WrapperType.LZQBasic => LZQBasic.Create(data),
WrapperType.LZSZDD => LZSZDD.Create(data),
WrapperType.MicrosoftCAB => MicrosoftCabinet.Create(data),
WrapperType.MicrosoftLZ => null,// TODO: Implement wrapper
WrapperType.MoPaQ => MoPaQ.Create(data),
WrapperType.N3DS => N3DS.Create(data),
WrapperType.NCF => NCF.Create(data),
@@ -330,6 +332,19 @@ namespace SabreTools.Serialization.Wrappers
#endregion
#region LZ
if (magic.StartsWith([0x4B, 0x57, 0x41, 0x4A, 0x88, 0xF0, 0x27, 0xD1]))
return WrapperType.LZKWAJ;
if (magic.StartsWith([0x53, 0x5A, 0x20, 0x88, 0xF0, 0x27, 0x33, 0xD1]))
return WrapperType.LZQBasic;
if (magic.StartsWith([0x53, 0x5A, 0x44, 0x44, 0x88, 0xF0, 0x27, 0x33]))
return WrapperType.LZSZDD;
#endregion
#region MicrosoftCAB
if (magic.StartsWith([0x4d, 0x53, 0x43, 0x46]))
@@ -339,13 +354,6 @@ namespace SabreTools.Serialization.Wrappers
#endregion
#region MicrosoftLZ
if (magic.StartsWith([0x53, 0x5a, 0x44, 0x44, 0x88, 0xf0, 0x27, 0x33]))
return WrapperType.MicrosoftLZ;
#endregion
#region MoPaQ
if (magic.StartsWith([0x4d, 0x50, 0x51, 0x1a]))

View File

@@ -77,7 +77,6 @@ namespace SabreTools.Serialization.Wrappers
/// <summary>
/// InstallShield archive v3
/// </summary>
/// <remarks>Currently has no IWrapper implementation</remarks>
InstallShieldArchiveV3,
/// <summary>
@@ -91,17 +90,26 @@ namespace SabreTools.Serialization.Wrappers
/// <remarks>Currently has no IWrapper implementation</remarks>
LDSCRYPT,
/// <summary>
/// LZ-compressed file, KWAJ variant
/// </summary>
LZKWAJ,
/// <summary>
/// LZ-compressed file, QBasic variant
/// </summary>
LZQBasic,
/// <summary>
/// LZ-compressed file, SZDD variant
/// </summary>
LZSZDD,
/// <summary>
/// Microsoft cabinet file
/// </summary>
MicrosoftCAB,
/// <summary>
/// Microsoft LZ-compressed file
/// </summary>
/// <remarks>Currently has no IWrapper implementation</remarks>
MicrosoftLZ,
/// <summary>
/// MPQ game data archive
/// </summary>

View File

@@ -10,13 +10,17 @@
# Optional parameters
USE_ALL=false
INCLUDE_DEBUG=false
NO_BUILD=false
NO_ARCHIVE=false
while getopts "uba" OPTION; do
while getopts "udba" OPTION; do
case $OPTION in
u)
USE_ALL=true
;;
d)
INCLUDE_DEBUG=true
;;
b)
NO_BUILD=true
;;
@@ -39,6 +43,7 @@ COMMIT=$(git log --pretty=%H -1)
# Output the selected options
echo "Selected Options:"
echo " Use all frameworks (-u) $USE_ALL"
echo " Include debug builds (-d) $INCLUDE_DEBUG"
echo " No build (-b) $NO_BUILD"
echo " No archive (-a) $NO_ARCHIVE"
echo " "
@@ -91,14 +96,14 @@ if [ $NO_BUILD = false ]; then
# Only .NET 5 and above can publish to a single file
if [[ $(echo ${SINGLE_FILE_CAPABLE[@]} | fgrep -w $FRAMEWORK) ]]; then
# Only include Debug if building all
if [ $USE_ALL = true ]; then
# Only include Debug if set
if [ $INCLUDE_DEBUG = true ]; then
dotnet publish InfoPrint/InfoPrint.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true
fi
dotnet publish InfoPrint/InfoPrint.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true -p:DebugType=None -p:DebugSymbols=false
else
# Only include Debug if building all
if [ $USE_ALL = true ]; then
# Only include Debug if set
if [ $INCLUDE_DEBUG = true ]; then
dotnet publish InfoPrint/InfoPrint.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT
fi
dotnet publish InfoPrint/InfoPrint.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:DebugType=None -p:DebugSymbols=false
@@ -131,8 +136,8 @@ if [ $NO_ARCHIVE = false ]; then
fi
fi
# Only include Debug if building all
if [ $USE_ALL = true ]; then
# Only include Debug if set
if [ $INCLUDE_DEBUG = true ]; then
cd $BUILD_FOLDER/InfoPrint/bin/Debug/${FRAMEWORK}/${RUNTIME}/publish/
zip -r $BUILD_FOLDER/InfoPrint_${FRAMEWORK}_${RUNTIME}_debug.zip .
fi

View File

@@ -12,6 +12,10 @@ param(
[Alias("UseAll")]
[switch]$USE_ALL,
[Parameter(Mandatory = $false)]
[Alias("IncludeDebug")]
[switch]$INCLUDE_DEBUG,
[Parameter(Mandatory = $false)]
[Alias("NoBuild")]
[switch]$NO_BUILD,
@@ -30,6 +34,7 @@ $COMMIT = git log --pretty=format:"%H" -1
# Output the selected options
Write-Host "Selected Options:"
Write-Host " Use all frameworks (-UseAll) $USE_ALL"
Write-Host " Include debug builds (-IncludeDebug) $INCLUDE_DEBUG"
Write-Host " No build (-NoBuild) $NO_BUILD"
Write-Host " No archive (-NoArchive) $NO_ARCHIVE"
Write-Host " "
@@ -78,15 +83,15 @@ if (!$NO_BUILD.IsPresent) {
# Only .NET 5 and above can publish to a single file
if ($SINGLE_FILE_CAPABLE -contains $FRAMEWORK) {
# Only include Debug if building all
if ($USE_ALL.IsPresent) {
# Only include Debug if set
if ($INCLUDE_DEBUG.IsPresent) {
dotnet publish InfoPrint\InfoPrint.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true
}
dotnet publish InfoPrint\InfoPrint.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true -p:DebugType=None -p:DebugSymbols=false
}
else {
# Only include Debug if building all
if ($USE_ALL.IsPresent) {
# Only include Debug if set
if ($INCLUDE_DEBUG.IsPresent) {
dotnet publish InfoPrint\InfoPrint.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT
}
dotnet publish InfoPrint\InfoPrint.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:DebugType=None -p:DebugSymbols=false
@@ -115,8 +120,8 @@ if (!$NO_ARCHIVE.IsPresent) {
continue
}
# Only include Debug if building all
if ($USE_ALL.IsPresent) {
# Only include Debug if set
if ($INCLUDE_DEBUG.IsPresent) {
Set-Location -Path $BUILD_FOLDER\InfoPrint\bin\Debug\${FRAMEWORK}\${RUNTIME}\publish\
7z a -tzip $BUILD_FOLDER\InfoPrint_${FRAMEWORK}_${RUNTIME}_debug.zip *
}