Mirror of https://github.com/SabreTools/SabreTools.Serialization.git (synced 2026-02-04 05:36:12 +00:00)

Compare commits (16 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 0bda1f4f88 |  |
|  | 7d50e0e1c5 |  |
|  | 224a4caab0 |  |
|  | b4689da404 |  |
|  | af66657399 |  |
|  | 0f3e2d8275 |  |
|  | d664b6defc |  |
|  | adbf74a6e0 |  |
|  | 7eb401efed |  |
|  | ba97381b99 |  |
|  | 3de92de225 |  |
|  | 01a195c8aa |  |
|  | 12d43ef68a |  |
|  | 0df806a6d1 |  |
|  | f8c713b260 |  |
|  | 4d0122f97c |  |
@@ -1,4 +1,4 @@
name: Nuget Pack
name: Build and Test

on:
push:
@@ -18,29 +18,23 @@ jobs:
with:
dotnet-version: 9.0.x

- name: Restore dependencies
run: dotnet restore

- name: Build library
run: dotnet build

- name: Run tests
run: dotnet test

- name: Pack
run: dotnet pack
- name: Run publish script
run: ./publish-nix.sh -d

- name: Upload build
- name: Upload package
uses: actions/upload-artifact@v4
with:
name: 'Nuget Package'
path: 'SabreTools.Serialization/bin/Release/*.nupkg'
path: '*.nupkg'

- name: Upload to rolling
uses: ncipollo/release-action@v1.14.0
with:
allowUpdates: True
artifacts: 'SabreTools.Serialization/bin/Release/*.nupkg'
artifacts: "*.nupkg,*.zip"
body: 'Last built commit: ${{ github.sha }}'
name: 'Rolling Release'
prerelease: True
55 .github/workflows/build_test.yml (vendored)
@@ -1,55 +0,0 @@
name: Build InfoPrint

on:
push:
branches: [ "main" ]

jobs:
build:
runs-on: ubuntu-latest

strategy:
matrix:
project: [InfoPrint]
runtime: [win-x86, win-x64, win-arm64, linux-x64, linux-arm64, osx-x64]
framework: [net9.0] #[net20, net35, net40, net452, net472, net48, netcoreapp3.1, net5.0, net6.0, net7.0, net8.0, net9.0]
conf: [Debug] #[Release, Debug]

steps:
- uses: actions/checkout@v4
with:
submodules: recursive

- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x

- name: Restore dependencies
run: dotnet restore

- name: Build
run: dotnet publish ${{ matrix.project }}/${{ matrix.project }}.csproj -f ${{ matrix.framework }} -r ${{ matrix.runtime }} -c ${{ matrix.conf == 'Release' && 'Release -p:DebugType=None -p:DebugSymbols=false' || 'Debug'}} --self-contained true --version-suffix ${{ github.sha }} ${{ (startsWith(matrix.framework, 'net5') || startsWith(matrix.framework, 'net6') || startsWith(matrix.framework, 'net7') || startsWith(matrix.framework, 'net8') || startsWith(matrix.framework, 'net9')) && '-p:PublishSingleFile=true' || ''}}

- name: Archive build
run: |
cd ${{ matrix.project }}/bin/Debug/${{ matrix.framework }}/${{ matrix.runtime }}/publish/
zip -r ${{ github.workspace }}/${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip ./

- name: Upload build
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}
path: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip

- name: Upload to rolling
uses: ncipollo/release-action@v1.14.0
with:
allowUpdates: True
artifacts: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip
body: 'Last built commit: ${{ github.sha }}'
name: 'Rolling Release'
prerelease: True
replacesArtifacts: True
tag: "rolling"
updateOnlyUnreleased: True
@@ -9,7 +9,7 @@
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.8.1</Version>
<Version>1.8.3</Version>
</PropertyGroup>

<!-- Support All Frameworks -->
@@ -1,5 +1,7 @@
# SabreTools.Serialization

[Build and Test](https://github.com/SabreTools/SabreTools.Serialization/actions/workflows/build_and_test.yml)

This library comprises serializers that both read and write between files or streams and the dedicated models, as well as convert to and from the common internal models. This library is partially used by the current parsing and writing code, but none of the internal model serialization is used.

Find the link to the Nuget package [here](https://www.nuget.org/packages/SabreTools.Serialization).
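A minimal usage sketch of the deserializer side of the library, based on the `LZSZDD` deserializer added in this changeset; the input path and console output are illustrative assumptions, not part of the library:

```csharp
using System;
using System.IO;
using SabreTools.Serialization.Deserializers;

class Example
{
    static void Main()
    {
        // Illustrative path; any SZDD-compressed file would do
        using Stream input = File.OpenRead("SETUP.EX_");

        // Deserializers return a filled model on success and null on any failure
        var szdd = new LZSZDD().Deserialize(input);
        if (szdd?.Header == null)
        {
            Console.WriteLine("Not a valid SZDD file");
            return;
        }

        Console.WriteLine($"Compression type:    {szdd.Header.CompressionType}");
        Console.WriteLine($"Uncompressed length: {szdd.Header.RealLength}");
    }
}
```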
@@ -49,6 +49,7 @@ namespace SabreTools.Serialization.Test.CrossModel
Name = "XXXXXX",
Size = "XXXXXX",
CRC = "XXXXXX",
SHA1 = "XXXXXX",
Date = "XXXXXX",
};

@@ -102,6 +103,7 @@ namespace SabreTools.Serialization.Test.CrossModel
Assert.Equal("XXXXXX", rom.Name);
Assert.Equal("XXXXXX", rom.Size);
Assert.Equal("XXXXXX", rom.CRC);
Assert.Equal("XXXXXX", rom.SHA1);
Assert.Equal("XXXXXX", rom.Date);
}
}
@@ -117,6 +117,7 @@ namespace SabreTools.Serialization.Test.Deserializers
Name = "XXXXXX",
Size = "XXXXXX",
CRC = "XXXXXX",
SHA1 = "XXXXXX",
Date = "XXXXXX XXXXXX",
};

@@ -170,6 +171,7 @@ namespace SabreTools.Serialization.Test.Deserializers
Assert.Equal("XXXXXX", rom.Name);
Assert.Equal("XXXXXX", rom.Size);
Assert.Equal("XXXXXX", rom.CRC);
Assert.Equal("XXXXXX", rom.SHA1);
Assert.Equal("XXXXXX XXXXXX", rom.Date);
}
}
73 SabreTools.Serialization.Test/Deserializers/LZKWAJTests.cs (Normal file)
@@ -0,0 +1,73 @@
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using SabreTools.Serialization.Deserializers;
|
||||
using Xunit;
|
||||
|
||||
namespace SabreTools.Serialization.Test.Deserializers
|
||||
{
|
||||
public class LZKWAJTests
|
||||
{
|
||||
[Fact]
|
||||
public void NullArray_Null()
|
||||
{
|
||||
byte[]? data = null;
|
||||
int offset = 0;
|
||||
var deserializer = new LZKWAJ();
|
||||
|
||||
var actual = deserializer.Deserialize(data, offset);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EmptyArray_Null()
|
||||
{
|
||||
byte[]? data = [];
|
||||
int offset = 0;
|
||||
var deserializer = new LZKWAJ();
|
||||
|
||||
var actual = deserializer.Deserialize(data, offset);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void InvalidArray_Null()
|
||||
{
|
||||
byte[]? data = [.. Enumerable.Repeat<byte>(0xFF, 1024)];
|
||||
int offset = 0;
|
||||
var deserializer = new LZKWAJ();
|
||||
|
||||
var actual = deserializer.Deserialize(data, offset);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void NullStream_Null()
|
||||
{
|
||||
Stream? data = null;
|
||||
var deserializer = new LZKWAJ();
|
||||
|
||||
var actual = deserializer.Deserialize(data);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EmptyStream_Null()
|
||||
{
|
||||
Stream? data = new MemoryStream([]);
|
||||
var deserializer = new LZKWAJ();
|
||||
|
||||
var actual = deserializer.Deserialize(data);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void InvalidStream_Null()
|
||||
{
|
||||
Stream? data = new MemoryStream([.. Enumerable.Repeat<byte>(0xFF, 1024)]);
|
||||
var deserializer = new LZKWAJ();
|
||||
|
||||
var actual = deserializer.Deserialize(data);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
}
|
||||
}
|
||||
74 SabreTools.Serialization.Test/Deserializers/LZQBasicTests.cs (Normal file)
@@ -0,0 +1,74 @@
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using SabreTools.Serialization.Deserializers;
|
||||
using Xunit;
|
||||
|
||||
namespace SabreTools.Serialization.Test.Deserializers
|
||||
{
|
||||
public class LZQBasicTests
|
||||
|
||||
{
|
||||
[Fact]
|
||||
public void NullArray_Null()
|
||||
{
|
||||
byte[]? data = null;
|
||||
int offset = 0;
|
||||
var deserializer = new LZQBasic();
|
||||
|
||||
var actual = deserializer.Deserialize(data, offset);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EmptyArray_Null()
|
||||
{
|
||||
byte[]? data = [];
|
||||
int offset = 0;
|
||||
var deserializer = new LZQBasic();
|
||||
|
||||
var actual = deserializer.Deserialize(data, offset);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void InvalidArray_Null()
|
||||
{
|
||||
byte[]? data = [.. Enumerable.Repeat<byte>(0xFF, 1024)];
|
||||
int offset = 0;
|
||||
var deserializer = new LZQBasic();
|
||||
|
||||
var actual = deserializer.Deserialize(data, offset);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void NullStream_Null()
|
||||
{
|
||||
Stream? data = null;
|
||||
var deserializer = new LZQBasic();
|
||||
|
||||
var actual = deserializer.Deserialize(data);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EmptyStream_Null()
|
||||
{
|
||||
Stream? data = new MemoryStream([]);
|
||||
var deserializer = new LZQBasic();
|
||||
|
||||
var actual = deserializer.Deserialize(data);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void InvalidStream_Null()
|
||||
{
|
||||
Stream? data = new MemoryStream([.. Enumerable.Repeat<byte>(0xFF, 1024)]);
|
||||
var deserializer = new LZQBasic();
|
||||
|
||||
var actual = deserializer.Deserialize(data);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
}
|
||||
}
|
||||
73 SabreTools.Serialization.Test/Deserializers/LZSZDDTests.cs (Normal file)
@@ -0,0 +1,73 @@
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using SabreTools.Serialization.Deserializers;
|
||||
using Xunit;
|
||||
|
||||
namespace SabreTools.Serialization.Test.Deserializers
|
||||
{
|
||||
public class LZSZDDTests
|
||||
{
|
||||
[Fact]
|
||||
public void NullArray_Null()
|
||||
{
|
||||
byte[]? data = null;
|
||||
int offset = 0;
|
||||
var deserializer = new LZSZDD();
|
||||
|
||||
var actual = deserializer.Deserialize(data, offset);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EmptyArray_Null()
|
||||
{
|
||||
byte[]? data = [];
|
||||
int offset = 0;
|
||||
var deserializer = new LZSZDD();
|
||||
|
||||
var actual = deserializer.Deserialize(data, offset);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void InvalidArray_Null()
|
||||
{
|
||||
byte[]? data = [.. Enumerable.Repeat<byte>(0xFF, 1024)];
|
||||
int offset = 0;
|
||||
var deserializer = new LZSZDD();
|
||||
|
||||
var actual = deserializer.Deserialize(data, offset);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void NullStream_Null()
|
||||
{
|
||||
Stream? data = null;
|
||||
var deserializer = new LZSZDD();
|
||||
|
||||
var actual = deserializer.Deserialize(data);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EmptyStream_Null()
|
||||
{
|
||||
Stream? data = new MemoryStream([]);
|
||||
var deserializer = new LZSZDD();
|
||||
|
||||
var actual = deserializer.Deserialize(data);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void InvalidStream_Null()
|
||||
{
|
||||
Stream? data = new MemoryStream([.. Enumerable.Repeat<byte>(0xFF, 1024)]);
|
||||
var deserializer = new LZSZDD();
|
||||
|
||||
var actual = deserializer.Deserialize(data);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -28,7 +28,7 @@
</PackageReference>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.12.0" />
<PackageReference Include="SabreTools.Hashing" Version="1.4.0" />
<PackageReference Include="SabreTools.Models" Version="1.5.5" />
<PackageReference Include="SabreTools.Models" Version="1.5.7" />
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
@@ -0,0 +1,61 @@
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using SabreTools.Serialization.Wrappers;
|
||||
using Xunit;
|
||||
|
||||
namespace SabreTools.Serialization.Test.Wrappers
|
||||
{
|
||||
public class InstallShieldArchiveV3Tests
|
||||
{
|
||||
[Fact]
|
||||
public void NullArray_Null()
|
||||
{
|
||||
byte[]? data = null;
|
||||
int offset = 0;
|
||||
var actual = InstallShieldArchiveV3.Create(data, offset);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EmptyArray_Null()
|
||||
{
|
||||
byte[]? data = [];
|
||||
int offset = 0;
|
||||
var actual = InstallShieldArchiveV3.Create(data, offset);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void InvalidArray_Null()
|
||||
{
|
||||
byte[]? data = [.. Enumerable.Repeat<byte>(0xFF, 1024)];
|
||||
int offset = 0;
|
||||
var actual = InstallShieldArchiveV3.Create(data, offset);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void NullStream_Null()
|
||||
{
|
||||
Stream? data = null;
|
||||
var actual = InstallShieldArchiveV3.Create(data);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EmptyStream_Null()
|
||||
{
|
||||
Stream? data = new MemoryStream([]);
|
||||
var actual = InstallShieldArchiveV3.Create(data);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void InvalidStream_Null()
|
||||
{
|
||||
Stream? data = new MemoryStream([.. Enumerable.Repeat<byte>(0xFF, 1024)]);
|
||||
var actual = InstallShieldArchiveV3.Create(data);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
}
|
||||
}
|
||||
61 SabreTools.Serialization.Test/Wrappers/LZKWAJTests.cs (Normal file)
@@ -0,0 +1,61 @@
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using SabreTools.Serialization.Wrappers;
|
||||
using Xunit;
|
||||
|
||||
namespace SabreTools.Serialization.Test.Wrappers
|
||||
{
|
||||
public class LZKWAJTests
|
||||
{
|
||||
[Fact]
|
||||
public void NullArray_Null()
|
||||
{
|
||||
byte[]? data = null;
|
||||
int offset = 0;
|
||||
var actual = LZKWAJ.Create(data, offset);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EmptyArray_Null()
|
||||
{
|
||||
byte[]? data = [];
|
||||
int offset = 0;
|
||||
var actual = LZKWAJ.Create(data, offset);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void InvalidArray_Null()
|
||||
{
|
||||
byte[]? data = [.. Enumerable.Repeat<byte>(0xFF, 1024)];
|
||||
int offset = 0;
|
||||
var actual = LZKWAJ.Create(data, offset);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void NullStream_Null()
|
||||
{
|
||||
Stream? data = null;
|
||||
var actual = LZKWAJ.Create(data);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EmptyStream_Null()
|
||||
{
|
||||
Stream? data = new MemoryStream([]);
|
||||
var actual = LZKWAJ.Create(data);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void InvalidStream_Null()
|
||||
{
|
||||
Stream? data = new MemoryStream([.. Enumerable.Repeat<byte>(0xFF, 1024)]);
|
||||
var actual = LZKWAJ.Create(data);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
}
|
||||
}
|
||||
61 SabreTools.Serialization.Test/Wrappers/LZQBasicTests.cs (Normal file)
@@ -0,0 +1,61 @@
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using SabreTools.Serialization.Wrappers;
|
||||
using Xunit;
|
||||
|
||||
namespace SabreTools.Serialization.Test.Wrappers
|
||||
{
|
||||
public class LZQBasicTests
|
||||
{
|
||||
[Fact]
|
||||
public void NullArray_Null()
|
||||
{
|
||||
byte[]? data = null;
|
||||
int offset = 0;
|
||||
var actual = LZQBasic.Create(data, offset);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EmptyArray_Null()
|
||||
{
|
||||
byte[]? data = [];
|
||||
int offset = 0;
|
||||
var actual = LZQBasic.Create(data, offset);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void InvalidArray_Null()
|
||||
{
|
||||
byte[]? data = [.. Enumerable.Repeat<byte>(0xFF, 1024)];
|
||||
int offset = 0;
|
||||
var actual = LZQBasic.Create(data, offset);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void NullStream_Null()
|
||||
{
|
||||
Stream? data = null;
|
||||
var actual = LZQBasic.Create(data);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EmptyStream_Null()
|
||||
{
|
||||
Stream? data = new MemoryStream([]);
|
||||
var actual = LZQBasic.Create(data);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void InvalidStream_Null()
|
||||
{
|
||||
Stream? data = new MemoryStream([.. Enumerable.Repeat<byte>(0xFF, 1024)]);
|
||||
var actual = LZQBasic.Create(data);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
}
|
||||
}
|
||||
61 SabreTools.Serialization.Test/Wrappers/LZSZDDTests.cs (Normal file)
@@ -0,0 +1,61 @@
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using SabreTools.Serialization.Wrappers;
|
||||
using Xunit;
|
||||
|
||||
namespace SabreTools.Serialization.Test.Wrappers
|
||||
{
|
||||
public class LZSZDDTests
|
||||
{
|
||||
[Fact]
|
||||
public void NullArray_Null()
|
||||
{
|
||||
byte[]? data = null;
|
||||
int offset = 0;
|
||||
var actual = LZSZDD.Create(data, offset);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EmptyArray_Null()
|
||||
{
|
||||
byte[]? data = [];
|
||||
int offset = 0;
|
||||
var actual = LZSZDD.Create(data, offset);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void InvalidArray_Null()
|
||||
{
|
||||
byte[]? data = [.. Enumerable.Repeat<byte>(0xFF, 1024)];
|
||||
int offset = 0;
|
||||
var actual = LZSZDD.Create(data, offset);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void NullStream_Null()
|
||||
{
|
||||
Stream? data = null;
|
||||
var actual = LZSZDD.Create(data);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EmptyStream_Null()
|
||||
{
|
||||
Stream? data = new MemoryStream([]);
|
||||
var actual = LZSZDD.Create(data);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void InvalidStream_Null()
|
||||
{
|
||||
Stream? data = new MemoryStream([.. Enumerable.Repeat<byte>(0xFF, 1024)]);
|
||||
var actual = LZSZDD.Create(data);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -70,6 +70,7 @@ namespace SabreTools.Serialization.CrossModel
Name = item.ReadString(Models.Metadata.Rom.NameKey),
Size = item.ReadString(Models.Metadata.Rom.SizeKey),
CRC = item.ReadString(Models.Metadata.Rom.CRCKey),
SHA1 = item.ReadString(Models.Metadata.Rom.SHA1Key),
Date = item.ReadString(Models.Metadata.Rom.DateKey),
};
return file;

@@ -73,6 +73,7 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.Rom.NameKey] = item.Name,
[Models.Metadata.Rom.SizeKey] = item.Size,
[Models.Metadata.Rom.CRCKey] = item.CRC,
[Models.Metadata.Rom.SHA1Key] = item.SHA1,
[Models.Metadata.Rom.DateKey] = item.Date,
};
return rom;
@@ -179,7 +179,7 @@ namespace SabreTools.Serialization.Deserializers
}
}

// If we're in the doscenter block
// If we're in the clrmamepro block
else if (reader.TopLevel == "clrmamepro"
&& reader.RowType == CmpRowType.Standalone)
{

@@ -172,6 +172,9 @@ namespace SabreTools.Serialization.Deserializers
case "crc":
file.CRC = kvp.Value;
break;
case "sha1":
file.SHA1 = kvp.Value;
break;
case "date":
file.Date = kvp.Value;
break;
121 SabreTools.Serialization/Deserializers/LZKWAJ.cs (Normal file)
@@ -0,0 +1,121 @@
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO.Extensions;
|
||||
using SabreTools.Models.LZ;
|
||||
using static SabreTools.Models.LZ.Constants;
|
||||
|
||||
namespace SabreTools.Serialization.Deserializers
|
||||
{
|
||||
public class LZKWAJ : BaseBinaryDeserializer<KWAJFile>
|
||||
{
|
||||
/// <inheritdoc/>
|
||||
public override KWAJFile? Deserialize(Stream? data)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null || !data.CanRead)
|
||||
return null;
|
||||
|
||||
try
|
||||
{
|
||||
// Cache the current offset
|
||||
int initialOffset = (int)data.Position;
|
||||
|
||||
// Create a new file to fill
|
||||
var file = new KWAJFile();
|
||||
|
||||
#region File Header
|
||||
|
||||
// Try to parse the header
|
||||
var header = ParseHeader(data);
|
||||
if (header == null)
|
||||
return null;
|
||||
|
||||
// Set the header
|
||||
file.Header = header;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Extended Header
|
||||
|
||||
if (header.HeaderFlags != 0)
|
||||
{
|
||||
var extensions = new KWAJHeaderExtensions();
|
||||
|
||||
#if NET20 || NET35
|
||||
if ((header.HeaderFlags & KWAJHeaderFlags.HasDecompressedLength) != 0)
|
||||
extensions.DecompressedLength = data.ReadUInt32();
|
||||
if ((header.HeaderFlags & KWAJHeaderFlags.HasUnknownFlag) != 0)
|
||||
extensions.UnknownPurpose = data.ReadUInt16();
|
||||
if ((header.HeaderFlags & KWAJHeaderFlags.HasPrefixedData) != 0)
|
||||
{
|
||||
extensions.UnknownDataLength = data.ReadUInt16();
|
||||
extensions.UnknownData = data.ReadBytes((int)extensions.UnknownDataLength);
|
||||
}
|
||||
if ((header.HeaderFlags & KWAJHeaderFlags.HasFileName) != 0)
|
||||
extensions.FileName = data.ReadNullTerminatedAnsiString();
|
||||
if ((header.HeaderFlags & KWAJHeaderFlags.HasFileExtension) != 0)
|
||||
extensions.FileExtension = data.ReadNullTerminatedAnsiString();
|
||||
if ((header.HeaderFlags & KWAJHeaderFlags.HasPrefixedData) != 0)
|
||||
{
|
||||
extensions.ArbitraryTextLength = data.ReadUInt16();
|
||||
extensions.ArbitraryText = data.ReadBytes((int)extensions.ArbitraryTextLength);
|
||||
}
|
||||
#else
|
||||
if (header.HeaderFlags.HasFlag(KWAJHeaderFlags.HasDecompressedLength))
|
||||
extensions.DecompressedLength = data.ReadUInt32();
|
||||
if (header.HeaderFlags.HasFlag(KWAJHeaderFlags.HasUnknownFlag))
|
||||
extensions.UnknownPurpose = data.ReadUInt16();
|
||||
if (header.HeaderFlags.HasFlag(KWAJHeaderFlags.HasPrefixedData))
|
||||
{
|
||||
extensions.UnknownDataLength = data.ReadUInt16();
|
||||
extensions.UnknownData = data.ReadBytes((int)extensions.UnknownDataLength);
|
||||
}
|
||||
if (header.HeaderFlags.HasFlag(KWAJHeaderFlags.HasFileName))
|
||||
extensions.FileName = data.ReadNullTerminatedAnsiString();
|
||||
if (header.HeaderFlags.HasFlag(KWAJHeaderFlags.HasFileExtension))
|
||||
extensions.FileExtension = data.ReadNullTerminatedAnsiString();
|
||||
if (header.HeaderFlags.HasFlag(KWAJHeaderFlags.HasPrefixedData))
|
||||
{
|
||||
extensions.ArbitraryTextLength = data.ReadUInt16();
|
||||
extensions.ArbitraryText = data.ReadBytes((int)extensions.ArbitraryTextLength);
|
||||
}
|
||||
#endif
|
||||
|
||||
file.HeaderExtensions = extensions;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
return file;
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Ignore the actual error
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled header on success, null on error</returns>
|
||||
private static KWAJHeader? ParseHeader(Stream data)
|
||||
{
|
||||
var header = new KWAJHeader();
|
||||
|
||||
header.Magic = data.ReadBytes(8);
|
||||
if (Encoding.ASCII.GetString(header.Magic) != Encoding.ASCII.GetString(KWAJSignatureBytes))
|
||||
return null;
|
||||
|
||||
header.CompressionType = (KWAJCompressionType)data.ReadUInt16();
|
||||
if (header.CompressionType > KWAJCompressionType.MSZIP)
|
||||
return null;
|
||||
|
||||
header.DataOffset = data.ReadUInt16();
|
||||
header.HeaderFlags = (KWAJHeaderFlags)data.ReadUInt16();
|
||||
|
||||
return header;
|
||||
}
|
||||
}
|
||||
}
|
||||
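For context, a small sketch of how the KWAJ deserializer above might be exercised. The command-line handling and output formatting are illustrative assumptions; only the `LZKWAJ`, `KWAJFile`, and header types come from this changeset:

```csharp
using System;
using System.IO;
using SabreTools.Serialization.Deserializers;

class KwajInfo
{
    static void Main(string[] args)
    {
        // args[0] is assumed to point at a KWAJ-compressed file
        using Stream input = File.OpenRead(args[0]);

        var kwaj = new LZKWAJ().Deserialize(input);
        if (kwaj?.Header == null)
        {
            Console.WriteLine("Not a valid KWAJ file");
            return;
        }

        Console.WriteLine($"Compression type: {kwaj.Header.CompressionType}");
        Console.WriteLine($"Data offset:      {kwaj.Header.DataOffset}");

        // Extension fields are only populated when the matching header flag is set
        var ext = kwaj.HeaderExtensions;
        if (ext != null)
            Console.WriteLine($"Original name:    {ext.FileName}.{ext.FileExtension}");
    }
}
```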
65 SabreTools.Serialization/Deserializers/LZQBasic.cs (Normal file)
@@ -0,0 +1,65 @@
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO.Extensions;
|
||||
using SabreTools.Models.LZ;
|
||||
using static SabreTools.Models.LZ.Constants;
|
||||
|
||||
namespace SabreTools.Serialization.Deserializers
|
||||
{
|
||||
public class LZQBasic : BaseBinaryDeserializer<QBasicFile>
|
||||
{
|
||||
/// <inheritdoc/>
|
||||
public override QBasicFile? Deserialize(Stream? data)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null || !data.CanRead)
|
||||
return null;
|
||||
|
||||
try
|
||||
{
|
||||
// Cache the current offset
|
||||
int initialOffset = (int)data.Position;
|
||||
|
||||
// Create a new file to fill
|
||||
var file = new QBasicFile();
|
||||
|
||||
#region File Header
|
||||
|
||||
// Try to parse the header
|
||||
var header = ParseHeader(data);
|
||||
if (header == null)
|
||||
return null;
|
||||
|
||||
// Set the header
|
||||
file.Header = header;
|
||||
|
||||
#endregion
|
||||
|
||||
return file;
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Ignore the actual error
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled header on success, null on error</returns>
|
||||
private static QBasicHeader? ParseHeader(Stream data)
|
||||
{
|
||||
var header = new QBasicHeader();
|
||||
|
||||
header.Magic = data.ReadBytes(8);
|
||||
if (Encoding.ASCII.GetString(header.Magic) != Encoding.ASCII.GetString(QBasicSignatureBytes))
|
||||
return null;
|
||||
|
||||
header.RealLength = data.ReadUInt32();
|
||||
|
||||
return header;
|
||||
}
|
||||
}
|
||||
}
|
||||
70 SabreTools.Serialization/Deserializers/LZSZDD.cs (Normal file)
@@ -0,0 +1,70 @@
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO.Extensions;
|
||||
using SabreTools.Models.LZ;
|
||||
using static SabreTools.Models.LZ.Constants;
|
||||
|
||||
namespace SabreTools.Serialization.Deserializers
|
||||
{
|
||||
public class LZSZDD : BaseBinaryDeserializer<SZDDFile>
|
||||
{
|
||||
/// <inheritdoc/>
|
||||
public override SZDDFile? Deserialize(Stream? data)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null || !data.CanRead)
|
||||
return null;
|
||||
|
||||
try
|
||||
{
|
||||
// Cache the current offset
|
||||
int initialOffset = (int)data.Position;
|
||||
|
||||
// Create a new file to fill
|
||||
var file = new SZDDFile();
|
||||
|
||||
#region File Header
|
||||
|
||||
// Try to parse the header
|
||||
var header = ParseHeader(data);
|
||||
if (header == null)
|
||||
return null;
|
||||
|
||||
// Set the header
|
||||
file.Header = header;
|
||||
|
||||
#endregion
|
||||
|
||||
return file;
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Ignore the actual error
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled header on success, null on error</returns>
|
||||
private static SZDDHeader? ParseHeader(Stream data)
|
||||
{
|
||||
var header = new SZDDHeader();
|
||||
|
||||
header.Magic = data.ReadBytes(8);
|
||||
if (Encoding.ASCII.GetString(header.Magic) != Encoding.ASCII.GetString(SZDDSignatureBytes))
|
||||
return null;
|
||||
|
||||
header.CompressionType = (ExpandCompressionType)data.ReadByteValue();
|
||||
if (header.CompressionType != ExpandCompressionType.A)
|
||||
return null;
|
||||
|
||||
header.LastChar = (char)data.ReadByteValue();
|
||||
header.RealLength = data.ReadUInt32();
|
||||
|
||||
return header;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -42,9 +42,13 @@ namespace SabreTools.Serialization
Wrapper.CHD item => item.PrettyPrint(),
Wrapper.CIA item => item.PrettyPrint(),
Wrapper.GCF item => item.PrettyPrint(),
Wrapper.InstallShieldArchiveV3 item => item.PrettyPrint(),
Wrapper.InstallShieldCabinet item => item.PrettyPrint(),
Wrapper.IRD item => item.PrettyPrint(),
Wrapper.LinearExecutable item => item.PrettyPrint(),
Wrapper.LZKWAJ item => item.PrettyPrint(),
Wrapper.LZQBasic item => item.PrettyPrint(),
Wrapper.LZSZDD item => item.PrettyPrint(),
Wrapper.MicrosoftCabinet item => item.PrettyPrint(),
Wrapper.MoPaQ item => item.PrettyPrint(),
Wrapper.MSDOS item => item.PrettyPrint(),
@@ -87,9 +91,13 @@ namespace SabreTools.Serialization
Wrapper.CHD item => item.ExportJSON(),
Wrapper.CIA item => item.ExportJSON(),
Wrapper.GCF item => item.ExportJSON(),
Wrapper.InstallShieldArchiveV3 item => item.ExportJSON(),
Wrapper.InstallShieldCabinet item => item.ExportJSON(),
Wrapper.IRD item => item.ExportJSON(),
Wrapper.LinearExecutable item => item.ExportJSON(),
Wrapper.LZKWAJ item => item.ExportJSON(),
Wrapper.LZQBasic item => item.ExportJSON(),
Wrapper.LZSZDD item => item.ExportJSON(),
Wrapper.MicrosoftCabinet item => item.ExportJSON(),
Wrapper.MoPaQ item => item.ExportJSON(),
Wrapper.MSDOS item => item.ExportJSON(),
@@ -199,6 +207,16 @@ namespace SabreTools.Serialization
|
||||
return builder;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Export the item information as pretty-printed text
|
||||
/// </summary>
|
||||
private static StringBuilder PrettyPrint(this Wrapper.InstallShieldArchiveV3 item)
|
||||
{
|
||||
var builder = new StringBuilder();
|
||||
InstallShieldArchiveV3.Print(builder, item.Model);
|
||||
return builder;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Export the item information as pretty-printed text
|
||||
/// </summary>
|
||||
@@ -229,6 +247,36 @@ namespace SabreTools.Serialization
|
||||
return builder;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Export the item information as pretty-printed text
|
||||
/// </summary>
|
||||
private static StringBuilder PrettyPrint(this Wrapper.LZKWAJ item)
|
||||
{
|
||||
var builder = new StringBuilder();
|
||||
LZKWAJ.Print(builder, item.Model);
|
||||
return builder;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Export the item information as pretty-printed text
|
||||
/// </summary>
|
||||
private static StringBuilder PrettyPrint(this Wrapper.LZQBasic item)
|
||||
{
|
||||
var builder = new StringBuilder();
|
||||
LZQBasic.Print(builder, item.Model);
|
||||
return builder;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Export the item information as pretty-printed text
|
||||
/// </summary>
|
||||
private static StringBuilder PrettyPrint(this Wrapper.LZSZDD item)
|
||||
{
|
||||
var builder = new StringBuilder();
|
||||
LZSZDD.Print(builder, item.Model);
|
||||
return builder;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Export the item information as pretty-printed text
|
||||
/// </summary>
|
||||
|
||||
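As a rough illustration of the printer plumbing above, the static `Print` methods can also be called directly on a deserialized model. The file handling here is an assumption; only the printer and deserializer types come from this changeset:

```csharp
using System;
using System.IO;
using System.Text;
using SabreTools.Serialization.Deserializers;

class PrintInfo
{
    static void Main(string[] args)
    {
        // args[0] is assumed to point at a QBasic-variant LZ file
        using Stream input = File.OpenRead(args[0]);

        var model = new LZQBasic().Deserialize(input);
        if (model == null)
            return;

        // Each printer exposes a static Print(StringBuilder, TModel) next to IPrinter<TModel>
        var builder = new StringBuilder();
        SabreTools.Serialization.Printers.LZQBasic.Print(builder, model);
        Console.Write(builder.ToString());
    }
}
```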
114 SabreTools.Serialization/Printers/InstallShieldArchiveV3.cs (Normal file)
@@ -0,0 +1,114 @@
|
||||
using System.Text;
|
||||
using SabreTools.Models.InstallShieldArchiveV3;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
|
||||
namespace SabreTools.Serialization.Printers
|
||||
{
|
||||
public class InstallShieldArchiveV3 : IPrinter<Archive>
|
||||
{
|
||||
/// <inheritdoc/>
|
||||
public void PrintInformation(StringBuilder builder, Archive model)
|
||||
=> Print(builder, model);
|
||||
|
||||
public static void Print(StringBuilder builder, Archive archive)
|
||||
{
|
||||
builder.AppendLine("InstallShield Archive V3 Information:");
|
||||
builder.AppendLine("-------------------------");
|
||||
builder.AppendLine();
|
||||
|
||||
Print(builder, archive.Header);
|
||||
Print(builder, archive.Directories);
|
||||
Print(builder, archive.Files);
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, Header? header)
|
||||
{
|
||||
builder.AppendLine(" Header Information:");
|
||||
builder.AppendLine(" -------------------------");
|
||||
if (header == null)
|
||||
{
|
||||
builder.AppendLine(" No header");
|
||||
builder.AppendLine();
|
||||
return;
|
||||
}
|
||||
|
||||
builder.AppendLine(header.Signature1, " Signature 1");
|
||||
builder.AppendLine(header.Signature2, " Signature 2");
|
||||
builder.AppendLine(header.Reserved0, " Reserved 0");
|
||||
builder.AppendLine(header.IsMultivolume, " Is multivolume");
|
||||
builder.AppendLine(header.FileCount, " File count");
|
||||
builder.AppendLine(header.DateTime, " Datetime");
|
||||
builder.AppendLine(header.CompressedSize, " Compressed size");
|
||||
builder.AppendLine(header.UncompressedSize, " Uncompressed size");
|
||||
builder.AppendLine(header.Reserved1, " Reserved 1");
|
||||
builder.AppendLine(header.VolumeTotal, " Volume total");
|
||||
builder.AppendLine(header.VolumeNumber, " Volume number");
|
||||
builder.AppendLine(header.Reserved2, " Reserved 2");
|
||||
builder.AppendLine(header.SplitBeginAddress, " Split begin address");
|
||||
builder.AppendLine(header.SplitEndAddress, " Split end address");
|
||||
builder.AppendLine(header.TocAddress, " TOC address");
|
||||
builder.AppendLine(header.Reserved3, " Reserved 3");
|
||||
builder.AppendLine(header.DirCount, " Dir count");
|
||||
builder.AppendLine(header.Reserved4, " Reserved 4");
|
||||
builder.AppendLine(header.Reserved5, " Reserved 5");
|
||||
builder.AppendLine();
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, Directory[]? entries)
|
||||
{
|
||||
builder.AppendLine(" Directories:");
|
||||
builder.AppendLine(" -------------------------");
|
||||
if (entries == null || entries.Length == 0)
|
||||
{
|
||||
builder.AppendLine(" No directories");
|
||||
builder.AppendLine();
|
||||
return;
|
||||
}
|
||||
|
||||
for (int i = 0; i < entries.Length; i++)
|
||||
{
|
||||
var entry = entries[i];
|
||||
builder.AppendLine($" Directory {i}");
|
||||
builder.AppendLine(entry.FileCount, " File count");
|
||||
builder.AppendLine(entry.ChunkSize, " Chunk size");
|
||||
builder.AppendLine(entry.NameLength, " Name length");
|
||||
builder.AppendLine(entry.Name, " Name");
|
||||
}
|
||||
|
||||
builder.AppendLine();
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, File[]? entries)
|
||||
{
|
||||
builder.AppendLine(" Files:");
|
||||
builder.AppendLine(" -------------------------");
|
||||
if (entries == null || entries.Length == 0)
|
||||
{
|
||||
builder.AppendLine(" No files");
|
||||
builder.AppendLine();
|
||||
return;
|
||||
}
|
||||
|
||||
for (int i = 0; i < entries.Length; i++)
|
||||
{
|
||||
var entry = entries[i];
|
||||
builder.AppendLine($" File {i}");
|
||||
builder.AppendLine(entry.VolumeEnd, " Volume end");
|
||||
builder.AppendLine(entry.Index, " Index");
|
||||
builder.AppendLine(entry.UncompressedSize, " Uncompressed size");
|
||||
builder.AppendLine(entry.CompressedSize, " Compressed size");
|
||||
builder.AppendLine(entry.Offset, " Offset");
|
||||
builder.AppendLine(entry.DateTime, " Datetime");
|
||||
builder.AppendLine(entry.Reserved0, " Reserved 0");
|
||||
builder.AppendLine(entry.ChunkSize, " Chunk size");
|
||||
builder.AppendLine($" Attrib: {entry.Attrib} (0x{entry.Attrib:X})");
|
||||
builder.AppendLine(entry.IsSplit, " Is split");
|
||||
builder.AppendLine(entry.Reserved1, " Reserved 1");
|
||||
builder.AppendLine(entry.VolumeStart, " Volume start");
|
||||
builder.AppendLine(entry.Name, " Name");
|
||||
}
|
||||
|
||||
builder.AppendLine();
|
||||
}
|
||||
}
|
||||
}
|
||||
63 SabreTools.Serialization/Printers/LZKWAJ.cs (Normal file)
@@ -0,0 +1,63 @@
|
||||
using System.Text;
|
||||
using SabreTools.Models.LZ;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
|
||||
namespace SabreTools.Serialization.Printers
|
||||
{
|
||||
public class LZKWAJ : IPrinter<KWAJFile>
|
||||
{
|
||||
/// <inheritdoc/>
|
||||
public void PrintInformation(StringBuilder builder, KWAJFile model)
|
||||
=> Print(builder, model);
|
||||
|
||||
public static void Print(StringBuilder builder, KWAJFile file)
|
||||
{
|
||||
builder.AppendLine("LZ-compressed File, KWAJ Variant Information:");
|
||||
builder.AppendLine("-------------------------");
|
||||
builder.AppendLine();
|
||||
|
||||
Print(builder, file.Header);
|
||||
Print(builder, file.HeaderExtensions);
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, KWAJHeader? header)
|
||||
{
|
||||
builder.AppendLine(" Header Information:");
|
||||
builder.AppendLine(" -------------------------");
|
||||
if (header == null)
|
||||
{
|
||||
builder.AppendLine(" No header");
|
||||
builder.AppendLine();
|
||||
return;
|
||||
}
|
||||
|
||||
builder.AppendLine(header.Magic, " Magic number");
|
||||
builder.AppendLine($" Compression type: {header.CompressionType} (0x{header.CompressionType:X})");
|
||||
builder.AppendLine(header.DataOffset, " Data offset");
|
||||
builder.AppendLine($" Header flags: {header.HeaderFlags} (0x{header.HeaderFlags:X})");
|
||||
builder.AppendLine();
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, KWAJHeaderExtensions? header)
|
||||
{
|
||||
builder.AppendLine(" Header Extensions Information:");
|
||||
builder.AppendLine(" -------------------------");
|
||||
if (header == null)
|
||||
{
|
||||
builder.AppendLine(" No header extensions");
|
||||
builder.AppendLine();
|
||||
return;
|
||||
}
|
||||
|
||||
builder.AppendLine(header.DecompressedLength, " Decompressed length");
|
||||
builder.AppendLine(header.UnknownPurpose, " Unknown purpose");
|
||||
builder.AppendLine(header.UnknownDataLength, " Unknown data length");
|
||||
builder.AppendLine(header.UnknownData, " Unknown data");
|
||||
builder.AppendLine(header.FileName, " File name");
|
||||
builder.AppendLine(header.FileExtension, " File extension");
|
||||
builder.AppendLine(header.ArbitraryTextLength, " Arbitrary text length");
|
||||
builder.AppendLine(header.ArbitraryText, " Arbitrary text");
|
||||
builder.AppendLine();
|
||||
}
|
||||
}
|
||||
}
|
||||
38 SabreTools.Serialization/Printers/LZQBasic.cs (Normal file)
@@ -0,0 +1,38 @@
|
||||
using System.Text;
|
||||
using SabreTools.Models.LZ;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
|
||||
namespace SabreTools.Serialization.Printers
|
||||
{
|
||||
public class LZQBasic : IPrinter<QBasicFile>
|
||||
{
|
||||
/// <inheritdoc/>
|
||||
public void PrintInformation(StringBuilder builder, QBasicFile model)
|
||||
=> Print(builder, model);
|
||||
|
||||
public static void Print(StringBuilder builder, QBasicFile file)
|
||||
{
|
||||
builder.AppendLine("LZ-compressed File, QBasic Variant Information:");
|
||||
builder.AppendLine("-------------------------");
|
||||
builder.AppendLine();
|
||||
|
||||
Print(builder, file.Header);
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, QBasicHeader? header)
|
||||
{
|
||||
builder.AppendLine(" Header Information:");
|
||||
builder.AppendLine(" -------------------------");
|
||||
if (header == null)
|
||||
{
|
||||
builder.AppendLine(" No header");
|
||||
builder.AppendLine();
|
||||
return;
|
||||
}
|
||||
|
||||
builder.AppendLine(header.Magic, " Magic number");
|
||||
builder.AppendLine(header.RealLength, " Real length");
|
||||
builder.AppendLine();
|
||||
}
|
||||
}
|
||||
}
|
||||
40 SabreTools.Serialization/Printers/LZSZDD.cs (Normal file)
@@ -0,0 +1,40 @@
|
||||
using System.Text;
|
||||
using SabreTools.Models.LZ;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
|
||||
namespace SabreTools.Serialization.Printers
|
||||
{
|
||||
public class LZSZDD : IPrinter<SZDDFile>
|
||||
{
|
||||
/// <inheritdoc/>
|
||||
public void PrintInformation(StringBuilder builder, SZDDFile model)
|
||||
=> Print(builder, model);
|
||||
|
||||
public static void Print(StringBuilder builder, SZDDFile file)
|
||||
{
|
||||
builder.AppendLine("LZ-compressed File, SZDD Variant Information:");
|
||||
builder.AppendLine("-------------------------");
|
||||
builder.AppendLine();
|
||||
|
||||
Print(builder, file.Header);
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, SZDDHeader? header)
|
||||
{
|
||||
builder.AppendLine(" Header Information:");
|
||||
builder.AppendLine(" -------------------------");
|
||||
if (header == null)
|
||||
{
|
||||
builder.AppendLine(" No header");
|
||||
builder.AppendLine();
|
||||
return;
|
||||
}
|
||||
|
||||
builder.AppendLine(header.Magic, " Magic number");
|
||||
builder.AppendLine($" Compression type: {header.CompressionType} (0x{header.CompressionType:X})");
|
||||
builder.AppendLine(header.LastChar, " Last char");
|
||||
builder.AppendLine(header.RealLength, " Real length");
|
||||
builder.AppendLine();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -10,7 +10,7 @@
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.8.1</Version>
<Version>1.8.3</Version>

<!-- Package Properties -->
<Authors>Matt Nadareski</Authors>
@@ -31,9 +31,10 @@
<ItemGroup>
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.ASN1" Version="1.5.0" />
<PackageReference Include="SabreTools.Compression" Version="0.6.2" />
<PackageReference Include="SabreTools.Hashing" Version="1.4.0" />
<PackageReference Include="SabreTools.IO" Version="1.6.1" />
<PackageReference Include="SabreTools.Models" Version="1.5.5" />
<PackageReference Include="SabreTools.Models" Version="1.5.7" />
</ItemGroup>

</Project>
@@ -117,6 +117,7 @@ namespace SabreTools.Serialization.Serializers
writer.WriteRequiredAttributeString("size", file.Size, throwOnError: true);
writer.WriteOptionalAttributeString("date", file.Date);
writer.WriteRequiredAttributeString("crc", file.CRC?.ToUpperInvariant(), throwOnError: true);
writer.WriteRequiredAttributeString("sha1", file.SHA1?.ToUpperInvariant());

writer.WriteEndElement(); // file
}
287 SabreTools.Serialization/Wrappers/InstallShieldArchiveV3.cs (Normal file)
@@ -0,0 +1,287 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using SabreTools.Compression.Blast;
|
||||
using SabreTools.Models.InstallShieldArchiveV3;
|
||||
|
||||
namespace SabreTools.Serialization.Wrappers
|
||||
{
|
||||
/// <remarks>
|
||||
/// Reference (de)compressor: https://www.sac.sk/download/pack/icomp95.zip
|
||||
/// </remarks>
|
||||
/// <see href="https://github.com/wfr/unshieldv3"/>
|
||||
public partial class InstallShieldArchiveV3 : WrapperBase<Archive>
|
||||
{
|
||||
#region Descriptive Properties
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override string DescriptionString => "InstallShield Archive V3";
|
||||
|
||||
#endregion
|
||||
|
||||
#region Extension Properties
|
||||
|
||||
/// <inheritdoc cref="Header.DirCount"/>
|
||||
public ushort DirCount => Model.Header?.DirCount ?? 0;
|
||||
|
||||
/// <inheritdoc cref="Header.FileCount"/>
|
||||
public ushort FileCount => Model.Header?.FileCount ?? 0;
|
||||
|
||||
/// <inheritdoc cref="Archive.Directories"/>
|
||||
public Models.InstallShieldArchiveV3.Directory[] Directories => Model.Directories ?? [];
|
||||
|
||||
/// <inheritdoc cref="Archive.Files"/>
|
||||
public Models.InstallShieldArchiveV3.File[] Files => Model.Files ?? [];
|
||||
|
||||
/// <summary>
|
||||
/// Map of all files to their parent directories by index
|
||||
/// </summary>
|
||||
public Dictionary<int, int> FileDirMap
|
||||
{
|
||||
get
|
||||
{
|
||||
// Return the prebuilt map
|
||||
if (_fileDirMap != null)
|
||||
return _fileDirMap;
|
||||
|
||||
// Build the file map
|
||||
_fileDirMap = [];
|
||||
|
||||
int fileId = 0;
|
||||
for (int i = 0; i < Directories.Length; i++)
|
||||
{
|
||||
var dir = Directories[i];
|
||||
for (int j = 0; j < dir.FileCount; j++)
|
||||
{
|
||||
_fileDirMap[fileId++] = i;
|
||||
}
|
||||
}
|
||||
|
||||
return _fileDirMap;
|
||||
}
|
||||
}
|
||||
private Dictionary<int, int>? _fileDirMap = null;
|
||||
|
||||
/// <summary>
|
||||
/// Map of all files found in the archive
|
||||
/// </summary>
|
||||
public Dictionary<string, Models.InstallShieldArchiveV3.File> FileNameMap
|
||||
{
|
||||
get
|
||||
{
|
||||
// Return the prebuilt map
|
||||
if (_fileNameMap != null)
|
||||
return _fileNameMap;
|
||||
|
||||
// Build the file map
|
||||
_fileNameMap = [];
|
||||
for (int fileIndex = 0; fileIndex < Files.Length; fileIndex++)
|
||||
{
|
||||
// Get the current file
|
||||
var file = Files[fileIndex];
|
||||
|
||||
// Get the parent directory
|
||||
int dirIndex = FileDirMap[fileIndex];
|
||||
if (dirIndex < 0 || dirIndex >= DirCount)
|
||||
continue;
|
||||
|
||||
// Create the filename
|
||||
string filename = Path.Combine(
|
||||
Directories[dirIndex]?.Name ?? $"dir_{dirIndex}",
|
||||
file.Name ?? $"file_{fileIndex}"
|
||||
);
|
||||
|
||||
// Add to the map
|
||||
_fileNameMap[filename] = file;
|
||||
}
|
||||
|
||||
return _fileNameMap;
|
||||
}
|
||||
}
|
||||
private Dictionary<string, Models.InstallShieldArchiveV3.File>? _fileNameMap = null;
|
||||
|
||||
/// <summary>
|
||||
/// Data offset for all archives
|
||||
/// </summary>
|
||||
private const uint DataStart = 255;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Constructors
|
||||
|
||||
/// <inheritdoc/>
|
||||
public InstallShieldArchiveV3(Archive? model, byte[]? data, int offset)
|
||||
: base(model, data, offset)
|
||||
{
|
||||
// All logic is handled by the base class
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public InstallShieldArchiveV3(Archive? model, Stream? data)
|
||||
: base(model, data)
|
||||
{
|
||||
// All logic is handled by the base class
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Create an InstallShield Archive V3 from a byte array and offset
|
||||
/// </summary>
|
||||
/// <param name="data">Byte array representing the archive</param>
|
||||
/// <param name="offset">Offset within the array to parse</param>
|
||||
/// <returns>A archive wrapper on success, null on failure</returns>
|
||||
public static InstallShieldArchiveV3? Create(byte[]? data, int offset)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null || data.Length == 0)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (offset < 0 || offset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Create a memory stream and use that
|
||||
var dataStream = new MemoryStream(data, offset, data.Length - offset);
|
||||
return Create(dataStream);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Create a InstallShield Archive V3 from a Stream
|
||||
/// </summary>
|
||||
/// <param name="data">Stream representing the archive</param>
|
||||
/// <returns>A archive wrapper on success, null on failure</returns>
|
||||
public static InstallShieldArchiveV3? Create(Stream? data)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null || !data.CanRead)
|
||||
return null;
|
||||
|
||||
try
|
||||
{
|
||||
var archive = Deserializers.InstallShieldArchiveV3.DeserializeStream(data);
|
||||
if (archive == null)
|
||||
return null;
|
||||
|
||||
return new InstallShieldArchiveV3(archive, data);
|
||||
}
|
||||
catch
|
||||
{
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Extraction
|
||||
|
||||
/// <summary>
|
||||
/// Extract all files from the ISAv3 to an output directory
|
||||
/// </summary>
|
||||
/// <param name="outputDirectory">Output directory to write to</param>
|
||||
/// <returns>True if all files extracted, false otherwise</returns>
|
||||
public bool ExtractAll(string outputDirectory)
|
||||
{
|
||||
// Get the file count
|
||||
int fileCount = Files.Length;
|
||||
if (fileCount == 0)
|
||||
return false;
|
||||
|
||||
// Loop through and extract all files to the output
|
||||
bool allExtracted = true;
|
||||
for (int i = 0; i < fileCount; i++)
|
||||
{
|
||||
allExtracted &= ExtractFile(i, outputDirectory);
|
||||
}
|
||||
|
||||
return allExtracted;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Extract a file from the ISAv3 to an output directory by index
|
||||
/// </summary>
|
||||
/// <param name="index">File index to extract</param>
|
||||
/// <param name="outputDirectory">Output directory to write to</param>
|
||||
/// <returns>True if the file extracted, false otherwise</returns>
|
||||
public bool ExtractFile(int index, string outputDirectory)
|
||||
{
|
||||
// If the files index is invalid
|
||||
if (index < 0 || index >= FileCount)
|
||||
return false;
|
||||
|
||||
// Get the file
|
||||
var file = Files[index];
|
||||
if (file == null)
|
||||
return false;
|
||||
|
||||
// Create the filename
|
||||
var filename = file.Name;
|
||||
if (filename == null)
|
||||
return false;
|
||||
|
||||
// Get the directory index
|
||||
int dirIndex = FileDirMap[index];
|
||||
if (dirIndex < 0 || dirIndex > DirCount)
|
||||
return false;
|
||||
|
||||
// Get the directory name
|
||||
var dirName = Directories[dirIndex].Name;
|
||||
if (dirName != null)
|
||||
filename = Path.Combine(dirName, filename);
|
||||
|
||||
// Get and adjust the file offset
|
||||
long fileOffset = file.Offset + DataStart;
|
||||
if (fileOffset < 0 || fileOffset >= Length)
|
||||
return false;
|
||||
|
||||
// Get the file sizes
|
||||
long fileSize = file.CompressedSize;
|
||||
long outputFileSize = file.UncompressedSize;
|
||||
|
||||
// Read the compressed data directly
|
||||
var compressedData = ReadFromDataSource((int)fileOffset, (int)fileSize);
|
||||
if (compressedData == null)
|
||||
return false;
|
||||
|
||||
// If the compressed and uncompressed sizes match
|
||||
byte[] data;
|
||||
if (fileSize == outputFileSize)
|
||||
{
|
||||
data = compressedData;
|
||||
}
|
||||
else
|
||||
{
|
||||
// Decompress the data
|
||||
var decomp = Decompressor.Create();
|
||||
var outData = new MemoryStream();
|
||||
decomp.CopyTo(compressedData, outData);
|
||||
data = outData.ToArray();
|
||||
}
|
||||
|
||||
// If we have an invalid output directory
|
||||
if (string.IsNullOrEmpty(outputDirectory))
|
||||
return false;
|
||||
|
||||
// Create the full output path
|
||||
filename = Path.Combine(outputDirectory, filename);
|
||||
|
||||
// Ensure the output directory is created
|
||||
var directoryName = Path.GetDirectoryName(filename);
|
||||
if (directoryName != null)
|
||||
System.IO.Directory.CreateDirectory(directoryName);
|
||||
|
||||
// Try to write the data
|
||||
try
|
||||
{
|
||||
// Open the output file for writing
|
||||
using Stream fs = System.IO.File.OpenWrite(filename);
|
||||
fs.Write(data, 0, data.Length);
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
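A short sketch of how the new wrapper might be used end to end. The argument handling and console output are illustrative; `Create`, `DirCount`, `FileCount`, and `ExtractAll` are the members defined above:

```csharp
using System;
using System.IO;
using SabreTools.Serialization.Wrappers;

class ExtractIsa3
{
    static void Main(string[] args)
    {
        // args[0]: path to an InstallShield V3 archive, args[1]: output directory
        using Stream input = File.OpenRead(args[0]);

        // Create() validates the stream and returns null instead of throwing on bad input
        var archive = InstallShieldArchiveV3.Create(input);
        if (archive == null)
        {
            Console.WriteLine("Not an InstallShield V3 archive");
            return;
        }

        Console.WriteLine($"Directories: {archive.DirCount}, files: {archive.FileCount}");
        bool ok = archive.ExtractAll(args[1]);
        Console.WriteLine(ok ? "Extracted all files" : "Some files failed to extract");
    }
}
```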
139 SabreTools.Serialization/Wrappers/LZKWAJ.cs (Normal file)
@@ -0,0 +1,139 @@
|
||||
using System.IO;
|
||||
using SabreTools.Compression.SZDD;
|
||||
using SabreTools.Models.LZ;
|
||||
|
||||
namespace SabreTools.Serialization.Wrappers
|
||||
{
|
||||
public class LZKWAJ : WrapperBase<KWAJFile>
|
||||
{
|
||||
#region Descriptive Properties
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override string DescriptionString => "LZ-compressed file, KWAJ variant";
|
||||
|
||||
#endregion
|
||||
|
||||
#region Constructors
|
||||
|
||||
/// <inheritdoc/>
|
||||
public LZKWAJ(KWAJFile? model, byte[]? data, int offset)
|
||||
: base(model, data, offset)
|
||||
{
|
||||
// All logic is handled by the base class
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public LZKWAJ(KWAJFile? model, Stream? data)
|
||||
: base(model, data)
|
||||
{
|
||||
// All logic is handled by the base class
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Create an LZ (KWAJ variant) from a byte array and offset
|
||||
/// </summary>
|
||||
/// <param name="data">Byte array representing the LZ (KWAJ variant)</param>
|
||||
/// <param name="offset">Offset within the array to parse</param>
|
||||
/// <returns>An LZ (KWAJ variant) wrapper on success, null on failure</returns>
|
||||
public static LZKWAJ? Create(byte[]? data, int offset)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null || data.Length == 0)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (offset < 0 || offset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Create a memory stream and use that
|
||||
var dataStream = new MemoryStream(data, offset, data.Length - offset);
|
||||
return Create(dataStream);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Create a LZ (KWAJ variant) from a Stream
|
||||
/// </summary>
|
||||
/// <param name="data">Stream representing the LZ (KWAJ variant)</param>
|
||||
/// <returns>An LZ (KWAJ variant) wrapper on success, null on failure</returns>
|
||||
public static LZKWAJ? Create(Stream? data)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null || !data.CanRead)
|
||||
return null;
|
||||
|
||||
try
|
||||
{
|
||||
var file = Deserializers.LZKWAJ.DeserializeStream(data);
|
||||
if (file == null)
|
||||
return null;
|
||||
|
||||
return new LZKWAJ(file, data);
|
||||
}
|
||||
catch
|
||||
{
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Extraction
|
||||
|
||||
/// <summary>
|
||||
/// Extract the contents to an output directory
|
||||
/// </summary>
|
||||
/// <param name="outputDirectory">Output directory to write to</param>
|
||||
/// <returns>True if the contents extracted, false otherwise</returns>
|
||||
public bool Extract(string outputDirectory)
|
||||
{
|
||||
// Get the length of the compressed data
|
||||
long compressedSize = Length - Model.Header!.DataOffset;
|
||||
if (compressedSize < Model.Header.DataOffset)
|
||||
return false;
|
||||
|
||||
// Read in the data as an array
|
||||
byte[]? contents = ReadFromDataSource(Model.Header.DataOffset, (int)compressedSize);
|
||||
if (contents == null)
|
||||
return false;
|
||||
|
||||
// Get the decompressor
|
||||
var decompressor = Decompressor.CreateKWAJ(contents, Model.Header!.CompressionType);
|
||||
if (decompressor == null)
|
||||
return false;
|
||||
|
||||
// If we have an invalid output directory
|
||||
if (string.IsNullOrEmpty(outputDirectory))
|
||||
return false;
|
||||
|
||||
// Create the full output path
|
||||
string filename = "tempfile";
|
||||
if (Model.HeaderExtensions?.FileName != null)
|
||||
filename = Model.HeaderExtensions.FileName;
|
||||
if (Model.HeaderExtensions?.FileExtension != null)
|
||||
filename += $".{Model.HeaderExtensions.FileExtension}";
|
||||
|
||||
filename = Path.Combine(outputDirectory, filename);
|
||||
|
||||
// Ensure the output directory is created
|
||||
var directoryName = Path.GetDirectoryName(filename);
|
||||
if (directoryName != null)
|
||||
Directory.CreateDirectory(directoryName);
|
||||
|
||||
// Try to write the data
|
||||
try
|
||||
{
|
||||
// Open the output file for writing
|
||||
using Stream fs = File.OpenWrite(filename);
|
||||
decompressor.CopyTo(fs);
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
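And the equivalent sketch for the KWAJ wrapper; as in the code above, the output file name comes from the optional header extensions when present and otherwise falls back to "tempfile". The argument handling is an illustrative assumption:

```csharp
using System;
using System.IO;
using SabreTools.Serialization.Wrappers;

class ExpandKwaj
{
    static void Main(string[] args)
    {
        // args[0]: KWAJ-compressed input file, args[1]: output directory
        using Stream input = File.OpenRead(args[0]);

        var kwaj = LZKWAJ.Create(input);
        if (kwaj == null)
        {
            Console.WriteLine("Not a KWAJ file");
            return;
        }

        bool ok = kwaj.Extract(args[1]);
        Console.WriteLine(ok ? "Decompressed" : "Decompression failed");
    }
}
```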
134 SabreTools.Serialization/Wrappers/LZQBasic.cs (Normal file)
@@ -0,0 +1,134 @@
|
||||
using System.IO;
using SabreTools.Compression.SZDD;
using SabreTools.Models.LZ;

namespace SabreTools.Serialization.Wrappers
{
    public class LZQBasic : WrapperBase<QBasicFile>
    {
        #region Descriptive Properties

        /// <inheritdoc/>
        public override string DescriptionString => "LZ-compressed file, QBasic variant";

        #endregion

        #region Constructors

        /// <inheritdoc/>
        public LZQBasic(QBasicFile? model, byte[]? data, int offset)
            : base(model, data, offset)
        {
            // All logic is handled by the base class
        }

        /// <inheritdoc/>
        public LZQBasic(QBasicFile? model, Stream? data)
            : base(model, data)
        {
            // All logic is handled by the base class
        }

        /// <summary>
        /// Create an LZ (QBasic variant) from a byte array and offset
        /// </summary>
        /// <param name="data">Byte array representing the LZ (QBasic variant)</param>
        /// <param name="offset">Offset within the array to parse</param>
        /// <returns>An LZ (QBasic variant) wrapper on success, null on failure</returns>
        public static LZQBasic? Create(byte[]? data, int offset)
        {
            // If the data is invalid
            if (data == null || data.Length == 0)
                return null;

            // If the offset is out of bounds
            if (offset < 0 || offset >= data.Length)
                return null;

            // Create a memory stream and use that
            var dataStream = new MemoryStream(data, offset, data.Length - offset);
            return Create(dataStream);
        }

        /// <summary>
        /// Create a LZ (QBasic variant) from a Stream
        /// </summary>
        /// <param name="data">Stream representing the LZ (QBasic variant)</param>
        /// <returns>An LZ (QBasic variant) wrapper on success, null on failure</returns>
        public static LZQBasic? Create(Stream? data)
        {
            // If the data is invalid
            if (data == null || !data.CanRead)
                return null;

            try
            {
                var file = Deserializers.LZQBasic.DeserializeStream(data);
                if (file == null)
                    return null;

                return new LZQBasic(file, data);
            }
            catch
            {
                return null;
            }
        }

        #endregion

        #region Extraction

        /// <summary>
        /// Extract the contents to an output directory
        /// </summary>
        /// <param name="outputDirectory">Output directory to write to</param>
        /// <returns>True if the contents extracted, false otherwise</returns>
        public bool Extract(string outputDirectory)
        {
            // Get the length of the compressed data
            long compressedSize = Length - 12;
            if (compressedSize < 12)
                return false;

            // Read in the data as an array
            byte[]? contents = ReadFromDataSource(12, (int)compressedSize);
            if (contents == null)
                return false;

            // Get the decompressor
            var decompressor = Decompressor.CreateQBasic(contents);
            if (decompressor == null)
                return false;

            // If we have an invalid output directory
            if (string.IsNullOrEmpty(outputDirectory))
                return false;

            // Create the full output path
            string filename = Path.Combine(outputDirectory, "tempfile.bin");

            // Ensure the output directory is created
            var directoryName = Path.GetDirectoryName(filename);
            if (directoryName != null)
                Directory.CreateDirectory(directoryName);

            // Try to write the data
            try
            {
                // Open the output file for writing
                using Stream fs = File.OpenWrite(filename);
                decompressor.CopyTo(fs);
            }
            catch
            {
                return false;
            }

            return true;
        }

        #endregion
    }
}
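Create(byte[], int) simply wraps the tail of the array in a MemoryStream and defers to Create(Stream), and because the QBasic header stores no original filename, Extract always writes a file named tempfile.bin. A minimal sketch of that path, illustrative only (the container layout, offset, and paths are assumptions):

using System;
using System.IO;
using SabreTools.Serialization.Wrappers;

class QBasicExtractExample
{
    static void Main()
    {
        // Hypothetical container with some leading bytes before the LZ payload
        byte[] container = File.ReadAllBytes("program.bin");
        int payloadOffset = 16; // assumed start of the compressed data

        // Parses from the offset to the end of the array
        var qbasic = LZQBasic.Create(container, payloadOffset);

        // Writes "tempfile.bin" into the output directory on success
        bool ok = qbasic != null && qbasic.Extract("extracted");
        Console.WriteLine(ok);
    }
}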
SabreTools.Serialization/Wrappers/LZSZDD.cs (new file, 166 lines)
@@ -0,0 +1,166 @@
using System.IO;
using SabreTools.Compression.SZDD;
using SabreTools.Models.LZ;

namespace SabreTools.Serialization.Wrappers
{
    public class LZSZDD : WrapperBase<SZDDFile>
    {
        #region Descriptive Properties

        /// <inheritdoc/>
        public override string DescriptionString => "LZ-compressed file, SZDD variant";

        #endregion

        #region Constructors

        /// <inheritdoc/>
        public LZSZDD(SZDDFile? model, byte[]? data, int offset)
            : base(model, data, offset)
        {
            // All logic is handled by the base class
        }

        /// <inheritdoc/>
        public LZSZDD(SZDDFile? model, Stream? data)
            : base(model, data)
        {
            // All logic is handled by the base class
        }

        /// <summary>
        /// Create an LZ (SZDD variant) from a byte array and offset
        /// </summary>
        /// <param name="data">Byte array representing the LZ (SZDD variant)</param>
        /// <param name="offset">Offset within the array to parse</param>
        /// <returns>An LZ (SZDD variant) wrapper on success, null on failure</returns>
        public static LZSZDD? Create(byte[]? data, int offset)
        {
            // If the data is invalid
            if (data == null || data.Length == 0)
                return null;

            // If the offset is out of bounds
            if (offset < 0 || offset >= data.Length)
                return null;

            // Create a memory stream and use that
            var dataStream = new MemoryStream(data, offset, data.Length - offset);
            return Create(dataStream);
        }

        /// <summary>
        /// Create a LZ (SZDD variant) from a Stream
        /// </summary>
        /// <param name="data">Stream representing the LZ (SZDD variant)</param>
        /// <returns>An LZ (SZDD variant) wrapper on success, null on failure</returns>
        public static LZSZDD? Create(Stream? data)
        {
            // If the data is invalid
            if (data == null || !data.CanRead)
                return null;

            try
            {
                var file = Deserializers.LZSZDD.DeserializeStream(data);
                if (file == null)
                    return null;

                return new LZSZDD(file, data);
            }
            catch
            {
                return null;
            }
        }

        #endregion

        #region Extraction

        /// <summary>
        /// Extract the contents to an output directory
        /// </summary>
        /// <param name="filename">Original filename to use as a base</param>
        /// <param name="outputDirectory">Output directory to write to</param>
        /// <returns>True if the contents extracted, false otherwise</returns>
        public bool Extract(string filename, string outputDirectory)
        {
            // Get the length of the compressed data
            long compressedSize = Length - 14;
            if (compressedSize < 14)
                return false;

            // Read in the data as an array
            byte[]? contents = ReadFromDataSource(14, (int)compressedSize);
            if (contents == null)
                return false;

            // Get the decompressor
            var decompressor = Decompressor.CreateSZDD(contents);
            if (decompressor == null)
                return false;

            // Create the output file
            filename = GetExpandedName(filename).TrimEnd('\0');

            // If we have an invalid output directory
            if (string.IsNullOrEmpty(outputDirectory))
                return false;

            // Create the full output path
            filename = Path.Combine(outputDirectory, filename);

            // Ensure the output directory is created
            var directoryName = Path.GetDirectoryName(filename);
            if (directoryName != null)
                Directory.CreateDirectory(directoryName);

            // Try to write the data
            try
            {
                // Open the output file for writing
                using Stream fs = File.OpenWrite(filename);
                decompressor.CopyTo(fs);
            }
            catch
            {
                return false;
            }

            return true;
        }

        /// <summary>
        /// Get the full name of the input file
        /// </summary>
        private string GetExpandedName(string input)
        {
            // If the extension is missing
            string extension = Path.GetExtension(input).TrimStart('.');
            if (string.IsNullOrEmpty(extension))
                return Path.GetFileNameWithoutExtension(input);

            // If the extension is a single character
            if (extension.Length == 1)
            {
                if (extension == "_" || extension == "$")
                    return $"{Path.GetFileNameWithoutExtension(input)}.{char.ToLower(Model.Header!.LastChar)}";

                return Path.GetFileNameWithoutExtension(input);
            }

            // If the extension isn't formatted
            if (!extension.EndsWith("_"))
                return Path.GetFileNameWithoutExtension(input);

            // Handle replacing characters
            char c = (char.IsUpper(input[0]) ? char.ToLower(Model.Header!.LastChar) : char.ToUpper(Model.Header!.LastChar));
            string text2 = extension.Substring(0, extension.Length - 1) + c;
            return Path.GetFileNameWithoutExtension(input) + "." + text2;
        }

        #endregion
    }
}
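GetExpandedName reverses the renaming convention that the original compression tools apply, using the header's LastChar to restore the final character of the extension. A standalone restatement of that rule with worked inputs, for illustration only (the real implementation is the private method above and reads LastChar from the parsed header):

using System;
using System.IO;

class SzddNameExample
{
    // Mirrors the expansion rule above; lastChar stands in for Model.Header.LastChar
    static string Expand(string input, char lastChar)
    {
        string extension = Path.GetExtension(input).TrimStart('.');
        if (string.IsNullOrEmpty(extension))
            return Path.GetFileNameWithoutExtension(input);

        if (extension.Length == 1)
            return extension == "_" || extension == "$"
                ? $"{Path.GetFileNameWithoutExtension(input)}.{char.ToLower(lastChar)}"
                : Path.GetFileNameWithoutExtension(input);

        if (!extension.EndsWith("_"))
            return Path.GetFileNameWithoutExtension(input);

        // The replacement character's case is keyed off the first character of the input name
        char c = char.IsUpper(input[0]) ? char.ToLower(lastChar) : char.ToUpper(lastChar);
        return Path.GetFileNameWithoutExtension(input) + "." + extension.Substring(0, extension.Length - 1) + c;
    }

    static void Main()
    {
        Console.WriteLine(Expand("README.TX_", 'T')); // prints "README.TXt"
        Console.WriteLine(Expand("setup.ex_", 'e'));  // prints "setup.exE"
    }
}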
@@ -1,5 +1,7 @@
using System;
using System.Collections.Generic;
using System.IO;
using SabreTools.Compression.zlib;
using SabreTools.Models.SGA;

namespace SabreTools.Serialization.Wrappers
@@ -120,6 +122,148 @@ namespace SabreTools.Serialization.Wrappers

        #endregion

        #region Extraction

        /// <summary>
        /// Extract all files from the SGA to an output directory
        /// </summary>
        /// <param name="outputDirectory">Output directory to write to</param>
        /// <returns>True if all files extracted, false otherwise</returns>
        public bool ExtractAll(string outputDirectory)
        {
            // Get the file count
            int fileCount = FileCount;
            if (fileCount == 0)
                return false;

            // Loop through and extract all files to the output
            bool allExtracted = true;
            for (int i = 0; i < fileCount; i++)
            {
                allExtracted &= ExtractFile(i, outputDirectory);
            }

            return allExtracted;
        }

        /// <summary>
        /// Extract a file from the SGA to an output directory by index
        /// </summary>
        /// <param name="index">File index to extract</param>
        /// <param name="outputDirectory">Output directory to write to</param>
        /// <returns>True if the file extracted, false otherwise</returns>
        public bool ExtractFile(int index, string outputDirectory)
        {
            // Get the file count
            int fileCount = FileCount;
            if (fileCount == 0)
                return false;

            // If the files index is invalid
            if (index < 0 || index >= fileCount)
                return false;

            // Create the filename
            var filename = GetFileName(index);
            if (filename == null)
                return false;

            // Loop through and get all parent directories
            var parentNames = new List<string> { filename };

            // Get the parent directory
            string? folderName = GetParentName(index);
            if (folderName != null)
                parentNames.Add(folderName);

            // TODO: Should the section name/alias be used in the path as well?

            // Reverse and assemble the filename
            parentNames.Reverse();
#if NET20 || NET35
            filename = parentNames[0];
            for (int i = 1; i < parentNames.Count; i++)
            {
                filename = Path.Combine(filename, parentNames[i]);
            }
#else
            filename = Path.Combine([.. parentNames]);
#endif

            // Get and adjust the file offset
            long fileOffset = GetFileOffset(index);
            fileOffset += FileDataOffset;
            if (fileOffset < 0)
                return false;

            // Get the file sizes
            long fileSize = GetCompressedSize(index);
            long outputFileSize = GetUncompressedSize(index);

            // Read the compressed data directly
            var compressedData = ReadFromDataSource((int)fileOffset, (int)fileSize);
            if (compressedData == null)
                return false;

            // If the compressed and uncompressed sizes match
            byte[] data;
            if (fileSize == outputFileSize)
            {
                data = compressedData;
            }
            else
            {
                // Inflate the data into the buffer
                var zstream = new ZLib.z_stream_s();
                data = new byte[outputFileSize];
                unsafe
                {
                    fixed (byte* payloadPtr = compressedData)
                    fixed (byte* dataPtr = data)
                    {
                        zstream.next_in = payloadPtr;
                        zstream.avail_in = (uint)compressedData.Length;
                        zstream.total_in = (uint)compressedData.Length;
                        zstream.next_out = dataPtr;
                        zstream.avail_out = (uint)data.Length;
                        zstream.total_out = 0;

                        ZLib.inflateInit_(zstream, ZLib.zlibVersion(), compressedData.Length);
                        int zret = ZLib.inflate(zstream, 1);
                        ZLib.inflateEnd(zstream);
                    }
                }
            }

            // If we have an invalid output directory
            if (string.IsNullOrEmpty(outputDirectory))
                return false;

            // Create the full output path
            filename = Path.Combine(outputDirectory, filename);

            // Ensure the output directory is created
            var directoryName = Path.GetDirectoryName(filename);
            if (directoryName != null)
                System.IO.Directory.CreateDirectory(directoryName);

            // Try to write the data
            try
            {
                // Open the output file for writing
                using Stream fs = System.IO.File.OpenWrite(filename);
                fs.Write(data, 0, data.Length);
            }
            catch
            {
                return false;
            }

            return true;
        }

        #endregion

        #region File

        /// <summary>

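The new extraction region gives SGA the same high-level surface as the other archive wrappers: ExtractAll walks every index and ANDs the per-file results, and ExtractFile rebuilds the output path from the parent folder name, inflating with the bundled zlib bindings only when the compressed and uncompressed sizes differ. A rough usage sketch, illustrative only (it assumes SGA exposes the same Create(Stream) factory as the other wrappers in this namespace, which is not shown in this diff):

using System;
using System.IO;
using SabreTools.Serialization.Wrappers;

class SgaExtractExample
{
    static void Main()
    {
        // Hypothetical Relic SGA archive
        using Stream input = File.OpenRead("archive.sga");

        var sga = SGA.Create(input); // assumed factory, mirroring GCF/NCF/MoPaQ
        if (sga == null)
            return;

        // Extract every file; false means at least one entry failed
        bool allOk = sga.ExtractAll("extracted");
        Console.WriteLine(allOk ? "All files extracted" : "Some files failed");
    }
}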
@@ -45,6 +45,24 @@ namespace SabreTools.Serialization.Wrappers
        /// </summary>
        public T Model { get; private set; }

        /// <summary>
        /// Length of the underlying data
        /// </summary>
        public long Length
        {
            get
            {
                return _dataSource switch
                {
                    DataSource.ByteArray => _byteArrayData!.Length - _byteArrayOffset,
                    DataSource.Stream => _streamData!.Length,

                    // Everything else is invalid
                    _ => -1,
                };
            }
        }

        #endregion

        #region Instance Variables
@@ -202,27 +220,36 @@ namespace SabreTools.Serialization.Wrappers
if (!SegmentValid(position, length))
return null;

// Read and return the data
byte[]? sectionData = null;
switch (_dataSource)
try
{
case DataSource.ByteArray:
sectionData = new byte[length];
Array.Copy(_byteArrayData!, _byteArrayOffset + position, sectionData, 0, length);
break;

case DataSource.Stream:
lock (_streamDataLock)
{
long currentLocation = _streamData!.Position;
_streamData.Seek(position, SeekOrigin.Begin);
sectionData = _streamData.ReadBytes(length);
_streamData.Seek(currentLocation, SeekOrigin.Begin);
// Read and return the data
byte[]? sectionData = null;
switch (_dataSource)
{
case DataSource.ByteArray:
sectionData = new byte[length];
Array.Copy(_byteArrayData!, _byteArrayOffset + position, sectionData, 0, length);
break;
}
}

return sectionData;
case DataSource.Stream:
lock (_streamDataLock)
{
long currentLocation = _streamData!.Position;
_streamData.Seek(position, SeekOrigin.Begin);
sectionData = _streamData.ReadBytes(length);
_streamData.Seek(currentLocation, SeekOrigin.Begin);
break;
}
}

return sectionData;

}
catch
{
// Absorb the error
return null;
}
}

/// <summary>
@@ -368,7 +395,7 @@ namespace SabreTools.Serialization.Wrappers
        /// <summary>
        /// Export the item information as JSON
        /// </summary>
        public override string ExportJSON() => System.Text.Json.JsonSerializer.Serialize(Model, _jsonSerializerOptions);
        public override string ExportJSON() => System.Text.Json.JsonSerializer.Serialize(Model, _jsonSerializerOptions);
#endif

        #endregion

@@ -28,11 +28,13 @@ namespace SabreTools.Serialization.Wrappers
                WrapperType.GCF => GCF.Create(data),
                WrapperType.GZIP => null,// TODO: Implement wrapper
                WrapperType.IniFile => null,// TODO: Implement wrapper
                WrapperType.InstallShieldArchiveV3 => null,// TODO: Implement wrapper
                WrapperType.InstallShieldArchiveV3 => InstallShieldArchiveV3.Create(data),
                WrapperType.InstallShieldCAB => InstallShieldCabinet.Create(data),
                WrapperType.LDSCRYPT => null,// TODO: Implement wrapper
                WrapperType.LZKWAJ => LZKWAJ.Create(data),
                WrapperType.LZQBasic => LZQBasic.Create(data),
                WrapperType.LZSZDD => LZSZDD.Create(data),
                WrapperType.MicrosoftCAB => MicrosoftCabinet.Create(data),
                WrapperType.MicrosoftLZ => null,// TODO: Implement wrapper
                WrapperType.MoPaQ => MoPaQ.Create(data),
                WrapperType.N3DS => N3DS.Create(data),
                WrapperType.NCF => NCF.Create(data),
@@ -330,6 +332,19 @@ namespace SabreTools.Serialization.Wrappers

            #endregion

            #region LZ

            if (magic.StartsWith([0x4B, 0x57, 0x41, 0x4A, 0x88, 0xF0, 0x27, 0xD1]))
                return WrapperType.LZKWAJ;

            if (magic.StartsWith([0x53, 0x5A, 0x20, 0x88, 0xF0, 0x27, 0x33, 0xD1]))
                return WrapperType.LZQBasic;

            if (magic.StartsWith([0x53, 0x5A, 0x44, 0x44, 0x88, 0xF0, 0x27, 0x33]))
                return WrapperType.LZSZDD;

            #endregion

            #region MicrosoftCAB

            if (magic.StartsWith([0x4d, 0x53, 0x43, 0x46]))
@@ -339,13 +354,6 @@ namespace SabreTools.Serialization.Wrappers

            #endregion

            #region MicrosoftLZ

            if (magic.StartsWith([0x53, 0x5a, 0x44, 0x44, 0x88, 0xf0, 0x27, 0x33]))
                return WrapperType.MicrosoftLZ;

            #endregion

            #region MoPaQ

            if (magic.StartsWith([0x4d, 0x50, 0x51, 0x1a]))

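The three signatures added here are the fixed magic prefixes of the KWAJ, QBasic, and SZDD containers, and the old MicrosoftLZ check on the SZDD bytes is dropped in favor of them. A self-contained check against the same byte sequences, for illustration only (it uses a local prefix helper instead of the project's StartsWith extension, and the input path is hypothetical):

using System;
using System.IO;

class LzMagicExample
{
    static readonly byte[] Kwaj   = [0x4B, 0x57, 0x41, 0x4A, 0x88, 0xF0, 0x27, 0xD1];
    static readonly byte[] QBasic = [0x53, 0x5A, 0x20, 0x88, 0xF0, 0x27, 0x33, 0xD1];
    static readonly byte[] Szdd   = [0x53, 0x5A, 0x44, 0x44, 0x88, 0xF0, 0x27, 0x33];

    // Returns true if data begins with the given prefix
    static bool HasPrefix(byte[] data, byte[] prefix)
    {
        if (data.Length < prefix.Length)
            return false;
        for (int i = 0; i < prefix.Length; i++)
        {
            if (data[i] != prefix[i])
                return false;
        }
        return true;
    }

    static void Main()
    {
        // Read only the first 8 bytes of a hypothetical input file
        byte[] header = new byte[8];
        using (FileStream fs = File.OpenRead("sample.ex_"))
        {
            if (fs.Read(header, 0, header.Length) < header.Length)
            {
                Console.WriteLine("Too short to identify");
                return;
            }
        }

        if (HasPrefix(header, Kwaj))
            Console.WriteLine("LZKWAJ");
        else if (HasPrefix(header, QBasic))
            Console.WriteLine("LZQBasic");
        else if (HasPrefix(header, Szdd))
            Console.WriteLine("LZSZDD");
        else
            Console.WriteLine("Not an LZ variant");
    }
}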
@@ -77,7 +77,6 @@ namespace SabreTools.Serialization.Wrappers
        /// <summary>
        /// InstallShield archive v3
        /// </summary>
        /// <remarks>Currently has no IWrapper implementation</remarks>
        InstallShieldArchiveV3,

        /// <summary>
@@ -91,17 +90,26 @@ namespace SabreTools.Serialization.Wrappers
        /// <remarks>Currently has no IWrapper implementation</remarks>
        LDSCRYPT,

        /// <summary>
        /// LZ-compressed file, KWAJ variant
        /// </summary>
        LZKWAJ,

        /// <summary>
        /// LZ-compressed file, QBasic variant
        /// </summary>
        LZQBasic,

        /// <summary>
        /// LZ-compressed file, SZDD variant
        /// </summary>
        LZSZDD,

        /// <summary>
        /// Microsoft cabinet file
        /// </summary>
        MicrosoftCAB,

        /// <summary>
        /// Microsoft LZ-compressed file
        /// </summary>
        /// <remarks>Currently has no IWrapper implementation</remarks>
        MicrosoftLZ,

        /// <summary>
        /// MPQ game data archive
        /// </summary>

@@ -10,13 +10,17 @@

# Optional parameters
USE_ALL=false
INCLUDE_DEBUG=false
NO_BUILD=false
NO_ARCHIVE=false
while getopts "uba" OPTION; do
while getopts "udba" OPTION; do
case $OPTION in
u)
USE_ALL=true
;;
d)
INCLUDE_DEBUG=true
;;
b)
NO_BUILD=true
;;
@@ -39,6 +43,7 @@ COMMIT=$(git log --pretty=%H -1)
# Output the selected options
echo "Selected Options:"
echo " Use all frameworks (-u) $USE_ALL"
echo " Include debug builds (-d) $INCLUDE_DEBUG"
echo " No build (-b) $NO_BUILD"
echo " No archive (-a) $NO_ARCHIVE"
echo " "
@@ -91,14 +96,14 @@ if [ $NO_BUILD = false ]; then

# Only .NET 5 and above can publish to a single file
if [[ $(echo ${SINGLE_FILE_CAPABLE[@]} | fgrep -w $FRAMEWORK) ]]; then
# Only include Debug if building all
if [ $USE_ALL = true ]; then
# Only include Debug if set
if [ $INCLUDE_DEBUG = true ]; then
dotnet publish InfoPrint/InfoPrint.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true
fi
dotnet publish InfoPrint/InfoPrint.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true -p:DebugType=None -p:DebugSymbols=false
else
# Only include Debug if building all
if [ $USE_ALL = true ]; then
# Only include Debug if set
if [ $INCLUDE_DEBUG = true ]; then
dotnet publish InfoPrint/InfoPrint.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT
fi
dotnet publish InfoPrint/InfoPrint.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:DebugType=None -p:DebugSymbols=false
@@ -131,8 +136,8 @@ if [ $NO_ARCHIVE = false ]; then
fi
fi

# Only include Debug if building all
if [ $USE_ALL = true ]; then
# Only include Debug if set
if [ $INCLUDE_DEBUG = true ]; then
cd $BUILD_FOLDER/InfoPrint/bin/Debug/${FRAMEWORK}/${RUNTIME}/publish/
zip -r $BUILD_FOLDER/InfoPrint_${FRAMEWORK}_${RUNTIME}_debug.zip .
fi

@@ -12,6 +12,10 @@ param(
[Alias("UseAll")]
[switch]$USE_ALL,

[Parameter(Mandatory = $false)]
[Alias("IncludeDebug")]
[switch]$INCLUDE_DEBUG,

[Parameter(Mandatory = $false)]
[Alias("NoBuild")]
[switch]$NO_BUILD,
@@ -30,6 +34,7 @@ $COMMIT = git log --pretty=format:"%H" -1
# Output the selected options
Write-Host "Selected Options:"
Write-Host " Use all frameworks (-UseAll) $USE_ALL"
Write-Host " Include debug builds (-IncludeDebug) $INCLUDE_DEBUG"
Write-Host " No build (-NoBuild) $NO_BUILD"
Write-Host " No archive (-NoArchive) $NO_ARCHIVE"
Write-Host " "
@@ -78,15 +83,15 @@ if (!$NO_BUILD.IsPresent) {

# Only .NET 5 and above can publish to a single file
if ($SINGLE_FILE_CAPABLE -contains $FRAMEWORK) {
# Only include Debug if building all
if ($USE_ALL.IsPresent) {
# Only include Debug if set
if ($INCLUDE_DEBUG.IsPresent) {
dotnet publish InfoPrint\InfoPrint.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true
}
dotnet publish InfoPrint\InfoPrint.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true -p:DebugType=None -p:DebugSymbols=false
}
else {
# Only include Debug if building all
if ($USE_ALL.IsPresent) {
# Only include Debug if set
if ($INCLUDE_DEBUG.IsPresent) {
dotnet publish InfoPrint\InfoPrint.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT
}
dotnet publish InfoPrint\InfoPrint.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:DebugType=None -p:DebugSymbols=false
@@ -115,8 +120,8 @@ if (!$NO_ARCHIVE.IsPresent) {
continue
}

# Only include Debug if building all
if ($USE_ALL.IsPresent) {
# Only include Debug if set
if ($INCLUDE_DEBUG.IsPresent) {
Set-Location -Path $BUILD_FOLDER\InfoPrint\bin\Debug\${FRAMEWORK}\${RUNTIME}\publish\
7z a -tzip $BUILD_FOLDER\InfoPrint_${FRAMEWORK}_${RUNTIME}_debug.zip *
}