Mirror of https://github.com/SabreTools/SabreTools.IO.git (synced 2026-02-10 05:44:31 +00:00)
Compare commits
56 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 5054aeb077 | |
| | d2e9b8d6e5 | |
| | 2c29aee834 | |
| | 576bafcb87 | |
| | 2b310ac528 | |
| | 4f6b6d7b59 | |
| | 17e55ee233 | |
| | 8b78906d1d | |
| | cff2dcf4cc | |
| | a56942cb73 | |
| | 5ed661b77c | |
| | a0a0cd0386 | |
| | bcc0fca4ad | |
| | 843e821e5f | |
| | 630b01283e | |
| | 22abb96013 | |
| | 314de12661 | |
| | a0b24031b5 | |
| | b4628485c3 | |
| | 4610ddc9b9 | |
| | e392ddc8d7 | |
| | 1908d1b32e | |
| | 9d73195f86 | |
| | 335a486f17 | |
| | d3e41ac187 | |
| | 8ddd9f3f78 | |
| | 54ad538c08 | |
| | e6bc9ab3e3 | |
| | 94934b00a9 | |
| | e49f56fccc | |
| | 79c64ddfa8 | |
| | b22384d5f3 | |
| | 955c1b5641 | |
| | 535f9f928d | |
| | f0cb15c2e4 | |
| | ec99304c51 | |
| | aefc931055 | |
| | e7fe342379 | |
| | f372999b1b | |
| | 2679975945 | |
| | 54dd7f2f8f | |
| | aee5891c50 | |
| | b81d3314ea | |
| | 4a3ffa5f90 | |
| | a20c7529d6 | |
| | baea5cb0d7 | |
| | 659674dd4a | |
| | 5c199a143b | |
| | 99ec814808 | |
| | ea1f02798c | |
| | e3d4cc5e45 | |
| | c98eb5c42a | |
| | d0392be2d8 | |
| | 8761629828 | |
| | a3b258dfeb | |
| | f7505effa1 | |
LICENSE (new file, 7 lines)

@@ -0,0 +1,7 @@
Copyright (c) 2018-2025 Matt Nadareski

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -1,5 +1,6 @@
using System;
using System.Linq;
using System.Text;
using SabreTools.IO.Extensions;
using Xunit;

@@ -7,7 +8,7 @@ namespace SabreTools.IO.Test.Extensions
{
    public class ByteArrayExtensionsTests
    {
        #region Is Null or Empty
        #region IsNullOrEmpty

        [Fact]
        public void IsNullOrEmpty_Null_True()
@@ -35,7 +36,7 @@ namespace SabreTools.IO.Test.Extensions

        #endregion

        #region To Hex String
        #region ToHexString

        [Fact]
        public void ToHexString_Null()
@@ -58,7 +59,7 @@ namespace SabreTools.IO.Test.Extensions

        #endregion

        #region From Hex String
        #region FromHexString

        [Fact]
        public void FromHexString_Null()
@@ -88,5 +89,385 @@ namespace SabreTools.IO.Test.Extensions
        }

        #endregion

        #region ReadStringsFrom

        [Fact]
        public void ReadStringsFrom_Null_Null()
        {
            byte[]? arr = null;
            var actual = arr.ReadStringsFrom(3);
            Assert.Null(actual);
        }

        [Fact]
        public void ReadStringsFrom_Empty_Null()
        {
            byte[]? arr = [];
            var actual = arr.ReadStringsFrom(3);
            Assert.Null(actual);
        }

        [Theory]
        [InlineData(-1)]
        [InlineData(0)]
        [InlineData(2048)]
        public void ReadStringsFrom_InvalidLimit_Empty(int charLimit)
        {
            byte[]? arr = new byte[1024];
            var actual = arr.ReadStringsFrom(charLimit);
            Assert.NotNull(actual);
            Assert.Empty(actual);
        }

        [Fact]
        public void ReadStringsFrom_NoValidStrings_Empty()
        {
            byte[]? arr = new byte[1024];
            var actual = arr.ReadStringsFrom(4);
            Assert.NotNull(actual);
            Assert.Empty(actual);
        }

        [Fact]
        public void ReadStringsFrom_AsciiStrings_Filled()
        {
            byte[]? arr =
            [
                .. Encoding.ASCII.GetBytes("TEST"),
                .. new byte[] { 0x00 },
                .. Encoding.ASCII.GetBytes("TWO"),
                .. new byte[] { 0x00 },
                .. Encoding.ASCII.GetBytes("DATA"),
                .. new byte[] { 0x00 },
            ];
            var actual = arr.ReadStringsFrom(4);
            Assert.NotNull(actual);
            Assert.Equal(2, actual.Count);
        }

        [Fact]
        public void ReadStringsFrom_Latin1Strings_Filled()
        {
            byte[]? arr =
            [
                .. Encoding.Latin1.GetBytes("TEST"),
                .. new byte[] { 0x00 },
                .. Encoding.Latin1.GetBytes("TWO"),
                .. new byte[] { 0x00 },
                .. Encoding.Latin1.GetBytes("DATA"),
                .. new byte[] { 0x00 },
            ];
            var actual = arr.ReadStringsFrom(4);
            Assert.NotNull(actual);
            Assert.Equal(2, actual.Count);
        }

        [Fact]
        public void ReadStringsFrom_UTF16_Filled()
        {
            byte[]? arr =
            [
                .. Encoding.Unicode.GetBytes("TEST"),
                .. new byte[] { 0x00 },
                .. Encoding.Unicode.GetBytes("TWO"),
                .. new byte[] { 0x00 },
                .. Encoding.Unicode.GetBytes("DATA"),
                .. new byte[] { 0x00 },
            ];
            var actual = arr.ReadStringsFrom(4);
            Assert.NotNull(actual);
            Assert.Equal(2, actual.Count);
        }

        [Fact]
        public void ReadStringsFrom_Mixed_Filled()
        {
            byte[]? arr =
            [
                .. Encoding.ASCII.GetBytes("TEST1"),
                .. new byte[] { 0x00 },
                .. Encoding.ASCII.GetBytes("TWO1"),
                .. new byte[] { 0x00 },
                .. Encoding.ASCII.GetBytes("DATA1"),
                .. new byte[] { 0x00 },
                .. Encoding.Latin1.GetBytes("TEST2"),
                .. new byte[] { 0x00 },
                .. Encoding.Latin1.GetBytes("TWO2"),
                .. new byte[] { 0x00 },
                .. Encoding.Latin1.GetBytes("DATA2"),
                .. new byte[] { 0x00 },
                .. Encoding.Unicode.GetBytes("TEST3"),
                .. new byte[] { 0x00 },
                .. Encoding.Unicode.GetBytes("TWO3"),
                .. new byte[] { 0x00 },
                .. Encoding.Unicode.GetBytes("DATA3"),
                .. new byte[] { 0x00 },
            ];
            var actual = arr.ReadStringsFrom(5);
            Assert.NotNull(actual);
            Assert.Equal(6, actual.Count);
        }

        /// <summary>
        /// This test is here mainly for performance testing
        /// and should not be enabled unless there are changes
        /// to the core reading methods that need comparison.
        /// </summary>
        // [Fact]
        // public void ReadStringsFrom_Mixed_MASSIVE()
        // {
        //     byte[]? arr =
        //     [
        //         .. Encoding.ASCII.GetBytes("TEST1"),
        //         .. new byte[] { 0x00 },
        //         .. Encoding.ASCII.GetBytes("TWO1"),
        //         .. new byte[] { 0x00 },
        //         .. Encoding.ASCII.GetBytes("DATA1"),
        //         .. new byte[] { 0x00 },
        //         .. Encoding.UTF8.GetBytes("TEST2"),
        //         .. new byte[] { 0x00 },
        //         .. Encoding.UTF8.GetBytes("TWO2"),
        //         .. new byte[] { 0x00 },
        //         .. Encoding.UTF8.GetBytes("DATA2"),
        //         .. new byte[] { 0x00 },
        //         .. Encoding.Unicode.GetBytes("TEST3"),
        //         .. new byte[] { 0x00 },
        //         .. Encoding.Unicode.GetBytes("TWO3"),
        //         .. new byte[] { 0x00 },
        //         .. Encoding.Unicode.GetBytes("DATA3"),
        //         .. new byte[] { 0x00 },
        //     ];
        //     arr = [.. arr, .. arr, .. arr, .. arr];
        //     arr = [.. arr, .. arr, .. arr, .. arr];
        //     arr = [.. arr, .. arr, .. arr, .. arr];
        //     arr = [.. arr, .. arr, .. arr, .. arr];
        //     arr = [.. arr, .. arr, .. arr, .. arr];
        //     arr = [.. arr, .. arr, .. arr, .. arr];
        //     arr = [.. arr, .. arr, .. arr, .. arr];
        //     arr = [.. arr, .. arr, .. arr, .. arr];
        //     arr = [.. arr, .. arr, .. arr, .. arr];
        //     arr = [.. arr, .. arr, .. arr, .. arr];
        //     // arr = [.. arr, .. arr, .. arr, .. arr];
        //     // arr = [.. arr, .. arr, .. arr, .. arr];

        //     var actual = arr.ReadStringsFrom(5);
        //     Assert.NotNull(actual);
        //     Assert.NotEmpty(actual);
        // }

        #endregion

        #region ReadStringsWithEncoding

        [Fact]
        public void ReadStringsWithEncoding_Null_Empty()
        {
            byte[]? bytes = null;
            var actual = bytes.ReadStringsWithEncoding(1, Encoding.ASCII);
            Assert.Empty(actual);
        }

        [Fact]
        public void ReadStringsWithEncoding_Empty_Empty()
        {
            byte[]? bytes = [];
            var actual = bytes.ReadStringsWithEncoding(1, Encoding.ASCII);
            Assert.Empty(actual);
        }

        [Theory]
        [InlineData(-1)]
        [InlineData(0)]
        [InlineData(2048)]
        public void ReadStringsWithEncoding_InvalidLimit_Empty(int charLimit)
        {
            byte[]? bytes = new byte[1024];
            var actual = bytes.ReadStringsWithEncoding(charLimit, Encoding.ASCII);
            Assert.Empty(actual);
        }

        [Fact]
        public void ReadStringsWithEncoding_NoValidStrings_Empty()
        {
            byte[]? bytes = new byte[1024];
            var actual = bytes.ReadStringsWithEncoding(5, Encoding.ASCII);
            Assert.Empty(actual);
        }

        [Fact]
        public void ReadStringsWithEncoding_AsciiStrings_Filled()
        {
            byte[]? bytes =
            [
                .. Encoding.ASCII.GetBytes("TEST"),
                .. new byte[] { 0x00 },
                .. Encoding.ASCII.GetBytes("ONE"),
                .. new byte[] { 0x00 },
                .. Encoding.ASCII.GetBytes("TWO"),
                .. new byte[] { 0x00 },
                .. Encoding.ASCII.GetBytes("DATA"),
                .. new byte[] { 0x00 },
            ];
            var actual = bytes.ReadStringsWithEncoding(4, Encoding.ASCII);
            Assert.Equal(2, actual.Count);
        }

        [Fact]
        public void ReadStringsWithEncoding_InvalidAsciiChars_Empty()
        {
            byte[]? arr =
            [
                0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
                0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F,
                0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
                0x18, 0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F,
                .. Enumerable.Range(0x80, 0x80).Select(i => (byte)i),
            ];
            var actual = arr.ReadStringsWithEncoding(1, Encoding.ASCII);
            Assert.NotNull(actual);
            Assert.Empty(actual);
        }

        [Fact]
        public void ReadStringsWithEncoding_Latin1_Filled()
        {
            byte[]? bytes =
            [
                .. Encoding.Latin1.GetBytes("TEST"),
                .. new byte[] { 0x00 },
                .. Encoding.Latin1.GetBytes("ONE"),
                .. new byte[] { 0x00 },
                .. Encoding.Latin1.GetBytes("TWO"),
                .. new byte[] { 0x00 },
                .. Encoding.Latin1.GetBytes("DATA"),
                .. new byte[] { 0x00 },
            ];
            var actual = bytes.ReadStringsWithEncoding(4, Encoding.Latin1);
            Assert.Equal(2, actual.Count);
        }

        [Fact]
        public void ReadStringsWithEncoding_InvalidLatin1Chars_Empty()
        {
            byte[]? arr =
            [
                0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
                0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F,
                0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
                0x18, 0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F,
                0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
                0x88, 0x89, 0x8A, 0x8B, 0x8C, 0x8D, 0x8E, 0x8F,
                0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
                0x98, 0x99, 0x9A, 0x9B, 0x9C, 0x9D, 0x9E, 0x9F,
            ];
            var actual = arr.ReadStringsWithEncoding(1, Encoding.Latin1);
            Assert.NotNull(actual);
            Assert.Empty(actual);
        }

        [Fact]
        public void ReadStringsWithEncoding_UTF8_Filled()
        {
            byte[]? bytes =
            [
                .. Encoding.UTF8.GetBytes("TEST"),
                .. new byte[] { 0x00 },
                .. Encoding.UTF8.GetBytes("ONE"),
                .. new byte[] { 0x00 },
                .. Encoding.UTF8.GetBytes("TWO"),
                .. new byte[] { 0x00 },
                .. Encoding.UTF8.GetBytes("DATA"),
                .. new byte[] { 0x00 },
            ];
            var actual = bytes.ReadStringsWithEncoding(4, Encoding.UTF8);
            Assert.Equal(2, actual.Count);
        }

        [Fact]
        public void ReadStringsWithEncoding_InvalidUTF8Chars_Empty()
        {
            byte[]? arr =
            [
                0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
                0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F,
                0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
                0x18, 0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F,
                .. Enumerable.Range(0x80, 0x42).Select(i => (byte)i),
                0xF5, 0xF6, 0xF7, 0xF8, 0xF9, 0xFA, 0xFB, 0xFC,
                0xFD, 0xFE, 0xFF,
            ];
            var actual = arr.ReadStringsWithEncoding(1, Encoding.UTF8);
            Assert.NotNull(actual);
            Assert.Empty(actual);
        }

        [Fact]
        public void ReadStringsWithEncoding_UTF16_Filled()
        {
            byte[]? bytes =
            [
                .. Encoding.Unicode.GetBytes("TEST"),
                .. new byte[] { 0x00 },
                .. Encoding.Unicode.GetBytes("ONE"),
                .. new byte[] { 0x00 },
                .. Encoding.Unicode.GetBytes("TWO"),
                .. new byte[] { 0x00 },
                .. Encoding.Unicode.GetBytes("DATA"),
                .. new byte[] { 0x00 },
            ];
            var actual = bytes.ReadStringsWithEncoding(4, Encoding.Unicode);
            Assert.Equal(2, actual.Count);
        }

        [Fact]
        public void ReadStringsWithEncoding_InvalidUTF16Chars_Empty()
        {
            byte[]? arr =
            [
                0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
                0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F,
                0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
                0x18, 0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F,
            ];
            var actual = arr.ReadStringsWithEncoding(1, Encoding.Unicode);
            Assert.NotNull(actual);
            Assert.Empty(actual);
        }

        [Fact]
        public void ReadStringsWithEncoding_UTF32_Filled()
        {
            byte[]? bytes =
            [
                .. Encoding.UTF32.GetBytes("TEST"),
                .. new byte[] { 0x00 },
                .. Encoding.UTF32.GetBytes("ONE"),
                .. new byte[] { 0x00 },
                .. Encoding.UTF32.GetBytes("TWO"),
                .. new byte[] { 0x00 },
                .. Encoding.UTF32.GetBytes("DATA"),
                .. new byte[] { 0x00 },
            ];
            var actual = bytes.ReadStringsWithEncoding(4, Encoding.UTF32);
            Assert.Equal(2, actual.Count);
        }

        [Fact]
        public void ReadStringsWithEncoding_InvalidUTF32Chars_Empty()
        {
            byte[]? arr =
            [
                0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
                0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F,
                0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
                0x18, 0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F,
            ];
            var actual = arr.ReadStringsWithEncoding(1, Encoding.UTF32);
            Assert.NotNull(actual);
            Assert.Empty(actual);
        }

        #endregion
    }
}
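For orientation, the ByteArrayExtensions tests above exercise two new helpers: ReadStringsFrom(charLimit), which scans a byte array for ASCII, Latin-1, and UTF-16 strings of at least charLimit characters, and ReadStringsWithEncoding(charLimit, encoding), which restricts the scan to a single encoding. A minimal usage sketch, assuming only the call shapes and null/empty behavior shown in those tests:

```csharp
using System.Text;
using SabreTools.IO.Extensions;

byte[] data =
[
    .. Encoding.ASCII.GetBytes("HEADER"),
    .. new byte[] { 0x00 },
    .. Encoding.Unicode.GetBytes("PAYLOAD"),
    .. new byte[] { 0x00 },
];

// Mixed-encoding scan: per the tests, a null or empty array yields null,
// and an out-of-range character limit yields an empty collection.
var mixed = data.ReadStringsFrom(4);

// Single-encoding scan: the tests show this returns a (possibly empty)
// collection even for a null input.
var asciiOnly = data.ReadStringsWithEncoding(4, Encoding.ASCII);
```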
@@ -2,6 +2,7 @@ using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using SabreTools.IO.Extensions;
using Xunit;

@@ -9,6 +10,22 @@ namespace SabreTools.IO.Test.Extensions
{
    public class EnumerableExtensionsTests
    {
        #region IterateWithAction

        [Fact]
        public void IterateWithActionTest()
        {
            List<int> source = [1, 2, 3, 4];
            int actual = 0;

            source.IterateWithAction(i => Interlocked.Add(ref actual, i));
            Assert.Equal(10, actual);
        }

        #endregion

        #region SafeEnumerate

        [Fact]
        public void SafeEnumerate_Empty()
        {
@@ -60,6 +77,8 @@ namespace SabreTools.IO.Test.Extensions
            Assert.Equal(2, list.Count);
        }

        #endregion

        /// <summary>
        /// Fake enumerable that uses <see cref="ErrorEnumerator"/>
        /// </summary>
@@ -1,5 +1,6 @@
using System;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using Xunit;

@@ -7,7 +8,7 @@ namespace SabreTools.IO.Test.Extensions
{
    public class StreamExtensionsTests
    {
        #region Align to Boundary
        #region AlignToBoundary

        [Fact]
        public void AlignToBoundary_Null_False()
@@ -62,7 +63,215 @@ namespace SabreTools.IO.Test.Extensions

        #endregion

        #region Seek If Possible
        #region ReadFrom

        [Theory]
        [InlineData(true)]
        [InlineData(false)]
        public void ReadFrom_Null_Null(bool retainPosition)
        {
            Stream? stream = null;
            byte[]? actual = stream.ReadFrom(0, 1, retainPosition);
            Assert.Null(actual);
        }

        [Theory]
        [InlineData(true)]
        [InlineData(false)]
        public void ReadFrom_NonSeekable_Null(bool retainPosition)
        {
            Stream? stream = new NonSeekableStream();
            byte[]? actual = stream.ReadFrom(0, 1, retainPosition);
            Assert.Null(actual);
        }

        [Theory]
        [InlineData(true)]
        [InlineData(false)]
        public void ReadFrom_Empty_Null(bool retainPosition)
        {
            Stream? stream = new MemoryStream([]);
            byte[]? actual = stream.ReadFrom(0, 1, retainPosition);
            Assert.Null(actual);
        }

        [Theory]
        [InlineData(-1, true)]
        [InlineData(2048, true)]
        [InlineData(-1, false)]
        [InlineData(2048, false)]
        public void ReadFrom_InvalidOffset_Null(long offset, bool retainPosition)
        {
            Stream? stream = new MemoryStream(new byte[1024]);
            byte[]? actual = stream.ReadFrom(offset, 1, retainPosition);
            Assert.Null(actual);
        }

        [Theory]
        [InlineData(-1, true)]
        [InlineData(2048, true)]
        [InlineData(-1, false)]
        [InlineData(2048, false)]
        public void ReadFrom_InvalidLength_Null(int length, bool retainPosition)
        {
            Stream? stream = new MemoryStream(new byte[1024]);
            byte[]? actual = stream.ReadFrom(0, length, retainPosition);
            Assert.Null(actual);
        }

        [Theory]
        [InlineData(true)]
        [InlineData(false)]
        public void ReadFrom_Valid_Filled(bool retainPosition)
        {
            Stream? stream = new MemoryStream(new byte[1024]);
            byte[]? actual = stream.ReadFrom(0, 512, retainPosition);

            Assert.NotNull(actual);
            Assert.Equal(512, actual.Length);

            if (retainPosition)
                Assert.Equal(0, stream.Position);
            else
                Assert.Equal(512, stream.Position);
        }

        #endregion

        #region ReadStringsFrom

        [Fact]
        public void ReadStringsFrom_Null_Null()
        {
            Stream? stream = null;
            var actual = stream.ReadStringsFrom(0, 1, 3);
            Assert.Null(actual);
        }

        [Fact]
        public void ReadStringsFrom_NonSeekable_Null()
        {
            Stream? stream = new NonSeekableStream();
            var actual = stream.ReadStringsFrom(0, 1, 3);
            Assert.Null(actual);
        }

        [Fact]
        public void ReadStringsFrom_Empty_Null()
        {
            Stream? stream = new MemoryStream([]);
            var actual = stream.ReadStringsFrom(0, 1, 3);
            Assert.Null(actual);
        }

        [Theory]
        [InlineData(-1)]
        [InlineData(0)]
        [InlineData(2048)]
        public void ReadStringsFrom_InvalidLimit_Empty(int charLimit)
        {
            Stream? stream = new MemoryStream(new byte[1024]);
            var actual = stream.ReadStringsFrom(0, 1024, charLimit);
            Assert.NotNull(actual);
            Assert.Empty(actual);
        }

        [Fact]
        public void ReadStringsFrom_NoValidStrings_Empty()
        {
            Stream? stream = new MemoryStream(new byte[1024]);
            var actual = stream.ReadStringsFrom(0, 1024, 4);
            Assert.NotNull(actual);
            Assert.Empty(actual);
        }

        [Fact]
        public void ReadStringsFrom_AsciiStrings_Filled()
        {
            byte[]? bytes =
            [
                .. Encoding.ASCII.GetBytes("TEST"),
                .. new byte[] { 0x00 },
                .. Encoding.ASCII.GetBytes("TWO"),
                .. new byte[] { 0x00 },
                .. Encoding.ASCII.GetBytes("DATA"),
                .. new byte[] { 0x00 },
            ];
            Stream? stream = new MemoryStream(bytes);
            var actual = stream.ReadStringsFrom(0, bytes.Length, 4);
            Assert.NotNull(actual);
            Assert.Equal(2, actual.Count);
        }

        [Fact]
        public void ReadStringsFrom_Latin1Strings_Filled()
        {
            byte[]? bytes =
            [
                .. Encoding.Latin1.GetBytes("TEST"),
                .. new byte[] { 0x00 },
                .. Encoding.Latin1.GetBytes("TWO"),
                .. new byte[] { 0x00 },
                .. Encoding.Latin1.GetBytes("DATA"),
                .. new byte[] { 0x00 },
            ];
            Stream? stream = new MemoryStream(bytes);
            var actual = stream.ReadStringsFrom(0, bytes.Length, 4);
            Assert.NotNull(actual);
            Assert.Equal(2, actual.Count);
        }

        [Fact]
        public void ReadStringsFrom_UTF16_Filled()
        {
            byte[]? bytes =
            [
                .. Encoding.Unicode.GetBytes("TEST"),
                .. new byte[] { 0x00 },
                .. Encoding.Unicode.GetBytes("TWO"),
                .. new byte[] { 0x00 },
                .. Encoding.Unicode.GetBytes("DATA"),
                .. new byte[] { 0x00 },
            ];
            Stream? stream = new MemoryStream(bytes);
            var actual = stream.ReadStringsFrom(0, bytes.Length, 4);
            Assert.NotNull(actual);
            Assert.Equal(2, actual.Count);
        }

        [Fact]
        public void ReadStringsFrom_Mixed_Filled()
        {
            byte[]? bytes =
            [
                .. Encoding.ASCII.GetBytes("TEST1"),
                .. new byte[] { 0x00 },
                .. Encoding.ASCII.GetBytes("TWO1"),
                .. new byte[] { 0x00 },
                .. Encoding.ASCII.GetBytes("DATA1"),
                .. new byte[] { 0x00 },
                .. Encoding.Latin1.GetBytes("TEST2"),
                .. new byte[] { 0x00 },
                .. Encoding.Latin1.GetBytes("TWO2"),
                .. new byte[] { 0x00 },
                .. Encoding.Latin1.GetBytes("DATA2"),
                .. new byte[] { 0x00 },
                .. Encoding.Unicode.GetBytes("TEST3"),
                .. new byte[] { 0x00 },
                .. Encoding.Unicode.GetBytes("TWO3"),
                .. new byte[] { 0x00 },
                .. Encoding.Unicode.GetBytes("DATA3"),
                .. new byte[] { 0x00 },
            ];
            Stream? stream = new MemoryStream(bytes);
            var actual = stream.ReadStringsFrom(0, bytes.Length, 5);
            Assert.NotNull(actual);
            Assert.Equal(6, actual.Count);
        }

        #endregion

        #region SeekIfPossible

        [Fact]
        public void SeekIfPossible_NonSeekable_CurrentPosition()
@@ -106,6 +315,46 @@ namespace SabreTools.IO.Test.Extensions

        #endregion

        #region SegmentValid

        [Fact]
        public void SegmentValid_Null_False()
        {
            Stream? stream = null;
            bool actual = stream.SegmentValid(0, 1);
            Assert.False(actual);
        }

        [Theory]
        [InlineData(-1)]
        [InlineData(2048)]
        public void SegmentValid_InvalidOffset_False(long offset)
        {
            Stream? stream = new MemoryStream(new byte[1024]);
            bool actual = stream.SegmentValid(offset, 1);
            Assert.False(actual);
        }

        [Theory]
        [InlineData(-1)]
        [InlineData(2048)]
        public void SegmentValid_InvalidLength_False(int length)
        {
            Stream? stream = new MemoryStream(new byte[1024]);
            bool actual = stream.SegmentValid(0, length);
            Assert.False(actual);
        }

        [Fact]
        public void SegmentValid_ValidSegment_True()
        {
            Stream? stream = new MemoryStream(new byte[1024]);
            bool actual = stream.SegmentValid(0, 1024);
            Assert.True(actual);
        }

        #endregion

        /// <summary>
        /// Represents a hidden non-seekable stream
        /// </summary>
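The stream-side additions mirror the byte-array helpers and add a segment check. A sketch of how the new extensions appear to be called, based only on the signatures exercised above (the input file name here is a placeholder):

```csharp
using System.IO;
using SabreTools.IO.Extensions;

using Stream stream = File.OpenRead("example.bin"); // placeholder path

// Validate an (offset, length) segment before touching it
if (stream.SegmentValid(0, 512))
{
    // Read 512 bytes from offset 0; the final argument controls whether
    // the original stream position is restored after the read.
    byte[]? block = stream.ReadFrom(0, 512, true);

    // Scan the same window for strings of at least 4 characters
    var strings = stream.ReadStringsFrom(0, 512, 4);
}
```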
SabreTools.IO.Test/Extensions/StringExtensionsTests.cs (new file, 76 lines)

@@ -0,0 +1,76 @@
using SabreTools.IO.Extensions;
using Xunit;

namespace SabreTools.IO.Test.Extensions
{
    public class StringExtensionsTests
    {
        #region OptionalContains

        [Theory]
        [InlineData(null, "ANY", false)]
        [InlineData("", "ANY", false)]
        [InlineData("ANY", "ANY", true)]
        [InlineData("ANYTHING", "ANY", true)]
        [InlineData("THING", "ANY", false)]
        [InlineData("THINGANY", "ANY", true)]
        public void OptionalContainsTest(string? haystack, string needle, bool expected)
        {
            bool actual = haystack.OptionalContains(needle);
            Assert.Equal(expected, actual);
        }

        #endregion

        #region OptionalEndsWith

        [Theory]
        [InlineData(null, "ANY", false)]
        [InlineData("", "ANY", false)]
        [InlineData("ANY", "ANY", true)]
        [InlineData("ANYTHING", "ANY", false)]
        [InlineData("THING", "ANY", false)]
        [InlineData("THINGANY", "ANY", true)]
        public void OptionalEndsWithTest(string? haystack, string needle, bool expected)
        {
            bool actual = haystack.OptionalEndsWith(needle);
            Assert.Equal(expected, actual);
        }

        #endregion

        #region OptionalEquals

        [Theory]
        [InlineData(null, "ANY", false)]
        [InlineData("", "ANY", false)]
        [InlineData("ANY", "ANY", true)]
        [InlineData("ANYTHING", "ANY", false)]
        [InlineData("THING", "ANY", false)]
        [InlineData("THINGANY", "ANY", false)]
        public void OptionalEqualsTest(string? haystack, string needle, bool expected)
        {
            bool actual = haystack.OptionalEquals(needle);
            Assert.Equal(expected, actual);
        }

        #endregion

        #region OptionalStartsWith

        [Theory]
        [InlineData(null, "ANY", false)]
        [InlineData("", "ANY", false)]
        [InlineData("ANY", "ANY", true)]
        [InlineData("ANYTHING", "ANY", true)]
        [InlineData("THING", "ANY", false)]
        [InlineData("THINGANY", "ANY", false)]
        public void OptionalStartsWithTest(string? haystack, string needle, bool expected)
        {
            bool actual = haystack.OptionalStartsWith(needle);
            Assert.Equal(expected, actual);
        }

        #endregion
    }
}
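The Optional* helpers tested above read as null-safe wrappers over the usual string checks: a null or empty haystack yields false instead of throwing. A short sketch under that assumption:

```csharp
using SabreTools.IO.Extensions;

string? value = null;

// Each of these returns false rather than throwing on a null receiver
bool contains = value.OptionalContains("ANY");
bool equal = value.OptionalEquals("ANY");
bool starts = value.OptionalStartsWith("ANY");
bool ends = value.OptionalEndsWith("ANY");
```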
@@ -26,7 +26,7 @@
    </PackageReference>
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" />
    <PackageReference Include="xunit" Version="2.9.3" />
    <PackageReference Include="xunit.runner.visualstudio" Version="3.1.3">
    <PackageReference Include="xunit.runner.visualstudio" Version="3.1.4">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
SabreTools.IO.Test/Streams/BufferedStreamTests.cs (new file, 40 lines)

@@ -0,0 +1,40 @@
using System.IO;
using Xunit;

namespace SabreTools.IO.Test.Streams
{
    public class BufferedStreamTests
    {
        #region ReadNextByte

        [Fact]
        public void ReadNextByte_Empty_Null()
        {
            var source = new MemoryStream();
            var stream = new IO.Streams.BufferedStream(source);
            byte? actual = stream.ReadNextByte();
            Assert.Null(actual);
        }

        [Fact]
        public void ReadNextByte_Filled_ValidPosition_Byte()
        {
            var source = new MemoryStream(new byte[1024]);
            var stream = new IO.Streams.BufferedStream(source);
            byte? actual = stream.ReadNextByte();
            Assert.Equal((byte)0x00, actual);
        }

        [Fact]
        public void ReadNextByte_Filled_InvalidPosition_Null()
        {
            var source = new MemoryStream(new byte[1024]);
            source.Seek(0, SeekOrigin.End);
            var stream = new IO.Streams.BufferedStream(source);
            byte? actual = stream.ReadNextByte();
            Assert.Null(actual);
        }

        #endregion
    }
}
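ReadNextByte, as exercised above, returns a nullable byte and reports the end of the underlying data as null, so a simple drain loop is possible. A minimal sketch assuming only that behavior:

```csharp
using System;
using System.IO;

var source = new MemoryStream(new byte[] { 0x01, 0x02, 0x03 });
var buffered = new SabreTools.IO.Streams.BufferedStream(source);

// Pull bytes until the wrapper signals the end of the source stream
while (buffered.ReadNextByte() is byte b)
{
    Console.WriteLine(b);
}
```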
@@ -1,3 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using SabreTools.IO.Streams;
@@ -7,8 +8,10 @@ namespace SabreTools.IO.Test.Streams
{
    public class ReadOnlyCompositeStreamTests
    {
        #region Constructor

        [Fact]
        public void DefaultConstructorTest()
        public void Constructor_Default()
        {
            var stream = new ReadOnlyCompositeStream();
            Assert.Equal(0, stream.Length);
@@ -16,7 +19,7 @@ namespace SabreTools.IO.Test.Streams
        }

        [Fact]
        public void EmptyArrayConstructorTest()
        public void Constructor_EmptyArray()
        {
            Stream[] arr = [new MemoryStream()];
            var stream = new ReadOnlyCompositeStream(arr);
@@ -25,9 +28,8 @@ namespace SabreTools.IO.Test.Streams
        }

        [Fact]
        public void EmptyEnumerableConstructorTest()
        public void Constructor_EmptyEnumerable()
        {
            // Empty enumerable constructor
            List<Stream> list = [new MemoryStream()];
            var stream = new ReadOnlyCompositeStream(list);
            Assert.Equal(0, stream.Length);
@@ -35,7 +37,7 @@ namespace SabreTools.IO.Test.Streams
        }

        [Fact]
        public void SingleStreamConstructorTest()
        public void Constructor_SingleStream()
        {
            var stream = new ReadOnlyCompositeStream(new MemoryStream(new byte[1024]));
            Assert.Equal(1024, stream.Length);
@@ -43,7 +45,7 @@ namespace SabreTools.IO.Test.Streams
        }

        [Fact]
        public void FilledArrayConstructorTest()
        public void Constructor_FilledArray()
        {
            Stream[] arr = [new MemoryStream(new byte[1024]), new MemoryStream(new byte[1024])];
            var stream = new ReadOnlyCompositeStream(arr);
@@ -52,7 +54,7 @@ namespace SabreTools.IO.Test.Streams
        }

        [Fact]
        public void FilledEnumerableConstructorTest()
        public void Constructor_FilledEnumerable()
        {
            List<Stream> list = [new MemoryStream(new byte[1024]), new MemoryStream(new byte[1024])];
            var stream = new ReadOnlyCompositeStream(list);
@@ -60,6 +62,10 @@ namespace SabreTools.IO.Test.Streams
            Assert.Equal(0, stream.Position);
        }

        #endregion

        #region AddStream

        [Fact]
        public void AddStreamTest()
        {
@@ -70,10 +76,18 @@ namespace SabreTools.IO.Test.Streams
            stream.AddStream(new MemoryStream(new byte[1024]));
            Assert.Equal(1024, stream.Length);
            Assert.Equal(0, stream.Position);

            stream.AddStream(new MemoryStream([]));
            Assert.Equal(1024, stream.Length);
            Assert.Equal(0, stream.Position);
        }

        #endregion

        #region Read

        [Fact]
        public void EmptyStreamReadTest()
        public void Read_EmptyStream()
        {
            var stream = new ReadOnlyCompositeStream();

@@ -84,7 +98,7 @@ namespace SabreTools.IO.Test.Streams
        }

        [Fact]
        public void SingleStreamReadTest()
        public void Read_SingleStream()
        {
            Stream[] arr = [new MemoryStream(new byte[1024])];
            var stream = new ReadOnlyCompositeStream(arr);
@@ -96,7 +110,7 @@ namespace SabreTools.IO.Test.Streams
        }

        [Fact]
        public void MultipleStreamSingleContainedReadTest()
        public void Read_MultipleStream_SingleContained()
        {
            Stream[] arr = [new MemoryStream(new byte[1024]), new MemoryStream(new byte[1024])];
            var stream = new ReadOnlyCompositeStream(arr);
@@ -108,7 +122,7 @@ namespace SabreTools.IO.Test.Streams
        }

        [Fact]
        public void MultipleStreamMultipleContainedReadTest()
        public void Read_MultipleStream_MultipleContained()
        {
            Stream[] arr = [new MemoryStream(new byte[256]), new MemoryStream(new byte[256])];
            var stream = new ReadOnlyCompositeStream(arr);
@@ -120,7 +134,7 @@ namespace SabreTools.IO.Test.Streams
        }

        [Fact]
        public void SingleStreamExtraReadTest()
        public void Read_SingleStream_Extra()
        {
            Stream[] arr = [new MemoryStream(new byte[256])];
            var stream = new ReadOnlyCompositeStream(arr);
@@ -132,7 +146,7 @@ namespace SabreTools.IO.Test.Streams
        }

        [Fact]
        public void MultipleStreamExtraReadTest()
        public void Read_MultipleStream_Extra()
        {
            Stream[] arr = [new MemoryStream(new byte[128]), new MemoryStream(new byte[128])];
            var stream = new ReadOnlyCompositeStream(arr);
@@ -142,5 +156,32 @@ namespace SabreTools.IO.Test.Streams

            Assert.Equal(256, read);
        }

        #endregion

        #region Unimplemented

        [Fact]
        public void Flush_Throws()
        {
            var stream = new ReadOnlyCompositeStream();
            Assert.Throws<NotImplementedException>(() => stream.Flush());
        }

        [Fact]
        public void SetLength_Throws()
        {
            var stream = new ReadOnlyCompositeStream();
            Assert.Throws<NotImplementedException>(() => stream.SetLength(0));
        }

        [Fact]
        public void Write_Throws()
        {
            var stream = new ReadOnlyCompositeStream();
            Assert.Throws<NotImplementedException>(() => stream.Write([], 0, 0));
        }

        #endregion
    }
}
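The renamed tests above describe ReadOnlyCompositeStream as a read-only concatenation of backing streams: Length is the sum of the parts, reads cross segment boundaries, and Flush, SetLength, and Write throw NotImplementedException. A usage sketch along those lines:

```csharp
using System.IO;
using SabreTools.IO.Streams;

// Two 256-byte segments exposed as one 512-byte read-only stream
var composite = new ReadOnlyCompositeStream(new Stream[]
{
    new MemoryStream(new byte[256]),
    new MemoryStream(new byte[256]),
});

// Streams can also be appended after construction
composite.AddStream(new MemoryStream(new byte[128]));

// A single read can span the boundary between backing segments
byte[] buffer = new byte[400];
int read = composite.Read(buffer, 0, buffer.Length);
```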
SabreTools.IO.Test/Streams/ViewStreamTests.cs (new file, 419 lines)

@@ -0,0 +1,419 @@
using System;
using System.IO;
using SabreTools.IO.Extensions;
using SabreTools.IO.Streams;
using Xunit;

namespace SabreTools.IO.Test.Streams
{
    public class ViewStreamTests
    {
        #region Constructor

        [Theory]
        [InlineData(0, 0, 0)]
        [InlineData(1024, 0, 1024)]
        [InlineData(1024, 256, 768)]
        public void Constructor_Array(int size, long offset, long expectedLength)
        {
            byte[] data = new byte[size];
            var stream = new ViewStream(data, offset);
            Assert.Equal(expectedLength, stream.Length);
            Assert.Equal(0, stream.Position);
        }

        [Theory]
        [InlineData(0, 0, 0, 0)]
        [InlineData(1024, 0, 1024, 1024)]
        [InlineData(1024, 256, 512, 512)]
        public void Constructor_Array_Length(int size, long offset, long length, long expectedLength)
        {
            byte[] data = new byte[size];
            var stream = new ViewStream(data, offset, length);
            Assert.Equal(expectedLength, stream.Length);
            Assert.Equal(0, stream.Position);
        }

        [Theory]
        [InlineData(0, -1, 0)]
        [InlineData(0, 2048, 0)]
        [InlineData(1024, -1, 1024)]
        [InlineData(1024, 2048, 1024)]
        [InlineData(1024, -1, 512)]
        [InlineData(1024, 2048, 512)]
        public void Constructor_Array_InvalidOffset(int size, long offset, long length)
        {
            byte[] data = new byte[size];
            Assert.Throws<ArgumentOutOfRangeException>(() => _ = new ViewStream(data, offset, length));
        }

        [Theory]
        [InlineData(0, 0, -1)]
        [InlineData(0, 0, 2048)]
        [InlineData(1024, 0, -1)]
        [InlineData(1024, 0, 2048)]
        [InlineData(1024, 256, -1)]
        [InlineData(1024, 256, 2048)]
        public void Constructor_Array_InvalidLength(int size, long offset, long length)
        {
            byte[] data = new byte[size];
            Assert.Throws<ArgumentOutOfRangeException>(() => _ = new ViewStream(data, offset, length));
        }

        [Theory]
        [InlineData(0, 0, 0)]
        [InlineData(1024, 0, 1024)]
        [InlineData(1024, 256, 768)]
        public void Constructor_Stream(int size, long offset, long expectedLength)
        {
            Stream data = new MemoryStream(new byte[size]);
            var stream = new ViewStream(data, offset);
            Assert.Equal(expectedLength, stream.Length);
            Assert.Equal(0, stream.Position);
        }

        [Theory]
        [InlineData(0, 0, 0, 0)]
        [InlineData(1024, 0, 1024, 1024)]
        [InlineData(1024, 256, 512, 512)]
        public void Constructor_Stream_Length(int size, long offset, long length, long expectedLength)
        {
            Stream data = new MemoryStream(new byte[size]);
            var stream = new ViewStream(data, offset, length);
            Assert.Equal(expectedLength, stream.Length);
            Assert.Equal(0, stream.Position);
        }

        [Theory]
        [InlineData(0, -1, 0)]
        [InlineData(0, 2048, 0)]
        [InlineData(1024, -1, 1024)]
        [InlineData(1024, 2048, 1024)]
        [InlineData(1024, -1, 512)]
        [InlineData(1024, 2048, 512)]
        public void Constructor_Stream_InvalidOffset(int size, long offset, long length)
        {
            Stream data = new MemoryStream(new byte[size]);
            Assert.Throws<ArgumentOutOfRangeException>(() => _ = new ViewStream(data, offset, length));
        }

        [Theory]
        [InlineData(0, 0, -1)]
        [InlineData(0, 0, 2048)]
        [InlineData(1024, 0, -1)]
        [InlineData(1024, 0, 2048)]
        [InlineData(1024, 256, -1)]
        [InlineData(1024, 256, 2048)]
        public void Constructor_Stream_InvalidLength(int size, long offset, long length)
        {
            Stream data = new MemoryStream(new byte[size]);
            Assert.Throws<ArgumentOutOfRangeException>(() => _ = new ViewStream(data, offset, length));
        }

        #endregion

        #region Position

        [Theory]
        [InlineData(0, 0, 0, -1, 0)]
        [InlineData(0, 0, 0, 0, 0)]
        [InlineData(0, 0, 0, 256, 0)]
        [InlineData(0, 0, 0, 2048, 0)]
        [InlineData(1024, 0, 1024, -1, 0)]
        [InlineData(1024, 0, 1024, 0, 0)]
        [InlineData(1024, 0, 1024, 256, 256)]
        [InlineData(1024, 0, 1024, 2048, 1023)]
        [InlineData(1024, 256, 512, -1, 0)]
        [InlineData(1024, 256, 512, 0, 0)]
        [InlineData(1024, 256, 512, 256, 256)]
        [InlineData(1024, 256, 512, 2048, 511)]
        public void Position_Array(int size, long offset, long length, long position, long expectedPosition)
        {
            byte[] data = new byte[size];
            var stream = new ViewStream(data, offset, length);
            stream.Position = position;
            Assert.Equal(expectedPosition, stream.Position);
        }

        [Theory]
        [InlineData(0, 0, 0, -1, 0)]
        [InlineData(0, 0, 0, 0, 0)]
        [InlineData(0, 0, 0, 256, 0)]
        [InlineData(0, 0, 0, 2048, 0)]
        [InlineData(1024, 0, 1024, -1, 0)]
        [InlineData(1024, 0, 1024, 0, 0)]
        [InlineData(1024, 0, 1024, 256, 256)]
        [InlineData(1024, 0, 1024, 2048, 1023)]
        [InlineData(1024, 256, 512, -1, 0)]
        [InlineData(1024, 256, 512, 0, 0)]
        [InlineData(1024, 256, 512, 256, 256)]
        [InlineData(1024, 256, 512, 2048, 511)]
        public void Position_Stream(int size, long offset, long length, long position, long expectedPosition)
        {
            Stream data = new MemoryStream(new byte[size]);
            var stream = new ViewStream(data, offset, length);
            stream.Position = position;
            Assert.Equal(expectedPosition, stream.Position);
        }

        #endregion

        #region SegmentValid

        [Theory]
        [InlineData(0, 0, 0, -1, 0, false)]
        [InlineData(0, 0, 0, 2048, 0, false)]
        [InlineData(0, 0, 0, 0, 0, true)]
        [InlineData(0, 0, 0, 0, -1, false)]
        [InlineData(0, 0, 0, 0, 2048, false)]
        [InlineData(1024, 0, 1024, -1, 0, false)]
        [InlineData(1024, 0, 1024, 2048, 0, false)]
        [InlineData(1024, 0, 1024, 0, 0, true)]
        [InlineData(1024, 0, 1024, 256, 0, true)]
        [InlineData(1024, 0, 1024, 256, 256, true)]
        [InlineData(1024, 0, 1024, 0, -1, false)]
        [InlineData(1024, 0, 1024, 0, 2048, false)]
        [InlineData(1024, 256, 512, -1, 0, false)]
        [InlineData(1024, 256, 512, 2048, 0, false)]
        [InlineData(1024, 256, 512, 0, 0, true)]
        [InlineData(1024, 256, 512, 256, 0, true)]
        [InlineData(1024, 256, 512, 256, 256, true)]
        [InlineData(1024, 256, 512, 0, -1, false)]
        [InlineData(1024, 256, 512, 0, 2048, false)]
        public void SegmentValid_Array(int size, long offset, long length, int segmentStart, int segmentLength, bool expected)
        {
            byte[] data = new byte[size];
            var stream = new ViewStream(data, offset, length);
            bool actual = stream.SegmentValid(segmentStart, segmentLength);
            Assert.Equal(expected, actual);
        }

        [Theory]
        [InlineData(0, 0, 0, -1, 0, false)]
        [InlineData(0, 0, 0, 2048, 0, false)]
        [InlineData(0, 0, 0, 0, 0, true)]
        [InlineData(0, 0, 0, 0, -1, false)]
        [InlineData(0, 0, 0, 0, 2048, false)]
        [InlineData(1024, 0, 1024, -1, 0, false)]
        [InlineData(1024, 0, 1024, 2048, 0, false)]
        [InlineData(1024, 0, 1024, 0, 0, true)]
        [InlineData(1024, 0, 1024, 256, 0, true)]
        [InlineData(1024, 0, 1024, 256, 256, true)]
        [InlineData(1024, 0, 1024, 0, -1, false)]
        [InlineData(1024, 0, 1024, 0, 2048, false)]
        [InlineData(1024, 256, 512, -1, 0, false)]
        [InlineData(1024, 256, 512, 2048, 0, false)]
        [InlineData(1024, 256, 512, 0, 0, true)]
        [InlineData(1024, 256, 512, 256, 0, true)]
        [InlineData(1024, 256, 512, 256, 256, true)]
        [InlineData(1024, 256, 512, 0, -1, false)]
        [InlineData(1024, 256, 512, 0, 2048, false)]
        public void SegmentValid_Stream(int size, long offset, long length, int segmentStart, int segmentLength, bool expected)
        {
            Stream data = new MemoryStream(new byte[size]);
            var stream = new ViewStream(data, offset, length);
            bool actual = stream.SegmentValid(segmentStart, segmentLength);
            Assert.Equal(expected, actual);
        }

        #endregion

        #region Read

        [Theory]
        [InlineData(0, 0, 0, -1, 0)]
        [InlineData(0, 0, 0, 0, 0)]
        [InlineData(0, 0, 0, 2048, 0)]
        [InlineData(1024, 0, 1024, -1, 0)]
        [InlineData(1024, 0, 1024, 0, 0)]
        [InlineData(1024, 0, 1024, 256, 256)]
        [InlineData(1024, 0, 1024, 1024, 1024)]
        [InlineData(1024, 0, 1024, 2048, 0)]
        [InlineData(1024, 256, 512, -1, 0)]
        [InlineData(1024, 256, 512, 0, 0)]
        [InlineData(1024, 256, 512, 256, 256)]
        [InlineData(1024, 256, 512, 512, 512)]
        [InlineData(1024, 256, 512, 2048, 0)]
        public void Read_Array(int size, long offset, long length, int count, int expectedRead)
        {
            byte[] data = new byte[size];
            var stream = new ViewStream(data, offset, length);

            byte[] buffer = new byte[1024];
            int actual = stream.Read(buffer, 0, count);
            Assert.Equal(expectedRead, actual);
        }

        [Theory]
        [InlineData(0, 0, 0, -1, 0)]
        [InlineData(0, 0, 0, 0, 0)]
        [InlineData(0, 0, 0, 2048, 0)]
        [InlineData(1024, 0, 1024, -1, 0)]
        [InlineData(1024, 0, 1024, 0, 0)]
        [InlineData(1024, 0, 1024, 256, 256)]
        [InlineData(1024, 0, 1024, 1024, 1024)]
        [InlineData(1024, 0, 1024, 2048, 0)]
        [InlineData(1024, 256, 512, -1, 0)]
        [InlineData(1024, 256, 512, 0, 0)]
        [InlineData(1024, 256, 512, 256, 256)]
        [InlineData(1024, 256, 512, 512, 512)]
        [InlineData(1024, 256, 512, 2048, 0)]
        public void Read_Stream(int size, long offset, long length, int count, int expectedRead)
        {
            Stream data = new MemoryStream(new byte[size]);
            var stream = new ViewStream(data, offset, length);

            byte[] buffer = new byte[1024];
            int actual = stream.Read(buffer, 0, count);
            Assert.Equal(expectedRead, actual);
        }

        #endregion

        #region Seek

        [Theory]
        [InlineData(0, 0, 0, -1, SeekOrigin.Begin, 0)]
        [InlineData(0, 0, 0, -1, SeekOrigin.End, 0)]
        [InlineData(0, 0, 0, -1, SeekOrigin.Current, 0)]
        [InlineData(0, 0, 0, 0, SeekOrigin.Begin, 0)]
        [InlineData(0, 0, 0, 0, SeekOrigin.End, 0)]
        [InlineData(0, 0, 0, 0, SeekOrigin.Current, 0)]
        [InlineData(0, 0, 0, 256, SeekOrigin.Begin, 0)]
        [InlineData(0, 0, 0, 256, SeekOrigin.End, 0)]
        [InlineData(0, 0, 0, 256, SeekOrigin.Current, 0)]
        [InlineData(0, 0, 0, 2048, SeekOrigin.Begin, 0)]
        [InlineData(0, 0, 0, 2048, SeekOrigin.End, 0)]
        [InlineData(0, 0, 0, 2048, SeekOrigin.Current, 0)]
        [InlineData(1024, 0, 1024, -1, SeekOrigin.Begin, 0)]
        [InlineData(1024, 0, 1024, -1, SeekOrigin.End, 1022)]
        [InlineData(1024, 0, 1024, -1, SeekOrigin.Current, 0)]
        [InlineData(1024, 0, 1024, 0, SeekOrigin.Begin, 0)]
        [InlineData(1024, 0, 1024, 0, SeekOrigin.End, 1023)]
        [InlineData(1024, 0, 1024, 0, SeekOrigin.Current, 0)]
        [InlineData(1024, 0, 1024, 256, SeekOrigin.Begin, 256)]
        [InlineData(1024, 0, 1024, 256, SeekOrigin.End, 1023)]
        [InlineData(1024, 0, 1024, 256, SeekOrigin.Current, 256)]
        [InlineData(1024, 0, 1024, 2048, SeekOrigin.Begin, 1023)]
        [InlineData(1024, 0, 1024, 2048, SeekOrigin.End, 1023)]
        [InlineData(1024, 0, 1024, 2048, SeekOrigin.Current, 1023)]
        [InlineData(1024, 256, 512, -1, SeekOrigin.Begin, 0)]
        [InlineData(1024, 256, 512, -1, SeekOrigin.End, 510)]
        [InlineData(1024, 256, 512, -1, SeekOrigin.Current, 0)]
        [InlineData(1024, 256, 512, 0, SeekOrigin.Begin, 0)]
        [InlineData(1024, 256, 512, 0, SeekOrigin.End, 511)]
        [InlineData(1024, 256, 512, 0, SeekOrigin.Current, 0)]
        [InlineData(1024, 256, 512, 256, SeekOrigin.Begin, 256)]
        [InlineData(1024, 256, 512, 256, SeekOrigin.End, 511)]
        [InlineData(1024, 256, 512, 256, SeekOrigin.Current, 256)]
        [InlineData(1024, 256, 512, 2048, SeekOrigin.Begin, 511)]
        [InlineData(1024, 256, 512, 2048, SeekOrigin.End, 511)]
        [InlineData(1024, 256, 512, 2048, SeekOrigin.Current, 511)]
        public void Seek_Array(int size, long offset, long length, long position, SeekOrigin seekOrigin, long expectedPosition)
        {
            byte[] data = new byte[size];
            var stream = new ViewStream(data, offset, length);
            stream.Seek(position, seekOrigin);
            Assert.Equal(expectedPosition, stream.Position);
        }

        [Theory]
        [InlineData(0, 0, 0, -1, SeekOrigin.Begin, 0)]
        [InlineData(0, 0, 0, -1, SeekOrigin.End, 0)]
        [InlineData(0, 0, 0, -1, SeekOrigin.Current, 0)]
        [InlineData(0, 0, 0, 0, SeekOrigin.Begin, 0)]
        [InlineData(0, 0, 0, 0, SeekOrigin.End, 0)]
        [InlineData(0, 0, 0, 0, SeekOrigin.Current, 0)]
        [InlineData(0, 0, 0, 256, SeekOrigin.Begin, 0)]
        [InlineData(0, 0, 0, 256, SeekOrigin.End, 0)]
        [InlineData(0, 0, 0, 256, SeekOrigin.Current, 0)]
        [InlineData(0, 0, 0, 2048, SeekOrigin.Begin, 0)]
        [InlineData(0, 0, 0, 2048, SeekOrigin.End, 0)]
        [InlineData(0, 0, 0, 2048, SeekOrigin.Current, 0)]
        [InlineData(1024, 0, 1024, -1, SeekOrigin.Begin, 0)]
        [InlineData(1024, 0, 1024, -1, SeekOrigin.End, 1022)]
        [InlineData(1024, 0, 1024, -1, SeekOrigin.Current, 0)]
        [InlineData(1024, 0, 1024, 0, SeekOrigin.Begin, 0)]
        [InlineData(1024, 0, 1024, 0, SeekOrigin.End, 1023)]
        [InlineData(1024, 0, 1024, 0, SeekOrigin.Current, 0)]
        [InlineData(1024, 0, 1024, 256, SeekOrigin.Begin, 256)]
        [InlineData(1024, 0, 1024, 256, SeekOrigin.End, 1023)]
        [InlineData(1024, 0, 1024, 256, SeekOrigin.Current, 256)]
        [InlineData(1024, 0, 1024, 2048, SeekOrigin.Begin, 1023)]
        [InlineData(1024, 0, 1024, 2048, SeekOrigin.End, 1023)]
        [InlineData(1024, 0, 1024, 2048, SeekOrigin.Current, 1023)]
        [InlineData(1024, 256, 512, -1, SeekOrigin.Begin, 0)]
        [InlineData(1024, 256, 512, -1, SeekOrigin.End, 510)]
        [InlineData(1024, 256, 512, -1, SeekOrigin.Current, 0)]
        [InlineData(1024, 256, 512, 0, SeekOrigin.Begin, 0)]
        [InlineData(1024, 256, 512, 0, SeekOrigin.End, 511)]
        [InlineData(1024, 256, 512, 0, SeekOrigin.Current, 0)]
        [InlineData(1024, 256, 512, 256, SeekOrigin.Begin, 256)]
        [InlineData(1024, 256, 512, 256, SeekOrigin.End, 511)]
        [InlineData(1024, 256, 512, 256, SeekOrigin.Current, 256)]
        [InlineData(1024, 256, 512, 2048, SeekOrigin.Begin, 511)]
        [InlineData(1024, 256, 512, 2048, SeekOrigin.End, 511)]
        [InlineData(1024, 256, 512, 2048, SeekOrigin.Current, 511)]
        public void Seek_Stream(int size, long offset, long length, long position, SeekOrigin seekOrigin, long expectedPosition)
        {
            Stream data = new MemoryStream(new byte[size]);
            var stream = new ViewStream(data, offset, length);
            stream.Seek(position, seekOrigin);
            Assert.Equal(expectedPosition, stream.Position);
        }

        #endregion

        #region Unimplemented

        [Fact]
        public void Flush_Array_Throws()
        {
            byte[] data = new byte[1024];
            var stream = new ViewStream(data, 0, 1024);
            Assert.Throws<NotImplementedException>(() => stream.Flush());
        }

        [Fact]
        public void Flush_Stream_Throws()
        {
            Stream data = new MemoryStream(new byte[1024]);
            var stream = new ViewStream(data, 0, 1024);
            Assert.Throws<NotImplementedException>(() => stream.Flush());
        }

        [Fact]
        public void SetLength_Array_Throws()
        {
            byte[] data = new byte[1024];
            var stream = new ViewStream(data, 0, 1024);
            Assert.Throws<NotImplementedException>(() => stream.SetLength(0));
        }

        [Fact]
        public void SetLength_Stream_Throws()
        {
            Stream data = new MemoryStream(new byte[1024]);
            var stream = new ViewStream(data, 0, 1024);
            Assert.Throws<NotImplementedException>(() => stream.SetLength(0));
        }

        [Fact]
        public void Write_Array_Throws()
        {
            byte[] data = new byte[1024];
            var stream = new ViewStream(data, 0, 1024);
            Assert.Throws<NotImplementedException>(() => stream.Write([], 0, 0));
        }

        [Fact]
        public void Write_Stream_Throws()
        {
            Stream data = new MemoryStream(new byte[1024]);
            var stream = new ViewStream(data, 0, 1024);
            Assert.Throws<NotImplementedException>(() => stream.Write([], 0, 0));
        }

        #endregion
    }
}
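ViewStream, per the tests above, exposes a fixed window (offset and length) over either a byte array or another stream, clamps Position and Seek to that window, and rejects out-of-range constructor arguments with ArgumentOutOfRangeException. A sketch of the intended use:

```csharp
using System.IO;
using SabreTools.IO.Streams;

byte[] backing = new byte[1024];

// A 512-byte window starting 256 bytes into the backing array
var view = new ViewStream(backing, 256, 512);

// Reads and seeks are relative to the window, not the backing data
byte[] buffer = new byte[512];
int read = view.Read(buffer, 0, buffer.Length); // reads at most 512 bytes
view.Seek(0, SeekOrigin.Begin);                 // back to the start of the window
```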
SabreTools.IO/Compression/Deflate/DeflateInfo.cs (new file, 26 lines)

@@ -0,0 +1,26 @@
namespace SabreTools.IO.Compression.Deflate
{
    /// <summary>
    /// Represents information about a DEFLATE stream
    /// </summary>
    public class DeflateInfo
    {
        /// <summary>
        /// Size of the deflated data
        /// </summary>
        /// <remarks>Set to a value less than 0 to ignore</remarks>
        public long InputSize { get; set; }

        /// <summary>
        /// Size of the inflated data
        /// </summary>
        /// <remarks>Set to a value less than 0 to ignore</remarks>
        public long OutputSize { get; set; }

        /// <summary>
        /// CRC-32 of the inflated data
        /// </summary>
        /// <remarks>Set to a value of 0 to ignore</remarks>
        public uint Crc32 { get; set; }
    }
}
SabreTools.IO/Compression/Deflate/Enums.cs (new file, 34 lines)

@@ -0,0 +1,34 @@
namespace SabreTools.IO.Compression.Deflate
{
    /// <summary>
    /// Represents the status returned from extracting
    /// </summary>
    public enum ExtractionStatus
    {
        /// <summary>
        /// Extraction wasn't performed because the inputs were invalid
        /// </summary>
        INVALID,

        /// <summary>
        /// No issues with the extraction
        /// </summary>
        GOOD,

        /// <summary>
        /// File extracted but was the wrong size
        /// </summary>
        /// <remarks>Rewinds the stream and deletes the bad file</remarks>
        WRONG_SIZE,

        /// <summary>
        /// File extracted but had the wrong CRC-32 value
        /// </summary>
        BAD_CRC,

        /// <summary>
        /// Extraction failed entirely
        /// </summary>
        FAIL,
    }
}
SabreTools.IO/Compression/Deflate/InflateWrapper.cs (new file, 470 lines)

@@ -0,0 +1,470 @@
using System;
using System.IO;
using System.Text;
using SabreTools.Hashing;
using SabreTools.IO.Extensions;

namespace SabreTools.IO.Compression.Deflate
{
    /// <summary>
    /// Wrapper to handle DEFLATE decompression with data verification
    /// </summary>
    public class InflateWrapper
    {
        #region Constants

        /// <summary>
        /// Buffer size for decompression
        /// </summary>
        private const int BufferSize = 1024 * 1024;

        /// <summary>
        /// Local file header signature
        /// </summary>
        private const uint LocalFileHeaderSignature = 0x04034B50;

        #endregion

        #region Private Classes

        /// <summary>
        /// Minimal PKZIP local file header information
        /// </summary>
        private class MinLocalFileHeader
        {
            /// <summary>
            /// Signature (0x04034B50)
            /// </summary>
            public uint Signature { get; set; }

            /// <summary>
            /// CRC-32
            /// </summary>
            public uint CRC32 { get; set; }

            /// <summary>
            /// Compressed size
            /// </summary>
            public uint CompressedSize { get; set; }

            /// <summary>
            /// Uncompressed size
            /// </summary>
            public uint UncompressedSize { get; set; }

            /// <summary>
            /// File name (variable size)
            /// </summary>
            public string? FileName { get; set; }
        }

        #endregion

        #region Extraction

        /// <summary>
        /// Attempt to extract a file defined by a filename
        /// </summary>
        /// <param name="source">Stream representing the deflated data</param>
        /// <param name="filename">Output filename, null to auto-generate</param>
        /// <param name="outputDirectory">Output directory to write to</param>
        /// <param name="expected">Expected DEFLATE stream information</param>
        /// <param name="pkzip">Indicates if PKZIP containers are used</param>
        /// <param name="includeDebug">True to include debug data, false otherwise</param>
        /// <returns>Extraction status representing the final state</returns>
        /// <remarks>Assumes that the current stream position is where the compressed data lives</remarks>
        public static ExtractionStatus ExtractFile(Stream source,
            string? filename,
            string outputDirectory,
            DeflateInfo expected,
            bool pkzip,
            bool includeDebug)
        {
            // Debug output
            if (includeDebug) Console.WriteLine($"Attempting to extract {filename}");

            // Extract the file
            var destination = new MemoryStream();
            ExtractionStatus status = ExtractStream(source,
                destination,
                expected,
                pkzip,
                includeDebug,
                out var foundFilename);

            // If the extracted data is invalid
            if (status != ExtractionStatus.GOOD || destination == null)
                return status;

            // Ensure directory separators are consistent
            filename ??= foundFilename ?? $"FILE_[{expected.InputSize}, {expected.OutputSize}, {expected.Crc32}]";
            if (Path.DirectorySeparatorChar == '\\')
                filename = filename.Replace('/', '\\');
            else if (Path.DirectorySeparatorChar == '/')
                filename = filename.Replace('\\', '/');

            // Ensure the full output directory exists
            filename = Path.Combine(outputDirectory, filename);
            var directoryName = Path.GetDirectoryName(filename);
            if (directoryName != null && !Directory.Exists(directoryName))
                Directory.CreateDirectory(directoryName);

            // Write the output file
            File.WriteAllBytes(filename, destination.ToArray());
            return status;
        }

        /// <summary>
        /// Attempt to extract a file to a stream
        /// </summary>
        /// <param name="source">Stream representing the deflated data</param>
        /// <param name="destination">Stream where the inflated data will be written</param>
        /// <param name="expected">Expected DEFLATE stream information</param>
        /// <param name="pkzip">Indicates if PKZIP containers are used</param>
        /// <param name="includeDebug">True to include debug data, false otherwise</param>
        /// <param name="filename">Output filename if extracted from the data, null otherwise</param>
        /// <returns>Extraction status representing the final state</returns>
        /// <remarks>Assumes that the current stream position is where the compressed data lives</remarks>
        public static ExtractionStatus ExtractStream(Stream source,
            Stream destination,
            DeflateInfo expected,
            bool pkzip,
            bool includeDebug,
            out string? filename)
        {
            // If PKZIP containers are used
            if (pkzip)
                return ExtractStreamWithContainer(source, destination, expected, includeDebug, out filename);

            // If post-data checksums are used
            filename = null;
            return ExtractStreamWithChecksum(source, destination, expected, includeDebug);
        }

        /// <summary>
        /// Extract source data in a PKZIP container
        /// </summary>
        /// <param name="source">Stream representing the deflated data</param>
        /// <param name="destination">Stream where the inflated data will be written</param>
        /// <param name="expected">Expected DEFLATE stream information</param>
        /// <param name="includeDebug">True to include debug data, false otherwise</param>
        /// <param name="filename">Filename from the PKZIP header, if it exists</param>
        /// <returns></returns>
        public static ExtractionStatus ExtractStreamWithContainer(Stream source,
            Stream destination,
            DeflateInfo expected,
            bool includeDebug,
            out string? filename)
        {
            // Set default values
            filename = null;

            // Debug output
            if (includeDebug) Console.WriteLine($"Offset: {source.Position:X8}, Expected Read: {expected.InputSize}, Expected Write: {expected.OutputSize}, Expected CRC-32: {expected.Crc32:X8}");

            // Check the validity of the inputs
            if (expected.InputSize == 0)
            {
                if (includeDebug) Console.Error.WriteLine($"Not attempting to extract, expected to read 0 bytes");
                return ExtractionStatus.INVALID;
            }
            else if (expected.InputSize > (source.Length - source.Position))
            {
                if (includeDebug) Console.Error.WriteLine($"Not attempting to extract, expected to read {expected.InputSize} bytes but only {source.Length - source.Position} bytes remain");
                return ExtractionStatus.INVALID;
            }

            // Cache the current offset
            long current = source.Position;

            // Parse the PKZIP header, if it exists
            MinLocalFileHeader? zipHeader = ParseLocalFileHeader(source);
||||
long zipHeaderBytes = source.Position - current;
|
||||
|
||||
// Always trust the PKZIP CRC-32 value over what is supplied
|
||||
if (zipHeader != null)
|
||||
expected.Crc32 = zipHeader.CRC32;
|
||||
|
||||
// If the filename is [NULL], replace with the zip filename
|
||||
if (zipHeader?.FileName != null)
|
||||
{
|
||||
filename = zipHeader.FileName;
|
||||
if (includeDebug) Console.WriteLine($"Filename from PKZIP header: {filename}");
|
||||
}
|
||||
|
||||
// Debug output
|
||||
if (includeDebug) Console.WriteLine($"PKZIP Filename: {zipHeader?.FileName}, PKZIP Expected Read: {zipHeader?.CompressedSize}, PKZIP Expected Write: {zipHeader?.UncompressedSize}, PKZIP Expected CRC-32: {zipHeader?.CRC32:X4}");
|
||||
|
||||
// Extract the file
|
||||
var actual = Inflate(source, destination);
|
||||
if (actual == null)
|
||||
{
|
||||
if (includeDebug) Console.Error.WriteLine($"Could not extract {filename}");
|
||||
return ExtractionStatus.FAIL;
|
||||
}
|
||||
|
||||
// Account for the header bytes read
|
||||
actual.InputSize += zipHeaderBytes;
|
||||
source.Seek(current + actual.InputSize, SeekOrigin.Begin);
|
||||
|
||||
// Verify the extracted data
|
||||
return VerifyExtractedData(source, current, expected, actual, includeDebug);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Extract source data with a trailing CRC-32 checksum
|
||||
/// </summary>
|
||||
/// <param name="source">Stream representing the deflated data</param>
|
||||
/// <param name="destination">Stream where the inflated data will be written</param>
|
||||
/// <param name="expected">Expected DEFLATE stream information</param>
|
||||
/// <param name="includeDebug">True to include debug data, false otherwise</param>
|
||||
/// <returns></returns>
|
||||
public static ExtractionStatus ExtractStreamWithChecksum(Stream source,
|
||||
Stream destination,
|
||||
DeflateInfo expected,
|
||||
bool includeDebug)
|
||||
{
|
||||
// Debug output
|
||||
if (includeDebug) Console.WriteLine($"Offset: {source.Position:X8}, Expected Read: {expected.InputSize}, Expected Write: {expected.OutputSize}, Expected CRC-32: {expected.Crc32:X8}");
|
||||
|
||||
// Check the validity of the inputs
|
||||
if (expected.InputSize == 0)
|
||||
{
|
||||
if (includeDebug) Console.Error.WriteLine($"Not attempting to extract, expected to read 0 bytes");
|
||||
return ExtractionStatus.INVALID;
|
||||
}
|
||||
else if (expected.InputSize > (source.Length - source.Position))
|
||||
{
|
||||
if (includeDebug) Console.Error.WriteLine($"Not attempting to extract, expected to read {expected.InputSize} bytes but only {source.Length - source.Position} bytes remain");
|
||||
return ExtractionStatus.INVALID;
|
||||
}
|
||||
|
||||
// Cache the current offset
|
||||
long current = source.Position;
|
||||
|
||||
// Extract the file
|
||||
var actual = Inflate(source, destination);
|
||||
if (actual == null)
|
||||
{
|
||||
if (includeDebug) Console.Error.WriteLine($"Could not extract");
|
||||
return ExtractionStatus.FAIL;
|
||||
}
|
||||
|
||||
// Seek to the true end of the data
|
||||
source.Seek(current + actual.InputSize, SeekOrigin.Begin);
|
||||
|
||||
// If the read value is off-by-one after checksum
|
||||
if (actual.InputSize == expected.InputSize - 5)
|
||||
{
|
||||
// If not at the end of the file, get the corrected offset
|
||||
if (source.Position + 5 < source.Length)
|
||||
{
|
||||
// TODO: What does this byte represent?
|
||||
byte padding = source.ReadByteValue();
|
||||
actual.InputSize += 1;
|
||||
|
||||
// Debug output
|
||||
if (includeDebug) Console.WriteLine($"Off-by-one padding byte detected: 0x{padding:X2}");
|
||||
}
|
||||
else
|
||||
{
|
||||
// Debug output
|
||||
if (includeDebug) Console.WriteLine($"Not enough data to adjust offset");
|
||||
}
|
||||
}
|
||||
|
||||
// If there is enough data to read the full CRC
|
||||
uint deflateCrc;
|
||||
if (source.Position + 4 < source.Length)
|
||||
{
|
||||
deflateCrc = source.ReadUInt32LittleEndian();
|
||||
actual.InputSize += 4;
|
||||
}
|
||||
// Otherwise, read what is possible and pad with 0x00
|
||||
else
|
||||
{
|
||||
byte[] deflateCrcBytes = new byte[4];
|
||||
int realCrcLength = source.Read(deflateCrcBytes, 0, (int)(source.Length - source.Position));
|
||||
|
||||
// Parse as a little-endian 32-bit value
|
||||
deflateCrc = (uint)(deflateCrcBytes[0]
|
||||
| (deflateCrcBytes[1] << 8)
|
||||
| (deflateCrcBytes[2] << 16)
|
||||
| (deflateCrcBytes[3] << 24));
|
||||
|
||||
actual.InputSize += realCrcLength;
|
||||
}
|
||||
|
||||
// If the CRC to check isn't set
|
||||
if (expected.Crc32 == 0)
|
||||
expected.Crc32 = deflateCrc;
|
||||
|
||||
// Debug output
|
||||
if (includeDebug) Console.WriteLine($"DeflateStream CRC-32: {deflateCrc:X8}");
|
||||
|
||||
// Verify the extracted data
|
||||
return VerifyExtractedData(source, current, expected, actual, includeDebug);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a minimal local file header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled minimal local file header on success, null on error</returns>
|
||||
/// <remarks>Partial mirror of method in Serialization</remarks>
|
||||
private static MinLocalFileHeader? ParseLocalFileHeader(Stream data)
|
||||
{
|
||||
var header = new MinLocalFileHeader();
|
||||
|
||||
header.Signature = data.ReadUInt32LittleEndian();
|
||||
if (header.Signature != LocalFileHeaderSignature)
|
||||
return null;
|
||||
|
||||
_ = data.ReadUInt16LittleEndian(); // Version
|
||||
_ = data.ReadUInt16LittleEndian(); // Flags
|
||||
_ = data.ReadUInt16LittleEndian(); // CompressionMethod
|
||||
_ = data.ReadUInt16LittleEndian(); // LastModifedFileTime
|
||||
_ = data.ReadUInt16LittleEndian(); // LastModifiedFileDate
|
||||
header.CRC32 = data.ReadUInt32LittleEndian();
|
||||
header.CompressedSize = data.ReadUInt32LittleEndian();
|
||||
header.UncompressedSize = data.ReadUInt32LittleEndian();
|
||||
ushort fileNameLength = data.ReadUInt16LittleEndian();
|
||||
ushort extraFieldLength = data.ReadUInt16LittleEndian();
|
||||
|
||||
if (fileNameLength > 0 && data.Position + fileNameLength <= data.Length)
|
||||
{
|
||||
byte[] filenameBytes = data.ReadBytes(fileNameLength);
|
||||
header.FileName = Encoding.ASCII.GetString(filenameBytes);
|
||||
}
|
||||
|
||||
// Parsing extras is skipped here, unlike in Serialization
|
||||
if (extraFieldLength > 0 && data.Position + extraFieldLength <= data.Length)
|
||||
_ = data.ReadBytes(extraFieldLength);
|
||||
|
||||
return header;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Verify the extracted stream data, seeking to the original location on failure
|
||||
/// </summary>
|
||||
/// <param name="source">Stream representing the deflated data</param>
|
||||
/// <param name="start">Position representing the start of the deflated data</param>
|
||||
/// <param name="expected">Expected deflation info</param>
|
||||
/// <param name="actual">Actual deflation info</param>
|
||||
/// <param name="includeDebug">True to include debug data, false otherwise</param>
|
||||
/// <returns>Extraction status representing the final state</returns>
|
||||
private static ExtractionStatus VerifyExtractedData(Stream source,
|
||||
long start,
|
||||
DeflateInfo expected,
|
||||
DeflateInfo actual,
|
||||
bool includeDebug)
|
||||
{
|
||||
// Debug output
|
||||
if (includeDebug) Console.WriteLine($"Actual Read: {actual.InputSize}, Actual Write: {actual.OutputSize}, Actual CRC-32: {actual.Crc32:X8}");
|
||||
|
||||
// If there's a mismatch during both reading and writing
|
||||
if (expected.InputSize >= 0 && expected.InputSize != actual.InputSize)
|
||||
{
|
||||
// This in/out check helps catch false positives, such as
|
||||
// files that have an off-by-one mismatch for read values
|
||||
// but properly match the output written values.
|
||||
|
||||
// If the written bytes not correct as well
|
||||
if (expected.OutputSize >= 0 && expected.OutputSize != actual.OutputSize)
|
||||
{
|
||||
if (includeDebug) Console.Error.WriteLine($"Mismatched read/write values!");
|
||||
source.Seek(start, SeekOrigin.Begin);
|
||||
return ExtractionStatus.WRONG_SIZE;
|
||||
}
|
||||
|
||||
// If the written bytes are not being verified
|
||||
else if (expected.OutputSize < 0)
|
||||
{
|
||||
if (includeDebug) Console.Error.WriteLine($"Mismatched read/write values!");
|
||||
source.Seek(start, SeekOrigin.Begin);
|
||||
return ExtractionStatus.WRONG_SIZE;
|
||||
}
|
||||
}
|
||||
|
||||
// If there's just a mismatch during only writing
|
||||
if (expected.InputSize >= 0 && expected.InputSize == actual.InputSize)
|
||||
{
|
||||
// We want to log this but ignore the error
|
||||
if (expected.OutputSize >= 0 && expected.OutputSize != actual.OutputSize)
|
||||
{
|
||||
if (includeDebug) Console.WriteLine($"Ignoring mismatched write values because read values match!");
|
||||
}
|
||||
}
|
||||
|
||||
// Otherwise, the write size should be checked normally
|
||||
else if (expected.InputSize == 0 && expected.OutputSize >= 0 && expected.OutputSize != actual.OutputSize)
|
||||
{
|
||||
if (includeDebug) Console.Error.WriteLine($"Mismatched write values!");
|
||||
source.Seek(start, SeekOrigin.Begin);
|
||||
return ExtractionStatus.WRONG_SIZE;
|
||||
}
|
||||
|
||||
// If there's a mismatch with the CRC-32
|
||||
if (expected.Crc32 != 0 && expected.Crc32 != actual.Crc32)
|
||||
{
|
||||
if (includeDebug) Console.Error.WriteLine($"Mismatched CRC-32 values!");
|
||||
source.Seek(start, SeekOrigin.Begin);
|
||||
return ExtractionStatus.BAD_CRC;
|
||||
}
|
||||
|
||||
return ExtractionStatus.GOOD;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Inflation
|
||||
|
||||
/// <summary>
|
||||
/// Inflate an input stream to an output stream
|
||||
/// </summary>
|
||||
/// <param name="source">Stream representing the deflated data</param>
|
||||
/// <param name="destination">Stream where the inflated data will be written</param>
|
||||
/// <returns>Deflate information representing the processed data on success, null on error</returns>
|
||||
public static DeflateInfo? Inflate(Stream source, Stream destination)
|
||||
{
|
||||
try
|
||||
{
|
||||
// Setup the hasher for CRC-32 calculation
|
||||
using var hasher = new HashWrapper(HashType.CRC32);
|
||||
|
||||
// Create a DeflateStream from the input
|
||||
using var ds = new DeflateStream(source, CompressionMode.Decompress, leaveOpen: true);
|
||||
|
||||
// Decompress in blocks
|
||||
while (true)
|
||||
{
|
||||
byte[] buf = new byte[BufferSize];
|
||||
int read = ds.Read(buf, 0, buf.Length);
|
||||
if (read == 0)
|
||||
break;
|
||||
|
||||
hasher.Process(buf, 0, read);
|
||||
destination.Write(buf, 0, read);
|
||||
}
|
||||
|
||||
// Finalize the hash
|
||||
hasher.Terminate();
|
||||
byte[] hashBytes = hasher.CurrentHashBytes!;
|
||||
|
||||
// Save the deflate values
|
||||
return new DeflateInfo
|
||||
{
|
||||
InputSize = ds.TotalIn,
|
||||
OutputSize = ds.TotalOut,
|
||||
Crc32 = BitConverter.ToUInt32(hashBytes, 0),
|
||||
};
|
||||
}
|
||||
catch
|
||||
{
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
58
SabreTools.IO/Compression/LZX/AlignedOffsetBlockData.cs
Normal file
58
SabreTools.IO/Compression/LZX/AlignedOffsetBlockData.cs
Normal file
@@ -0,0 +1,58 @@
|
||||
namespace SabreTools.IO.Compression.LZX
|
||||
{
|
||||
/// <summary>
|
||||
/// An aligned offset block is identical to the verbatim block except for the presence of the aligned offset
|
||||
/// tree preceding the other trees.
|
||||
/// </summary>
|
||||
/// <see href="https://interoperability.blob.core.windows.net/files/MS-PATCH/%5bMS-PATCH%5d.pdf"/>
|
||||
internal class AlignedOffsetBlockData : BlockData
|
||||
{
|
||||
/// <summary>
|
||||
/// Aligned offset tree
|
||||
/// </summary>
|
||||
/// <remarks>8 elements, 3 bits each</remarks>
|
||||
public byte[]? AlignedOffsetTree { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Pretree for first 256 elements of main tree
|
||||
/// </summary>
|
||||
/// <remarks>20 elements, 4 bits each</remarks>
|
||||
public byte[]? PretreeFirst256 { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Path lengths of first 256 elements of main tree
|
||||
/// </summary>
|
||||
/// <remarks>Encoded using pretree</remarks>
|
||||
public int[]? PathLengthsFirst256 { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Pretree for remainder of main tree
|
||||
/// </summary>
|
||||
/// <remarks>20 elements, 4 bits each</remarks>
|
||||
public byte[]? PretreeRemainder { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Path lengths of remaining elements of main tree
|
||||
/// </summary>
|
||||
/// <remarks>Encoded using pretree</remarks>
|
||||
public int[]? PathLengthsRemainder { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Pretree for length tree
|
||||
/// </summary>
|
||||
/// <remarks>20 elements, 4 bits each</remarks>
|
||||
public byte[]? PretreeLengthTree { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Path lengths of elements in length tree
|
||||
/// </summary>
|
||||
/// <remarks>Encoded using pretree</remarks>
|
||||
public int[]? PathLengthsLengthTree { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Token sequence (matches and literals)
|
||||
/// </summary>
|
||||
/// <remarks>Variable</remarks>
|
||||
public byte[]? TokenSequence { get; set; }
|
||||
}
|
||||
}
|
||||
24
SabreTools.IO/Compression/LZX/Block.cs
Normal file
24
SabreTools.IO/Compression/LZX/Block.cs
Normal file
@@ -0,0 +1,24 @@
|
||||
namespace SabreTools.IO.Compression.LZX
|
||||
{
|
||||
/// <summary>
|
||||
/// An LZXD block represents a sequence of compressed data that is encoded with the same set of
|
||||
/// Huffman trees, or a sequence of uncompressed data. There can be one or more LZXD blocks in a
|
||||
/// compressed stream, each with its own set of Huffman trees. Blocks do not have to start or end on a
|
||||
/// chunk boundary; blocks can span multiple chunks, or a single chunk can contain multiple blocks. The
|
||||
/// number of chunks is related to the size of the data being compressed, while the number of blocks is
|
||||
/// related to how well the data is compressed.
|
||||
/// </summary>
|
||||
/// <see href="https://interoperability.blob.core.windows.net/files/MS-PATCH/%5bMS-PATCH%5d.pdf"/>
|
||||
internal class Block
|
||||
{
|
||||
/// <summary>
|
||||
/// Block header
|
||||
/// </summary>
|
||||
public BlockHeader? Header { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Block data
|
||||
/// </summary>
|
||||
public BlockData? BlockData { get; set; }
|
||||
}
|
||||
}
|
||||
8
SabreTools.IO/Compression/LZX/BlockData.cs
Normal file
8
SabreTools.IO/Compression/LZX/BlockData.cs
Normal file
@@ -0,0 +1,8 @@
|
||||
namespace SabreTools.IO.Compression.LZX
|
||||
{
|
||||
/// <see href="https://interoperability.blob.core.windows.net/files/MS-PATCH/%5bMS-PATCH%5d.pdf"/>
|
||||
internal abstract class BlockData
|
||||
{
|
||||
// No common fields between all block data
|
||||
}
|
||||
}
|
||||
33
SabreTools.IO/Compression/LZX/BlockHeader.cs
Normal file
33
SabreTools.IO/Compression/LZX/BlockHeader.cs
Normal file
@@ -0,0 +1,33 @@
|
||||
namespace SabreTools.IO.Compression.LZX
|
||||
{
|
||||
/// <summary>
|
||||
/// The Block Type field, as specified in section 2.3.1.1, indicates which type of block follows,
|
||||
/// and the Block Size field, as specified in section 2.3.1.2, indicates the number of
|
||||
/// uncompressed bytes represented by the block. Following the generic block
|
||||
/// header is a type-specific header that describes the remainder of the block.
|
||||
/// </summary>
|
||||
/// <see href="https://interoperability.blob.core.windows.net/files/MS-PATCH/%5bMS-PATCH%5d.pdf"/>
|
||||
internal class BlockHeader
|
||||
{
|
||||
/// <remarks>3 bits</remarks>
|
||||
public BlockType BlockType { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Block size is the high 8 bits of 24
|
||||
/// </summary>
|
||||
/// <remarks>8 bits</remarks>
|
||||
public byte BlockSizeMSB { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Block size is the middle 8 bits of 24
|
||||
/// </summary>
|
||||
/// <remarks>8 bits</remarks>
|
||||
public byte BlockSizeByte2 { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Block size is the low 8 bits of 24
|
||||
/// </summary>
|
||||
/// <remarks>8 bits</remarks>
|
||||
public byte BlocksizeLSB { get; set; }
|
||||
}
|
||||
}
|
||||
25
SabreTools.IO/Compression/LZX/Chunk.cs
Normal file
25
SabreTools.IO/Compression/LZX/Chunk.cs
Normal file
@@ -0,0 +1,25 @@
|
||||
namespace SabreTools.IO.Compression.LZX
|
||||
{
|
||||
/// <summary>
|
||||
/// The LZXD compressor emits chunks of compressed data. A chunk represents exactly 32 KB of
|
||||
/// uncompressed data until the last chunk in the stream, which can represent less than 32 KB. To
|
||||
/// ensure that an exact number of input bytes represent an exact number of output bytes for each
|
||||
/// chunk, after each 32 KB of uncompressed data is represented in the output compressed bitstream, the
|
||||
/// output bitstream is padded with up to 15 bits of zeros to realign the bitstream on a 16-bit boundary
|
||||
/// (even byte boundary) for the next 32 KB of data. This results in a compressed chunk of a byte-aligned
|
||||
/// size. The compressed chunk could be smaller than 32 KB or larger than 32 KB if the data is
|
||||
/// incompressible when the chunk is not the last one.
|
||||
/// </summary>
|
||||
internal class Chunk
|
||||
{
|
||||
/// <summary>
|
||||
/// Chunk header
|
||||
/// </summary>
|
||||
public ChunkHeader? Header { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Block headers and data
|
||||
/// </summary>
|
||||
public Block[]? Blocks { get; set; }
|
||||
}
|
||||
}
|
||||
46
SabreTools.IO/Compression/LZX/ChunkHeader.cs
Normal file
46
SabreTools.IO/Compression/LZX/ChunkHeader.cs
Normal file
@@ -0,0 +1,46 @@
|
||||
namespace SabreTools.IO.Compression.LZX
|
||||
{
|
||||
/// <summary>
|
||||
/// The LZXD compressor emits chunks of compressed data. A chunk represents exactly 32 KB of
|
||||
/// uncompressed data until the last chunk in the stream, which can represent less than 32 KB. To
|
||||
/// ensure that an exact number of input bytes represent an exact number of output bytes for each
|
||||
/// chunk, after each 32 KB of uncompressed data is represented in the output compressed bitstream, the
|
||||
/// output bitstream is padded with up to 15 bits of zeros to realign the bitstream on a 16-bit boundary
|
||||
/// (even byte boundary) for the next 32 KB of data. This results in a compressed chunk of a byte-aligned
|
||||
/// size. The compressed chunk could be smaller than 32 KB or larger than 32 KB if the data is
|
||||
/// incompressible when the chunk is not the last one.
|
||||
/// </summary>
|
||||
internal class ChunkHeader
|
||||
{
|
||||
/// <summary>
|
||||
/// The LZXD engine encodes a compressed, chunk-size prefix field preceding each compressed chunk in
|
||||
/// the compressed byte stream. The compressed, chunk-size prefix field is a byte aligned, little-endian,
|
||||
/// 16-bit field. The chunk prefix chain could be followed in the compressed stream without
|
||||
/// decompressing any data. The next chunk prefix is at a location computed by the absolute byte offset
|
||||
/// location of this chunk prefix plus 2 (for the size of the chunk-size prefix field) plus the current chunk
|
||||
/// size.
|
||||
/// </summary>
|
||||
public ushort ChunkSize { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// The first bit in the first chunk in the LZXD bitstream (following the 2-byte, chunk-size prefix described
|
||||
/// in section 2.2.1) indicates the presence or absence of two 16-bit fields immediately following the
|
||||
/// single bit. If the bit is set, E8 translation is enabled for all the following chunks in the stream using the
|
||||
/// 32-bit value derived from the two 16-bit fields as the E8_file_size provided to the compressor when E8
|
||||
/// translation was enabled. Note that E8_file_size is completely independent of the length of the
|
||||
/// uncompressed data. E8 call translation is disabled after the 32,768th chunk (after 1 gigabyte (GB) of
|
||||
/// uncompressed data).
|
||||
/// </summary>
|
||||
public byte E8Translation { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// E8 translation size, high WORD
|
||||
/// </summary>
|
||||
public ushort? TranslationSizeHighWord { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// E8 translation size, low WORD
|
||||
/// </summary>
|
||||
public ushort? TranslationSizeLowWord { get; set; }
|
||||
}
|
||||
}
|
||||
38
SabreTools.IO/Compression/LZX/Constants.cs
Normal file
38
SabreTools.IO/Compression/LZX/Constants.cs
Normal file
@@ -0,0 +1,38 @@
|
||||
namespace SabreTools.IO.Compression.LZX
|
||||
{
|
||||
internal static class Constants
|
||||
{
|
||||
/* some constants defined by the LZX specification */
|
||||
public const int LZX_MIN_MATCH = 2;
|
||||
public const int LZX_MAX_MATCH = 257;
|
||||
public const int LZX_NUM_CHARS = 256;
|
||||
public const int LZX_PRETREE_NUM_ELEMENTS = 20;
|
||||
|
||||
/// <summary>
|
||||
/// aligned offset tree #elements
|
||||
/// </summary>
|
||||
public const int LZX_ALIGNED_NUM_ELEMENTS = 8;
|
||||
|
||||
/// <summary>
|
||||
/// this one missing from spec!
|
||||
/// </summary>
|
||||
public const int LZX_NUM_PRIMARY_LENGTHS = 7;
|
||||
|
||||
/// <summary>
|
||||
/// length tree #elements
|
||||
/// </summary>
|
||||
public const int LZX_NUM_SECONDARY_LENGTHS = 249;
|
||||
|
||||
/* LZX huffman defines: tweak tablebits as desired */
|
||||
public const int LZX_PRETREE_MAXSYMBOLS = LZX_PRETREE_NUM_ELEMENTS;
|
||||
public const int LZX_PRETREE_TABLEBITS = 6;
|
||||
public const int LZX_MAINTREE_MAXSYMBOLS = LZX_NUM_CHARS + 50 * 8;
|
||||
public const int LZX_MAINTREE_TABLEBITS = 12;
|
||||
public const int LZX_LENGTH_MAXSYMBOLS = LZX_NUM_SECONDARY_LENGTHS + 1;
|
||||
public const int LZX_LENGTH_TABLEBITS = 12;
|
||||
public const int LZX_ALIGNED_MAXSYMBOLS = LZX_ALIGNED_NUM_ELEMENTS;
|
||||
public const int LZX_ALIGNED_TABLEBITS = 7;
|
||||
|
||||
public const int LZX_LENTABLE_SAFETY = 64; /* we allow length table decoding overruns */
|
||||
}
|
||||
}
|
||||
48
SabreTools.IO/Compression/LZX/Enums.cs
Normal file
48
SabreTools.IO/Compression/LZX/Enums.cs
Normal file
@@ -0,0 +1,48 @@
|
||||
namespace SabreTools.IO.Compression.LZX
|
||||
{
|
||||
/// <summary>
|
||||
/// 3-bit block type
|
||||
/// </summary>
|
||||
internal enum BlockType : byte
|
||||
{
|
||||
/// <summary>
|
||||
/// Not valid
|
||||
/// </summary>
|
||||
INVALID_0 = 0b000,
|
||||
|
||||
/// <summary>
|
||||
/// Verbatim block
|
||||
/// </summary>
|
||||
Verbatim = 0b001,
|
||||
|
||||
/// <summary>
|
||||
/// Aligned offset block
|
||||
/// </summary>
|
||||
AlignedOffset = 0b010,
|
||||
|
||||
/// <summary>
|
||||
/// Uncompressed block
|
||||
/// </summary>
|
||||
Uncompressed = 0b011,
|
||||
|
||||
/// <summary>
|
||||
/// Not valid
|
||||
/// </summary>
|
||||
INVALID_4 = 0b100,
|
||||
|
||||
/// <summary>
|
||||
/// Not valid
|
||||
/// </summary>
|
||||
INVALID_5 = 0b101,
|
||||
|
||||
/// <summary>
|
||||
/// Not valid
|
||||
/// </summary>
|
||||
INVALID_6 = 0b110,
|
||||
|
||||
/// <summary>
|
||||
/// Not valid
|
||||
/// </summary>
|
||||
INVALID_7 = 0b111,
|
||||
}
|
||||
}
|
||||
54
SabreTools.IO/Compression/LZX/UncompressedBlockData.cs
Normal file
54
SabreTools.IO/Compression/LZX/UncompressedBlockData.cs
Normal file
@@ -0,0 +1,54 @@
|
||||
namespace SabreTools.IO.Compression.LZX
|
||||
{
|
||||
/// <summary>
|
||||
/// Following the generic block header, an uncompressed block begins with 1 to 16 bits of zero padding
|
||||
/// to align the bit buffer on a 16-bit boundary. At this point, the bitstream ends and a byte stream
|
||||
/// begins. Following the zero padding, new 32-bit values for R0, R1, and R2 are output in little-endian
|
||||
/// form, followed by the uncompressed data bytes themselves. Finally, if the uncompressed data length
|
||||
/// is odd, one extra byte of zero padding is encoded to realign the following bitstream.
|
||||
///
|
||||
/// Then the bitstream of byte-swapped 16-bit integers resumes for the next Block Type field (if there
|
||||
/// are subsequent blocks).
|
||||
///
|
||||
/// The decoded R0, R1, and R2 values are used as initial repeated offset values to decode the
|
||||
/// subsequent compressed block if present.
|
||||
/// </summary>
|
||||
/// <see href="https://interoperability.blob.core.windows.net/files/MS-PATCH/%5bMS-PATCH%5d.pdf"/>
|
||||
internal class UncompressedBlockData : BlockData
|
||||
{
|
||||
/// <summary>
|
||||
/// Padding to align following field on 16-bit boundary
|
||||
/// </summary>
|
||||
/// <remarks>Bits have a value of zero</remarks>
|
||||
public ushort PaddingBits { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Least significant to most significant byte (little-endian DWORD ([MS-DTYP]))
|
||||
/// </summary>
|
||||
/// <remarks>Encoded directly in the byte stream, not in the bitstream of byte-swapped 16-bit words</remarks>
|
||||
public uint R0 { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Least significant to most significant byte (little-endian DWORD)
|
||||
/// </summary>
|
||||
/// <remarks>Encoded directly in the byte stream, not in the bitstream of byte-swapped 16-bit words</remarks>
|
||||
public uint R1 { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Least significant to most significant byte (little-endian DWORD)
|
||||
/// </summary>
|
||||
/// <remarks>Encoded directly in the byte stream, not in the bitstream of byte-swapped 16-bit words</remarks>
|
||||
public uint R2 { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Can use the direct memcpy function, as specified in [IEEE1003.1]
|
||||
/// </summary>
|
||||
/// <remarks>Encoded directly in the byte stream, not in the bitstream of byte-swapped 16-bit words</remarks>
|
||||
public byte[]? RawDataBytes { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Only if uncompressed size is odd
|
||||
/// </summary>
|
||||
public byte AlignmentByte { get; set; }
|
||||
}
|
||||
}
|
||||
51
SabreTools.IO/Compression/LZX/VerbatimBlockData.cs
Normal file
51
SabreTools.IO/Compression/LZX/VerbatimBlockData.cs
Normal file
@@ -0,0 +1,51 @@
|
||||
namespace SabreTools.IO.Compression.LZX
|
||||
{
|
||||
/// <summary>
|
||||
/// The fields of a verbatim block that follow the generic block header
|
||||
/// </summary>
|
||||
/// <see href="https://interoperability.blob.core.windows.net/files/MS-PATCH/%5bMS-PATCH%5d.pdf"/>
|
||||
internal class VerbatimBlockData : BlockData
|
||||
{
|
||||
/// <summary>
|
||||
/// Pretree for first 256 elements of main tree
|
||||
/// </summary>
|
||||
/// <remarks>20 elements, 4 bits each</remarks>
|
||||
public byte[]? PretreeFirst256 { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Path lengths of first 256 elements of main tree
|
||||
/// </summary>
|
||||
/// <remarks>Encoded using pretree</remarks>
|
||||
public int[]? PathLengthsFirst256 { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Pretree for remainder of main tree
|
||||
/// </summary>
|
||||
/// <remarks>20 elements, 4 bits each</remarks>
|
||||
public byte[]? PretreeRemainder { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Path lengths of remaining elements of main tree
|
||||
/// </summary>
|
||||
/// <remarks>Encoded using pretree</remarks>
|
||||
public int[]? PathLengthsRemainder { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Pretree for length tree
|
||||
/// </summary>
|
||||
/// <remarks>20 elements, 4 bits each</remarks>
|
||||
public byte[]? PretreeLengthTree { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Path lengths of elements in length tree
|
||||
/// </summary>
|
||||
/// <remarks>Encoded using pretree</remarks>
|
||||
public int[]? PathLengthsLengthTree { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Token sequence (matches and literals)
|
||||
/// </summary>
|
||||
/// <remarks>Variable</remarks>
|
||||
public byte[]? TokenSequence { get; set; }
|
||||
}
|
||||
}
|
||||
28
SabreTools.IO/Compression/MSZIP/BlockHeader.cs
Normal file
28
SabreTools.IO/Compression/MSZIP/BlockHeader.cs
Normal file
@@ -0,0 +1,28 @@
|
||||
namespace SabreTools.IO.Compression.MSZIP
|
||||
{
|
||||
/// <summary>
|
||||
/// Each MSZIP block MUST consist of a 2-byte MSZIP signature and one or more RFC 1951 blocks. The
|
||||
/// 2-byte MSZIP signature MUST consist of the bytes 0x43 and 0x4B. The MSZIP signature MUST be
|
||||
/// the first 2 bytes in the MSZIP block. The MSZIP signature is shown in the following packet diagram.
|
||||
///
|
||||
/// Each MSZIP block is the result of a single deflate compression operation, as defined in [RFC1951].
|
||||
/// The compressor that performs the compression operation MUST generate one or more RFC 1951
|
||||
/// blocks, as defined in [RFC1951]. The number, deflation mode, and type of RFC 1951 blocks in each
|
||||
/// MSZIP block is determined by the compressor, as defined in [RFC1951]. The last RFC 1951 block in
|
||||
/// each MSZIP block MUST be marked as the "end" of the stream(1), as defined by [RFC1951]
|
||||
/// section 3.2.3. Decoding trees MUST be discarded after each RFC 1951 block, but the history buffer
|
||||
/// MUST be maintained.Each MSZIP block MUST represent no more than 32 KB of uncompressed data.
|
||||
///
|
||||
/// The maximum compressed size of each MSZIP block is 32 KB + 12 bytes. This enables the MSZIP
|
||||
/// block to contain 32 KB of data split between two noncompressed RFC 1951 blocks, each of which
|
||||
/// has a value of BTYPE = 00.
|
||||
/// </summary>
|
||||
/// <see href="https://interoperability.blob.core.windows.net/files/MS-MCI/%5bMS-MCI%5d.pdf"/>
|
||||
internal class BlockHeader
|
||||
{
|
||||
/// <summary>
|
||||
/// 'CK'
|
||||
/// </summary>
|
||||
public ushort Signature { get; set; }
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,6 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using SabreTools.IO.Extensions;
|
||||
using SabreTools.Models.Compression.MSZIP;
|
||||
|
||||
namespace SabreTools.IO.Compression.MSZIP
|
||||
{
|
||||
|
||||
50
SabreTools.IO/Compression/Quantum/Constants.cs
Normal file
50
SabreTools.IO/Compression/Quantum/Constants.cs
Normal file
@@ -0,0 +1,50 @@
|
||||
namespace SabreTools.IO.Compression.Quantum
|
||||
{
|
||||
/// <see href="http://www.russotto.net/quantumcomp.html"/>
|
||||
internal static class Constants
|
||||
{
|
||||
public static readonly int[] PositionSlot =
|
||||
[
|
||||
0x00000, 0x00001, 0x00002, 0x00003, 0x00004, 0x00006, 0x00008, 0x0000c,
|
||||
0x00010, 0x00018, 0x00020, 0x00030, 0x00040, 0x00060, 0x00080, 0x000c0,
|
||||
0x00100, 0x00180, 0x00200, 0x00300, 0x00400, 0x00600, 0x00800, 0x00c00,
|
||||
0x01000, 0x01800, 0x02000, 0x03000, 0x04000, 0x06000, 0x08000, 0x0c000,
|
||||
0x10000, 0x18000, 0x20000, 0x30000, 0x40000, 0x60000, 0x80000, 0xc0000,
|
||||
0x100000, 0x180000
|
||||
];
|
||||
|
||||
public static readonly int[] PositionExtraBits =
|
||||
[
|
||||
0, 0, 0, 0, 1, 1, 2, 2,
|
||||
3, 3, 4, 4, 5, 5, 6, 6,
|
||||
7, 7, 8, 8, 9, 9, 10, 10,
|
||||
11, 11, 12, 12, 13, 13, 14, 14,
|
||||
15, 15, 16, 16, 17, 17, 18, 18,
|
||||
19, 19
|
||||
];
|
||||
|
||||
public static readonly int[] LengthSlot =
|
||||
[
|
||||
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x08,
|
||||
0x0a, 0x0c, 0x0e, 0x12, 0x16, 0x1a, 0x1e, 0x26,
|
||||
0x2e, 0x36, 0x3e, 0x4e, 0x5e, 0x6e, 0x7e, 0x9e,
|
||||
0xbe, 0xde, 0xfe
|
||||
];
|
||||
|
||||
public static readonly int[] LengthExtraBits =
|
||||
[
|
||||
0, 0, 0, 0, 0, 0, 1, 1,
|
||||
1, 1, 2, 2, 2, 2, 3, 3,
|
||||
3, 3, 4, 4, 4, 4, 5, 5,
|
||||
5, 5, 0
|
||||
];
|
||||
|
||||
/// <summary>
|
||||
/// Number of position slots for (tsize - 10)
|
||||
/// </summary>
|
||||
public static readonly int[] NumPositionSlots =
|
||||
[
|
||||
20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42
|
||||
];
|
||||
}
|
||||
}
|
||||
@@ -2,8 +2,7 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using SabreTools.IO.Streams;
|
||||
using SabreTools.Models.Compression.Quantum;
|
||||
using static SabreTools.Models.Compression.Quantum.Constants;
|
||||
using static SabreTools.IO.Compression.Quantum.Constants;
|
||||
|
||||
namespace SabreTools.IO.Compression.Quantum
|
||||
{
|
||||
|
||||
45
SabreTools.IO/Compression/Quantum/Enums.cs
Normal file
45
SabreTools.IO/Compression/Quantum/Enums.cs
Normal file
@@ -0,0 +1,45 @@
|
||||
namespace SabreTools.IO.Compression.Quantum
|
||||
{
|
||||
internal enum SelectorModel
|
||||
{
|
||||
/// <summary>
|
||||
/// Literal model, 64 entries, start at symbol 0
|
||||
/// </summary>
|
||||
SELECTOR_0 = 0,
|
||||
|
||||
/// <summary>
|
||||
/// Literal model, 64 entries, start at symbol 64
|
||||
/// </summary>
|
||||
SELECTOR_1 = 1,
|
||||
|
||||
/// <summary>
|
||||
/// Literal model, 64 entries, start at symbol 128
|
||||
/// </summary>
|
||||
SELECTOR_2 = 2,
|
||||
|
||||
/// <summary>
|
||||
/// Literal model, 64 entries, start at symbol 192
|
||||
/// </summary>
|
||||
SELECTOR_3 = 3,
|
||||
|
||||
/// <summary>
|
||||
/// LZ model, 3 character matches, max 24 entries, start at symbol 0
|
||||
/// </summary>
|
||||
SELECTOR_4 = 4,
|
||||
|
||||
/// <summary>
|
||||
/// LZ model, 4 character matches, max 36 entries, start at symbol 0
|
||||
/// </summary>
|
||||
SELECTOR_5 = 5,
|
||||
|
||||
/// <summary>
|
||||
/// LZ model, 5+ character matches, max 42 entries, start at symbol 0
|
||||
/// </summary>
|
||||
SELECTOR_6_POSITION = 6,
|
||||
|
||||
/// <summary>
|
||||
/// LZ model, 5+ character matches, 27 entries, start at symbol 0
|
||||
/// </summary>
|
||||
SELECTOR_6_LENGTH = 7,
|
||||
}
|
||||
}
|
||||
24
SabreTools.IO/Compression/Quantum/Model.cs
Normal file
24
SabreTools.IO/Compression/Quantum/Model.cs
Normal file
@@ -0,0 +1,24 @@
|
||||
namespace SabreTools.IO.Compression.Quantum
|
||||
{
|
||||
/// <see href="http://www.russotto.net/quantumcomp.html"/>
|
||||
internal sealed class Model
|
||||
{
|
||||
public int Entries { get; set; }
|
||||
|
||||
/// <remarks>
|
||||
/// All the models are initialized with the symbols in symbol
|
||||
/// order in the table, and with every symbol in the table
|
||||
/// having a frequency of 1
|
||||
/// </remarks>
|
||||
public ModelSymbol[]? Symbols { get; set; }
|
||||
|
||||
/// <remarks>
|
||||
/// The initial total frequency is equal to the number of entries
|
||||
/// in the table
|
||||
/// </remarks>
|
||||
public int TotalFrequency { get; set; }
|
||||
|
||||
/// <remarks>The initial time_to_reorder value is 4</remarks>
|
||||
public int TimeToReorder { get; set; }
|
||||
}
|
||||
}
|
||||
15
SabreTools.IO/Compression/Quantum/ModelSymbol.cs
Normal file
15
SabreTools.IO/Compression/Quantum/ModelSymbol.cs
Normal file
@@ -0,0 +1,15 @@
|
||||
namespace SabreTools.IO.Compression.Quantum
|
||||
{
|
||||
/// <see href="http://www.russotto.net/quantumcomp.html"/>
|
||||
internal sealed class ModelSymbol
|
||||
{
|
||||
public ushort Symbol { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// The cumulative frequency is the frequency of all the symbols
|
||||
/// which are at a higher index in the table than that symbol —
|
||||
/// thus the last entry in the table has a cumulative frequency of 0.
|
||||
/// </summary>
|
||||
public ushort CumulativeFrequency { get; set; }
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,5 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using SabreTools.Models.LZ;
|
||||
|
||||
namespace SabreTools.IO.Compression.SZDD
|
||||
{
|
||||
@@ -15,7 +14,7 @@ namespace SabreTools.IO.Compression.SZDD
|
||||
/// <summary>
|
||||
/// Source stream for the decompressor
|
||||
/// </summary>
|
||||
private readonly BufferedStream _source;
|
||||
private readonly Streams.BufferedStream _source;
|
||||
|
||||
/// <summary>
|
||||
/// SZDD format being decompressed
|
||||
@@ -37,19 +36,19 @@ namespace SabreTools.IO.Compression.SZDD
|
||||
|
||||
// Initialize the window with space characters
|
||||
_window = Array.ConvertAll(_window, b => (byte)0x20);
|
||||
_source = new BufferedStream(source);
|
||||
_source = new Streams.BufferedStream(source);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Create a KWAJ decompressor
|
||||
/// </summary>
|
||||
public static Decompressor CreateKWAJ(byte[] source, KWAJCompressionType compressionType)
|
||||
public static Decompressor CreateKWAJ(byte[] source, ushort compressionType)
|
||||
=> CreateKWAJ(new MemoryStream(source), compressionType);
|
||||
|
||||
/// <summary>
|
||||
/// Create a KWAJ decompressor
|
||||
/// </summary>
|
||||
public static Decompressor CreateKWAJ(Stream source, KWAJCompressionType compressionType)
|
||||
public static Decompressor CreateKWAJ(Stream source, ushort compressionType)
|
||||
{
|
||||
// Create the decompressor
|
||||
var decompressor = new Decompressor(source);
|
||||
@@ -57,11 +56,11 @@ namespace SabreTools.IO.Compression.SZDD
|
||||
// Set the format and return
|
||||
decompressor._format = compressionType switch
|
||||
{
|
||||
KWAJCompressionType.NoCompression => Format.KWAJNoCompression,
|
||||
KWAJCompressionType.NoCompressionXor => Format.KWAJXor,
|
||||
KWAJCompressionType.QBasic => Format.KWAJQBasic,
|
||||
KWAJCompressionType.LZH => Format.KWAJLZH,
|
||||
KWAJCompressionType.MSZIP => Format.KWAJMSZIP,
|
||||
0x0000 => Format.KWAJNoCompression,
|
||||
0x0001 => Format.KWAJXor,
|
||||
0x0002 => Format.KWAJQBasic,
|
||||
0x0003 => Format.KWAJLZH,
|
||||
0x0004 => Format.KWAJMSZIP,
|
||||
_ => throw new IndexOutOfRangeException(nameof(source)),
|
||||
};
|
||||
return decompressor;
|
||||
@@ -229,77 +228,5 @@ namespace SabreTools.IO.Compression.SZDD
|
||||
dest.Flush();
|
||||
return true;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Buffered stream that reads in blocks
|
||||
/// </summary>
|
||||
private class BufferedStream
|
||||
{
|
||||
/// <summary>
|
||||
/// Source stream for populating the buffer
|
||||
/// </summary>
|
||||
private readonly Stream _source;
|
||||
|
||||
/// <summary>
|
||||
/// Internal buffer to read
|
||||
/// </summary>
|
||||
private readonly byte[] _buffer = new byte[2048];
|
||||
|
||||
/// <summary>
|
||||
/// Current pointer into the buffer
|
||||
/// </summary>
|
||||
private int _bufferPtr = 0;
|
||||
|
||||
/// <summary>
|
||||
/// Represents the number of available bytes
|
||||
/// </summary>
|
||||
private int _available = -1;
|
||||
|
||||
/// <summary>
|
||||
/// Create a new buffered stream
|
||||
/// </summary>
|
||||
public BufferedStream(Stream source)
|
||||
{
|
||||
_source = source;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Read the next byte from the buffer, if possible
|
||||
/// </summary>
|
||||
public byte? ReadNextByte()
|
||||
{
|
||||
// Ensure the buffer first
|
||||
if (!EnsureBuffer())
|
||||
return null;
|
||||
|
||||
// Return the next available value
|
||||
return _buffer[_bufferPtr++];
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Ensure the buffer has data to read
|
||||
/// </summary>
|
||||
private bool EnsureBuffer()
|
||||
{
|
||||
// Force an update if in the initial state
|
||||
if (_available == -1)
|
||||
{
|
||||
_available = _source.Read(_buffer, 0, _buffer.Length);
|
||||
_bufferPtr = 0;
|
||||
return _available != 0;
|
||||
}
|
||||
|
||||
// If the pointer is out of range
|
||||
if (_bufferPtr >= _available)
|
||||
{
|
||||
_available = _source.Read(_buffer, 0, _buffer.Length);
|
||||
_bufferPtr = 0;
|
||||
return _available != 0;
|
||||
}
|
||||
|
||||
// Otherwise, assume data is available
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
179
SabreTools.IO/Encryption/MoPaQDecrypter.cs
Normal file
179
SabreTools.IO/Encryption/MoPaQDecrypter.cs
Normal file
@@ -0,0 +1,179 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using SabreTools.Hashing;
|
||||
using SabreTools.Matching;
|
||||
|
||||
namespace SabreTools.IO.Encryption
|
||||
{
|
||||
/// <summary>
|
||||
/// Handler for decrypting MoPaQ block and table data
|
||||
/// </summary>
|
||||
public class MoPaQDecrypter
|
||||
{
|
||||
#region Constants
|
||||
|
||||
private const uint MPQ_HASH_KEY2_MIX = 0x400;
|
||||
|
||||
private const uint STORM_BUFFER_SIZE = 0x500;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Private Instance Variables
|
||||
|
||||
/// <summary>
|
||||
/// Buffer for encryption and decryption
|
||||
/// </summary>
|
||||
private readonly uint[] _stormBuffer = new uint[STORM_BUFFER_SIZE];
|
||||
|
||||
#endregion
|
||||
|
||||
public MoPaQDecrypter()
|
||||
{
|
||||
PrepareCryptTable();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Prepare the encryption table
|
||||
/// </summary>
|
||||
private void PrepareCryptTable()
|
||||
{
|
||||
uint seed = 0x00100001;
|
||||
for (uint index1 = 0; index1 < 0x100; index1++)
|
||||
{
|
||||
for (uint index2 = index1, i = 0; i < 5; i++, index2 += 0x100)
|
||||
{
|
||||
seed = (seed * 125 + 3) % 0x2AAAAB;
|
||||
uint temp1 = (seed & 0xFFFF) << 0x10;
|
||||
|
||||
seed = (seed * 125 + 3) % 0x2AAAAB;
|
||||
uint temp2 = (seed & 0xFFFF);
|
||||
|
||||
_stormBuffer[index2] = (temp1 | temp2);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Load a table block by optionally decompressing and
|
||||
/// decrypting before returning the data.
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <param name="offset">Data offset to parse</param>
|
||||
/// <param name="expectedHash">Optional MD5 hash for validation</param>
|
||||
/// <param name="compressedSize">Size of the table in the file</param>
|
||||
/// <param name="tableSize">Expected size of the table</param>
|
||||
/// <param name="key">Encryption key to use</param>
|
||||
/// <param name="realTableSize">Output represening the real table size</param>
|
||||
/// <returns>Byte array representing the processed table</returns>
|
||||
public byte[]? LoadTable(Stream data,
|
||||
long offset,
|
||||
byte[]? expectedHash,
|
||||
uint compressedSize,
|
||||
uint tableSize,
|
||||
uint key,
|
||||
out long realTableSize)
|
||||
{
|
||||
byte[]? tableData;
|
||||
byte[]? readBytes;
|
||||
long bytesToRead = tableSize;
|
||||
|
||||
// Allocate the MPQ table
|
||||
tableData = readBytes = new byte[tableSize];
|
||||
|
||||
// Check if the MPQ table is compressed
|
||||
if (compressedSize != 0 && compressedSize < tableSize)
|
||||
{
|
||||
// Allocate temporary buffer for holding compressed data
|
||||
readBytes = new byte[compressedSize];
|
||||
bytesToRead = compressedSize;
|
||||
}
|
||||
|
||||
// Get the file offset from which we will read the table
|
||||
// Note: According to Storm.dll from Warcraft III (version 2002),
|
||||
// if the hash table position is 0xFFFFFFFF, no SetFilePointer call is done
|
||||
// and the table is loaded from the current file offset
|
||||
if (offset == 0xFFFFFFFF)
|
||||
offset = data.Position;
|
||||
|
||||
// Is the sector table within the file?
|
||||
if (offset >= data.Length)
|
||||
{
|
||||
realTableSize = 0;
|
||||
return null;
|
||||
}
|
||||
|
||||
// The hash table and block table can go beyond EOF.
|
||||
// Storm.dll reads as much as possible, then fills the missing part with zeros.
|
||||
// Abused by Spazzler map protector which sets hash table size to 0x00100000
|
||||
// Abused by NP_Protect in MPQs v4 as well
|
||||
if ((offset + bytesToRead) > data.Length)
|
||||
bytesToRead = (uint)(data.Length - offset);
|
||||
|
||||
// Give the caller information that the table was cut
|
||||
realTableSize = bytesToRead;
|
||||
|
||||
// If everything succeeded, read the raw table from the MPQ
|
||||
data.Seek(offset, SeekOrigin.Begin);
|
||||
_ = data.Read(readBytes, 0, (int)bytesToRead);
|
||||
|
||||
// Verify the MD5 of the table, if present
|
||||
byte[]? actualHash = HashTool.GetByteArrayHashArray(readBytes, HashType.MD5);
|
||||
if (expectedHash != null && actualHash != null && !actualHash.EqualsExactly(expectedHash))
|
||||
{
|
||||
Console.WriteLine("Table is corrupt!");
|
||||
return null;
|
||||
}
|
||||
|
||||
// First of all, decrypt the table
|
||||
if (key != 0)
|
||||
tableData = DecryptBlock(readBytes, bytesToRead, key);
|
||||
|
||||
// If the table is compressed, decompress it
|
||||
if (compressedSize != 0 && compressedSize < tableSize)
|
||||
{
|
||||
Console.WriteLine("Table is compressed, it will not read properly!");
|
||||
return null;
|
||||
|
||||
// TODO: Handle decompression
|
||||
// int cbOutBuffer = (int)tableSize;
|
||||
// int cbInBuffer = (int)compressedSize;
|
||||
|
||||
// if (!SCompDecompress2(readBytes, &cbOutBuffer, tableData, cbInBuffer))
|
||||
// errorCode = SErrGetLastError();
|
||||
|
||||
// tableData = readBytes;
|
||||
}
|
||||
|
||||
// Return the MPQ table
|
||||
return tableData;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Decrypt a single block of data
|
||||
/// </summary>
|
||||
public unsafe byte[] DecryptBlock(byte[] block, long length, uint key)
|
||||
{
|
||||
uint seed = 0xEEEEEEEE;
|
||||
|
||||
uint[] castBlock = new uint[length >> 2];
|
||||
Buffer.BlockCopy(block, 0, castBlock, 0, (int)length);
|
||||
int castBlockPtr = 0;
|
||||
|
||||
// Round to uints
|
||||
length >>= 2;
|
||||
|
||||
while (length-- > 0)
|
||||
{
|
||||
seed += _stormBuffer[MPQ_HASH_KEY2_MIX + (key & 0xFF)];
|
||||
uint ch = castBlock[castBlockPtr] ^ (key + seed);
|
||||
|
||||
key = ((~key << 0x15) + 0x11111111) | (key >> 0x0B);
|
||||
seed = ch + seed + (seed << 5) + 3;
|
||||
castBlock[castBlockPtr++] = ch;
|
||||
}
|
||||
|
||||
Buffer.BlockCopy(castBlock, 0, block, 0, block.Length >> 2);
|
||||
return block;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,7 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
|
||||
namespace SabreTools.IO.Extensions
|
||||
{
|
||||
@@ -50,5 +53,272 @@ namespace SabreTools.IO.Extensions
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Read string data from a byte array
|
||||
/// </summary>
|
||||
/// <param name="charLimit">Number of characters needed to be a valid string, default 5</param>
|
||||
/// <returns>String list containing the requested data, null on error</returns>
|
||||
#if NET5_0_OR_GREATER
|
||||
/// <remarks>This reads both Latin1 and UTF-16 strings from the input data</remarks>
|
||||
#else
|
||||
/// <remarks>This reads both ASCII and UTF-16 strings from the input data</remarks>
|
||||
#endif
|
||||
public static List<string>? ReadStringsFrom(this byte[]? input, int charLimit = 5)
|
||||
{
|
||||
// Validate the data
|
||||
if (input == null || input.Length == 0)
|
||||
return null;
|
||||
|
||||
#if NET5_0_OR_GREATER
|
||||
// Check for Latin1 strings
|
||||
var asciiStrings = input.ReadStringsWithEncoding(charLimit, Encoding.Latin1);
|
||||
#else
|
||||
// Check for ASCII strings
|
||||
var asciiStrings = input.ReadStringsWithEncoding(charLimit, Encoding.ASCII);
|
||||
#endif
|
||||
|
||||
// Check for Unicode strings
|
||||
// We are limiting the check for Unicode characters with a second byte of 0x00 for now
|
||||
var unicodeStrings = input.ReadStringsWithEncoding(charLimit, Encoding.Unicode);
|
||||
|
||||
// Ignore duplicate strings across encodings
|
||||
List<string> sourceStrings = [.. asciiStrings, .. unicodeStrings];
|
||||
|
||||
// Sort the strings and return
|
||||
sourceStrings.Sort();
|
||||
return sourceStrings;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Read string data from a byte array with an encoding
|
||||
/// </summary>
|
||||
/// <param name="bytes">Byte array representing the source data</param>
|
||||
/// <param name="charLimit">Number of characters needed to be a valid string</param>
|
||||
/// <param name="encoding">Character encoding to use for checking</param>
|
||||
/// <returns>String list containing the requested data, empty on error</returns>
|
||||
/// <remarks>Characters with the higher bytes set are unused</remarks>
|
||||
#if NET20
|
||||
public static List<string> ReadStringsWithEncoding(this byte[]? bytes, int charLimit, Encoding encoding)
|
||||
#else
|
||||
public static HashSet<string> ReadStringsWithEncoding(this byte[]? bytes, int charLimit, Encoding encoding)
|
||||
#endif
|
||||
{
|
||||
if (bytes == null || bytes.Length == 0)
|
||||
return [];
|
||||
if (charLimit <= 0 || charLimit > bytes.Length)
|
||||
return [];
|
||||
|
||||
// Short-circuit for some encoding types
|
||||
if (encoding.CodePage == Encoding.ASCII.CodePage)
|
||||
return bytes.ReadAsciiStrings(charLimit);
|
||||
#if NET5_0_OR_GREATER
|
||||
else if (encoding.CodePage == Encoding.Latin1.CodePage)
|
||||
return bytes.ReadFixedWidthEncodingStrings(charLimit, Encoding.Latin1, 1);
|
||||
#endif
|
||||
else if (encoding.IsSingleByte)
|
||||
return bytes.ReadFixedWidthEncodingStrings(charLimit, encoding, 1);
|
||||
else if (encoding.CodePage == Encoding.Unicode.CodePage)
|
||||
return bytes.ReadFixedWidthEncodingStrings(charLimit, Encoding.Unicode, 2);
|
||||
else if (encoding.CodePage == Encoding.BigEndianUnicode.CodePage)
|
||||
return bytes.ReadFixedWidthEncodingStrings(charLimit, Encoding.BigEndianUnicode, 2);
|
||||
else if (encoding.CodePage == Encoding.UTF32.CodePage)
|
||||
return bytes.ReadFixedWidthEncodingStrings(charLimit, Encoding.UTF32, 4);
|
||||
|
||||
// Create the string set to return
|
||||
#if NET20
|
||||
var strings = new List<string>();
|
||||
#else
|
||||
var strings = new HashSet<string>();
|
||||
#endif
|
||||
|
||||
// Open the text reader with the correct encoding
|
||||
using var ms = new MemoryStream(bytes);
|
||||
using var reader = new StreamReader(ms, encoding);
|
||||
|
||||
// Create a string builder for the loop
|
||||
var sb = new StringBuilder();

// Check for strings
long lastOffset = 0;
while (!reader.EndOfStream)
{
// Read the next character from the stream
char c = (char)reader.Read();

// If the character is invalid
if (char.IsControl(c) || (c & 0xFFFFFF00) != 0)
{
// Seek to the end of the last found string
string str = sb.ToString();
lastOffset += encoding.GetByteCount(str) + 1;
ms.Seek(lastOffset, SeekOrigin.Begin);
reader.DiscardBufferedData();

// If there is no cached string
if (str.Length == 0)
continue;

// Add the string if long enough
if (str.Length >= charLimit)
strings.Add(str);

// Clear the builder and continue
#if NET20 || NET35
sb = new();
#else
sb.Clear();
#endif
continue;
}

// Otherwise, add the character to the builder and continue
sb.Append(c);
}

// Handle any remaining data
if (sb.Length >= charLimit)
strings.Add(sb.ToString());

return strings;
}

#region Fixed Byte-Width Encoding Helpers

/// <summary>
/// Read string data from a byte array using an encoding with a fixed width
/// </summary>
/// <param name="bytes">Byte array representing the source data</param>
/// <param name="charLimit">Number of characters needed to be a valid string</param>
/// <param name="encoding">Character encoding to use for checking</param>
/// <param name="width">Character width of the encoding</param>
/// <returns>String list containing the requested data, empty on error</returns>
/// <remarks>Characters with the higher bytes set are unused</remarks>
#if NET20
private static List<string> ReadFixedWidthEncodingStrings(this byte[] bytes, int charLimit, Encoding encoding, int width)
#else
private static HashSet<string> ReadFixedWidthEncodingStrings(this byte[] bytes, int charLimit, Encoding encoding, int width)
#endif
{
if (charLimit <= 0 || charLimit > bytes.Length)
return [];

// Create the string set to return
#if NET20
var strings = new List<string>();
#else
var strings = new HashSet<string>();
#endif

// Create a string builder for the loop
var sb = new StringBuilder();

// Check for strings
int offset = 0;
while (offset <= bytes.Length - width)
{
// Read the next character from the stream
char c = encoding.GetChars(bytes, offset, width)[0];
offset += width;

// If the character is invalid
if (char.IsControl(c) || (c & 0xFFFFFF00) != 0)
{
// Pretend only one byte was read
offset -= width - 1;

// If there is no cached string
if (sb.Length == 0)
continue;

// Add the string if long enough
if (sb.Length >= charLimit)
strings.Add(sb.ToString());

// Clear the builder and continue
#if NET20 || NET35
sb = new();
#else
sb.Clear();
#endif
continue;
}

// Otherwise, add the character to the builder and continue
sb.Append(c);
}

// Handle any remaining data
if (sb.Length >= charLimit)
strings.Add(sb.ToString());

return strings;
}

/// <summary>
/// Read string data from a byte array using ASCII encoding
/// </summary>
/// <param name="bytes">Byte array representing the source data</param>
/// <param name="charLimit">Number of characters needed to be a valid string</param>
/// <returns>String list containing the requested data, empty on error</returns>
/// <remarks>Handling for 7-bit ASCII needs to be done differently than other fixed-width encodings</remarks>
#if NET20
private static List<string> ReadAsciiStrings(this byte[] bytes, int charLimit)
#else
private static HashSet<string> ReadAsciiStrings(this byte[] bytes, int charLimit)
#endif
{
if (charLimit <= 0 || charLimit > bytes.Length)
return [];

// Create the string set to return
#if NET20
var strings = new List<string>();
#else
var strings = new HashSet<string>();
#endif

// Create a string builder for the loop
var sb = new StringBuilder();

// Check for strings
int offset = 0;
while (offset < bytes.Length)
{
// Read the next character from the stream
char c = bytes.ReadChar(ref offset);

// If the character is invalid
if (char.IsControl(c) || c > 0x7F)
{
// If there is no cached string
if (sb.Length == 0)
continue;

// Add the string if long enough
if (sb.Length >= charLimit)
strings.Add(sb.ToString());

// Clear the builder and continue
#if NET20 || NET35
sb = new();
#else
sb.Clear();
#endif
continue;
}

// Otherwise, add the character to the builder and continue
sb.Append(c);
}

// Handle any remaining data
if (sb.Length >= charLimit)
strings.Add(sb.ToString());

return strings;
}

#endregion
}
}
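A minimal usage sketch for the public byte-array ReadStringsFrom entry point that these private helpers back (not part of the diff). The sample buffer, the character limit, and the assumption that the method returns a nullable list are illustrative only; exact results depend on the scanning heuristics above.

using System;
using System.Text;
using SabreTools.IO.Extensions;

class StringScanDemo
{
    static void Main()
    {
        // Build a buffer that mixes ASCII text, binary padding, and UTF-16 text
        byte[] data = new byte[64];
        Encoding.ASCII.GetBytes("HELLO-WORLD").CopyTo(data, 4);
        Encoding.Unicode.GetBytes("WIDE TEXT").CopyTo(data, 32);

        // Runs shorter than the character limit are discarded by the scanner
        var found = data.ReadStringsFrom(5);
        if (found != null)
        {
            foreach (string s in found)
                Console.WriteLine(s);
        }
    }
}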
@@ -1065,6 +1065,10 @@ namespace SabreTools.IO.Extensions
/// </summary>
private static byte[] ReadExactlyToBuffer(byte[] content, ref int offset, int length)
{
// If we have an invalid offset
if (offset < 0 || offset >= content.Length)
throw new ArgumentOutOfRangeException($"{nameof(offset)} must be between 0 and {content.Length}, {offset} provided");

// If we have an invalid length
if (length < 0)
throw new ArgumentOutOfRangeException($"{nameof(length)} must be 0 or a positive value, {length} requested");

@@ -5,6 +5,25 @@ namespace SabreTools.IO.Extensions
{
public static class EnumerableExtensions
{
/// <summary>
/// Wrap iterating through an enumerable with an action
/// </summary>
/// <remarks>
/// .NET Frameworks 2.0 and 3.5 process in series.
/// .NET Frameworks 4.0 onward process in parallel.
/// </remarks>
public static void IterateWithAction<T>(this IEnumerable<T> source, Action<T> action)
{
#if NET20 || NET35
foreach (var item in source)
{
action(item);
}
#else
System.Threading.Tasks.Parallel.ForEach(source, action);
#endif
}
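A brief usage sketch for IterateWithAction (not part of the diff). Because the method runs the callback in parallel on .NET 4.0 and later, shared state inside the action should be updated thread-safely; the list contents here are made up.

using System;
using System.Collections.Generic;
using System.Threading;
using SabreTools.IO.Extensions;

class IterateDemo
{
    static void Main()
    {
        var items = new List<string> { "a.bin", "b.bin", "c.bin" };
        int processed = 0;

        // May run in parallel, so use an atomic increment for the shared counter
        items.IterateWithAction(item =>
        {
            Interlocked.Increment(ref processed);
            Console.WriteLine($"Handled {item}");
        });

        Console.WriteLine($"Processed {processed} items");
    }
}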

/// <summary>
/// Safely iterate through an enumerable, skipping any errors
/// </summary>

@@ -1,3 +1,4 @@
using System.Collections.Generic;
using System.IO;

namespace SabreTools.IO.Extensions
@@ -10,7 +11,7 @@ namespace SabreTools.IO.Extensions
/// <param name="input">Input stream to try aligning</param>
/// <param name="alignment">Number of bytes to align on</param>
/// <returns>True if the stream could be aligned, false otherwise</returns>
public static bool AlignToBoundary(this Stream? input, byte alignment)
public static bool AlignToBoundary(this Stream? input, int alignment)
{
// If the stream is invalid
if (input == null || input.Length == 0 || !input.CanRead)
@@ -30,6 +31,69 @@ namespace SabreTools.IO.Extensions
return input.Position % alignment == 0;
}

/// <summary>
/// Read a number of bytes from an offset in a stream, if possible
/// </summary>
/// <param name="input">Input stream to read from</param>
/// <param name="offset">Offset within the stream to start reading</param>
/// <param name="length">Number of bytes to read from the offset</param>
/// <param name="retainPosition">Indicates if the original position of the stream should be retained after reading</param>
/// <returns>Filled byte array on success, null on error</returns>
/// <remarks>
/// This method will return a null array if the length is greater than what is left
/// in the stream. This is different behavior than a normal stream read that would
/// attempt to read as much as possible, returning the amount of bytes read.
/// </remarks>
public static byte[]? ReadFrom(this Stream? input, long offset, int length, bool retainPosition)
{
if (input == null || !input.CanRead || !input.CanSeek)
return null;
if (offset < 0 || offset >= input.Length)
return null;
if (length < 0 || offset + length > input.Length)
return null;

// Cache the current location
long currentLocation = input.Position;

// Seek to the requested offset
long newPosition = input.SeekIfPossible(offset);
if (newPosition != offset)
return null;

// Read from the position
byte[] data = input.ReadBytes(length);

// Seek back if requested
if (retainPosition)
_ = input.SeekIfPossible(currentLocation);

// Return the read data
return data;
}

/// <summary>
/// Read string data from a Stream
/// </summary>
/// <param name="charLimit">Number of characters needed to be a valid string, default 5</param>
/// <param name="position">Position in the source to read from</param>
/// <param name="length">Length of the requested data</param>
/// <returns>String list containing the requested data, null on error</returns>
#if NET5_0_OR_GREATER
/// <remarks>This reads both Latin1 and UTF-16 strings from the input data</remarks>
#else
/// <remarks>This reads both ASCII and UTF-16 strings from the input data</remarks>
#endif
public static List<string>? ReadStringsFrom(this Stream? input, int position, int length, int charLimit = 5)
{
// Read the data as a byte array first
byte[]? data = input.ReadFrom(position, length, retainPosition: true);
if (data == null)
return null;

return data.ReadStringsFrom(charLimit);
}
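A short usage sketch for the new ReadFrom and stream-based ReadStringsFrom extensions (not part of the diff); the file name, offsets, and lengths are made up.

using System;
using System.IO;
using SabreTools.IO.Extensions;

class StreamReadDemo
{
    static void Main()
    {
        using Stream stream = File.OpenRead("sample.bin"); // hypothetical input file

        // Read 16 bytes at offset 0x40, restoring the original position afterward;
        // a null result means the requested range ran past the end of the stream
        byte[]? header = stream.ReadFrom(0x40, 16, retainPosition: true);
        if (header == null)
            Console.WriteLine("Requested range is not available");

        // Scan a 256-byte window for strings of at least 5 characters
        var strings = stream.ReadStringsFrom(position: 0, length: 256);
        strings?.ForEach(Console.WriteLine);
    }
}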

/// <summary>
/// Seek to a specific point in the stream, if possible
/// </summary>
@@ -62,5 +126,24 @@ namespace SabreTools.IO.Extensions
return -1;
}
}

/// <summary>
/// Check if a segment is valid in the stream
/// </summary>
/// <param name="input">Input stream to validate</param>
/// <param name="offset">Position in the source</param>
/// <param name="count">Length of the data to check</param>
/// <returns>True if segment could be read fully, false otherwise</returns>
public static bool SegmentValid(this Stream? input, long offset, long count)
{
if (input == null)
return false;
if (offset < 0 || offset > input.Length)
return false;
if (count < 0 || offset + count > input.Length)
return false;

return true;
}
}
}
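A small sketch showing SegmentValid as a guard before committing to a read (not part of the diff); the buffer size and window are arbitrary.

using System;
using System.IO;
using SabreTools.IO.Extensions;

class SegmentDemo
{
    static void Main()
    {
        using Stream stream = new MemoryStream(new byte[128]);

        long offset = 0x20;
        long count = 64;

        // Validate the window up front so the read cannot silently come back null
        if (stream.SegmentValid(offset, count))
        {
            byte[]? segment = stream.ReadFrom(offset, (int)count, retainPosition: false);
            Console.WriteLine($"Read {segment?.Length ?? 0} bytes");
        }
        else
        {
            Console.WriteLine("Segment is out of range");
        }
    }
}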
SabreTools.IO/Extensions/StringExtensions.cs (new file, 63 lines)
@@ -0,0 +1,63 @@
using System;

namespace SabreTools.IO.Extensions
{
public static class StringExtensions
{
/// <inheritdoc cref="string.Contains(string)"/>
public static bool OptionalContains(this string? self, string value)
=> OptionalContains(self, value, StringComparison.Ordinal);

/// <inheritdoc cref="string.Contains(string, StringComparison)"/>
public static bool OptionalContains(this string? self, string value, StringComparison comparisonType)
{
if (self == null)
return false;

#if NETFRAMEWORK || NETSTANDARD2_0
return self.Contains(value);
#else
return self.Contains(value, comparisonType);
#endif
}

/// <inheritdoc cref="string.EndsWith(string)"/>
public static bool OptionalEndsWith(this string? self, string value)
=> OptionalEndsWith(self, value, StringComparison.Ordinal);

/// <inheritdoc cref="string.EndsWith(string, StringComparison)"/>
public static bool OptionalEndsWith(this string? self, string value, StringComparison comparisonType)
{
if (self == null)
return false;

return self.EndsWith(value, comparisonType);
}

/// <inheritdoc cref="string.Equals(string)"/>
public static bool OptionalEquals(this string? self, string value)
=> OptionalEquals(self, value, StringComparison.Ordinal);

/// <inheritdoc cref="string.Equals(string, StringComparison)"/>
public static bool OptionalEquals(this string? self, string value, StringComparison comparisonType)
{
if (self == null)
return false;

return self.Equals(value, comparisonType);
}

/// <inheritdoc cref="string.StartsWith(string)"/>
public static bool OptionalStartsWith(this string? self, string value)
=> OptionalStartsWith(self, value, StringComparison.Ordinal);

/// <inheritdoc cref="string.StartsWith(string, StringComparison)"/>
public static bool OptionalStartsWith(this string? self, string value, StringComparison comparisonType)
{
if (self == null)
return false;

return self.StartsWith(value, comparisonType);
}
}
}
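A quick sketch of the null-tolerant string helpers above (not part of the diff). Each Optional* method returns false instead of throwing when the receiver is null; the sample values are made up.

using System;
using SabreTools.IO.Extensions;

class OptionalStringDemo
{
    static void Main()
    {
        string? maybeName = null;

        // Safe on null receivers
        Console.WriteLine(maybeName.OptionalStartsWith("SabreTools")); // False
        Console.WriteLine(maybeName.OptionalContains("IO"));           // False

        maybeName = "SabreTools.IO.Extensions";
        Console.WriteLine(maybeName.OptionalEndsWith(".extensions", StringComparison.OrdinalIgnoreCase)); // True
        Console.WriteLine(maybeName.OptionalEquals("sabretools.io.extensions", StringComparison.OrdinalIgnoreCase)); // True
    }
}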
@@ -8,9 +8,10 @@
<LangVersion>latest</LangVersion>
<NoWarn>CS0618</NoWarn>
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<SymbolPackageFormat>snupkg</SymbolPackageFormat>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.7.0</Version>
<Version>1.7.4</Version>

<!-- Package Properties -->
<Authors>Matt Nadareski</Authors>
@@ -30,7 +31,7 @@

<ItemGroup>
<PackageReference Include="SabreTools.Matching" Version="1.6.0" />
<PackageReference Include="SabreTools.Models" Version="1.6.0" />
<PackageReference Include="SabreTools.Hashing" Version="1.5.0" />
</ItemGroup>

</Project>
SabreTools.IO/Streams/BufferedStream.cs (new file, 77 lines)
@@ -0,0 +1,77 @@
using System.IO;

namespace SabreTools.IO.Streams
{
/// <summary>
/// Buffered stream that reads in blocks
/// </summary>
/// <remarks>Not a true <see cref="Stream"/> implementation yet</remarks>
public class BufferedStream
{
/// <summary>
/// Source stream for populating the buffer
/// </summary>
private readonly Stream _source;

/// <summary>
/// Internal buffer to read
/// </summary>
private readonly byte[] _buffer = new byte[2048];

/// <summary>
/// Current pointer into the buffer
/// </summary>
private int _bufferPtr = 0;

/// <summary>
/// Represents the number of available bytes
/// </summary>
private int _available = -1;

/// <summary>
/// Create a new buffered stream
/// </summary>
public BufferedStream(Stream source)
{
_source = source;
}

/// <summary>
/// Read the next byte from the buffer, if possible
/// </summary>
public byte? ReadNextByte()
{
// Ensure the buffer first
if (!EnsureBuffer())
return null;

// Return the next available value
return _buffer[_bufferPtr++];
}

/// <summary>
/// Ensure the buffer has data to read
/// </summary>
private bool EnsureBuffer()
{
// Force an update if in the initial state
if (_available == -1)
{
_available = _source.Read(_buffer, 0, _buffer.Length);
_bufferPtr = 0;
return _available != 0;
}

// If the pointer is out of range
if (_bufferPtr >= _available)
{
_available = _source.Read(_buffer, 0, _buffer.Length);
_bufferPtr = 0;
return _available != 0;
}

// Otherwise, assume data is available
return true;
}
}
}
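A short sketch of reading through the block-buffered wrapper above (not part of the diff). The type is fully qualified to avoid colliding with System.IO.BufferedStream; the sample bytes are arbitrary.

using System;
using System.IO;

class BufferedStreamDemo
{
    static void Main()
    {
        using var source = new MemoryStream(new byte[] { 0x01, 0x02, 0x03 });
        var buffered = new SabreTools.IO.Streams.BufferedStream(source);

        // ReadNextByte returns null once the source is exhausted
        byte? next;
        while ((next = buffered.ReadNextByte()) != null)
            Console.WriteLine($"0x{next.Value:X2}");
    }
}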
@@ -39,7 +39,7 @@ namespace SabreTools.IO.Streams

#endregion

#region Internal State
#region Instance Variables

/// <summary>
/// Internal collection of streams to read from
@@ -58,6 +58,8 @@ namespace SabreTools.IO.Streams

#endregion

#region Constructors

/// <summary>
/// Create a new, empty ReadOnlyCompositeStream
/// </summary>
@@ -109,7 +111,7 @@ namespace SabreTools.IO.Streams
/// </summary>
public ReadOnlyCompositeStream(IEnumerable<Stream> streams)
{
_streams = new List<Stream>(streams);
_streams = [.. streams];
_length = 0;
_position = 0;

@@ -123,6 +125,10 @@ namespace SabreTools.IO.Streams
}
}

#endregion

#region Data

/// <summary>
/// Add a new stream to the collection
/// </summary>
@@ -138,10 +144,13 @@ namespace SabreTools.IO.Streams
return true;
}

#endregion

#region Stream Implementations

/// <inheritdoc/>
public override void Flush() => throw new NotImplementedException();
public override void Flush()
=> throw new NotImplementedException();

/// <inheritdoc/>
public override int Read(byte[] buffer, int offset, int count)
@@ -198,27 +207,22 @@ namespace SabreTools.IO.Streams
// Handle the "seek"
switch (origin)
{
case SeekOrigin.Begin: _position = offset; break;
case SeekOrigin.Current: _position += offset; break;
case SeekOrigin.End: _position = _length - offset - 1; break;
case SeekOrigin.Begin: Position = offset; break;
case SeekOrigin.Current: Position += offset; break;
case SeekOrigin.End: Position = _length + offset - 1; break;
default: throw new ArgumentException($"Invalid value for {nameof(origin)}");
}
;

// Handle out-of-bounds seeks
if (_position < 0)
_position = 0;
else if (_position >= _length)
_position = _length - 1;

return _position;
return Position;
}

/// <inheritdoc/>
public override void SetLength(long value) => throw new NotImplementedException();
public override void SetLength(long value)
=> throw new NotImplementedException();

/// <inheritdoc/>
public override void Write(byte[] buffer, int offset, int count) => throw new NotImplementedException();
public override void Write(byte[] buffer, int offset, int count)
=> throw new NotImplementedException();

#endregion

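A brief sketch of constructing a ReadOnlyCompositeStream from multiple sources (not part of the diff). It uses only the constructor and Read override touched above; whether a single Read call spans the boundary between underlying streams depends on the implementation, so treat the printed output as illustrative.

using System;
using System.IO;
using SabreTools.IO.Streams;

class CompositeDemo
{
    static void Main()
    {
        // Two backing streams presented as one continuous, read-only view
        var first = new MemoryStream(new byte[] { 0x01, 0x02 });
        var second = new MemoryStream(new byte[] { 0x03, 0x04 });
        var composite = new ReadOnlyCompositeStream(new Stream[] { first, second });

        byte[] buffer = new byte[4];
        int read = composite.Read(buffer, 0, buffer.Length);
        Console.WriteLine($"Read {read} bytes: {BitConverter.ToString(buffer, 0, read)}");
    }
}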
SabreTools.IO/Streams/ViewStream.cs (new file, 254 lines)
@@ -0,0 +1,254 @@
using System;
using System.IO;

namespace SabreTools.IO.Streams
{
/// <summary>
/// Stream representing a view into a source
/// </summary>
public class ViewStream : Stream
{
#region Properties

/// <inheritdoc/>
public override bool CanRead => true;

/// <inheritdoc/>
public override bool CanSeek => _source.CanSeek;

/// <inheritdoc/>
public override bool CanWrite => false;

/// <summary>
/// Filename from the source, if possible
/// </summary>
public string? Filename
{
get
{
// A subset of streams have a filename
if (_source is FileStream fs)
return fs.Name;
else if (_source is ViewStream vs)
return vs.Filename;

return null;
}
}

/// <inheritdoc/>
public override long Length => _length;

/// <inheritdoc/>
public override long Position
{
get
{
// Handle 0-length sources
if (_length <= 0)
return 0;

return _source.Position - _initialPosition;
}
set
{
// Handle 0-length sources
if (_length <= 0)
{
_source.Position = 0;
return;
}

long position = value;

// Handle out-of-bounds seeks
if (position < 0)
position = 0;
else if (position >= _length)
position = _length - 1;

_source.Position = _initialPosition + position;
}
}

#endregion

#region Instance Variables

/// <summary>
/// Initial position within the underlying data
/// </summary>
protected long _initialPosition;

/// <summary>
/// Usable length in the underlying data
/// </summary>
protected long _length;

/// <summary>
/// Source data
/// </summary>
protected Stream _source;

/// <summary>
/// Lock object for reading from the source
/// </summary>
private readonly object _sourceLock = new();

#endregion

#region Constructors

/// <summary>
/// Construct a new ViewStream from a Stream
/// </summary>
public ViewStream(Stream data, long offset)
{
if (!data.CanRead)
throw new ArgumentException(nameof(data));
if (offset < 0 || offset > data.Length)
throw new ArgumentOutOfRangeException(nameof(offset));

_source = data;
_initialPosition = offset;
_length = data.Length - offset;

_source.Seek(_initialPosition, SeekOrigin.Begin);
}

/// <summary>
/// Construct a new ViewStream from a Stream
/// </summary>
public ViewStream(Stream data, long offset, long length)
{
if (!data.CanRead)
throw new ArgumentException(nameof(data));
if (offset < 0 || offset > data.Length)
throw new ArgumentOutOfRangeException(nameof(offset));
if (length < 0 || offset + length > data.Length)
throw new ArgumentOutOfRangeException(nameof(length));

_source = data;
_initialPosition = offset;
_length = length;

_source.Seek(_initialPosition, SeekOrigin.Begin);
}

/// <summary>
/// Construct a new ViewStream from a byte array
/// </summary>
public ViewStream(byte[] data, long offset)
{
if (offset < 0 || offset > data.Length)
throw new ArgumentOutOfRangeException(nameof(offset));

long length = data.Length - offset;
_source = new MemoryStream(data, (int)offset, (int)length);
_initialPosition = 0;
_length = length;

_source.Seek(_initialPosition, SeekOrigin.Begin);
}

/// <summary>
/// Construct a new ViewStream from a byte array
/// </summary>
public ViewStream(byte[] data, long offset, long length)
{
if (offset < 0 || offset > data.Length)
throw new ArgumentOutOfRangeException(nameof(offset));
if (length < 0 || offset + length > data.Length)
throw new ArgumentOutOfRangeException(nameof(length));

_source = new MemoryStream(data, (int)offset, (int)length);
_initialPosition = 0;
_length = length;

_source.Seek(_initialPosition, SeekOrigin.Begin);
}

#endregion

#region Data

/// <summary>
/// Check if a data segment is valid in the data source
/// </summary>
/// <param name="offset">Position in the source</param>
/// <param name="count">Length of the data to check</param>
/// <returns>True if the positional data is valid, false otherwise</returns>
public bool SegmentValid(long offset, long count)
{
if (offset < 0 || offset > Length)
return false;
if (count < 0 || offset + count > Length)
return false;

return true;
}

#endregion

#region Stream Implementations

/// <inheritdoc/>
public override void Flush()
=> throw new NotImplementedException();

/// <inheritdoc/>
public override int Read(byte[] buffer, int offset, int count)
{
// Invalid cases always return 0
if (buffer.Length == 0)
return 0;
if (offset < 0 || offset >= buffer.Length)
return 0;
if (count < 0 || offset + count > buffer.Length)
return 0;

// Short-circuit 0-byte reads
if (count == 0)
return 0;

try
{
lock (_sourceLock)
{
return _source.Read(buffer, offset, count);
}

}
catch
{
// Absorb the error
return 0;
}
}

/// <inheritdoc/>
public override long Seek(long offset, SeekOrigin origin)
{
// Handle the "seek"
switch (origin)
{
case SeekOrigin.Begin: Position = offset; break;
case SeekOrigin.Current: Position += offset; break;
case SeekOrigin.End: Position = _length + offset - 1; break;
default: throw new ArgumentException($"Invalid value for {nameof(origin)}");
}

return Position;
}

/// <inheritdoc/>
public override void SetLength(long value)
=> throw new NotImplementedException();

/// <inheritdoc/>
public override void Write(byte[] buffer, int offset, int count)
=> throw new NotImplementedException();

#endregion
}
}
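A short usage sketch for ViewStream (not part of the diff), exposing a window of a byte array as its own read-only stream; the backing data and offsets are made up.

using System;
using System.IO;
using SabreTools.IO.Streams;

class ViewStreamDemo
{
    static void Main()
    {
        byte[] backing = new byte[256];
        for (int i = 0; i < backing.Length; i++)
            backing[i] = (byte)i;

        // Expose bytes 0x10..0x2F as their own stream
        var view = new ViewStream(backing, offset: 0x10, length: 0x20);

        byte[] window = new byte[8];
        int read = view.Read(window, 0, window.Length);
        Console.WriteLine($"Read {read} bytes starting with 0x{window[0]:X2}"); // expect 0x10

        // Positions are relative to the start of the view, not the source
        view.Seek(0, SeekOrigin.Begin);
        Console.WriteLine($"Position after rewind: {view.Position}");
    }
}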