Mirror of https://github.com/SabreTools/SabreTools.IO.git (synced 2026-02-08 13:49:55 +00:00)
Compare commits
14 Commits
| Author | SHA1 | Date |
|---|---|---|
| | b81d3314ea | |
| | 4a3ffa5f90 | |
| | a20c7529d6 | |
| | baea5cb0d7 | |
| | 659674dd4a | |
| | 5c199a143b | |
| | 99ec814808 | |
| | ea1f02798c | |
| | e3d4cc5e45 | |
| | c98eb5c42a | |
| | d0392be2d8 | |
| | 8761629828 | |
| | a3b258dfeb | |
| | f7505effa1 | |
@@ -1,5 +1,6 @@
using System;
using System.Linq;
using System.Text;
using SabreTools.IO.Extensions;
using Xunit;

@@ -7,7 +8,7 @@ namespace SabreTools.IO.Test.Extensions
{
    public class ByteArrayExtensionsTests
    {
        #region Is Null or Empty
        #region IsNullOrEmpty

        [Fact]
        public void IsNullOrEmpty_Null_True()
@@ -35,7 +36,7 @@ namespace SabreTools.IO.Test.Extensions

        #endregion

        #region To Hex String
        #region ToHexString

        [Fact]
        public void ToHexString_Null()
@@ -58,7 +59,7 @@ namespace SabreTools.IO.Test.Extensions

        #endregion

        #region From Hex String
        #region FromHexString

        [Fact]
        public void FromHexString_Null()
@@ -88,5 +89,124 @@ namespace SabreTools.IO.Test.Extensions
        }

        #endregion

        #region ReadStringsWithEncoding

        [Fact]
        public void ReadStringsWithEncoding_Null_Empty()
        {
            byte[]? bytes = null;
            var actual = bytes.ReadStringsWithEncoding(1, Encoding.ASCII);
            Assert.Empty(actual);
        }

        [Fact]
        public void ReadStringsWithEncoding_Empty_Empty()
        {
            byte[]? bytes = [];
            var actual = bytes.ReadStringsWithEncoding(1, Encoding.ASCII);
            Assert.Empty(actual);
        }

        [Theory]
        [InlineData(-1)]
        [InlineData(0)]
        [InlineData(2048)]
        public void ReadStringsWithEncoding_InvalidLimit_Empty(int charLimit)
        {
            byte[]? bytes = new byte[1024];
            var actual = bytes.ReadStringsWithEncoding(charLimit, Encoding.ASCII);
            Assert.Empty(actual);
        }

        [Fact]
        public void ReadStringsWithEncoding_NoValidStrings_Empty()
        {
            byte[]? bytes = new byte[1024];
            var actual = bytes.ReadStringsWithEncoding(5, Encoding.ASCII);
            Assert.Empty(actual);
        }

        [Fact]
        public void ReadStringsWithEncoding_AsciiStrings_Filled()
        {
            byte[]? bytes =
            [
                .. Encoding.ASCII.GetBytes("TEST"),
                .. new byte[] { 0x00 },
                .. Encoding.ASCII.GetBytes("TWO"),
                .. new byte[] { 0x00 },
                .. Encoding.ASCII.GetBytes("DATA"),
                .. new byte[] { 0x00 },
            ];
            var actual = bytes.ReadStringsWithEncoding(4, Encoding.ASCII);
            Assert.Equal(2, actual.Count);
        }

        [Fact]
        public void ReadStringsWithEncoding_Latin1_Filled()
        {
            byte[]? bytes =
            [
                .. Encoding.Latin1.GetBytes("TEST"),
                .. new byte[] { 0x00 },
                .. Encoding.Latin1.GetBytes("TWO"),
                .. new byte[] { 0x00 },
                .. Encoding.Latin1.GetBytes("DATA"),
                .. new byte[] { 0x00 },
            ];
            var actual = bytes.ReadStringsWithEncoding(4, Encoding.Latin1);
            Assert.Equal(2, actual.Count);
        }

        [Fact]
        public void ReadStringsWithEncoding_UTF8_Filled()
        {
            byte[]? bytes =
            [
                .. Encoding.UTF8.GetBytes("TEST"),
                .. new byte[] { 0x00 },
                .. Encoding.UTF8.GetBytes("TWO"),
                .. new byte[] { 0x00 },
                .. Encoding.UTF8.GetBytes("DATA"),
                .. new byte[] { 0x00 },
            ];
            var actual = bytes.ReadStringsWithEncoding(4, Encoding.UTF8);
            Assert.Equal(2, actual.Count);
        }

        [Fact]
        public void ReadStringsWithEncoding_UTF16_Filled()
        {
            byte[]? bytes =
            [
                .. Encoding.Unicode.GetBytes("TEST"),
                .. new byte[] { 0x00 },
                .. Encoding.Unicode.GetBytes("TWO"),
                .. new byte[] { 0x00 },
                .. Encoding.Unicode.GetBytes("DATA"),
                .. new byte[] { 0x00 },
            ];
            var actual = bytes.ReadStringsWithEncoding(4, Encoding.Unicode);
            Assert.Equal(2, actual.Count);
        }

        [Fact]
        public void ReadStringsWithEncoding_UTF32_Filled()
        {
            byte[]? bytes =
            [
                .. Encoding.UTF32.GetBytes("TEST"),
                .. new byte[] { 0x00 },
                .. Encoding.UTF32.GetBytes("TWO"),
                .. new byte[] { 0x00 },
                .. Encoding.UTF32.GetBytes("DATA"),
                .. new byte[] { 0x00 },
            ];
            var actual = bytes.ReadStringsWithEncoding(4, Encoding.UTF32);
            Assert.Equal(2, actual.Count);
        }

        #endregion
    }
}
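The tests above pin down the contract of `ReadStringsWithEncoding`: null or empty input and out-of-range character limits produce an empty result, and only strings of at least the requested character count are returned. A minimal usage sketch under those assumptions; the driver class and buffer contents are illustrative and not part of the repository:

```csharp
using System;
using System.Text;
using SabreTools.IO.Extensions;

// Illustrative driver; not part of the repository.
internal class ReadStringsSketch
{
    private static void Main()
    {
        // Two 4-character strings and one 3-character string, each null-terminated,
        // mirroring the fixtures used by the tests above.
        byte[] buffer =
        [
            .. Encoding.ASCII.GetBytes("TEST"), 0x00,
            .. Encoding.ASCII.GetBytes("TWO"), 0x00,
            .. Encoding.ASCII.GetBytes("DATA"), 0x00,
        ];

        // With a character limit of 4, only "TEST" and "DATA" are expected back.
        var strings = buffer.ReadStringsWithEncoding(4, Encoding.ASCII);
        foreach (string value in strings)
            Console.WriteLine(value);
    }
}
```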
@@ -1,5 +1,6 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO.Extensions;
|
||||
using Xunit;
|
||||
|
||||
@@ -7,7 +8,7 @@ namespace SabreTools.IO.Test.Extensions
|
||||
{
|
||||
public class StreamExtensionsTests
|
||||
{
|
||||
#region Align to Boundary
|
||||
#region AlignToBoundary
|
||||
|
||||
[Fact]
|
||||
public void AlignToBoundary_Null_False()
|
||||
@@ -62,7 +63,221 @@ namespace SabreTools.IO.Test.Extensions
|
||||
|
||||
#endregion
|
||||
|
||||
#region Seek If Possible
|
||||
#region ReadFrom
|
||||
|
||||
[Theory]
|
||||
[InlineData(true)]
|
||||
[InlineData(false)]
|
||||
public void ReadFrom_Null_Null(bool retainPosition)
|
||||
{
|
||||
Stream? stream = null;
|
||||
byte[]? actual = stream.ReadFrom(0, 1, retainPosition);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(true)]
|
||||
[InlineData(false)]
|
||||
public void ReadFrom_NonSeekable_Null(bool retainPosition)
|
||||
{
|
||||
Stream? stream = new NonSeekableStream();
|
||||
byte[]? actual = stream.ReadFrom(0, 1, retainPosition);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(true)]
|
||||
[InlineData(false)]
|
||||
public void ReadFrom_Empty_Null(bool retainPosition)
|
||||
{
|
||||
Stream? stream = new MemoryStream([]);
|
||||
byte[]? actual = stream.ReadFrom(0, 1, retainPosition);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(-1, true)]
|
||||
[InlineData(2048, true)]
|
||||
[InlineData(-1, false)]
|
||||
[InlineData(2048, false)]
|
||||
public void ReadFrom_InvalidOffset_Null(long offset, bool retainPosition)
|
||||
{
|
||||
Stream? stream = new MemoryStream(new byte[1024]);
|
||||
byte[]? actual = stream.ReadFrom(offset, 1, retainPosition);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(-1, true)]
|
||||
[InlineData(2048, true)]
|
||||
[InlineData(-1, false)]
|
||||
[InlineData(2048, false)]
|
||||
public void ReadFrom_InvalidLength_Null(int length, bool retainPosition)
|
||||
{
|
||||
Stream? stream = new MemoryStream(new byte[1024]);
|
||||
byte[]? actual = stream.ReadFrom(0, length, retainPosition);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(true)]
|
||||
[InlineData(false)]
|
||||
public void ReadFrom_Valid_Filled(bool retainPosition)
|
||||
{
|
||||
Stream? stream = new MemoryStream(new byte[1024]);
|
||||
byte[]? actual = stream.ReadFrom(0, 512, retainPosition);
|
||||
|
||||
Assert.NotNull(actual);
|
||||
Assert.Equal(512, actual.Length);
|
||||
|
||||
if (retainPosition)
|
||||
Assert.Equal(0, stream.Position);
|
||||
else
|
||||
Assert.Equal(512, stream.Position);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region ReadStringsFrom
|
||||
|
||||
[Fact]
|
||||
public void ReadStringsFrom_Null_Null()
|
||||
{
|
||||
Stream? stream = null;
|
||||
var actual = stream.ReadStringsFrom(0, 1, 3);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ReadStringsFrom_NonSeekable_Null()
|
||||
{
|
||||
Stream? stream = new NonSeekableStream();
|
||||
var actual = stream.ReadStringsFrom(0, 1, 3);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ReadStringsFrom_Empty_Null()
|
||||
{
|
||||
Stream? stream = new MemoryStream([]);
|
||||
var actual = stream.ReadStringsFrom(0, 1, 3);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(-1)]
|
||||
[InlineData(0)]
|
||||
[InlineData(2048)]
|
||||
public void ReadStringsFrom_InvalidLimit_Empty(int charLimit)
|
||||
{
|
||||
Stream? stream = new MemoryStream(new byte[1024]);
|
||||
var actual = stream.ReadStringsFrom(0, 1024, charLimit);
|
||||
Assert.NotNull(actual);
|
||||
Assert.Empty(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ReadStringsFrom_NoValidStrings_Empty()
|
||||
{
|
||||
Stream? stream = new MemoryStream(new byte[1024]);
|
||||
var actual = stream.ReadStringsFrom(0, 1024, 4);
|
||||
Assert.NotNull(actual);
|
||||
Assert.Empty(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ReadStringsFrom_AsciiStrings_Filled()
|
||||
{
|
||||
byte[]? bytes =
|
||||
[
|
||||
.. Encoding.ASCII.GetBytes("TEST"),
|
||||
.. new byte[] { 0x00 },
|
||||
.. Encoding.ASCII.GetBytes("TWO"),
|
||||
.. new byte[] { 0x00 },
|
||||
.. Encoding.ASCII.GetBytes("DATA"),
|
||||
.. new byte[] { 0x00 },
|
||||
];
|
||||
Stream? stream = new MemoryStream(bytes);
|
||||
var actual = stream.ReadStringsFrom(0, bytes.Length, 4);
|
||||
Assert.NotNull(actual);
|
||||
|
||||
// ASCII and UTF-8 are identical for the character range
|
||||
Assert.Equal(4, actual.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ReadStringsFrom_UTF8_Filled()
|
||||
{
|
||||
byte[]? bytes =
|
||||
[
|
||||
.. Encoding.UTF8.GetBytes("TEST"),
|
||||
.. new byte[] { 0x00 },
|
||||
.. Encoding.UTF8.GetBytes("TWO"),
|
||||
.. new byte[] { 0x00 },
|
||||
.. Encoding.UTF8.GetBytes("DATA"),
|
||||
.. new byte[] { 0x00 },
|
||||
];
|
||||
Stream? stream = new MemoryStream(bytes);
|
||||
var actual = stream.ReadStringsFrom(0, bytes.Length, 4);
|
||||
Assert.NotNull(actual);
|
||||
|
||||
// ASCII and UTF-8 are identical for the character range
|
||||
Assert.Equal(4, actual.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ReadStringsFrom_UTF16_Filled()
|
||||
{
|
||||
byte[]? bytes =
|
||||
[
|
||||
.. Encoding.Unicode.GetBytes("TEST"),
|
||||
.. new byte[] { 0x00 },
|
||||
.. Encoding.Unicode.GetBytes("TWO"),
|
||||
.. new byte[] { 0x00 },
|
||||
.. Encoding.Unicode.GetBytes("DATA"),
|
||||
.. new byte[] { 0x00 },
|
||||
];
|
||||
Stream? stream = new MemoryStream(bytes);
|
||||
var actual = stream.ReadStringsFrom(0, bytes.Length, 4);
|
||||
Assert.NotNull(actual);
|
||||
Assert.Equal(2, actual.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ReadStringsFrom_Mixed_Filled()
|
||||
{
|
||||
byte[]? bytes =
|
||||
[
|
||||
.. Encoding.ASCII.GetBytes("TEST1"),
|
||||
.. new byte[] { 0x00 },
|
||||
.. Encoding.ASCII.GetBytes("TWO1"),
|
||||
.. new byte[] { 0x00 },
|
||||
.. Encoding.ASCII.GetBytes("DATA1"),
|
||||
.. new byte[] { 0x00 },
|
||||
.. Encoding.UTF8.GetBytes("TEST2"),
|
||||
.. new byte[] { 0x00 },
|
||||
.. Encoding.UTF8.GetBytes("TWO2"),
|
||||
.. new byte[] { 0x00 },
|
||||
.. Encoding.UTF8.GetBytes("DATA2"),
|
||||
.. new byte[] { 0x00 },
|
||||
.. Encoding.Unicode.GetBytes("TEST3"),
|
||||
.. new byte[] { 0x00 },
|
||||
.. Encoding.Unicode.GetBytes("TWO3"),
|
||||
.. new byte[] { 0x00 },
|
||||
.. Encoding.Unicode.GetBytes("DATA3"),
|
||||
.. new byte[] { 0x00 },
|
||||
];
|
||||
Stream? stream = new MemoryStream(bytes);
|
||||
var actual = stream.ReadStringsFrom(0, bytes.Length, 5);
|
||||
Assert.NotNull(actual);
|
||||
|
||||
// ASCII and UTF-8 are identical for the character range
|
||||
Assert.Equal(10, actual.Count);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region SeekIfPossible
|
||||
|
||||
[Fact]
|
||||
public void SeekIfPossible_NonSeekable_CurrentPosition()
|
||||
@@ -106,6 +321,46 @@ namespace SabreTools.IO.Test.Extensions
|
||||
|
||||
#endregion
|
||||
|
||||
#region SegmentValid
|
||||
|
||||
[Fact]
|
||||
public void SegmentValid_Null_False()
|
||||
{
|
||||
Stream? stream = null;
|
||||
bool actual = stream.SegmentValid(0, 1);
|
||||
Assert.False(actual);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(-1)]
|
||||
[InlineData(2048)]
|
||||
public void SegmentValid_InvalidOffset_False(long offset)
|
||||
{
|
||||
Stream? stream = new MemoryStream(new byte[1024]);
|
||||
bool actual = stream.SegmentValid(offset, 1);
|
||||
Assert.False(actual);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(-1)]
|
||||
[InlineData(2048)]
|
||||
public void SegmentValid_InvalidLength_False(int length)
|
||||
{
|
||||
Stream? stream = new MemoryStream(new byte[1024]);
|
||||
bool actual = stream.SegmentValid(0, length);
|
||||
Assert.False(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SegmentValid_ValidSegment_True()
|
||||
{
|
||||
Stream? stream = new MemoryStream(new byte[1024]);
|
||||
bool actual = stream.SegmentValid(0, 1024);
|
||||
Assert.True(actual);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
/// <summary>
|
||||
/// Represents a hidden non-seekable stream
|
||||
/// </summary>
|
||||
|
||||
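The new `ReadFrom`, `ReadStringsFrom`, and `SegmentValid` tests describe seek-based reads that return null for unusable streams and reject out-of-range offsets, lengths, and limits. A small sketch assuming only that behaviour; the driver class is illustrative, and the final boolean of `ReadFrom` is treated as the retain-position flag exercised by the tests:

```csharp
using System;
using System.IO;
using SabreTools.IO.Extensions;

// Illustrative driver; not part of the repository.
internal class StreamReadSketch
{
    private static void Main()
    {
        using Stream stream = new MemoryStream(new byte[1024]);

        // Guard the read with SegmentValid so out-of-range offsets are rejected up front.
        if (stream.SegmentValid(0, 512))
        {
            // true = restore the original position after the read (per the tests above)
            byte[]? block = stream.ReadFrom(0, 512, true);
            Console.WriteLine($"Read {block?.Length ?? 0} bytes; position is now {stream.Position}");
        }

        // Scan a region for strings of at least 4 characters; null means the stream
        // itself was unusable, an empty list means nothing qualified.
        var strings = stream.ReadStringsFrom(0, 1024, 4);
        Console.WriteLine($"Found {strings?.Count ?? 0} strings");
    }
}
```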
@@ -26,7 +26,7 @@
    </PackageReference>
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" />
    <PackageReference Include="xunit" Version="2.9.3" />
    <PackageReference Include="xunit.runner.visualstudio" Version="3.1.3">
    <PackageReference Include="xunit.runner.visualstudio" Version="3.1.4">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
@@ -1,3 +1,4 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using SabreTools.IO.Streams;
|
||||
@@ -7,8 +8,10 @@ namespace SabreTools.IO.Test.Streams
|
||||
{
|
||||
public class ReadOnlyCompositeStreamTests
|
||||
{
|
||||
#region Constructor
|
||||
|
||||
[Fact]
|
||||
public void DefaultConstructorTest()
|
||||
public void Constructor_Default()
|
||||
{
|
||||
var stream = new ReadOnlyCompositeStream();
|
||||
Assert.Equal(0, stream.Length);
|
||||
@@ -16,7 +19,7 @@ namespace SabreTools.IO.Test.Streams
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EmptyArrayConstructorTest()
|
||||
public void Constructor_EmptyArray()
|
||||
{
|
||||
Stream[] arr = [new MemoryStream()];
|
||||
var stream = new ReadOnlyCompositeStream(arr);
|
||||
@@ -25,9 +28,8 @@ namespace SabreTools.IO.Test.Streams
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EmptyEnumerableConstructorTest()
|
||||
public void Constructor_EmptyEnumerable()
|
||||
{
|
||||
// Empty enumerable constructor
|
||||
List<Stream> list = [new MemoryStream()];
|
||||
var stream = new ReadOnlyCompositeStream(list);
|
||||
Assert.Equal(0, stream.Length);
|
||||
@@ -35,7 +37,7 @@ namespace SabreTools.IO.Test.Streams
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SingleStreamConstructorTest()
|
||||
public void Constructor_SingleStream()
|
||||
{
|
||||
var stream = new ReadOnlyCompositeStream(new MemoryStream(new byte[1024]));
|
||||
Assert.Equal(1024, stream.Length);
|
||||
@@ -43,7 +45,7 @@ namespace SabreTools.IO.Test.Streams
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void FilledArrayConstructorTest()
|
||||
public void Constructor_FilledArray()
|
||||
{
|
||||
Stream[] arr = [new MemoryStream(new byte[1024]), new MemoryStream(new byte[1024])];
|
||||
var stream = new ReadOnlyCompositeStream(arr);
|
||||
@@ -52,7 +54,7 @@ namespace SabreTools.IO.Test.Streams
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void FilledEnumerableConstructorTest()
|
||||
public void Constructor_FilledEnumerable()
|
||||
{
|
||||
List<Stream> list = [new MemoryStream(new byte[1024]), new MemoryStream(new byte[1024])];
|
||||
var stream = new ReadOnlyCompositeStream(list);
|
||||
@@ -60,6 +62,10 @@ namespace SabreTools.IO.Test.Streams
|
||||
Assert.Equal(0, stream.Position);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region AddStream
|
||||
|
||||
[Fact]
|
||||
public void AddStreamTest()
|
||||
{
|
||||
@@ -70,10 +76,18 @@ namespace SabreTools.IO.Test.Streams
|
||||
stream.AddStream(new MemoryStream(new byte[1024]));
|
||||
Assert.Equal(1024, stream.Length);
|
||||
Assert.Equal(0, stream.Position);
|
||||
|
||||
stream.AddStream(new MemoryStream([]));
|
||||
Assert.Equal(1024, stream.Length);
|
||||
Assert.Equal(0, stream.Position);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Read
|
||||
|
||||
[Fact]
|
||||
public void EmptyStreamReadTest()
|
||||
public void Read_EmptyStream()
|
||||
{
|
||||
var stream = new ReadOnlyCompositeStream();
|
||||
|
||||
@@ -84,7 +98,7 @@ namespace SabreTools.IO.Test.Streams
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SingleStreamReadTest()
|
||||
public void Read_SingleStream()
|
||||
{
|
||||
Stream[] arr = [new MemoryStream(new byte[1024])];
|
||||
var stream = new ReadOnlyCompositeStream(arr);
|
||||
@@ -96,7 +110,7 @@ namespace SabreTools.IO.Test.Streams
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MultipleStreamSingleContainedReadTest()
|
||||
public void Read_MultipleStream_SingleContained()
|
||||
{
|
||||
Stream[] arr = [new MemoryStream(new byte[1024]), new MemoryStream(new byte[1024])];
|
||||
var stream = new ReadOnlyCompositeStream(arr);
|
||||
@@ -108,7 +122,7 @@ namespace SabreTools.IO.Test.Streams
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MultipleStreamMultipleContainedReadTest()
|
||||
public void Read_MultipleStream_MultipleContained()
|
||||
{
|
||||
Stream[] arr = [new MemoryStream(new byte[256]), new MemoryStream(new byte[256])];
|
||||
var stream = new ReadOnlyCompositeStream(arr);
|
||||
@@ -120,7 +134,7 @@ namespace SabreTools.IO.Test.Streams
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SingleStreamExtraReadTest()
|
||||
public void Read_SingleStream_Extra()
|
||||
{
|
||||
Stream[] arr = [new MemoryStream(new byte[256])];
|
||||
var stream = new ReadOnlyCompositeStream(arr);
|
||||
@@ -132,7 +146,7 @@ namespace SabreTools.IO.Test.Streams
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MultipleStreamExtraReadTest()
|
||||
public void Read_MultipleStream_Extra()
|
||||
{
|
||||
Stream[] arr = [new MemoryStream(new byte[128]), new MemoryStream(new byte[128])];
|
||||
var stream = new ReadOnlyCompositeStream(arr);
|
||||
@@ -142,5 +156,32 @@ namespace SabreTools.IO.Test.Streams
|
||||
|
||||
Assert.Equal(256, read);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Unimplemented
|
||||
|
||||
[Fact]
|
||||
public void Flush_Throws()
|
||||
{
|
||||
var stream = new ReadOnlyCompositeStream();
|
||||
Assert.Throws<NotImplementedException>(() => stream.Flush());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SetLength_Throws()
|
||||
{
|
||||
var stream = new ReadOnlyCompositeStream();
|
||||
Assert.Throws<NotImplementedException>(() => stream.SetLength(0));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Write_Throws()
|
||||
{
|
||||
var stream = new ReadOnlyCompositeStream();
|
||||
Assert.Throws<NotImplementedException>(() => stream.Write([], 0, 0));
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
|
||||
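The composite stream exercised above presents several backing streams as one read-only stream, with reads that can cross the boundary between parts and write-side members left unimplemented. A brief sketch under those assumptions; the driver class is illustrative:

```csharp
using System;
using System.IO;
using SabreTools.IO.Streams;

// Illustrative driver; not part of the repository.
internal class CompositeSketch
{
    private static void Main()
    {
        // Two 256-byte parts presented as one 512-byte read-only stream.
        Stream[] parts = [new MemoryStream(new byte[256]), new MemoryStream(new byte[256])];
        var composite = new ReadOnlyCompositeStream(parts);

        // A single read can cross the boundary between the underlying streams.
        byte[] buffer = new byte[512];
        int read = composite.Read(buffer, 0, buffer.Length);
        Console.WriteLine($"Read {read} of {composite.Length} bytes");

        // Flush, SetLength, and Write are intentionally unimplemented (see the tests above).
    }
}
```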
SabreTools.IO.Test/Streams/ViewStreamTests.cs (new file, 419 lines)
@@ -0,0 +1,419 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using SabreTools.IO.Extensions;
|
||||
using SabreTools.IO.Streams;
|
||||
using Xunit;
|
||||
|
||||
namespace SabreTools.IO.Test.Streams
|
||||
{
|
||||
public class ViewStreamTests
|
||||
{
|
||||
#region Constructor
|
||||
|
||||
[Theory]
|
||||
[InlineData(0, 0, 0)]
|
||||
[InlineData(1024, 0, 1024)]
|
||||
[InlineData(1024, 256, 768)]
|
||||
public void Constructor_Array(int size, long offset, long expectedLength)
|
||||
{
|
||||
byte[] data = new byte[size];
|
||||
var stream = new ViewStream(data, offset);
|
||||
Assert.Equal(expectedLength, stream.Length);
|
||||
Assert.Equal(0, stream.Position);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(0, 0, 0, 0)]
|
||||
[InlineData(1024, 0, 1024, 1024)]
|
||||
[InlineData(1024, 256, 512, 512)]
|
||||
public void Constructor_Array_Length(int size, long offset, long length, long expectedLength)
|
||||
{
|
||||
byte[] data = new byte[size];
|
||||
var stream = new ViewStream(data, offset, length);
|
||||
Assert.Equal(expectedLength, stream.Length);
|
||||
Assert.Equal(0, stream.Position);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(0, -1, 0)]
|
||||
[InlineData(0, 2048, 0)]
|
||||
[InlineData(1024, -1, 1024)]
|
||||
[InlineData(1024, 2048, 1024)]
|
||||
[InlineData(1024, -1, 512)]
|
||||
[InlineData(1024, 2048, 512)]
|
||||
public void Constructor_Array_InvalidOffset(int size, long offset, long length)
|
||||
{
|
||||
byte[] data = new byte[size];
|
||||
Assert.Throws<ArgumentOutOfRangeException>(() => _ = new ViewStream(data, offset, length));
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(0, 0, -1)]
|
||||
[InlineData(0, 0, 2048)]
|
||||
[InlineData(1024, 0, -1)]
|
||||
[InlineData(1024, 0, 2048)]
|
||||
[InlineData(1024, 256, -1)]
|
||||
[InlineData(1024, 256, 2048)]
|
||||
public void Constructor_Array_InvalidLength(int size, long offset, long length)
|
||||
{
|
||||
byte[] data = new byte[size];
|
||||
Assert.Throws<ArgumentOutOfRangeException>(() => _ = new ViewStream(data, offset, length));
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(0, 0, 0)]
|
||||
[InlineData(1024, 0, 1024)]
|
||||
[InlineData(1024, 256, 768)]
|
||||
public void Constructor_Stream(int size, long offset, long expectedLength)
|
||||
{
|
||||
Stream data = new MemoryStream(new byte[size]);
|
||||
var stream = new ViewStream(data, offset);
|
||||
Assert.Equal(expectedLength, stream.Length);
|
||||
Assert.Equal(0, stream.Position);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(0, 0, 0, 0)]
|
||||
[InlineData(1024, 0, 1024, 1024)]
|
||||
[InlineData(1024, 256, 512, 512)]
|
||||
public void Constructor_Stream_Length(int size, long offset, long length, long expectedLength)
|
||||
{
|
||||
Stream data = new MemoryStream(new byte[size]);
|
||||
var stream = new ViewStream(data, offset, length);
|
||||
Assert.Equal(expectedLength, stream.Length);
|
||||
Assert.Equal(0, stream.Position);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(0, -1, 0)]
|
||||
[InlineData(0, 2048, 0)]
|
||||
[InlineData(1024, -1, 1024)]
|
||||
[InlineData(1024, 2048, 1024)]
|
||||
[InlineData(1024, -1, 512)]
|
||||
[InlineData(1024, 2048, 512)]
|
||||
public void Constructor_Stream_InvalidOffset(int size, long offset, long length)
|
||||
{
|
||||
Stream data = new MemoryStream(new byte[size]);
|
||||
Assert.Throws<ArgumentOutOfRangeException>(() => _ = new ViewStream(data, offset, length));
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(0, 0, -1)]
|
||||
[InlineData(0, 0, 2048)]
|
||||
[InlineData(1024, 0, -1)]
|
||||
[InlineData(1024, 0, 2048)]
|
||||
[InlineData(1024, 256, -1)]
|
||||
[InlineData(1024, 256, 2048)]
|
||||
public void Constructor_Stream_InvalidLength(int size, long offset, long length)
|
||||
{
|
||||
Stream data = new MemoryStream(new byte[size]);
|
||||
Assert.Throws<ArgumentOutOfRangeException>(() => _ = new ViewStream(data, offset, length));
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Position
|
||||
|
||||
[Theory]
|
||||
[InlineData(0, 0, 0, -1, 0)]
|
||||
[InlineData(0, 0, 0, 0, 0)]
|
||||
[InlineData(0, 0, 0, 256, 0)]
|
||||
[InlineData(0, 0, 0, 2048, 0)]
|
||||
[InlineData(1024, 0, 1024, -1, 0)]
|
||||
[InlineData(1024, 0, 1024, 0, 0)]
|
||||
[InlineData(1024, 0, 1024, 256, 256)]
|
||||
[InlineData(1024, 0, 1024, 2048, 1023)]
|
||||
[InlineData(1024, 256, 512, -1, 0)]
|
||||
[InlineData(1024, 256, 512, 0, 0)]
|
||||
[InlineData(1024, 256, 512, 256, 256)]
|
||||
[InlineData(1024, 256, 512, 2048, 511)]
|
||||
public void Position_Array(int size, long offset, long length, long position, long expectedPosition)
|
||||
{
|
||||
byte[] data = new byte[size];
|
||||
var stream = new ViewStream(data, offset, length);
|
||||
stream.Position = position;
|
||||
Assert.Equal(expectedPosition, stream.Position);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(0, 0, 0, -1, 0)]
|
||||
[InlineData(0, 0, 0, 0, 0)]
|
||||
[InlineData(0, 0, 0, 256, 0)]
|
||||
[InlineData(0, 0, 0, 2048, 0)]
|
||||
[InlineData(1024, 0, 1024, -1, 0)]
|
||||
[InlineData(1024, 0, 1024, 0, 0)]
|
||||
[InlineData(1024, 0, 1024, 256, 256)]
|
||||
[InlineData(1024, 0, 1024, 2048, 1023)]
|
||||
[InlineData(1024, 256, 512, -1, 0)]
|
||||
[InlineData(1024, 256, 512, 0, 0)]
|
||||
[InlineData(1024, 256, 512, 256, 256)]
|
||||
[InlineData(1024, 256, 512, 2048, 511)]
|
||||
public void Position_Stream(int size, long offset, long length, long position, long expectedPosition)
|
||||
{
|
||||
Stream data = new MemoryStream(new byte[size]);
|
||||
var stream = new ViewStream(data, offset, length);
|
||||
stream.Position = position;
|
||||
Assert.Equal(expectedPosition, stream.Position);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region SegmentValid
|
||||
|
||||
[Theory]
|
||||
[InlineData(0, 0, 0, -1, 0, false)]
|
||||
[InlineData(0, 0, 0, 2048, 0, false)]
|
||||
[InlineData(0, 0, 0, 0, 0, true)]
|
||||
[InlineData(0, 0, 0, 0, -1, false)]
|
||||
[InlineData(0, 0, 0, 0, 2048, false)]
|
||||
[InlineData(1024, 0, 1024, -1, 0, false)]
|
||||
[InlineData(1024, 0, 1024, 2048, 0, false)]
|
||||
[InlineData(1024, 0, 1024, 0, 0, true)]
|
||||
[InlineData(1024, 0, 1024, 256, 0, true)]
|
||||
[InlineData(1024, 0, 1024, 256, 256, true)]
|
||||
[InlineData(1024, 0, 1024, 0, -1, false)]
|
||||
[InlineData(1024, 0, 1024, 0, 2048, false)]
|
||||
[InlineData(1024, 256, 512, -1, 0, false)]
|
||||
[InlineData(1024, 256, 512, 2048, 0, false)]
|
||||
[InlineData(1024, 256, 512, 0, 0, true)]
|
||||
[InlineData(1024, 256, 512, 256, 0, true)]
|
||||
[InlineData(1024, 256, 512, 256, 256, true)]
|
||||
[InlineData(1024, 256, 512, 0, -1, false)]
|
||||
[InlineData(1024, 256, 512, 0, 2048, false)]
|
||||
public void SegmentValid_Array(int size, long offset, long length, int segmentStart, int segmentLength, bool expected)
|
||||
{
|
||||
byte[] data = new byte[size];
|
||||
var stream = new ViewStream(data, offset, length);
|
||||
bool actual = stream.SegmentValid(segmentStart, segmentLength);
|
||||
Assert.Equal(expected, actual);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(0, 0, 0, -1, 0, false)]
|
||||
[InlineData(0, 0, 0, 2048, 0, false)]
|
||||
[InlineData(0, 0, 0, 0, 0, true)]
|
||||
[InlineData(0, 0, 0, 0, -1, false)]
|
||||
[InlineData(0, 0, 0, 0, 2048, false)]
|
||||
[InlineData(1024, 0, 1024, -1, 0, false)]
|
||||
[InlineData(1024, 0, 1024, 2048, 0, false)]
|
||||
[InlineData(1024, 0, 1024, 0, 0, true)]
|
||||
[InlineData(1024, 0, 1024, 256, 0, true)]
|
||||
[InlineData(1024, 0, 1024, 256, 256, true)]
|
||||
[InlineData(1024, 0, 1024, 0, -1, false)]
|
||||
[InlineData(1024, 0, 1024, 0, 2048, false)]
|
||||
[InlineData(1024, 256, 512, -1, 0, false)]
|
||||
[InlineData(1024, 256, 512, 2048, 0, false)]
|
||||
[InlineData(1024, 256, 512, 0, 0, true)]
|
||||
[InlineData(1024, 256, 512, 256, 0, true)]
|
||||
[InlineData(1024, 256, 512, 256, 256, true)]
|
||||
[InlineData(1024, 256, 512, 0, -1, false)]
|
||||
[InlineData(1024, 256, 512, 0, 2048, false)]
|
||||
public void SegmentValid_Stream(int size, long offset, long length, int segmentStart, int segmentLength, bool expected)
|
||||
{
|
||||
Stream data = new MemoryStream(new byte[size]);
|
||||
var stream = new ViewStream(data, offset, length);
|
||||
bool actual = stream.SegmentValid(segmentStart, segmentLength);
|
||||
Assert.Equal(expected, actual);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Read
|
||||
|
||||
[Theory]
|
||||
[InlineData(0, 0, 0, -1, 0)]
|
||||
[InlineData(0, 0, 0, 0, 0)]
|
||||
[InlineData(0, 0, 0, 2048, 0)]
|
||||
[InlineData(1024, 0, 1024, -1, 0)]
|
||||
[InlineData(1024, 0, 1024, 0, 0)]
|
||||
[InlineData(1024, 0, 1024, 256, 256)]
|
||||
[InlineData(1024, 0, 1024, 1024, 1024)]
|
||||
[InlineData(1024, 0, 1024, 2048, 0)]
|
||||
[InlineData(1024, 256, 512, -1, 0)]
|
||||
[InlineData(1024, 256, 512, 0, 0)]
|
||||
[InlineData(1024, 256, 512, 256, 256)]
|
||||
[InlineData(1024, 256, 512, 512, 512)]
|
||||
[InlineData(1024, 256, 512, 2048, 0)]
|
||||
public void Read_Array(int size, long offset, long length, int count, int expectedRead)
|
||||
{
|
||||
byte[] data = new byte[size];
|
||||
var stream = new ViewStream(data, offset, length);
|
||||
|
||||
byte[] buffer = new byte[1024];
|
||||
int actual = stream.Read(buffer, 0, count);
|
||||
Assert.Equal(expectedRead, actual);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(0, 0, 0, -1, 0)]
|
||||
[InlineData(0, 0, 0, 0, 0)]
|
||||
[InlineData(0, 0, 0, 2048, 0)]
|
||||
[InlineData(1024, 0, 1024, -1, 0)]
|
||||
[InlineData(1024, 0, 1024, 0, 0)]
|
||||
[InlineData(1024, 0, 1024, 256, 256)]
|
||||
[InlineData(1024, 0, 1024, 1024, 1024)]
|
||||
[InlineData(1024, 0, 1024, 2048, 0)]
|
||||
[InlineData(1024, 256, 512, -1, 0)]
|
||||
[InlineData(1024, 256, 512, 0, 0)]
|
||||
[InlineData(1024, 256, 512, 256, 256)]
|
||||
[InlineData(1024, 256, 512, 512, 512)]
|
||||
[InlineData(1024, 256, 512, 2048, 0)]
|
||||
public void Read_Stream(int size, long offset, long length, int count, int expectedRead)
|
||||
{
|
||||
Stream data = new MemoryStream(new byte[size]);
|
||||
var stream = new ViewStream(data, offset, length);
|
||||
|
||||
byte[] buffer = new byte[1024];
|
||||
int actual = stream.Read(buffer, 0, count);
|
||||
Assert.Equal(expectedRead, actual);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Seek
|
||||
|
||||
[Theory]
|
||||
[InlineData(0, 0, 0, -1, SeekOrigin.Begin, 0)]
|
||||
[InlineData(0, 0, 0, -1, SeekOrigin.End, 0)]
|
||||
[InlineData(0, 0, 0, -1, SeekOrigin.Current, 0)]
|
||||
[InlineData(0, 0, 0, 0, SeekOrigin.Begin, 0)]
|
||||
[InlineData(0, 0, 0, 0, SeekOrigin.End, 0)]
|
||||
[InlineData(0, 0, 0, 0, SeekOrigin.Current, 0)]
|
||||
[InlineData(0, 0, 0, 256, SeekOrigin.Begin, 0)]
|
||||
[InlineData(0, 0, 0, 256, SeekOrigin.End, 0)]
|
||||
[InlineData(0, 0, 0, 256, SeekOrigin.Current, 0)]
|
||||
[InlineData(0, 0, 0, 2048, SeekOrigin.Begin, 0)]
|
||||
[InlineData(0, 0, 0, 2048, SeekOrigin.End, 0)]
|
||||
[InlineData(0, 0, 0, 2048, SeekOrigin.Current, 0)]
|
||||
[InlineData(1024, 0, 1024, -1, SeekOrigin.Begin, 0)]
|
||||
[InlineData(1024, 0, 1024, -1, SeekOrigin.End, 1022)]
|
||||
[InlineData(1024, 0, 1024, -1, SeekOrigin.Current, 0)]
|
||||
[InlineData(1024, 0, 1024, 0, SeekOrigin.Begin, 0)]
|
||||
[InlineData(1024, 0, 1024, 0, SeekOrigin.End, 1023)]
|
||||
[InlineData(1024, 0, 1024, 0, SeekOrigin.Current, 0)]
|
||||
[InlineData(1024, 0, 1024, 256, SeekOrigin.Begin, 256)]
|
||||
[InlineData(1024, 0, 1024, 256, SeekOrigin.End, 1023)]
|
||||
[InlineData(1024, 0, 1024, 256, SeekOrigin.Current, 256)]
|
||||
[InlineData(1024, 0, 1024, 2048, SeekOrigin.Begin, 1023)]
|
||||
[InlineData(1024, 0, 1024, 2048, SeekOrigin.End, 1023)]
|
||||
[InlineData(1024, 0, 1024, 2048, SeekOrigin.Current, 1023)]
|
||||
[InlineData(1024, 256, 512, -1, SeekOrigin.Begin, 0)]
|
||||
[InlineData(1024, 256, 512, -1, SeekOrigin.End, 510)]
|
||||
[InlineData(1024, 256, 512, -1, SeekOrigin.Current, 0)]
|
||||
[InlineData(1024, 256, 512, 0, SeekOrigin.Begin, 0)]
|
||||
[InlineData(1024, 256, 512, 0, SeekOrigin.End, 511)]
|
||||
[InlineData(1024, 256, 512, 0, SeekOrigin.Current, 0)]
|
||||
[InlineData(1024, 256, 512, 256, SeekOrigin.Begin, 256)]
|
||||
[InlineData(1024, 256, 512, 256, SeekOrigin.End, 511)]
|
||||
[InlineData(1024, 256, 512, 256, SeekOrigin.Current, 256)]
|
||||
[InlineData(1024, 256, 512, 2048, SeekOrigin.Begin, 511)]
|
||||
[InlineData(1024, 256, 512, 2048, SeekOrigin.End, 511)]
|
||||
[InlineData(1024, 256, 512, 2048, SeekOrigin.Current, 511)]
|
||||
public void Seek_Array(int size, long offset, long length, long position, SeekOrigin seekOrigin, long expectedPosition)
|
||||
{
|
||||
byte[] data = new byte[size];
|
||||
var stream = new ViewStream(data, offset, length);
|
||||
stream.Seek(position, seekOrigin);
|
||||
Assert.Equal(expectedPosition, stream.Position);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(0, 0, 0, -1, SeekOrigin.Begin, 0)]
|
||||
[InlineData(0, 0, 0, -1, SeekOrigin.End, 0)]
|
||||
[InlineData(0, 0, 0, -1, SeekOrigin.Current, 0)]
|
||||
[InlineData(0, 0, 0, 0, SeekOrigin.Begin, 0)]
|
||||
[InlineData(0, 0, 0, 0, SeekOrigin.End, 0)]
|
||||
[InlineData(0, 0, 0, 0, SeekOrigin.Current, 0)]
|
||||
[InlineData(0, 0, 0, 256, SeekOrigin.Begin, 0)]
|
||||
[InlineData(0, 0, 0, 256, SeekOrigin.End, 0)]
|
||||
[InlineData(0, 0, 0, 256, SeekOrigin.Current, 0)]
|
||||
[InlineData(0, 0, 0, 2048, SeekOrigin.Begin, 0)]
|
||||
[InlineData(0, 0, 0, 2048, SeekOrigin.End, 0)]
|
||||
[InlineData(0, 0, 0, 2048, SeekOrigin.Current, 0)]
|
||||
[InlineData(1024, 0, 1024, -1, SeekOrigin.Begin, 0)]
|
||||
[InlineData(1024, 0, 1024, -1, SeekOrigin.End, 1022)]
|
||||
[InlineData(1024, 0, 1024, -1, SeekOrigin.Current, 0)]
|
||||
[InlineData(1024, 0, 1024, 0, SeekOrigin.Begin, 0)]
|
||||
[InlineData(1024, 0, 1024, 0, SeekOrigin.End, 1023)]
|
||||
[InlineData(1024, 0, 1024, 0, SeekOrigin.Current, 0)]
|
||||
[InlineData(1024, 0, 1024, 256, SeekOrigin.Begin, 256)]
|
||||
[InlineData(1024, 0, 1024, 256, SeekOrigin.End, 1023)]
|
||||
[InlineData(1024, 0, 1024, 256, SeekOrigin.Current, 256)]
|
||||
[InlineData(1024, 0, 1024, 2048, SeekOrigin.Begin, 1023)]
|
||||
[InlineData(1024, 0, 1024, 2048, SeekOrigin.End, 1023)]
|
||||
[InlineData(1024, 0, 1024, 2048, SeekOrigin.Current, 1023)]
|
||||
[InlineData(1024, 256, 512, -1, SeekOrigin.Begin, 0)]
|
||||
[InlineData(1024, 256, 512, -1, SeekOrigin.End, 510)]
|
||||
[InlineData(1024, 256, 512, -1, SeekOrigin.Current, 0)]
|
||||
[InlineData(1024, 256, 512, 0, SeekOrigin.Begin, 0)]
|
||||
[InlineData(1024, 256, 512, 0, SeekOrigin.End, 511)]
|
||||
[InlineData(1024, 256, 512, 0, SeekOrigin.Current, 0)]
|
||||
[InlineData(1024, 256, 512, 256, SeekOrigin.Begin, 256)]
|
||||
[InlineData(1024, 256, 512, 256, SeekOrigin.End, 511)]
|
||||
[InlineData(1024, 256, 512, 256, SeekOrigin.Current, 256)]
|
||||
[InlineData(1024, 256, 512, 2048, SeekOrigin.Begin, 511)]
|
||||
[InlineData(1024, 256, 512, 2048, SeekOrigin.End, 511)]
|
||||
[InlineData(1024, 256, 512, 2048, SeekOrigin.Current, 511)]
|
||||
public void Seek_Stream(int size, long offset, long length, long position, SeekOrigin seekOrigin, long expectedPosition)
|
||||
{
|
||||
Stream data = new MemoryStream(new byte[size]);
|
||||
var stream = new ViewStream(data, offset, length);
|
||||
stream.Seek(position, seekOrigin);
|
||||
Assert.Equal(expectedPosition, stream.Position);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Unimplemented
|
||||
|
||||
[Fact]
|
||||
public void Flush_Array_Throws()
|
||||
{
|
||||
byte[] data = new byte[1024];
|
||||
var stream = new ViewStream(data, 0, 1024);
|
||||
Assert.Throws<NotImplementedException>(() => stream.Flush());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Flush_Stream_Throws()
|
||||
{
|
||||
Stream data = new MemoryStream(new byte[1024]);
|
||||
var stream = new ViewStream(data, 0, 1024);
|
||||
Assert.Throws<NotImplementedException>(() => stream.Flush());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SetLength_Array_Throws()
|
||||
{
|
||||
byte[] data = new byte[1024];
|
||||
var stream = new ViewStream(data, 0, 1024);
|
||||
Assert.Throws<NotImplementedException>(() => stream.SetLength(0));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SetLength_Stream_Throws()
|
||||
{
|
||||
Stream data = new MemoryStream(new byte[1024]);
|
||||
var stream = new ViewStream(data, 0, 1024);
|
||||
Assert.Throws<NotImplementedException>(() => stream.SetLength(0));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Write_Array_Throws()
|
||||
{
|
||||
byte[] data = new byte[1024];
|
||||
var stream = new ViewStream(data, 0, 1024);
|
||||
Assert.Throws<NotImplementedException>(() => stream.Write([], 0, 0));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Write_Stream_Throws()
|
||||
{
|
||||
Stream data = new MemoryStream(new byte[1024]);
|
||||
var stream = new ViewStream(data, 0, 1024);
|
||||
Assert.Throws<NotImplementedException>(() => stream.Write([], 0, 0));
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
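The `ViewStream` tests describe a read-only window over a byte array or another stream, where out-of-range constructor arguments throw `ArgumentOutOfRangeException` and positions are clamped to the window. A short sketch assuming only that behaviour; the driver class is illustrative:

```csharp
using System;
using System.IO;
using SabreTools.IO.Streams;

// Illustrative driver; not part of the repository.
internal class ViewSketch
{
    private static void Main()
    {
        // Expose bytes 256..767 of the backing stream as an independent 512-byte view.
        Stream backing = new MemoryStream(new byte[1024]);
        var view = new ViewStream(backing, 256, 512);

        byte[] buffer = new byte[512];
        int read = view.Read(buffer, 0, buffer.Length);
        Console.WriteLine($"View length {view.Length}, read {read} bytes");
    }
}
```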
SabreTools.IO/Compression/Deflate/DeflateInfo.cs (new file, 26 lines)
@@ -0,0 +1,26 @@
namespace SabreTools.IO.Compression.Deflate
{
    /// <summary>
    /// Represents information about a DEFLATE stream
    /// </summary>
    public class DeflateInfo
    {
        /// <summary>
        /// Size of the deflated data
        /// </summary>
        /// <remarks>Set to a value less than 0 to ignore</remarks>
        public long InputSize { get; set; }

        /// <summary>
        /// Size of the inflated data
        /// </summary>
        /// <remarks>Set to a value less than 0 to ignore</remarks>
        public long OutputSize { get; set; }

        /// <summary>
        /// CRC-32 of the inflated data
        /// </summary>
        /// <remarks>Set to a value of 0 to ignore</remarks>
        public uint Crc32 { get; set; }
    }
}
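Per the remarks above, a negative size or a zero CRC-32 disables that particular check. A minimal sketch of filling in the expected values; the literals are placeholders:

```csharp
using System;
using SabreTools.IO.Compression.Deflate;

// Placeholder values: verify only the compressed size, skip the other checks.
var expected = new DeflateInfo
{
    InputSize = 2048, // compressed size taken from the surrounding container
    OutputSize = -1,  // < 0: do not verify the decompressed size
    Crc32 = 0,        // 0: do not verify the CRC-32
};

Console.WriteLine($"Check sizes: {expected.InputSize >= 0}/{expected.OutputSize >= 0}, check CRC: {expected.Crc32 != 0}");
```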
SabreTools.IO/Compression/Deflate/Enums.cs (new file, 34 lines)
@@ -0,0 +1,34 @@
namespace SabreTools.IO.Compression.Deflate
{
    /// <summary>
    /// Represents the status returned from extracting
    /// </summary>
    public enum ExtractionStatus
    {
        /// <summary>
        /// Extraction wasn't performed because the inputs were invalid
        /// </summary>
        INVALID,

        /// <summary>
        /// No issues with the extraction
        /// </summary>
        GOOD,

        /// <summary>
        /// File extracted but was the wrong size
        /// </summary>
        /// <remarks>Rewinds the stream and deletes the bad file</remarks>
        WRONG_SIZE,

        /// <summary>
        /// File extracted but had the wrong CRC-32 value
        /// </summary>
        BAD_CRC,

        /// <summary>
        /// Extraction failed entirely
        /// </summary>
        FAIL,
    }
}
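A caller might translate these statuses into log messages; the mapping below is illustrative only and not part of the repository:

```csharp
using System;
using SabreTools.IO.Compression.Deflate;

// Illustrative helper: map each status to a short description for logging.
static string Describe(ExtractionStatus status) => status switch
{
    ExtractionStatus.INVALID => "inputs were invalid; nothing was attempted",
    ExtractionStatus.GOOD => "extracted and verified",
    ExtractionStatus.WRONG_SIZE => "extracted, but the size did not match",
    ExtractionStatus.BAD_CRC => "extracted, but the CRC-32 did not match",
    ExtractionStatus.FAIL => "extraction failed entirely",
    _ => "unknown status",
};

Console.WriteLine(Describe(ExtractionStatus.GOOD));
```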
SabreTools.IO/Compression/Deflate/InflateWrapper.cs (new file, 439 lines)
@@ -0,0 +1,439 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.Hashing;
|
||||
using SabreTools.IO.Extensions;
|
||||
using SabreTools.Models.PKZIP;
|
||||
using static SabreTools.Models.PKZIP.Constants;
|
||||
|
||||
namespace SabreTools.IO.Compression.Deflate
|
||||
{
|
||||
/// <summary>
|
||||
/// Wrapper to handle DEFLATE decompression with data verification
|
||||
/// </summary>
|
||||
public class InflateWrapper
|
||||
{
|
||||
#region Constants
|
||||
|
||||
/// <summary>
|
||||
/// Buffer size for decompression
|
||||
/// </summary>
|
||||
private const int BufferSize = 1024 * 1024;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Extraction
|
||||
|
||||
/// <summary>
|
||||
/// Attempt to extract a file defined by a filename
|
||||
/// </summary>
|
||||
/// <param name="source">Stream representing the deflated data</param>
|
||||
/// <param name="filename">Output filename, null to auto-generate</param>
|
||||
/// <param name="outputDirectory">Output directory to write to</param>
|
||||
/// <param name="expected">Expected DEFLATE stream information</param>
|
||||
/// <param name="pkzip">Indicates if PKZIP containers are used</param>
|
||||
/// <param name="includeDebug">True to include debug data, false otherwise</param>
|
||||
/// <returns>Extraction status representing the final state</returns>
|
||||
/// <remarks>Assumes that the current stream position is where the compressed data lives</remarks>
|
||||
public static ExtractionStatus ExtractFile(Stream source,
|
||||
string? filename,
|
||||
string outputDirectory,
|
||||
DeflateInfo expected,
|
||||
bool pkzip,
|
||||
bool includeDebug)
|
||||
{
|
||||
// Debug output
|
||||
if (includeDebug) Console.WriteLine($"Attempting to extract {filename}");
|
||||
|
||||
// Extract the file
|
||||
var destination = new MemoryStream();
|
||||
ExtractionStatus status = ExtractStream(source,
|
||||
destination,
|
||||
expected,
|
||||
pkzip,
|
||||
includeDebug,
|
||||
out var foundFilename);
|
||||
|
||||
// If the extracted data is invalid
|
||||
if (status != ExtractionStatus.GOOD || destination == null)
|
||||
return status;
|
||||
|
||||
// Ensure directory separators are consistent
|
||||
filename ??= foundFilename ?? $"FILE_[{expected.InputSize}, {expected.OutputSize}, {expected.Crc32}]";
|
||||
if (Path.DirectorySeparatorChar == '\\')
|
||||
filename = filename.Replace('/', '\\');
|
||||
else if (Path.DirectorySeparatorChar == '/')
|
||||
filename = filename.Replace('\\', '/');
|
||||
|
||||
// Ensure the full output directory exists
|
||||
filename = Path.Combine(outputDirectory, filename);
|
||||
var directoryName = Path.GetDirectoryName(filename);
|
||||
if (directoryName != null && !Directory.Exists(directoryName))
|
||||
Directory.CreateDirectory(directoryName);
|
||||
|
||||
// Write the output file
|
||||
File.WriteAllBytes(filename, destination.ToArray());
|
||||
return status;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Attempt to extract a file to a stream
|
||||
/// </summary>
|
||||
/// <param name="source">Stream representing the deflated data</param>
|
||||
/// <param name="destination">Stream where the inflated data will be written</param>
|
||||
/// <param name="expected">Expected DEFLATE stream information</param>
|
||||
/// <param name="pkzip">Indicates if PKZIP containers are used</param>
|
||||
/// <param name="includeDebug">True to include debug data, false otherwise</param>
|
||||
/// <param name="filename">Output filename if extracted from the data, null otherwise</param>
|
||||
/// <returns>Extraction status representing the final state</returns>
|
||||
/// <remarks>Assumes that the current stream position is where the compressed data lives</remarks>
|
||||
public static ExtractionStatus ExtractStream(Stream source,
|
||||
Stream destination,
|
||||
DeflateInfo expected,
|
||||
bool pkzip,
|
||||
bool includeDebug,
|
||||
out string? filename)
|
||||
{
|
||||
// If PKZIP containers are used
|
||||
if (pkzip)
|
||||
return ExtractStreamWithContainer(source, destination, expected, includeDebug, out filename);
|
||||
|
||||
// If post-data checksums are used
|
||||
filename = null;
|
||||
return ExtractStreamWithChecksum(source, destination, expected, includeDebug);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Extract source data in a PKZIP container
|
||||
/// </summary>
|
||||
/// <param name="source">Stream representing the deflated data</param>
|
||||
/// <param name="destination">Stream where the inflated data will be written</param>
|
||||
/// <param name="expected">Expected DEFLATE stream information</param>
|
||||
/// <param name="includeDebug">True to include debug data, false otherwise</param>
|
||||
/// <param name="filename">Filename from the PKZIP header, if it exists</param>
|
||||
/// <returns>Extraction status representing the final state</returns>
|
||||
public static ExtractionStatus ExtractStreamWithContainer(Stream source,
|
||||
Stream destination,
|
||||
DeflateInfo expected,
|
||||
bool includeDebug,
|
||||
out string? filename)
|
||||
{
|
||||
// Set default values
|
||||
filename = null;
|
||||
|
||||
// Debug output
|
||||
if (includeDebug) Console.WriteLine($"Offset: {source.Position:X8}, Expected Read: {expected.InputSize}, Expected Write: {expected.OutputSize}, Expected CRC-32: {expected.Crc32:X8}");
|
||||
|
||||
// Check the validity of the inputs
|
||||
if (expected.InputSize == 0)
|
||||
{
|
||||
if (includeDebug) Console.Error.WriteLine($"Not attempting to extract, expected to read 0 bytes");
|
||||
return ExtractionStatus.INVALID;
|
||||
}
|
||||
else if (expected.InputSize > (source.Length - source.Position))
|
||||
{
|
||||
if (includeDebug) Console.Error.WriteLine($"Not attempting to extract, expected to read {expected.InputSize} bytes but only {source.Length - source.Position} bytes remain");
|
||||
return ExtractionStatus.INVALID;
|
||||
}
|
||||
|
||||
// Cache the current offset
|
||||
long current = source.Position;
|
||||
|
||||
// Parse the PKZIP header, if it exists
|
||||
LocalFileHeader? zipHeader = ParseLocalFileHeader(source);
|
||||
long zipHeaderBytes = source.Position - current;
|
||||
|
||||
// Always trust the PKZIP CRC-32 value over what is supplied
|
||||
if (zipHeader != null)
|
||||
expected.Crc32 = zipHeader.CRC32;
|
||||
|
||||
// If the filename is [NULL], replace with the zip filename
|
||||
if (zipHeader?.FileName != null)
|
||||
{
|
||||
filename = zipHeader.FileName;
|
||||
if (includeDebug) Console.WriteLine($"Filename from PKZIP header: {filename}");
|
||||
}
|
||||
|
||||
// Debug output
|
||||
if (includeDebug) Console.WriteLine($"PKZIP Filename: {zipHeader?.FileName}, PKZIP Expected Read: {zipHeader?.CompressedSize}, PKZIP Expected Write: {zipHeader?.UncompressedSize}, PKZIP Expected CRC-32: {zipHeader?.CRC32:X4}");
|
||||
|
||||
// Extract the file
|
||||
var actual = Inflate(source, destination);
|
||||
if (actual == null)
|
||||
{
|
||||
if (includeDebug) Console.Error.WriteLine($"Could not extract {filename}");
|
||||
return ExtractionStatus.FAIL;
|
||||
}
|
||||
|
||||
// Account for the header bytes read
|
||||
actual.InputSize += zipHeaderBytes;
|
||||
source.Seek(current + actual.InputSize, SeekOrigin.Begin);
|
||||
|
||||
// Verify the extracted data
|
||||
return VerifyExtractedData(source, current, expected, actual, includeDebug);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Extract source data with a trailing CRC-32 checksum
|
||||
/// </summary>
|
||||
/// <param name="source">Stream representing the deflated data</param>
|
||||
/// <param name="destination">Stream where the inflated data will be written</param>
|
||||
/// <param name="expected">Expected DEFLATE stream information</param>
|
||||
/// <param name="includeDebug">True to include debug data, false otherwise</param>
|
||||
/// <returns>Extraction status representing the final state</returns>
|
||||
public static ExtractionStatus ExtractStreamWithChecksum(Stream source,
|
||||
Stream destination,
|
||||
DeflateInfo expected,
|
||||
bool includeDebug)
|
||||
{
|
||||
// Debug output
|
||||
if (includeDebug) Console.WriteLine($"Offset: {source.Position:X8}, Expected Read: {expected.InputSize}, Expected Write: {expected.OutputSize}, Expected CRC-32: {expected.Crc32:X8}");
|
||||
|
||||
// Check the validity of the inputs
|
||||
if (expected.InputSize == 0)
|
||||
{
|
||||
if (includeDebug) Console.Error.WriteLine($"Not attempting to extract, expected to read 0 bytes");
|
||||
return ExtractionStatus.INVALID;
|
||||
}
|
||||
else if (expected.InputSize > (source.Length - source.Position))
|
||||
{
|
||||
if (includeDebug) Console.Error.WriteLine($"Not attempting to extract, expected to read {expected.InputSize} bytes but only {source.Length - source.Position} bytes remain");
|
||||
return ExtractionStatus.INVALID;
|
||||
}
|
||||
|
||||
// Cache the current offset
|
||||
long current = source.Position;
|
||||
|
||||
// Extract the file
|
||||
var actual = Inflate(source, destination);
|
||||
if (actual == null)
|
||||
{
|
||||
if (includeDebug) Console.Error.WriteLine($"Could not extract");
|
||||
return ExtractionStatus.FAIL;
|
||||
}
|
||||
|
||||
// Seek to the true end of the data
|
||||
source.Seek(current + actual.InputSize, SeekOrigin.Begin);
|
||||
|
||||
// If the read value is off-by-one after checksum
|
||||
if (actual.InputSize == expected.InputSize - 5)
|
||||
{
|
||||
// If not at the end of the file, get the corrected offset
|
||||
if (source.Position + 5 < source.Length)
|
||||
{
|
||||
// TODO: What does this byte represent?
|
||||
byte padding = source.ReadByteValue();
|
||||
actual.InputSize += 1;
|
||||
|
||||
// Debug output
|
||||
if (includeDebug) Console.WriteLine($"Off-by-one padding byte detected: 0x{padding:X2}");
|
||||
}
|
||||
else
|
||||
{
|
||||
// Debug output
|
||||
if (includeDebug) Console.WriteLine($"Not enough data to adjust offset");
|
||||
}
|
||||
}
|
||||
|
||||
// If there is enough data to read the full CRC
|
||||
uint deflateCrc;
|
||||
if (source.Position + 4 < source.Length)
|
||||
{
|
||||
deflateCrc = source.ReadUInt32LittleEndian();
|
||||
actual.InputSize += 4;
|
||||
}
|
||||
// Otherwise, read what is possible and pad with 0x00
|
||||
else
|
||||
{
|
||||
byte[] deflateCrcBytes = new byte[4];
|
||||
int realCrcLength = source.Read(deflateCrcBytes, 0, (int)(source.Length - source.Position));
|
||||
|
||||
// Parse as a little-endian 32-bit value
|
||||
deflateCrc = (uint)(deflateCrcBytes[0]
|
||||
| (deflateCrcBytes[1] << 8)
|
||||
| (deflateCrcBytes[2] << 16)
|
||||
| (deflateCrcBytes[3] << 24));
|
||||
|
||||
actual.InputSize += realCrcLength;
|
||||
}
|
||||
|
||||
// If the CRC to check isn't set
|
||||
if (expected.Crc32 == 0)
|
||||
expected.Crc32 = deflateCrc;
|
||||
|
||||
// Debug output
|
||||
if (includeDebug) Console.WriteLine($"DeflateStream CRC-32: {deflateCrc:X8}");
|
||||
|
||||
// Verify the extracted data
|
||||
return VerifyExtractedData(source, current, expected, actual, includeDebug);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a local file header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled local file header on success, null on error</returns>
|
||||
/// <remarks>Mirror of method in Serialization</remarks>
|
||||
private static LocalFileHeader? ParseLocalFileHeader(Stream data)
|
||||
{
|
||||
var header = new LocalFileHeader();
|
||||
|
||||
header.Signature = data.ReadUInt32LittleEndian();
|
||||
if (header.Signature != LocalFileHeaderSignature)
|
||||
return null;
|
||||
|
||||
header.Version = data.ReadUInt16LittleEndian();
|
||||
header.Flags = (GeneralPurposeBitFlags)data.ReadUInt16LittleEndian();
|
||||
header.CompressionMethod = (CompressionMethod)data.ReadUInt16LittleEndian();
|
||||
header.LastModifedFileTime = data.ReadUInt16LittleEndian();
|
||||
header.LastModifiedFileDate = data.ReadUInt16LittleEndian();
|
||||
header.CRC32 = data.ReadUInt32LittleEndian();
|
||||
header.CompressedSize = data.ReadUInt32LittleEndian();
|
||||
header.UncompressedSize = data.ReadUInt32LittleEndian();
|
||||
header.FileNameLength = data.ReadUInt16LittleEndian();
|
||||
header.ExtraFieldLength = data.ReadUInt16LittleEndian();
|
||||
|
||||
if (header.FileNameLength > 0 && data.Position + header.FileNameLength <= data.Length)
|
||||
{
|
||||
byte[] filenameBytes = data.ReadBytes(header.FileNameLength);
|
||||
if (filenameBytes.Length != header.FileNameLength)
|
||||
return null;
|
||||
|
||||
header.FileName = Encoding.ASCII.GetString(filenameBytes);
|
||||
}
|
||||
|
||||
// Parsing extras is skipped here, unlike in Serialization
|
||||
if (header.ExtraFieldLength > 0 && data.Position + header.ExtraFieldLength <= data.Length)
|
||||
{
|
||||
byte[] extraBytes = data.ReadBytes(header.ExtraFieldLength);
|
||||
if (extraBytes.Length != header.ExtraFieldLength)
|
||||
return null;
|
||||
}
|
||||
|
||||
return header;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Verify the extracted stream data, seeking to the original location on failure
|
||||
/// </summary>
|
||||
/// <param name="source">Stream representing the deflated data</param>
|
||||
/// <param name="start">Position representing the start of the deflated data</param>
|
||||
/// <param name="expected">Expected deflation info</param>
|
||||
/// <param name="actual">Actual deflation info</param>
|
||||
/// <param name="includeDebug">True to include debug data, false otherwise</param>
|
||||
/// <returns>Extraction status representing the final state</returns>
|
||||
private static ExtractionStatus VerifyExtractedData(Stream source,
|
||||
long start,
|
||||
DeflateInfo expected,
|
||||
DeflateInfo actual,
|
||||
bool includeDebug)
|
||||
{
|
||||
// Debug output
|
||||
if (includeDebug) Console.WriteLine($"Actual Read: {actual.InputSize}, Actual Write: {actual.OutputSize}, Actual CRC-32: {actual.Crc32:X8}");
|
||||
|
||||
// If there's a mismatch during both reading and writing
|
||||
if (expected.InputSize >= 0 && expected.InputSize != actual.InputSize)
|
||||
{
|
||||
// This in/out check helps catch false positives, such as
|
||||
// files that have an off-by-one mismatch for read values
|
||||
// but properly match the output written values.
|
||||
|
||||
// If the written bytes are not correct as well
|
||||
if (expected.OutputSize >= 0 && expected.OutputSize != actual.OutputSize)
|
||||
{
|
||||
if (includeDebug) Console.Error.WriteLine($"Mismatched read/write values!");
|
||||
source.Seek(start, SeekOrigin.Begin);
|
||||
return ExtractionStatus.WRONG_SIZE;
|
||||
}
|
||||
|
||||
// If the written bytes are not being verified
|
||||
else if (expected.OutputSize < 0)
|
||||
{
|
||||
if (includeDebug) Console.Error.WriteLine($"Mismatched read/write values!");
|
||||
source.Seek(start, SeekOrigin.Begin);
|
||||
return ExtractionStatus.WRONG_SIZE;
|
||||
}
|
||||
}
|
||||
|
||||
// If there's just a mismatch during only writing
|
||||
if (expected.InputSize >= 0 && expected.InputSize == actual.InputSize)
|
||||
{
|
||||
// We want to log this but ignore the error
|
||||
if (expected.OutputSize >= 0 && expected.OutputSize != actual.OutputSize)
|
||||
{
|
||||
if (includeDebug) Console.WriteLine($"Ignoring mismatched write values because read values match!");
|
||||
}
|
||||
}
|
||||
|
||||
// Otherwise, the write size should be checked normally
|
||||
else if (expected.InputSize == 0 && expected.OutputSize >= 0 && expected.OutputSize != actual.OutputSize)
|
||||
{
|
||||
if (includeDebug) Console.Error.WriteLine($"Mismatched write values!");
|
||||
source.Seek(start, SeekOrigin.Begin);
|
||||
return ExtractionStatus.WRONG_SIZE;
|
||||
}
|
||||
|
||||
// If there's a mismatch with the CRC-32
|
||||
if (expected.Crc32 != 0 && expected.Crc32 != actual.Crc32)
|
||||
{
|
||||
if (includeDebug) Console.Error.WriteLine($"Mismatched CRC-32 values!");
|
||||
source.Seek(start, SeekOrigin.Begin);
|
||||
return ExtractionStatus.BAD_CRC;
|
||||
}
|
||||
|
||||
return ExtractionStatus.GOOD;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Inflation
|
||||
|
||||
/// <summary>
|
||||
/// Inflate an input stream to an output stream
|
||||
/// </summary>
|
||||
/// <param name="source">Stream representing the deflated data</param>
|
||||
/// <param name="destination">Stream where the inflated data will be written</param>
|
||||
/// <returns>Deflate information representing the processed data on success, null on error</returns>
|
||||
public static DeflateInfo? Inflate(Stream source, Stream destination)
|
||||
{
|
||||
try
|
||||
{
|
||||
// Setup the hasher for CRC-32 calculation
|
||||
using var hasher = new HashWrapper(HashType.CRC32);
|
||||
|
||||
// Create a DeflateStream from the input
|
||||
using var ds = new DeflateStream(source, CompressionMode.Decompress, leaveOpen: true);
|
||||
|
||||
// Decompress in blocks
|
||||
while (true)
|
||||
{
|
||||
byte[] buf = new byte[BufferSize];
|
||||
int read = ds.Read(buf, 0, buf.Length);
|
||||
if (read == 0)
|
||||
break;
|
||||
|
||||
hasher.Process(buf, 0, read);
|
||||
destination.Write(buf, 0, read);
|
||||
}
|
||||
|
||||
// Finalize the hash
|
||||
hasher.Terminate();
|
||||
byte[] hashBytes = hasher.CurrentHashBytes!;
|
||||
|
||||
// Save the deflate values
|
||||
return new DeflateInfo
|
||||
{
|
||||
InputSize = ds.TotalIn,
|
||||
OutputSize = ds.TotalOut,
|
||||
Crc32 = BitConverter.ToUInt32(hashBytes, 0),
|
||||
};
|
||||
}
|
||||
catch
|
||||
{
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
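Tying the pieces together, `ExtractStream` inflates from the current position of `source` into `destination`, verifies the result against the supplied `DeflateInfo`, and returns an `ExtractionStatus`. A usage sketch under stated assumptions: the path, seek offset, and size literals are placeholders, and the non-PKZIP path relies on a trailing CRC-32 after the DEFLATE data, as handled by `ExtractStreamWithChecksum` above:

```csharp
using System;
using System.IO;
using SabreTools.IO.Compression.Deflate;

// Placeholders: the path, offset, and sizes would come from the container being unpacked.
using Stream source = File.OpenRead("archive.bin");
source.Seek(0x40, SeekOrigin.Begin); // start of the DEFLATE data

var expected = new DeflateInfo
{
    InputSize = 2048,  // compressed size recorded by the container
    OutputSize = 4096, // decompressed size recorded by the container
    Crc32 = 0,         // 0: fall back to the CRC-32 read from the data itself
};

var destination = new MemoryStream();
ExtractionStatus status = InflateWrapper.ExtractStream(
    source, destination, expected, pkzip: false, includeDebug: true, out string? filename);

Console.WriteLine($"Status: {status}, wrote {destination.Length} bytes, filename: {filename ?? "(none)"}");
```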
SabreTools.IO/Encryption/MoPaQDecrypter.cs (new file, 172 lines)
@@ -0,0 +1,172 @@
|
||||
using System;
using System.IO;
using SabreTools.Hashing;
using SabreTools.Matching;
using static SabreTools.Models.MoPaQ.Constants;

namespace SabreTools.IO.Encryption
{
    /// <summary>
    /// Handler for decrypting MoPaQ block and table data
    /// </summary>
    public class MoPaQDecrypter
    {
        #region Private Instance Variables

        /// <summary>
        /// Buffer for encryption and decryption
        /// </summary>
        private readonly uint[] _stormBuffer = new uint[STORM_BUFFER_SIZE];

        #endregion

        public MoPaQDecrypter()
        {
            PrepareCryptTable();
        }

        /// <summary>
        /// Prepare the encryption table
        /// </summary>
        private void PrepareCryptTable()
        {
            uint seed = 0x00100001;
            for (uint index1 = 0; index1 < 0x100; index1++)
            {
                for (uint index2 = index1, i = 0; i < 5; i++, index2 += 0x100)
                {
                    seed = (seed * 125 + 3) % 0x2AAAAB;
                    uint temp1 = (seed & 0xFFFF) << 0x10;

                    seed = (seed * 125 + 3) % 0x2AAAAB;
                    uint temp2 = (seed & 0xFFFF);

                    _stormBuffer[index2] = (temp1 | temp2);
                }
            }
        }

        /// <summary>
        /// Load a table block by optionally decompressing and
        /// decrypting before returning the data.
        /// </summary>
        /// <param name="data">Stream to parse</param>
        /// <param name="offset">Data offset to parse</param>
        /// <param name="expectedHash">Optional MD5 hash for validation</param>
        /// <param name="compressedSize">Size of the table in the file</param>
        /// <param name="tableSize">Expected size of the table</param>
        /// <param name="key">Encryption key to use</param>
        /// <param name="realTableSize">Output representing the real table size</param>
        /// <returns>Byte array representing the processed table</returns>
        public byte[]? LoadTable(Stream data,
            long offset,
            byte[]? expectedHash,
            uint compressedSize,
            uint tableSize,
            uint key,
            out long realTableSize)
        {
            byte[]? tableData;
            byte[]? readBytes;
            long bytesToRead = tableSize;

            // Allocate the MPQ table
            tableData = readBytes = new byte[tableSize];

            // Check if the MPQ table is compressed
            if (compressedSize != 0 && compressedSize < tableSize)
            {
                // Allocate temporary buffer for holding compressed data
                readBytes = new byte[compressedSize];
                bytesToRead = compressedSize;
            }

            // Get the file offset from which we will read the table
            // Note: According to Storm.dll from Warcraft III (version 2002),
            // if the hash table position is 0xFFFFFFFF, no SetFilePointer call is done
            // and the table is loaded from the current file offset
            if (offset == 0xFFFFFFFF)
                offset = data.Position;

            // Is the sector table within the file?
            if (offset >= data.Length)
            {
                realTableSize = 0;
                return null;
            }

            // The hash table and block table can go beyond EOF.
            // Storm.dll reads as much as possible, then fills the missing part with zeros.
            // Abused by Spazzler map protector which sets hash table size to 0x00100000
            // Abused by NP_Protect in MPQs v4 as well
            if ((offset + bytesToRead) > data.Length)
                bytesToRead = (uint)(data.Length - offset);

            // Give the caller information that the table was cut
            realTableSize = bytesToRead;

            // If everything succeeded, read the raw table from the MPQ
            data.Seek(offset, SeekOrigin.Begin);
            _ = data.Read(readBytes, 0, (int)bytesToRead);

            // Verify the MD5 of the table, if present
            byte[]? actualHash = HashTool.GetByteArrayHashArray(readBytes, HashType.MD5);
            if (expectedHash != null && actualHash != null && !actualHash.EqualsExactly(expectedHash))
            {
                Console.WriteLine("Table is corrupt!");
                return null;
            }

            // First of all, decrypt the table
            if (key != 0)
                tableData = DecryptBlock(readBytes, bytesToRead, key);

            // If the table is compressed, decompress it
            if (compressedSize != 0 && compressedSize < tableSize)
            {
                Console.WriteLine("Table is compressed, it will not read properly!");
                return null;

                // TODO: Handle decompression
                // int cbOutBuffer = (int)tableSize;
                // int cbInBuffer = (int)compressedSize;

                // if (!SCompDecompress2(readBytes, &cbOutBuffer, tableData, cbInBuffer))
                //     errorCode = SErrGetLastError();

                // tableData = readBytes;
            }

            // Return the MPQ table
            return tableData;
        }

        /// <summary>
        /// Decrypt a single block of data
        /// </summary>
        public unsafe byte[] DecryptBlock(byte[] block, long length, uint key)
        {
            uint seed = 0xEEEEEEEE;

            uint[] castBlock = new uint[length >> 2];
            Buffer.BlockCopy(block, 0, castBlock, 0, (int)length);
            int castBlockPtr = 0;

            // Round to uints
            length >>= 2;

            while (length-- > 0)
            {
                seed += _stormBuffer[MPQ_HASH_KEY2_MIX + (key & 0xFF)];
                uint ch = castBlock[castBlockPtr] ^ (key + seed);

                key = ((~key << 0x15) + 0x11111111) | (key >> 0x0B);
                seed = ch + seed + (seed << 5) + 3;
                castBlock[castBlockPtr++] = ch;
            }

            Buffer.BlockCopy(castBlock, 0, block, 0, block.Length >> 2);
            return block;
        }
    }
}
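A minimal sketch of driving the new decrypter directly, assuming only what the file above shows; the block contents and key below are made up for illustration, since real MPQ keys are derived from archive file names, which is outside the scope of this hunk.

using System;
using SabreTools.IO.Encryption;

class MoPaQDecrypterSketch
{
    static void Main()
    {
        // The constructor prepares the crypt table used by DecryptBlock
        var decrypter = new MoPaQDecrypter();

        // Illustrative 16-byte block and key; real data comes from an MPQ archive
        byte[] block = new byte[16];
        uint key = 0xC0FFEE01;

        // DecryptBlock works in place and returns the same array
        byte[] decrypted = decrypter.DecryptBlock(block, block.Length, key);
        Console.WriteLine(BitConverter.ToString(decrypted));
    }
}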
@@ -1,9 +1,17 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace SabreTools.IO.Extensions
{
    public static class ByteArrayExtensions
    {
        /// <summary>
        /// Defines the maximum number of characters in a string
        /// as used in <see cref="ReadStringsWithEncoding"/>
        /// </summary>
        private const int MaximumCharactersInString = 64;

        /// <summary>
        /// Indicates whether the specified array is null or has a length of zero
        /// </summary>
@@ -50,5 +58,71 @@ namespace SabreTools.IO.Extensions
                return null;
            }
        }

        /// <summary>
        /// Read string data from the source with an encoding
        /// </summary>
        /// <param name="bytes">Byte array representing the source data</param>
        /// <param name="charLimit">Number of characters needed to be a valid string</param>
        /// <param name="encoding">Character encoding to use for checking</param>
        /// <returns>String list containing the requested data, empty on error</returns>
        /// <remarks>
        /// This method has a couple of notable implementation details:
        /// - Strings can only have a maximum of 64 characters
        /// - Characters that fall outside of the extended ASCII set will be unused
        /// </remarks>
#if NET20
        public static List<string> ReadStringsWithEncoding(this byte[]? bytes, int charLimit, Encoding encoding)
#else
        public static HashSet<string> ReadStringsWithEncoding(this byte[]? bytes, int charLimit, Encoding encoding)
#endif
        {
            if (bytes == null || bytes.Length == 0)
                return [];
            if (charLimit <= 0 || charLimit > bytes.Length)
                return [];

            // Create the string set to return
#if NET20
            var strings = new List<string>();
#else
            var strings = new HashSet<string>();
#endif

            // Check for strings
            int index = 0;
            while (index < bytes.Length)
            {
                // Get the maximum number of characters
                int maxChars = encoding.GetMaxCharCount(bytes.Length - index);
                int maxBytes = encoding.GetMaxByteCount(Math.Min(MaximumCharactersInString, maxChars));

                // Read the longest string allowed
                int maxRead = Math.Min(maxBytes, bytes.Length - index);
                string temp = encoding.GetString(bytes, index, maxRead);
                char[] tempArr = temp.ToCharArray();

                // Ignore empty strings
                if (temp.Length == 0)
                {
                    index++;
                    continue;
                }

                // Find the first instance of a control character
                int endOfString = Array.FindIndex(tempArr, c => char.IsControl(c) || (c & 0xFF00) != 0);
                if (endOfString > -1)
                    temp = temp.Substring(0, endOfString);

                // Otherwise, just add the string if long enough
                if (temp.Length >= charLimit)
                    strings.Add(temp);

                // Increment and continue
                index += Math.Max(encoding.GetByteCount(temp), 1);
            }

            return strings;
        }
    }
}

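As a rough usage sketch of the new ReadStringsWithEncoding extension, assuming only the behavior shown above: runs shorter than charLimit are dropped, and each candidate string is cut at the first control or non-extended-ASCII character.

using System;
using System.Text;
using SabreTools.IO.Extensions;

class ReadStringsSketch
{
    static void Main()
    {
        // Two NUL-terminated ASCII strings followed by non-text bytes
        byte[] data =
        [
            .. Encoding.ASCII.GetBytes("HELLO"), 0x00,
            .. Encoding.ASCII.GetBytes("WORLD"), 0x00,
            0x01, 0x02, 0x03,
        ];

        // Keep only strings of at least 5 characters
        var strings = data.ReadStringsWithEncoding(5, Encoding.ASCII);
        foreach (string value in strings)
            Console.WriteLine(value);
    }
}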
@@ -1,4 +1,6 @@
using System.Collections.Generic;
using System.IO;
using System.Text;

namespace SabreTools.IO.Extensions
{
@@ -10,7 +12,7 @@ namespace SabreTools.IO.Extensions
        /// <param name="input">Input stream to try aligning</param>
        /// <param name="alignment">Number of bytes to align on</param>
        /// <returns>True if the stream could be aligned, false otherwise</returns>
-       public static bool AlignToBoundary(this Stream? input, byte alignment)
+       public static bool AlignToBoundary(this Stream? input, int alignment)
        {
            // If the stream is invalid
            if (input == null || input.Length == 0 || !input.CanRead)
@@ -30,6 +32,80 @@ namespace SabreTools.IO.Extensions
            return input.Position % alignment == 0;
        }

        /// <summary>
        /// Read a number of bytes from an offset in a stream, if possible
        /// </summary>
        /// <param name="input">Input stream to read from</param>
        /// <param name="offset">Offset within the stream to start reading</param>
        /// <param name="length">Number of bytes to read from the offset</param>
        /// <param name="retainPosition">Indicates if the original position of the stream should be retained after reading</param>
        /// <returns>Filled byte array on success, null on error</returns>
        /// <remarks>
        /// This method will return a null array if the length is greater than what is left
        /// in the stream. This is different behavior than a normal stream read that would
        /// attempt to read as much as possible, returning the amount of bytes read.
        /// </remarks>
        public static byte[]? ReadFrom(this Stream? input, long offset, int length, bool retainPosition)
        {
            if (input == null || !input.CanRead || !input.CanSeek)
                return null;
            if (offset < 0 || offset >= input.Length)
                return null;
            if (length < 0 || offset + length > input.Length)
                return null;

            // Cache the current location
            long currentLocation = input.Position;

            // Seek to the requested offset
            long newPosition = input.SeekIfPossible(offset);
            if (newPosition != offset)
                return null;

            // Read from the position
            byte[] data = input.ReadBytes(length);

            // Seek back if requested
            if (retainPosition)
                _ = input.SeekIfPossible(currentLocation);

            // Return the read data
            return data;
        }

        /// <summary>
        /// Read string data from the source
        /// </summary>
        /// <param name="position">Position in the source to read from</param>
        /// <param name="length">Length of the requested data</param>
        /// <param name="charLimit">Number of characters needed to be a valid string, default 5</param>
        /// <returns>String list containing the requested data, null on error</returns>
        public static List<string>? ReadStringsFrom(this Stream? input, int position, int length, int charLimit = 5)
        {
            // Read the data as a byte array first
            byte[]? data = input.ReadFrom(position, length, retainPosition: true);
            if (data == null)
                return null;

            // Check for ASCII strings
            var asciiStrings = data.ReadStringsWithEncoding(charLimit, Encoding.ASCII);

            // Check for UTF-8 strings
            // We are limiting the check for Unicode characters with a second byte of 0x00 for now
            var utf8Strings = data.ReadStringsWithEncoding(charLimit, Encoding.UTF8);

            // Check for Unicode strings
            // We are limiting the check for Unicode characters with a second byte of 0x00 for now
            var unicodeStrings = data.ReadStringsWithEncoding(charLimit, Encoding.Unicode);

            // Ignore duplicate strings across encodings
            List<string> sourceStrings = [.. asciiStrings, .. utf8Strings, .. unicodeStrings];

            // Sort the strings and return
            sourceStrings.Sort();
            return sourceStrings;
        }

        /// <summary>
        /// Seek to a specific point in the stream, if possible
        /// </summary>
@@ -62,5 +138,24 @@ namespace SabreTools.IO.Extensions
                return -1;
            }
        }

        /// <summary>
        /// Check if a segment is valid in the stream
        /// </summary>
        /// <param name="input">Input stream to validate</param>
        /// <param name="offset">Position in the source</param>
        /// <param name="count">Length of the data to check</param>
        /// <returns>True if segment could be read fully, false otherwise</returns>
        public static bool SegmentValid(this Stream? input, long offset, long count)
        {
            if (input == null)
                return false;
            if (offset < 0 || offset > input.Length)
                return false;
            if (count < 0 || offset + count > input.Length)
                return false;

            return true;
        }
    }
}

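A brief sketch of the new stream helpers against an in-memory stream, assuming nothing beyond the methods added above; note that ReadFrom rejects out-of-range requests instead of truncating them, as its remarks describe.

using System;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;

class StreamHelpersSketch
{
    static void Main()
    {
        // 256 zero bytes with one ASCII string embedded at offset 32
        byte[] backing = new byte[256];
        Encoding.ASCII.GetBytes("SAMPLE STRING").CopyTo(backing, 32);
        using Stream input = new MemoryStream(backing);

        // Validate the segment before pulling 16 bytes from offset 32
        if (input.SegmentValid(32, 16))
        {
            byte[]? window = input.ReadFrom(32, 16, retainPosition: true);
            Console.WriteLine(window?.Length); // 16, and the stream position is unchanged
        }

        // Scan a region for printable strings of 5 or more characters
        var strings = input.ReadStringsFrom(0, 256, charLimit: 5);
        strings?.ForEach(Console.WriteLine);
    }
}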
@@ -10,7 +10,7 @@
    <Nullable>enable</Nullable>
    <SymbolPackageFormat>snupkg</SymbolPackageFormat>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
-   <Version>1.7.0</Version>
+   <Version>1.7.1</Version>

    <!-- Package Properties -->
    <Authors>Matt Nadareski</Authors>
@@ -30,7 +30,8 @@

  <ItemGroup>
    <PackageReference Include="SabreTools.Matching" Version="1.6.0" />
-   <PackageReference Include="SabreTools.Models" Version="1.6.0" />
+   <PackageReference Include="SabreTools.Models" Version="1.7.0" />
    <PackageReference Include="SabreTools.Hashing" Version="1.5.0" />
  </ItemGroup>

</Project>
@@ -39,7 +39,7 @@ namespace SabreTools.IO.Streams

        #endregion

-       #region Internal State
+       #region Instance Variables

        /// <summary>
        /// Internal collection of streams to read from
@@ -58,6 +58,8 @@ namespace SabreTools.IO.Streams

        #endregion

        #region Constructors

        /// <summary>
        /// Create a new, empty ReadOnlyCompositeStream
        /// </summary>
@@ -109,7 +111,7 @@ namespace SabreTools.IO.Streams
        /// </summary>
        public ReadOnlyCompositeStream(IEnumerable<Stream> streams)
        {
-           _streams = new List<Stream>(streams);
+           _streams = [.. streams];
            _length = 0;
            _position = 0;

@@ -123,6 +125,10 @@ namespace SabreTools.IO.Streams
            }
        }

        #endregion

        #region Data

        /// <summary>
        /// Add a new stream to the collection
        /// </summary>
@@ -138,10 +144,13 @@ namespace SabreTools.IO.Streams
            return true;
        }

        #endregion

        #region Stream Implementations

        /// <inheritdoc/>
-       public override void Flush() => throw new NotImplementedException();
+       public override void Flush()
+           => throw new NotImplementedException();

        /// <inheritdoc/>
        public override int Read(byte[] buffer, int offset, int count)
@@ -198,27 +207,22 @@ namespace SabreTools.IO.Streams
            // Handle the "seek"
            switch (origin)
            {
-               case SeekOrigin.Begin: _position = offset; break;
-               case SeekOrigin.Current: _position += offset; break;
-               case SeekOrigin.End: _position = _length - offset - 1; break;
+               case SeekOrigin.Begin: Position = offset; break;
+               case SeekOrigin.Current: Position += offset; break;
+               case SeekOrigin.End: Position = _length + offset - 1; break;
                default: throw new ArgumentException($"Invalid value for {nameof(origin)}");
            }
-           ;

-           // Handle out-of-bounds seeks
-           if (_position < 0)
-               _position = 0;
-           else if (_position >= _length)
-               _position = _length - 1;

-           return _position;
+           return Position;
        }

        /// <inheritdoc/>
-       public override void SetLength(long value) => throw new NotImplementedException();
+       public override void SetLength(long value)
+           => throw new NotImplementedException();

        /// <inheritdoc/>
-       public override void Write(byte[] buffer, int offset, int count) => throw new NotImplementedException();
+       public override void Write(byte[] buffer, int offset, int count)
+           => throw new NotImplementedException();

        #endregion

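For context, a short sketch of the composite stream this diff touches, using only the IEnumerable<Stream> constructor and Read override visible above; everything else about the type is assumed from its name and members.

using System;
using System.Collections.Generic;
using System.IO;
using SabreTools.IO.Streams;

class CompositeStreamSketch
{
    static void Main()
    {
        // Two backing streams exposed as one continuous, read-only stream
        var parts = new List<Stream>
        {
            new MemoryStream(new byte[] { 0x01, 0x02 }),
            new MemoryStream(new byte[] { 0x03, 0x04 }),
        };
        var composite = new ReadOnlyCompositeStream(parts);

        byte[] buffer = new byte[4];
        int read = composite.Read(buffer, 0, buffer.Length);
        Console.WriteLine($"{read} bytes: {BitConverter.ToString(buffer)}");
    }
}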
254  SabreTools.IO/Streams/ViewStream.cs  Normal file
@@ -0,0 +1,254 @@
using System;
using System.IO;

namespace SabreTools.IO.Streams
{
    /// <summary>
    /// Stream representing a view into a source
    /// </summary>
    public class ViewStream : Stream
    {
        #region Properties

        /// <inheritdoc/>
        public override bool CanRead => true;

        /// <inheritdoc/>
        public override bool CanSeek => _source.CanSeek;

        /// <inheritdoc/>
        public override bool CanWrite => false;

        /// <summary>
        /// Filename from the source, if possible
        /// </summary>
        public string? Filename
        {
            get
            {
                // A subset of streams have a filename
                if (_source is FileStream fs)
                    return fs.Name;
                else if (_source is ViewStream vs)
                    return vs.Filename;

                return null;
            }
        }

        /// <inheritdoc/>
        public override long Length => _length;

        /// <inheritdoc/>
        public override long Position
        {
            get
            {
                // Handle 0-length sources
                if (_length <= 0)
                    return 0;

                return _source.Position - _initialPosition;
            }
            set
            {
                // Handle 0-length sources
                if (_length <= 0)
                {
                    _source.Position = 0;
                    return;
                }

                long position = value;

                // Handle out-of-bounds seeks
                if (position < 0)
                    position = 0;
                else if (position >= _length)
                    position = _length - 1;

                _source.Position = _initialPosition + position;
            }
        }

        #endregion

        #region Instance Variables

        /// <summary>
        /// Initial position within the underlying data
        /// </summary>
        protected long _initialPosition;

        /// <summary>
        /// Usable length in the underlying data
        /// </summary>
        protected long _length;

        /// <summary>
        /// Source data
        /// </summary>
        protected Stream _source;

        /// <summary>
        /// Lock object for reading from the source
        /// </summary>
        private readonly object _sourceLock = new();

        #endregion

        #region Constructors

        /// <summary>
        /// Construct a new ViewStream from a Stream
        /// </summary>
        public ViewStream(Stream data, long offset)
        {
            if (!data.CanRead)
                throw new ArgumentException(nameof(data));
            if (offset < 0 || offset > data.Length)
                throw new ArgumentOutOfRangeException(nameof(offset));

            _source = data;
            _initialPosition = offset;
            _length = data.Length - offset;

            _source.Seek(_initialPosition, SeekOrigin.Begin);
        }

        /// <summary>
        /// Construct a new ViewStream from a Stream
        /// </summary>
        public ViewStream(Stream data, long offset, long length)
        {
            if (!data.CanRead)
                throw new ArgumentException(nameof(data));
            if (offset < 0 || offset > data.Length)
                throw new ArgumentOutOfRangeException(nameof(offset));
            if (length < 0 || offset + length > data.Length)
                throw new ArgumentOutOfRangeException(nameof(length));

            _source = data;
            _initialPosition = offset;
            _length = length;

            _source.Seek(_initialPosition, SeekOrigin.Begin);
        }

        /// <summary>
        /// Construct a new ViewStream from a byte array
        /// </summary>
        public ViewStream(byte[] data, long offset)
        {
            if (offset < 0 || offset > data.Length)
                throw new ArgumentOutOfRangeException(nameof(offset));

            long length = data.Length - offset;
            _source = new MemoryStream(data, (int)offset, (int)length);
            _initialPosition = 0;
            _length = length;

            _source.Seek(_initialPosition, SeekOrigin.Begin);
        }

        /// <summary>
        /// Construct a new ViewStream from a byte array
        /// </summary>
        public ViewStream(byte[] data, long offset, long length)
        {
            if (offset < 0 || offset > data.Length)
                throw new ArgumentOutOfRangeException(nameof(offset));
            if (length < 0 || offset + length > data.Length)
                throw new ArgumentOutOfRangeException(nameof(length));

            _source = new MemoryStream(data, (int)offset, (int)length);
            _initialPosition = 0;
            _length = length;

            _source.Seek(_initialPosition, SeekOrigin.Begin);
        }

        #endregion

        #region Data

        /// <summary>
        /// Check if a data segment is valid in the data source
        /// </summary>
        /// <param name="offset">Position in the source</param>
        /// <param name="count">Length of the data to check</param>
        /// <returns>True if the positional data is valid, false otherwise</returns>
        public bool SegmentValid(long offset, long count)
        {
            if (offset < 0 || offset > Length)
                return false;
            if (count < 0 || offset + count > Length)
                return false;

            return true;
        }

        #endregion

        #region Stream Implementations

        /// <inheritdoc/>
        public override void Flush()
            => throw new NotImplementedException();

        /// <inheritdoc/>
        public override int Read(byte[] buffer, int offset, int count)
        {
            // Invalid cases always return 0
            if (buffer.Length == 0)
                return 0;
            if (offset < 0 || offset >= buffer.Length)
                return 0;
            if (count < 0 || offset + count > buffer.Length)
                return 0;

            // Short-circuit 0-byte reads
            if (count == 0)
                return 0;

            try
            {
                lock (_sourceLock)
                {
                    return _source.Read(buffer, offset, count);
                }
            }
            catch
            {
                // Absorb the error
                return 0;
            }
        }

        /// <inheritdoc/>
        public override long Seek(long offset, SeekOrigin origin)
        {
            // Handle the "seek"
            switch (origin)
            {
                case SeekOrigin.Begin: Position = offset; break;
                case SeekOrigin.Current: Position += offset; break;
                case SeekOrigin.End: Position = _length + offset - 1; break;
                default: throw new ArgumentException($"Invalid value for {nameof(origin)}");
            }

            return Position;
        }

        /// <inheritdoc/>
        public override void SetLength(long value)
            => throw new NotImplementedException();

        /// <inheritdoc/>
        public override void Write(byte[] buffer, int offset, int count)
            => throw new NotImplementedException();

        #endregion
    }
}
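A short usage sketch of ViewStream as declared above, assuming nothing beyond the byte-array constructor and Read override shown; the view exposes an 8-byte window starting at offset 4 of the backing array.

using System;
using System.IO;
using SabreTools.IO.Streams;

class ViewStreamSketch
{
    static void Main()
    {
        // Backing data 0x00..0x0F; the view covers bytes 4 through 11
        byte[] data = new byte[16];
        for (int i = 0; i < data.Length; i++)
            data[i] = (byte)i;

        var view = new ViewStream(data, offset: 4, length: 8);

        byte[] buffer = new byte[8];
        int read = view.Read(buffer, 0, buffer.Length);
        Console.WriteLine($"{read} bytes: {BitConverter.ToString(buffer)}");
    }
}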