31 Commits
1.7.0 ... 1.7.3

Author  SHA1  Message  Date
Matt Nadareski  8ddd9f3f78  Bump version  2025-09-20 22:16:06 -04:00
Matt Nadareski  54ad538c08  Short-circuit fixed-width encodings  2025-09-20 22:10:54 -04:00
Matt Nadareski  e6bc9ab3e3  Add OptionalEndsWith string extension  2025-09-20 18:04:37 -04:00
Matt Nadareski  94934b00a9  There  2025-09-10 21:53:52 -04:00
Matt Nadareski  e49f56fccc  Add an enumerable extension from BOS  2025-09-06 15:42:48 -04:00
Matt Nadareski  79c64ddfa8  .NET Standard had issues with that last one  2025-09-06 15:37:24 -04:00
Matt Nadareski  b22384d5f3  Add neat string extensions from BOS  2025-09-06 15:32:36 -04:00
Matt Nadareski  955c1b5641  Bump version  2025-09-05 09:46:17 -04:00
Matt Nadareski  535f9f928d  Update Models to 1.7.1  2025-09-05 09:21:15 -04:00
Matt Nadareski  f0cb15c2e4  Fix comments  2025-09-05 09:15:05 -04:00
Matt Nadareski  ec99304c51  Implement the 16KiB limit  2025-09-03 09:05:08 -04:00
Matt Nadareski  aefc931055  Of all things  2025-09-03 01:29:06 -04:00
Matt Nadareski  e7fe342379  Fix missed compatibility issue in string reading  2025-09-03 01:04:34 -04:00
Matt Nadareski  f372999b1b  So that's why  2025-09-03 00:23:46 -04:00
Matt Nadareski  2679975945  TFM support thing  2025-09-03 00:22:46 -04:00
Matt Nadareski  54dd7f2f8f  Add new extension tests  2025-09-03 00:20:02 -04:00
Matt Nadareski  aee5891c50  Backport thing  2025-09-03 00:15:41 -04:00
Matt Nadareski  b81d3314ea  Bump version  2025-09-01 15:25:09 -04:00
Matt Nadareski  4a3ffa5f90  Update fixes, port needed code  2025-09-01 15:21:53 -04:00
Matt Nadareski  a20c7529d6  Handle an edge case  2025-08-28 19:46:50 -04:00
Matt Nadareski  baea5cb0d7  Allow alignment outside of range of byte  2025-08-28 08:57:25 -04:00
Matt Nadareski  659674dd4a  Port ReadStrings extensions from Serialization  2025-08-25 12:44:04 -04:00
Matt Nadareski  5c199a143b  Add ReadFrom extension, move SegmentValid as extension  2025-08-25 10:50:16 -04:00
Matt Nadareski  99ec814808  Minor fixes to view stream read  2025-08-23 21:26:02 -04:00
Matt Nadareski  ea1f02798c  Reorganize composite stream tests  2025-08-23 21:24:04 -04:00
Matt Nadareski  e3d4cc5e45  Cleanup and sync  2025-08-23 21:16:31 -04:00
Matt Nadareski  c98eb5c42a  Add "here to the end" constructors  2025-08-23 21:11:41 -04:00
Matt Nadareski  d0392be2d8  Add view stream type  2025-08-23 21:07:57 -04:00
Matt Nadareski  8761629828  Upstream wrapper from WiseUnpacker  2025-08-15 11:21:10 -04:00
Matt Nadareski  a3b258dfeb  Upstream wrapper from WiseUnpacker  2025-08-11 10:48:38 -04:00
Matt Nadareski  f7505effa1  Fix seeking issue in composite streams  2025-08-01 14:22:15 -04:00
19 changed files with 2452 additions and 38 deletions

LICENSE (new file, +7 lines)

@@ -0,0 +1,7 @@
Copyright (c) 2018-2025 Matt Nadareski
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

ByteArrayExtensionsTests.cs

@@ -1,5 +1,6 @@
using System;
using System.Linq;
using System.Text;
using SabreTools.IO.Extensions;
using Xunit;
@@ -7,7 +8,7 @@ namespace SabreTools.IO.Test.Extensions
{
public class ByteArrayExtensionsTests
{
#region Is Null or Empty
#region IsNullOrEmpty
[Fact]
public void IsNullOrEmpty_Null_True()
@@ -35,7 +36,7 @@ namespace SabreTools.IO.Test.Extensions
#endregion
#region To Hex String
#region ToHexString
[Fact]
public void ToHexString_Null()
@@ -58,7 +59,7 @@ namespace SabreTools.IO.Test.Extensions
#endregion
#region From Hex String
#region FromHexString
[Fact]
public void FromHexString_Null()
@@ -88,5 +89,308 @@ namespace SabreTools.IO.Test.Extensions
}
#endregion
#region ReadStringsFrom
[Fact]
public void ReadStringsFrom_Null_Null()
{
byte[]? arr = null;
var actual = arr.ReadStringsFrom(3);
Assert.Null(actual);
}
[Fact]
public void ReadStringsFrom_Empty_Null()
{
byte[]? arr = [];
var actual = arr.ReadStringsFrom(3);
Assert.Null(actual);
}
[Theory]
[InlineData(-1)]
[InlineData(0)]
[InlineData(2048)]
public void ReadStringsFrom_InvalidLimit_Empty(int charLimit)
{
byte[]? arr = new byte[1024];
var actual = arr.ReadStringsFrom(charLimit);
Assert.NotNull(actual);
Assert.Empty(actual);
}
[Fact]
public void ReadStringsFrom_NoValidStrings_Empty()
{
byte[]? arr = new byte[1024];
var actual = arr.ReadStringsFrom(4);
Assert.NotNull(actual);
Assert.Empty(actual);
}
[Fact]
public void ReadStringsFrom_AsciiStrings_Filled()
{
byte[]? arr =
[
.. Encoding.ASCII.GetBytes("TEST"),
.. new byte[] { 0x00 },
.. Encoding.ASCII.GetBytes("TWO"),
.. new byte[] { 0x00 },
.. Encoding.ASCII.GetBytes("DATA"),
.. new byte[] { 0x00 },
];
var actual = arr.ReadStringsFrom(4);
Assert.NotNull(actual);
// ASCII and UTF-8 are identical for the character range
Assert.Equal(2, actual.Count);
}
[Fact]
public void ReadStringsFrom_UTF8_Filled()
{
byte[]? arr =
[
.. Encoding.UTF8.GetBytes("TEST"),
.. new byte[] { 0x00 },
.. Encoding.UTF8.GetBytes("TWO"),
.. new byte[] { 0x00 },
.. Encoding.UTF8.GetBytes("DATA"),
.. new byte[] { 0x00 },
];
var actual = arr.ReadStringsFrom(4);
Assert.NotNull(actual);
// ASCII and UTF-8 are identical for the character range
Assert.Equal(2, actual.Count);
}
[Fact]
public void ReadStringsFrom_UTF16_Filled()
{
byte[]? arr =
[
.. Encoding.Unicode.GetBytes("TEST"),
.. new byte[] { 0x00 },
.. Encoding.Unicode.GetBytes("TWO"),
.. new byte[] { 0x00 },
.. Encoding.Unicode.GetBytes("DATA"),
.. new byte[] { 0x00 },
];
var actual = arr.ReadStringsFrom(4);
Assert.NotNull(actual);
Assert.Equal(2, actual.Count);
}
[Fact]
public void ReadStringsFrom_Mixed_Filled()
{
byte[]? arr =
[
.. Encoding.ASCII.GetBytes("TEST1"),
.. new byte[] { 0x00 },
.. Encoding.ASCII.GetBytes("TWO1"),
.. new byte[] { 0x00 },
.. Encoding.ASCII.GetBytes("DATA1"),
.. new byte[] { 0x00 },
.. Encoding.UTF8.GetBytes("TEST2"),
.. new byte[] { 0x00 },
.. Encoding.UTF8.GetBytes("TWO2"),
.. new byte[] { 0x00 },
.. Encoding.UTF8.GetBytes("DATA2"),
.. new byte[] { 0x00 },
.. Encoding.Unicode.GetBytes("TEST3"),
.. new byte[] { 0x00 },
.. Encoding.Unicode.GetBytes("TWO3"),
.. new byte[] { 0x00 },
.. Encoding.Unicode.GetBytes("DATA3"),
.. new byte[] { 0x00 },
];
var actual = arr.ReadStringsFrom(5);
Assert.NotNull(actual);
// ASCII and UTF-8 are identical for the character range
Assert.Equal(6, actual.Count);
}
/// <summary>
/// This test is here mainly for performance testing
/// and should not be enabled unless there are changes
/// to the core reading methods that need comparison.
/// </summary>
// [Fact]
// public void ReadStringsFrom_Mixed_MASSIVE()
// {
// byte[]? arr =
// [
// .. Encoding.ASCII.GetBytes("TEST1"),
// .. new byte[] { 0x00 },
// .. Encoding.ASCII.GetBytes("TWO1"),
// .. new byte[] { 0x00 },
// .. Encoding.ASCII.GetBytes("DATA1"),
// .. new byte[] { 0x00 },
// .. Encoding.UTF8.GetBytes("TEST2"),
// .. new byte[] { 0x00 },
// .. Encoding.UTF8.GetBytes("TWO2"),
// .. new byte[] { 0x00 },
// .. Encoding.UTF8.GetBytes("DATA2"),
// .. new byte[] { 0x00 },
// .. Encoding.Unicode.GetBytes("TEST3"),
// .. new byte[] { 0x00 },
// .. Encoding.Unicode.GetBytes("TWO3"),
// .. new byte[] { 0x00 },
// .. Encoding.Unicode.GetBytes("DATA3"),
// .. new byte[] { 0x00 },
// ];
// arr = [.. arr, .. arr, .. arr, .. arr];
// arr = [.. arr, .. arr, .. arr, .. arr];
// arr = [.. arr, .. arr, .. arr, .. arr];
// arr = [.. arr, .. arr, .. arr, .. arr];
// arr = [.. arr, .. arr, .. arr, .. arr];
// arr = [.. arr, .. arr, .. arr, .. arr];
// arr = [.. arr, .. arr, .. arr, .. arr];
// arr = [.. arr, .. arr, .. arr, .. arr];
// arr = [.. arr, .. arr, .. arr, .. arr];
// arr = [.. arr, .. arr, .. arr, .. arr];
// // arr = [.. arr, .. arr, .. arr, .. arr];
// // arr = [.. arr, .. arr, .. arr, .. arr];
// var actual = arr.ReadStringsFrom(5);
// Assert.NotNull(actual);
// Assert.NotEmpty(actual);
// }
#endregion
#region ReadStringsWithEncoding
[Fact]
public void ReadStringsWithEncoding_Null_Empty()
{
byte[]? bytes = null;
var actual = bytes.ReadStringsWithEncoding(1, Encoding.ASCII);
Assert.Empty(actual);
}
[Fact]
public void ReadStringsWithEncoding_Empty_Empty()
{
byte[]? bytes = [];
var actual = bytes.ReadStringsWithEncoding(1, Encoding.ASCII);
Assert.Empty(actual);
}
[Theory]
[InlineData(-1)]
[InlineData(0)]
[InlineData(2048)]
public void ReadStringsWithEncoding_InvalidLimit_Empty(int charLimit)
{
byte[]? bytes = new byte[1024];
var actual = bytes.ReadStringsWithEncoding(charLimit, Encoding.ASCII);
Assert.Empty(actual);
}
[Fact]
public void ReadStringsWithEncoding_NoValidStrings_Empty()
{
byte[]? bytes = new byte[1024];
var actual = bytes.ReadStringsWithEncoding(5, Encoding.ASCII);
Assert.Empty(actual);
}
[Fact]
public void ReadStringsWithEncoding_AsciiStrings_Filled()
{
byte[]? bytes =
[
.. Encoding.ASCII.GetBytes("TEST"),
.. new byte[] { 0x00 },
.. Encoding.ASCII.GetBytes("ONE"),
.. new byte[] { 0x00 },
.. Encoding.ASCII.GetBytes("TWO"),
.. new byte[] { 0x00 },
.. Encoding.ASCII.GetBytes("DATA"),
.. new byte[] { 0x00 },
];
var actual = bytes.ReadStringsWithEncoding(4, Encoding.ASCII);
Assert.Equal(2, actual.Count);
}
[Fact]
public void ReadStringsWithEncoding_Latin1_Filled()
{
byte[]? bytes =
[
.. Encoding.Latin1.GetBytes("TEST"),
.. new byte[] { 0x00 },
.. Encoding.Latin1.GetBytes("ONE"),
.. new byte[] { 0x00 },
.. Encoding.Latin1.GetBytes("TWO"),
.. new byte[] { 0x00 },
.. Encoding.Latin1.GetBytes("DATA"),
.. new byte[] { 0x00 },
];
var actual = bytes.ReadStringsWithEncoding(4, Encoding.Latin1);
Assert.Equal(2, actual.Count);
}
[Fact]
public void ReadStringsWithEncoding_UTF8_Filled()
{
byte[]? bytes =
[
.. Encoding.UTF8.GetBytes("TEST"),
.. new byte[] { 0x00 },
.. Encoding.UTF8.GetBytes("ONE"),
.. new byte[] { 0x00 },
.. Encoding.UTF8.GetBytes("TWO"),
.. new byte[] { 0x00 },
.. Encoding.UTF8.GetBytes("DATA"),
.. new byte[] { 0x00 },
];
var actual = bytes.ReadStringsWithEncoding(4, Encoding.UTF8);
Assert.Equal(2, actual.Count);
}
[Fact]
public void ReadStringsWithEncoding_UTF16_Filled()
{
byte[]? bytes =
[
.. Encoding.Unicode.GetBytes("TEST"),
.. new byte[] { 0x00 },
.. Encoding.Unicode.GetBytes("ONE"),
.. new byte[] { 0x00 },
.. Encoding.Unicode.GetBytes("TWO"),
.. new byte[] { 0x00 },
.. Encoding.Unicode.GetBytes("DATA"),
.. new byte[] { 0x00 },
];
var actual = bytes.ReadStringsWithEncoding(4, Encoding.Unicode);
Assert.Equal(2, actual.Count);
}
[Fact]
public void ReadStringsWithEncoding_UTF32_Filled()
{
byte[]? bytes =
[
.. Encoding.UTF32.GetBytes("TEST"),
.. new byte[] { 0x00 },
.. Encoding.UTF32.GetBytes("ONE"),
.. new byte[] { 0x00 },
.. Encoding.UTF32.GetBytes("TWO"),
.. new byte[] { 0x00 },
.. Encoding.UTF32.GetBytes("DATA"),
.. new byte[] { 0x00 },
];
var actual = bytes.ReadStringsWithEncoding(4, Encoding.UTF32);
Assert.Equal(2, actual.Count);
}
#endregion
}
}
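A rough usage sketch for the byte-array string scanning exercised above; the input file name and limits are hypothetical, and only the extension signatures shown in this diff are assumed.

using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;

byte[] buffer = File.ReadAllBytes("sample.bin");   // hypothetical input file

// Scan for strings of at least 5 characters across the supported encodings
List<string>? found = buffer.ReadStringsFrom(5);

// Or restrict the scan to one explicit encoding
var asciiOnly = buffer.ReadStringsWithEncoding(5, Encoding.ASCII);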

EnumerableExtensionsTests.cs

@@ -2,6 +2,7 @@ using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using SabreTools.IO.Extensions;
using Xunit;
@@ -9,6 +10,22 @@ namespace SabreTools.IO.Test.Extensions
{
public class EnumerableExtensionsTests
{
#region IterateWithAction
[Fact]
public void IterateWithActionTest()
{
List<int> source = [1, 2, 3, 4];
int actual = 0;
source.IterateWithAction(i => Interlocked.Add(ref actual, i));
Assert.Equal(10, actual);
}
#endregion
#region SafeEnumerate
[Fact]
public void SafeEnumerate_Empty()
{
@@ -60,6 +77,8 @@ namespace SabreTools.IO.Test.Extensions
Assert.Equal(2, list.Count);
}
#endregion
/// <summary>
/// Fake enumerable that uses <see cref="ErrorEnumerator"/>
/// </summary>

StreamExtensionsTests.cs

@@ -1,5 +1,6 @@
using System;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using Xunit;
@@ -7,7 +8,7 @@ namespace SabreTools.IO.Test.Extensions
{
public class StreamExtensionsTests
{
#region Align to Boundary
#region AlignToBoundary
[Fact]
public void AlignToBoundary_Null_False()
@@ -62,7 +63,221 @@ namespace SabreTools.IO.Test.Extensions
#endregion
#region Seek If Possible
#region ReadFrom
[Theory]
[InlineData(true)]
[InlineData(false)]
public void ReadFrom_Null_Null(bool retainPosition)
{
Stream? stream = null;
byte[]? actual = stream.ReadFrom(0, 1, retainPosition);
Assert.Null(actual);
}
[Theory]
[InlineData(true)]
[InlineData(false)]
public void ReadFrom_NonSeekable_Null(bool retainPosition)
{
Stream? stream = new NonSeekableStream();
byte[]? actual = stream.ReadFrom(0, 1, retainPosition);
Assert.Null(actual);
}
[Theory]
[InlineData(true)]
[InlineData(false)]
public void ReadFrom_Empty_Null(bool retainPosition)
{
Stream? stream = new MemoryStream([]);
byte[]? actual = stream.ReadFrom(0, 1, retainPosition);
Assert.Null(actual);
}
[Theory]
[InlineData(-1, true)]
[InlineData(2048, true)]
[InlineData(-1, false)]
[InlineData(2048, false)]
public void ReadFrom_InvalidOffset_Null(long offset, bool retainPosition)
{
Stream? stream = new MemoryStream(new byte[1024]);
byte[]? actual = stream.ReadFrom(offset, 1, retainPosition);
Assert.Null(actual);
}
[Theory]
[InlineData(-1, true)]
[InlineData(2048, true)]
[InlineData(-1, false)]
[InlineData(2048, false)]
public void ReadFrom_InvalidLength_Null(int length, bool retainPosition)
{
Stream? stream = new MemoryStream(new byte[1024]);
byte[]? actual = stream.ReadFrom(0, length, retainPosition);
Assert.Null(actual);
}
[Theory]
[InlineData(true)]
[InlineData(false)]
public void ReadFrom_Valid_Filled(bool retainPosition)
{
Stream? stream = new MemoryStream(new byte[1024]);
byte[]? actual = stream.ReadFrom(0, 512, retainPosition);
Assert.NotNull(actual);
Assert.Equal(512, actual.Length);
if (retainPosition)
Assert.Equal(0, stream.Position);
else
Assert.Equal(512, stream.Position);
}
#endregion
#region ReadStringsFrom
[Fact]
public void ReadStringsFrom_Null_Null()
{
Stream? stream = null;
var actual = stream.ReadStringsFrom(0, 1, 3);
Assert.Null(actual);
}
[Fact]
public void ReadStringsFrom_NonSeekable_Null()
{
Stream? stream = new NonSeekableStream();
var actual = stream.ReadStringsFrom(0, 1, 3);
Assert.Null(actual);
}
[Fact]
public void ReadStringsFrom_Empty_Null()
{
Stream? stream = new MemoryStream([]);
var actual = stream.ReadStringsFrom(0, 1, 3);
Assert.Null(actual);
}
[Theory]
[InlineData(-1)]
[InlineData(0)]
[InlineData(2048)]
public void ReadStringsFrom_InvalidLimit_Empty(int charLimit)
{
Stream? stream = new MemoryStream(new byte[1024]);
var actual = stream.ReadStringsFrom(0, 1024, charLimit);
Assert.NotNull(actual);
Assert.Empty(actual);
}
[Fact]
public void ReadStringsFrom_NoValidStrings_Empty()
{
Stream? stream = new MemoryStream(new byte[1024]);
var actual = stream.ReadStringsFrom(0, 1024, 4);
Assert.NotNull(actual);
Assert.Empty(actual);
}
[Fact]
public void ReadStringsFrom_AsciiStrings_Filled()
{
byte[]? bytes =
[
.. Encoding.ASCII.GetBytes("TEST"),
.. new byte[] { 0x00 },
.. Encoding.ASCII.GetBytes("TWO"),
.. new byte[] { 0x00 },
.. Encoding.ASCII.GetBytes("DATA"),
.. new byte[] { 0x00 },
];
Stream? stream = new MemoryStream(bytes);
var actual = stream.ReadStringsFrom(0, bytes.Length, 4);
Assert.NotNull(actual);
// ASCII and UTF-8 are identical for the character range
Assert.Equal(4, actual.Count);
}
[Fact]
public void ReadStringsFrom_UTF8_Filled()
{
byte[]? bytes =
[
.. Encoding.UTF8.GetBytes("TEST"),
.. new byte[] { 0x00 },
.. Encoding.UTF8.GetBytes("TWO"),
.. new byte[] { 0x00 },
.. Encoding.UTF8.GetBytes("DATA"),
.. new byte[] { 0x00 },
];
Stream? stream = new MemoryStream(bytes);
var actual = stream.ReadStringsFrom(0, bytes.Length, 4);
Assert.NotNull(actual);
// ASCII and UTF-8 are identical for the character range
Assert.Equal(4, actual.Count);
}
[Fact]
public void ReadStringsFrom_UTF16_Filled()
{
byte[]? bytes =
[
.. Encoding.Unicode.GetBytes("TEST"),
.. new byte[] { 0x00 },
.. Encoding.Unicode.GetBytes("TWO"),
.. new byte[] { 0x00 },
.. Encoding.Unicode.GetBytes("DATA"),
.. new byte[] { 0x00 },
];
Stream? stream = new MemoryStream(bytes);
var actual = stream.ReadStringsFrom(0, bytes.Length, 4);
Assert.NotNull(actual);
Assert.Equal(2, actual.Count);
}
[Fact]
public void ReadStringsFrom_Mixed_Filled()
{
byte[]? bytes =
[
.. Encoding.ASCII.GetBytes("TEST1"),
.. new byte[] { 0x00 },
.. Encoding.ASCII.GetBytes("TWO1"),
.. new byte[] { 0x00 },
.. Encoding.ASCII.GetBytes("DATA1"),
.. new byte[] { 0x00 },
.. Encoding.UTF8.GetBytes("TEST2"),
.. new byte[] { 0x00 },
.. Encoding.UTF8.GetBytes("TWO2"),
.. new byte[] { 0x00 },
.. Encoding.UTF8.GetBytes("DATA2"),
.. new byte[] { 0x00 },
.. Encoding.Unicode.GetBytes("TEST3"),
.. new byte[] { 0x00 },
.. Encoding.Unicode.GetBytes("TWO3"),
.. new byte[] { 0x00 },
.. Encoding.Unicode.GetBytes("DATA3"),
.. new byte[] { 0x00 },
];
Stream? stream = new MemoryStream(bytes);
var actual = stream.ReadStringsFrom(0, bytes.Length, 5);
Assert.NotNull(actual);
// ASCII and UTF-8 are identical for the character range
Assert.Equal(10, actual.Count);
}
#endregion
#region SeekIfPossible
[Fact]
public void SeekIfPossible_NonSeekable_CurrentPosition()
@@ -106,6 +321,46 @@ namespace SabreTools.IO.Test.Extensions
#endregion
#region SegmentValid
[Fact]
public void SegmentValid_Null_False()
{
Stream? stream = null;
bool actual = stream.SegmentValid(0, 1);
Assert.False(actual);
}
[Theory]
[InlineData(-1)]
[InlineData(2048)]
public void SegmentValid_InvalidOffset_False(long offset)
{
Stream? stream = new MemoryStream(new byte[1024]);
bool actual = stream.SegmentValid(offset, 1);
Assert.False(actual);
}
[Theory]
[InlineData(-1)]
[InlineData(2048)]
public void SegmentValid_InvalidLength_False(int length)
{
Stream? stream = new MemoryStream(new byte[1024]);
bool actual = stream.SegmentValid(0, length);
Assert.False(actual);
}
[Fact]
public void SegmentValid_ValidSegment_True()
{
Stream? stream = new MemoryStream(new byte[1024]);
bool actual = stream.SegmentValid(0, 1024);
Assert.True(actual);
}
#endregion
/// <summary>
/// Represents a hidden non-seekable stream
/// </summary>
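Taken together, the new Stream extensions exercised in this file (SegmentValid, ReadFrom, ReadStringsFrom) might be combined as in the sketch below; the file name and offsets are hypothetical.

using System.IO;
using SabreTools.IO.Extensions;

using Stream stream = File.OpenRead("archive.bin");   // hypothetical file

// Validate the segment before touching it
if (stream.SegmentValid(0x200, 0x100))
{
    // Read 256 bytes at offset 0x200, restoring the original position afterward
    byte[]? block = stream.ReadFrom(0x200, 0x100, true);

    // Scan the same segment for strings of at least 4 characters
    var strings = stream.ReadStringsFrom(0x200, 0x100, 4);
}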

StringExtensionsTests.cs

@@ -0,0 +1,76 @@
using SabreTools.IO.Extensions;
using Xunit;
namespace SabreTools.IO.Test.Extensions
{
public class StringExtensionsTests
{
#region OptionalContains
[Theory]
[InlineData(null, "ANY", false)]
[InlineData("", "ANY", false)]
[InlineData("ANY", "ANY", true)]
[InlineData("ANYTHING", "ANY", true)]
[InlineData("THING", "ANY", false)]
[InlineData("THINGANY", "ANY", true)]
public void OptionalContainsTest(string? haystack, string needle, bool expected)
{
bool actual = haystack.OptionalContains(needle);
Assert.Equal(expected, actual);
}
#endregion
#region OptionalEndsWith
[Theory]
[InlineData(null, "ANY", false)]
[InlineData("", "ANY", false)]
[InlineData("ANY", "ANY", true)]
[InlineData("ANYTHING", "ANY", false)]
[InlineData("THING", "ANY", false)]
[InlineData("THINGANY", "ANY", true)]
public void OptionalEndsWithTest(string? haystack, string needle, bool expected)
{
bool actual = haystack.OptionalEndsWith(needle);
Assert.Equal(expected, actual);
}
#endregion
#region OptionalEquals
[Theory]
[InlineData(null, "ANY", false)]
[InlineData("", "ANY", false)]
[InlineData("ANY", "ANY", true)]
[InlineData("ANYTHING", "ANY", false)]
[InlineData("THING", "ANY", false)]
[InlineData("THINGANY", "ANY", false)]
public void OptionalEqualsTest(string? haystack, string needle, bool expected)
{
bool actual = haystack.OptionalEquals(needle);
Assert.Equal(expected, actual);
}
#endregion
#region OptionalStartsWith
[Theory]
[InlineData(null, "ANY", false)]
[InlineData("", "ANY", false)]
[InlineData("ANY", "ANY", true)]
[InlineData("ANYTHING", "ANY", true)]
[InlineData("THING", "ANY", false)]
[InlineData("THINGANY", "ANY", false)]
public void OptionalStartsWithTest(string? haystack, string needle, bool expected)
{
bool actual = haystack.OptionalStartsWith(needle);
Assert.Equal(expected, actual);
}
#endregion
}
}
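The Optional* helpers covered by these tests are not shown in this diff; based purely on the expected values in the tables above, they likely behave like the following sketch (the real implementations in SabreTools.IO.Extensions may differ, for example in comparison options).

public static class OptionalStringSketch
{
    // Null-tolerant wrappers: a null haystack simply yields false
    public static bool OptionalContains(this string? self, string value)
        => self != null && self.Contains(value);

    public static bool OptionalEndsWith(this string? self, string value)
        => self != null && self.EndsWith(value);

    public static bool OptionalEquals(this string? self, string value)
        => self != null && self.Equals(value);

    public static bool OptionalStartsWith(this string? self, string value)
        => self != null && self.StartsWith(value);
}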

SabreTools.IO.Test.csproj

@@ -26,7 +26,7 @@
</PackageReference>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" />
<PackageReference Include="xunit" Version="2.9.3" />
<PackageReference Include="xunit.runner.visualstudio" Version="3.1.3">
<PackageReference Include="xunit.runner.visualstudio" Version="3.1.4">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>

ReadOnlyCompositeStreamTests.cs

@@ -1,3 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using SabreTools.IO.Streams;
@@ -7,8 +8,10 @@ namespace SabreTools.IO.Test.Streams
{
public class ReadOnlyCompositeStreamTests
{
#region Constructor
[Fact]
public void DefaultConstructorTest()
public void Constructor_Default()
{
var stream = new ReadOnlyCompositeStream();
Assert.Equal(0, stream.Length);
@@ -16,7 +19,7 @@ namespace SabreTools.IO.Test.Streams
}
[Fact]
public void EmptyArrayConstructorTest()
public void Constructor_EmptyArray()
{
Stream[] arr = [new MemoryStream()];
var stream = new ReadOnlyCompositeStream(arr);
@@ -25,9 +28,8 @@ namespace SabreTools.IO.Test.Streams
}
[Fact]
public void EmptyEnumerableConstructorTest()
public void Constructor_EmptyEnumerable()
{
// Empty enumerable constructor
List<Stream> list = [new MemoryStream()];
var stream = new ReadOnlyCompositeStream(list);
Assert.Equal(0, stream.Length);
@@ -35,7 +37,7 @@ namespace SabreTools.IO.Test.Streams
}
[Fact]
public void SingleStreamConstructorTest()
public void Constructor_SingleStream()
{
var stream = new ReadOnlyCompositeStream(new MemoryStream(new byte[1024]));
Assert.Equal(1024, stream.Length);
@@ -43,7 +45,7 @@ namespace SabreTools.IO.Test.Streams
}
[Fact]
public void FilledArrayConstructorTest()
public void Constructor_FilledArray()
{
Stream[] arr = [new MemoryStream(new byte[1024]), new MemoryStream(new byte[1024])];
var stream = new ReadOnlyCompositeStream(arr);
@@ -52,7 +54,7 @@ namespace SabreTools.IO.Test.Streams
}
[Fact]
public void FilledEnumerableConstructorTest()
public void Constructor_FilledEnumerable()
{
List<Stream> list = [new MemoryStream(new byte[1024]), new MemoryStream(new byte[1024])];
var stream = new ReadOnlyCompositeStream(list);
@@ -60,6 +62,10 @@ namespace SabreTools.IO.Test.Streams
Assert.Equal(0, stream.Position);
}
#endregion
#region AddStream
[Fact]
public void AddStreamTest()
{
@@ -70,10 +76,18 @@ namespace SabreTools.IO.Test.Streams
stream.AddStream(new MemoryStream(new byte[1024]));
Assert.Equal(1024, stream.Length);
Assert.Equal(0, stream.Position);
stream.AddStream(new MemoryStream([]));
Assert.Equal(1024, stream.Length);
Assert.Equal(0, stream.Position);
}
#endregion
#region Read
[Fact]
public void EmptyStreamReadTest()
public void Read_EmptyStream()
{
var stream = new ReadOnlyCompositeStream();
@@ -84,7 +98,7 @@ namespace SabreTools.IO.Test.Streams
}
[Fact]
public void SingleStreamReadTest()
public void Read_SingleStream()
{
Stream[] arr = [new MemoryStream(new byte[1024])];
var stream = new ReadOnlyCompositeStream(arr);
@@ -96,7 +110,7 @@ namespace SabreTools.IO.Test.Streams
}
[Fact]
public void MultipleStreamSingleContainedReadTest()
public void Read_MultipleStream_SingleContained()
{
Stream[] arr = [new MemoryStream(new byte[1024]), new MemoryStream(new byte[1024])];
var stream = new ReadOnlyCompositeStream(arr);
@@ -108,7 +122,7 @@ namespace SabreTools.IO.Test.Streams
}
[Fact]
public void MultipleStreamMultipleContainedReadTest()
public void Read_MultipleStream_MultipleContained()
{
Stream[] arr = [new MemoryStream(new byte[256]), new MemoryStream(new byte[256])];
var stream = new ReadOnlyCompositeStream(arr);
@@ -120,7 +134,7 @@ namespace SabreTools.IO.Test.Streams
}
[Fact]
public void SingleStreamExtraReadTest()
public void Read_SingleStream_Extra()
{
Stream[] arr = [new MemoryStream(new byte[256])];
var stream = new ReadOnlyCompositeStream(arr);
@@ -132,7 +146,7 @@ namespace SabreTools.IO.Test.Streams
}
[Fact]
public void MultipleStreamExtraReadTest()
public void Read_MultipleStream_Extra()
{
Stream[] arr = [new MemoryStream(new byte[128]), new MemoryStream(new byte[128])];
var stream = new ReadOnlyCompositeStream(arr);
@@ -142,5 +156,32 @@ namespace SabreTools.IO.Test.Streams
Assert.Equal(256, read);
}
#endregion
#region Unimplemented
[Fact]
public void Flush_Throws()
{
var stream = new ReadOnlyCompositeStream();
Assert.Throws<NotImplementedException>(() => stream.Flush());
}
[Fact]
public void SetLength_Throws()
{
var stream = new ReadOnlyCompositeStream();
Assert.Throws<NotImplementedException>(() => stream.SetLength(0));
}
[Fact]
public void Write_Throws()
{
var stream = new ReadOnlyCompositeStream();
Assert.Throws<NotImplementedException>(() => stream.Write([], 0, 0));
}
#endregion
}
}
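A brief usage sketch for the composite stream exercised above, using only the constructor, AddStream, and Read members shown in these tests.

using System.IO;
using SabreTools.IO.Streams;

Stream[] parts =
[
    new MemoryStream(new byte[128]),
    new MemoryStream(new byte[128]),
];
var composite = new ReadOnlyCompositeStream(parts);

// Reads cross the boundary between the underlying streams transparently
byte[] buffer = new byte[256];
int read = composite.Read(buffer, 0, buffer.Length);   // expected: 256

// Additional sources can be appended after construction
composite.AddStream(new MemoryStream(new byte[64]));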

ViewStreamTests.cs

@@ -0,0 +1,419 @@
using System;
using System.IO;
using SabreTools.IO.Extensions;
using SabreTools.IO.Streams;
using Xunit;
namespace SabreTools.IO.Test.Streams
{
public class ViewStreamTests
{
#region Constructor
[Theory]
[InlineData(0, 0, 0)]
[InlineData(1024, 0, 1024)]
[InlineData(1024, 256, 768)]
public void Constructor_Array(int size, long offset, long expectedLength)
{
byte[] data = new byte[size];
var stream = new ViewStream(data, offset);
Assert.Equal(expectedLength, stream.Length);
Assert.Equal(0, stream.Position);
}
[Theory]
[InlineData(0, 0, 0, 0)]
[InlineData(1024, 0, 1024, 1024)]
[InlineData(1024, 256, 512, 512)]
public void Constructor_Array_Length(int size, long offset, long length, long expectedLength)
{
byte[] data = new byte[size];
var stream = new ViewStream(data, offset, length);
Assert.Equal(expectedLength, stream.Length);
Assert.Equal(0, stream.Position);
}
[Theory]
[InlineData(0, -1, 0)]
[InlineData(0, 2048, 0)]
[InlineData(1024, -1, 1024)]
[InlineData(1024, 2048, 1024)]
[InlineData(1024, -1, 512)]
[InlineData(1024, 2048, 512)]
public void Constructor_Array_InvalidOffset(int size, long offset, long length)
{
byte[] data = new byte[size];
Assert.Throws<ArgumentOutOfRangeException>(() => _ = new ViewStream(data, offset, length));
}
[Theory]
[InlineData(0, 0, -1)]
[InlineData(0, 0, 2048)]
[InlineData(1024, 0, -1)]
[InlineData(1024, 0, 2048)]
[InlineData(1024, 256, -1)]
[InlineData(1024, 256, 2048)]
public void Constructor_Array_InvalidLength(int size, long offset, long length)
{
byte[] data = new byte[size];
Assert.Throws<ArgumentOutOfRangeException>(() => _ = new ViewStream(data, offset, length));
}
[Theory]
[InlineData(0, 0, 0)]
[InlineData(1024, 0, 1024)]
[InlineData(1024, 256, 768)]
public void Constructor_Stream(int size, long offset, long expectedLength)
{
Stream data = new MemoryStream(new byte[size]);
var stream = new ViewStream(data, offset);
Assert.Equal(expectedLength, stream.Length);
Assert.Equal(0, stream.Position);
}
[Theory]
[InlineData(0, 0, 0, 0)]
[InlineData(1024, 0, 1024, 1024)]
[InlineData(1024, 256, 512, 512)]
public void Constructor_Stream_Length(int size, long offset, long length, long expectedLength)
{
Stream data = new MemoryStream(new byte[size]);
var stream = new ViewStream(data, offset, length);
Assert.Equal(expectedLength, stream.Length);
Assert.Equal(0, stream.Position);
}
[Theory]
[InlineData(0, -1, 0)]
[InlineData(0, 2048, 0)]
[InlineData(1024, -1, 1024)]
[InlineData(1024, 2048, 1024)]
[InlineData(1024, -1, 512)]
[InlineData(1024, 2048, 512)]
public void Constructor_Stream_InvalidOffset(int size, long offset, long length)
{
Stream data = new MemoryStream(new byte[size]);
Assert.Throws<ArgumentOutOfRangeException>(() => _ = new ViewStream(data, offset, length));
}
[Theory]
[InlineData(0, 0, -1)]
[InlineData(0, 0, 2048)]
[InlineData(1024, 0, -1)]
[InlineData(1024, 0, 2048)]
[InlineData(1024, 256, -1)]
[InlineData(1024, 256, 2048)]
public void Constructor_Stream_InvalidLength(int size, long offset, long length)
{
Stream data = new MemoryStream(new byte[size]);
Assert.Throws<ArgumentOutOfRangeException>(() => _ = new ViewStream(data, offset, length));
}
#endregion
#region Position
[Theory]
[InlineData(0, 0, 0, -1, 0)]
[InlineData(0, 0, 0, 0, 0)]
[InlineData(0, 0, 0, 256, 0)]
[InlineData(0, 0, 0, 2048, 0)]
[InlineData(1024, 0, 1024, -1, 0)]
[InlineData(1024, 0, 1024, 0, 0)]
[InlineData(1024, 0, 1024, 256, 256)]
[InlineData(1024, 0, 1024, 2048, 1023)]
[InlineData(1024, 256, 512, -1, 0)]
[InlineData(1024, 256, 512, 0, 0)]
[InlineData(1024, 256, 512, 256, 256)]
[InlineData(1024, 256, 512, 2048, 511)]
public void Position_Array(int size, long offset, long length, long position, long expectedPosition)
{
byte[] data = new byte[size];
var stream = new ViewStream(data, offset, length);
stream.Position = position;
Assert.Equal(expectedPosition, stream.Position);
}
[Theory]
[InlineData(0, 0, 0, -1, 0)]
[InlineData(0, 0, 0, 0, 0)]
[InlineData(0, 0, 0, 256, 0)]
[InlineData(0, 0, 0, 2048, 0)]
[InlineData(1024, 0, 1024, -1, 0)]
[InlineData(1024, 0, 1024, 0, 0)]
[InlineData(1024, 0, 1024, 256, 256)]
[InlineData(1024, 0, 1024, 2048, 1023)]
[InlineData(1024, 256, 512, -1, 0)]
[InlineData(1024, 256, 512, 0, 0)]
[InlineData(1024, 256, 512, 256, 256)]
[InlineData(1024, 256, 512, 2048, 511)]
public void Position_Stream(int size, long offset, long length, long position, long expectedPosition)
{
Stream data = new MemoryStream(new byte[size]);
var stream = new ViewStream(data, offset, length);
stream.Position = position;
Assert.Equal(expectedPosition, stream.Position);
}
#endregion
#region SegmentValid
[Theory]
[InlineData(0, 0, 0, -1, 0, false)]
[InlineData(0, 0, 0, 2048, 0, false)]
[InlineData(0, 0, 0, 0, 0, true)]
[InlineData(0, 0, 0, 0, -1, false)]
[InlineData(0, 0, 0, 0, 2048, false)]
[InlineData(1024, 0, 1024, -1, 0, false)]
[InlineData(1024, 0, 1024, 2048, 0, false)]
[InlineData(1024, 0, 1024, 0, 0, true)]
[InlineData(1024, 0, 1024, 256, 0, true)]
[InlineData(1024, 0, 1024, 256, 256, true)]
[InlineData(1024, 0, 1024, 0, -1, false)]
[InlineData(1024, 0, 1024, 0, 2048, false)]
[InlineData(1024, 256, 512, -1, 0, false)]
[InlineData(1024, 256, 512, 2048, 0, false)]
[InlineData(1024, 256, 512, 0, 0, true)]
[InlineData(1024, 256, 512, 256, 0, true)]
[InlineData(1024, 256, 512, 256, 256, true)]
[InlineData(1024, 256, 512, 0, -1, false)]
[InlineData(1024, 256, 512, 0, 2048, false)]
public void SegmentValid_Array(int size, long offset, long length, int segmentStart, int segmentLength, bool expected)
{
byte[] data = new byte[size];
var stream = new ViewStream(data, offset, length);
bool actual = stream.SegmentValid(segmentStart, segmentLength);
Assert.Equal(expected, actual);
}
[Theory]
[InlineData(0, 0, 0, -1, 0, false)]
[InlineData(0, 0, 0, 2048, 0, false)]
[InlineData(0, 0, 0, 0, 0, true)]
[InlineData(0, 0, 0, 0, -1, false)]
[InlineData(0, 0, 0, 0, 2048, false)]
[InlineData(1024, 0, 1024, -1, 0, false)]
[InlineData(1024, 0, 1024, 2048, 0, false)]
[InlineData(1024, 0, 1024, 0, 0, true)]
[InlineData(1024, 0, 1024, 256, 0, true)]
[InlineData(1024, 0, 1024, 256, 256, true)]
[InlineData(1024, 0, 1024, 0, -1, false)]
[InlineData(1024, 0, 1024, 0, 2048, false)]
[InlineData(1024, 256, 512, -1, 0, false)]
[InlineData(1024, 256, 512, 2048, 0, false)]
[InlineData(1024, 256, 512, 0, 0, true)]
[InlineData(1024, 256, 512, 256, 0, true)]
[InlineData(1024, 256, 512, 256, 256, true)]
[InlineData(1024, 256, 512, 0, -1, false)]
[InlineData(1024, 256, 512, 0, 2048, false)]
public void SegmentValid_Stream(int size, long offset, long length, int segmentStart, int segmentLength, bool expected)
{
Stream data = new MemoryStream(new byte[size]);
var stream = new ViewStream(data, offset, length);
bool actual = stream.SegmentValid(segmentStart, segmentLength);
Assert.Equal(expected, actual);
}
#endregion
#region Read
[Theory]
[InlineData(0, 0, 0, -1, 0)]
[InlineData(0, 0, 0, 0, 0)]
[InlineData(0, 0, 0, 2048, 0)]
[InlineData(1024, 0, 1024, -1, 0)]
[InlineData(1024, 0, 1024, 0, 0)]
[InlineData(1024, 0, 1024, 256, 256)]
[InlineData(1024, 0, 1024, 1024, 1024)]
[InlineData(1024, 0, 1024, 2048, 0)]
[InlineData(1024, 256, 512, -1, 0)]
[InlineData(1024, 256, 512, 0, 0)]
[InlineData(1024, 256, 512, 256, 256)]
[InlineData(1024, 256, 512, 512, 512)]
[InlineData(1024, 256, 512, 2048, 0)]
public void Read_Array(int size, long offset, long length, int count, int expectedRead)
{
byte[] data = new byte[size];
var stream = new ViewStream(data, offset, length);
byte[] buffer = new byte[1024];
int actual = stream.Read(buffer, 0, count);
Assert.Equal(expectedRead, actual);
}
[Theory]
[InlineData(0, 0, 0, -1, 0)]
[InlineData(0, 0, 0, 0, 0)]
[InlineData(0, 0, 0, 2048, 0)]
[InlineData(1024, 0, 1024, -1, 0)]
[InlineData(1024, 0, 1024, 0, 0)]
[InlineData(1024, 0, 1024, 256, 256)]
[InlineData(1024, 0, 1024, 1024, 1024)]
[InlineData(1024, 0, 1024, 2048, 0)]
[InlineData(1024, 256, 512, -1, 0)]
[InlineData(1024, 256, 512, 0, 0)]
[InlineData(1024, 256, 512, 256, 256)]
[InlineData(1024, 256, 512, 512, 512)]
[InlineData(1024, 256, 512, 2048, 0)]
public void Read_Stream(int size, long offset, long length, int count, int expectedRead)
{
Stream data = new MemoryStream(new byte[size]);
var stream = new ViewStream(data, offset, length);
byte[] buffer = new byte[1024];
int actual = stream.Read(buffer, 0, count);
Assert.Equal(expectedRead, actual);
}
#endregion
#region Seek
[Theory]
[InlineData(0, 0, 0, -1, SeekOrigin.Begin, 0)]
[InlineData(0, 0, 0, -1, SeekOrigin.End, 0)]
[InlineData(0, 0, 0, -1, SeekOrigin.Current, 0)]
[InlineData(0, 0, 0, 0, SeekOrigin.Begin, 0)]
[InlineData(0, 0, 0, 0, SeekOrigin.End, 0)]
[InlineData(0, 0, 0, 0, SeekOrigin.Current, 0)]
[InlineData(0, 0, 0, 256, SeekOrigin.Begin, 0)]
[InlineData(0, 0, 0, 256, SeekOrigin.End, 0)]
[InlineData(0, 0, 0, 256, SeekOrigin.Current, 0)]
[InlineData(0, 0, 0, 2048, SeekOrigin.Begin, 0)]
[InlineData(0, 0, 0, 2048, SeekOrigin.End, 0)]
[InlineData(0, 0, 0, 2048, SeekOrigin.Current, 0)]
[InlineData(1024, 0, 1024, -1, SeekOrigin.Begin, 0)]
[InlineData(1024, 0, 1024, -1, SeekOrigin.End, 1022)]
[InlineData(1024, 0, 1024, -1, SeekOrigin.Current, 0)]
[InlineData(1024, 0, 1024, 0, SeekOrigin.Begin, 0)]
[InlineData(1024, 0, 1024, 0, SeekOrigin.End, 1023)]
[InlineData(1024, 0, 1024, 0, SeekOrigin.Current, 0)]
[InlineData(1024, 0, 1024, 256, SeekOrigin.Begin, 256)]
[InlineData(1024, 0, 1024, 256, SeekOrigin.End, 1023)]
[InlineData(1024, 0, 1024, 256, SeekOrigin.Current, 256)]
[InlineData(1024, 0, 1024, 2048, SeekOrigin.Begin, 1023)]
[InlineData(1024, 0, 1024, 2048, SeekOrigin.End, 1023)]
[InlineData(1024, 0, 1024, 2048, SeekOrigin.Current, 1023)]
[InlineData(1024, 256, 512, -1, SeekOrigin.Begin, 0)]
[InlineData(1024, 256, 512, -1, SeekOrigin.End, 510)]
[InlineData(1024, 256, 512, -1, SeekOrigin.Current, 0)]
[InlineData(1024, 256, 512, 0, SeekOrigin.Begin, 0)]
[InlineData(1024, 256, 512, 0, SeekOrigin.End, 511)]
[InlineData(1024, 256, 512, 0, SeekOrigin.Current, 0)]
[InlineData(1024, 256, 512, 256, SeekOrigin.Begin, 256)]
[InlineData(1024, 256, 512, 256, SeekOrigin.End, 511)]
[InlineData(1024, 256, 512, 256, SeekOrigin.Current, 256)]
[InlineData(1024, 256, 512, 2048, SeekOrigin.Begin, 511)]
[InlineData(1024, 256, 512, 2048, SeekOrigin.End, 511)]
[InlineData(1024, 256, 512, 2048, SeekOrigin.Current, 511)]
public void Seek_Array(int size, long offset, long length, long position, SeekOrigin seekOrigin, long expectedPosition)
{
byte[] data = new byte[size];
var stream = new ViewStream(data, offset, length);
stream.Seek(position, seekOrigin);
Assert.Equal(expectedPosition, stream.Position);
}
[Theory]
[InlineData(0, 0, 0, -1, SeekOrigin.Begin, 0)]
[InlineData(0, 0, 0, -1, SeekOrigin.End, 0)]
[InlineData(0, 0, 0, -1, SeekOrigin.Current, 0)]
[InlineData(0, 0, 0, 0, SeekOrigin.Begin, 0)]
[InlineData(0, 0, 0, 0, SeekOrigin.End, 0)]
[InlineData(0, 0, 0, 0, SeekOrigin.Current, 0)]
[InlineData(0, 0, 0, 256, SeekOrigin.Begin, 0)]
[InlineData(0, 0, 0, 256, SeekOrigin.End, 0)]
[InlineData(0, 0, 0, 256, SeekOrigin.Current, 0)]
[InlineData(0, 0, 0, 2048, SeekOrigin.Begin, 0)]
[InlineData(0, 0, 0, 2048, SeekOrigin.End, 0)]
[InlineData(0, 0, 0, 2048, SeekOrigin.Current, 0)]
[InlineData(1024, 0, 1024, -1, SeekOrigin.Begin, 0)]
[InlineData(1024, 0, 1024, -1, SeekOrigin.End, 1022)]
[InlineData(1024, 0, 1024, -1, SeekOrigin.Current, 0)]
[InlineData(1024, 0, 1024, 0, SeekOrigin.Begin, 0)]
[InlineData(1024, 0, 1024, 0, SeekOrigin.End, 1023)]
[InlineData(1024, 0, 1024, 0, SeekOrigin.Current, 0)]
[InlineData(1024, 0, 1024, 256, SeekOrigin.Begin, 256)]
[InlineData(1024, 0, 1024, 256, SeekOrigin.End, 1023)]
[InlineData(1024, 0, 1024, 256, SeekOrigin.Current, 256)]
[InlineData(1024, 0, 1024, 2048, SeekOrigin.Begin, 1023)]
[InlineData(1024, 0, 1024, 2048, SeekOrigin.End, 1023)]
[InlineData(1024, 0, 1024, 2048, SeekOrigin.Current, 1023)]
[InlineData(1024, 256, 512, -1, SeekOrigin.Begin, 0)]
[InlineData(1024, 256, 512, -1, SeekOrigin.End, 510)]
[InlineData(1024, 256, 512, -1, SeekOrigin.Current, 0)]
[InlineData(1024, 256, 512, 0, SeekOrigin.Begin, 0)]
[InlineData(1024, 256, 512, 0, SeekOrigin.End, 511)]
[InlineData(1024, 256, 512, 0, SeekOrigin.Current, 0)]
[InlineData(1024, 256, 512, 256, SeekOrigin.Begin, 256)]
[InlineData(1024, 256, 512, 256, SeekOrigin.End, 511)]
[InlineData(1024, 256, 512, 256, SeekOrigin.Current, 256)]
[InlineData(1024, 256, 512, 2048, SeekOrigin.Begin, 511)]
[InlineData(1024, 256, 512, 2048, SeekOrigin.End, 511)]
[InlineData(1024, 256, 512, 2048, SeekOrigin.Current, 511)]
public void Seek_Stream(int size, long offset, long length, long position, SeekOrigin seekOrigin, long expectedPosition)
{
Stream data = new MemoryStream(new byte[size]);
var stream = new ViewStream(data, offset, length);
stream.Seek(position, seekOrigin);
Assert.Equal(expectedPosition, stream.Position);
}
#endregion
#region Unimplemented
[Fact]
public void Flush_Array_Throws()
{
byte[] data = new byte[1024];
var stream = new ViewStream(data, 0, 1024);
Assert.Throws<NotImplementedException>(() => stream.Flush());
}
[Fact]
public void Flush_Stream_Throws()
{
Stream data = new MemoryStream(new byte[1024]);
var stream = new ViewStream(data, 0, 1024);
Assert.Throws<NotImplementedException>(() => stream.Flush());
}
[Fact]
public void SetLength_Array_Throws()
{
byte[] data = new byte[1024];
var stream = new ViewStream(data, 0, 1024);
Assert.Throws<NotImplementedException>(() => stream.SetLength(0));
}
[Fact]
public void SetLength_Stream_Throws()
{
Stream data = new MemoryStream(new byte[1024]);
var stream = new ViewStream(data, 0, 1024);
Assert.Throws<NotImplementedException>(() => stream.SetLength(0));
}
[Fact]
public void Write_Array_Throws()
{
byte[] data = new byte[1024];
var stream = new ViewStream(data, 0, 1024);
Assert.Throws<NotImplementedException>(() => stream.Write([], 0, 0));
}
[Fact]
public void Write_Stream_Throws()
{
Stream data = new MemoryStream(new byte[1024]);
var stream = new ViewStream(data, 0, 1024);
Assert.Throws<NotImplementedException>(() => stream.Write([], 0, 0));
}
#endregion
}
}
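A usage sketch for the view stream type, based on the constructors and members exercised above; the backing data and window bounds are hypothetical.

using System.IO;
using SabreTools.IO.Streams;

Stream backing = new MemoryStream(new byte[4096]);

// Expose bytes 1024..1535 of the backing stream as their own stream
var view = new ViewStream(backing, 1024, 512);

byte[] buffer = new byte[512];
int read = view.Read(buffer, 0, buffer.Length);   // bounded by the view length

// Positions and seeks are relative to the start of the view
view.Seek(0, SeekOrigin.Begin);
bool ok = view.SegmentValid(256, 128);            // true: lies inside the view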

DeflateInfo.cs

@@ -0,0 +1,26 @@
namespace SabreTools.IO.Compression.Deflate
{
/// <summary>
/// Represents information about a DEFLATE stream
/// </summary>
public class DeflateInfo
{
/// <summary>
/// Size of the deflated data
/// </summary>
/// <remarks>Set to a value less than 0 to ignore</remarks>
public long InputSize { get; set; }
/// <summary>
/// Size of the inflated data
/// </summary>
/// <remarks>Set to a value less than 0 to ignore</remarks>
public long OutputSize { get; set; }
/// <summary>
/// CRC-32 of the inflated data
/// </summary>
/// <remarks>Set to a value of 0 to ignore</remarks>
public uint Crc32 { get; set; }
}
}
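A small illustration of the "ignore" conventions called out in the remarks above; the sizes used here are placeholders.

using SabreTools.IO.Compression.Deflate;

var expected = new DeflateInfo
{
    InputSize = 0x1234,   // compressed size taken from some container header
    OutputSize = -1,      // unknown, so the inflated size is not verified
    Crc32 = 0,            // unknown, so the CRC-32 check is skipped
};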

ExtractionStatus.cs

@@ -0,0 +1,34 @@
namespace SabreTools.IO.Compression.Deflate
{
/// <summary>
/// Represents the status returned from extracting
/// </summary>
public enum ExtractionStatus
{
/// <summary>
/// Extraction wasn't performed because the inputs were invalid
/// </summary>
INVALID,
/// <summary>
/// No issues with the extraction
/// </summary>
GOOD,
/// <summary>
/// File extracted but was the wrong size
/// </summary>
/// <remarks>Rewinds the stream and deletes the bad file</remarks>
WRONG_SIZE,
/// <summary>
/// File extracted but had the wrong CRC-32 value
/// </summary>
BAD_CRC,
/// <summary>
/// Extraction failed entirely
/// </summary>
FAIL,
}
}

InflateWrapper.cs

@@ -0,0 +1,439 @@
using System;
using System.IO;
using System.Text;
using SabreTools.Hashing;
using SabreTools.IO.Extensions;
using SabreTools.Models.PKZIP;
using static SabreTools.Models.PKZIP.Constants;
namespace SabreTools.IO.Compression.Deflate
{
/// <summary>
/// Wrapper to handle DEFLATE decompression with data verification
/// </summary>
public class InflateWrapper
{
#region Constants
/// <summary>
/// Buffer size for decompression
/// </summary>
private const int BufferSize = 1024 * 1024;
#endregion
#region Extraction
/// <summary>
/// Attempt to extract a file defined by a filename
/// </summary>
/// <param name="source">Stream representing the deflated data</param>
/// <param name="filename">Output filename, null to auto-generate</param>
/// <param name="outputDirectory">Output directory to write to</param>
/// <param name="expected">Expected DEFLATE stream information</param>
/// <param name="pkzip">Indicates if PKZIP containers are used</param>
/// <param name="includeDebug">True to include debug data, false otherwise</param>
/// <returns>Extraction status representing the final state</returns>
/// <remarks>Assumes that the current stream position is where the compressed data lives</remarks>
public static ExtractionStatus ExtractFile(Stream source,
string? filename,
string outputDirectory,
DeflateInfo expected,
bool pkzip,
bool includeDebug)
{
// Debug output
if (includeDebug) Console.WriteLine($"Attempting to extract {filename}");
// Extract the file
var destination = new MemoryStream();
ExtractionStatus status = ExtractStream(source,
destination,
expected,
pkzip,
includeDebug,
out var foundFilename);
// If the extracted data is invalid
if (status != ExtractionStatus.GOOD || destination == null)
return status;
// Ensure directory separators are consistent
filename ??= foundFilename ?? $"FILE_[{expected.InputSize}, {expected.OutputSize}, {expected.Crc32}]";
if (Path.DirectorySeparatorChar == '\\')
filename = filename.Replace('/', '\\');
else if (Path.DirectorySeparatorChar == '/')
filename = filename.Replace('\\', '/');
// Ensure the full output directory exists
filename = Path.Combine(outputDirectory, filename);
var directoryName = Path.GetDirectoryName(filename);
if (directoryName != null && !Directory.Exists(directoryName))
Directory.CreateDirectory(directoryName);
// Write the output file
File.WriteAllBytes(filename, destination.ToArray());
return status;
}
/// <summary>
/// Attempt to extract a file to a stream
/// </summary>
/// <param name="source">Stream representing the deflated data</param>
/// <param name="destination">Stream where the inflated data will be written</param>
/// <param name="expected">Expected DEFLATE stream information</param>
/// <param name="pkzip">Indicates if PKZIP containers are used</param>
/// <param name="includeDebug">True to include debug data, false otherwise</param>
/// <param name="filename">Output filename if extracted from the data, null otherwise</param>
/// <returns>Extraction status representing the final state</returns>
/// <remarks>Assumes that the current stream position is where the compressed data lives</remarks>
public static ExtractionStatus ExtractStream(Stream source,
Stream destination,
DeflateInfo expected,
bool pkzip,
bool includeDebug,
out string? filename)
{
// If PKZIP containers are used
if (pkzip)
return ExtractStreamWithContainer(source, destination, expected, includeDebug, out filename);
// If post-data checksums are used
filename = null;
return ExtractStreamWithChecksum(source, destination, expected, includeDebug);
}
/// <summary>
/// Extract source data in a PKZIP container
/// </summary>
/// <param name="source">Stream representing the deflated data</param>
/// <param name="destination">Stream where the inflated data will be written</param>
/// <param name="expected">Expected DEFLATE stream information</param>
/// <param name="includeDebug">True to include debug data, false otherwise</param>
/// <param name="filename">Filename from the PKZIP header, if it exists</param>
/// <returns>Extraction status representing the final state</returns>
public static ExtractionStatus ExtractStreamWithContainer(Stream source,
Stream destination,
DeflateInfo expected,
bool includeDebug,
out string? filename)
{
// Set default values
filename = null;
// Debug output
if (includeDebug) Console.WriteLine($"Offset: {source.Position:X8}, Expected Read: {expected.InputSize}, Expected Write: {expected.OutputSize}, Expected CRC-32: {expected.Crc32:X8}");
// Check the validity of the inputs
if (expected.InputSize == 0)
{
if (includeDebug) Console.Error.WriteLine($"Not attempting to extract, expected to read 0 bytes");
return ExtractionStatus.INVALID;
}
else if (expected.InputSize > (source.Length - source.Position))
{
if (includeDebug) Console.Error.WriteLine($"Not attempting to extract, expected to read {expected.InputSize} bytes but only {source.Length - source.Position} bytes remain");
return ExtractionStatus.INVALID;
}
// Cache the current offset
long current = source.Position;
// Parse the PKZIP header, if it exists
LocalFileHeader? zipHeader = ParseLocalFileHeader(source);
long zipHeaderBytes = source.Position - current;
// Always trust the PKZIP CRC-32 value over what is supplied
if (zipHeader != null)
expected.Crc32 = zipHeader.CRC32;
// Prefer the filename from the PKZIP header when one is present
if (zipHeader?.FileName != null)
{
filename = zipHeader.FileName;
if (includeDebug) Console.WriteLine($"Filename from PKZIP header: {filename}");
}
// Debug output
if (includeDebug) Console.WriteLine($"PKZIP Filename: {zipHeader?.FileName}, PKZIP Expected Read: {zipHeader?.CompressedSize}, PKZIP Expected Write: {zipHeader?.UncompressedSize}, PKZIP Expected CRC-32: {zipHeader?.CRC32:X4}");
// Extract the file
var actual = Inflate(source, destination);
if (actual == null)
{
if (includeDebug) Console.Error.WriteLine($"Could not extract {filename}");
return ExtractionStatus.FAIL;
}
// Account for the header bytes read
actual.InputSize += zipHeaderBytes;
source.Seek(current + actual.InputSize, SeekOrigin.Begin);
// Verify the extracted data
return VerifyExtractedData(source, current, expected, actual, includeDebug);
}
/// <summary>
/// Extract source data with a trailing CRC-32 checksum
/// </summary>
/// <param name="source">Stream representing the deflated data</param>
/// <param name="destination">Stream where the inflated data will be written</param>
/// <param name="expected">Expected DEFLATE stream information</param>
/// <param name="includeDebug">True to include debug data, false otherwise</param>
/// <returns>Extraction status representing the final state</returns>
public static ExtractionStatus ExtractStreamWithChecksum(Stream source,
Stream destination,
DeflateInfo expected,
bool includeDebug)
{
// Debug output
if (includeDebug) Console.WriteLine($"Offset: {source.Position:X8}, Expected Read: {expected.InputSize}, Expected Write: {expected.OutputSize}, Expected CRC-32: {expected.Crc32:X8}");
// Check the validity of the inputs
if (expected.InputSize == 0)
{
if (includeDebug) Console.Error.WriteLine($"Not attempting to extract, expected to read 0 bytes");
return ExtractionStatus.INVALID;
}
else if (expected.InputSize > (source.Length - source.Position))
{
if (includeDebug) Console.Error.WriteLine($"Not attempting to extract, expected to read {expected.InputSize} bytes but only {source.Length - source.Position} bytes remain");
return ExtractionStatus.INVALID;
}
// Cache the current offset
long current = source.Position;
// Extract the file
var actual = Inflate(source, destination);
if (actual == null)
{
if (includeDebug) Console.Error.WriteLine($"Could not extract");
return ExtractionStatus.FAIL;
}
// Seek to the true end of the data
source.Seek(current + actual.InputSize, SeekOrigin.Begin);
// If the read size is off by one byte once the trailing 4-byte checksum is accounted for
if (actual.InputSize == expected.InputSize - 5)
{
// If not at the end of the file, get the corrected offset
if (source.Position + 5 < source.Length)
{
// TODO: What does this byte represent?
byte padding = source.ReadByteValue();
actual.InputSize += 1;
// Debug output
if (includeDebug) Console.WriteLine($"Off-by-one padding byte detected: 0x{padding:X2}");
}
else
{
// Debug output
if (includeDebug) Console.WriteLine($"Not enough data to adjust offset");
}
}
// If there is enough data to read the full CRC
uint deflateCrc;
if (source.Position + 4 < source.Length)
{
deflateCrc = source.ReadUInt32LittleEndian();
actual.InputSize += 4;
}
// Otherwise, read what is possible and pad with 0x00
else
{
byte[] deflateCrcBytes = new byte[4];
int realCrcLength = source.Read(deflateCrcBytes, 0, (int)(source.Length - source.Position));
// Parse as a little-endian 32-bit value
deflateCrc = (uint)(deflateCrcBytes[0]
| (deflateCrcBytes[1] << 8)
| (deflateCrcBytes[2] << 16)
| (deflateCrcBytes[3] << 24));
actual.InputSize += realCrcLength;
}
// If the CRC to check isn't set
if (expected.Crc32 == 0)
expected.Crc32 = deflateCrc;
// Debug output
if (includeDebug) Console.WriteLine($"DeflateStream CRC-32: {deflateCrc:X8}");
// Verify the extracted data
return VerifyExtractedData(source, current, expected, actual, includeDebug);
}
/// <summary>
/// Parse a Stream into a local file header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled local file header on success, null on error</returns>
/// <remarks>Mirror of method in Serialization</remarks>
private static LocalFileHeader? ParseLocalFileHeader(Stream data)
{
var header = new LocalFileHeader();
header.Signature = data.ReadUInt32LittleEndian();
if (header.Signature != LocalFileHeaderSignature)
return null;
header.Version = data.ReadUInt16LittleEndian();
header.Flags = (GeneralPurposeBitFlags)data.ReadUInt16LittleEndian();
header.CompressionMethod = (CompressionMethod)data.ReadUInt16LittleEndian();
header.LastModifedFileTime = data.ReadUInt16LittleEndian();
header.LastModifiedFileDate = data.ReadUInt16LittleEndian();
header.CRC32 = data.ReadUInt32LittleEndian();
header.CompressedSize = data.ReadUInt32LittleEndian();
header.UncompressedSize = data.ReadUInt32LittleEndian();
header.FileNameLength = data.ReadUInt16LittleEndian();
header.ExtraFieldLength = data.ReadUInt16LittleEndian();
if (header.FileNameLength > 0 && data.Position + header.FileNameLength <= data.Length)
{
byte[] filenameBytes = data.ReadBytes(header.FileNameLength);
if (filenameBytes.Length != header.FileNameLength)
return null;
header.FileName = Encoding.ASCII.GetString(filenameBytes);
}
// Parsing extras is skipped here, unlike in Serialization
if (header.ExtraFieldLength > 0 && data.Position + header.ExtraFieldLength <= data.Length)
{
byte[] extraBytes = data.ReadBytes(header.ExtraFieldLength);
if (extraBytes.Length != header.ExtraFieldLength)
return null;
}
return header;
}
/// <summary>
/// Verify the extracted stream data, seeking to the original location on failure
/// </summary>
/// <param name="source">Stream representing the deflated data</param>
/// <param name="start">Position representing the start of the deflated data</param>
/// <param name="expected">Expected deflation info</param>
/// <param name="actual">Actual deflation info</param>
/// <param name="includeDebug">True to include debug data, false otherwise</param>
/// <returns>Extraction status representing the final state</returns>
private static ExtractionStatus VerifyExtractedData(Stream source,
long start,
DeflateInfo expected,
DeflateInfo actual,
bool includeDebug)
{
// Debug output
if (includeDebug) Console.WriteLine($"Actual Read: {actual.InputSize}, Actual Write: {actual.OutputSize}, Actual CRC-32: {actual.Crc32:X8}");
// If there's a mismatch during both reading and writing
if (expected.InputSize >= 0 && expected.InputSize != actual.InputSize)
{
// This in/out check helps catch false positives, such as
// files that have an off-by-one mismatch for read values
// but properly match the output written values.
// If the written bytes are not correct either
if (expected.OutputSize >= 0 && expected.OutputSize != actual.OutputSize)
{
if (includeDebug) Console.Error.WriteLine($"Mismatched read/write values!");
source.Seek(start, SeekOrigin.Begin);
return ExtractionStatus.WRONG_SIZE;
}
// If the written bytes are not being verified
else if (expected.OutputSize < 0)
{
if (includeDebug) Console.Error.WriteLine($"Mismatched read/write values!");
source.Seek(start, SeekOrigin.Begin);
return ExtractionStatus.WRONG_SIZE;
}
}
// If there's a mismatch during writing only
if (expected.InputSize >= 0 && expected.InputSize == actual.InputSize)
{
// We want to log this but ignore the error
if (expected.OutputSize >= 0 && expected.OutputSize != actual.OutputSize)
{
if (includeDebug) Console.WriteLine($"Ignoring mismatched write values because read values match!");
}
}
// Otherwise, the write size should be checked normally
else if (expected.InputSize == 0 && expected.OutputSize >= 0 && expected.OutputSize != actual.OutputSize)
{
if (includeDebug) Console.Error.WriteLine($"Mismatched write values!");
source.Seek(start, SeekOrigin.Begin);
return ExtractionStatus.WRONG_SIZE;
}
// If there's a mismatch with the CRC-32
if (expected.Crc32 != 0 && expected.Crc32 != actual.Crc32)
{
if (includeDebug) Console.Error.WriteLine($"Mismatched CRC-32 values!");
source.Seek(start, SeekOrigin.Begin);
return ExtractionStatus.BAD_CRC;
}
return ExtractionStatus.GOOD;
}
#endregion
#region Inflation
/// <summary>
/// Inflate an input stream to an output stream
/// </summary>
/// <param name="source">Stream representing the deflated data</param>
/// <param name="destination">Stream where the inflated data will be written</param>
/// <returns>Deflate information representing the processed data on success, null on error</returns>
public static DeflateInfo? Inflate(Stream source, Stream destination)
{
try
{
// Setup the hasher for CRC-32 calculation
using var hasher = new HashWrapper(HashType.CRC32);
// Create a DeflateStream from the input
using var ds = new DeflateStream(source, CompressionMode.Decompress, leaveOpen: true);
// Decompress in blocks
while (true)
{
byte[] buf = new byte[BufferSize];
int read = ds.Read(buf, 0, buf.Length);
if (read == 0)
break;
hasher.Process(buf, 0, read);
destination.Write(buf, 0, read);
}
// Finalize the hash
hasher.Terminate();
byte[] hashBytes = hasher.CurrentHashBytes!;
// Save the deflate values
return new DeflateInfo
{
InputSize = ds.TotalIn,
OutputSize = ds.TotalOut,
Crc32 = BitConverter.ToUInt32(hashBytes, 0),
};
}
catch
{
return null;
}
}
#endregion
}
}
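A hedged end-to-end sketch of the wrapper above: the file names, offset, and expected sizes are hypothetical and would normally come from the surrounding container's headers.

using System;
using System.IO;
using SabreTools.IO.Compression.Deflate;

using Stream source = File.OpenRead("installer.bin");   // hypothetical container
source.Seek(0x800, SeekOrigin.Begin);                   // start of the DEFLATE data

var expected = new DeflateInfo { InputSize = 0x1234, OutputSize = 0x4000, Crc32 = 0 };
using var destination = new MemoryStream();

ExtractionStatus status = InflateWrapper.ExtractStream(
    source, destination, expected, pkzip: false, includeDebug: true, out string? name);

if (status == ExtractionStatus.GOOD)
    File.WriteAllBytes("payload.bin", destination.ToArray());
else
    Console.Error.WriteLine($"Extraction failed with status {status}");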

MoPaQDecrypter.cs

@@ -0,0 +1,172 @@
using System;
using System.IO;
using SabreTools.Hashing;
using SabreTools.Matching;
using static SabreTools.Models.MoPaQ.Constants;
namespace SabreTools.IO.Encryption
{
/// <summary>
/// Handler for decrypting MoPaQ block and table data
/// </summary>
public class MoPaQDecrypter
{
#region Private Instance Variables
/// <summary>
/// Buffer for encryption and decryption
/// </summary>
private readonly uint[] _stormBuffer = new uint[STORM_BUFFER_SIZE];
#endregion
public MoPaQDecrypter()
{
PrepareCryptTable();
}
/// <summary>
/// Prepare the encryption table
/// </summary>
private void PrepareCryptTable()
{
uint seed = 0x00100001;
for (uint index1 = 0; index1 < 0x100; index1++)
{
for (uint index2 = index1, i = 0; i < 5; i++, index2 += 0x100)
{
seed = (seed * 125 + 3) % 0x2AAAAB;
uint temp1 = (seed & 0xFFFF) << 0x10;
seed = (seed * 125 + 3) % 0x2AAAAB;
uint temp2 = (seed & 0xFFFF);
_stormBuffer[index2] = (temp1 | temp2);
}
}
}
/// <summary>
/// Load a table block by optionally decompressing and
/// decrypting before returning the data.
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="offset">Data offset to parse</param>
/// <param name="expectedHash">Optional MD5 hash for validation</param>
/// <param name="compressedSize">Size of the table in the file</param>
/// <param name="tableSize">Expected size of the table</param>
/// <param name="key">Encryption key to use</param>
/// <param name="realTableSize">Output represening the real table size</param>
/// <returns>Byte array representing the processed table</returns>
public byte[]? LoadTable(Stream data,
long offset,
byte[]? expectedHash,
uint compressedSize,
uint tableSize,
uint key,
out long realTableSize)
{
byte[]? tableData;
byte[]? readBytes;
long bytesToRead = tableSize;
// Allocate the MPQ table
tableData = readBytes = new byte[tableSize];
// Check if the MPQ table is compressed
if (compressedSize != 0 && compressedSize < tableSize)
{
// Allocate temporary buffer for holding compressed data
readBytes = new byte[compressedSize];
bytesToRead = compressedSize;
}
// Get the file offset from which we will read the table
// Note: According to Storm.dll from Warcraft III (version 2002),
// if the hash table position is 0xFFFFFFFF, no SetFilePointer call is done
// and the table is loaded from the current file offset
if (offset == 0xFFFFFFFF)
offset = data.Position;
// Is the sector table within the file?
if (offset >= data.Length)
{
realTableSize = 0;
return null;
}
// The hash table and block table can go beyond EOF.
// Storm.dll reads as much as possible, then fills the missing part with zeros.
// Abused by Spazzler map protector which sets hash table size to 0x00100000
// Abused by NP_Protect in MPQs v4 as well
if ((offset + bytesToRead) > data.Length)
bytesToRead = (uint)(data.Length - offset);
// Give the caller information that the table was cut
realTableSize = bytesToRead;
// If everything succeeded, read the raw table from the MPQ
data.Seek(offset, SeekOrigin.Begin);
_ = data.Read(readBytes, 0, (int)bytesToRead);
// Verify the MD5 of the table, if present
byte[]? actualHash = HashTool.GetByteArrayHashArray(readBytes, HashType.MD5);
if (expectedHash != null && actualHash != null && !actualHash.EqualsExactly(expectedHash))
{
Console.WriteLine("Table is corrupt!");
return null;
}
// First of all, decrypt the table
if (key != 0)
tableData = DecryptBlock(readBytes, bytesToRead, key);
// If the table is compressed, decompress it
if (compressedSize != 0 && compressedSize < tableSize)
{
Console.WriteLine("Table is compressed, it will not read properly!");
return null;
// TODO: Handle decompression
// int cbOutBuffer = (int)tableSize;
// int cbInBuffer = (int)compressedSize;
// if (!SCompDecompress2(readBytes, &cbOutBuffer, tableData, cbInBuffer))
// errorCode = SErrGetLastError();
// tableData = readBytes;
}
// Return the MPQ table
return tableData;
}
/// <summary>
/// Decrypt a single block of data
/// </summary>
public unsafe byte[] DecryptBlock(byte[] block, long length, uint key)
{
uint seed = 0xEEEEEEEE;
uint[] castBlock = new uint[length >> 2];
Buffer.BlockCopy(block, 0, castBlock, 0, castBlock.Length * 4);
int castBlockPtr = 0;
// Round to uints
length >>= 2;
while (length-- > 0)
{
seed += _stormBuffer[MPQ_HASH_KEY2_MIX + (key & 0xFF)];
uint ch = castBlock[castBlockPtr] ^ (key + seed);
key = ((~key << 0x15) + 0x11111111) | (key >> 0x0B);
seed = ch + seed + (seed << 5) + 3;
castBlock[castBlockPtr++] = ch;
}
Buffer.BlockCopy(castBlock, 0, block, 0, castBlock.Length * 4);
return block;
}
}
}
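A hedged usage sketch of MoPaQDecrypter as exposed above; the archive path, table offset, size, and key below are illustrative placeholders, not values taken from a real archive.
using System;
using System.IO;
using SabreTools.IO.Encryption;

using var mpq = File.OpenRead("archive.mpq"); // placeholder archive

var decrypter = new MoPaQDecrypter();

// Arguments: data, offset, expectedHash, compressedSize, tableSize, key, out realTableSize;
// the offset, table size, and key used here are assumed values
byte[]? table = decrypter.LoadTable(mpq, 0x2C, null, 0, 0x1000, 0xC3AF3770, out long realTableSize);

if (table != null)
    Console.WriteLine($"Read {realTableSize} of {table.Length} table bytes");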

View File

@@ -1,4 +1,7 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace SabreTools.IO.Extensions
{
@@ -50,5 +53,187 @@ namespace SabreTools.IO.Extensions
return null;
}
}
/// <summary>
/// Read string data from a byte array
/// </summary>
/// <param name="charLimit">Number of characters needed to be a valid string, default 5</param>
/// <returns>String list containing the requested data, null on error</returns>
public static List<string>? ReadStringsFrom(this byte[]? input, int charLimit = 5)
{
// Validate the data
if (input == null || input.Length == 0)
return null;
// Check for ASCII strings
var asciiStrings = input.ReadStringsWithEncoding(charLimit, Encoding.ASCII);
// Check for Unicode strings
// We are limiting the check for Unicode characters with a second byte of 0x00 for now
var unicodeStrings = input.ReadStringsWithEncoding(charLimit, Encoding.Unicode);
// Combine the strings found across encodings
List<string> sourceStrings = [.. asciiStrings, .. unicodeStrings];
// Sort the strings and return
sourceStrings.Sort();
return sourceStrings;
}
/// <summary>
/// Read string data from a byte array with an encoding
/// </summary>
/// <param name="bytes">Byte array representing the source data</param>
/// <param name="charLimit">Number of characters needed to be a valid string</param>
/// <param name="encoding">Character encoding to use for checking</param>
/// <returns>String list containing the requested data, empty on error</returns>
/// <remarks>Characters with any of the high bytes set are treated as invalid and end the current string</remarks>
#if NET20
public static List<string> ReadStringsWithEncoding(this byte[]? bytes, int charLimit, Encoding encoding)
#else
public static HashSet<string> ReadStringsWithEncoding(this byte[]? bytes, int charLimit, Encoding encoding)
#endif
{
if (bytes == null || bytes.Length == 0)
return [];
if (charLimit <= 0 || charLimit > bytes.Length)
return [];
// Short-circuit for some encoding types
if (encoding.CodePage == Encoding.ASCII.CodePage)
return bytes.ReadFixedWidthEncodingStrings(charLimit, Encoding.ASCII, 1);
#if NET5_0_OR_GREATER
else if (encoding.CodePage == Encoding.Latin1.CodePage)
return bytes.ReadFixedWidthEncodingStrings(charLimit, Encoding.Latin1, 1);
#endif
else if (encoding.CodePage == Encoding.Unicode.CodePage)
return bytes.ReadFixedWidthEncodingStrings(charLimit, Encoding.Unicode, 2);
else if (encoding.CodePage == Encoding.UTF32.CodePage)
return bytes.ReadFixedWidthEncodingStrings(charLimit, Encoding.UTF32, 4);
// Create the string set to return
#if NET20
var strings = new List<string>();
#else
var strings = new HashSet<string>();
#endif
// Open the text reader with the correct encoding
using var ms = new MemoryStream(bytes);
using var reader = new StreamReader(ms, encoding);
// Create a string builder for the loop
var sb = new StringBuilder();
// Check for strings
long lastOffset = 0;
while (!reader.EndOfStream)
{
// Read the next character from the stream
char c = (char)reader.Read();
// If the character is invalid
if (char.IsControl(c) || (c & 0xFF00) != 0)
{
// Seek to the end of the last found string
string str = sb.ToString();
lastOffset += encoding.GetByteCount(str) + 1;
ms.Seek(lastOffset, SeekOrigin.Begin);
reader.DiscardBufferedData();
// Add the string if long enough
if (str.Length >= charLimit)
strings.Add(str);
// Clear the builder and continue
#if NET20 || NET35
sb = new();
#else
sb.Clear();
#endif
continue;
}
// Otherwise, add the character to the builder and continue
sb.Append(c);
}
// Handle any remaining data
if (sb.Length >= charLimit)
strings.Add(sb.ToString());
return strings;
}
#region Fixed Byte-Width Encoding Helpers
/// <summary>
/// Read string data from a byte array using an encoding with a fixed width
/// </summary>
/// <param name="bytes">Byte array representing the source data</param>
/// <param name="charLimit">Number of characters needed to be a valid string</param>
/// <param name="encoding">Character encoding to use for checking</param>
/// <param name="width">Character width of the encoding</param>
/// <returns>String list containing the requested data, empty on error</returns>
/// <remarks>Characters with any of the high bytes set are treated as invalid and end the current string</remarks>
#if NET20
private static List<string> ReadFixedWidthEncodingStrings(this byte[] bytes, int charLimit, Encoding encoding, int width)
#else
private static HashSet<string> ReadFixedWidthEncodingStrings(this byte[] bytes, int charLimit, Encoding encoding, int width)
#endif
{
if (charLimit <= 0 || charLimit > bytes.Length)
return [];
// Create the string set to return
#if NET20
var strings = new List<string>();
#else
var strings = new HashSet<string>();
#endif
// Create a string builder for the loop
var sb = new StringBuilder();
// Check for strings
int offset = 0;
while (offset <= bytes.Length - width)
{
// Read the next character from the stream
char c = encoding.GetChars(bytes, offset, width)[0];
offset += width;
// If the character is invalid
if (char.IsControl(c) || (c & 0xFFFFFF00) != 0)
{
// Pretend only one byte was read
offset -= width - 1;
// Add the string if long enough
string str = sb.ToString();
if (str.Length >= charLimit)
strings.Add(str);
// Clear the builder and continue
#if NET20 || NET35
sb = new();
#else
sb.Clear();
#endif
continue;
}
// Otherwise, add the character to the builder and continue
sb.Append(c);
}
// Handle any remaining data
if (sb.Length >= charLimit)
strings.Add(sb.ToString());
return strings;
}
#endregion
}
}
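A short usage sketch of the byte-array string scanning above; the file name and character limit are placeholders.
using System;
using System.IO;
using SabreTools.IO.Extensions;

byte[] data = File.ReadAllBytes("sample.bin"); // placeholder input

// Collect ASCII and UTF-16 strings of at least 6 characters from the raw bytes
var found = data.ReadStringsFrom(charLimit: 6);
if (found != null)
{
    foreach (string s in found)
        Console.WriteLine(s);
}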

View File

@@ -5,6 +5,25 @@ namespace SabreTools.IO.Extensions
{
public static class EnumerableExtensions
{
/// <summary>
/// Wrap iterating through an enumerable with an action
/// </summary>
/// <remarks>
/// .NET Framework 2.0 and 3.5 process the items in series.
/// All later target frameworks process the items in parallel.
/// </remarks>
public static void IterateWithAction<T>(this IEnumerable<T> source, Action<T> action)
{
#if NET20 || NET35
foreach (var item in source)
{
action(item);
}
#else
System.Threading.Tasks.Parallel.ForEach(source, action);
#endif
}
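A quick sketch of IterateWithAction; the list contents are placeholders. Because every target newer than .NET Framework 3.5 dispatches through Parallel.ForEach, the supplied action should be safe to run concurrently.
using System;
using System.Collections.Generic;
using SabreTools.IO.Extensions;

var paths = new List<string> { "a.bin", "b.bin", "c.bin" }; // placeholder items

// Runs sequentially on .NET Framework 2.0/3.5 and in parallel everywhere else
paths.IterateWithAction(path => Console.WriteLine($"Processing {path}"));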
/// <summary>
/// Safely iterate through an enumerable, skipping any errors
/// </summary>

View File

@@ -1,4 +1,6 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace SabreTools.IO.Extensions
{
@@ -10,7 +12,7 @@ namespace SabreTools.IO.Extensions
/// <param name="input">Input stream to try aligning</param>
/// <param name="alignment">Number of bytes to align on</param>
/// <returns>True if the stream could be aligned, false otherwise</returns>
public static bool AlignToBoundary(this Stream? input, byte alignment)
public static bool AlignToBoundary(this Stream? input, int alignment)
{
// If the stream is invalid
if (input == null || input.Length == 0 || !input.CanRead)
@@ -30,6 +32,80 @@ namespace SabreTools.IO.Extensions
return input.Position % alignment == 0;
}
/// <summary>
/// Read a number of bytes from an offset in a stream, if possible
/// </summary>
/// <param name="input">Input stream to read from</param>
/// <param name="offset">Offset within the stream to start reading</param>
/// <param name="length">Number of bytes to read from the offset</param>
/// <param name="retainPosition">Indicates if the original position of the stream should be retained after reading</param>
/// <returns>Filled byte array on success, null on error</returns>
/// <remarks>
/// This method will return a null array if the length is greater than what is left
/// in the stream. This is different behavior than a normal stream read that would
/// attempt to read as much as possible, returning the amount of bytes read.
/// </remarks>
public static byte[]? ReadFrom(this Stream? input, long offset, int length, bool retainPosition)
{
if (input == null || !input.CanRead || !input.CanSeek)
return null;
if (offset < 0 || offset >= input.Length)
return null;
if (length < 0 || offset + length > input.Length)
return null;
// Cache the current location
long currentLocation = input.Position;
// Seek to the requested offset
long newPosition = input.SeekIfPossible(offset);
if (newPosition != offset)
return null;
// Read from the position
byte[] data = input.ReadBytes(length);
// Seek back if requested
if (retainPosition)
_ = input.SeekIfPossible(currentLocation);
// Return the read data
return data;
}
/// <summary>
/// Read string data from a Stream
/// </summary>
/// <param name="position">Position in the source to read from</param>
/// <param name="length">Length of the requested data</param>
/// <param name="charLimit">Number of characters needed to be a valid string, default 5</param>
/// <returns>String list containing the requested data, null on error</returns>
public static List<string>? ReadStringsFrom(this Stream? input, int position, int length, int charLimit = 5)
{
// Read the data as a byte array first
byte[]? data = input.ReadFrom(position, length, retainPosition: true);
if (data == null)
return null;
// Check for ASCII strings
var asciiStrings = data.ReadStringsWithEncoding(charLimit, Encoding.ASCII);
// Check for UTF-8 strings
var utf8Strings = data.ReadStringsWithEncoding(charLimit, Encoding.UTF8);
// Check for Unicode strings
// We are limiting the check for Unicode characters with a second byte of 0x00 for now
var unicodeStrings = data.ReadStringsWithEncoding(charLimit, Encoding.Unicode);
// Combine the strings found across encodings
List<string> sourceStrings = [.. asciiStrings, .. utf8Strings, .. unicodeStrings];
// Sort the strings and return
sourceStrings.Sort();
return sourceStrings;
}
/// <summary>
/// Seek to a specific point in the stream, if possible
/// </summary>
@@ -62,5 +138,24 @@ namespace SabreTools.IO.Extensions
return -1;
}
}
/// <summary>
/// Check if a segment is valid in the stream
/// </summary>
/// <param name="input">Input stream to validate</param>
/// <param name="offset">Position in the source</param>
/// <param name="count">Length of the data to check</param>
/// <returns>True if segment could be read fully, false otherwise</returns>
public static bool SegmentValid(this Stream? input, long offset, long count)
{
if (input == null)
return false;
if (offset < 0 || offset > input.Length)
return false;
if (count < 0 || offset + count > input.Length)
return false;
return true;
}
}
}
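A hedged sketch of the stream helpers above working together; the file name, offsets, lengths, and alignment value are placeholders.
using System;
using System.IO;
using SabreTools.IO.Extensions;

using Stream stream = File.OpenRead("archive.bin"); // placeholder file

// Verify the segment exists before touching it
if (stream.SegmentValid(offset: 0x200, count: 0x100))
{
    // Read 0x100 bytes at 0x200 without disturbing the current position
    byte[]? block = stream.ReadFrom(0x200, 0x100, retainPosition: true);
    Console.WriteLine($"Read {block?.Length ?? 0} bytes at 0x200");

    // Scan the same window for ASCII, UTF-8, and UTF-16 strings of 5+ characters
    var strings = stream.ReadStringsFrom(position: 0x200, length: 0x100);
    strings?.ForEach(Console.WriteLine);
}

// Skip ahead so the position lands on a 16-byte boundary
bool aligned = stream.AlignToBoundary(16);
Console.WriteLine($"Aligned: {aligned}, position: {stream.Position}");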

View File

@@ -0,0 +1,63 @@
using System;
namespace SabreTools.IO.Extensions
{
public static class StringExtensions
{
/// <inheritdoc cref="string.Contains(string)"/>
public static bool OptionalContains(this string? self, string value)
=> OptionalContains(self, value, StringComparison.Ordinal);
/// <inheritdoc cref="string.Contains(string, StringComparison)"/>
public static bool OptionalContains(this string? self, string value, StringComparison comparisonType)
{
if (self == null)
return false;
#if NETFRAMEWORK || NETSTANDARD2_0
return self.Contains(value);
#else
return self.Contains(value, comparisonType);
#endif
}
/// <inheritdoc cref="string.EndsWith(string)"/>
public static bool OptionalEndsWith(this string? self, string value)
=> OptionalEndsWith(self, value, StringComparison.Ordinal);
/// <inheritdoc cref="string.EndsWith(string, StringComparison)"/>
public static bool OptionalEndsWith(this string? self, string value, StringComparison comparisonType)
{
if (self == null)
return false;
return self.EndsWith(value, comparisonType);
}
/// <inheritdoc cref="string.Equals(string)"/>
public static bool OptionalEquals(this string? self, string value)
=> OptionalEquals(self, value, StringComparison.Ordinal);
/// <inheritdoc cref="string.Equals(string, StringComparison)"/>
public static bool OptionalEquals(this string? self, string value, StringComparison comparisonType)
{
if (self == null)
return false;
return self.Equals(value, comparisonType);
}
/// <inheritdoc cref="string.StartsWith(string)"/>
public static bool OptionalStartsWith(this string? self, string value)
=> OptionalStartsWith(self, value, StringComparison.Ordinal);
/// <inheritdoc cref="string.StartsWith(string, StringComparison)"/>
public static bool OptionalStartsWith(this string? self, string value, StringComparison comparisonType)
{
if (self == null)
return false;
return self.StartsWith(value, comparisonType);
}
}
}
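A quick sketch of the null-tolerant string helpers above: each returns false instead of throwing when the receiver is null. Note that the NETFRAMEWORK/NETSTANDARD2_0 branch of OptionalContains falls back to the plain Contains overload, so the comparison is effectively ordinal there.
using System;
using SabreTools.IO.Extensions;

string? maybeName = null; // placeholder value that may be null

Console.WriteLine(maybeName.OptionalEndsWith(".exe"));                                        // False, no exception
Console.WriteLine("Setup.EXE".OptionalEndsWith(".exe", StringComparison.OrdinalIgnoreCase));  // True
Console.WriteLine("installer.log".OptionalStartsWith("install"));                             // True
Console.WriteLine("readme".OptionalEquals("README", StringComparison.OrdinalIgnoreCase));     // True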

View File

@@ -8,9 +8,10 @@
<LangVersion>latest</LangVersion>
<NoWarn>CS0618</NoWarn>
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<SymbolPackageFormat>snupkg</SymbolPackageFormat>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.7.0</Version>
<Version>1.7.3</Version>
<!-- Package Properties -->
<Authors>Matt Nadareski</Authors>
@@ -30,7 +31,8 @@
<ItemGroup>
<PackageReference Include="SabreTools.Matching" Version="1.6.0" />
<PackageReference Include="SabreTools.Models" Version="1.6.0" />
<PackageReference Include="SabreTools.Models" Version="1.7.1" />
<PackageReference Include="SabreTools.Hashing" Version="1.5.0" />
</ItemGroup>
</Project>

View File

@@ -39,7 +39,7 @@ namespace SabreTools.IO.Streams
#endregion
#region Internal State
#region Instance Variables
/// <summary>
/// Internal collection of streams to read from
@@ -58,6 +58,8 @@ namespace SabreTools.IO.Streams
#endregion
#region Constructors
/// <summary>
/// Create a new, empty ReadOnlyCompositeStream
/// </summary>
@@ -109,7 +111,7 @@ namespace SabreTools.IO.Streams
/// </summary>
public ReadOnlyCompositeStream(IEnumerable<Stream> streams)
{
_streams = new List<Stream>(streams);
_streams = [.. streams];
_length = 0;
_position = 0;
@@ -123,6 +125,10 @@ namespace SabreTools.IO.Streams
}
}
#endregion
#region Data
/// <summary>
/// Add a new stream to the collection
/// </summary>
@@ -138,10 +144,13 @@ namespace SabreTools.IO.Streams
return true;
}
#endregion
#region Stream Implementations
/// <inheritdoc/>
public override void Flush() => throw new NotImplementedException();
public override void Flush()
=> throw new NotImplementedException();
/// <inheritdoc/>
public override int Read(byte[] buffer, int offset, int count)
@@ -198,27 +207,22 @@ namespace SabreTools.IO.Streams
// Handle the "seek"
switch (origin)
{
case SeekOrigin.Begin: _position = offset; break;
case SeekOrigin.Current: _position += offset; break;
case SeekOrigin.End: _position = _length - offset - 1; break;
case SeekOrigin.Begin: Position = offset; break;
case SeekOrigin.Current: Position += offset; break;
case SeekOrigin.End: Position = _length + offset - 1; break;
default: throw new ArgumentException($"Invalid value for {nameof(origin)}");
}
;
// Handle out-of-bounds seeks
if (_position < 0)
_position = 0;
else if (_position >= _length)
_position = _length - 1;
return _position;
return Position;
}
/// <inheritdoc/>
public override void SetLength(long value) => throw new NotImplementedException();
public override void SetLength(long value)
=> throw new NotImplementedException();
/// <inheritdoc/>
public override void Write(byte[] buffer, int offset, int count) => throw new NotImplementedException();
public override void Write(byte[] buffer, int offset, int count)
=> throw new NotImplementedException();
#endregion
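A hedged sketch of reading across a ReadOnlyCompositeStream, using only the members visible in this diff; the part file names are placeholders, and ownership/disposal of the wrapped streams is not covered by the hunks shown.
using System;
using System.Collections.Generic;
using System.IO;
using SabreTools.IO.Streams;

// Placeholder split files presented as one logical read-only stream
var parts = new List<Stream>
{
    File.OpenRead("disc.part1"),
    File.OpenRead("disc.part2"),
};

using var composite = new ReadOnlyCompositeStream(parts);

// Reads and seeks span the part boundaries transparently
byte[] header = new byte[16];
composite.Seek(0, SeekOrigin.Begin);
int read = composite.Read(header, 0, header.Length);
Console.WriteLine($"Read {read} header bytes of {composite.Length} total");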

View File

@@ -0,0 +1,254 @@
using System;
using System.IO;
namespace SabreTools.IO.Streams
{
/// <summary>
/// Stream representing a view into a source
/// </summary>
public class ViewStream : Stream
{
#region Properties
/// <inheritdoc/>
public override bool CanRead => true;
/// <inheritdoc/>
public override bool CanSeek => _source.CanSeek;
/// <inheritdoc/>
public override bool CanWrite => false;
/// <summary>
/// Filename from the source, if possible
/// </summary>
public string? Filename
{
get
{
// A subset of streams have a filename
if (_source is FileStream fs)
return fs.Name;
else if (_source is ViewStream vs)
return vs.Filename;
return null;
}
}
/// <inheritdoc/>
public override long Length => _length;
/// <inheritdoc/>
public override long Position
{
get
{
// Handle 0-length sources
if (_length <= 0)
return 0;
return _source.Position - _initialPosition;
}
set
{
// Handle 0-length sources
if (_length <= 0)
{
_source.Position = 0;
return;
}
long position = value;
// Handle out-of-bounds seeks
if (position < 0)
position = 0;
else if (position >= _length)
position = _length - 1;
_source.Position = _initialPosition + position;
}
}
#endregion
#region Instance Variables
/// <summary>
/// Initial position within the underlying data
/// </summary>
protected long _initialPosition;
/// <summary>
/// Usable length in the underlying data
/// </summary>
protected long _length;
/// <summary>
/// Source data
/// </summary>
protected Stream _source;
/// <summary>
/// Lock object for reading from the source
/// </summary>
private readonly object _sourceLock = new();
#endregion
#region Constructors
/// <summary>
/// Construct a new ViewStream from a Stream
/// </summary>
public ViewStream(Stream data, long offset)
{
if (!data.CanRead)
throw new ArgumentException("Stream must be readable", nameof(data));
if (offset < 0 || offset > data.Length)
throw new ArgumentOutOfRangeException(nameof(offset));
_source = data;
_initialPosition = offset;
_length = data.Length - offset;
_source.Seek(_initialPosition, SeekOrigin.Begin);
}
/// <summary>
/// Construct a new ViewStream from a Stream
/// </summary>
public ViewStream(Stream data, long offset, long length)
{
if (!data.CanRead)
throw new ArgumentException("Stream must be readable", nameof(data));
if (offset < 0 || offset > data.Length)
throw new ArgumentOutOfRangeException(nameof(offset));
if (length < 0 || offset + length > data.Length)
throw new ArgumentOutOfRangeException(nameof(length));
_source = data;
_initialPosition = offset;
_length = length;
_source.Seek(_initialPosition, SeekOrigin.Begin);
}
/// <summary>
/// Construct a new ViewStream from a byte array
/// </summary>
public ViewStream(byte[] data, long offset)
{
if (offset < 0 || offset > data.Length)
throw new ArgumentOutOfRangeException(nameof(offset));
long length = data.Length - offset;
_source = new MemoryStream(data, (int)offset, (int)length);
_initialPosition = 0;
_length = length;
_source.Seek(_initialPosition, SeekOrigin.Begin);
}
/// <summary>
/// Construct a new ViewStream from a byte array
/// </summary>
public ViewStream(byte[] data, long offset, long length)
{
if (offset < 0 || offset > data.Length)
throw new ArgumentOutOfRangeException(nameof(offset));
if (length < 0 || offset + length > data.Length)
throw new ArgumentOutOfRangeException(nameof(length));
_source = new MemoryStream(data, (int)offset, (int)length);
_initialPosition = 0;
_length = length;
_source.Seek(_initialPosition, SeekOrigin.Begin);
}
#endregion
#region Data
/// <summary>
/// Check if a data segment is valid in the data source
/// </summary>
/// <param name="offset">Position in the source</param>
/// <param name="count">Length of the data to check</param>
/// <returns>True if the positional data is valid, false otherwise</returns>
public bool SegmentValid(long offset, long count)
{
if (offset < 0 || offset > Length)
return false;
if (count < 0 || offset + count > Length)
return false;
return true;
}
#endregion
#region Stream Implementations
/// <inheritdoc/>
public override void Flush()
=> throw new NotImplementedException();
/// <inheritdoc/>
public override int Read(byte[] buffer, int offset, int count)
{
// Invalid cases always return 0
if (buffer.Length == 0)
return 0;
if (offset < 0 || offset >= buffer.Length)
return 0;
if (count < 0 || offset + count > buffer.Length)
return 0;
// Short-circuit 0-byte reads
if (count == 0)
return 0;
// Clamp the read so it cannot run past the end of the view
long remaining = _length - Position;
if (remaining <= 0)
return 0;
if (count > remaining)
count = (int)remaining;
try
{
lock (_sourceLock)
{
return _source.Read(buffer, offset, count);
}
}
catch
{
// Absorb the error
return 0;
}
}
/// <inheritdoc/>
public override long Seek(long offset, SeekOrigin origin)
{
// Handle the "seek"
switch (origin)
{
case SeekOrigin.Begin: Position = offset; break;
case SeekOrigin.Current: Position += offset; break;
case SeekOrigin.End: Position = _length + offset - 1; break;
default: throw new ArgumentException($"Invalid value for {nameof(origin)}");
}
return Position;
}
/// <inheritdoc/>
public override void SetLength(long value)
=> throw new NotImplementedException();
/// <inheritdoc/>
public override void Write(byte[] buffer, int offset, int count)
=> throw new NotImplementedException();
#endregion
}
}
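A short usage sketch of ViewStream built on the constructors above; the source file and window bounds are placeholders.
using System;
using System.IO;
using SabreTools.IO.Streams;

using var file = File.OpenRead("image.bin"); // placeholder source file
using var view = new ViewStream(file, offset: 0x400, length: 0x100);

Console.WriteLine($"Viewing {view.Filename}: {view.Length} bytes");

// Positions are relative to the start of the view, not the underlying file
byte[] buffer = new byte[0x20];
view.Seek(0, SeekOrigin.Begin);
int read = view.Read(buffer, 0, buffer.Length);
Console.WriteLine($"Read {read} bytes from the window at 0x400");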