Merge branch 'release' into adam/master-release

# Conflicts:
#	src/SharpCompress/packages.lock.json
This commit is contained in:
Adam Hathcock
2026-04-04 10:50:12 +01:00
20 changed files with 353 additions and 108 deletions

View File

@@ -22,9 +22,11 @@ public static class Constants
/// by rewinding and re-reading the same data.
/// </para>
/// <para>
/// <b>Default:</b> 163840 bytes (160KB) - sized to cover ZStandard's worst-case
/// first block on a tar archive (~131KB including frame header overhead).
/// ZStandard blocks can be up to 128KB, exceeding the previous 81KB default.
/// <b>Default:</b> 81920 bytes (80KB) sufficient for most formats.
/// Formats that require larger buffers (e.g. BZip2, ZStandard) declare their
/// own minimum via <c>TarWrapper.MinimumRewindBufferSize</c>, and
/// <c>TarWrapper.MaximumRewindBufferSize</c> is used at stream construction
/// to ensure the correct capacity is allocated upfront.
/// </para>
/// <para>
/// <b>Typical usage:</b> 500-1000 bytes for most archives
@@ -41,7 +43,7 @@ public static class Constants
/// </list>
/// </para>
/// </remarks>
public static int RewindableBufferSize { get; set; } = 163840;
public static int RewindableBufferSize { get; set; } = 81920;
public static CultureInfo DefaultCultureInfo { get; set; } = CultureInfo.InvariantCulture;
}

View File

@@ -151,7 +151,17 @@ internal partial class StreamingZipHeaderFactory : ZipHeaderFactory
}
else
{
headerBytes = reader.ReadUInt32();
try
{
headerBytes = reader.ReadUInt32();
}
catch (EndOfStreamException ex)
{
throw new InvalidFormatException(
"Unexpected end of stream while reading ZIP archive",
ex
);
}
}
_lastEntryHeader = null;

View File

@@ -380,6 +380,10 @@ internal partial class CBZip2InputStream
groupPos = BZip2Constants.G_SIZE;
}
groupPos--;
if (groupNo < 0 || groupNo >= selector.Length)
{
throw new InvalidFormatException("BZip2: group selector out of range");
}
zt = selector[groupNo];
zn = minLens[zt];
zvec = await BsRAsync(zn, cancellationToken).ConfigureAwait(false);
@@ -465,6 +469,10 @@ internal partial class CBZip2InputStream
groupPos = BZip2Constants.G_SIZE;
}
groupPos--;
if (groupNo < 0 || groupNo >= selector.Length)
{
throw new InvalidFormatException("BZip2: group selector out of range");
}
zt = selector[groupNo];
zn = minLens[zt];
zvec = await BsRAsync(zn, cancellationToken).ConfigureAwait(false);
@@ -542,6 +550,10 @@ internal partial class CBZip2InputStream
BlockOverrun();
}
if (nextSym - 1 < 0 || nextSym - 1 >= yy.Length)
{
throw new InvalidFormatException("BZip2: symbol out of range");
}
tmp = yy[nextSym - 1];
unzftab[seqToUnseq[tmp]]++;
ll8[last] = seqToUnseq[tmp];
@@ -578,6 +590,10 @@ internal partial class CBZip2InputStream
groupPos = BZip2Constants.G_SIZE;
}
groupPos--;
if (groupNo < 0 || groupNo >= selector.Length)
{
throw new InvalidFormatException("BZip2: group selector out of range");
}
zt = selector[groupNo];
zn = minLens[zt];
zvec = await BsRAsync(zn, cancellationToken).ConfigureAwait(false);

View File

@@ -706,6 +706,10 @@ internal partial class CBZip2InputStream : Stream
groupPos = BZip2Constants.G_SIZE;
}
groupPos--;
if (groupNo < 0 || groupNo >= selector.Length)
{
throw new InvalidFormatException("BZip2: group selector out of range");
}
zt = selector[groupNo];
zn = minLens[zt];
zvec = BsR(zn);
@@ -788,6 +792,10 @@ internal partial class CBZip2InputStream : Stream
groupPos = BZip2Constants.G_SIZE;
}
groupPos--;
if (groupNo < 0 || groupNo >= selector.Length)
{
throw new InvalidFormatException("BZip2: group selector out of range");
}
zt = selector[groupNo];
zn = minLens[zt];
zvec = BsR(zn);
@@ -862,6 +870,10 @@ internal partial class CBZip2InputStream : Stream
BlockOverrun();
}
if (nextSym - 1 < 0 || nextSym - 1 >= yy.Length)
{
throw new InvalidFormatException("BZip2: symbol out of range");
}
tmp = yy[nextSym - 1];
unzftab[seqToUnseq[tmp]]++;
ll8[last] = seqToUnseq[tmp];
@@ -898,6 +910,10 @@ internal partial class CBZip2InputStream : Stream
groupPos = BZip2Constants.G_SIZE;
}
groupPos--;
if (groupNo < 0 || groupNo >= selector.Length)
{
throw new InvalidFormatException("BZip2: group selector out of range");
}
zt = selector[groupNo];
zn = minLens[zt];
zvec = BsR(zn);

View File

@@ -244,6 +244,10 @@ internal sealed class HuffmanTree
overflowBits--;
} while (overflowBits != 0);
if (index < 0 || index >= array.Length)
{
throw new InvalidFormatException("Deflate64: invalid Huffman data");
}
array[index] = (short)ch;
}
}

View File

@@ -26,6 +26,7 @@ public partial class ReduceStream
{
if (inByteCount == compressedSize)
{
_inputExhausted = true;
return EOF;
}
@@ -35,6 +36,7 @@ public partial class ReduceStream
.ConfigureAwait(false);
if (bytesRead == 0)
{
_inputExhausted = true;
return EOF;
}
@@ -117,6 +119,13 @@ public partial class ReduceStream
{
if (length == 0)
{
if (_inputExhausted && bitBufferCount <= 0)
{
throw new InvalidFormatException(
"ReduceStream: compressed data exhausted before uncompressed size reached"
);
}
byte nextByte = await GetNextByteAsync(cancellationToken).ConfigureAwait(false);
if (nextByte != RunLengthCode)
{

View File

@@ -115,16 +115,24 @@ public partial class ReduceStream : Stream
private int bitBufferCount;
private ulong bitBuffer;
private bool _inputExhausted;
private int NEXTBYTE()
{
if (inByteCount == compressedSize)
{
_inputExhausted = true;
return EOF;
}
inByteCount++;
return inStream.ReadByte();
int b = inStream.ReadByte();
if (b < 0)
{
_inputExhausted = true;
return EOF;
}
return b;
}
private void READBITS(int nbits, out byte zdest)
@@ -208,6 +216,13 @@ public partial class ReduceStream : Stream
{
if (length == 0)
{
if (_inputExhausted && bitBufferCount <= 0)
{
throw new InvalidFormatException(
"ReduceStream: compressed data exhausted before uncompressed size reached"
);
}
byte nextByte = GetNextByte();
if (nextByte != RunLengthCode)
{

View File

@@ -2,7 +2,6 @@ using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Compressors.Shrink;
@@ -10,18 +9,11 @@ internal partial class ShrinkStream : Stream
{
internal static async ValueTask<ShrinkStream> CreateAsync(
Stream stream,
CompressionMode compressionMode,
long compressedSize,
long uncompressedSize,
CancellationToken cancellationToken = default
)
{
var shrinkStream = new ShrinkStream(
stream,
compressionMode,
compressedSize,
uncompressedSize
);
var shrinkStream = new ShrinkStream(stream, uncompressedSize);
await shrinkStream.DecompressAsync(cancellationToken).ConfigureAwait(false);
return shrinkStream;
}
@@ -33,42 +25,16 @@ internal partial class ShrinkStream : Stream
return;
}
// Read all compressed data asynchronously
var src = new byte[_compressedSize];
int bytesRead = 0;
int totalBytesRead = 0;
while (totalBytesRead < (int)_compressedSize)
{
bytesRead = await inStream
.ReadAsync(
src,
totalBytesRead,
(int)_compressedSize - totalBytesRead,
cancellationToken
)
.ConfigureAwait(false);
if (bytesRead == 0)
{
throw new IncompleteArchiveException(
"Unexpected end of stream while reading compressed data"
);
}
totalBytesRead += bytesRead;
}
// Read actual compressed data from the stream rather than pre-allocating based on the
// declared compressed size, which may be crafted to cause an OutOfMemoryException.
// The stream is already bounded by ReadOnlySubStream in ZipFilePart.
using var srcMs = new MemoryStream();
await _inStream.CopyToAsync(srcMs, 81920, cancellationToken).ConfigureAwait(false);
var src = srcMs.ToArray();
var srcLen = src.Length;
// Decompress synchronously (CPU-bound operation)
var srcUsed = 0;
var dstUsed = 0;
HwUnshrink.Unshrink(
src,
(int)_compressedSize,
out srcUsed,
_byteOut,
(int)_uncompressedSize,
out dstUsed
);
HwUnshrink.Unshrink(src, srcLen, out _, _byteOut, (int)_uncompressedSize, out var dstUsed);
_outBytesCount = dstUsed;
_decompressed = true;
}

View File

@@ -1,40 +1,35 @@
using System;
using System.IO;
using SharpCompress;
using SharpCompress.Common;
namespace SharpCompress.Compressors.Shrink;
internal partial class ShrinkStream : Stream
{
private Stream inStream;
private readonly Stream _inStream;
private ulong _compressedSize;
private long _uncompressedSize;
private byte[] _byteOut;
private readonly long _uncompressedSize;
private readonly byte[] _byteOut;
private long _outBytesCount;
private bool _decompressed;
private long _position;
public ShrinkStream(
Stream stream,
CompressionMode compressionMode,
long compressedSize,
long uncompressedSize
)
public ShrinkStream(Stream stream, long uncompressedSize)
{
inStream = stream;
if (uncompressedSize > int.MaxValue)
{
throw new InvalidFormatException(
$"Shrink: declared uncompressed size {uncompressedSize} exceeds maximum supported size."
);
}
_inStream = stream;
_compressedSize = (ulong)compressedSize;
_uncompressedSize = uncompressedSize;
_byteOut = new byte[_uncompressedSize];
_byteOut = new byte[(int)_uncompressedSize];
_outBytesCount = 0L;
}
protected override void Dispose(bool disposing)
{
base.Dispose(disposing);
}
public override bool CanRead => true;
public override bool CanSeek => true;
@@ -55,18 +50,21 @@ internal partial class ShrinkStream : Stream
{
if (!_decompressed)
{
var src = new byte[_compressedSize];
inStream.ReadExact(src, 0, (int)_compressedSize);
var srcUsed = 0;
var dstUsed = 0;
// Read actual compressed data from the stream rather than pre-allocating based on the
// declared compressed size, which may be crafted to cause an OutOfMemoryException.
// The stream is already bounded by ReadOnlySubStream in ZipFilePart.
using var srcMs = new MemoryStream();
_inStream.CopyTo(srcMs);
var src = srcMs.ToArray();
var srcLen = src.Length;
HwUnshrink.Unshrink(
src,
(int)_compressedSize,
out srcUsed,
srcLen,
out _,
_byteOut,
(int)_uncompressedSize,
out dstUsed
out var dstUsed
);
_outBytesCount = dstUsed;
_decompressed = true;

View File

@@ -6,4 +6,17 @@ internal class ZstandardConstants
/// Magic number found at start of ZStandard frame: 0xFD 0x2F 0xB5 0x28
/// </summary>
public const uint MAGIC = 0xFD2FB528;
/// <summary>
/// Maximum uncompressed size of a single ZStandard block: ZSTD_BLOCKSIZE_MAX = 128 KB.
/// </summary>
public const int BlockSizeMax = 1 << 17; // 131072 bytes
/// <summary>
/// Recommended input (compressed) buffer size for streaming decompression:
/// ZSTD_DStreamInSize = ZSTD_BLOCKSIZE_MAX + ZSTD_blockHeaderSize (3 bytes).
/// The ring buffer must be at least this large to hold the compressed bytes read
/// during format detection before the first rewind.
/// </summary>
public const int DStreamInSize = BlockSizeMax + 3;
}

View File

@@ -52,7 +52,7 @@ public class TarFactory
{
var providers = CompressionProviderRegistry.Default;
var sharpCompressStream = new SharpCompressStream(stream);
sharpCompressStream.StartRecording();
sharpCompressStream.StartRecording(TarWrapper.MaximumRewindBufferSize);
foreach (var wrapper in TarWrapper.Wrappers)
{
sharpCompressStream.Rewind();
@@ -84,7 +84,7 @@ public class TarFactory
{
var providers = CompressionProviderRegistry.Default;
var sharpCompressStream = new SharpCompressStream(stream);
sharpCompressStream.StartRecording();
sharpCompressStream.StartRecording(TarWrapper.MaximumRewindBufferSize);
foreach (var wrapper in TarWrapper.Wrappers)
{
sharpCompressStream.Rewind();
@@ -319,7 +319,7 @@ public class TarFactory
{
options ??= new ReaderOptions();
var sharpCompressStream = new SharpCompressStream(stream);
sharpCompressStream.StartRecording();
sharpCompressStream.StartRecording(TarWrapper.MaximumRewindBufferSize);
foreach (var wrapper in TarWrapper.Wrappers)
{
sharpCompressStream.Rewind();
@@ -352,7 +352,7 @@ public class TarFactory
cancellationToken.ThrowIfCancellationRequested();
options ??= new ReaderOptions();
var sharpCompressStream = new SharpCompressStream(stream);
sharpCompressStream.StartRecording();
sharpCompressStream.StartRecording(TarWrapper.MaximumRewindBufferSize);
foreach (var wrapper in TarWrapper.Wrappers)
{
sharpCompressStream.Rewind();

View File

@@ -22,7 +22,8 @@ public class TarWrapper(
Func<Stream, Stream> createStream,
Func<Stream, CancellationToken, ValueTask<Stream>> createStreamAsync,
IEnumerable<string> knownExtensions,
bool wrapInSharpCompressStream = true
bool wrapInSharpCompressStream = true,
int? minimumRewindBufferSize = null
)
{
public CompressionType CompressionType { get; } = type;
@@ -30,6 +31,15 @@ public class TarWrapper(
public Func<Stream, CancellationToken, ValueTask<bool>> IsMatchAsync { get; } = canHandleAsync;
public bool WrapInSharpCompressStream { get; } = wrapInSharpCompressStream;
/// <summary>
/// The minimum ring buffer size required to detect and probe this format.
/// Format detection reads a decompressed block to check the tar header, so
/// the ring buffer must be large enough to hold the compressed bytes consumed
/// during that probe. Defaults to <see cref="Common.Constants.RewindableBufferSize"/>.
/// </summary>
public int MinimumRewindBufferSize { get; } =
minimumRewindBufferSize ?? Common.Constants.RewindableBufferSize;
public Func<Stream, Stream> CreateStream { get; } = createStream;
public Func<Stream, CancellationToken, ValueTask<Stream>> CreateStreamAsync { get; } =
createStreamAsync;
@@ -57,7 +67,11 @@ public class TarWrapper(
await BZip2Stream
.CreateAsync(stream, CompressionMode.Decompress, false)
.ConfigureAwait(false),
["tar.bz2", "tb2", "tbz", "tbz2", "tz2"]
["tar.bz2", "tb2", "tbz", "tbz2", "tz2"],
// BZip2 decompresses in whole blocks; the compressed size of the first block
// can be close to the uncompressed maximum (9 × 100 000 = 900 000 bytes).
// The ring buffer must hold all compressed bytes read during format detection.
minimumRewindBufferSize: BZip2Constants.baseBlockSize * 9
),
new(
CompressionType.GZip,
@@ -74,7 +88,11 @@ public class TarWrapper(
ZStandardStream.IsZStandardAsync,
(stream) => new ZStandardStream(stream),
(stream, _) => new ValueTask<Stream>(new ZStandardStream(stream)),
["tar.zst", "tar.zstd", "tzst", "tzstd"]
["tar.zst", "tar.zstd", "tzst", "tzstd"],
// ZStandard decompresses in blocks; the compressed size of the first block
// can be up to ZSTD_BLOCKSIZE_MAX + ZSTD_blockHeaderSize = 131075 bytes.
// The ring buffer must hold all compressed bytes read during format detection.
minimumRewindBufferSize: ZstandardConstants.DStreamInSize
),
new(
CompressionType.LZip,
@@ -104,4 +122,25 @@ public class TarWrapper(
false
),
];
/// <summary>
/// The largest <see cref="MinimumRewindBufferSize"/> across all registered wrappers.
/// Use this as the ring buffer size when creating a stream for Tar format detection so
/// that the buffer is sized correctly at construction and never needs to be reallocated.
/// </summary>
public static int MaximumRewindBufferSize { get; } = GetMaximumRewindBufferSize();
// Computed after Wrappers is initialised so the static initialisation order is safe.
// Scans every registered wrapper and returns the largest declared minimum rewind
// buffer size. Called from the MaximumRewindBufferSize property initializer, which
// runs after Wrappers is initialised, so the static initialisation order is safe.
private static int GetMaximumRewindBufferSize()
{
    var largest = 0;
    foreach (var wrapper in Wrappers)
    {
        var candidate = wrapper.MinimumRewindBufferSize;
        largest = candidate > largest ? candidate : largest;
    }
    return largest;
}
}

View File

@@ -80,7 +80,8 @@ internal sealed partial class SeekableSharpCompressStream : SharpCompressStream
}
}
public override void StartRecording() => _recordedPosition = _stream.Position;
public override void StartRecording(int? minBufferSize = null) =>
_recordedPosition = _stream.Position;
public override void StopRecording() => _recordedPosition = null;

View File

@@ -175,7 +175,17 @@ public partial class SharpCompressStream : Stream, IStreamStack
// (frozen recording mode) until Rewind(stopRecording: true) is called
}
public virtual void StartRecording()
/// <summary>
/// Begins recording reads so that <see cref="Rewind()"/> can replay them.
/// </summary>
/// <param name="minBufferSize">
/// Minimum ring buffer capacity in bytes. When provided and larger than
/// <see cref="Common.Constants.RewindableBufferSize"/>, the ring buffer is allocated
/// with this size. Pass the largest amount of compressed data that may be consumed
/// during format detection before the first rewind. Defaults to
/// <see cref="Common.Constants.RewindableBufferSize"/> when null or not supplied.
/// </param>
public virtual void StartRecording(int? minBufferSize = null)
{
if (_isPassthrough)
{
@@ -190,10 +200,14 @@ public partial class SharpCompressStream : Stream, IStreamStack
);
}
// Ensure ring buffer exists
// Allocate ring buffer with the requested minimum size (at least the global default).
if (_ringBuffer is null)
{
_ringBuffer = new RingBuffer(Constants.RewindableBufferSize);
var size =
minBufferSize.GetValueOrDefault() > Constants.RewindableBufferSize
? minBufferSize.GetValueOrDefault()
: Constants.RewindableBufferSize;
_ringBuffer = new RingBuffer(size);
}
// Mark current position as recording anchor

View File

@@ -2,7 +2,6 @@ using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Shrink;
namespace SharpCompress.Providers.Default;
@@ -27,12 +26,7 @@ public sealed class ShrinkCompressionProvider : ContextRequiredDecompressionProv
{
ValidateRequiredSizes(context, "Shrink");
return new ShrinkStream(
source,
CompressionMode.Decompress,
context.InputSize,
context.OutputSize
);
return new ShrinkStream(source, context.OutputSize);
}
public override async ValueTask<Stream> CreateDecompressStreamAsync(
@@ -44,13 +38,7 @@ public sealed class ShrinkCompressionProvider : ContextRequiredDecompressionProv
ValidateRequiredSizes(context, "Shrink");
return await ShrinkStream
.CreateAsync(
source,
CompressionMode.Decompress,
context.InputSize,
context.OutputSize,
cancellationToken
)
.CreateAsync(source, context.OutputSize, cancellationToken)
.ConfigureAwait(false);
}
}

View File

@@ -1,3 +1,4 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
@@ -91,7 +92,10 @@ public partial class TarReader
readerOptions ??= new ReaderOptions();
var sharpCompressStream = SharpCompressStream.Create(
stream,
bufferSize: readerOptions.RewindableBufferSize
bufferSize: Math.Max(
readerOptions.RewindableBufferSize ?? 0,
TarWrapper.MaximumRewindBufferSize
)
);
long pos = sharpCompressStream.Position;
foreach (var wrapper in TarWrapper.Wrappers)
@@ -170,7 +174,10 @@ public partial class TarReader
readerOptions ??= new ReaderOptions();
var sharpCompressStream = SharpCompressStream.Create(
stream,
bufferSize: readerOptions.RewindableBufferSize
bufferSize: Math.Max(
readerOptions.RewindableBufferSize ?? 0,
TarWrapper.MaximumRewindBufferSize
)
);
long pos = sharpCompressStream.Position;
foreach (var wrapper in TarWrapper.Wrappers)

View File

@@ -268,9 +268,9 @@
"net10.0": {
"Microsoft.NET.ILLink.Tasks": {
"type": "Direct",
"requested": "[10.0.2, )",
"resolved": "10.0.2",
"contentHash": "sXdDtMf2qcnbygw9OdE535c2lxSxrZP8gO4UhDJ0xiJbl1wIqXS1OTcTDFTIJPOFd6Mhcm8gPEthqWGUxBsTqw=="
"requested": "[10.0.5, )",
"resolved": "10.0.5",
"contentHash": "A+5ZuQ0f449tM+MQrhf6R9ZX7lYpjk/ODEwLYKrnF6111rtARx8fVsm4YznUnQiKnnXfaXNBqgxmil6RW3L3SA=="
},
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",
@@ -442,9 +442,9 @@
"net8.0": {
"Microsoft.NET.ILLink.Tasks": {
"type": "Direct",
"requested": "[8.0.23, )",
"resolved": "8.0.23",
"contentHash": "GqHiB1HbbODWPbY/lc5xLQH8siEEhNA0ptpJCC6X6adtAYNEzu5ZlqV3YHA3Gh7fuEwgA8XqVwMtH2KNtuQM1Q=="
"requested": "[8.0.25, )",
"resolved": "8.0.25",
"contentHash": "sqX4nmBft05ivqKvUT4nxaN8rT3apCLt9SWFkfRrQPwra1zPwFknQAw1lleuMCKOCLvVmOWwrC2iPSm9RiXZUg=="
},
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",

View File

@@ -130,5 +130,50 @@ public class MalformedInputTests
"504b03040a0200000e001c0068646c6c6f2e7478745554ac507578000000000000000000000000000000000000000000e80300000000000068030a0000000000147f040020303a360600002e7478745554090003a8c8b6696045ac69f5780b0006ff1d000908180000e8030000000000a4810000109a9a9a8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b9a0000000000000000000000e80300000000000068030a0000009a9a9a504b03440a6fcb486c6c6f2e74ffff"
);
}
[Fact]
public void Reduce_DecompressionBomb_Method2_ThrowsLibraryException()
{
    // 31-byte ZIP using Reduce method 2 with declared uncompressed size far exceeding the
    // actual compressed data - the decompressor must not generate unbounded output.
    // The helper (defined elsewhere in this class, not visible here) presumably decodes the
    // hex payload and asserts a library-defined exception is thrown — TODO confirm it rejects
    // raw runtime exceptions such as OutOfMemoryException.
    VerifyMalformedInputThrowsLibraryException(
        "504b03040a000000020000000200f7ff0500f7ff05ff200600180700000000"
    );
}
[Fact]
public void Deflate64_HuffmanTree_IndexOutOfRange_ThrowsLibraryException()
{
    // 105-byte ZIP using Deflate64 with invalid Huffman code lengths causing IOOB in CreateTable
    // HuffmanTree.CreateTable now bounds-checks the table index and throws
    // InvalidFormatException ("Deflate64: invalid Huffman data") instead of letting an
    // IndexOutOfRangeException escape; this input must take that guarded path.
    VerifyMalformedInputThrowsLibraryException(
        "504b03040a00005409000088c8b669757800009ac8b66975783606000000640028b52ffd047fff"
        + "02009a888888888820313735303600303132002030007573746172202000757001307230819b75"
        + "72756e7475410a000c2000391eeb061ffe391eeb068f0c0a000c20"
    );
}
[Fact]
public void BZip2_GetAndMoveToFrontDecode_IndexOutOfRange_ThrowsLibraryException()
{
    // 93-byte BZip2 stream triggering IOOB deeper in GetAndMoveToFrontDecode
    // CBZip2InputStream now range-checks the group selector and symbol indices and throws
    // InvalidFormatException for out-of-range values; this crafted stream must be rejected
    // through those checks rather than surfacing an IndexOutOfRangeException.
    VerifyMalformedInputThrowsLibraryException(
        "425a6839314159265359c1c080e2000001410000100244a00100808b640006000775780b2ef2ed"
        + "0001393beb06060606060606060606f9050605060606060f0654090003ffffff7f003403"
        + "0a0002001f8b7fff0000000000e98b8b3931"
    );
}
[Fact]
public void Zip_ShrinkOOM_CraftedCompressedSize_ThrowsLibraryException()
{
    // 122-byte ZIP with Shrink compression and compressed size set to 0x7FFFFFFF (2 GB).
    // The library must not attempt to allocate a 2 GB buffer based on the untrusted header.
    // ShrinkStream now streams the actual compressed bytes into a MemoryStream instead of
    // pre-allocating from the declared size, so this archive should fail with a library
    // exception rather than an OutOfMemoryException.
    VerifyMalformedInputThrowsLibraryException(
        "504b03040a0000000100147f6f5c20303a36ffffff7f0600000009001c0068656c6c6f2e747874"
        + "5554090003a8c8b6696045ac6975780b01e8303a36060000000600000009001800000001004f2a"
        + "2a2a2a0c2000395d5d5d5d5d5d5d5d5d5d5d5d5d5d5d5d5d000004e8303a360600000006000000"
        + "0900180000"
    );
}
}
#endif

View File

@@ -127,4 +127,58 @@ public class SharpCompressStreamSeekTest
Assert.Equal(3, readBuffer[0]);
Assert.Equal(4, readBuffer[1]);
}
[Fact]
public void StartRecording_WithLargerMinBufferSize_AllowsLargeRewind()
{
    // Simulates the BZip2 scenario: the ring buffer must be sized correctly from the
    // moment StartRecording is called so that a large probe read (up to 900 KB for
    // BZip2 format detection) can be rewound without overflowing the buffer.
    const int capacity = 100;
    const int probeLength = 80;

    var payload = new byte[100];
    for (var i = 0; i < payload.Length; i++)
    {
        payload[i] = (byte)(i + 1);
    }

    var backing = new MemoryStream(payload);
    var forwardOnly = new NonSeekableStreamWrapper(backing);
    var stream = SharpCompressStream.Create(forwardOnly, capacity);

    // Declare the required capacity upfront — no expansion should be needed later.
    stream.StartRecording(capacity);

    // Large probe read, as performed while checking the tar header during detection.
    var probe = new byte[probeLength];
    stream.Read(probe, 0, probeLength);

    // Rewind must succeed because the buffer was large enough from the start.
    stream.Rewind();

    var replay = new byte[probeLength];
    stream.Read(replay, 0, probeLength);
    Assert.Equal(payload[0], replay[0]);
    Assert.Equal(payload[probeLength - 1], replay[probeLength - 1]);
}
[Fact]
public void StartRecording_DefaultSize_UsesConstantsRewindableBufferSize()
{
    // With no explicit minimum, StartRecording falls back to the global default
    // buffer size; a small read within that default must still be rewindable.
    var source = new MemoryStream(new byte[] { 1, 2, 3, 4, 5 });
    var forwardOnly = new NonSeekableStreamWrapper(source);
    var stream = SharpCompressStream.Create(forwardOnly);

    stream.StartRecording();

    var scratch = new byte[5];
    stream.Read(scratch, 0, 5);
    stream.Rewind();

    var replay = new byte[5];
    stream.Read(replay, 0, 5);
    Assert.Equal(1, replay[0]);
    Assert.Equal(5, replay[4]);
}
}

View File

@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Compressors.BZip2;
using SharpCompress.Factories;
using SharpCompress.Readers;
using SharpCompress.Readers.Tar;
using SharpCompress.Test.Mocks;
@@ -58,6 +60,52 @@ public class TarReaderTests : ReaderTests
[Fact]
public void Tar_GZip_OldGnu_Reader() => Read("Tar.oldgnu.tar.gz", CompressionType.GZip);
[Fact]
public void Tar_BZip2_Reader_NonSeekable()
{
    // Regression test for: Dynamic default RingBuffer for BZip2.
    // Opening a .tar.bz2 from a non-seekable stream must succeed because the ring
    // buffer is sized to hold the BZip2 block before IsTarFile probes the data.
    using var archive = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2"));
    using var forwardOnly = new ForwardOnlyStream(archive);
    using var reader = ReaderFactory.OpenReader(forwardOnly);

    var fileEntries = 0;
    while (reader.MoveToNextEntry())
    {
        if (reader.Entry.IsDirectory)
        {
            continue;
        }
        fileEntries++;
    }

    Assert.True(fileEntries > 0);
}
[Fact]
public void TarWrapper_BZip2_MinimumRewindBufferSize_IsMaxBZip2BlockSize()
{
    // The BZip2 TarWrapper must declare a MinimumRewindBufferSize large enough
    // to hold an entire maximum-size compressed BZip2 block (9 × 100 000 bytes).
    TarWrapper? bzip2Wrapper = null;
    foreach (var wrapper in TarWrapper.Wrappers)
    {
        if (wrapper.CompressionType == CompressionType.BZip2)
        {
            bzip2Wrapper = wrapper;
            break;
        }
    }

    Assert.NotNull(bzip2Wrapper);
    Assert.Equal(BZip2Constants.baseBlockSize * 9, bzip2Wrapper.MinimumRewindBufferSize);
}
[Fact]
public void TarWrapper_Default_MinimumRewindBufferSize_Is_DefaultRewindableBufferSize()
{
    // Wrappers that don't specify a custom minimum fall back to
    // Constants.RewindableBufferSize, so existing behaviour is unchanged for them.
    TarWrapper? noneWrapper = null;
    foreach (var wrapper in TarWrapper.Wrappers)
    {
        if (wrapper.CompressionType == CompressionType.None)
        {
            noneWrapper = wrapper;
            break;
        }
    }

    Assert.NotNull(noneWrapper);
    Assert.Equal(Common.Constants.RewindableBufferSize, noneWrapper.MinimumRewindBufferSize);
}
[Fact]
public void Tar_BZip2_Entry_Stream()
{