Mirror of https://github.com/adamhathcock/sharpcompress.git, synced 2026-02-04 05:25:00 +00:00
Merge pull request #1005 from TwanVanDongen/master
Refactor SqueezeStream for CLS Compliance, Streaming, and Generic Test Coverage
@@ -1,13 +1,13 @@
 using System;
-using System.Collections.Generic;
 using System.IO;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
 using SharpCompress.IO;
 
 namespace SharpCompress.Compressors.RLE90
 {
+    /// <summary>
+    /// Real-time streaming RLE90 decompression stream.
+    /// Decompresses bytes on demand without buffering the entire file in memory.
+    /// </summary>
     public class RunLength90Stream : Stream, IStreamStack
     {
 #if DEBUG_STREAMS
@@ -31,13 +31,19 @@ namespace SharpCompress.Compressors.RLE90
         void IStreamStack.SetPosition(long position) { }
 
         private readonly Stream _stream;
+        private readonly int _compressedSize;
+        private int _bytesReadFromSource;
+
         private const byte DLE = 0x90;
-        private int _compressedSize;
-        private bool _processed = false;
+        private bool _inDleMode;
+        private byte _lastByte;
+        private int _repeatCount;
+
+        private bool _endOfCompressedData;
 
         public RunLength90Stream(Stream stream, int compressedSize)
         {
-            _stream = stream;
+            _stream = stream ?? throw new ArgumentNullException(nameof(stream));
            _compressedSize = compressedSize;
 #if DEBUG_STREAMS
             this.DebugConstruct(typeof(RunLength90Stream));
@@ -53,44 +59,93 @@ namespace SharpCompress.Compressors.RLE90
         }
 
         public override bool CanRead => true;
 
         public override bool CanSeek => false;
 
         public override bool CanWrite => false;
 
-        public override long Length => throw new NotImplementedException();
+        public override long Length => throw new NotSupportedException();
 
         public override long Position
         {
-            get => _stream.Position;
-            set => throw new NotImplementedException();
+            get => throw new NotSupportedException();
+            set => throw new NotSupportedException();
         }
 
-        public override void Flush() => throw new NotImplementedException();
+        public override void Flush() => throw new NotSupportedException();
+
+        public override long Seek(long offset, SeekOrigin origin) =>
+            throw new NotSupportedException();
+
+        public override void SetLength(long value) => throw new NotSupportedException();
+
+        public override void Write(byte[] buffer, int offset, int count) =>
+            throw new NotSupportedException();
 
         public override int Read(byte[] buffer, int offset, int count)
         {
-            if (_processed)
-            {
-                return 0;
-            }
-            _processed = true;
-
-            using var binaryReader = new BinaryReader(_stream);
-            byte[] compressedBuffer = binaryReader.ReadBytes(_compressedSize);
-
-            var unpacked = RLE.UnpackRLE(compressedBuffer);
-            unpacked.CopyTo(buffer);
-
-            return unpacked.Count;
+            if (buffer == null)
+                throw new ArgumentNullException(nameof(buffer));
+            if (offset < 0 || count < 0 || offset + count > buffer.Length)
+                throw new ArgumentOutOfRangeException();
+
+            int bytesWritten = 0;
+
+            while (bytesWritten < count && !_endOfCompressedData)
+            {
+                // Handle pending repeat bytes first
+                if (_repeatCount > 0)
+                {
+                    int toWrite = Math.Min(_repeatCount, count - bytesWritten);
+                    for (int i = 0; i < toWrite; i++)
+                    {
+                        buffer[offset + bytesWritten++] = _lastByte;
+                    }
+                    _repeatCount -= toWrite;
+                    continue;
+                }
+
+                // Try to read the next byte from compressed data
+                if (_bytesReadFromSource >= _compressedSize)
+                {
+                    _endOfCompressedData = true;
+                    break;
+                }
+
+                int next = _stream.ReadByte();
+                if (next == -1)
+                {
+                    _endOfCompressedData = true;
+                    break;
+                }
+
+                _bytesReadFromSource++;
+                byte c = (byte)next;
+
+                if (_inDleMode)
+                {
+                    _inDleMode = false;
+
+                    if (c == 0)
+                    {
+                        buffer[offset + bytesWritten++] = DLE;
+                        _lastByte = DLE;
+                    }
+                    else
+                    {
+                        _repeatCount = c - 1;
+                        // We'll handle these repeats in next loop iteration.
+                    }
+                }
+                else if (c == DLE)
+                {
+                    _inDleMode = true;
+                }
+                else
+                {
+                    buffer[offset + bytesWritten++] = c;
+                    _lastByte = c;
+                }
+            }
+
+            return bytesWritten;
         }
-
-        public override long Seek(long offset, SeekOrigin origin) =>
-            throw new NotImplementedException();
-
-        public override void SetLength(long value) => throw new NotImplementedException();
-
-        public override void Write(byte[] buffer, int offset, int count) =>
-            throw new NotImplementedException();
     }
 }
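For reference, the RLE90 scheme this stream decodes uses 0x90 (DLE) as an escape: DLE followed by a count n repeats the preceding literal so it appears n times in total, and DLE followed by 0x00 stands for a literal 0x90 byte. A minimal usage sketch of the new streaming decoder (the packed sample bytes below are invented for illustration):

using System;
using System.IO;
using SharpCompress.Compressors.RLE90;

class Rle90Demo
{
    static void Main()
    {
        // Sample bytes invented for illustration:
        // 'A'; DLE, 4 => "AAAA" in total; 'B'; DLE, 0 => one literal 0x90 byte
        byte[] packed = { 0x41, 0x90, 0x04, 0x42, 0x90, 0x00 };

        using var source = new MemoryStream(packed);
        using var rle = new RunLength90Stream(source, packed.Length);

        var output = new MemoryStream();
        var buffer = new byte[16];
        int n;
        while ((n = rle.Read(buffer, 0, buffer.Length)) > 0)
        {
            output.Write(buffer, 0, n);
        }

        // Expected: 41-41-41-41-42-90
        Console.WriteLine(BitConverter.ToString(output.ToArray()));
    }
}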
@@ -1,14 +1,13 @@
 using System;
-using System.Collections.Generic;
 using System.IO;
-using System.Linq;
 using System.Text;
 using System.Threading.Tasks;
 using SharpCompress.Compressors.RLE90;
 using SharpCompress.IO;
 
 namespace SharpCompress.Compressors.Squeezed
 {
+    [CLSCompliant(true)]
     public class SqueezeStream : Stream, IStreamStack
     {
 #if DEBUG_STREAMS
@@ -35,12 +34,15 @@ namespace SharpCompress.Compressors.Squeezed
         private readonly int _compressedSize;
         private const int NUMVALS = 257;
         private const int SPEOF = 256;
-        private bool _processed = false;
+
+        private Stream _decodedStream;
 
         public SqueezeStream(Stream stream, int compressedSize)
         {
-            _stream = stream;
+            _stream = stream ?? throw new ArgumentNullException(nameof(stream));
             _compressedSize = compressedSize;
+            _decodedStream = BuildDecodedStream();
+
 #if DEBUG_STREAMS
             this.DebugConstruct(typeof(SqueezeStream));
 #endif
@@ -51,52 +53,46 @@ namespace SharpCompress.Compressors.Squeezed
 #if DEBUG_STREAMS
             this.DebugDispose(typeof(SqueezeStream));
 #endif
+            _decodedStream?.Dispose();
             base.Dispose(disposing);
         }
 
         public override bool CanRead => true;
 
         public override bool CanSeek => false;
 
         public override bool CanWrite => false;
 
-        public override long Length => throw new NotImplementedException();
+        public override long Length => throw new NotSupportedException();
 
         public override long Position
         {
-            get => _stream.Position;
-            set => throw new NotImplementedException();
+            get => throw new NotSupportedException();
+            set => throw new NotSupportedException();
         }
 
-        public override void Flush() => throw new NotImplementedException();
+        public override void Flush() => throw new NotSupportedException();
+
+        public override long Seek(long offset, SeekOrigin origin) =>
+            throw new NotSupportedException();
+
+        public override void SetLength(long value) => throw new NotSupportedException();
+
+        public override void Write(byte[] buffer, int offset, int count) =>
+            throw new NotSupportedException();
 
         public override int Read(byte[] buffer, int offset, int count)
         {
-            if (_processed)
-            {
-                return 0;
-            }
-            _processed = true;
-            using var binaryReader = new BinaryReader(_stream);
-
-            // Read numnodes (equivalent to convert_u16!(numnodes, buf))
-            var numnodes = binaryReader.ReadUInt16();
+            return _decodedStream.Read(buffer, offset, count);
+        }
+
+        private Stream BuildDecodedStream()
+        {
+            var binaryReader = new BinaryReader(_stream, Encoding.Default, leaveOpen: true);
+            int numnodes = binaryReader.ReadUInt16();
 
             // Validation: numnodes should be within bounds
-            if (numnodes >= NUMVALS)
+            if (numnodes >= NUMVALS || numnodes == 0)
             {
-                throw new InvalidDataException(
-                    $"Invalid number of nodes {numnodes} (max {NUMVALS - 1})"
-                );
-            }
-
-            // Handle the case where no nodes exist
-            if (numnodes == 0)
-            {
-                return 0;
+                return new MemoryStream(Array.Empty<byte>());
             }
 
             // Build dnode (tree of nodes)
             var dnode = new int[numnodes, 2];
             for (int j = 0; j < numnodes; j++)
             {
@@ -104,42 +100,27 @@ namespace SharpCompress.Compressors.Squeezed
                 dnode[j, 1] = binaryReader.ReadInt16();
             }
 
             // Initialize BitReader for reading bits
             var bitReader = new BitReader(_stream);
-            var decoded = new List<byte>();
 
+            var huffmanDecoded = new MemoryStream();
             int i = 0;
             // Decode the buffer using the dnode tree
 
             while (true)
             {
                 i = dnode[i, bitReader.ReadBit() ? 1 : 0];
                 if (i < 0)
                 {
-                    i = (short)-(i + 1);
+                    i = -(i + 1);
                     if (i == SPEOF)
                     {
                         break;
                     }
-                    else
-                    {
-                        decoded.Add((byte)i);
-                        i = 0;
-                    }
+                    huffmanDecoded.WriteByte((byte)i);
+                    i = 0;
                 }
             }
 
-            // Unpack the decoded buffer using the RLE class
-            var unpacked = RLE.UnpackRLE(decoded.ToArray());
-            unpacked.CopyTo(buffer, 0);
-            return unpacked.Count();
+            huffmanDecoded.Position = 0;
+            return new RunLength90Stream(huffmanDecoded, (int)huffmanDecoded.Length);
         }
-
-        public override long Seek(long offset, SeekOrigin origin) =>
-            throw new NotImplementedException();
-
-        public override void SetLength(long value) => throw new NotImplementedException();
-
-        public override void Write(byte[] buffer, int offset, int count) =>
-            throw new NotImplementedException();
     }
 }
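The decode loop in BuildDecodedStream walks the dnode table one bit at a time: a non-negative entry is the index of the next node, a negative entry encodes a leaf value v as -(v + 1), and the leaf value 256 (SPEOF) ends the stream; the Huffman output is then handed to RunLength90Stream for the RLE90 pass. A self-contained sketch of that traversal over a hypothetical two-node tree (tree contents and bit sequence are invented for illustration; in SqueezeStream the bits come from BitReader):

using System;
using System.Collections.Generic;

class SqueezeTreeDemo
{
    private const int SPEOF = 256; // special end-of-stream leaf value

    static void Main()
    {
        // Hypothetical two-node tree:
        //   node 0: bit 0 -> leaf 'A' (encoded as -(65 + 1)), bit 1 -> node 1
        //   node 1: bit 0 -> leaf 'B' (-(66 + 1)),            bit 1 -> leaf SPEOF (-(256 + 1))
        int[,] dnode =
        {
            { -(65 + 1), 1 },
            { -(66 + 1), -(SPEOF + 1) },
        };

        // Bit sequence: 0 ('A'), 1 0 ('B'), 0 ('A'), 1 1 (SPEOF)
        bool[] bits = { false, true, false, false, true, true };

        var decoded = new List<byte>();
        int i = 0;
        int pos = 0;
        while (true)
        {
            i = dnode[i, bits[pos++] ? 1 : 0];
            if (i < 0)
            {
                i = -(i + 1); // negative entries are leaves: value = -(entry) - 1
                if (i == SPEOF)
                {
                    break; // 256 marks the end of the squeezed data
                }
                decoded.Add((byte)i);
                i = 0; // restart at the root for the next symbol
            }
        }

        Console.WriteLine(System.Text.Encoding.ASCII.GetString(decoded.ToArray())); // "ABA"
    }
}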
@@ -30,7 +30,7 @@ namespace SharpCompress.Readers.Arc
 
     protected override IEnumerable<ArcEntry> GetEntries(Stream stream)
     {
-        ArcEntryHeader headerReader = new ArcEntryHeader(new ArchiveEncoding());
+        ArcEntryHeader headerReader = new ArcEntryHeader(Options.ArchiveEncoding);
         ArcEntryHeader? header;
         while ((header = headerReader.ReadHeader(stream)) != null)
         {
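With this change the ARC entry headers are decoded using the encoding supplied through the reader options instead of a freshly constructed default ArchiveEncoding. A usage sketch (this assumes ArcReader.Open has an overload taking ReaderOptions, as the other SharpCompress readers do; the file name and code page are placeholders):

using System.IO;
using System.Text;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Readers.Arc;

class ArcEncodingDemo
{
    static void Main()
    {
        // Entry names in old ARC files are often in an OEM code page such as 437.
        // GetEncoding(437) may require the System.Text.Encoding.CodePages package.
        Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);

        var options = new ReaderOptions
        {
            ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(437) },
        };

        using Stream stream = File.OpenRead("legacy.arc"); // placeholder path
        using IReader reader = ArcReader.Open(stream, options); // ReaderOptions overload assumed
        while (reader.MoveToNextEntry())
        {
            if (!reader.Entry.IsDirectory)
            {
                System.Console.WriteLine(reader.Entry.Key);
            }
        }
    }
}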
@@ -23,36 +23,9 @@ namespace SharpCompress.Test.Arc
         public void Arc_Uncompressed_Read() => Read("Arc.uncompressed.arc", CompressionType.None);
 
         [Fact]
-        public void Arc_Squeezed_Read()
-        {
-            ProcessArchive("Arc.squeezed.arc");
-        }
+        public void Arc_Squeezed_Read() => Read("Arc.squeezed.arc");
 
         [Fact]
-        public void Arc_Crunched_Read()
-        {
-            ProcessArchive("Arc.crunched.arc");
-        }
-
-        private void ProcessArchive(string archiveName)
-        {
-            // Process a given archive by its name
-            using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, archiveName)))
-            using (IReader reader = ArcReader.Open(stream))
-            {
-                while (reader.MoveToNextEntry())
-                {
-                    if (!reader.Entry.IsDirectory)
-                    {
-                        reader.WriteEntryToDirectory(
-                            SCRATCH_FILES_PATH,
-                            new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
-                        );
-                    }
-                }
-            }
-
-            VerifyFilesByExtension();
-        }
+        public void Arc_Crunched_Read() => Read("Arc.crunched.arc");
     }
 }