[ARC] Implement support for compression methods 5, 6 and 7 (crunch).

2025-09-02 03:51:20 +01:00
parent f8e4ceab71
commit dfeda1f426
5 changed files with 135 additions and 3 deletions

View File

@@ -149,7 +149,7 @@ public sealed partial class Arc
if((int)_entries[entryNumber].Method >= 20) return ErrorNumber.InvalidArgument;
- if(_entries[entryNumber].Method > Method.Squeeze) return ErrorNumber.NotSupported;
+ if(_entries[entryNumber].Method > Method.CrunchFastHash) return ErrorNumber.NotSupported;
Stream stream = new OffsetStream(new NonClosableStream(_stream),
_entries[entryNumber].DataOffset,
@@ -163,6 +163,15 @@ public sealed partial class Arc
if(_entries[entryNumber].Method == Method.Squeeze)
stream = new SqueezeStream(stream, _entries[entryNumber].Uncompressed);
if(_entries[entryNumber].Method == Method.CrunchOld)
stream = new CrunchStream(stream, _entries[entryNumber].Uncompressed, false, false);
if(_entries[entryNumber].Method == Method.Crunch)
stream = new CrunchStream(stream, _entries[entryNumber].Uncompressed, true, false);
if(_entries[entryNumber].Method == Method.CrunchFastHash)
stream = new CrunchStream(stream, _entries[entryNumber].Uncompressed, true, true);
filter = new ZZZNoFilter();
ErrorNumber errno = filter.Open(stream);
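For reference, the three crunch variants only differ in the flags handed to CrunchStream. A minimal sketch of that mapping, as a hypothetical standalone helper (only CrunchStream, the Method values and the constructor flags come from this commit; the helper itself is not part of the plugin), could look like:
static Stream OpenCrunched(Stream raw, Method method, long uncompressedLength) =>
    method switch
    {
        // Hypothetical helper; Method is the plugin's compression-method enum used above.
        Method.CrunchOld      => new CrunchStream(raw, uncompressedLength, rle: false, fastHash: false), // ARC method 5
        Method.Crunch         => new CrunchStream(raw, uncompressedLength, rle: true,  fastHash: false), // ARC method 6
        Method.CrunchFastHash => new CrunchStream(raw, uncompressedLength, rle: true,  fastHash: true),  // ARC method 7
        _                     => raw
    };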

View File

@@ -40,6 +40,7 @@
<ItemGroup>
<Compile Include="ADC.cs"/>
<Compile Include="AppleRle.cs"/>
<Compile Include="Arc\CrunchStream.cs"/>
<Compile Include="Arc\PackStream.cs"/>
<Compile Include="Arc\SqueezeStream.cs"/>
<Compile Include="BZip2.cs"/>

View File

@@ -0,0 +1,122 @@
using System;
using System.IO;
using System.Runtime.InteropServices;
namespace Aaru.Compression.Arc;
public partial class CrunchStream : Stream
{
readonly byte[] _decoded;
readonly long _length;
long _position;
public CrunchStream(Stream compressedStream, long decompressedLength, bool rle, bool fastHash)
{
if(compressedStream == null) throw new ArgumentNullException(nameof(compressedStream));
if(!compressedStream.CanRead) throw new ArgumentException("Stream must be readable", nameof(compressedStream));
if(decompressedLength < 0) throw new ArgumentOutOfRangeException(nameof(decompressedLength));
// Read full compressed data into memory
compressedStream.Position = 0;
byte[] inBuf = new byte[compressedStream.Length];
compressedStream.ReadExactly(inBuf, 0, inBuf.Length);
// Allocate output buffer
_decoded = new byte[decompressedLength];
nint outLen = (nint)decompressedLength;
// Call the native decompressor matching the crunch variant
int err = rle switch
{
// LZW only, no repeat packing (ARC method 5)
false when !fastHash => arc_decompress_crunch(inBuf, inBuf.Length, _decoded, ref outLen),
// Repeat packing followed by LZW (ARC method 6)
true when !fastHash => arc_decompress_crunch_nrpack(inBuf, inBuf.Length, _decoded, ref outLen),
// Repeat packing followed by LZW with the faster hash (ARC method 7)
true => arc_decompress_crunch_nrpack_new(inBuf, inBuf.Length, _decoded, ref outLen),
_ => throw new InvalidOperationException("Invalid combination of RLE and FastHash options")
};
if(err != 0) throw new InvalidOperationException("Crunch decompression failed");
// Adjust actual length in case it differs
_length = outLen;
_position = 0;
}
public override bool CanRead => true;
public override bool CanSeek => true;
public override bool CanWrite => false;
public override long Length => _length;
public override long Position
{
get => _position;
set => Seek(value, SeekOrigin.Begin);
}
[LibraryImport("libAaru.Compression.Native")]
public static partial int arc_decompress_crunch(byte[] in_buf, nint in_len, byte[] out_buf, ref nint out_len);
[LibraryImport("libAaru.Compression.Native")]
public static partial int
arc_decompress_crunch_nrpack(byte[] in_buf, nint in_len, byte[] out_buf, ref nint out_len);
[LibraryImport("libAaru.Compression.Native")]
public static partial int arc_decompress_crunch_nrpack_new(byte[] in_buf, nint in_len, byte[] out_buf,
ref nint out_len);
public override void Flush()
{
// no-op
}
/// <summary>
/// Reads up to <paramref name="count" /> bytes from the decompressed buffer
/// into <paramref name="buffer" />, starting at <paramref name="offset" />.
/// </summary>
public override int Read(byte[] buffer, int offset, int count)
{
if(buffer == null) throw new ArgumentNullException(nameof(buffer));
if(offset < 0) throw new ArgumentOutOfRangeException(nameof(offset));
if(count < 0) throw new ArgumentOutOfRangeException(nameof(count));
if(offset + count > buffer.Length) throw new ArgumentException("offset+count exceeds buffer length");
long remaining = _length - _position;
if(remaining <= 0) return 0;
int toRead = (int)Math.Min(count, remaining);
Array.Copy(_decoded, _position, buffer, offset, toRead);
_position += toRead;
return toRead;
}
/// <summary>
/// Sets the current position within the decompressed buffer.
/// </summary>
public override long Seek(long offset, SeekOrigin origin)
{
long newPos = origin switch
{
SeekOrigin.Begin => offset,
SeekOrigin.Current => _position + offset,
SeekOrigin.End => _length + offset,
_ => throw new ArgumentException("Invalid SeekOrigin", nameof(origin))
};
if(newPos < 0 || newPos > _length) throw new IOException("Attempt to seek outside the buffer");
_position = newPos;
return _position;
}
public override void SetLength(long value)
{
throw new NotSupportedException("Cannot resize decompressed buffer");
}
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotSupportedException("Stream is read-only");
}
}
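Since CrunchStream decompresses everything in its constructor and then serves reads from the in-memory buffer, a usage sketch (the input file name, the method-6 data and the expected size are assumed placeholders, not part of this commit) is simply:
using System.IO;
using Aaru.Compression.Arc;

byte[] crunchedBytes = File.ReadAllBytes("entry.crunched"); // assumed sample file holding raw method-6 data
long   expectedSize  = 4096;                                // assumed value taken from the ARC entry header

using var compressed = new MemoryStream(crunchedBytes);
using var crunch     = new CrunchStream(compressed, expectedSize, rle: true, fastHash: false);

byte[] output = new byte[crunch.Length];
int    read   = crunch.Read(output, 0, output.Length);      // copies from the already-decoded buffer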

View File

@@ -28,7 +28,7 @@ public partial class PackStream : Stream
// Call native decompressor
int err = arc_decompress_pack(inBuf, inBuf.Length, _decoded, ref outLen);
- if(err != 0) throw new InvalidOperationException("LH5 decompression failed");
+ if(err != 0) throw new InvalidOperationException("Pack decompression failed");
// Adjust actual length in case it differs
_length = outLen;

View File

@@ -28,7 +28,7 @@ public partial class SqueezeStream : Stream
// Call native decompressor
int err = arc_decompress_squeeze(inBuf, inBuf.Length, _decoded, ref outLen);
- if(err != 0) throw new InvalidOperationException("LH5 decompression failed");
+ if(err != 0) throw new InvalidOperationException("Squeeze decompression failed");
// Adjust actual length in case it differs
_length = outLen;