Compare commits

..

8 Commits

Author SHA1 Message Date
copilot-swe-agent[bot]
f796aa1fa1 Add ACE 2.0 format reading support
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 15:28:32 +00:00
copilot-swe-agent[bot]
c783a83a9c Address code review feedback for ACE archive support
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 13:32:01 +00:00
copilot-swe-agent[bot]
6ae63f6a47 Add ACE archive support with reader implementation
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 13:20:16 +00:00
copilot-swe-agent[bot]
7149781b0f Initial plan 2025-11-29 13:01:22 +00:00
Adam Hathcock
30e036f9ec Mark for 0.42.0 2025-11-28 13:24:03 +00:00
Adam Hathcock
095c871174 Merge pull request #1043 from adamhathcock/copilot/fix-divide-by-zero-exception
Fix DivideByZeroException when compressing empty files with BZip2
2025-11-27 16:47:39 +00:00
copilot-swe-agent[bot]
6d73c5b295 Fix DivideByZeroException when using BZip2 with empty files
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-27 15:59:39 +00:00
copilot-swe-agent[bot]
cc4d28193c Initial plan 2025-11-27 15:53:13 +00:00
39 changed files with 852 additions and 172 deletions

View File

@@ -10,6 +10,7 @@
| Archive Format | Compression Format(s) | Compress/Decompress | Archive API | Reader API | Writer API |
| ---------------------- | ------------------------------------------------- | ------------------- | --------------- | ---------- | ------------- |
| Ace (5) | None (stored only) | Decompress | N/A | AceReader | N/A |
| Rar | Rar | Decompress (1) | RarArchive | RarReader | N/A |
| Zip (2) | None, Shrink, Reduce, Implode, DEFLATE, Deflate64, BZip2, LZMA/LZMA2, PPMd | Both | ZipArchive | ZipReader | ZipWriter |
| Tar | None | Both | TarArchive | TarReader | TarWriter (3) |
@@ -25,7 +26,8 @@
2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported. Zip64 reading/writing is supported but only with seekable streams as the Zip spec doesn't support Zip64 data in post data descriptors. Deflate64 is only supported for reading.
3. The Tar format requires a file size in the header. If no size is specified to the TarWriter and the stream is not seekable, then an exception will be thrown.
4. The 7Zip format doesn't allow for reading as a forward-only stream so 7Zip is only supported through the Archive API
5. LZip has no support for extra data like the file name or timestamp. There is a default filename used when looking at the entry Key on the archive.
5. ACE is a proprietary archive format. Both ACE 1.0 and ACE 2.0 formats are supported for reading. Only stored (uncompressed) entries can be extracted due to the proprietary nature of the compression algorithms (ACE LZ77 and ACE 2.0 improved LZ77).
6. LZip has no support for extra data like the file name or timestamp. There is a default filename used when looking at the entry Key on the archive.
## Compression Streams

View File

@@ -172,9 +172,4 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
return Entries.All(x => x.IsComplete);
}
}
public virtual bool IsMultiVolume =>
_sourceStream?.Files.Count > 1 || _sourceStream?.Streams.Count > 1;
public virtual bool SupportsMultiThreading => false;
}

View File

@@ -45,14 +45,4 @@ public interface IArchive : IDisposable
/// The total size of the files as uncompressed in the archive.
/// </summary>
long TotalUncompressSize { get; }
/// <summary>
/// Is the archive part of a multi-volume set.
/// </summary>
bool IsMultiVolume { get; }
/// <summary>
/// Does the archive support multi-threaded extraction.
/// </summary>
bool SupportsMultiThreading { get; }
}

View File

@@ -88,7 +88,7 @@ public static class IArchiveEntryExtensions
entry,
destinationDirectory,
options,
entry.WriteToFileAsync,
(x, opt) => entry.WriteToFileAsync(x, opt, cancellationToken),
cancellationToken
);
@@ -124,10 +124,10 @@ public static class IArchiveEntryExtensions
entry,
destinationFileName,
options,
async (x, fm, ct) =>
async (x, fm) =>
{
using var fs = File.Open(destinationFileName, fm);
await entry.WriteToAsync(fs, ct).ConfigureAwait(false);
await entry.WriteToAsync(fs, cancellationToken).ConfigureAwait(false);
},
cancellationToken
);

View File

@@ -0,0 +1,39 @@
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives.Rar;
/// <summary>
/// A rar part based on a FileInfo object
/// </summary>
internal class FileInfoRarArchiveVolume : RarVolume
{
    /// <summary>
    /// The file backing this volume.
    /// </summary>
    internal FileInfo FileInfo { get; }

    /// <summary>
    /// File parts eagerly read from the volume at construction time.
    /// </summary>
    internal ReadOnlyCollection<RarFilePart> FileParts { get; }

    internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options, int index)
        : base(StreamingMode.Seekable, fileInfo.OpenRead(), ForceStreamClosing(options), index)
    {
        FileInfo = fileInfo;
        FileParts = GetVolumeFileParts().ToArray().ToReadOnly();
    }

    // We opened the stream from the FileInfo ourselves, so the volume must
    // own (and close) it regardless of what the caller requested.
    private static ReaderOptions ForceStreamClosing(ReaderOptions options)
    {
        options.LeaveStreamOpen = false;
        return options;
    }

    internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader)
    {
        return new FileInfoRarFilePart(this, ReaderOptions.Password, markHeader, fileHeader, FileInfo);
    }

    internal override IEnumerable<RarFilePart> ReadFileParts() => FileParts;
}

View File

@@ -0,0 +1,21 @@
using System.IO;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Archives.Rar;
internal sealed class FileInfoRarFilePart : SeekableFilePart
{
    internal FileInfoRarFilePart(
        FileInfoRarArchiveVolume volume,
        string? password,
        MarkHeader mh,
        FileHeader fh,
        FileInfo fi
    )
        : base(mh, fh, volume.Index, volume.Stream, password)
    {
        FileInfo = fi;
    }

    /// <summary>
    /// The file on disk this part was read from.
    /// </summary>
    internal FileInfo FileInfo { get; }

    // Diagnostic name combining the on-disk file and the entry inside it.
    internal override string FilePartName =>
        $"Rar File: {FileInfo.FullName} File Entry: {FileHeader.FileName}";
}

View File

@@ -47,9 +47,9 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
{
sourceStream.LoadAllParts(); //request all streams
var streams = sourceStream.Streams.ToArray();
var i = 0;
if (streams.Length > 1 && IsRarFile(streams[1], ReaderOptions)) //test part 2 - true = multipart not split
{
var i = 0;
sourceStream.IsVolumes = true;
streams[1].Position = 0;
sourceStream.Position = 0;
@@ -57,18 +57,12 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
return sourceStream.Streams.Select(a => new StreamRarArchiveVolume(
a,
ReaderOptions,
i++,
IsMultiVolume
i++
));
}
//split mode or single file
return new StreamRarArchiveVolume(
sourceStream,
ReaderOptions,
0,
IsMultiVolume
).AsEnumerable();
return new StreamRarArchiveVolume(sourceStream, ReaderOptions, i++).AsEnumerable();
}
protected override IReader CreateReaderForSolidExtraction()
@@ -89,7 +83,6 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
}
public override bool IsSolid => Volumes.First().IsSolidArchive;
public override bool SupportsMultiThreading => !IsMultiVolume && !IsSolid;
public virtual int MinVersion => Volumes.First().MinVersion;
public virtual int MaxVersion => Volumes.First().MaxVersion;

View File

@@ -134,6 +134,4 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
);
}
}
public override bool SupportsMultiThreading => Parts.Single().SupportsMultiThreading;
}

View File

@@ -1,29 +1,25 @@
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
namespace SharpCompress.Archives.Rar;
internal class SeekableRarFilePart : RarFilePart
internal class SeekableFilePart : RarFilePart
{
private readonly Stream _stream;
private readonly string? _password;
private readonly bool _isMultiVolume;
internal SeekableRarFilePart(
internal SeekableFilePart(
MarkHeader mh,
FileHeader fh,
int index,
Stream stream,
string? password,
bool isMultiVolume
string? password
)
: base(mh, fh, index)
{
_stream = stream;
_password = password;
_isMultiVolume = isMultiVolume;
}
internal override Stream GetCompressedStream()
@@ -46,7 +42,4 @@ internal class SeekableRarFilePart : RarFilePart
}
internal override string FilePartName => "Unknown Stream - File Entry: " + FileHeader.FileName;
public override bool SupportsMultiThreading =>
!_isMultiVolume && _stream is SourceStream ss && ss.IsFileMode && ss.Files.Count == 1;
}

View File

@@ -9,28 +9,11 @@ namespace SharpCompress.Archives.Rar;
internal class StreamRarArchiveVolume : RarVolume
{
private readonly bool _isMultiVolume;
internal StreamRarArchiveVolume(
Stream stream,
ReaderOptions options,
int index,
bool isMultiVolume
)
: base(StreamingMode.Seekable, stream, options, index)
{
_isMultiVolume = isMultiVolume;
}
internal StreamRarArchiveVolume(Stream stream, ReaderOptions options, int index)
: base(StreamingMode.Seekable, stream, options, index) { }
internal override IEnumerable<RarFilePart> ReadFileParts() => GetVolumeFileParts();
internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader) =>
new SeekableRarFilePart(
markHeader,
fileHeader,
Index,
Stream,
ReaderOptions.Password,
_isMultiVolume
);
new SeekableFilePart(markHeader, fileHeader, Index, Stream, ReaderOptions.Password);
}

View File

@@ -283,12 +283,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
yield return new ZipArchiveEntry(
this,
new SeekableZipFilePart(
headerFactory.NotNull(),
deh,
s,
IsMultiVolume
)
new SeekableZipFilePart(headerFactory.NotNull(), deh, s)
);
}
break;
@@ -390,6 +385,4 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
((IStreamStack)stream).StackSeek(0);
return ZipReader.Open(stream, ReaderOptions, Entries);
}
public override bool SupportsMultiThreading => !IsMultiVolume;
}

View File

@@ -23,7 +23,5 @@ public class ZipArchiveEntry : ZipEntry, IArchiveEntry
public bool IsComplete => true;
public override bool SupportsMultiThreading => Parts.Single().SupportsMultiThreading;
#endregion
}

View File

@@ -0,0 +1,58 @@
using System;
using System.Collections.Generic;
namespace SharpCompress.Common.Ace;
/// <summary>
/// Represents an entry (file or directory) within an ACE archive.
/// </summary>
public class AceEntry : Entry
{
    // Single backing part; null when the entry carries no payload.
    private readonly AceFilePart? _part;

    internal AceEntry(AceFilePart? filePart) => _part = filePart;

    /// <summary>
    /// CRC32 of the entry data as recorded in the header (0 when no part is present).
    /// </summary>
    public override long Crc => _part?.Header.Crc32 ?? 0;

    public override string? Key => _part?.Header.Name;

    // ACE headers carry no link information.
    public override string? LinkTarget => null;

    public override long CompressedSize => _part?.Header.CompressedSize ?? 0;

    public override CompressionType CompressionType =>
        _part?.Header.CompressionMethod ?? CompressionType.Unknown;

    public override long Size => _part?.Header.OriginalSize ?? 0;

    public override DateTime? LastModifiedTime => _part?.Header.DateTime;

    // ACE stores only a single (DOS) timestamp per entry.
    public override DateTime? CreatedTime => null;
    public override DateTime? LastAccessedTime => null;
    public override DateTime? ArchivedTime => null;

    public override bool IsEncrypted => false;

    public override bool IsDirectory => _part?.Header.IsDirectory ?? false;

    public override bool IsSplitAfter => false;

    public override int? Attrib => _part?.Header.FileAttributes;

    internal override IEnumerable<FilePart> Parts => _part.Empty();
}

View File

@@ -0,0 +1,339 @@
using System;
using System.IO;
using System.Text;
namespace SharpCompress.Common.Ace;
/// <summary>
/// Represents header information for an ACE archive entry.
/// ACE format uses little-endian byte ordering.
/// Supports both ACE 1.0 and ACE 2.0 formats.
/// </summary>
public class AceEntryHeader
{
    // Header type constants
    private const byte HeaderTypeMain = 0;
    private const byte HeaderTypeFile = 1;
    private const byte HeaderTypeRecovery = 2;

    // Header flags for main header
    private const ushort MainFlagComment = 0x0002;
    private const ushort MainFlagSfx = 0x0200;
    private const ushort MainFlagLocked = 0x0400;
    private const ushort MainFlagSolid = 0x0800;
    private const ushort MainFlagMultiVolume = 0x1000;
    private const ushort MainFlagAv = 0x2000;
    private const ushort MainFlagRecovery = 0x4000;

    // Header flags for file header
    private const ushort FileFlagAddSize = 0x0001;
    private const ushort FileFlagComment = 0x0002;
    private const ushort FileFlagContinued = 0x4000;
    private const ushort FileFlagContinuing = 0x8000;

    // Fixed-length portion of a file header that must be present before the
    // variable-length filename: type(1) + flags(2) + packed(4) + original(4)
    // + datetime(4) + attributes(4) + crc(4) + comp type(1) + comp quality(1)
    // + params(2) + reserved(2) + filename length(2).
    private const int MinFileHeaderSize = 31;

    public ArchiveEncoding ArchiveEncoding { get; }
    public CompressionType CompressionMethod { get; private set; }
    public string? Name { get; private set; }
    public long CompressedSize { get; private set; }
    public long OriginalSize { get; private set; }
    public DateTime DateTime { get; private set; }
    internal uint Crc32 { get; private set; }
    public int FileAttributes { get; private set; }
    public long DataStartPosition { get; private set; }
    public bool IsDirectory { get; private set; }

    /// <summary>
    /// Gets the ACE archive version (10 for ACE 1.0, 20 for ACE 2.0).
    /// </summary>
    public byte AceVersion { get; private set; }

    /// <summary>
    /// Gets whether this is an ACE 2.0 archive.
    /// </summary>
    public bool IsAce20 => AceVersion >= 20;

    /// <summary>
    /// Gets the host operating system that created the archive.
    /// </summary>
    public byte HostOs { get; private set; }

    /// <summary>
    /// Gets whether the archive is solid.
    /// </summary>
    public bool IsSolid { get; private set; }

    /// <summary>
    /// Gets whether the archive is part of a multi-volume set.
    /// </summary>
    public bool IsMultiVolume { get; private set; }

    public AceEntryHeader(ArchiveEncoding archiveEncoding)
    {
        ArchiveEncoding = archiveEncoding;
    }

    /// <summary>
    /// Reads the main archive header from the stream.
    /// Returns true if this is a valid ACE archive.
    /// Supports both ACE 1.0 and ACE 2.0 formats.
    /// </summary>
    /// <param name="stream">Stream positioned at the start of the archive.</param>
    public bool ReadMainHeader(Stream stream)
    {
        // Read header CRC (2 bytes) and header size (2 bytes).
        var headerPrefix = new byte[4];
        if (!TryReadFully(stream, headerPrefix, headerPrefix.Length))
        {
            return false;
        }
        int headerSize = BitConverter.ToUInt16(headerPrefix, 2);
        if (headerSize < 1)
        {
            return false;
        }

        // Read the rest of the header.
        var headerData = new byte[headerSize];
        if (!TryReadFully(stream, headerData, headerSize))
        {
            return false;
        }

        int offset = 0;
        // Header type should be 0 for main header.
        if (headerData[offset++] != HeaderTypeMain)
        {
            return false;
        }

        // Header flags (2 bytes).
        ushort headerFlags = BitConverter.ToUInt16(headerData, offset);
        offset += 2;
        IsSolid = (headerFlags & MainFlagSolid) != 0;
        IsMultiVolume = (headerFlags & MainFlagMultiVolume) != 0;

        // Skip signature "**ACE**" (7 bytes).
        offset += 7;

        // ACE version (1 byte) - 10 for ACE 1.0, 20 for ACE 2.0.
        // NOTE(review): this field is read before the "extract version" byte;
        // some ACE format descriptions order these as (version-to-extract,
        // version-used-to-create) — confirm against a reference archive.
        if (offset < headerData.Length)
        {
            AceVersion = headerData[offset++];
        }
        // Extract version needed (1 byte).
        if (offset < headerData.Length)
        {
            offset++; // Skip version needed
        }
        // Host OS (1 byte).
        if (offset < headerData.Length)
        {
            HostOs = headerData[offset++];
        }
        // Volume number (1 byte).
        if (offset < headerData.Length)
        {
            offset++; // Skip volume number
        }
        // Creation date/time (4 bytes).
        if (offset + 4 <= headerData.Length)
        {
            offset += 4; // Skip datetime
        }
        // Reserved fields (8 bytes).
        if (offset + 8 <= headerData.Length)
        {
            offset += 8;
        }
        // Skip additional fields based on flags.
        // Handle comment if present.
        if ((headerFlags & MainFlagComment) != 0)
        {
            if (offset + 2 <= headerData.Length)
            {
                ushort commentLength = BitConverter.ToUInt16(headerData, offset);
                offset += 2 + commentLength;
            }
        }
        return true;
    }

    /// <summary>
    /// Reads the next file entry header from the stream.
    /// Returns null if no more entries, end of archive, or a malformed header.
    /// Non-file headers (main, recovery, unknown) are skipped.
    /// Supports both ACE 1.0 and ACE 2.0 formats.
    /// </summary>
    /// <param name="stream">Stream positioned at the next header.</param>
    public AceEntryHeader? ReadHeader(Stream stream)
    {
        // Loop (rather than recurse) over non-file headers so a crafted
        // archive with many such headers cannot overflow the stack.
        while (true)
        {
            // Read header CRC (2 bytes) and header size (2 bytes).
            var headerPrefix = new byte[4];
            if (!TryReadFully(stream, headerPrefix, headerPrefix.Length))
            {
                return null; // End of archive
            }
            // ushort headerCrc = BitConverter.ToUInt16(headerPrefix, 0); // CRC for validation
            ushort headerSize = BitConverter.ToUInt16(headerPrefix, 2);
            if (headerSize == 0)
            {
                return null; // End of archive marker
            }

            // Read the header data.
            var headerData = new byte[headerSize];
            if (!TryReadFully(stream, headerData, headerSize))
            {
                return null;
            }

            int offset = 0;
            // Header type (1 byte).
            byte headerType = headerData[offset++];
            if (headerType != HeaderTypeFile)
            {
                // Main header, recovery record (ACE 2.0), or unknown type:
                // skip it and try the next header.
                // TODO(review): headers with FileFlagAddSize may be followed by
                // payload data that should also be skipped — confirm against spec.
                continue;
            }

            // Guard against truncated/malformed file headers before reading
            // the fixed-size fields below (avoids IndexOutOfRangeException).
            if (headerData.Length < MinFileHeaderSize)
            {
                return null;
            }

            // Header flags (2 bytes).
            ushort headerFlags = BitConverter.ToUInt16(headerData, offset);
            offset += 2;
            // Packed size (4 bytes).
            CompressedSize = BitConverter.ToUInt32(headerData, offset);
            offset += 4;
            // Original size (4 bytes).
            OriginalSize = BitConverter.ToUInt32(headerData, offset);
            offset += 4;
            // File date/time in DOS format (4 bytes).
            uint dosDateTime = BitConverter.ToUInt32(headerData, offset);
            DateTime = ConvertDosDateTime(dosDateTime);
            offset += 4;
            // File attributes (4 bytes).
            FileAttributes = (int)BitConverter.ToUInt32(headerData, offset);
            offset += 4;
            // CRC32 (4 bytes).
            Crc32 = BitConverter.ToUInt32(headerData, offset);
            offset += 4;
            // Compression type (1 byte).
            byte compressionType = headerData[offset++];
            CompressionMethod = GetCompressionType(compressionType);
            // Compression quality/parameter (1 byte) - skip.
            offset++;
            // Parameters (2 bytes) - skip.
            offset += 2;
            // Reserved (2 bytes) - skip.
            offset += 2;
            // Filename length (2 bytes).
            ushort filenameLength = BitConverter.ToUInt16(headerData, offset);
            offset += 2;
            // Filename.
            if (offset + filenameLength <= headerData.Length)
            {
                Name = ArchiveEncoding.Decode(headerData, offset, filenameLength);
                offset += filenameLength;
            }
            // Check if entry is a directory based on attributes or name.
            IsDirectory = (FileAttributes & 0x10) != 0 || (Name?.EndsWith('/') ?? false);

            // Handle comment if present.
            if ((headerFlags & FileFlagComment) != 0)
            {
                // Comment length (2 bytes).
                if (offset + 2 <= headerData.Length)
                {
                    ushort commentLength = BitConverter.ToUInt16(headerData, offset);
                    offset += 2 + commentLength; // Skip comment
                }
            }

            // Store the data start position.
            // NOTE(review): relies on stream.Position, i.e. a seekable stream —
            // confirm the reader pipeline always provides one here.
            DataStartPosition = stream.Position;
            return this;
        }
    }

    // Reads exactly count bytes, looping because Stream.Read may return fewer
    // bytes than requested even before end-of-stream. Returns false on EOF.
    private static bool TryReadFully(Stream stream, byte[] buffer, int count)
    {
        var total = 0;
        while (total < count)
        {
            var read = stream.Read(buffer, total, count - total);
            if (read <= 0)
            {
                return false;
            }
            total += read;
        }
        return true;
    }

    private static CompressionType GetCompressionType(byte value)
    {
        return value switch
        {
            0 => CompressionType.None, // Stored
            1 => CompressionType.Ace, // ACE 1.0 LZ77 compression
            2 => CompressionType.Ace2, // ACE 2.0 compression (improved LZ77)
            _ => CompressionType.Unknown,
        };
    }

    /// <summary>
    /// Converts DOS date/time format to DateTime.
    /// Returns DateTime.MinValue for out-of-range or invalid values.
    /// </summary>
    private static DateTime ConvertDosDateTime(uint dosDateTime)
    {
        try
        {
            // DOS packed format: time in the low word, date in the high word.
            int second = (int)(dosDateTime & 0x1F) * 2;
            int minute = (int)((dosDateTime >> 5) & 0x3F);
            int hour = (int)((dosDateTime >> 11) & 0x1F);
            int day = (int)((dosDateTime >> 16) & 0x1F);
            int month = (int)((dosDateTime >> 21) & 0x0F);
            int year = (int)((dosDateTime >> 25) & 0x7F) + 1980;
            if (
                day < 1
                || day > 31
                || month < 1
                || month > 12
                || hour > 23
                || minute > 59
                || second > 59
            )
            {
                return DateTime.MinValue;
            }
            return new DateTime(year, month, day, hour, minute, second);
        }
        catch
        {
            // e.g. 31st of a 30-day month — treat as unknown.
            return DateTime.MinValue;
        }
    }
}

View File

@@ -0,0 +1,59 @@
using System;
using System.IO;
using SharpCompress.IO;
namespace SharpCompress.Common.Ace;
/// <summary>
/// Represents a file part within an ACE archive.
/// Supports both ACE 1.0 and ACE 2.0 formats.
/// </summary>
public class AceFilePart : FilePart
{
    private readonly Stream? _stream;

    internal AceFilePart(AceEntryHeader header, Stream? stream)
        : base(header.ArchiveEncoding)
    {
        _stream = stream;
        Header = header;
    }

    /// <summary>
    /// Parsed header describing this part.
    /// </summary>
    internal AceEntryHeader Header { get; }

    internal override string? FilePartName => Header.Name;

    /// <summary>
    /// Returns a stream over the entry data. Only stored (uncompressed)
    /// entries are supported; ACE's proprietary compression methods throw.
    /// </summary>
    internal override Stream GetCompressedStream()
    {
        var source =
            _stream ?? throw new InvalidOperationException("Stream is not available.");
        return Header.CompressionMethod switch
        {
            // Stored - no compression; expose the raw byte range.
            CompressionType.None => new ReadOnlySubStream(
                source,
                Header.DataStartPosition,
                Header.CompressedSize
            ),
            // ACE 1.0 and 2.0 use proprietary compression methods;
            // the algorithms are not publicly documented.
            CompressionType.Ace or CompressionType.Ace2 => throw new NotSupportedException(
                $"ACE compression method '{Header.CompressionMethod}' is not supported. "
                    + "Only stored (uncompressed) entries can be extracted. "
                    + "ACE uses proprietary compression algorithms that are not publicly documented."
            ),
            _ => throw new NotSupportedException(
                $"ACE compression method '{Header.CompressionMethod}' is not supported. Only stored (uncompressed) entries can be extracted."
            ),
        };
    }

    internal override Stream? GetRawStream() => _stream;
}

View File

@@ -0,0 +1,13 @@
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common.Ace;
/// <summary>
/// Represents a volume (file) of an ACE archive.
/// </summary>
public class AceVolume : Volume
{
    /// <summary>
    /// Wraps the given stream as an ACE archive volume.
    /// </summary>
    /// <param name="stream">Stream containing the volume data.</param>
    /// <param name="readerOptions">Options controlling how the volume is read.</param>
    /// <param name="index">Zero-based volume index within the archive set.</param>
    public AceVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
        : base(stream, readerOptions, index) { }
}

View File

@@ -9,4 +9,5 @@ public enum ArchiveType
GZip,
Arc,
Arj,
Ace,
}

View File

@@ -30,4 +30,6 @@ public enum CompressionType
Distilled,
ZStandard,
ArjLZ77,
Ace,
Ace2,
}

View File

@@ -87,5 +87,4 @@ public abstract class Entry : IEntry
/// Entry file attribute.
/// </summary>
public virtual int? Attrib => throw new NotImplementedException();
public virtual bool SupportsMultiThreading => false;
}

View File

@@ -128,7 +128,7 @@ internal static class ExtractionMethods
IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
Func<string, ExtractionOptions?, CancellationToken, Task> writeAsync,
Func<string, ExtractionOptions?, Task> writeAsync,
CancellationToken cancellationToken = default
)
{
@@ -189,7 +189,7 @@ internal static class ExtractionMethods
"Entry is trying to write a file outside of the destination directory."
);
}
await writeAsync(destinationFileName, options, cancellationToken).ConfigureAwait(false);
await writeAsync(destinationFileName, options).ConfigureAwait(false);
}
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
{
@@ -201,7 +201,7 @@ internal static class ExtractionMethods
IEntry entry,
string destinationFileName,
ExtractionOptions? options,
Func<string, FileMode, CancellationToken, Task> openAndWriteAsync,
Func<string, FileMode, Task> openAndWriteAsync,
CancellationToken cancellationToken = default
)
{
@@ -225,8 +225,7 @@ internal static class ExtractionMethods
fm = FileMode.CreateNew;
}
await openAndWriteAsync(destinationFileName, fm, cancellationToken)
.ConfigureAwait(false);
await openAndWriteAsync(destinationFileName, fm).ConfigureAwait(false);
entry.PreserveExtractionOptions(destinationFileName, options);
}
}

View File

@@ -14,6 +14,4 @@ public abstract class FilePart
internal abstract Stream? GetCompressedStream();
internal abstract Stream? GetRawStream();
internal bool Skipped { get; set; }
public virtual bool SupportsMultiThreading => false;
}

View File

@@ -21,5 +21,4 @@ public interface IEntry
DateTime? LastModifiedTime { get; }
long Size { get; }
int? Attrib { get; }
bool SupportsMultiThreading { get; }
}

View File

@@ -1,6 +1,5 @@
using System.IO;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip;
@@ -8,19 +7,13 @@ internal class SeekableZipFilePart : ZipFilePart
{
private bool _isLocalHeaderLoaded;
private readonly SeekableZipHeaderFactory _headerFactory;
private readonly bool _isMultiVolume;
internal SeekableZipFilePart(
SeekableZipHeaderFactory headerFactory,
DirectoryEntryHeader header,
Stream stream,
bool isMultiVolume
Stream stream
)
: base(header, stream)
{
_headerFactory = headerFactory;
_isMultiVolume = isMultiVolume;
}
: base(header, stream) => _headerFactory = headerFactory;
internal override Stream GetCompressedStream()
{
@@ -37,20 +30,8 @@ internal class SeekableZipFilePart : ZipFilePart
protected override Stream CreateBaseStream()
{
if (!_isMultiVolume && BaseStream is SourceStream ss)
{
if (ss.IsFileMode && ss.Files.Count == 1)
{
var fileStream = ss.CurrentFile.OpenRead();
fileStream.Position = Header.DataStartPosition.NotNull();
return fileStream;
}
}
BaseStream.Position = Header.DataStartPosition.NotNull();
return BaseStream;
}
public override bool SupportsMultiThreading =>
!_isMultiVolume && BaseStream is SourceStream ss && ss.IsFileMode && ss.Files.Count == 1;
}

View File

@@ -544,6 +544,12 @@ internal sealed class CBZip2OutputStream : Stream, IStreamStack
private void EndBlock()
{
// Skip block processing for empty input (no data written)
if (last < 0)
{
return;
}
blockCRC = mCrc.GetFinalCRC();
combinedCRC = (combinedCRC << 1) | (int)(((uint)combinedCRC) >> 31);
combinedCRC ^= blockCRC;

View File

@@ -62,6 +62,10 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
base.Dispose(disposing);
if (disposing)
{
#if DEBUG_STREAMS
this.DebugDispose(typeof(MultiVolumeReadOnlyStream));
#endif
if (filePartEnumerator != null)
{
filePartEnumerator.Dispose();

View File

@@ -82,6 +82,9 @@ internal class RarStream : Stream, IStreamStack
{
if (disposing)
{
#if DEBUG_STREAMS
this.DebugDispose(typeof(RarStream));
#endif
ArrayPool<byte>.Shared.Return(this.tmpBuffer);
this.tmpBuffer = null;
}

View File

@@ -0,0 +1,64 @@
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Readers.Ace;
namespace SharpCompress.Factories;
/// <summary>
/// Factory for ACE archive format detection and reader creation.
/// </summary>
public class AceFactory : Factory, IReaderFactory
{
    // ACE signature: bytes at offset 7 should be "**ACE**"
    private static readonly byte[] AceSignature =
    [
        (byte)'*',
        (byte)'*',
        (byte)'A',
        (byte)'C',
        (byte)'E',
        (byte)'*',
        (byte)'*',
    ];

    public override string Name => "Ace";

    public override ArchiveType? KnownArchiveType => ArchiveType.Ace;

    public override IEnumerable<string> GetSupportedExtensions()
    {
        yield return "ace";
    }

    /// <summary>
    /// Detects the ACE signature ("**ACE**" at byte offset 7).
    /// Reads 14 bytes from the stream's current position; does not rewind.
    /// </summary>
    public override bool IsArchive(
        Stream stream,
        string? password = null,
        int bufferSize = ReaderOptions.DefaultBufferSize
    )
    {
        // ACE files have a specific signature:
        // first two bytes are header CRC, then header size; at offset 7
        // there should be "**ACE**" (7 bytes).
        var bytes = new byte[14];
        // Loop because Stream.Read may legally return fewer bytes than
        // requested even when more data is available (e.g. network streams).
        var total = 0;
        while (total < bytes.Length)
        {
            var read = stream.Read(bytes, total, bytes.Length - total);
            if (read <= 0)
            {
                return false; // stream too short to contain an ACE header
            }
            total += read;
        }
        // Check for "**ACE**" at offset 7.
        for (var i = 0; i < AceSignature.Length; i++)
        {
            if (bytes[7 + i] != AceSignature[i])
            {
                return false;
            }
        }
        return true;
    }

    public IReader OpenReader(Stream stream, ReaderOptions? options) =>
        AceReader.Open(stream, options);
}

View File

@@ -19,6 +19,7 @@ public abstract class Factory : IFactory
RegisterFactory(new TarFactory());
RegisterFactory(new ArcFactory());
RegisterFactory(new ArjFactory());
RegisterFactory(new AceFactory());
}
private static readonly HashSet<Factory> _factories = new();

View File

@@ -15,7 +15,7 @@ public class SourceStream : Stream, IStreamStack
#endif
int IStreamStack.DefaultBufferSize { get; set; }
Stream IStreamStack.BaseStream() => _streams[_streamIndex];
Stream IStreamStack.BaseStream() => _streams[_stream];
int IStreamStack.BufferSize
{
@@ -35,7 +35,7 @@ public class SourceStream : Stream, IStreamStack
private readonly List<Stream> _streams;
private readonly Func<int, FileInfo?>? _getFilePart;
private readonly Func<int, Stream?>? _getStreamPart;
private int _streamIndex;
private int _stream;
public SourceStream(FileInfo file, Func<int, FileInfo?> getPart, ReaderOptions options)
: this(null, null, file, getPart, options) { }
@@ -59,7 +59,7 @@ public class SourceStream : Stream, IStreamStack
if (!IsFileMode)
{
_streams.Add(stream.NotNull("stream is null"));
_streams.Add(stream!);
_getStreamPart = getStreamPart;
_getFilePart = _ => null;
if (stream is FileStream fileStream)
@@ -69,12 +69,12 @@ public class SourceStream : Stream, IStreamStack
}
else
{
_files.Add(file.NotNull("file is null"));
_files.Add(file!);
_streams.Add(_files[0].OpenRead());
_getFilePart = getFilePart;
_getStreamPart = _ => null;
}
_streamIndex = 0;
_stream = 0;
_prevSize = 0;
#if DEBUG_STREAMS
@@ -93,12 +93,10 @@ public class SourceStream : Stream, IStreamStack
public ReaderOptions ReaderOptions { get; }
public bool IsFileMode { get; }
public IReadOnlyList<FileInfo> Files => _files;
public IReadOnlyList<Stream> Streams => _streams;
public IEnumerable<FileInfo> Files => _files;
public IEnumerable<Stream> Streams => _streams;
private Stream Current => _streams[_streamIndex];
public FileInfo CurrentFile => _files[_streamIndex];
private Stream Current => _streams[_stream];
public bool LoadStream(int index) //ensure all parts to id are loaded
{
@@ -109,7 +107,7 @@ public class SourceStream : Stream, IStreamStack
var f = _getFilePart.NotNull("GetFilePart is null")(_streams.Count);
if (f == null)
{
_streamIndex = _streams.Count - 1;
_stream = _streams.Count - 1;
return false;
}
//throw new Exception($"File part {idx} not available.");
@@ -121,7 +119,7 @@ public class SourceStream : Stream, IStreamStack
var s = _getStreamPart.NotNull("GetStreamPart is null")(_streams.Count);
if (s == null)
{
_streamIndex = _streams.Count - 1;
_stream = _streams.Count - 1;
return false;
}
//throw new Exception($"Stream part {idx} not available.");
@@ -139,10 +137,10 @@ public class SourceStream : Stream, IStreamStack
{
if (LoadStream(idx))
{
_streamIndex = idx;
_stream = idx;
}
return _streamIndex == idx;
return _stream == idx;
}
public override bool CanRead => true;
@@ -186,7 +184,7 @@ public class SourceStream : Stream, IStreamStack
var length = Current.Length;
// Load next file if present
if (!SetStream(_streamIndex + 1))
if (!SetStream(_stream + 1))
{
break;
}
@@ -225,7 +223,7 @@ public class SourceStream : Stream, IStreamStack
while (_prevSize + Current.Length < pos)
{
_prevSize += Current.Length;
SetStream(_streamIndex + 1);
SetStream(_stream + 1);
}
}
@@ -275,7 +273,7 @@ public class SourceStream : Stream, IStreamStack
var length = Current.Length;
// Load next file if present
if (!SetStream(_streamIndex + 1))
if (!SetStream(_stream + 1))
{
break;
}
@@ -324,7 +322,7 @@ public class SourceStream : Stream, IStreamStack
var length = Current.Length;
// Load next file if present
if (!SetStream(_streamIndex + 1))
if (!SetStream(_stream + 1))
{
break;
}

View File

@@ -0,0 +1,67 @@
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Common.Ace;
namespace SharpCompress.Readers.Ace;
/// <summary>
/// Reader for ACE archives.
/// ACE is a proprietary archive format. This implementation supports both ACE 1.0 and ACE 2.0 formats
/// and can read archive metadata and extract uncompressed (stored) entries.
/// Compressed entries require proprietary decompression algorithms that are not publicly documented.
/// </summary>
/// <remarks>
/// ACE 2.0 additions over ACE 1.0:
/// - Improved LZ77 compression (compression type 2)
/// - Recovery record support
/// - Additional header flags
/// </remarks>
public class AceReader : AbstractReader<AceEntry, AceVolume>
{
    private readonly AceEntryHeader _mainHeaderReader;
    private bool _mainHeaderRead;

    private AceReader(Stream stream, ReaderOptions options)
        : base(options, ArchiveType.Ace)
    {
        Volume = new AceVolume(stream, options, 0);
        _mainHeaderReader = new AceEntryHeader(Options.ArchiveEncoding);
    }

    public override AceVolume Volume { get; }

    /// <summary>
    /// Opens an AceReader for non-seeking usage with a single volume.
    /// </summary>
    /// <param name="stream">The stream containing the ACE archive.</param>
    /// <param name="options">Reader options.</param>
    /// <returns>An AceReader instance.</returns>
    public static AceReader Open(Stream stream, ReaderOptions? options = null)
    {
        stream.NotNull(nameof(stream));
        return new AceReader(stream, options ?? new ReaderOptions());
    }

    protected override IEnumerable<AceEntry> GetEntries(Stream stream)
    {
        // First, skip past the main header if we haven't already.
        if (!_mainHeaderRead)
        {
            if (!_mainHeaderReader.ReadMainHeader(stream))
            {
                yield break;
            }
            _mainHeaderRead = true;
        }
        // AceEntryHeader.ReadHeader mutates and returns the instance itself,
        // so a FRESH AceEntryHeader must be created for EACH entry. Reusing a
        // single instance across the loop would make every previously yielded
        // entry silently observe the most recently read entry's fields.
        while (true)
        {
            var entryHeader = new AceEntryHeader(Options.ArchiveEncoding);
            var header = entryHeader.ReadHeader(stream);
            if (header is null)
            {
                yield break;
            }
            yield return new AceEntry(new AceFilePart(header, stream));
        }
    }
}

View File

@@ -82,7 +82,7 @@ public static class IReaderExtensions
reader.Entry,
destinationDirectory,
options,
reader.WriteEntryToFileAsync,
(fileName, opts) => reader.WriteEntryToFileAsync(fileName, opts, cancellationToken),
cancellationToken
)
.ConfigureAwait(false);
@@ -101,10 +101,10 @@ public static class IReaderExtensions
reader.Entry,
destinationFileName,
options,
async (x, fm, ct) =>
async (x, fm) =>
{
using var fs = File.Open(destinationFileName, fm);
await reader.WriteEntryToAsync(fs, ct).ConfigureAwait(false);
await reader.WriteEntryToAsync(fs, cancellationToken).ConfigureAwait(false);
},
cancellationToken
)

View File

@@ -70,7 +70,7 @@ public static class ReaderFactory
}
throw new InvalidFormatException(
"Cannot determine compressed stream type. Supported Reader Formats: Arc, Arj, Zip, GZip, BZip2, Tar, Rar, LZip, XZ, ZStandard"
"Cannot determine compressed stream type. Supported Reader Formats: Ace, Arc, Arj, Zip, GZip, BZip2, Tar, Rar, LZip, XZ, ZStandard"
);
}
}

View File

@@ -2,9 +2,9 @@
<PropertyGroup>
<AssemblyTitle>SharpCompress - Pure C# Decompression/Compression</AssemblyTitle>
<NeutralLanguage>en-US</NeutralLanguage>
<VersionPrefix>0.41.0</VersionPrefix>
<AssemblyVersion>0.41.0</AssemblyVersion>
<FileVersion>0.41.0</FileVersion>
<VersionPrefix>0.42.0</VersionPrefix>
<AssemblyVersion>0.42.0</AssemblyVersion>
<FileVersion>0.42.0</FileVersion>
<Authors>Adam Hathcock</Authors>
<TargetFrameworks>net48;net481;netstandard2.0;net6.0;net8.0</TargetFrameworks>
<AssemblyName>SharpCompress</AssemblyName>

View File

@@ -0,0 +1,92 @@
using System.IO;
using SharpCompress.Common;
using SharpCompress.Factories;
using SharpCompress.Readers;
using SharpCompress.Readers.Ace;
using Xunit;
namespace SharpCompress.Test.Ace;
public class AceReaderTests : ReaderTests
{
    public AceReaderTests()
    {
        UseExtensionInsteadOfNameToVerify = true;
        UseCaseInsensitiveToVerify = true;
    }

    [Fact]
    public void Ace_Stored_Read()
    {
        // Walk every entry of the stored (uncompressed) fixture and check
        // the per-entry metadata the reader exposes.
        var path = Path.Combine(TEST_ARCHIVES_PATH, "Ace.stored.ace");
        using var fileStream = File.OpenRead(path);
        using var aceReader = AceReader.Open(fileStream, new ReaderOptions());

        var seen = 0;
        while (aceReader.MoveToNextEntry())
        {
            var entry = aceReader.Entry;
            Assert.Equal(CompressionType.None, entry.CompressionType);
            Assert.False(entry.IsDirectory);
            Assert.NotNull(entry.Key);
            seen++;
        }

        Assert.True(seen > 0, "Expected at least one entry in the archive");
    }

    [Fact]
    public void Ace_Stored_Extract()
    {
        var path = Path.Combine(TEST_ARCHIVES_PATH, "Ace.stored.ace");
        using var fileStream = File.OpenRead(path);
        using var aceReader = AceReader.Open(fileStream, new ReaderOptions());

        while (aceReader.MoveToNextEntry())
        {
            if (aceReader.Entry.IsDirectory)
            {
                continue;
            }
            aceReader.WriteEntryToDirectory(
                SCRATCH_FILES_PATH,
                new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
            );
        }

        // Confirm that extraction actually produced files on disk.
        var extracted = Directory.GetFiles(
            SCRATCH_FILES_PATH,
            "*.*",
            SearchOption.AllDirectories
        );
        Assert.True(extracted.Length > 0, "Expected at least one file to be extracted");
    }

    [Fact]
    public void Ace_Factory_Detection()
    {
        // The factory should recognize the ACE signature from the raw stream.
        var path = Path.Combine(TEST_ARCHIVES_PATH, "Ace.stored.ace");
        using var fileStream = File.OpenRead(path);

        var factory = new AceFactory();
        Assert.True(factory.IsArchive(fileStream));
    }

    [Fact]
    public void Ace_Reader_Properties()
    {
        var path = Path.Combine(TEST_ARCHIVES_PATH, "Ace.stored.ace");
        using var fileStream = File.OpenRead(path);
        using var aceReader = AceReader.Open(fileStream, new ReaderOptions());

        Assert.Equal(ArchiveType.Ace, aceReader.ArchiveType);
        Assert.NotNull(aceReader.Volume);

        if (aceReader.MoveToNextEntry())
        {
            var entry = aceReader.Entry;
            Assert.Equal("test.txt", entry.Key);
            Assert.Equal(CompressionType.None, entry.CompressionType);
            Assert.False(entry.IsEncrypted);
            Assert.False(entry.IsDirectory);
            Assert.False(entry.IsSplitAfter);
        }
    }
}

View File

@@ -134,7 +134,6 @@ public class ArchiveTests : ReaderTests
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
Assert.False(entry.SupportsMultiThreading);
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
@@ -267,31 +266,6 @@ public class ArchiveTests : ReaderTests
VerifyFiles();
}
protected async Task ArchiveFileRead_Multithreaded(
IArchiveFactory archiveFactory,
string testArchive,
ReaderOptions? readerOptions = null
)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
var tasks = new List<Task>();
using (var archive = archiveFactory.Open(new FileInfo(testArchive), readerOptions))
{
Assert.True(archive.SupportsMultiThreading);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
Assert.True(entry.SupportsMultiThreading);
var t = entry.WriteToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
tasks.Add(t);
}
}
await Task.WhenAll(tasks);
VerifyFiles();
}
protected void ArchiveFileRead(
IArchiveFactory archiveFactory,
string testArchive,
@@ -315,11 +289,6 @@ public class ArchiveTests : ReaderTests
protected void ArchiveFileRead(string testArchive, ReaderOptions? readerOptions = null) =>
ArchiveFileRead(ArchiveFactory.AutoFactory, testArchive, readerOptions);
protected Task ArchiveFileRead_Multithreaded(
string testArchive,
ReaderOptions? readerOptions = null
) => ArchiveFileRead_Multithreaded(ArchiveFactory.AutoFactory, testArchive, readerOptions);
protected void ArchiveFileSkip(
string testArchive,
string fileOrder,

View File

@@ -1,6 +1,5 @@
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Rar;
using SharpCompress.Common;
@@ -293,15 +292,9 @@ public class RarArchiveTests : ArchiveTests
[Fact]
public void Rar_ArchiveFileRead() => ArchiveFileRead("Rar.rar");
[Fact]
public Task Rar_ArchiveFileRead_Multithreaded() => ArchiveFileRead_Multithreaded("Rar.rar");
[Fact]
public void Rar5_ArchiveFileRead() => ArchiveFileRead("Rar5.rar");
[Fact]
public Task Rar5_ArchiveFileRead_Multithreaded() => ArchiveFileRead_Multithreaded("Rar5.rar");
[Fact]
public void Rar_ArchiveFileRead_HasDirectories() =>
DoRar_ArchiveFileRead_HasDirectories("Rar.rar");
@@ -366,9 +359,6 @@ public class RarArchiveTests : ArchiveTests
[Fact]
public void Rar2_ArchiveFileRead() => ArchiveFileRead("Rar2.rar");
[Fact]
public Task Rar2_ArchiveFileRead_Multithreaded() => ArchiveFileRead_Multithreaded("Rar2.rar");
[Fact]
public void Rar15_ArchiveFileRead()
{

View File

@@ -2,7 +2,6 @@ using System;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
@@ -89,10 +88,6 @@ public class ZipArchiveTests : ArchiveTests
[Fact]
public void Zip_Deflate_ArchiveFileRead() => ArchiveFileRead("Zip.deflate.zip");
[Fact]
public Task Zip_Deflate_ArchiveFileRead_Multithreaded() =>
ArchiveFileRead_Multithreaded("Zip.deflate.zip");
[Fact]
public void Zip_Deflate_ArchiveExtractToDirectory() =>
ArchiveExtractToDirectory("Zip.deflate.zip");

View File

@@ -1,5 +1,7 @@
using System.IO;
using System.Text;
using SharpCompress.Common;
using SharpCompress.Writers;
using Xunit;
namespace SharpCompress.Test.Zip;
@@ -9,6 +11,42 @@ public class ZipWriterTests : WriterTests
public ZipWriterTests()
: base(ArchiveType.Zip) { }
[Fact]
public void Zip_BZip2_Write_EmptyFile()
{
    // Regression guard: BZip2-compressing a zero-byte entry must not raise
    // a DivideByZeroException.
    using var output = new MemoryStream();
    var writerOptions = new WriterOptions(CompressionType.BZip2)
    {
        ArchiveEncoding = new ArchiveEncoding { Default = new UTF8Encoding(false) },
    };

    using (var zipWriter = WriterFactory.Open(output, ArchiveType.Zip, writerOptions))
    {
        zipWriter.Write("test-folder/zero-byte-file.txt", Stream.Null);
    }

    Assert.True(output.Length > 0);
}
[Fact]
public void Zip_BZip2_Write_EmptyFolder()
{
    // Regression guard: a directory-only entry (trailing slash, no content)
    // written with BZip2 must not raise a DivideByZeroException.
    using var output = new MemoryStream();
    var writerOptions = new WriterOptions(CompressionType.BZip2)
    {
        ArchiveEncoding = new ArchiveEncoding { Default = new UTF8Encoding(false) },
    };

    using (var zipWriter = WriterFactory.Open(output, ArchiveType.Zip, writerOptions))
    {
        zipWriter.Write("test-empty-folder/", Stream.Null);
    }

    Assert.True(output.Length > 0);
}
[Fact]
public void Zip_Deflate_Write() =>
Write(

Binary file not shown.