Compare commits

...

92 Commits

Author SHA1 Message Date
Adam Hathcock
8b1ba9a00c Merge pull request #914 from adamhathcock/update-deps
Update dependencies and csharpier
2025-06-03 08:33:18 +01:00
Adam Hathcock
b02584ef9e Update deps again 2025-06-03 08:30:28 +01:00
Adam Hathcock
88cd6bfd1a Merge remote-tracking branch 'origin/master' into update-deps 2025-06-03 08:13:41 +01:00
Adam Hathcock
89a3da14c9 Merge pull request #918 from Morilli/fix-bzip2-selector-oob
[bzip2] fix possible out of bounds access due to unsanitized nSelectors usage
2025-06-03 08:11:55 +01:00
Morilli
619d987492 fix possible out of bounds access due to unsanitized nSelectors usage 2025-06-02 21:24:38 +02:00
Adam Hathcock
744e410a1a Merge pull request #916 from Morilli/rar-multipart-reader
Implement multipart rar handling for ExtractAllEntries
2025-05-14 11:33:12 +01:00
Morilli
6e51967993 format 2025-05-14 11:42:52 +02:00
Morilli
7989ab2e28 modify now-broken test
This test tested that skipping over entries using the reader interface for an encrypted multi-volume rar archive worked.
However reading those entries doesn't work and the skipping was also not working properly so I believe it's fine to "break" this functionality.
2025-05-14 11:18:41 +02:00
Morilli
a3570a568d fix AbstractReader.Skip for multipart files 2025-05-14 10:27:58 +02:00
Morilli
c0cd998836 add failing test 2025-05-14 10:27:30 +02:00
Morilli
1a452acd1c implement ExtractAllEntries for multipart rar files 2025-05-14 10:27:18 +02:00
Adam Hathcock
6c54083b08 Merge remote-tracking branch 'origin/master' into update-deps 2025-04-28 16:30:40 +01:00
Adam Hathcock
76105ebdaf fix bullseye 2025-04-28 16:30:28 +01:00
Adam Hathcock
2c452201fa Merge pull request #854 from zgabi/patch-1
return Stream.Null when 7z entry has no stream
2025-04-28 16:26:58 +01:00
Adam Hathcock
9fc7fc73f9 Merge branch 'master' into patch-1 2025-04-28 16:21:20 +01:00
Adam Hathcock
e7417e35ba Update dependencies and csharpier 2025-04-28 16:18:01 +01:00
Adam Hathcock
19eb4c7cba Merge pull request #834 from adamhathcock/exception-normalization
Add SharpCompressException and use it or children in most places
2025-04-28 16:12:25 +01:00
Adam Hathcock
1f39a0c9da Merge remote-tracking branch 'origin/master' into exception-normalization
# Conflicts:
#	src/SharpCompress/Readers/ReaderFactory.cs
2025-04-28 16:08:08 +01:00
Adam Hathcock
a480a8893c format 2025-04-28 16:06:00 +01:00
Adam Hathcock
d3a9e341a5 Merge fixes 2025-04-28 16:05:31 +01:00
Adam Hathcock
95caffe607 Merge remote-tracking branch 'origin/master' into exception-normalization
# Conflicts:
#	src/SharpCompress/Common/Rar/RarVolume.cs
#	src/SharpCompress/Common/SevenZip/SevenZipFilePart.cs
#	src/SharpCompress/Compressors/LZMA/LZipStream.cs
2025-04-28 15:59:34 +01:00
Adam Hathcock
fdeca61284 Merge pull request #913 from jdpurcell/pr-bss-fix
Fix regression with BufferedSubStream calculation
2025-03-26 09:00:38 +00:00
J.D. Purcell
45dc653191 Fix regression with size calculation 2025-03-25 20:43:14 -04:00
Adam Hathcock
6e48302b7b Merge pull request #912 from jdpurcell/pr-bss-optim
Optimize BufferedSubStream.ReadByte
2025-03-25 08:04:38 +00:00
J.D. Purcell
a7918d7b11 Optimize BufferedSubStream.ReadByte 2025-03-24 21:28:08 -04:00
Adam Hathcock
ec21253af9 Merge pull request #909 from Morilli/update-usage
Update USAGE.md to remove problematic extraction example
2025-03-24 08:20:17 +00:00
Adam Hathcock
97cdda8663 Merge branch 'master' into update-usage 2025-03-24 08:18:16 +00:00
Adam Hathcock
35ac2b9d71 Merge pull request #910 from jdpurcell/pr-rangedecoderoptim
Optimize LZMA range decoder
2025-03-24 08:17:28 +00:00
J.D. Purcell
14affd8ffa Optimize LZMA range decoder 2025-03-22 17:14:38 -04:00
Morilli
c48409c903 Update USAGE.md
added an explanatory comment to the ExtractAllEntries usage and removed the problematic previous code.

I've added a new example for iterating over entries that doesn't extract them.
2025-03-22 16:50:15 +01:00
Adam Hathcock
227f66f299 Merge pull request #907 from jdpurcell/pr-copyblockoptim
Optimize LZ OutWindow.CopyBlock
2025-03-20 08:24:39 +00:00
J.D. Purcell
c2d9bf94d1 Optimize LZ OutWindow.CopyBlock 2025-03-19 22:29:38 -04:00
Adam Hathcock
8f03841161 Merge pull request #906 from TwanVanDongen/master
Added ARC's crunched methods 5, 6, 7 & 8
2025-03-17 08:30:12 +00:00
Twan van Dongen
344a1ed912 Merge branch 'master' of https://github.com/TwanVanDongen/sharpcompress 2025-03-14 19:09:56 +01:00
Twan van Dongen
ff71993b31 Trying to overcome differences in csharpier versions... 2025-03-14 19:09:52 +01:00
Twan van Dongen
18bb773b2c Merge branch 'master' of https://github.com/TwanVanDongen/sharpcompress 2025-03-14 19:06:14 +01:00
Twan van Dongen
7f905c7940 More csharpier stuff 2025-03-14 19:06:10 +01:00
Twan van Dongen
8a4ba6fc56 Removed empty line; removed failing test 2025-03-14 19:03:54 +01:00
Twan van Dongen
e0b275c01c Removed empty line for CSharpier 2025-03-14 19:01:07 +01:00
Twan van Dongen
5926db85df Added ARC's crunched methods 5, 6, 7 & 8 2025-03-14 18:58:34 +01:00
Adam Hathcock
a4715a10e7 Merge pull request #905 from TwanVanDongen/master
ARC decompression methods 3 and 4 added
2025-03-12 08:21:57 +00:00
Twan van Dongen
de0f5c0fcb Merge branch 'master' of https://github.com/TwanVanDongen/sharpcompress 2025-03-11 20:20:45 +01:00
Twan van Dongen
88d85ce6ac Extra line removed for csharpier 2025-03-11 20:20:41 +01:00
Twan van Dongen
131c171d3e Merge branch 'master' of https://github.com/TwanVanDongen/sharpcompress 2025-03-11 20:16:24 +01:00
Twan van Dongen
c5ddef6ef7 An exception occurred in ReadOnlySubStream when attempting to set the position to the same value. 2025-03-11 20:15:53 +01:00
Twan van Dongen
f36486d006 Merge branch 'master' of https://github.com/TwanVanDongen/sharpcompress 2025-03-11 18:57:52 +01:00
Twan van Dongen
eaf466c5c3 Implementation of squeezed and packed compression algorithms for .ARC archive format 2025-03-11 18:15:53 +01:00
Adam Hathcock
b41bcc349e Merge pull request #903 from TwanVanDongen/master
Base Reader implementation of .ARC format
2025-03-10 08:55:43 +00:00
Adam Hathcock
825c61bdcd Merge branch 'master' into master 2025-03-10 08:51:58 +00:00
Adam Hathcock
f13f49bd71 Merge pull request #904 from jdpurcell/pr-7zattr
Provide access to extended attributes for 7-zip
2025-03-10 08:49:33 +00:00
J.D. Purcell
88f5d4544b Add SevenZipEntry ExtendedAttrib property 2025-03-09 17:16:55 -04:00
J.D. Purcell
b7432a20f0 Remove some unused internal members 2025-03-09 17:16:55 -04:00
Twan van Dongen
ab57c85e66 Missing comma added for csharpier 2025-03-09 18:45:27 +01:00
Twan van Dongen
c7c41bc0d8 Missing comma added for csharpier checks; Base Reader implementation of .ARC format 2025-03-09 18:40:43 +01:00
Twan van Dongen
c66f9b06a4 Base Reader implementation of .ARC format 2025-03-09 18:22:52 +01:00
Adam Hathcock
a3ac7e7ca7 Merge pull request #901 from ms264556/feature/sha256-quick-fix
Handle XZ CheckType SHA-256
2025-03-07 10:52:24 +00:00
ms264556
187b762f8a Corrected XZ BlockCheckSize formula 2025-03-07 19:53:49 +13:00
ms264556
3a7fbdfa52 Handle XZ CheckType SHA-256 2025-03-07 14:46:07 +13:00
Adam Hathcock
bbeb46b37f Merge pull request #900 from Morilli/WriteToDirectory-reader
make WriteToDirectory functions use ExtractAllEntries
2025-03-03 08:32:00 +00:00
Morilli
5450b9a700 make WriteToDirectory functions use ExtractAllEntries
for extracting files from solid archives this can be required to get acceptable extraction performance and should therefore always be called.
2025-02-28 21:46:04 +01:00
Adam Hathcock
353b28647c Merge pull request #897 from Morilli/bufferedsubstream-readbyte
Implement ReadByte for BufferedSubStream
2025-02-17 08:07:14 +00:00
Adam Hathcock
2411f4f870 Merge branch 'master' into bufferedsubstream-readbyte 2025-02-17 08:05:02 +00:00
Adam Hathcock
34cd059423 Merge pull request #898 from Morilli/lz-readbyte
Implement ReadByte for LzmaStream and LzOutWindow
2025-02-17 08:04:34 +00:00
Morilli
4b4ec12c87 Implement ReadByte for LzmaStream and LzOutWindow 2025-02-16 18:37:56 +01:00
Morilli
441147c0dc Implement ReadByte for BufferedSubStream 2025-02-16 13:41:46 +01:00
Adam Hathcock
08ceac9f46 Merge pull request #896 from adamhathcock/rar_unpack20_audio_fix
Rar2 v20,v26 Multimedia (Audio) decoder fix
2025-02-12 08:25:38 +00:00
Nanook
6bc690b50b Seriously? 2025-02-11 23:10:03 +00:00
Nanook
c41888b338 Rar2 v20,v26 Multimedia (Audio) decoder fix 2025-02-11 22:30:59 +00:00
Adam Hathcock
6d3c980b39 Merge pull request #894 from Morilli/fix-rare-solid-rar-failure
Fix condition in rar v3 code
2025-02-11 11:39:17 +00:00
Morilli
eb5db176fa revert change to rar5 code
... yeah... apparently the thing that fixes rar3 code breaks rar5 code
2025-02-11 11:21:51 +01:00
Morilli
a77c03fcaf add comment and change the same code in new Unpack50 2025-02-11 11:03:56 +01:00
Adam Hathcock
04b25011e9 Merge branch 'master' into fix-rare-solid-rar-failure 2025-02-11 08:31:49 +00:00
Adam Hathcock
96b34aec10 Merge pull request #895 from Morilli/fix-test-concurrency
use File.OpenRead instead of File.Open in tests to allow concurrent access
2025-02-11 08:30:12 +00:00
Morilli
d560b46c85 use File.OpenRead instead of File.Open to allow concurrent access 2025-02-11 03:31:49 +01:00
Morilli
94789ce455 Fix condition in rar v3 code 2025-02-11 03:04:18 +01:00
Adam Hathcock
55797e5873 Merge pull request #893 from adamhathcock/rar_unpack20
Fix for Rar4 v20 compression.
2025-02-10 09:27:27 +00:00
Nanook
675cab3074 Fix for Rar4 v20 compression. 2025-02-07 22:39:33 +00:00
Adam Hathcock
8d63ab646e Merge pull request #891 from Morilli/fix-zip-datadescriptor-header
Fix zip entry handling for entries with data descriptors
2025-01-28 08:12:40 +00:00
Adam Hathcock
37a2fa1cdc Merge branch 'master' into fix-zip-datadescriptor-header 2025-01-28 08:08:55 +00:00
Adam Hathcock
79ed9650b3 Merge pull request #892 from adamhathcock/fix-tests
don't run net48 on non-windows
2025-01-28 08:08:41 +00:00
Adam Hathcock
b6dc58164e don't run net48 on non-windows 2025-01-28 08:04:59 +00:00
Morilli
f9a974c1fe fix formatting 2025-01-28 02:24:42 +01:00
Morilli
91e672befb remove old hack trying to fix a similar thing
Introduced in af264cdc58c9d076bf83477cbdbfe7a5dad282b7; the test included in that commit passes still.
2025-01-28 02:12:16 +01:00
Morilli
c14d18b9df set local header data from directory header when flag is set
As described in section 4.4.7-4.4.9 of the zip specification when this flag is set the correct values will be in the data descriptor record and in the directory header.
2025-01-28 01:56:14 +01:00
Morilli
d2cfc1844c add failing test 2025-01-28 01:49:49 +01:00
Adam Hathcock
2fb3243a1a Tests also on net48 2025-01-16 08:38:11 +00:00
Zavarkó Gábor
471a3f63fe return Stream.Null when 7z entry has no stream 2024-07-07 00:14:00 +02:00
Adam Hathcock
5d9c99508d fmt 2024-04-23 15:08:50 +01:00
Adam Hathcock
e4d5b56951 fix more nulls and tests 2024-04-23 15:08:32 +01:00
Adam Hathcock
e31238d121 fmt 2024-04-23 14:48:50 +01:00
Adam Hathcock
a283d99e1b compiles 2024-04-23 14:48:33 +01:00
Adam Hathcock
8cb621ebed Add SharpCompressException and use it or children in most places 2024-04-23 14:47:40 +01:00
135 changed files with 1621 additions and 634 deletions

View File

@@ -3,9 +3,9 @@
"isRoot": true,
"tools": {
"csharpier": {
"version": "0.30.6",
"version": "1.0.2",
"commands": [
"dotnet-csharpier"
"csharpier"
],
"rollForward": false
}

View File

@@ -1,19 +1,19 @@
<Project>
<ItemGroup>
<PackageVersion Include="Bullseye" Version="5.0.0" />
<PackageVersion Include="FluentAssertions" Version="7.0.0" />
<PackageVersion Include="Bullseye" Version="6.0.0" />
<PackageVersion Include="AwesomeAssertions" Version="9.0.0" />
<PackageVersion Include="Glob" Version="1.1.9" />
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="8.0.0" />
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="17.12.0" />
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="17.13.0" />
<PackageVersion Include="Mono.Posix.NETStandard" Version="1.0.0" />
<PackageVersion Include="SimpleExec" Version="12.0.0" />
<PackageVersion Include="System.Buffers" Version="4.6.0" />
<PackageVersion Include="System.Memory" Version="4.6.0" />
<PackageVersion Include="System.Text.Encoding.CodePages" Version="8.0.0" />
<PackageVersion Include="xunit" Version="2.9.3" />
<PackageVersion Include="xunit.runner.visualstudio" Version="3.0.1" />
<PackageVersion Include="xunit.runner.visualstudio" Version="3.1.0" />
<PackageVersion Include="xunit.SkippableFact" Version="1.5.23" />
<PackageVersion Include="ZstdSharp.Port" Version="0.8.4" />
<PackageVersion Include="ZstdSharp.Port" Version="0.8.5" />
<GlobalPackageReference Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
</ItemGroup>
</Project>

View File

@@ -20,6 +20,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Config", "Config", "{CDB425
.editorconfig = .editorconfig
Directory.Packages.props = Directory.Packages.props
NuGet.config = NuGet.config
.github\workflows\dotnetcore.yml = .github\workflows\dotnetcore.yml
EndProjectSection
EndProject
Global

View File

@@ -71,18 +71,34 @@ using (var archive = ZipArchive.Create())
memoryStream.Position = 0;
```
### Extract all files from a Rar file to a directory using RarArchive
### Extract all files from a rar file to a directory using RarArchive
Note: Extracting a solid rar or 7z file needs to be done in sequential order to get acceptable decompression speed.
It is explicitly recommended to use `ExtractAllEntries` when extracting an entire `IArchive` instead of iterating over all its `Entries`.
Alternatively, use `IArchive.WriteToDirectory`.
```C#
using (var archive = RarArchive.Open("Test.rar"))
{
using (var reader = archive.ExtractAllEntries())
{
reader.WriteAllToDirectory(@"D:\temp", new ExtractionOptions()
{
ExtractFullPath = true,
Overwrite = true
});
}
}
```
### Iterate over all files from a Rar file using RarArchive
```C#
using (var archive = RarArchive.Open("Test.rar"))
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
entry.WriteToDirectory("D:\\temp", new ExtractionOptions()
{
ExtractFullPath = true,
Overwrite = true
});
Console.WriteLine($"{entry.Key}: {entry.Size} bytes");
}
}
```

View File

@@ -15,7 +15,7 @@ const string Publish = "publish";
Target(
Clean,
ForEach("**/bin", "**/obj"),
["**/bin", "**/obj"],
dir =>
{
IEnumerable<string> GetDirectories(string d)
@@ -44,14 +44,14 @@ Target(
() =>
{
Run("dotnet", "tool restore");
Run("dotnet", "csharpier --check .");
Run("dotnet", "csharpier check .");
}
);
Target(Restore, DependsOn(Format), () => Run("dotnet", "restore"));
Target(Restore, [Format], () => Run("dotnet", "restore"));
Target(
Build,
DependsOn(Restore),
[Restore],
() =>
{
Run("dotnet", "build src/SharpCompress/SharpCompress.csproj -c Release --no-restore");
@@ -60,8 +60,8 @@ Target(
Target(
Test,
DependsOn(Build),
ForEach("net8.0", "net462"),
[Build],
["net8.0", "net48"],
framework =>
{
IEnumerable<string> GetFiles(string d)
@@ -69,7 +69,7 @@ Target(
return Glob.Files(".", d);
}
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows) && framework == "net462")
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows) && framework == "net48")
{
return;
}
@@ -83,13 +83,13 @@ Target(
Target(
Publish,
DependsOn(Test),
[Test],
() =>
{
Run("dotnet", "pack src/SharpCompress/SharpCompress.csproj -c Release -o artifacts/");
}
);
Target("default", DependsOn(Publish), () => Console.WriteLine("Done!"));
Target("default", [Publish], () => Console.WriteLine("Done!"));
await RunTargetsAndExitAsync(args);

View File

@@ -1,14 +1,11 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net8.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Bullseye" />
<PackageReference Include="Glob" />
<PackageReference Include="SimpleExec" />
<PackageReference Include="SimpleExec" />
</ItemGroup>
</Project>

View File

@@ -4,9 +4,9 @@
"net8.0": {
"Bullseye": {
"type": "Direct",
"requested": "[5.0.0, )",
"resolved": "5.0.0",
"contentHash": "bqyt+m17ym+5aN45C5oZRAjuLDt8jKiCm/ys1XfymIXSkrTFwvI/QsbY3ucPSHDz7SF7uON7B57kXFv5H2k1ew=="
"requested": "[6.0.0, )",
"resolved": "6.0.0",
"contentHash": "vgwwXfzs7jJrskWH7saHRMgPzziq/e86QZNWY1MnMxd7e+De7E7EX4K3C7yrvaK9y02SJoLxNxcLG/q5qUAghw=="
},
"Glob": {
"type": "Direct",

View File

@@ -53,7 +53,7 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
{
if (!stream.CanSeek || !stream.CanRead)
{
throw new ArgumentException("Archive streams must be Readable and Seekable");
throw new ArchiveException("Archive streams must be Readable and Seekable");
}
return stream;
}

View File

@@ -151,7 +151,7 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
{
if (!source.CanRead || !source.CanSeek)
{
throw new ArgumentException(
throw new ArchiveException(
"Streams must be readable and seekable to use the Writing Archive API"
);
}

View File

@@ -122,10 +122,7 @@ public static class ArchiveFactory
)
{
using var archive = Open(sourceArchive);
foreach (var entry in archive.Entries)
{
entry.WriteToDirectory(destinationDirectory, options);
}
archive.WriteToDirectory(destinationDirectory, options);
}
private static T FindFactory<T>(FileInfo finfo)

View File

@@ -162,7 +162,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
{
if (Entries.Any())
{
throw new InvalidOperationException("Only one entry is allowed in a GZip Archive");
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
}
return new GZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
}
@@ -176,7 +176,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
{
if (Entries.Count > 1)
{
throw new InvalidOperationException("Only one entry is allowed in a GZip Archive");
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
}
using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))

View File

@@ -17,7 +17,7 @@ public class GZipArchiveEntry : GZipEntry, IArchiveEntry
{
part.GetRawStream().Position = part.EntryStartPosition;
}
return Parts.Single().GetCompressedStream();
return Parts.Single().GetCompressedStream().NotNull();
}
#region IArchiveEntry Members

View File

@@ -4,6 +4,7 @@ using System.IO;
using System.Linq;
using System.Threading;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
@@ -18,10 +19,8 @@ public static class IArchiveExtensions
ExtractionOptions? options = null
)
{
foreach (var entry in archive.Entries.Where(x => !x.IsDirectory))
{
entry.WriteToDirectory(destinationDirectory, options);
}
using var reader = archive.ExtractAllEntries();
reader.WriteAllToDirectory(destinationDirectory, options);
}
/// <summary>

View File

@@ -67,6 +67,16 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
protected override IReader CreateReaderForSolidExtraction()
{
if (this.IsMultipartVolume())
{
var streams = Volumes.Select(volume =>
{
volume.Stream.Position = 0;
return volume.Stream;
});
return RarReader.Open(streams, ReaderOptions);
}
var stream = Volumes.First().Stream;
stream.Position = 0;
return RarReader.Open(stream, ReaderOptions);

View File

@@ -42,7 +42,7 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
{
CheckIncomplete();
return BitConverter.ToUInt32(
parts.Select(fp => fp.FileHeader).Single(fh => !fh.IsSplitAfter).FileCrc,
parts.Select(fp => fp.FileHeader).Single(fh => !fh.IsSplitAfter).FileCrc.NotNull(),
0
);
}

View File

@@ -10,7 +10,7 @@ public class TarArchiveEntry : TarEntry, IArchiveEntry
internal TarArchiveEntry(TarArchive archive, TarFilePart? part, CompressionType compressionType)
: base(part, compressionType) => Archive = archive;
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream();
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();
#region IArchiveEntry Members

View File

@@ -9,7 +9,7 @@ public class ZipArchiveEntry : ZipEntry, IArchiveEntry
internal ZipArchiveEntry(ZipArchive archive, SeekableZipFilePart? part)
: base(part) => Archive = archive;
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream();
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();
#region IArchiveEntry Members

View File

@@ -0,0 +1,60 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.GZip;
using SharpCompress.Common.Tar;
namespace SharpCompress.Common.Arc
{
public class ArcEntry : Entry
{
private readonly ArcFilePart? _filePart;
internal ArcEntry(ArcFilePart? filePart)
{
_filePart = filePart;
}
public override long Crc
{
get
{
if (_filePart == null)
{
return 0;
}
return _filePart.Header.Crc16;
}
}
public override string? Key => _filePart?.Header.Name;
public override string? LinkTarget => null;
public override long CompressedSize => _filePart?.Header.CompressedSize ?? 0;
public override CompressionType CompressionType =>
_filePart?.Header.CompressionMethod ?? CompressionType.Unknown;
public override long Size => throw new NotImplementedException();
public override DateTime? LastModifiedTime => null;
public override DateTime? CreatedTime => null;
public override DateTime? LastAccessedTime => null;
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted => false;
public override bool IsDirectory => false;
public override bool IsSplitAfter => false;
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
}
}

View File

@@ -0,0 +1,76 @@
using System;
using System.IO;
using System.Linq;
using System.Text;
namespace SharpCompress.Common.Arc
{
public class ArcEntryHeader
{
public ArchiveEncoding ArchiveEncoding { get; }
public CompressionType CompressionMethod { get; private set; }
public string? Name { get; private set; }
public long CompressedSize { get; private set; }
public DateTime DateTime { get; private set; }
public int Crc16 { get; private set; }
public long OriginalSize { get; private set; }
public long DataStartPosition { get; private set; }
public ArcEntryHeader(ArchiveEncoding archiveEncoding)
{
this.ArchiveEncoding = archiveEncoding;
}
public ArcEntryHeader? ReadHeader(Stream stream)
{
byte[] headerBytes = new byte[29];
if (stream.Read(headerBytes, 0, headerBytes.Length) != headerBytes.Length)
{
return null;
}
DataStartPosition = stream.Position;
return LoadFrom(headerBytes);
}
public ArcEntryHeader LoadFrom(byte[] headerBytes)
{
CompressionMethod = GetCompressionType(headerBytes[1]);
// Read name
int nameEnd = Array.IndexOf(headerBytes, (byte)0, 1); // Find null terminator
Name = Encoding.UTF8.GetString(headerBytes, 2, nameEnd > 0 ? nameEnd - 2 : 12);
int offset = 15;
CompressedSize = BitConverter.ToUInt32(headerBytes, offset);
offset += 4;
uint rawDateTime = BitConverter.ToUInt32(headerBytes, offset);
DateTime = ConvertToDateTime(rawDateTime);
offset += 4;
Crc16 = BitConverter.ToUInt16(headerBytes, offset);
offset += 2;
OriginalSize = BitConverter.ToUInt32(headerBytes, offset);
return this;
}
private CompressionType GetCompressionType(byte value)
{
return value switch
{
1 or 2 => CompressionType.None,
3 => CompressionType.RLE90,
4 => CompressionType.Squeezed,
5 or 6 or 7 or 8 => CompressionType.Crunched,
9 => CompressionType.Squashed,
10 => CompressionType.Crushed,
11 => CompressionType.Distilled,
_ => CompressionType.Unknown,
};
}
public static DateTime ConvertToDateTime(long rawDateTime)
{
// Convert Unix timestamp to DateTime (UTC)
return DateTimeOffset.FromUnixTimeSeconds(rawDateTime).UtcDateTime;
}
}
}

View File

@@ -0,0 +1,75 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.GZip;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.Lzw;
using SharpCompress.Compressors.RLE90;
using SharpCompress.Compressors.Squeezed;
using SharpCompress.IO;
namespace SharpCompress.Common.Arc
{
public class ArcFilePart : FilePart
{
private readonly Stream? _stream;
internal ArcFilePart(ArcEntryHeader localArcHeader, Stream? seekableStream)
: base(localArcHeader.ArchiveEncoding)
{
_stream = seekableStream;
Header = localArcHeader;
}
internal ArcEntryHeader Header { get; set; }
internal override string? FilePartName => Header.Name;
internal override Stream GetCompressedStream()
{
if (_stream != null)
{
Stream compressedStream;
switch (Header.CompressionMethod)
{
case CompressionType.None:
compressedStream = new ReadOnlySubStream(
_stream,
Header.DataStartPosition,
Header.CompressedSize
);
break;
case CompressionType.RLE90:
compressedStream = new RunLength90Stream(
_stream,
(int)Header.CompressedSize
);
break;
case CompressionType.Squeezed:
compressedStream = new SqueezeStream(_stream, (int)Header.CompressedSize);
break;
case CompressionType.Crunched:
compressedStream = new ArcLzwStream(
_stream,
(int)Header.CompressedSize,
true
);
break;
default:
throw new NotSupportedException(
"CompressionMethod: " + Header.CompressionMethod
);
}
return compressedStream;
}
return _stream.NotNull();
}
internal override Stream? GetRawStream() => _stream;
}
}

View File

@@ -0,0 +1,16 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Readers;
namespace SharpCompress.Common.Arc
{
public class ArcVolume : Volume
{
public ArcVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
: base(stream, readerOptions, index) { }
}
}

View File

@@ -1,9 +0,0 @@
using System;
namespace SharpCompress.Common;
public class ArchiveException : Exception
{
public ArchiveException(string message)
: base(message) { }
}

View File

@@ -7,4 +7,5 @@ public enum ArchiveType
Tar,
SevenZip,
GZip,
Arc,
}

View File

@@ -22,4 +22,10 @@ public enum CompressionType
Reduce3,
Reduce4,
Explode,
Squeezed,
RLE90,
Crunched,
Squashed,
Crushed,
Distilled,
}

View File

@@ -1,9 +0,0 @@
using System;
namespace SharpCompress.Common;
public class CryptographicException : Exception
{
public CryptographicException(string message)
: base(message) { }
}

View File

@@ -1,12 +0,0 @@
using System;
namespace SharpCompress.Common;
public class ExtractionException : Exception
{
public ExtractionException(string message)
: base(message) { }
public ExtractionException(string message, Exception inner)
: base(message, inner) { }
}

View File

@@ -11,7 +11,7 @@ public abstract class FilePart
internal abstract string? FilePartName { get; }
public int Index { get; set; }
internal abstract Stream GetCompressedStream();
internal abstract Stream? GetCompressedStream();
internal abstract Stream? GetRawStream();
internal bool Skipped { get; set; }
}

View File

@@ -1,7 +0,0 @@
namespace SharpCompress.Common;
public class IncompleteArchiveException : ArchiveException
{
public IncompleteArchiveException(string message)
: base(message) { }
}

View File

@@ -1,12 +0,0 @@
using System;
namespace SharpCompress.Common;
public class InvalidFormatException : ExtractionException
{
public InvalidFormatException(string message)
: base(message) { }
public InvalidFormatException(string message, Exception inner)
: base(message, inner) { }
}

View File

@@ -1,12 +0,0 @@
using System;
namespace SharpCompress.Common;
public class MultiVolumeExtractionException : ExtractionException
{
public MultiVolumeExtractionException(string message)
: base(message) { }
public MultiVolumeExtractionException(string message, Exception inner)
: base(message, inner) { }
}

View File

@@ -1,7 +0,0 @@
namespace SharpCompress.Common;
public class MultipartStreamRequiredException : ExtractionException
{
public MultipartStreamRequiredException(string message)
: base(message) { }
}

View File

@@ -17,7 +17,7 @@ internal class CryptKey5 : ICryptKey
private byte[] _pswCheck = { };
private byte[] _hashKey = { };
public CryptKey5(string password, Rar5CryptoInfo rar5CryptoInfo)
public CryptKey5(string? password, Rar5CryptoInfo rar5CryptoInfo)
{
_password = password ?? "";
_cryptoInfo = rar5CryptoInfo;

View File

@@ -1,8 +1,5 @@
#nullable disable
using System;
using System.Security.Cryptography;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers;

View File

@@ -1,9 +1,6 @@
#nullable disable
using System;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using SharpCompress.IO;
#if !Rar2017_64bit
@@ -18,7 +15,7 @@ namespace SharpCompress.Common.Rar.Headers;
internal class FileHeader : RarHeader
{
private byte[] _hash;
private byte[]? _hash;
public FileHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType)
: base(header, reader, headerType) { }
@@ -319,6 +316,10 @@ internal class FileHeader : RarHeader
if (NewSubHeaderType.SUBHEAD_TYPE_RR.Equals(fileNameBytes))
{
if (SubData is null)
{
throw new InvalidFormatException();
}
RecoverySectors =
SubData[8]
+ (SubData[9] << 8)
@@ -340,12 +341,16 @@ internal class FileHeader : RarHeader
if (RemainingHeaderBytes(reader) >= 2)
{
var extendedFlags = reader.ReadUInt16();
FileLastModifiedTime = ProcessExtendedTimeV4(
extendedFlags,
FileLastModifiedTime,
reader,
0
);
if (FileLastModifiedTime is not null)
{
FileLastModifiedTime = ProcessExtendedTimeV4(
extendedFlags,
FileLastModifiedTime,
reader,
0
);
}
FileCreatedTime = ProcessExtendedTimeV4(extendedFlags, null, reader, 1);
FileLastAccessedTime = ProcessExtendedTimeV4(extendedFlags, null, reader, 2);
FileArchivedTime = ProcessExtendedTimeV4(extendedFlags, null, reader, 3);
@@ -377,7 +382,7 @@ internal class FileHeader : RarHeader
var dosTime = reader.ReadUInt32();
time = Utility.DosDateToDateTime(dosTime);
}
if ((rmode & 4) == 0)
if ((rmode & 4) == 0 && time is not null)
{
time = time.Value.AddSeconds(1);
}
@@ -390,7 +395,11 @@ internal class FileHeader : RarHeader
}
//10^-7 to 10^-3
return time.Value.AddMilliseconds(nanosecondHundreds * Math.Pow(10, -4));
if (time is not null)
{
return time.Value.AddMilliseconds(nanosecondHundreds * Math.Pow(10, -4));
}
return null;
}
private static string ConvertPathV4(string path)
@@ -406,13 +415,13 @@ internal class FileHeader : RarHeader
return path;
}
public override string ToString() => FileName;
public override string ToString() => FileName ?? "FileHeader";
private ushort Flags { get; set; }
private bool HasFlag(ushort flag) => (Flags & flag) == flag;
internal byte[] FileCrc
internal byte[]? FileCrc
{
get => _hash;
private set => _hash = value;
@@ -441,22 +450,22 @@ internal class FileHeader : RarHeader
public bool IsRedir => RedirType != 0;
public byte RedirFlags { get; private set; }
public bool IsRedirDirectory => (RedirFlags & RedirFlagV5.DIRECTORY) != 0;
public string RedirTargetName { get; private set; }
public string? RedirTargetName { get; private set; }
// unused for UnpackV1 implementation (limitation)
internal size_t WindowSize { get; private set; }
internal byte[] R4Salt { get; private set; }
internal Rar5CryptoInfo Rar5CryptoInfo { get; private set; }
internal byte[]? R4Salt { get; private set; }
internal Rar5CryptoInfo? Rar5CryptoInfo { get; private set; }
private byte HostOs { get; set; }
internal uint FileAttributes { get; private set; }
internal long CompressedSize { get; private set; }
internal long UncompressedSize { get; private set; }
internal string FileName { get; private set; }
internal byte[] SubData { get; private set; }
internal string? FileName { get; private set; }
internal byte[]? SubData { get; private set; }
internal int RecoverySectors { get; private set; }
internal long DataStartPosition { get; set; }
public Stream PackedStream { get; set; }
public Stream? PackedStream { get; set; }
public bool IsSplitBefore =>
IsRar5 ? HasHeaderFlag(HeaderFlagsV5.SPLIT_BEFORE) : HasFlag(FileFlagsV4.SPLIT_BEFORE);

View File

@@ -1,5 +1,4 @@
using System;
using System.IO;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers;
@@ -21,7 +20,7 @@ internal class RarHeader : IRarHeader
{
return new RarHeader(reader, isRar5, archiveEncoding);
}
catch (EndOfStreamException)
catch (InvalidFormatException)
{
return null;
}

View File

@@ -1,7 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common.Rar;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -160,10 +158,15 @@ public class RarHeaderFactory
{
fh.PackedStream = new RarCryptoWrapper(
ms,
fh.R4Salt is null ? fh.Rar5CryptoInfo.Salt : fh.R4Salt,
fh.R4Salt is null
? new CryptKey5(Options.Password!, fh.Rar5CryptoInfo)
: new CryptKey3(Options.Password!)
? fh.Rar5CryptoInfo.NotNull().Salt
: fh.R4Salt,
fh.R4Salt is null
? new CryptKey5(
Options.Password,
fh.Rar5CryptoInfo.NotNull()
)
: new CryptKey3(Options.Password)
);
}
}

View File

@@ -20,7 +20,7 @@ public abstract class RarEntry : Entry
/// <summary>
/// The File's 32 bit CRC Hash
/// </summary>
public override long Crc => BitConverter.ToUInt32(FileHeader.FileCrc, 0);
public override long Crc => BitConverter.ToUInt32(FileHeader.FileCrc.NotNull(), 0);
/// <summary>
/// The path of the file internal to the Rar Archive.
@@ -68,7 +68,7 @@ public abstract class RarEntry : Entry
public bool IsRedir => FileHeader.IsRedir;
public string RedirTargetName => FileHeader.RedirTargetName;
public string? RedirTargetName => FileHeader.RedirTargetName;
public override string ToString() =>
string.Format(

View File

@@ -62,7 +62,7 @@ public abstract class RarVolume : Volume
if (fh.FileName == "CMT")
{
var buffer = new byte[fh.CompressedSize];
fh.PackedStream.Read(buffer, 0, buffer.Length);
fh.PackedStream.NotNull().ReadFully(buffer);
Comment = Encoding.UTF8.GetString(buffer, 0, buffer.Length - 1);
}
}

View File

@@ -1,12 +0,0 @@
using System;
namespace SharpCompress.Common;
public class ReaderCancelledException : Exception
{
public ReaderCancelledException(string message)
: base(message) { }
public ReaderCancelledException(string message, Exception inner)
: base(message, inner) { }
}

View File

@@ -784,7 +784,7 @@ internal class ArchiveReader
);
break;
default:
throw new InvalidOperationException();
throw new InvalidFormatException();
}
}
}
@@ -843,7 +843,7 @@ internal class ArchiveReader
outStream.ReadExact(data, 0, data.Length);
if (outStream.ReadByte() >= 0)
{
throw new InvalidOperationException("Decoded stream is longer than expected.");
throw new InvalidFormatException("Decoded stream is longer than expected.");
}
dataVector.Add(data);
@@ -854,7 +854,7 @@ internal class ArchiveReader
!= folder._unpackCrc
)
{
throw new InvalidOperationException(
throw new InvalidFormatException(
"Decoded stream does not match expected CRC."
);
}
@@ -996,6 +996,11 @@ internal class ArchiveReader
numFiles,
delegate(int i, uint? attr)
{
// Keep the original attribute value because it could potentially get
// modified in the logic that follows. Some callers of the library may
// find the original value useful.
db._files[i].ExtendedAttrib = attr;
// Some third party implementations established an unofficial extension
// of the 7z archive format by placing posix file attributes in the high
// bits of the windows file attributes. This makes use of the fact that
@@ -1458,7 +1463,7 @@ internal class ArchiveReader
#if DEBUG
Log.WriteLine(_db._files[index].Name);
#endif
if (_db._files[index].CrcDefined)
if (_db._files[index].Crc.HasValue)
{
_stream = new CrcCheckStream(_db._files[index].Crc.Value);
}

View File

@@ -8,18 +8,13 @@ internal class CFileItem
{
public long Size { get; internal set; }
public uint? Attrib { get; internal set; }
public uint? ExtendedAttrib { get; internal set; }
public uint? Crc { get; internal set; }
public string Name { get; internal set; }
public bool HasStream { get; internal set; }
public bool IsDir { get; internal set; }
public bool CrcDefined => Crc != null;
public bool AttribDefined => Attrib != null;
public void SetAttrib(uint attrib) => Attrib = attrib;
public DateTime? CTime { get; internal set; }
public DateTime? ATime { get; internal set; }
public DateTime? MTime { get; internal set; }

View File

@@ -38,5 +38,8 @@ public class SevenZipEntry : Entry
public override int? Attrib =>
FilePart.Header.Attrib.HasValue ? (int?)FilePart.Header.Attrib.Value : null;
public int? ExtendedAttrib =>
FilePart.Header.ExtendedAttrib.HasValue ? (int?)FilePart.Header.ExtendedAttrib.Value : null;
internal override IEnumerable<FilePart> Parts => FilePart.AsEnumerable<FilePart>();
}

View File

@@ -1,4 +1,3 @@
using System;
using System.IO;
using System.Linq;
using SharpCompress.IO;
@@ -41,7 +40,7 @@ internal class SevenZipFilePart : FilePart
{
if (!Header.HasStream)
{
throw new InvalidOperationException("File does not have a stream.");
return Stream.Null;
}
var folderStream = _database.GetFolderStream(_stream, Folder!, _database.PasswordProvider);
@@ -86,7 +85,7 @@ internal class SevenZipFilePart : FilePart
K_LZMA or K_LZMA2 => CompressionType.LZMA,
K_PPMD => CompressionType.PPMd,
K_B_ZIP2 => CompressionType.BZip2,
_ => throw new NotImplementedException(),
_ => throw new InvalidFormatException(),
};
}

View File

@@ -0,0 +1,48 @@
using System;
namespace SharpCompress.Common;
public class SharpCompressException : Exception
{
public SharpCompressException() { }
public SharpCompressException(string message)
: base(message) { }
public SharpCompressException(string message, Exception inner)
: base(message, inner) { }
}
public class ArchiveException(string message) : SharpCompressException(message);
public class IncompleteArchiveException(string message) : ArchiveException(message);
public class CryptographicException(string message) : SharpCompressException(message);
public class ReaderCancelledException(string message) : SharpCompressException(message);
public class ExtractionException : SharpCompressException
{
public ExtractionException() { }
public ExtractionException(string message)
: base(message) { }
public ExtractionException(string message, Exception inner)
: base(message, inner) { }
}
public class MultipartStreamRequiredException(string message) : ExtractionException(message);
public class MultiVolumeExtractionException(string message) : ExtractionException(message);
public class InvalidFormatException : ExtractionException
{
public InvalidFormatException() { }
public InvalidFormatException(string message)
: base(message) { }
public InvalidFormatException(string message, Exception inner)
: base(message, inner) { }
}

View File

@@ -216,7 +216,7 @@ internal sealed class TarHeader
if (buffer.Length != 0 && buffer.Length < BLOCK_SIZE)
{
throw new InvalidOperationException("Buffer is invalid size");
throw new InvalidFormatException("Buffer is invalid size");
}
return buffer;
}

View File

@@ -1,6 +1,5 @@
using System.IO;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip;
@@ -8,18 +7,13 @@ internal class SeekableZipFilePart : ZipFilePart
{
private bool _isLocalHeaderLoaded;
private readonly SeekableZipHeaderFactory _headerFactory;
private readonly DirectoryEntryHeader _directoryEntryHeader;
internal SeekableZipFilePart(
SeekableZipHeaderFactory headerFactory,
DirectoryEntryHeader header,
Stream stream
)
: base(header, stream)
{
_headerFactory = headerFactory;
_directoryEntryHeader = header;
}
: base(header, stream) => _headerFactory = headerFactory;
internal override Stream GetCompressedStream()
{
@@ -44,16 +38,6 @@ internal class SeekableZipFilePart : ZipFilePart
{
BaseStream.Position = Header.DataStartPosition.NotNull();
if (
(Header.CompressedSize == 0)
&& FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor)
&& _directoryEntryHeader.HasData
&& (_directoryEntryHeader.CompressedSize != 0)
)
{
return new ReadOnlySubStream(BaseStream, _directoryEntryHeader.CompressedSize);
}
return BaseStream;
}
}

View File

@@ -149,6 +149,12 @@ internal sealed class SeekableZipHeaderFactory : ZipHeaderFactory
{
throw new InvalidOperationException();
}
if (FlagUtility.HasFlag(localEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor))
{
localEntryHeader.Crc = directoryEntryHeader.Crc;
localEntryHeader.CompressedSize = directoryEntryHeader.CompressedSize;
localEntryHeader.UncompressedSize = directoryEntryHeader.UncompressedSize;
}
return localEntryHeader;
}
}

View File

@@ -1,5 +1,4 @@
using System.IO;
using System.Net.Sockets;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.Deflate;
using SharpCompress.IO;

View File

@@ -0,0 +1,199 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Compressors.RLE90;
using SharpCompress.Compressors.Squeezed;
public partial class ArcLzwStream : Stream
{
private Stream _stream;
private bool _processed;
private bool _useCrunched;
private int _compressedSize;
private const int BITS = 12;
private const int CRUNCH_BITS = 12;
private const int SQUASH_BITS = 13;
private const int INIT_BITS = 9;
private const ushort FIRST = 257;
private const ushort CLEAR = 256;
private ushort oldcode;
private byte finchar;
private int n_bits;
private ushort maxcode;
private ushort[] prefix = new ushort[8191];
private byte[] suffix = new byte[8191];
private bool clearFlag;
private Stack<byte> stack = new Stack<byte>();
private ushort freeEnt;
private ushort maxcodemax;
public ArcLzwStream(Stream stream, int compressedSize, bool useCrunched = true)
{
_stream = stream;
_useCrunched = useCrunched;
_compressedSize = compressedSize;
oldcode = 0;
finchar = 0;
n_bits = 0;
maxcode = 0;
clearFlag = false;
freeEnt = FIRST;
maxcodemax = 0;
}
private ushort? GetCode(BitReader reader)
{
if (clearFlag || freeEnt > maxcode)
{
if (freeEnt > maxcode)
{
n_bits++;
maxcode = (n_bits == BITS) ? maxcodemax : (ushort)((1 << n_bits) - 1);
}
if (clearFlag)
{
clearFlag = false;
n_bits = INIT_BITS;
maxcode = (ushort)((1 << n_bits) - 1);
}
}
return (ushort?)reader.ReadBits(n_bits);
}
public List<byte> Decompress(byte[] input, bool useCrunched)
{
var result = new List<byte>();
int bits = useCrunched ? CRUNCH_BITS : SQUASH_BITS;
if (useCrunched)
{
if (input[0] != BITS)
{
throw new InvalidDataException($"File packed with {input[0]}, expected {BITS}.");
}
input = input.Skip(1).ToArray();
}
maxcodemax = (ushort)(1 << bits);
clearFlag = false;
n_bits = INIT_BITS;
maxcode = (ushort)((1 << n_bits) - 1);
for (int i = 0; i < 256; i++)
{
suffix[i] = (byte)i;
}
var reader = new BitReader(input);
freeEnt = FIRST;
if (GetCode(reader) is ushort old)
{
oldcode = old;
finchar = (byte)oldcode;
result.Add(finchar);
}
while (GetCode(reader) is ushort code)
{
if (code == CLEAR)
{
Array.Clear(prefix, 0, prefix.Length);
clearFlag = true;
freeEnt = (ushort)(FIRST - 1);
if (GetCode(reader) is ushort c)
{
code = c;
}
else
{
break;
}
}
ushort incode = code;
if (code >= freeEnt)
{
stack.Push(finchar);
code = oldcode;
}
while (code >= 256)
{
stack.Push(suffix[code]);
code = prefix[code];
}
finchar = suffix[code];
stack.Push(finchar);
while (stack.Count > 0)
{
result.Add(stack.Pop());
}
code = freeEnt;
if (code < maxcodemax)
{
prefix[code] = oldcode;
suffix[code] = finchar;
freeEnt = (ushort)(code + 1);
}
oldcode = incode;
}
return result;
}
// Stream base class implementation
public override bool CanRead => true;
public override bool CanSeek => false;
public override bool CanWrite => false;
public override long Length => throw new NotImplementedException();
public override long Position
{
get => _stream.Position;
set => throw new NotImplementedException();
}
public override void Flush() => throw new NotImplementedException();
public override int Read(byte[] buffer, int offset, int count)
{
if (_processed)
{
return 0;
}
_processed = true;
var data = new byte[_compressedSize];
_stream.Read(data, 0, _compressedSize);
var decoded = Decompress(data, _useCrunched);
var result = decoded.Count();
if (_useCrunched)
{
var unpacked = RLE.UnpackRLE(decoded.ToArray());
unpacked.CopyTo(buffer, 0);
result = unpacked.Count;
}
else
{
decoded.CopyTo(buffer, 0);
}
return result;
}
public override long Seek(long offset, SeekOrigin origin) =>
throw new NotImplementedException();
public override void SetLength(long value) => throw new NotImplementedException();
public override void Write(byte[] buffer, int offset, int count) =>
throw new NotImplementedException();
}

View File

@@ -0,0 +1,56 @@
using System;
public partial class ArcLzwStream
{
public class BitReader
{
private readonly byte[] data;
private int bitPosition;
private int bytePosition;
public BitReader(byte[] inputData)
{
data = inputData;
bitPosition = 0;
bytePosition = 0;
}
public int? ReadBits(int bitCount)
{
if (bitCount <= 0 || bitCount > 16)
throw new ArgumentOutOfRangeException(
nameof(bitCount),
"Bit count must be between 1 and 16"
);
if (bytePosition >= data.Length)
return null;
int result = 0;
int bitsRead = 0;
while (bitsRead < bitCount)
{
if (bytePosition >= data.Length)
return null;
int bitsAvailable = 8 - bitPosition;
int bitsToRead = Math.Min(bitCount - bitsRead, bitsAvailable);
int mask = (1 << bitsToRead) - 1;
result |= ((data[bytePosition] >> bitPosition) & mask) << bitsRead;
bitPosition += bitsToRead;
bitsRead += bitsToRead;
if (bitPosition >= 8)
{
bitPosition = 0;
bytePosition++;
}
}
return (ushort)result;
}
}
}

View File

@@ -542,9 +542,14 @@ internal class CBZip2InputStream : Stream
{
j++;
}
selectorMtf[i] = (char)j;
if (i < BZip2Constants.MAX_SELECTORS)
{
selectorMtf[i] = (char)j;
}
}
nSelectors = Math.Min(nSelectors, BZip2Constants.MAX_SELECTORS);
/* Undo the MTF values for the selectors. */
{
var pos = new char[BZip2Constants.N_GROUPS];

View File

@@ -62,8 +62,8 @@
//
// -----------------------------------------------------------------------
using System;
using System.IO;
using SharpCompress.Common;
namespace SharpCompress.Compressors.Deflate;
@@ -177,7 +177,7 @@ public enum CompressionStrategy
/// <summary>
/// A general purpose exception class for exceptions in the Zlib library.
/// </summary>
public class ZlibException : Exception
public class ZlibException : SharpCompressException
{
/// <summary>
/// The ZlibException class captures exception information generated

View File

@@ -60,7 +60,6 @@
//
// -----------------------------------------------------------------------
namespace SharpCompress.Compressors.Deflate;
/// <summary>

View File

@@ -8,6 +8,7 @@ using System;
using System.Diagnostics;
using System.IO;
using System.Runtime.CompilerServices;
using SharpCompress.Common;
using SharpCompress.Common.Zip;
namespace SharpCompress.Compressors.Deflate64;
@@ -151,7 +152,7 @@ public sealed class Deflate64Stream : Stream
{
// The stream is either malicious or poorly implemented and returned a number of
// bytes larger than the buffer supplied to it.
throw new InvalidDataException("Deflate64: invalid data");
throw new InvalidFormatException("Deflate64: invalid data");
}
_inflater.SetInput(_buffer, 0, bytes);

View File

@@ -243,7 +243,6 @@ internal static class FastEncoderStatics
// cache locality, fewer memory operations.
//
// Encoding information for literal and Length.
// The least 5 significant bits are the length
// and the rest is the code bits.

View File

@@ -4,7 +4,7 @@
using System;
using System.Diagnostics;
using System.IO;
using SharpCompress.Common;
namespace SharpCompress.Compressors.Deflate64;
@@ -192,7 +192,7 @@ internal sealed class HuffmanTree
var increment = 1 << len;
if (start >= increment)
{
throw new InvalidDataException("Deflate64: invalid Huffman data");
throw new InvalidFormatException("Deflate64: invalid Huffman data");
}
// Note the bits in the table are reverted.
@@ -234,7 +234,7 @@ internal sealed class HuffmanTree
if (value > 0)
{
// prevent an IndexOutOfRangeException from array[index]
throw new InvalidDataException("Deflate64: invalid Huffman data");
throw new InvalidFormatException("Deflate64: invalid Huffman data");
}
Debug.Assert(
@@ -307,7 +307,7 @@ internal sealed class HuffmanTree
// huffman code lengths must be at least 1 bit long
if (codeLength <= 0)
{
throw new InvalidDataException("Deflate64: invalid Huffman data");
throw new InvalidFormatException("Deflate64: invalid Huffman data");
}
//

View File

@@ -30,7 +30,7 @@
using System;
using System.Diagnostics;
using System.IO;
using SharpCompress.Compressors.Deflate;
namespace SharpCompress.Compressors.Deflate64;
@@ -385,7 +385,7 @@ internal sealed class InflaterManaged
}
else
{
throw new InvalidDataException("Deflate64: unknown block type");
throw new ZlibException("Deflate64: unknown block type");
}
}
@@ -411,7 +411,7 @@ internal sealed class InflaterManaged
}
else
{
throw new InvalidDataException("Deflate64: unknown block type");
throw new ZlibException("Deflate64: unknown block type");
}
//
@@ -473,7 +473,7 @@ internal sealed class InflaterManaged
// make sure complement matches
if ((ushort)_blockLength != (ushort)(~blockLengthComplement))
{
throw new InvalidDataException("Deflate64: invalid block length");
throw new ZlibException("Deflate64: invalid block length");
}
}
@@ -507,7 +507,7 @@ internal sealed class InflaterManaged
default:
Debug. /*Fail*/
Assert(false, "check why we are here!");
throw new InvalidDataException("Deflate64: unknown state");
throw new ZlibException("Deflate64: unknown state");
}
}
}
@@ -569,7 +569,7 @@ internal sealed class InflaterManaged
{
if (symbol < 0 || symbol >= S_EXTRA_LENGTH_BITS.Length)
{
throw new InvalidDataException("Deflate64: invalid data");
throw new ZlibException("Deflate64: invalid data");
}
_extraBits = S_EXTRA_LENGTH_BITS[symbol];
Debug.Assert(_extraBits != 0, "We handle other cases separately!");
@@ -591,7 +591,7 @@ internal sealed class InflaterManaged
if (_length < 0 || _length >= S_LENGTH_BASE.Length)
{
throw new InvalidDataException("Deflate64: invalid data");
throw new ZlibException("Deflate64: invalid data");
}
_length = S_LENGTH_BASE[_length] + bits;
}
@@ -649,7 +649,7 @@ internal sealed class InflaterManaged
default:
Debug. /*Fail*/
Assert(false, "check why we are here!");
throw new InvalidDataException("Deflate64: unknown state");
throw new ZlibException("Deflate64: unknown state");
}
}
@@ -781,7 +781,7 @@ internal sealed class InflaterManaged
if (_loopCounter == 0)
{
// can't have "prev code" on first code
throw new InvalidDataException();
throw new ZlibException();
}
var previousCode = _codeList[_loopCounter - 1];
@@ -789,7 +789,7 @@ internal sealed class InflaterManaged
if (_loopCounter + repeatCount > _codeArraySize)
{
throw new InvalidDataException();
throw new ZlibException();
}
for (var j = 0; j < repeatCount; j++)
@@ -809,7 +809,7 @@ internal sealed class InflaterManaged
if (_loopCounter + repeatCount > _codeArraySize)
{
throw new InvalidDataException();
throw new ZlibException();
}
for (var j = 0; j < repeatCount; j++)
@@ -830,7 +830,7 @@ internal sealed class InflaterManaged
if (_loopCounter + repeatCount > _codeArraySize)
{
throw new InvalidDataException();
throw new ZlibException();
}
for (var j = 0; j < repeatCount; j++)
@@ -846,7 +846,7 @@ internal sealed class InflaterManaged
default:
Debug. /*Fail*/
Assert(false, "check why we are here!");
throw new InvalidDataException("Deflate64: unknown state");
throw new ZlibException("Deflate64: unknown state");
}
var literalTreeCodeLength = new byte[HuffmanTree.MAX_LITERAL_TREE_ELEMENTS];
@@ -865,7 +865,7 @@ internal sealed class InflaterManaged
// Make sure there is an end-of-block code, otherwise how could we ever end?
if (literalTreeCodeLength[HuffmanTree.END_OF_BLOCK_CODE] == 0)
{
throw new InvalidDataException();
throw new ZlibException();
}
_literalLengthTree = new HuffmanTree(literalTreeCodeLength);

View File

@@ -5,8 +5,8 @@
*/
using System;
using System.IO;
using System.Runtime.CompilerServices;
using SharpCompress.Common;
namespace SharpCompress.Compressors.Filters;
@@ -244,7 +244,7 @@ public sealed class BranchExecFilter
long size = data.Length;
if (size < 16)
{
throw new InvalidDataException("Unexpected data size");
throw new InvalidFormatException("Unexpected data size");
}
size -= 16;

View File

@@ -1,4 +1,3 @@
using System;
using System.IO;
namespace SharpCompress.Compressors.Filters

View File

@@ -1,5 +1,6 @@
using System;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Common.SevenZip;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.IO;
@@ -46,7 +47,7 @@ internal static class DecoderStreamHelper
}
}
throw new InvalidOperationException("Could not link output stream to coder.");
throw new InvalidFormatException("Could not link output stream to coder.");
}
private static void FindPrimaryOutStreamIndex(
@@ -75,7 +76,7 @@ internal static class DecoderStreamHelper
{
if (foundPrimaryOutStream)
{
throw new NotSupportedException("Multiple output streams.");
throw new InvalidFormatException("Multiple output streams.");
}
foundPrimaryOutStream = true;
@@ -87,7 +88,7 @@ internal static class DecoderStreamHelper
if (!foundPrimaryOutStream)
{
throw new NotSupportedException("No output stream.");
throw new InvalidFormatException("No output stream.");
}
}

View File

@@ -85,28 +85,56 @@ internal class OutWindow
_streamPos = _pos;
}
public void CopyBlock(int distance, int len)
public void CopyPending()
{
var size = len;
var pos = _pos - distance - 1;
if (pos < 0)
if (_pendingLen < 1)
{
pos += _windowSize;
return;
}
for (; size > 0 && _pos < _windowSize && _total < _limit; size--)
var rem = _pendingLen;
var pos = (_pendingDist < _pos ? _pos : _pos + _windowSize) - _pendingDist - 1;
while (rem > 0 && HasSpace)
{
if (pos >= _windowSize)
{
pos = 0;
}
_buffer[_pos++] = _buffer[pos++];
_total++;
PutByte(_buffer[pos++]);
rem--;
}
_pendingLen = rem;
}
public void CopyBlock(int distance, int len)
{
var rem = len;
var pos = (distance < _pos ? _pos : _pos + _windowSize) - distance - 1;
var targetSize = HasSpace ? (int)Math.Min(rem, _limit - _total) : 0;
var sizeUntilWindowEnd = Math.Min(_windowSize - _pos, _windowSize - pos);
var sizeUntilOverlap = Math.Abs(pos - _pos);
var fastSize = Math.Min(Math.Min(sizeUntilWindowEnd, sizeUntilOverlap), targetSize);
if (fastSize >= 2)
{
_buffer.AsSpan(pos, fastSize).CopyTo(_buffer.AsSpan(_pos, fastSize));
_pos += fastSize;
pos += fastSize;
_total += fastSize;
if (_pos >= _windowSize)
{
Flush();
}
rem -= fastSize;
}
_pendingLen = size;
while (rem > 0 && HasSpace)
{
if (pos >= _windowSize)
{
pos = 0;
}
PutByte(_buffer[pos++]);
rem--;
}
_pendingLen = rem;
_pendingDist = distance;
}
@@ -188,12 +216,23 @@ internal class OutWindow
return size;
}
public void CopyPending()
public int ReadByte()
{
if (_pendingLen > 0)
if (_streamPos >= _pos)
{
CopyBlock(_pendingDist, _pendingLen);
return -1;
}
int value = _buffer[_streamPos];
_streamPos++;
if (_streamPos >= _windowSize)
{
_pos = 0;
_streamPos = 0;
}
return value;
}
public int AvailableBytes => _pos - _streamPos;

View File

@@ -1,6 +1,7 @@
using System;
using System.Buffers.Binary;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Crypto;
using SharpCompress.IO;
@@ -32,7 +33,7 @@ public sealed class LZipStream : Stream
var dSize = ValidateAndReadSize(stream);
if (dSize == 0)
{
throw new IOException("Not an LZip stream");
throw new InvalidFormatException("Not an LZip stream");
}
var properties = GetProperties(dSize);
_stream = new LzmaStream(properties, stream);
@@ -167,11 +168,6 @@ public sealed class LZipStream : Stream
/// </summary>
public static int ValidateAndReadSize(Stream stream)
{
if (stream is null)
{
throw new ArgumentNullException(nameof(stream));
}
// Read the header
Span<byte> header = stackalloc byte[6];
var n = stream.Read(header);
@@ -198,33 +194,25 @@ public sealed class LZipStream : Stream
return (1 << basePower) - (subtractionNumerator * (1 << (basePower - 4)));
}
private static readonly byte[] headerBytes = new byte[6]
{
private static readonly byte[] headerBytes =
[
(byte)'L',
(byte)'Z',
(byte)'I',
(byte)'P',
1,
113,
};
public static void WriteHeaderSize(Stream stream)
{
if (stream is null)
{
throw new ArgumentNullException(nameof(stream));
}
];
public static void WriteHeaderSize(Stream stream) =>
// hard coding the dictionary size encoding
stream.Write(headerBytes, 0, 6);
}
/// <summary>
/// Creates a byte array to communicate the parameters and dictionary size to LzmaStream.
/// </summary>
private static byte[] GetProperties(int dictionarySize) =>
new byte[]
{
[
// Parameters as per http://www.nongnu.org/lzip/manual/lzip_manual.html#Stream-format
// but encoded as a single byte in the format LzmaStream expects.
// literal_context_bits = 3
@@ -236,5 +224,5 @@ public sealed class LZipStream : Stream
(byte)((dictionarySize >> 8) & 0xff),
(byte)((dictionarySize >> 16) & 0xff),
(byte)((dictionarySize >> 24) & 0xff),
};
];
}

View File

@@ -2,6 +2,7 @@
using System;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Compressors.LZMA.LZ;
using SharpCompress.Compressors.LZMA.RangeCoder;
@@ -1611,7 +1612,7 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
{
if (_nowPos64 > 0)
{
throw new InvalidOperationException();
throw new InvalidFormatException();
}
_trainSize = (uint)trainStream.Length;
if (_trainSize > 0)

View File

@@ -247,6 +247,82 @@ public class LzmaStream : Stream
return total;
}
public override int ReadByte()
{
if (_endReached)
{
return -1;
}
if (_availableBytes == 0)
{
if (_isLzma2)
{
DecodeChunkHeader();
}
else
{
_endReached = true;
}
}
if (_endReached)
{
if (_inputSize >= 0 && _inputPosition != _inputSize)
{
throw new DataErrorException();
}
if (_outputSize >= 0 && _position != _outputSize)
{
throw new DataErrorException();
}
return -1;
}
_outWindow.SetLimit(1);
if (_uncompressedChunk)
{
_inputPosition += _outWindow.CopyStream(_inputStream, 1);
}
else if (_decoder.Code(_dictionarySize, _outWindow, _rangeDecoder) && _outputSize < 0)
{
_availableBytes = _outWindow.AvailableBytes;
}
var value = _outWindow.ReadByte();
_position++;
_availableBytes--;
if (_availableBytes == 0 && !_uncompressedChunk)
{
// Check range corruption scenario
if (
!_rangeDecoder.IsFinished
|| (_rangeDecoderLimit >= 0 && _rangeDecoder._total != _rangeDecoderLimit)
)
{
// Stream might have End Of Stream marker
_outWindow.SetLimit(2);
if (!_decoder.Code(_dictionarySize, _outWindow, _rangeDecoder))
{
_rangeDecoder.ReleaseStream();
throw new DataErrorException();
}
}
_rangeDecoder.ReleaseStream();
_inputPosition += _rangeDecoder._total;
if (_outWindow.HasPending)
{
throw new DataErrorException();
}
}
return value;
}
private void DecodeChunkHeader()
{
var control = _inputStream.ReadByte();

View File

@@ -1,6 +1,7 @@
#nullable disable
using System.IO;
using System.Runtime.CompilerServices;
namespace SharpCompress.Compressors.LZMA.RangeCoder;
@@ -152,6 +153,7 @@ internal class Decoder
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void Normalize2()
{
if (_range < K_TOP_VALUE)

View File

@@ -107,24 +107,13 @@ internal struct BitDecoder
{
rangeDecoder._range = newBound;
_prob += (K_BIT_MODEL_TOTAL - _prob) >> K_NUM_MOVE_BITS;
if (rangeDecoder._range < Decoder.K_TOP_VALUE)
{
rangeDecoder._code =
(rangeDecoder._code << 8) | (byte)rangeDecoder._stream.ReadByte();
rangeDecoder._range <<= 8;
rangeDecoder._total++;
}
rangeDecoder.Normalize2();
return 0;
}
rangeDecoder._range -= newBound;
rangeDecoder._code -= newBound;
_prob -= (_prob) >> K_NUM_MOVE_BITS;
if (rangeDecoder._range < Decoder.K_TOP_VALUE)
{
rangeDecoder._code = (rangeDecoder._code << 8) | (byte)rangeDecoder._stream.ReadByte();
rangeDecoder._range <<= 8;
rangeDecoder._total++;
}
rangeDecoder.Normalize2();
return 1;
}
}

View File

@@ -1,7 +1,6 @@
#region Using
#endregion
namespace SharpCompress.Compressors.PPMd.I1;

View File

@@ -1,7 +1,6 @@
#region Using
#endregion
namespace SharpCompress.Compressors.PPMd.I1;

View File

@@ -0,0 +1,52 @@
using System.Collections.Generic;
using System.Linq;
namespace SharpCompress.Compressors.RLE90
{
public static class RLE
{
private const byte DLE = 0x90;
/// <summary>
/// Unpacks an RLE compressed buffer.
/// Format: <char> DLE <count>, where count == 0 -> DLE
/// </summary>
/// <param name="compressedBuffer">The compressed buffer to unpack.</param>
/// <returns>A list of unpacked bytes.</returns>
public static List<byte> UnpackRLE(byte[] compressedBuffer)
{
var result = new List<byte>(compressedBuffer.Length * 2); // Optimized initial capacity
var countMode = false;
byte last = 0;
foreach (var c in compressedBuffer)
{
if (!countMode)
{
if (c == DLE)
{
countMode = true;
}
else
{
result.Add(c);
last = c;
}
}
else
{
countMode = false;
if (c == 0)
{
result.Add(DLE);
}
else
{
result.AddRange(Enumerable.Repeat(last, c - 1));
}
}
}
return result;
}
}
}

View File

@@ -0,0 +1,64 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.RLE90
{
public class RunLength90Stream : Stream
{
private readonly Stream _stream;
private const byte DLE = 0x90;
private int _compressedSize;
private bool _processed = false;
public RunLength90Stream(Stream stream, int compressedSize)
{
_stream = stream;
_compressedSize = compressedSize;
}
public override bool CanRead => true;
public override bool CanSeek => false;
public override bool CanWrite => false;
public override long Length => throw new NotImplementedException();
public override long Position
{
get => _stream.Position;
set => throw new NotImplementedException();
}
public override void Flush() => throw new NotImplementedException();
public override int Read(byte[] buffer, int offset, int count)
{
if (_processed)
{
return 0;
}
_processed = true;
using var binaryReader = new BinaryReader(_stream);
byte[] compressedBuffer = binaryReader.ReadBytes(_compressedSize);
var unpacked = RLE.UnpackRLE(compressedBuffer);
unpacked.CopyTo(buffer);
return unpacked.Count;
}
public override long Seek(long offset, SeekOrigin origin) =>
throw new NotImplementedException();
public override void SetLength(long value) => throw new NotImplementedException();
public override void Write(byte[] buffer, int offset, int count) =>
throw new NotImplementedException();
}
}

View File

@@ -1,7 +1,6 @@
using System;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using SharpCompress.Common;
using SharpCompress.Common.Rar.Headers;
@@ -93,7 +92,7 @@ internal class RarBLAKE2spStream : RarStream
{
this.readStream = readStream;
disableCRCCheck = fileHeader.IsEncrypted;
_hash = fileHeader.FileCrc;
_hash = fileHeader.FileCrc.NotNull();
_blake2sp = new BLAKE2SP();
ResetCrc();
}

View File

@@ -263,7 +263,7 @@ internal sealed partial class Unpack : BitInput, IRarUnpack, IDisposable
if (((wrPtr - unpPtr) & PackDef.MAXWINMASK) < 260 && wrPtr != unpPtr)
{
UnpWriteBuf();
if (destUnpSize <= 0)
if (destUnpSize < 0)
{
return;
}
@@ -713,7 +713,9 @@ internal sealed partial class Unpack : BitInput, IRarUnpack, IDisposable
private void UnpWriteData(byte[] data, int offset, int size)
{
if (destUnpSize <= 0)
// allow destUnpSize == 0 here to ensure that 0 size writes
// go through RarStream's Write so that Suspended is set correctly
if (destUnpSize < 0)
{
return;
}

View File

@@ -14,7 +14,13 @@ namespace SharpCompress.Compressors.Rar.UnpackV1;
internal partial class Unpack
{
private readonly MultDecode[] MD = new MultDecode[4];
private readonly MultDecode[] MD = new[]
{
new MultDecode(),
new MultDecode(),
new MultDecode(),
new MultDecode(),
};
private readonly byte[] UnpOldTable20 = new byte[PackDef.MC20 * 4];
@@ -369,7 +375,7 @@ internal partial class Unpack
destUnpSize -= Length;
var DestPtr = unpPtr - Distance;
if (DestPtr < PackDef.MAXWINSIZE - 300 && unpPtr < PackDef.MAXWINSIZE - 300)
if (DestPtr >= 0 && DestPtr < PackDef.MAXWINSIZE - 300 && unpPtr < PackDef.MAXWINSIZE - 300)
{
window[unpPtr++] = window[DestPtr++];
window[unpPtr++] = window[DestPtr++];
@@ -475,6 +481,31 @@ internal partial class Unpack
{
Table[I++] = 0;
}
// Nanook. Working port from Rar C code. Added when working on Audio Decode Fix. Seems equal to above, so commented it
//byte v;
//if (Number == 16)
//{
// N = (Utility.URShift(GetBits(), 14)) + 3;
// AddBits(2);
// v = Table[I - 1];
//}
//else
//{
// N = (Number - 17) * 4;
// int bits = 3 + N;
// N += N + 3 + (Utility.URShift(GetBits(), 16 - bits));
// AddBits(bits);
// v = 0;
//}
//N += I;
//if (N > TableSize)
//{
// N = TableSize; // original unRAR
//}
//do
//{
// Table[I++] = v;
//} while (I < N);
}
}
if (inAddr > readTop)
@@ -559,8 +590,7 @@ internal partial class Unpack
PCh = (Utility.URShift(PCh, 3)) & 0xFF;
var Ch = PCh - Delta;
var D = ((byte)Delta) << 3;
var D = ((sbyte)Delta) << 3;
v.Dif[0] += Math.Abs(D); // V->Dif[0]+=abs(D);
v.Dif[1] += Math.Abs(D - v.D1); // V->Dif[1]+=abs(D-V->D1);
@@ -574,7 +604,7 @@ internal partial class Unpack
v.Dif[9] += Math.Abs(D - UnpChannelDelta); // V->Dif[9]+=abs(D-UnpChannelDelta);
v.Dif[10] += Math.Abs(D + UnpChannelDelta); // V->Dif[10]+=abs(D+UnpChannelDelta);
v.LastDelta = (byte)(Ch - v.LastChar);
v.LastDelta = (sbyte)(Ch - v.LastChar);
UnpChannelDelta = v.LastDelta;
v.LastChar = Ch; // V->LastChar=Ch;

View File

@@ -622,7 +622,6 @@ internal partial class Unpack
// WriteBorder=WrPtr;
// }
// unused
//x byte* ApplyFilter(byte *Data,uint DataSize,UnpackFilter *Flt)
// byte[] ApplyFilter(byte []Data, uint DataSize, UnpackFilter Flt)

View File

@@ -36,7 +36,6 @@ internal static class PackDef
// CODE_ENDFILE, CODE_FILTER, CODE_FILTERDATA
// };
//enum FilterType {
// These values must not be changed, because we use them directly
// in RAR5 compression and decompression code.

View File

@@ -7,7 +7,6 @@ using nuint = System.UInt64;
using size_t = System.UInt64;
#endif*/
namespace SharpCompress.Compressors.Rar.UnpackV2017;
internal partial class Unpack
@@ -69,7 +68,6 @@ internal partial class Unpack
//#endif
//}
//#if defined(LITTLE_ENDIAN) && defined(ALLOW_MISALIGNED)
//#define USE_MEM_BYTESWAP
//#endif
@@ -86,7 +84,6 @@ internal partial class Unpack
//#endif
//}
// Save integer to memory as big endian.
//inline void RawPutBE4(uint32 i,byte *mem)
//{
@@ -102,7 +99,6 @@ internal partial class Unpack
//#endif
//}
//inline uint32 ByteSwap32(uint32 i)
//{
//#ifdef _MSC_VER

View File

@@ -9,7 +9,6 @@ using nuint = System.UInt64;
using size_t = System.UInt64;
#endif*/
namespace SharpCompress.Compressors.Rar.UnpackV2017;
internal partial class Unpack

View File

@@ -3,7 +3,6 @@
using System;
using static SharpCompress.Compressors.Rar.UnpackV2017.PackDef;
using static SharpCompress.Compressors.Rar.UnpackV2017.UnpackGlobal;
using int64 = System.Int64;
#if !Rar2017_64bit
using size_t = System.UInt32;
#else

View File

@@ -140,7 +140,6 @@ internal sealed partial class Unpack : BitInput
// sharpcompress: don't need this, freshly allocated above
//memset(NewWindow,0,WinSize);
// If Window is not NULL, it means that window size has grown.
// In solid streams we need to copy data to a new window in such case.
// RAR archiving code does not allow it in solid streams now,

View File

@@ -7,7 +7,6 @@ using nuint = System.UInt64;
using size_t = System.UInt64;
#endif*/
namespace SharpCompress.Compressors.Rar.UnpackV2017;
internal partial class Unpack

View File

@@ -159,7 +159,6 @@ if (Decoded!=NULL)
};
#endif*/
//struct UnpackFilter
internal class UnpackFilter
{

View File

@@ -1,9 +1,3 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.Shrink
{
internal class BitStream

View File

@@ -1,9 +1,5 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.Shrink;

View File

@@ -0,0 +1,48 @@
using System;
using System.IO;
/// <summary>
/// Reads a stream one bit at a time. Bits are consumed least-significant-first
/// within each byte; <see cref="ReadBits"/> packs them back MSB-first.
/// </summary>
public class BitReader
{
    private readonly Stream _source;
    private int _buffer; // unread bits of the current byte; LSB is the next bit
    private int _available; // how many valid bits remain in _buffer

    public BitReader(Stream stream)
    {
        _source = stream;
        _buffer = 0;
        _available = 0;
    }

    /// <summary>
    /// Returns the next bit, refilling the buffer from the stream when empty.
    /// </summary>
    /// <exception cref="EndOfStreamException">The underlying stream is exhausted.</exception>
    public bool ReadBit()
    {
        if (_available == 0)
        {
            var next = _source.ReadByte();
            if (next == -1)
                throw new EndOfStreamException();
            _buffer = next;
            _available = 8;
        }

        var result = (_buffer & 1) != 0;
        _buffer >>= 1;
        _available--;
        return result;
    }

    /// <summary>
    /// Reads <paramref name="count"/> bits (1–32) and packs them into an int,
    /// first bit read becoming the most significant.
    /// </summary>
    /// <exception cref="ArgumentOutOfRangeException">count is outside 1–32.</exception>
    public int ReadBits(int count)
    {
        if (count < 1 || count > 32)
        {
            throw new ArgumentOutOfRangeException(nameof(count), "Count must be between 1 and 32.");
        }

        var accumulated = 0;
        while (count-- > 0)
        {
            accumulated = (accumulated << 1) | (ReadBit() ? 1 : 0);
        }
        return accumulated;
    }
}

View File

@@ -0,0 +1,114 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Compressors.RLE90;
using ZstdSharp.Unsafe;
namespace SharpCompress.Compressors.Squeezed
{
/// <summary>
/// Read-only decompression stream for the SQ ("squeeze") format: a serialized
/// Huffman decode tree followed by a bit stream, whose output is then expanded
/// with RLE90 run-length decoding.
/// </summary>
public class SqueezeStream : Stream
{
    private readonly Stream _stream;
    private readonly int _compressedSize;

    // SQ format limits: 256 byte values plus one special end-of-file leaf.
    private const int NUMVALS = 257;
    private const int SPEOF = 256;

    private bool _processed = false;

    // Fully decompressed payload, produced lazily on the first Read.
    // _outputPosition tracks how much has already been handed to callers,
    // so repeated Read calls with small buffers work correctly.
    private byte[] _decompressed = Array.Empty<byte>();
    private int _outputPosition;

    public SqueezeStream(Stream stream, int compressedSize)
    {
        _stream = stream;
        _compressedSize = compressedSize;
    }

    public override bool CanRead => true;
    public override bool CanSeek => false;
    public override bool CanWrite => false;

    public override long Length => throw new NotImplementedException();

    public override long Position
    {
        get => _stream.Position;
        set => throw new NotImplementedException();
    }

    // Read-only stream: Flush is a no-op per the Stream contract.
    // (Previously threw NotImplementedException, which broke callers that
    // flush defensively, e.g. Stream.CopyTo pipelines.)
    public override void Flush() { }

    public override int Read(byte[] buffer, int offset, int count)
    {
        if (!_processed)
        {
            _processed = true;
            _decompressed = Decompress();
        }

        // Honor offset and count: the previous implementation copied the
        // whole payload to buffer index 0, ignoring both parameters and
        // overflowing buffers smaller than the decompressed data.
        var available = _decompressed.Length - _outputPosition;
        var length = Math.Min(count, available);
        if (length <= 0)
        {
            return 0;
        }
        Array.Copy(_decompressed, _outputPosition, buffer, offset, length);
        _outputPosition += length;
        return length;
    }

    /// <summary>
    /// Decodes the Huffman stage and expands the RLE90 stage in one pass,
    /// returning the complete decompressed payload.
    /// </summary>
    /// <exception cref="InvalidDataException">The node count exceeds the format limit.</exception>
    private byte[] Decompress()
    {
        // Intentionally NOT wrapped in `using`: disposing the BinaryReader
        // would also dispose the caller-owned base stream.
        var binaryReader = new BinaryReader(_stream);

        // Node count of the serialized decode tree (convert_u16!(numnodes, buf)).
        var numnodes = binaryReader.ReadUInt16();
        if (numnodes >= NUMVALS)
        {
            throw new InvalidDataException(
                $"Invalid number of nodes {numnodes} (max {NUMVALS - 1})"
            );
        }

        // An empty tree encodes an empty payload.
        if (numnodes == 0)
        {
            return Array.Empty<byte>();
        }

        // Each node stores left/right child indices; negative values encode
        // leaves as -(value + 1).
        var dnode = new int[numnodes, 2];
        for (int j = 0; j < numnodes; j++)
        {
            dnode[j, 0] = binaryReader.ReadInt16();
            dnode[j, 1] = binaryReader.ReadInt16();
        }

        var bitReader = new BitReader(_stream);
        var decoded = new List<byte>();
        int i = 0;

        // Walk the tree one bit at a time until the end-of-file leaf.
        while (true)
        {
            i = dnode[i, bitReader.ReadBit() ? 1 : 0];
            if (i < 0)
            {
                i = (short)-(i + 1);
                if (i == SPEOF)
                {
                    break;
                }
                decoded.Add((byte)i);
                i = 0;
            }
        }

        // Second stage: expand the RLE90 run-length encoding.
        return RLE.UnpackRLE(decoded.ToArray()).ToArray();
    }

    public override long Seek(long offset, SeekOrigin origin) =>
        throw new NotImplementedException();

    public override void SetLength(long value) => throw new NotImplementedException();

    public override void Write(byte[] buffer, int offset, int count) =>
        throw new NotImplementedException();
}
}

View File

@@ -5,6 +5,7 @@
*/
using System.IO;
using SharpCompress.Common;
using SharpCompress.Compressors.Filters;
namespace SharpCompress.Compressors.Xz.Filters;
@@ -25,19 +26,19 @@ public class ArmFilter : BlockFilter
{
if (properties.Length != 0 && properties.Length != 4)
{
throw new InvalidDataException("ARM properties unexpected length");
throw new InvalidFormatException("ARM properties unexpected length");
}
if (properties.Length == 4)
{
// Even XZ doesn't support it.
throw new InvalidDataException("ARM properties offset is not supported");
throw new InvalidFormatException("ARM properties offset is not supported");
//_offset = BitConverter.ToUInt32(properties, 0);
//
//if (_offset % (UInt32)BranchExec.Alignment.ARCH_ARM_ALIGNMENT != 0)
//{
// throw new InvalidDataException("Filter offset does not match alignment");
// throw new InvalidFormatException("Filter offset does not match alignment");
//}
}
}

View File

@@ -5,6 +5,7 @@
*/
using System.IO;
using SharpCompress.Common;
using SharpCompress.Compressors.Filters;
namespace SharpCompress.Compressors.Xz.Filters;
@@ -25,19 +26,19 @@ public class ArmThumbFilter : BlockFilter
{
if (properties.Length != 0 && properties.Length != 4)
{
throw new InvalidDataException("ARM Thumb properties unexpected length");
throw new InvalidFormatException("ARM Thumb properties unexpected length");
}
if (properties.Length == 4)
{
// Even XZ doesn't support it.
throw new InvalidDataException("ARM Thumb properties offset is not supported");
throw new InvalidFormatException("ARM Thumb properties offset is not supported");
//_offset = BitConverter.ToUInt32(properties, 0);
//
//if (_offset % (UInt32)BranchExec.Alignment.ARCH_ARMTHUMB_ALIGNMENT != 0)
//{
// throw new InvalidDataException("Filter offset does not match alignment");
// throw new InvalidFormatException("Filter offset does not match alignment");
//}
}
}

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
namespace SharpCompress.Compressors.Xz.Filters;
@@ -49,7 +50,7 @@ public abstract class BlockFilter : ReadOnlyStream
var sizeOfProperties = reader.ReadXZInteger();
if (sizeOfProperties > int.MaxValue)
{
throw new InvalidDataException("Block filter information too large");
throw new InvalidFormatException("Block filter information too large");
}
var properties = reader.ReadBytes((int)sizeOfProperties);

View File

@@ -5,6 +5,7 @@
*/
using System.IO;
using SharpCompress.Common;
using SharpCompress.Compressors.Filters;
namespace SharpCompress.Compressors.Xz.Filters;
@@ -25,19 +26,19 @@ public class IA64Filter : BlockFilter
{
if (properties.Length != 0 && properties.Length != 4)
{
throw new InvalidDataException("IA64 properties unexpected length");
throw new InvalidFormatException("IA64 properties unexpected length");
}
if (properties.Length == 4)
{
// Even XZ doesn't support it.
throw new InvalidDataException("IA64 properties offset is not supported");
throw new InvalidFormatException("IA64 properties offset is not supported");
//_offset = BitConverter.ToUInt32(properties, 0);
//
//if (_offset % (UInt32)BranchExec.Alignment.ARCH_IA64_ALIGNMENT != 0)
//{
// throw new InvalidDataException("Filter offset does not match alignment");
// throw new InvalidFormatException("Filter offset does not match alignment");
//}
}
}

View File

@@ -1,5 +1,6 @@
using System;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Compressors.LZMA;
namespace SharpCompress.Compressors.Xz.Filters;
@@ -35,14 +36,14 @@ public class Lzma2Filter : BlockFilter
{
if (properties.Length != 1)
{
throw new InvalidDataException("LZMA properties unexpected length");
throw new InvalidFormatException("LZMA properties unexpected length");
}
_dictionarySize = (byte)(properties[0] & 0x3F);
var reserved = properties[0] & 0xC0;
if (reserved != 0)
{
throw new InvalidDataException("Reserved bits used in LZMA properties");
throw new InvalidFormatException("Reserved bits used in LZMA properties");
}
}

View File

@@ -5,6 +5,7 @@
*/
using System.IO;
using SharpCompress.Common;
using SharpCompress.Compressors.Filters;
namespace SharpCompress.Compressors.Xz.Filters;
@@ -25,19 +26,19 @@ public class PowerPCFilter : BlockFilter
{
if (properties.Length != 0 && properties.Length != 4)
{
throw new InvalidDataException("PPC properties unexpected length");
throw new InvalidFormatException("PPC properties unexpected length");
}
if (properties.Length == 4)
{
// Even XZ doesn't support it.
throw new InvalidDataException("PPC properties offset is not supported");
throw new InvalidFormatException("PPC properties offset is not supported");
//_offset = BitConverter.ToUInt32(properties, 0);
//
//if (_offset % (UInt32)BranchExec.Alignment.ARCH_PowerPC_ALIGNMENT != 0)
//{
// throw new InvalidDataException("Filter offset does not match alignment");
// throw new InvalidFormatException("Filter offset does not match alignment");
//}
}
}

View File

@@ -5,6 +5,7 @@
*/
using System.IO;
using SharpCompress.Common;
using SharpCompress.Compressors.Filters;
namespace SharpCompress.Compressors.Xz.Filters;
@@ -25,19 +26,19 @@ public class SparcFilter : BlockFilter
{
if (properties.Length != 0 && properties.Length != 4)
{
throw new InvalidDataException("SPARC properties unexpected length");
throw new InvalidFormatException("SPARC properties unexpected length");
}
if (properties.Length == 4)
{
// Even XZ doesn't support it.
throw new InvalidDataException("SPARC properties offset is not supported");
throw new InvalidFormatException("SPARC properties offset is not supported");
//_offset = BitConverter.ToUInt32(properties, 0);
//
//if (_offset % (UInt32)BranchExec.Alignment.ARCH_SPARC_ALIGNMENT != 0)
//{
// throw new InvalidDataException("Filter offset does not match alignment");
// throw new InvalidFormatException("Filter offset does not match alignment");
//}
}
}

View File

@@ -5,6 +5,7 @@
*/
using System.IO;
using SharpCompress.Common;
using SharpCompress.Compressors.Filters;
namespace SharpCompress.Compressors.Xz.Filters;
@@ -27,19 +28,19 @@ public class X86Filter : BlockFilter
{
if (properties.Length != 0 && properties.Length != 4)
{
throw new InvalidDataException("X86 properties unexpected length");
throw new InvalidFormatException("X86 properties unexpected length");
}
if (properties.Length == 4)
{
// Even XZ doesn't support it.
throw new InvalidDataException("X86 properties offset is not supported");
throw new InvalidFormatException("X86 properties offset is not supported");
//_offset = BitConverter.ToUInt32(properties, 0);
//
//if (_offset % (UInt32)BranchExec.Alignment.ARCH_x86_ALIGNMENT != 0)
//{
// throw new InvalidDataException("Filter offset does not match alignment");
// throw new InvalidFormatException("Filter offset does not match alignment");
//}
}
}

View File

@@ -1,5 +1,6 @@
using System;
using System.IO;
using SharpCompress.Common;
namespace SharpCompress.Compressors.Xz;
@@ -25,13 +26,13 @@ internal static class MultiByteIntegers
{
if (++i >= MaxBytes)
{
throw new InvalidDataException();
throw new InvalidFormatException();
}
LastByte = reader.ReadByte();
if (LastByte == 0)
{
throw new InvalidDataException();
throw new InvalidFormatException();
}
Output |= ((ulong)(LastByte & 0x7F)) << (i * 7);

View File

@@ -4,6 +4,7 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.Compressors.Xz.Filters;
namespace SharpCompress.Compressors.Xz;
@@ -80,7 +81,7 @@ public sealed class XZBlock : XZReadOnlyStream
BaseStream.Read(paddingBytes, 0, paddingBytes.Length);
if (paddingBytes.Any(b => b != 0))
{
throw new InvalidDataException("Padding bytes were non-null");
throw new InvalidFormatException("Padding bytes were non-null");
}
}
_paddingSkipped = true;
@@ -145,7 +146,7 @@ public sealed class XZBlock : XZReadOnlyStream
var calcCrc = Crc32.Compute(blockHeaderWithoutCrc);
if (crc != calcCrc)
{
throw new InvalidDataException("Block header corrupt");
throw new InvalidFormatException("Block header corrupt");
}
return blockHeaderWithoutCrc;
@@ -159,7 +160,7 @@ public sealed class XZBlock : XZReadOnlyStream
if (reserved != 0)
{
throw new InvalidDataException(
throw new InvalidFormatException(
"Reserved bytes used, perhaps an unknown XZ implementation"
);
}
@@ -189,7 +190,7 @@ public sealed class XZBlock : XZReadOnlyStream
|| (i + 1 < _numFilters && !filter.AllowAsNonLast)
)
{
throw new InvalidDataException("Block Filters in bad order");
throw new InvalidFormatException("Block Filters in bad order");
}
if (filter.ChangesDataSize && i + 1 < _numFilters)
@@ -202,7 +203,7 @@ public sealed class XZBlock : XZReadOnlyStream
}
if (nonLastSizeChangers > 2)
{
throw new InvalidDataException(
throw new InvalidFormatException(
"More than two non-last block filters cannot change stream size"
);
}
@@ -212,7 +213,7 @@ public sealed class XZBlock : XZReadOnlyStream
var blockHeaderPadding = reader.ReadBytes(blockHeaderPaddingSize);
if (!blockHeaderPadding.All(b => b == 0))
{
throw new InvalidDataException("Block header contains unknown fields");
throw new InvalidFormatException("Block header contains unknown fields");
}
}
}

View File

@@ -1,6 +1,7 @@
using System;
using System.IO;
using System.Text;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Xz;
@@ -35,7 +36,7 @@ public class XZFooter
var myCrc = Crc32.Compute(footerBytes);
if (crc != myCrc)
{
throw new InvalidDataException("Footer corrupt");
throw new InvalidFormatException("Footer corrupt");
}
using (var stream = new MemoryStream(footerBytes))
@@ -47,7 +48,7 @@ public class XZFooter
var magBy = _reader.ReadBytes(2);
if (!magBy.AsSpan().SequenceEqual(_magicBytes))
{
throw new InvalidDataException("Magic footer missing");
throw new InvalidFormatException("Magic footer missing");
}
}
}

View File

@@ -1,6 +1,7 @@
using System.IO;
using System.Linq;
using System.Text;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Xz;
@@ -11,7 +12,7 @@ public class XZHeader
private readonly byte[] MagicHeader = { 0xFD, 0x37, 0x7A, 0x58, 0x5a, 0x00 };
public CheckType BlockCheckType { get; private set; }
public int BlockCheckSize => ((((int)BlockCheckType) + 2) / 3) * 4;
public int BlockCheckSize => 4 << ((((int)BlockCheckType + 2) / 3) - 1);
public XZHeader(BinaryReader reader) => _reader = reader;
@@ -37,14 +38,14 @@ public class XZHeader
var calcCrc = Crc32.Compute(streamFlags);
if (crc != calcCrc)
{
throw new InvalidDataException("Stream header corrupt");
throw new InvalidFormatException("Stream header corrupt");
}
BlockCheckType = (CheckType)(streamFlags[1] & 0x0F);
var futureUse = (byte)(streamFlags[1] & 0xF0);
if (futureUse != 0 || streamFlags[0] != 0)
{
throw new InvalidDataException("Unknown XZ Stream Version");
throw new InvalidFormatException("Unknown XZ Stream Version");
}
}
@@ -52,7 +53,7 @@ public class XZHeader
{
if (!header.SequenceEqual(MagicHeader))
{
throw new InvalidDataException("Invalid XZ Stream");
throw new InvalidFormatException("Invalid XZ Stream");
}
}
}

View File

@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Xz;
@@ -59,7 +60,7 @@ public class XZIndex
var marker = _reader.ReadByte();
if (marker != 0)
{
throw new InvalidDataException("Not an index block");
throw new InvalidFormatException("Not an index block");
}
}
@@ -71,7 +72,7 @@ public class XZIndex
var paddingBytes = _reader.ReadBytes(4 - bytes);
if (paddingBytes.Any(b => b != 0))
{
throw new InvalidDataException("Padding bytes were non-null");
throw new InvalidFormatException("Padding bytes were non-null");
}
}
}

View File

@@ -1,4 +1,5 @@
using System.IO;
using SharpCompress.Common;
namespace SharpCompress.Compressors.Xz;
@@ -9,7 +10,7 @@ public abstract class XZReadOnlyStream : ReadOnlyStream
BaseStream = stream;
if (!BaseStream.CanRead)
{
throw new InvalidDataException("Must be able to read from stream");
throw new InvalidFormatException("Must be able to read from stream");
}
}
}

View File

@@ -2,6 +2,7 @@
using System;
using System.IO;
using SharpCompress.Common;
namespace SharpCompress.Compressors.Xz;
@@ -25,15 +26,12 @@ public sealed class XZStream : XZReadOnlyStream
switch (Header.BlockCheckType)
{
case CheckType.NONE:
break;
case CheckType.CRC32:
break;
case CheckType.CRC64:
break;
case CheckType.SHA256:
throw new NotImplementedException();
break;
default:
throw new NotSupportedException("Check Type unknown to this version of decoder.");
throw new InvalidFormatException("Check Type unknown to this version of decoder.");
}
}

Some files were not shown because too many files have changed in this diff Show More