Mirror of https://github.com/adamhathcock/sharpcompress.git
Synced 2026-02-04 13:34:59 +00:00

Compare commits (5 commits):

- f298ad3322
- 69872dd9e7
- 92174f49ae
- c39a155c8f
- e5944cf72c
@@ -3,7 +3,7 @@
"isRoot": true,
"tools": {
"csharpier": {
"version": "0.28.1",
"version": "0.27.3",
"commands": [
"dotnet-csharpier"
]
@@ -70,7 +70,7 @@ indent_style = tab

[*.{cs,csx,cake,vb,vbx}]
# Default Severity for all .NET Code Style rules below
dotnet_analyzer_diagnostic.severity = silent
dotnet_analyzer_diagnostic.severity = warning

##########################################
# File Header (Uncomment to support file headers)
@@ -269,8 +269,6 @@ dotnet_diagnostic.CA1305.severity = suggestion
dotnet_diagnostic.CA1307.severity = suggestion
dotnet_diagnostic.CA1309.severity = suggestion
dotnet_diagnostic.CA1310.severity = error
dotnet_diagnostic.CA1507.severity = suggestion
dotnet_diagnostic.CA1513.severity = suggestion
dotnet_diagnostic.CA1707.severity = suggestion
dotnet_diagnostic.CA1708.severity = suggestion
dotnet_diagnostic.CA1711.severity = suggestion
@@ -288,7 +286,6 @@ dotnet_diagnostic.CA1834.severity = error
dotnet_diagnostic.CA1845.severity = suggestion
dotnet_diagnostic.CA1848.severity = suggestion
dotnet_diagnostic.CA1852.severity = suggestion
dotnet_diagnostic.CA1860.severity = silent
dotnet_diagnostic.CA2016.severity = suggestion
dotnet_diagnostic.CA2201.severity = error
dotnet_diagnostic.CA2206.severity = error
@@ -306,12 +303,13 @@ dotnet_diagnostic.CS1998.severity = error
dotnet_diagnostic.CS8602.severity = error
dotnet_diagnostic.CS8604.severity = error
dotnet_diagnostic.CS8618.severity = error
dotnet_diagnostic.CS0618.severity = suggestion
dotnet_diagnostic.CS0618.severity = error
dotnet_diagnostic.CS1998.severity = error
dotnet_diagnostic.CS4014.severity = error
dotnet_diagnostic.CS8600.severity = error
dotnet_diagnostic.CS8603.severity = error
dotnet_diagnostic.CS8625.severity = error
dotnet_diagnostic.CS8981.severity = suggestion

dotnet_diagnostic.BL0005.severity = suggestion

@@ -320,7 +318,7 @@ dotnet_diagnostic.MVC1000.severity = suggestion
dotnet_diagnostic.RZ10012.severity = error

dotnet_diagnostic.IDE0004.severity = error # redundant cast
dotnet_diagnostic.IDE0005.severity = suggestion
dotnet_diagnostic.IDE0005.severity = error
dotnet_diagnostic.IDE0007.severity = error # Use var
dotnet_diagnostic.IDE0011.severity = error # Use braces on if statements
dotnet_diagnostic.IDE0010.severity = silent # populate switch
@@ -331,7 +329,7 @@ dotnet_diagnostic.IDE0023.severity = suggestion # use expression body for operat
dotnet_diagnostic.IDE0024.severity = silent # expression body for operators
dotnet_diagnostic.IDE0025.severity = suggestion # use expression body for properties
dotnet_diagnostic.IDE0027.severity = suggestion # Use expression body for accessors
dotnet_diagnostic.IDE0028.severity = silent # expression body for accessors
dotnet_diagnostic.IDE0028.severity = silent
dotnet_diagnostic.IDE0032.severity = suggestion # Use auto property
dotnet_diagnostic.IDE0033.severity = error # prefer tuple name
dotnet_diagnostic.IDE0037.severity = suggestion # simplify anonymous type
@@ -339,7 +337,7 @@ dotnet_diagnostic.IDE0040.severity = error # modifiers required
dotnet_diagnostic.IDE0041.severity = error # simplify null
dotnet_diagnostic.IDE0042.severity = error # deconstruct variable
dotnet_diagnostic.IDE0044.severity = suggestion # make field only when possible
dotnet_diagnostic.IDE0047.severity = suggestion # parameter name
dotnet_diagnostic.IDE0047.severity = suggestion # paratemeter name
dotnet_diagnostic.IDE0051.severity = error # unused field
dotnet_diagnostic.IDE0052.severity = error # unused member
dotnet_diagnostic.IDE0053.severity = suggestion # lambda not needed
@@ -353,20 +351,11 @@ dotnet_diagnostic.IDE0066.severity = suggestion # switch expression
dotnet_diagnostic.IDE0072.severity = suggestion # Populate switch - forces population of all cases even when default specified
dotnet_diagnostic.IDE0078.severity = suggestion # use pattern matching
dotnet_diagnostic.IDE0090.severity = suggestion # new can be simplified
dotnet_diagnostic.IDE0130.severity = suggestion # namespace folder structure
dotnet_diagnostic.IDE0130.severity = error # namespace folder structure
dotnet_diagnostic.IDE0160.severity = silent # Use block namespaces ARE NOT required
dotnet_diagnostic.IDE0161.severity = error # Please use file namespaces
dotnet_diagnostic.IDE0200.severity = suggestion # lambda not needed
dotnet_diagnostic.IDE1006.severity = suggestion # Naming rule violation: These words cannot contain lower case characters
dotnet_diagnostic.IDE0260.severity = suggestion # Use pattern matching
dotnet_diagnostic.IDE0270.severity = suggestion # Null check simplifcation
dotnet_diagnostic.IDE0290.severity = error # Primary Constructor
dotnet_diagnostic.IDE0300.severity = suggestion # Collection
dotnet_diagnostic.IDE0305.severity = suggestion # Collection ToList

dotnet_diagnostic.NX0001.severity = error
dotnet_diagnostic.NX0002.severity = silent
dotnet_diagnostic.NX0003.severity = silent

##########################################
# Styles
@@ -10,7 +10,5 @@
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<RunAnalyzersDuringLiveAnalysis>False</RunAnalyzersDuringLiveAnalysis>
<RunAnalyzersDuringBuild>False</RunAnalyzersDuringBuild>
<ManagePackageVersionsCentrally>true</ManagePackageVersionsCentrally>
<RestorePackagesWithLockFile>true</RestorePackagesWithLockFile>
</PropertyGroup>
</Project>
@@ -1,18 +0,0 @@
<Project>
<ItemGroup>
<PackageVersion Include="Bullseye" Version="5.0.0" />
<PackageVersion Include="FluentAssertions" Version="6.12.0" />
<PackageVersion Include="Glob" Version="1.1.9" />
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="8.0.0" />
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="17.10.0" />
<PackageVersion Include="Mono.Posix.NETStandard" Version="1.0.0" />
<PackageVersion Include="SimpleExec" Version="12.0.0" />
<PackageVersion Include="System.Memory" Version="4.5.5" />
<PackageVersion Include="System.Text.Encoding.CodePages" Version="8.0.0" />
<PackageVersion Include="xunit" Version="2.9.0" />
<PackageVersion Include="xunit.runner.visualstudio" Version="2.8.2" />
<PackageVersion Include="xunit.SkippableFact" Version="1.4.13" />
<PackageVersion Include="ZstdSharp.Port" Version="0.8.1" />
<GlobalPackageReference Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
</ItemGroup>
</Project>
@@ -11,7 +11,7 @@
| Archive Format | Compression Format(s) | Compress/Decompress | Archive API | Reader API | Writer API |
| ---------------------- | ------------------------------------------------- | ------------------- | --------------- | ---------- | ------------- |
| Rar | Rar | Decompress (1) | RarArchive | RarReader | N/A |
| Zip (2) | None, Shrink, Reduce, Implode, DEFLATE, Deflate64, BZip2, LZMA/LZMA2, PPMd | Both | ZipArchive | ZipReader | ZipWriter |
| Zip (2) | None, DEFLATE, Deflate64, BZip2, LZMA/LZMA2, PPMd | Both | ZipArchive | ZipReader | ZipWriter |
| Tar | None | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.GZip | DEFLATE | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.BZip2 | BZip2 | Both | TarArchive | TarReader | TarWriter (3) |
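For context, a brief illustrative sketch of the Archive API column above (not part of the changes being compared); it uses the documented `ZipArchive`/`IArchiveEntry` extraction calls, and the file and folder names are placeholders:

```csharp
// Illustrative only: extract file entries from a zip using the Archive API.
using System.Linq;
using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;

using var archive = ZipArchive.Open("archive.zip");
foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
{
    // Preserve relative paths and overwrite existing files.
    entry.WriteToDirectory(
        "output",
        new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
    );
}
```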
@@ -1,9 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<packageSourceMapping>
<!-- key value for <packageSource> should match key values from <packageSources> element -->
<packageSource key="nuget.org">
<package pattern="*" />
</packageSource>
</packageSourceMapping>
</configuration>
@@ -1,12 +1,12 @@
# SharpCompress

SharpCompress is a compression library in pure C# for .NET Framework 4.62, .NET Standard 2.1, .NET 6.0 and NET 8.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
SharpCompress is a compression library in pure C# for .NET Standard 2.0, 2.1, .NET Core 3.1 and .NET 5.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.

The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).

GitHub Actions Build -
[](https://github.com/adamhathcock/sharpcompress/actions/workflows/dotnetcore.yml)
[](https://dndocs.com/d/sharpcompress/api/index.html)
[](https://www.robiniadocs.com/d/sharpcompress/api/SharpCompress.html)

## Need Help?
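As a quick illustration of the forward-only Reader API mentioned in the README text above (a sketch, not part of the compared changes; file names are placeholders):

```csharp
using System.IO;
using SharpCompress.Common;
using SharpCompress.Readers;

// ReaderFactory works against non-seekable streams, so the source could just
// as well be a network/download stream instead of a file.
using Stream stream = File.OpenRead("example.tar.gz");
using var reader = ReaderFactory.Open(stream);
while (reader.MoveToNextEntry())
{
    if (!reader.Entry.IsDirectory)
    {
        reader.WriteEntryToDirectory(
            "output",
            new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
        );
    }
}
```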
@@ -17,9 +17,6 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Config", "Config", "{CDB425
ProjectSection(SolutionItems) = preProject
Directory.Build.props = Directory.Build.props
global.json = global.json
.editorconfig = .editorconfig
Directory.Packages.props = Directory.Packages.props
NuGet.config = NuGet.config
EndProjectSection
EndProject
Global
@@ -79,10 +79,6 @@
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateStaticFields/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /></s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateStaticReadonly/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /></s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=StaticReadonly/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /></s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=15b5b1f1_002D457c_002D4ca6_002Db278_002D5615aedc07d3/@EntryIndexedValue"><Policy><Descriptor Staticness="Static" AccessRightKinds="Private" Description="Static readonly fields (private)"><ElementKinds><Kind Name="READONLY_FIELD" /></ElementKinds></Descriptor><Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /></Policy></s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=4a98fdf6_002D7d98_002D4f5a_002Dafeb_002Dea44ad98c70c/@EntryIndexedValue"><Policy><Descriptor Staticness="Instance" AccessRightKinds="Private" Description="Instance fields (private)"><ElementKinds><Kind Name="FIELD" /><Kind Name="READONLY_FIELD" /></ElementKinds></Descriptor><Policy Inspect="True" Prefix="_" Suffix="" Style="aaBb" /></Policy></s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=c873eafb_002Dd57f_002D481d_002D8c93_002D77f6863c2f88/@EntryIndexedValue"><Policy><Descriptor Staticness="Static" AccessRightKinds="Protected, ProtectedInternal, Internal, Public, PrivateProtected" Description="Static readonly fields (not private)"><ElementKinds><Kind Name="READONLY_FIELD" /></ElementKinds></Descriptor><Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /></Policy></s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=f9fce829_002De6f4_002D4cb2_002D80f1_002D5497c44f51df/@EntryIndexedValue"><Policy><Descriptor Staticness="Static" AccessRightKinds="Private" Description="Static fields (private)"><ElementKinds><Kind Name="FIELD" /></ElementKinds></Descriptor><Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /></Policy></s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FCONSTANT/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /></s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FFUNCTION/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /></s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FVARIABLE/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /></s:String>
@@ -131,7 +127,6 @@
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002ECSharpPlaceAttributeOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateBlankLinesAroundFieldToBlankLinesAroundProperty/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateThisQualifierSettings/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EPredefinedNamingRulesToUserRulesUpgrade/@EntryIndexedValue">True</s:Boolean>
<s:String x:Key="/Default/Environment/UnitTesting/UnitTestSessionStore/Sessions/=6af8f80e_002D9fdd_002D4223_002D8e02_002D473db916f9b2/@EntryIndexedValue"><SessionState ContinuousTestingIsOn="False" ContinuousTestingMode="0" FrameworkVersion="{x:Null}" IsLocked="False" Name="All tests from Solution" PlatformMonoPreference="{x:Null}" PlatformType="{x:Null}" xmlns="urn:schemas-jetbrains-com:jetbrains-ut-session" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml">
<Solution />
</SessionState></s:String></wpf:ResourceDictionary>
USAGE.md (2 changes)

@@ -27,7 +27,7 @@ To deal with the "correct" rules as well as the expectations of users, I've deci

To be explicit though, consider always using the overloads that use `ReaderOptions` or `WriterOptions` and explicitly set `LeaveStreamOpen` the way you want.

If using Compression Stream classes directly and you don't want the wrapped stream to be closed. Use the `NonDisposingStream` as a wrapper to prevent the stream being disposed. The change in 0.21 simplified a lot even though the usage is a bit more convoluted.
If using Compression Stream classes directly and you don't want the wrapped stream to be closed. Use the `NonDisposingStream` as a wrapped to prevent the stream being disposed. The change in 0.21 simplified a lot even though the usage is a bit more convoluted.

## Samples
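A minimal sketch of the `LeaveStreamOpen` advice quoted above (assuming the current `ReaderOptions` API; the archive name is a placeholder):

```csharp
using System.IO;
using SharpCompress.Readers;

using Stream stream = File.OpenRead("example.zip");
using (var reader = ReaderFactory.Open(stream, new ReaderOptions { LeaveStreamOpen = true }))
{
    while (reader.MoveToNextEntry())
    {
        // process reader.Entry here
    }
}

// Because LeaveStreamOpen was set, the caller's stream is still usable here;
// the NonDisposingStream wrapper mentioned above serves the same purpose when
// the compression stream classes are used directly.
stream.Position = 0;
```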
@@ -2,13 +2,13 @@

<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net8.0</TargetFramework>
<TargetFramework>net7.0</TargetFramework>
</PropertyGroup>

<ItemGroup>
<PackageReference Include="Bullseye" />
<PackageReference Include="Glob" />
<PackageReference Include="SimpleExec" />
<PackageReference Include="Bullseye" Version="4.2.1" />
<PackageReference Include="Glob" Version="1.1.9" />
<PackageReference Include="SimpleExec" Version="11.0.0" />
</ItemGroup>

</Project>
@@ -1,45 +0,0 @@
{
"version": 2,
"dependencies": {
"net8.0": {
"Bullseye": {
"type": "Direct",
"requested": "[5.0.0, )",
"resolved": "5.0.0",
"contentHash": "bqyt+m17ym+5aN45C5oZRAjuLDt8jKiCm/ys1XfymIXSkrTFwvI/QsbY3ucPSHDz7SF7uON7B57kXFv5H2k1ew=="
},
"Glob": {
"type": "Direct",
"requested": "[1.1.9, )",
"resolved": "1.1.9",
"contentHash": "AfK5+ECWYTP7G3AAdnU8IfVj+QpGjrh9GC2mpdcJzCvtQ4pnerAGwHsxJ9D4/RnhDUz2DSzd951O/lQjQby2Sw=="
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "8.0.0",
"Microsoft.SourceLink.Common": "8.0.0"
}
},
"SimpleExec": {
"type": "Direct",
"requested": "[12.0.0, )",
"resolved": "12.0.0",
"contentHash": "ptxlWtxC8vM6Y6e3h9ZTxBBkOWnWrm/Sa1HT+2i1xcXY3Hx2hmKDZP5RShPf8Xr9D+ivlrXNy57ktzyH8kyt+Q=="
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
}
}
}
}
@@ -12,35 +12,39 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
where TEntry : IArchiveEntry
where TVolume : IVolume
{
private readonly LazyReadOnlyCollection<TVolume> _lazyVolumes;
private readonly LazyReadOnlyCollection<TEntry> _lazyEntries;
private bool _disposed;
private readonly SourceStream? _sourceStream;
private readonly LazyReadOnlyCollection<TVolume> lazyVolumes;
private readonly LazyReadOnlyCollection<TEntry> lazyEntries;

public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionBegin;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionEnd;

public event EventHandler<CompressedBytesReadEventArgs>? CompressedBytesRead;
public event EventHandler<FilePartExtractionBeginEventArgs>? FilePartExtractionBegin;

protected ReaderOptions ReaderOptions { get; }

internal AbstractArchive(ArchiveType type, SourceStream sourceStream)
private bool disposed;
protected SourceStream SrcStream;

internal AbstractArchive(ArchiveType type, SourceStream srcStream)
{
Type = type;
ReaderOptions = sourceStream.ReaderOptions;
_sourceStream = sourceStream;
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(_sourceStream));
_lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
ReaderOptions = srcStream.ReaderOptions;
SrcStream = srcStream;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(SrcStream));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
}

#nullable disable
internal AbstractArchive(ArchiveType type)
{
Type = type;
ReaderOptions = new();
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
_lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
}

#nullable enable

public ArchiveType Type { get; }

void IArchiveExtractionListener.FireEntryExtractionBegin(IArchiveEntry entry) =>
@@ -61,12 +65,12 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive.
/// </summary>
public virtual ICollection<TEntry> Entries => _lazyEntries;
public virtual ICollection<TEntry> Entries => lazyEntries;

/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveVolumes across the one or many parts of the RarArchive.
/// </summary>
public ICollection<TVolume> Volumes => _lazyVolumes;
public ICollection<TVolume> Volumes => lazyVolumes;

/// <summary>
/// The total size of the files compressed in the archive.
@@ -80,29 +84,29 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
public virtual long TotalUncompressSize =>
Entries.Aggregate(0L, (total, cf) => total + cf.Size);

protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream sourceStream);
protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream srcStream);
protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);

IEnumerable<IArchiveEntry> IArchive.Entries => Entries.Cast<IArchiveEntry>();

IEnumerable<IVolume> IArchive.Volumes => _lazyVolumes.Cast<IVolume>();
IEnumerable<IVolume> IArchive.Volumes => lazyVolumes.Cast<IVolume>();

public virtual void Dispose()
{
if (!_disposed)
if (!disposed)
{
_lazyVolumes.ForEach(v => v.Dispose());
_lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
_sourceStream?.Dispose();
lazyVolumes.ForEach(v => v.Dispose());
lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
SrcStream?.Dispose();

_disposed = true;
disposed = true;
}
}

void IArchiveExtractionListener.EnsureEntriesLoaded()
{
_lazyEntries.EnsureFullyLoaded();
_lazyVolumes.EnsureFullyLoaded();
lazyEntries.EnsureFullyLoaded();
lazyVolumes.EnsureFullyLoaded();
}

void IExtractionListener.FireCompressedBytesRead(
@@ -41,8 +41,8 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
internal AbstractWritableArchive(ArchiveType type)
: base(type) { }

internal AbstractWritableArchive(ArchiveType type, SourceStream sourceStream)
: base(type, sourceStream) { }
internal AbstractWritableArchive(ArchiveType type, SourceStream srcStream)
: base(type, srcStream) { }

public override ICollection<TEntry> Entries
{
@@ -120,10 +120,6 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
{
foreach (var path in Entries.Select(x => x.Key))
{
if (path is null)
{
continue;
}
var p = path.Replace('/', '\\');
if (p.Length > 0 && p[0] == '\\')
{

@@ -239,6 +239,4 @@ public static class ArchiveFactory
}
}
}

public static IArchiveFactory AutoFactory { get; } = new AutoArchiveFactory();
}
@@ -1,27 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Readers;

namespace SharpCompress.Archives;

class AutoArchiveFactory : IArchiveFactory
{
public string Name => nameof(AutoArchiveFactory);

public ArchiveType? KnownArchiveType => null;

public IEnumerable<string> GetSupportedExtensions() => throw new NotSupportedException();

public bool IsArchive(Stream stream, string? password = null) =>
throw new NotSupportedException();

public FileInfo? GetFilePart(int index, FileInfo part1) => throw new NotSupportedException();

public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
ArchiveFactory.Open(stream, readerOptions);

public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
ArchiveFactory.Open(fileInfo, readerOptions);
}
@@ -90,7 +90,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
{
stream.CheckNotNull(nameof(stream));
return new GZipArchive(
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
);
}

@@ -99,14 +99,16 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="sourceStream"></param>
private GZipArchive(SourceStream sourceStream)
: base(ArchiveType.GZip, sourceStream) { }
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal GZipArchive(SourceStream srcStream)
: base(ArchiveType.Tar, srcStream) { }

protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream sourceStream)
protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream srcStream)
{
sourceStream.LoadAllParts();
return sourceStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, 0));
srcStream.LoadAllParts();
var idx = 0;
return srcStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, idx++));
}

public static bool IsGZipFile(string filePath) => IsGZipFile(new FileInfo(filePath));
@@ -182,11 +184,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
writer.Write(
entry.Key.NotNull("Entry Key is null"),
entryStream,
entry.LastModifiedTime
);
writer.Write(entry.Key, entryStream, entry.LastModifiedTime);
}
}
@@ -6,7 +6,7 @@ namespace SharpCompress.Archives.GZip;

public class GZipArchiveEntry : GZipEntry, IArchiveEntry
{
internal GZipArchiveEntry(GZipArchive archive, GZipFilePart? part)
internal GZipArchiveEntry(GZipArchive archive, GZipFilePart part)
: base(part) => Archive = archive;

public virtual Stream OpenEntryStream()

@@ -1,3 +1,5 @@
#nullable disable

using System;
using System.Collections.Generic;
using System.IO;
@@ -30,7 +32,7 @@ internal sealed class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArch

public override long Crc => 0;

public override string? Key { get; }
public override string Key { get; }

public override long CompressedSize => 0;
@@ -17,11 +17,15 @@ public static class IArchiveEntryExtensions
streamListener.EnsureEntriesLoaded();
streamListener.FireEntryExtractionBegin(archiveEntry);
streamListener.FireFilePartExtractionBegin(
archiveEntry.Key ?? "Key",
archiveEntry.Key,
archiveEntry.Size,
archiveEntry.CompressedSize
);
var entryStream = archiveEntry.OpenEntryStream();
if (entryStream is null)
{
return;
}
using (entryStream)
{
using Stream s = new ListeningStream(streamListener, entryStream);
@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;

namespace SharpCompress.Archives;
@@ -54,26 +55,14 @@ public static class IArchiveExtensions
var entry = entries.Entry;
if (entry.IsDirectory)
{
var dirPath = Path.Combine(destination, entry.Key.NotNull("Entry Key is null"));
if (
Path.GetDirectoryName(dirPath + "/") is { } emptyDirectory
&& seenDirectories.Add(dirPath)
)
{
Directory.CreateDirectory(emptyDirectory);
}
continue;
}

// Create each directory if not already created
var path = Path.Combine(destination, entry.Key.NotNull("Entry Key is null"));
if (Path.GetDirectoryName(path) is { } directory)
// Create each directory
var path = Path.Combine(destination, entry.Key);
if (Path.GetDirectoryName(path) is { } directory && seenDirectories.Add(path))
{
if (!Directory.Exists(directory) && !seenDirectories.Contains(directory))
{
Directory.CreateDirectory(directory);
seenDirectories.Add(directory);
}
Directory.CreateDirectory(directory);
}

// Write file
@@ -13,7 +13,7 @@ namespace SharpCompress.Archives.Rar;
/// </summary>
internal class FileInfoRarArchiveVolume : RarVolume
{
internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options, int index)
internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options, int index = 0)
: base(StreamingMode.Seekable, fileInfo.OpenRead(), FixOptions(options), index)
{
FileInfo = fileInfo;
@@ -21,33 +21,35 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="sourceStream"></param>
private RarArchive(SourceStream sourceStream)
: base(ArchiveType.Rar, sourceStream) { }
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal RarArchive(SourceStream srcStream)
: base(ArchiveType.Rar, srcStream) { }

protected override IEnumerable<RarArchiveEntry> LoadEntries(IEnumerable<RarVolume> volumes) =>
RarArchiveEntryFactory.GetEntries(this, volumes, ReaderOptions);

protected override IEnumerable<RarVolume> LoadVolumes(SourceStream sourceStream)
protected override IEnumerable<RarVolume> LoadVolumes(SourceStream srcStream)
{
sourceStream.LoadAllParts(); //request all streams
var streams = sourceStream.Streams.ToArray();
var i = 0;
SrcStream.LoadAllParts(); //request all streams
var streams = SrcStream.Streams.ToArray();
var idx = 0;
if (streams.Length > 1 && IsRarFile(streams[1], ReaderOptions)) //test part 2 - true = multipart not split
{
sourceStream.IsVolumes = true;
SrcStream.IsVolumes = true;
streams[1].Position = 0;
sourceStream.Position = 0;
SrcStream.Position = 0;

return sourceStream.Streams.Select(a => new StreamRarArchiveVolume(
return srcStream.Streams.Select(a => new StreamRarArchiveVolume(
a,
ReaderOptions,
i++
idx++
));
}

//split mode or single file
return new StreamRarArchiveVolume(sourceStream, ReaderOptions, i++).AsEnumerable();
else //split mode or single file
{
return new StreamRarArchiveVolume(SrcStream, ReaderOptions, idx++).AsEnumerable();
}
}

protected override IReader CreateReaderForSolidExtraction()
@@ -106,7 +108,7 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
public static RarArchive Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull(nameof(stream));
return new RarArchive(new SourceStream(stream, _ => null, options ?? new ReaderOptions()));
return new RarArchive(new SourceStream(stream, i => null, options ?? new ReaderOptions()));
}

/// <summary>
@@ -6,8 +6,8 @@ namespace SharpCompress.Archives.Rar;

internal class SeekableFilePart : RarFilePart
{
private readonly Stream _stream;
private readonly string? _password;
private readonly Stream stream;
private readonly string? password;

internal SeekableFilePart(
MarkHeader mh,
@@ -18,27 +18,27 @@ internal class SeekableFilePart : RarFilePart
)
: base(mh, fh, index)
{
_stream = stream;
_password = password;
this.stream = stream;
this.password = password;
}

internal override Stream GetCompressedStream()
{
_stream.Position = FileHeader.DataStartPosition;
stream.Position = FileHeader.DataStartPosition;

if (FileHeader.R4Salt != null)
{
var cryptKey = new CryptKey3(_password!);
return new RarCryptoWrapper(_stream, FileHeader.R4Salt, cryptKey);
var cryptKey = new CryptKey3(password!);
return new RarCryptoWrapper(stream, FileHeader.R4Salt, cryptKey);
}

if (FileHeader.Rar5CryptoInfo != null)
{
var cryptKey = new CryptKey5(_password!, FileHeader.Rar5CryptoInfo);
return new RarCryptoWrapper(_stream, FileHeader.Rar5CryptoInfo.Salt, cryptKey);
var cryptKey = new CryptKey5(password!, FileHeader.Rar5CryptoInfo);
return new RarCryptoWrapper(stream, FileHeader.Rar5CryptoInfo.Salt, cryptKey);
}

return _stream;
return stream;
}

internal override string FilePartName => "Unknown Stream - File Entry: " + FileHeader.FileName;
@@ -9,7 +9,7 @@ namespace SharpCompress.Archives.Rar;

internal class StreamRarArchiveVolume : RarVolume
{
internal StreamRarArchiveVolume(Stream stream, ReaderOptions options, int index)
internal StreamRarArchiveVolume(Stream stream, ReaderOptions options, int index = 0)
: base(StreamingMode.Seekable, stream, options, index) { }

internal override IEnumerable<RarFilePart> ReadFileParts() => GetVolumeFileParts();
@@ -1,3 +1,5 @@
#nullable disable

using System;
using System.Collections.Generic;
using System.IO;
@@ -12,14 +14,14 @@ namespace SharpCompress.Archives.SevenZip;

public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVolume>
{
private ArchiveDatabase? _database;
private ArchiveDatabase database;

/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
public static SevenZipArchive Open(string filePath, ReaderOptions readerOptions = null)
{
filePath.CheckNotNullOrEmpty("filePath");
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
@@ -30,7 +32,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
{
fileInfo.CheckNotNull("fileInfo");
return new SevenZipArchive(
@@ -49,7 +51,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
ReaderOptions readerOptions = null
)
{
fileInfos.CheckNotNull(nameof(fileInfos));
@@ -70,7 +72,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(
IEnumerable<Stream> streams,
ReaderOptions? readerOptions = null
ReaderOptions readerOptions = null
)
{
streams.CheckNotNull(nameof(streams));
@@ -89,25 +91,27 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
public static SevenZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
{
stream.CheckNotNull("stream");
return new SevenZipArchive(
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
);
}

/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="sourceStream"></param>
private SevenZipArchive(SourceStream sourceStream)
: base(ArchiveType.SevenZip, sourceStream) { }
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal SevenZipArchive(SourceStream srcStream)
: base(ArchiveType.SevenZip, srcStream) { }

protected override IEnumerable<SevenZipVolume> LoadVolumes(SourceStream sourceStream)
protected override IEnumerable<SevenZipVolume> LoadVolumes(SourceStream srcStream)
{
sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams
return new SevenZipVolume(sourceStream, ReaderOptions, 0).AsEnumerable(); //simple single volume or split, multivolume not supported
SrcStream.LoadAllParts(); //request all streams
var idx = 0;
return new SevenZipVolume(srcStream, ReaderOptions, idx++).AsEnumerable(); //simple single volume or split, multivolume not supported
}

public static bool IsSevenZipFile(string filePath) => IsSevenZipFile(new FileInfo(filePath));
@@ -131,17 +135,13 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
{
var stream = volumes.Single().Stream;
LoadFactory(stream);
if (_database is null)
var entries = new SevenZipArchiveEntry[database._files.Count];
for (var i = 0; i < database._files.Count; i++)
{
return Enumerable.Empty<SevenZipArchiveEntry>();
}
var entries = new SevenZipArchiveEntry[_database._files.Count];
for (var i = 0; i < _database._files.Count; i++)
{
var file = _database._files[i];
var file = database._files[i];
entries[i] = new SevenZipArchiveEntry(
this,
new SevenZipFilePart(stream, _database, i, file, ReaderOptions.ArchiveEncoding)
new SevenZipFilePart(stream, database, i, file, ReaderOptions.ArchiveEncoding)
);
}
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
@@ -159,12 +159,12 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol

private void LoadFactory(Stream stream)
{
if (_database is null)
if (database is null)
{
stream.Position = 0;
var reader = new ArchiveReader();
reader.Open(stream, lookForHeader: ReaderOptions.LookForHeader);
_database = reader.ReadDatabase(new PasswordProvider(ReaderOptions.Password));
reader.Open(stream);
database = reader.ReadDatabase(new PasswordProvider(ReaderOptions.Password));
}
}

@@ -180,14 +180,14 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
}
}

private static ReadOnlySpan<byte> Signature =>
private static ReadOnlySpan<byte> SIGNATURE =>
new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };

private static bool SignatureMatch(Stream stream)
{
var reader = new BinaryReader(stream);
ReadOnlySpan<byte> signatureBytes = reader.ReadBytes(6);
return signatureBytes.SequenceEqual(Signature);
return signatureBytes.SequenceEqual(SIGNATURE);
}

protected override IReader CreateReaderForSolidExtraction() =>
@@ -196,24 +196,30 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
public override bool IsSolid =>
Entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder).Count() > 1;

public override long TotalSize =>
_database?._packSizes.Aggregate(0L, (total, packSize) => total + packSize) ?? 0;
public override long TotalSize
{
get
{
var i = Entries.Count;
return database._packSizes.Aggregate(0L, (total, packSize) => total + packSize);
}
}

private sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
{
private readonly SevenZipArchive _archive;
private CFolder? _currentFolder;
private Stream? _currentStream;
private CFileItem? _currentItem;
private readonly SevenZipArchive archive;
private CFolder currentFolder;
private Stream currentStream;
private CFileItem currentItem;

internal SevenZipReader(ReaderOptions readerOptions, SevenZipArchive archive)
: base(readerOptions, ArchiveType.SevenZip) => this._archive = archive;
: base(readerOptions, ArchiveType.SevenZip) => this.archive = archive;

public override SevenZipVolume Volume => _archive.Volumes.Single();
public override SevenZipVolume Volume => archive.Volumes.Single();

protected override IEnumerable<SevenZipEntry> GetEntries(Stream stream)
{
var entries = _archive.Entries.ToList();
var entries = archive.Entries.ToList();
stream.Position = 0;
foreach (var dir in entries.Where(x => x.IsDirectory))
{
@@ -223,42 +229,37 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder)
)
{
_currentFolder = group.Key;
currentFolder = group.Key;
if (group.Key is null)
{
_currentStream = Stream.Null;
currentStream = Stream.Null;
}
else
{
_currentStream = _archive._database?.GetFolderStream(
currentStream = archive.database.GetFolderStream(
stream,
_currentFolder,
currentFolder,
new PasswordProvider(Options.Password)
);
}
foreach (var entry in group)
{
_currentItem = entry.FilePart.Header;
currentItem = entry.FilePart.Header;
yield return entry;
}
}
}

protected override EntryStream GetEntryStream() =>
CreateEntryStream(
new ReadOnlySubStream(
_currentStream.NotNull("currentStream is not null"),
_currentItem?.Size ?? 0
)
);
CreateEntryStream(new ReadOnlySubStream(currentStream, currentItem.Size));
}

private class PasswordProvider : IPasswordProvider
{
private readonly string? _password;
private readonly string _password;

public PasswordProvider(string? password) => _password = password;
public PasswordProvider(string password) => _password = password;

public string? CryptoGetTextPassword() => _password;
public string CryptoGetTextPassword() => _password;
}
}
@@ -114,7 +114,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
var tarHeader = new TarHeader(new ArchiveEncoding());
var readSucceeded = tarHeader.Read(new BinaryReader(stream));
var isEmptyArchive =
tarHeader.Name?.Length == 0
tarHeader.Name.Length == 0
&& tarHeader.Size == 0
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
return readSucceeded || isEmptyArchive;
@@ -123,20 +123,22 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
return false;
}

protected override IEnumerable<TarVolume> LoadVolumes(SourceStream sourceStream)
protected override IEnumerable<TarVolume> LoadVolumes(SourceStream srcStream)
{
sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams
return new TarVolume(sourceStream, ReaderOptions, 1).AsEnumerable(); //simple single volume or split, multivolume not supported
SrcStream.LoadAllParts(); //request all streams
var idx = 0;
return new TarVolume(srcStream, ReaderOptions, idx++).AsEnumerable(); //simple single volume or split, multivolume not supported
}

/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="sourceStream"></param>
private TarArchive(SourceStream sourceStream)
: base(ArchiveType.Tar, sourceStream) { }
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal TarArchive(SourceStream srcStream)
: base(ArchiveType.Tar, srcStream) { }

private TarArchive()
internal TarArchive()
: base(ArchiveType.Tar) { }

protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
@@ -190,10 +192,6 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
);
}
}
else
{
throw new IncompleteArchiveException("Failed to read TAR header");
}
}
}

@@ -227,12 +225,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
writer.Write(
entry.Key.NotNull("Entry Key is null"),
entryStream,
entry.LastModifiedTime,
entry.Size
);
writer.Write(entry.Key, entryStream, entry.LastModifiedTime, entry.Size);
}
}
@@ -7,7 +7,7 @@ namespace SharpCompress.Archives.Tar;

public class TarArchiveEntry : TarEntry, IArchiveEntry
{
internal TarArchiveEntry(TarArchive archive, TarFilePart? part, CompressionType compressionType)
internal TarArchiveEntry(TarArchive archive, TarFilePart part, CompressionType compressionType)
: base(part, compressionType) => Archive = archive;

public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream();

@@ -1,3 +1,5 @@
#nullable disable

using System;
using System.Collections.Generic;
using System.IO;

@@ -16,7 +16,10 @@ namespace SharpCompress.Archives.Zip;

public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
{
private readonly SeekableZipHeaderFactory? headerFactory;
#nullable disable
private readonly SeekableZipHeaderFactory headerFactory;

#nullable enable

/// <summary>
/// Gets or sets the compression level applied to files added to the archive,
@@ -27,13 +30,13 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="sourceStream"></param>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal ZipArchive(SourceStream sourceStream)
: base(ArchiveType.Zip, sourceStream) =>
internal ZipArchive(SourceStream srcStream)
: base(ArchiveType.Zip, srcStream) =>
headerFactory = new SeekableZipHeaderFactory(
sourceStream.ReaderOptions.Password,
sourceStream.ReaderOptions.ArchiveEncoding
srcStream.ReaderOptions.Password,
srcStream.ReaderOptions.ArchiveEncoding
);

/// <summary>
@@ -186,21 +189,21 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
}
}

protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream stream)
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream srcStream)
{
stream.LoadAllParts(); //request all streams
stream.Position = 0;
SrcStream.LoadAllParts(); //request all streams
SrcStream.Position = 0;

var streams = stream.Streams.ToList();
var streams = SrcStream.Streams.ToList();
var idx = 0;
if (streams.Count() > 1) //test part 2 - true = multipart not split
if (streams.Count > 1) //test part 2 - true = multipart not split
{
streams[1].Position += 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
var isZip = IsZipFile(streams[1], ReaderOptions.Password);
streams[1].Position -= 4;
if (isZip)
{
stream.IsVolumes = true;
SrcStream.IsVolumes = true;

var tmp = streams[0]; //arcs as zip, z01 ... swap the zip the end
streams.RemoveAt(0);
@@ -212,7 +215,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
}

//split mode or single file
return new ZipVolume(stream, ReaderOptions, idx++).AsEnumerable();
return new ZipVolume(SrcStream, ReaderOptions, idx++).AsEnumerable();
}

internal ZipArchive()
@@ -221,13 +224,14 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
{
var vols = volumes.ToArray();
foreach (var h in headerFactory.NotNull().ReadSeekableHeader(vols.Last().Stream))
foreach (var h in headerFactory.ReadSeekableHeader(vols.Last().Stream))
{
if (h != null)
{
switch (h.ZipHeaderType)
{
case ZipHeaderType.DirectoryEntry:

{
var deh = (DirectoryEntryHeader)h;
Stream s;
@@ -250,14 +254,14 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>

yield return new ZipArchiveEntry(
this,
new SeekableZipFilePart(headerFactory.NotNull(), deh, s)
new SeekableZipFilePart(headerFactory, deh, s)
);
}
break;
case ZipHeaderType.DirectoryEnd:
{
var bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
vols.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
volumes.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
yield break;
}
}
@@ -278,11 +282,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
writer.Write(
entry.Key.NotNull("Entry Key is null"),
entryStream,
entry.LastModifiedTime
);
writer.Write(entry.Key, entryStream, entry.LastModifiedTime);
}
}
@@ -8,12 +8,12 @@ public class ArchiveEncoding
/// <summary>
/// Default encoding to use when archive format doesn't specify one.
/// </summary>
public Encoding? Default { get; set; }
public Encoding Default { get; set; }

/// <summary>
/// ArchiveEncoding used by encryption schemes which don't comply with RFC 2898.
/// </summary>
public Encoding? Password { get; set; }
public Encoding Password { get; set; }

/// <summary>
/// Set this encoding when you want to force it for all encoding operations.
@@ -50,8 +50,6 @@ public class ArchiveEncoding

public Encoding GetEncoding() => Forced ?? Default ?? Encoding.UTF8;

public Encoding GetPasswordEncoding() => Password ?? Encoding.UTF8;

public Func<byte[], int, int, string> GetDecoder() =>
CustomDecoder ?? ((bytes, index, count) => GetEncoding().GetString(bytes, index, count));
}
@@ -16,10 +16,5 @@ public enum CompressionType
Unknown,
Deflate64,
Shrink,
Lzw,
Reduce1,
Reduce2,
Reduce3,
Reduce4,
Explode
Lzw
}
@@ -14,7 +14,7 @@ public abstract class Entry : IEntry
/// <summary>
/// The string key of the file internal to the Archive.
/// </summary>
public abstract string? Key { get; }
public abstract string Key { get; }

/// <summary>
/// The target of a symlink entry internal to the Archive. Will be null if not a symlink.
@@ -71,11 +71,11 @@ public abstract class Entry : IEntry
/// </summary>
public abstract bool IsSplitAfter { get; }

public int VolumeIndexFirst => Parts.FirstOrDefault()?.Index ?? 0;
public int VolumeIndexLast => Parts.LastOrDefault()?.Index ?? 0;
public int VolumeIndexFirst => Parts?.FirstOrDefault()?.Index ?? 0;
public int VolumeIndexLast => Parts?.LastOrDefault()?.Index ?? 0;

/// <inheritdoc/>
public override string ToString() => Key ?? "Entry";
public override string ToString() => Key;

internal abstract IEnumerable<FilePart> Parts { get; }
@@ -36,12 +36,10 @@ internal static class ExtractionMethods

options ??= new ExtractionOptions() { Overwrite = true };

var file = Path.GetFileName(entry.Key.NotNull("Entry Key is null")).NotNull("File is null");
file = Utility.ReplaceInvalidFileNameChars(file);
var file = Path.GetFileName(entry.Key);
if (options.ExtractFullPath)
{
var folder = Path.GetDirectoryName(entry.Key.NotNull("Entry Key is null"))
.NotNull("Directory is null");
var folder = Path.GetDirectoryName(entry.Key)!;
var destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));

if (!Directory.Exists(destdir))

@@ -8,7 +8,7 @@ public abstract class FilePart

internal ArchiveEncoding ArchiveEncoding { get; }

internal abstract string? FilePartName { get; }
internal abstract string FilePartName { get; }
public int Index { get; set; }

internal abstract Stream GetCompressedStream();
@@ -6,23 +6,23 @@ namespace SharpCompress.Common.GZip;

public class GZipEntry : Entry
{
private readonly GZipFilePart? _filePart;
private readonly GZipFilePart _filePart;

internal GZipEntry(GZipFilePart? filePart) => _filePart = filePart;
internal GZipEntry(GZipFilePart filePart) => _filePart = filePart;

public override CompressionType CompressionType => CompressionType.GZip;

public override long Crc => _filePart?.Crc ?? 0;
public override long Crc => _filePart.Crc ?? 0;

public override string? Key => _filePart?.FilePartName;
public override string Key => _filePart.FilePartName;

public override string? LinkTarget => null;

public override long CompressedSize => 0;

public override long Size => _filePart?.UncompressedSize ?? 0;
public override long Size => _filePart.UncompressedSize ?? 0;

public override DateTime? LastModifiedTime => _filePart?.DateModified;
public override DateTime? LastModifiedTime => _filePart.DateModified;

public override DateTime? CreatedTime => null;

@@ -36,7 +36,7 @@ public class GZipEntry : Entry

public override bool IsSplitAfter => false;

internal override IEnumerable<FilePart> Parts => _filePart.Empty();
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();

internal static IEnumerable<GZipEntry> GetEntries(Stream stream, OptionsBase options)
{
@@ -34,7 +34,7 @@ internal sealed class GZipFilePart : FilePart
internal uint? Crc { get; private set; }
internal uint? UncompressedSize { get; private set; }

internal override string? FilePartName => _name;
internal override string FilePartName => _name!;

internal override Stream GetCompressedStream() =>
new DeflateStream(_stream, CompressionMode.Decompress, CompressionLevel.Default);

@@ -5,7 +5,7 @@ namespace SharpCompress.Common.GZip;

public class GZipVolume : Volume
{
public GZipVolume(Stream stream, ReaderOptions? options, int index)
public GZipVolume(Stream stream, ReaderOptions options, int index = 0)
: base(stream, options, index) { }

public GZipVolume(FileInfo fileInfo, ReaderOptions options)

@@ -9,7 +9,7 @@ public interface IEntry
long CompressedSize { get; }
long Crc { get; }
DateTime? CreatedTime { get; }
string? Key { get; }
string Key { get; }
string? LinkTarget { get; }
bool IsDirectory { get; }
bool IsEncrypted { get; }

@@ -6,5 +6,5 @@ public interface IVolume : IDisposable
{
int Index { get; }

string? FileName { get; }
string FileName { get; }
}

@@ -121,6 +121,7 @@ internal class FileHeader : RarHeader
switch (type)
{
case FHEXTRA_CRYPT: // file encryption

{
Rar5CryptoInfo = new Rar5CryptoInfo(reader, true);

@@ -131,6 +132,7 @@ internal class FileHeader : RarHeader
}
break;
case FHEXTRA_HASH:

{
const uint FHEXTRA_HASH_BLAKE2 = 0x0;
// const uint HASH_BLAKE2 = 0x03;
@@ -144,6 +146,7 @@ internal class FileHeader : RarHeader
}
break;
case FHEXTRA_HTIME: // file time

{
var flags = reader.ReadRarVIntUInt16();
var isWindowsTime = (flags & 1) == 0;
@@ -168,6 +171,7 @@ internal class FileHeader : RarHeader
// }
// break;
case FHEXTRA_REDIR: // file system redirection

{
RedirType = reader.ReadRarVIntByte();
RedirFlags = reader.ReadRarVIntByte();
@@ -280,6 +284,7 @@ internal class FileHeader : RarHeader
switch (HeaderCode)
{
case HeaderCodeV.RAR4_FILE_HEADER:

{
if (HasFlag(FileFlagsV4.UNICODE))
{
@@ -306,6 +311,7 @@ internal class FileHeader : RarHeader
}
break;
case HeaderCodeV.RAR4_NEW_SUB_HEADER:

{
var datasize = HeaderSize - newLhdSize - nameSize;
if (HasFlag(FileFlagsV4.SALT))

@@ -98,11 +98,13 @@ public class RarHeaderFactory
switch (StreamingMode)
{
case StreamingMode.Seekable:

{
reader.BaseStream.Position += ph.DataSize;
}
break;
case StreamingMode.Streaming:

{
reader.BaseStream.Skip(ph.DataSize);
}
@@ -144,12 +146,14 @@ public class RarHeaderFactory
switch (StreamingMode)
{
case StreamingMode.Seekable:

{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:

{
var ms = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
if (fh.R4Salt is null && fh.Rar5CryptoInfo is null)
@@ -200,12 +204,14 @@ public class RarHeaderFactory
switch (StreamingMode)
{
case StreamingMode.Seekable:

{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:

{
//skip the data because it's useless?
reader.BaseStream.Skip(fh.CompressedSize);

@@ -70,11 +70,11 @@ internal sealed class RarCryptoWrapper : Stream

protected override void Dispose(bool disposing)
{
if (disposing)
if (_rijndael != null)
{
_rijndael.Dispose();
_rijndael = null!;
}

base.Dispose(disposing);
}
}

@@ -25,7 +25,7 @@ public abstract class RarEntry : Entry
/// <summary>
/// The path of the file internal to the Rar Archive.
/// </summary>
public override string? Key => FileHeader.FileName;
public override string Key => FileHeader.FileName;

public override string? LinkTarget => null;

@@ -15,14 +15,17 @@ namespace SharpCompress.Common.Rar;
public abstract class RarVolume : Volume
{
private readonly RarHeaderFactory _headerFactory;
private int _maxCompressionAlgorithm;
internal int _maxCompressionAlgorithm;

internal RarVolume(StreamingMode mode, Stream stream, ReaderOptions options, int index)
internal RarVolume(StreamingMode mode, Stream stream, ReaderOptions options, int index = 0)
: base(stream, options, index) => _headerFactory = new RarHeaderFactory(mode, options);

private ArchiveHeader? ArchiveHeader { get; set; }
#nullable disable
internal ArchiveHeader ArchiveHeader { get; private set; }

private StreamingMode Mode => _headerFactory.StreamingMode;
#nullable enable

internal StreamingMode Mode => _headerFactory.StreamingMode;

internal abstract IEnumerable<RarFilePart> ReadFileParts();

@@ -36,16 +39,19 @@ public abstract class RarVolume : Volume
switch (header.HeaderType)
{
case HeaderType.Mark:

{
lastMarkHeader = (MarkHeader)header;
}
break;
case HeaderType.Archive:

{
ArchiveHeader = (ArchiveHeader)header;
}
break;
case HeaderType.File:

{
var fh = (FileHeader)header;
if (_maxCompressionAlgorithm < fh.CompressionAlgorithm)
@@ -57,12 +63,14 @@ public abstract class RarVolume : Volume
}
break;
case HeaderType.Service:

{
var fh = (FileHeader)header;
if (fh.FileName == "CMT")
{
var part = CreateFilePart(lastMarkHeader!, fh);
var buffer = new byte[fh.CompressedSize];
fh.PackedStream.Read(buffer, 0, buffer.Length);
part.GetCompressedStream().Read(buffer, 0, buffer.Length);
Comment = Encoding.UTF8.GetString(buffer, 0, buffer.Length - 1);
}
}
@@ -97,7 +105,7 @@ public abstract class RarVolume : Volume
get
{
EnsureArchiveHeaderLoaded();
return ArchiveHeader?.IsFirstVolume ?? false;
return ArchiveHeader.IsFirstVolume;
}
}

@@ -109,7 +117,7 @@ public abstract class RarVolume : Volume
get
{
EnsureArchiveHeaderLoaded();
return ArchiveHeader?.IsVolume ?? false;
return ArchiveHeader.IsVolume;
}
}

@@ -122,7 +130,7 @@ public abstract class RarVolume : Volume
get
{
EnsureArchiveHeaderLoaded();
return ArchiveHeader?.IsSolid ?? false;
return ArchiveHeader.IsSolid;
}
}

@@ -35,7 +35,7 @@ internal class ArchiveDatabase
_packSizes.Clear();
_packCrCs.Clear();
_folders.Clear();
_numUnpackStreamsVector = null;
_numUnpackStreamsVector = null!;
_files.Clear();

_packStreamStartPositions.Clear();

@@ -1220,46 +1220,23 @@ internal class ArchiveReader

#region Public Methods

public void Open(Stream stream, bool lookForHeader)
public void Open(Stream stream)
{
Close();

_streamOrigin = stream.Position;
_streamEnding = stream.Length;

var canScan = lookForHeader ? 0x80000 - 20 : 0;
while (true)
// TODO: Check Signature!
_header = new byte[0x20];
for (var offset = 0; offset < 0x20; )
{
// TODO: Check Signature!
_header = new byte[0x20];
for (var offset = 0; offset < 0x20; )
var delta = stream.Read(_header, offset, 0x20 - offset);
if (delta == 0)
{
var delta = stream.Read(_header, offset, 0x20 - offset);
if (delta == 0)
{
throw new EndOfStreamException();
}

offset += delta;
throw new EndOfStreamException();
}

if (
!lookForHeader
|| _header
.AsSpan(0, length: 6)
.SequenceEqual<byte>([0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C])
)
{
break;
}

if (canScan == 0)
{
throw new InvalidFormatException("Unable to find 7z signature");
}

canScan--;
stream.Position = ++_streamOrigin;
offset += delta;
}

_stream = stream;

@@ -13,7 +13,7 @@ public class SevenZipEntry : Entry

public override long Crc => FilePart.Header.Crc ?? 0;

public override string? Key => FilePart.Header.Name;
public override string Key => FilePart.Header.Name;

public override string? LinkTarget => null;

@@ -41,7 +41,7 @@ internal class SevenZipFilePart : FilePart
{
if (!Header.HasStream)
{
throw new InvalidOperationException("File does not have a stream.");
return null!;
}
var folderStream = _database.GetFolderStream(_stream, Folder!, _database.PasswordProvider);

@@ -73,24 +73,34 @@ internal class SevenZipFilePart : FilePart
private const uint K_PPMD = 0x030401;
private const uint K_B_ZIP2 = 0x040202;

private CompressionType GetCompression()
internal CompressionType GetCompression()
{
if (Header.IsDir)
{
return CompressionType.None;
}

var coder = Folder.NotNull()._coders.First();
return coder._methodId._id switch
var coder = Folder!._coders.First();
switch (coder._methodId._id)
{
K_LZMA or K_LZMA2 => CompressionType.LZMA,
K_PPMD => CompressionType.PPMd,
K_B_ZIP2 => CompressionType.BZip2,
_ => throw new NotImplementedException()
};
case K_LZMA:
case K_LZMA2:
{
return CompressionType.LZMA;
}
case K_PPMD:
{
return CompressionType.PPMd;
}
case K_B_ZIP2:
{
return CompressionType.BZip2;
}
default:
throw new NotImplementedException();
}
}

internal bool IsEncrypted =>
!Header.IsDir
&& Folder?._coders.FindIndex(c => c._methodId._id == CMethodId.K_AES_ID) != -1;
Header.IsDir
? false
: Folder!._coders.FindIndex(c => c._methodId._id == CMethodId.K_AES_ID) != -1;
}

@@ -1,3 +1,5 @@
#nullable disable

using System;
using System.Buffers.Binary;
using System.IO;
@@ -11,8 +13,8 @@ internal sealed class TarHeader

public TarHeader(ArchiveEncoding archiveEncoding) => ArchiveEncoding = archiveEncoding;

internal string? Name { get; set; }
internal string? LinkName { get; set; }
internal string Name { get; set; }
internal string LinkName { get; set; }

internal long Mode { get; set; }
internal long UserId { get; set; }
@@ -20,7 +22,7 @@ internal sealed class TarHeader
internal long Size { get; set; }
internal DateTime LastModifiedTime { get; set; }
internal EntryType EntryType { get; set; }
internal Stream? PackedStream { get; set; }
internal Stream PackedStream { get; set; }
internal ArchiveEncoding ArchiveEncoding { get; }

internal const int BLOCK_SIZE = 512;
@@ -34,9 +36,7 @@ internal sealed class TarHeader
WriteOctalBytes(0, buffer, 116, 8); // group ID

//ArchiveEncoding.UTF8.GetBytes("magic").CopyTo(buffer, 257);
var nameByteCount = ArchiveEncoding
.GetEncoding()
.GetByteCount(Name.NotNull("Name is null"));
var nameByteCount = ArchiveEncoding.GetEncoding().GetByteCount(Name);
if (nameByteCount > 100)
{
// Set mock filename and filetype to indicate the next block is the actual name of the file
@@ -46,7 +46,7 @@ internal sealed class TarHeader
}
else
{
WriteStringBytes(ArchiveEncoding.Encode(Name.NotNull("Name is null")), buffer, 100);
WriteStringBytes(ArchiveEncoding.Encode(Name), buffer, 100);
WriteOctalBytes(Size, buffer, 124, 12);
var time = (long)(LastModifiedTime.ToUniversalTime() - EPOCH).TotalSeconds;
WriteOctalBytes(time, buffer, 136, 12);
@@ -77,7 +77,7 @@ internal sealed class TarHeader
//
// and then infinite recursion is occured in WriteLongFilenameHeader because truncated.Length is 102.
Name = ArchiveEncoding.Decode(
ArchiveEncoding.Encode(Name.NotNull("Name is null")),
ArchiveEncoding.Encode(Name),
0,
100 - ArchiveEncoding.GetEncoding().GetMaxByteCount(1)
);
@@ -87,7 +87,7 @@ internal sealed class TarHeader

private void WriteLongFilenameHeader(Stream output)
{
var nameBytes = ArchiveEncoding.Encode(Name.NotNull("Name is null"));
var nameBytes = ArchiveEncoding.Encode(Name);
output.Write(nameBytes, 0, nameBytes.Length);

// pad to multiple of BlockSize bytes, and make sure a terminating null is added
@@ -101,85 +101,57 @@ internal sealed class TarHeader

internal bool Read(BinaryReader reader)
{
string? longName = null;
string? longLinkName = null;
var hasLongValue = true;
byte[] buffer;
EntryType entryType;

do
{
buffer = ReadBlock(reader);

if (buffer.Length == 0)
{
return false;
}

entryType = ReadEntryType(buffer);

// LongName and LongLink headers can follow each other and need
// to apply to the header that follows them.
if (entryType == EntryType.LongName)
{
longName = ReadLongName(reader, buffer);
continue;
}
else if (entryType == EntryType.LongLink)
{
longLinkName = ReadLongName(reader, buffer);
continue;
}

hasLongValue = false;
} while (hasLongValue);

// Check header checksum
if (!checkChecksum(buffer))
var buffer = ReadBlock(reader);
if (buffer.Length == 0)
{
return false;
}

Name = longName ?? ArchiveEncoding.Decode(buffer, 0, 100).TrimNulls();
EntryType = entryType;
Size = ReadSize(buffer);

// for symlinks, additionally read the linkname
if (entryType == EntryType.SymLink || entryType == EntryType.HardLink)
if (ReadEntryType(buffer) == EntryType.SymLink)
{
LinkName = longLinkName ?? ArchiveEncoding.Decode(buffer, 157, 100).TrimNulls();
LinkName = ArchiveEncoding.Decode(buffer, 157, 100).TrimNulls();
}

Mode = ReadAsciiInt64Base8(buffer, 100, 7);
if (ReadEntryType(buffer) == EntryType.LongName)
{
Name = ReadLongName(reader, buffer);
buffer = ReadBlock(reader);
}
else
{
Name = ArchiveEncoding.Decode(buffer, 0, 100).TrimNulls();
}

if (entryType == EntryType.Directory)
EntryType = ReadEntryType(buffer);
Size = ReadSize(buffer);

Mode = ReadAsciiInt64Base8(buffer, 100, 7);
if (EntryType == EntryType.Directory)
{
Mode |= 0b1_000_000_000;
}

UserId = ReadAsciiInt64Base8oldGnu(buffer, 108, 7);
GroupId = ReadAsciiInt64Base8oldGnu(buffer, 116, 7);

var unixTimeStamp = ReadAsciiInt64Base8(buffer, 136, 11);

LastModifiedTime = EPOCH.AddSeconds(unixTimeStamp).ToLocalTime();

Magic = ArchiveEncoding.Decode(buffer, 257, 6).TrimNulls();

if (!string.IsNullOrEmpty(Magic) && "ustar".Equals(Magic))
{
var namePrefix = ArchiveEncoding.Decode(buffer, 345, 157).TrimNulls();

var namePrefix = ArchiveEncoding.Decode(buffer, 345, 157);
namePrefix = namePrefix.TrimNulls();
if (!string.IsNullOrEmpty(namePrefix))
{
Name = namePrefix + "/" + Name;
}
}

if (entryType != EntryType.LongName && Name.Length == 0)
if (EntryType != EntryType.LongName && Name.Length == 0)
{
return false;
}

return true;
}

@@ -317,42 +289,6 @@ internal sealed class TarHeader
(byte)' '
};

internal static bool checkChecksum(byte[] buf)
{
const int eightSpacesChksum = 256;
var buffer = new Span<byte>(buf).Slice(0, 512);
int posix_sum = eightSpacesChksum;
int sun_sum = eightSpacesChksum;

foreach (byte b in buffer)
{
posix_sum += b;
sun_sum += unchecked((sbyte)b);
}

// Special case, empty file header
if (posix_sum == eightSpacesChksum)
{
return true;
}

// Remove current checksum from calculation
foreach (byte b in buffer.Slice(148, 8))
{
posix_sum -= b;
sun_sum -= unchecked((sbyte)b);
}

// Read and compare checksum for header
var crc = ReadAsciiInt64Base8(buf, 148, 7);
if (crc != posix_sum && crc != sun_sum)
{
return false;
}

return true;
}

internal static int RecalculateChecksum(byte[] buf)
{
// Set default value for checksum. That is 8 spaces.
@@ -387,5 +323,5 @@ internal sealed class TarHeader

public long? DataStartPosition { get; set; }

public string? Magic { get; set; }
public string Magic { get; set; }
}

@@ -1,3 +1,5 @@
|
||||
#nullable disable
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
@@ -8,9 +10,9 @@ namespace SharpCompress.Common.Tar;
|
||||
|
||||
public class TarEntry : Entry
|
||||
{
|
||||
private readonly TarFilePart? _filePart;
|
||||
private readonly TarFilePart _filePart;
|
||||
|
||||
internal TarEntry(TarFilePart? filePart, CompressionType type)
|
||||
internal TarEntry(TarFilePart filePart, CompressionType type)
|
||||
{
|
||||
_filePart = filePart;
|
||||
CompressionType = type;
|
||||
@@ -20,15 +22,15 @@ public class TarEntry : Entry
|
||||
|
||||
public override long Crc => 0;
|
||||
|
||||
public override string? Key => _filePart?.Header.Name;
|
||||
public override string Key => _filePart.Header.Name;
|
||||
|
||||
public override string? LinkTarget => _filePart?.Header.LinkName;
|
||||
public override string LinkTarget => _filePart.Header.LinkName;
|
||||
|
||||
public override long CompressedSize => _filePart?.Header.Size ?? 0;
|
||||
public override long CompressedSize => _filePart.Header.Size;
|
||||
|
||||
public override long Size => _filePart?.Header.Size ?? 0;
|
||||
public override long Size => _filePart.Header.Size;
|
||||
|
||||
public override DateTime? LastModifiedTime => _filePart?.Header.LastModifiedTime;
|
||||
public override DateTime? LastModifiedTime => _filePart.Header.LastModifiedTime;
|
||||
|
||||
public override DateTime? CreatedTime => null;
|
||||
|
||||
@@ -38,17 +40,17 @@ public class TarEntry : Entry
|
||||
|
||||
public override bool IsEncrypted => false;
|
||||
|
||||
public override bool IsDirectory => _filePart?.Header.EntryType == EntryType.Directory;
|
||||
public override bool IsDirectory => _filePart.Header.EntryType == EntryType.Directory;
|
||||
|
||||
public override bool IsSplitAfter => false;
|
||||
|
||||
public long Mode => _filePart?.Header.Mode ?? 0;
|
||||
public long Mode => _filePart.Header.Mode;
|
||||
|
||||
public long UserID => _filePart?.Header.UserId ?? 0;
|
||||
public long UserID => _filePart.Header.UserId;
|
||||
|
||||
public long GroupId => _filePart?.Header.GroupId ?? 0;
|
||||
public long GroupId => _filePart.Header.GroupId;
|
||||
|
||||
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
|
||||
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
|
||||
|
||||
internal static IEnumerable<TarEntry> GetEntries(
|
||||
StreamingMode mode,
|
||||
@@ -57,17 +59,17 @@ public class TarEntry : Entry
|
||||
ArchiveEncoding archiveEncoding
|
||||
)
|
||||
{
|
||||
foreach (var header in TarHeaderFactory.ReadHeader(mode, stream, archiveEncoding))
|
||||
foreach (var h in TarHeaderFactory.ReadHeader(mode, stream, archiveEncoding))
|
||||
{
|
||||
if (header != null)
|
||||
if (h != null)
|
||||
{
|
||||
if (mode == StreamingMode.Seekable)
|
||||
{
|
||||
yield return new TarEntry(new TarFilePart(header, stream), compressionType);
|
||||
yield return new TarEntry(new TarFilePart(h, stream), compressionType);
|
||||
}
|
||||
else
|
||||
{
|
||||
yield return new TarEntry(new TarFilePart(header, null), compressionType);
|
||||
yield return new TarEntry(new TarFilePart(h, null), compressionType);
|
||||
}
|
||||
}
|
||||
else
|
||||
|
||||
@@ -5,9 +5,9 @@ namespace SharpCompress.Common.Tar;
|
||||
|
||||
internal sealed class TarFilePart : FilePart
|
||||
{
|
||||
private readonly Stream? _seekableStream;
|
||||
private readonly Stream _seekableStream;
|
||||
|
||||
internal TarFilePart(TarHeader header, Stream? seekableStream)
|
||||
internal TarFilePart(TarHeader header, Stream seekableStream)
|
||||
: base(header.ArchiveEncoding)
|
||||
{
|
||||
_seekableStream = seekableStream;
|
||||
@@ -16,16 +16,16 @@ internal sealed class TarFilePart : FilePart
|
||||
|
||||
internal TarHeader Header { get; }
|
||||
|
||||
internal override string? FilePartName => Header?.Name;
|
||||
internal override string FilePartName => Header.Name;
|
||||
|
||||
internal override Stream GetCompressedStream()
|
||||
{
|
||||
if (_seekableStream != null)
|
||||
{
|
||||
_seekableStream.Position = Header.DataStartPosition ?? 0;
|
||||
_seekableStream.Position = Header.DataStartPosition!.Value;
|
||||
return new TarReadOnlySubStream(_seekableStream, Header.Size);
|
||||
}
|
||||
return Header.PackedStream.NotNull();
|
||||
return Header.PackedStream;
|
||||
}
|
||||
|
||||
internal override Stream? GetRawStream() => null;
|
||||
|
||||
@@ -28,6 +28,7 @@ internal static class TarHeaderFactory
|
||||
switch (mode)
|
||||
{
|
||||
case StreamingMode.Seekable:
|
||||
|
||||
{
|
||||
header.DataStartPosition = reader.BaseStream.Position;
|
||||
|
||||
@@ -36,6 +37,7 @@ internal static class TarHeaderFactory
|
||||
}
|
||||
break;
|
||||
case StreamingMode.Streaming:
|
||||
|
||||
{
|
||||
header.PackedStream = new TarReadOnlySubStream(stream, header.Size);
|
||||
}
|
||||
|
||||
@@ -9,11 +9,11 @@ public abstract class Volume : IVolume
|
||||
{
|
||||
private readonly Stream _actualStream;
|
||||
|
||||
internal Volume(Stream stream, ReaderOptions? readerOptions, int index = 0)
|
||||
internal Volume(Stream stream, ReaderOptions readerOptions, int index = 0)
|
||||
{
|
||||
Index = index;
|
||||
ReaderOptions = readerOptions ?? new ReaderOptions();
|
||||
if (ReaderOptions.LeaveStreamOpen)
|
||||
ReaderOptions = readerOptions;
|
||||
if (readerOptions.LeaveStreamOpen)
|
||||
{
|
||||
stream = NonDisposingStream.Create(stream);
|
||||
}
|
||||
@@ -32,7 +32,7 @@ public abstract class Volume : IVolume
|
||||
|
||||
public virtual int Index { get; internal set; }
|
||||
|
||||
public string? FileName => (_actualStream as FileStream)?.Name;
|
||||
public string FileName => (_actualStream as FileStream)?.Name!;
|
||||
|
||||
/// <summary>
|
||||
/// RarArchive is part of a multi-part archive.
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
#nullable disable
|
||||
|
||||
using System;
|
||||
using System.Buffers.Binary;
|
||||
using System.Collections.Generic;
|
||||
@@ -18,21 +20,21 @@ internal abstract class ZipFileEntry : ZipHeader
|
||||
{
|
||||
get
|
||||
{
|
||||
if (Name?.EndsWith('/') ?? false)
|
||||
if (Name.EndsWith('/'))
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
//.NET Framework 4.5 : System.IO.Compression::CreateFromDirectory() probably writes backslashes to headers
|
||||
return CompressedSize == 0 && UncompressedSize == 0 && (Name?.EndsWith('\\') ?? false);
|
||||
return CompressedSize == 0 && UncompressedSize == 0 && Name.EndsWith('\\');
|
||||
}
|
||||
}
|
||||
|
||||
internal Stream? PackedStream { get; set; }
|
||||
internal Stream PackedStream { get; set; }
|
||||
|
||||
internal ArchiveEncoding ArchiveEncoding { get; }
|
||||
|
||||
internal string? Name { get; set; }
|
||||
internal string Name { get; set; }
|
||||
|
||||
internal HeaderFlags Flags { get; set; }
|
||||
|
||||
@@ -46,7 +48,7 @@ internal abstract class ZipFileEntry : ZipHeader
|
||||
|
||||
internal List<ExtraData> Extra { get; set; }
|
||||
|
||||
public string? Password { get; set; }
|
||||
public string Password { get; set; }
|
||||
|
||||
internal PkwareTraditionalEncryptionData ComposeEncryptionData(Stream archiveStream)
|
||||
{
|
||||
@@ -63,7 +65,7 @@ internal abstract class ZipFileEntry : ZipHeader
|
||||
return encryptionData;
|
||||
}
|
||||
|
||||
internal WinzipAesEncryptionData? WinzipAesEncryptionData { get; set; }
|
||||
internal WinzipAesEncryptionData WinzipAesEncryptionData { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// The last modified date as read from the Local or Central Directory header.
|
||||
@@ -117,7 +119,7 @@ internal abstract class ZipFileEntry : ZipHeader
|
||||
}
|
||||
}
|
||||
|
||||
internal ZipFilePart? Part { get; set; }
|
||||
internal ZipFilePart Part { get; set; }
|
||||
|
||||
internal bool IsZip64 => CompressedSize >= uint.MaxValue;
|
||||
}
|
||||
|
||||
@@ -103,7 +103,7 @@ internal class PkwareTraditionalEncryptionData
|
||||
|
||||
internal byte[] StringToByteArray(string value)
|
||||
{
|
||||
var a = _archiveEncoding.GetPasswordEncoding().GetBytes(value);
|
||||
var a = _archiveEncoding.Password.GetBytes(value);
|
||||
return a;
|
||||
}
|
||||
|
||||
|
||||
@@ -42,16 +42,16 @@ internal class SeekableZipFilePart : ZipFilePart
|
||||
|
||||
protected override Stream CreateBaseStream()
|
||||
{
|
||||
BaseStream.Position = Header.DataStartPosition.NotNull();
|
||||
BaseStream.Position = Header.DataStartPosition!.Value;
|
||||
|
||||
if (
|
||||
(Header.CompressedSize == 0)
|
||||
&& FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor)
|
||||
&& _directoryEntryHeader.HasData
|
||||
&& (_directoryEntryHeader.CompressedSize != 0)
|
||||
&& (_directoryEntryHeader?.HasData == true)
|
||||
&& (_directoryEntryHeader?.CompressedSize != 0)
|
||||
)
|
||||
{
|
||||
return new ReadOnlySubStream(BaseStream, _directoryEntryHeader.CompressedSize);
|
||||
return new ReadOnlySubStream(BaseStream, _directoryEntryHeader!.CompressedSize);
|
||||
}
|
||||
|
||||
return BaseStream;
|
||||
|
||||
@@ -13,7 +13,7 @@ internal sealed class StreamingZipFilePart : ZipFilePart
|
||||
internal StreamingZipFilePart(ZipFileEntry header, Stream stream)
|
||||
: base(header, stream) { }
|
||||
|
||||
protected override Stream CreateBaseStream() => Header.PackedStream.NotNull();
|
||||
protected override Stream CreateBaseStream() => Header.PackedStream;
|
||||
|
||||
internal override Stream GetCompressedStream()
|
||||
{
|
||||
|
||||
@@ -36,19 +36,16 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
|
||||
uint headerBytes = 0;
|
||||
if (
|
||||
_lastEntryHeader != null
|
||||
&& FlagUtility.HasFlag(_lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor)
|
||||
&& (
|
||||
FlagUtility.HasFlag(_lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor)
|
||||
|| _lastEntryHeader.IsZip64
|
||||
)
|
||||
)
|
||||
{
|
||||
if (_lastEntryHeader.Part is null)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
reader = ((StreamingZipFilePart)_lastEntryHeader.Part).FixStreamedFileLocation(
|
||||
ref rewindableStream
|
||||
);
|
||||
|
||||
var pos = rewindableStream.CanSeek ? (long?)rewindableStream.Position : null;
|
||||
|
||||
var crc = reader.ReadUInt32();
|
||||
if (crc == POST_DATA_DESCRIPTOR)
|
||||
{
|
||||
@@ -81,60 +78,6 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
|
||||
_lastEntryHeader.DataStartPosition = pos - _lastEntryHeader.CompressedSize;
|
||||
}
|
||||
}
|
||||
else if (_lastEntryHeader != null && _lastEntryHeader.IsZip64)
|
||||
{
|
||||
if (_lastEntryHeader.Part is null)
|
||||
continue;
|
||||
|
||||
reader = ((StreamingZipFilePart)_lastEntryHeader.Part).FixStreamedFileLocation(
|
||||
ref rewindableStream
|
||||
);
|
||||
|
||||
var pos = rewindableStream.CanSeek ? (long?)rewindableStream.Position : null;
|
||||
|
||||
headerBytes = reader.ReadUInt32();
|
||||
|
||||
var version = reader.ReadUInt16();
|
||||
var flags = (HeaderFlags)reader.ReadUInt16();
|
||||
var compressionMethod = (ZipCompressionMethod)reader.ReadUInt16();
|
||||
var lastModifiedDate = reader.ReadUInt16();
|
||||
var lastModifiedTime = reader.ReadUInt16();
|
||||
|
||||
var crc = reader.ReadUInt32();
|
||||
|
||||
if (crc == POST_DATA_DESCRIPTOR)
|
||||
{
|
||||
crc = reader.ReadUInt32();
|
||||
}
|
||||
_lastEntryHeader.Crc = crc;
|
||||
|
||||
// The DataDescriptor can be either 64bit or 32bit
|
||||
var compressed_size = reader.ReadUInt32();
|
||||
var uncompressed_size = reader.ReadUInt32();
|
||||
|
||||
// Check if we have header or 64bit DataDescriptor
|
||||
var test_header = !(headerBytes == 0x04034b50 || headerBytes == 0x02014b50);
|
||||
|
||||
var test_64bit = ((long)uncompressed_size << 32) | compressed_size;
|
||||
if (test_64bit == _lastEntryHeader.CompressedSize && test_header)
|
||||
{
|
||||
_lastEntryHeader.UncompressedSize =
|
||||
((long)reader.ReadUInt32() << 32) | headerBytes;
|
||||
headerBytes = reader.ReadUInt32();
|
||||
}
|
||||
else
|
||||
{
|
||||
_lastEntryHeader.UncompressedSize = uncompressed_size;
|
||||
}
|
||||
|
||||
if (pos.HasValue)
|
||||
{
|
||||
_lastEntryHeader.DataStartPosition = pos - _lastEntryHeader.CompressedSize;
|
||||
|
||||
// 4 = First 4 bytes of the entry header (i.e. 50 4B 03 04)
|
||||
rewindableStream.Position = pos.Value + 4;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
headerBytes = reader.ReadUInt32();
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
#nullable disable
|
||||
|
||||
using System;
|
||||
using System.Buffers.Binary;
|
||||
using System.Security.Cryptography;
|
||||
@@ -8,7 +10,12 @@ internal class WinzipAesEncryptionData
|
||||
{
|
||||
private const int RFC2898_ITERATIONS = 1000;
|
||||
|
||||
private readonly byte[] _salt;
|
||||
private readonly WinzipAesKeySize _keySize;
|
||||
private readonly byte[] _passwordVerifyValue;
|
||||
private readonly string _password;
|
||||
|
||||
private byte[] _generatedVerifyValue;
|
||||
|
||||
internal WinzipAesEncryptionData(
|
||||
WinzipAesKeySize keySize,
|
||||
@@ -18,28 +25,10 @@ internal class WinzipAesEncryptionData
|
||||
)
|
||||
{
|
||||
_keySize = keySize;
|
||||
|
||||
#if NETFRAMEWORK || NETSTANDARD2_0
|
||||
var rfc2898 = new Rfc2898DeriveBytes(password, salt, RFC2898_ITERATIONS);
|
||||
#else
|
||||
var rfc2898 = new Rfc2898DeriveBytes(
|
||||
password,
|
||||
salt,
|
||||
RFC2898_ITERATIONS,
|
||||
HashAlgorithmName.SHA1
|
||||
);
|
||||
#endif
|
||||
|
||||
KeyBytes = rfc2898.GetBytes(KeySizeInBytes); // 16 or 24 or 32 ???
|
||||
IvBytes = rfc2898.GetBytes(KeySizeInBytes);
|
||||
var generatedVerifyValue = rfc2898.GetBytes(2);
|
||||
|
||||
var verify = BinaryPrimitives.ReadInt16LittleEndian(passwordVerifyValue);
|
||||
var generated = BinaryPrimitives.ReadInt16LittleEndian(generatedVerifyValue);
|
||||
if (verify != generated)
|
||||
{
|
||||
throw new InvalidFormatException("bad password");
|
||||
}
|
||||
_salt = salt;
|
||||
_passwordVerifyValue = passwordVerifyValue;
|
||||
_password = password;
|
||||
Initialize();
|
||||
}
|
||||
|
||||
internal byte[] IvBytes { get; set; }
|
||||
@@ -56,4 +45,32 @@ internal class WinzipAesEncryptionData
|
||||
WinzipAesKeySize.KeySize256 => 32,
|
||||
_ => throw new InvalidOperationException(),
|
||||
};
|
||||
|
||||
private void Initialize()
|
||||
{
|
||||
#if NETFRAMEWORK || NETSTANDARD2_0
|
||||
var rfc2898 = new Rfc2898DeriveBytes(_password, _salt, RFC2898_ITERATIONS);
|
||||
#else
|
||||
var rfc2898 = new Rfc2898DeriveBytes(
|
||||
_password,
|
||||
_salt,
|
||||
RFC2898_ITERATIONS,
|
||||
HashAlgorithmName.SHA1
|
||||
);
|
||||
#endif
|
||||
|
||||
KeyBytes = rfc2898.GetBytes(KeySizeInBytes); // 16 or 24 or 32 ???
|
||||
IvBytes = rfc2898.GetBytes(KeySizeInBytes);
|
||||
_generatedVerifyValue = rfc2898.GetBytes(2);
|
||||
|
||||
var verify = BinaryPrimitives.ReadInt16LittleEndian(_passwordVerifyValue);
|
||||
if (_password != null)
|
||||
{
|
||||
var generated = BinaryPrimitives.ReadInt16LittleEndian(_generatedVerifyValue);
|
||||
if (verify != generated)
|
||||
{
|
||||
throw new InvalidFormatException("bad password");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,11 +4,6 @@ internal enum ZipCompressionMethod
|
||||
{
|
||||
None = 0,
|
||||
Shrink = 1,
|
||||
Reduce1 = 2,
|
||||
Reduce2 = 3,
|
||||
Reduce3 = 4,
|
||||
Reduce4 = 5,
|
||||
Explode = 6,
|
||||
Deflate = 8,
|
||||
Deflate64 = 9,
|
||||
BZip2 = 12,
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
#nullable disable
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
@@ -6,23 +8,22 @@ namespace SharpCompress.Common.Zip;
|
||||
|
||||
public class ZipEntry : Entry
|
||||
{
|
||||
private readonly ZipFilePart? _filePart;
|
||||
private readonly ZipFilePart _filePart;
|
||||
|
||||
internal ZipEntry(ZipFilePart? filePart)
|
||||
internal ZipEntry(ZipFilePart filePart)
|
||||
{
|
||||
if (filePart == null)
|
||||
if (filePart != null)
|
||||
{
|
||||
return;
|
||||
_filePart = filePart;
|
||||
LastModifiedTime = Utility.DosDateToDateTime(
|
||||
filePart.Header.LastModifiedDate,
|
||||
filePart.Header.LastModifiedTime
|
||||
);
|
||||
}
|
||||
_filePart = filePart;
|
||||
LastModifiedTime = Utility.DosDateToDateTime(
|
||||
filePart.Header.LastModifiedDate,
|
||||
filePart.Header.LastModifiedTime
|
||||
);
|
||||
}
|
||||
|
||||
public override CompressionType CompressionType =>
|
||||
_filePart?.Header.CompressionMethod switch
|
||||
_filePart.Header.CompressionMethod switch
|
||||
{
|
||||
ZipCompressionMethod.BZip2 => CompressionType.BZip2,
|
||||
ZipCompressionMethod.Deflate => CompressionType.Deflate,
|
||||
@@ -31,23 +32,18 @@ public class ZipEntry : Entry
|
||||
ZipCompressionMethod.PPMd => CompressionType.PPMd,
|
||||
ZipCompressionMethod.None => CompressionType.None,
|
||||
ZipCompressionMethod.Shrink => CompressionType.Shrink,
|
||||
ZipCompressionMethod.Reduce1 => CompressionType.Reduce1,
|
||||
ZipCompressionMethod.Reduce2 => CompressionType.Reduce2,
|
||||
ZipCompressionMethod.Reduce3 => CompressionType.Reduce3,
|
||||
ZipCompressionMethod.Reduce4 => CompressionType.Reduce4,
|
||||
ZipCompressionMethod.Explode => CompressionType.Explode,
|
||||
_ => CompressionType.Unknown
|
||||
};
|
||||
|
||||
public override long Crc => _filePart?.Header.Crc ?? 0;
|
||||
public override long Crc => _filePart.Header.Crc;
|
||||
|
||||
public override string? Key => _filePart?.Header.Name;
|
||||
public override string Key => _filePart.Header.Name;
|
||||
|
||||
public override string? LinkTarget => null;
|
||||
public override string LinkTarget => null;
|
||||
|
||||
public override long CompressedSize => _filePart?.Header.CompressedSize ?? 0;
|
||||
public override long CompressedSize => _filePart.Header.CompressedSize;
|
||||
|
||||
public override long Size => _filePart?.Header.UncompressedSize ?? 0;
|
||||
public override long Size => _filePart.Header.UncompressedSize;
|
||||
|
||||
public override DateTime? LastModifiedTime { get; }
|
||||
|
||||
@@ -58,11 +54,11 @@ public class ZipEntry : Entry
|
||||
public override DateTime? ArchivedTime => null;
|
||||
|
||||
public override bool IsEncrypted =>
|
||||
FlagUtility.HasFlag(_filePart?.Header.Flags ?? HeaderFlags.None, HeaderFlags.Encrypted);
|
||||
FlagUtility.HasFlag(_filePart.Header.Flags, HeaderFlags.Encrypted);
|
||||
|
||||
public override bool IsDirectory => _filePart?.Header.IsDirectory ?? false;
|
||||
public override bool IsDirectory => _filePart.Header.IsDirectory;
|
||||
|
||||
public override bool IsSplitAfter => false;
|
||||
|
||||
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
|
||||
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
|
||||
}
|
||||
|
||||
@@ -7,10 +7,8 @@ using SharpCompress.Compressors;
|
||||
using SharpCompress.Compressors.BZip2;
|
||||
using SharpCompress.Compressors.Deflate;
|
||||
using SharpCompress.Compressors.Deflate64;
|
||||
using SharpCompress.Compressors.Explode;
|
||||
using SharpCompress.Compressors.LZMA;
|
||||
using SharpCompress.Compressors.PPMd;
|
||||
using SharpCompress.Compressors.Reduce;
|
||||
using SharpCompress.Compressors.Shrink;
|
||||
using SharpCompress.Compressors.Xz;
|
||||
using SharpCompress.IO;
|
||||
@@ -31,7 +29,7 @@ internal abstract class ZipFilePart : FilePart
|
||||
internal Stream BaseStream { get; }
|
||||
internal ZipFileEntry Header { get; set; }
|
||||
|
||||
internal override string? FilePartName => Header.Name;
|
||||
internal override string FilePartName => Header.Name;
|
||||
|
||||
internal override Stream GetCompressedStream()
|
||||
{
|
||||
@@ -91,32 +89,6 @@ internal abstract class ZipFilePart : FilePart
|
||||
Header.UncompressedSize
|
||||
);
|
||||
}
|
||||
case ZipCompressionMethod.Reduce1:
|
||||
{
|
||||
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 1);
|
||||
}
|
||||
case ZipCompressionMethod.Reduce2:
|
||||
{
|
||||
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 2);
|
||||
}
|
||||
case ZipCompressionMethod.Reduce3:
|
||||
{
|
||||
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 3);
|
||||
}
|
||||
case ZipCompressionMethod.Reduce4:
|
||||
{
|
||||
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 4);
|
||||
}
|
||||
case ZipCompressionMethod.Explode:
|
||||
{
|
||||
return new ExplodeStream(
|
||||
stream,
|
||||
Header.CompressedSize,
|
||||
Header.UncompressedSize,
|
||||
Header.Flags
|
||||
);
|
||||
}
|
||||
|
||||
case ZipCompressionMethod.Deflate:
|
||||
{
|
||||
return new DeflateStream(stream, CompressionMode.Decompress);
|
||||
@@ -231,10 +203,6 @@ internal abstract class ZipFilePart : FilePart
|
||||
{
|
||||
case ZipCompressionMethod.None:
|
||||
case ZipCompressionMethod.Shrink:
|
||||
case ZipCompressionMethod.Reduce1:
|
||||
case ZipCompressionMethod.Reduce2:
|
||||
case ZipCompressionMethod.Reduce3:
|
||||
case ZipCompressionMethod.Reduce4:
|
||||
case ZipCompressionMethod.Deflate:
|
||||
case ZipCompressionMethod.Deflate64:
|
||||
case ZipCompressionMethod.BZip2:
|
||||
|
||||
@@ -55,13 +55,7 @@ internal class ZipHeaderFactory
|
||||
}
|
||||
case POST_DATA_DESCRIPTOR:
|
||||
{
|
||||
if (
|
||||
_lastEntryHeader != null
|
||||
&& FlagUtility.HasFlag(
|
||||
_lastEntryHeader.NotNull().Flags,
|
||||
HeaderFlags.UsePostDataDescriptor
|
||||
)
|
||||
)
|
||||
if (FlagUtility.HasFlag(_lastEntryHeader!.Flags, HeaderFlags.UsePostDataDescriptor))
|
||||
{
|
||||
_lastEntryHeader.Crc = reader.ReadUInt32();
|
||||
_lastEntryHeader.CompressedSize = zip64
|
||||
|
||||
@@ -69,7 +69,7 @@ public sealed class BZip2Stream : Stream
|
||||
|
||||
public override void SetLength(long value) => stream.SetLength(value);
|
||||
|
||||
#if !NETFRAMEWORK&& !NETSTANDARD2_0
|
||||
#if !NETFRAMEWORK && !NETSTANDARD2_0
|
||||
|
||||
public override int Read(Span<byte> buffer) => stream.Read(buffer);
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#nullable disable
|
||||
#nullable disable
|
||||
|
||||
using System;
|
||||
using System.IO;
|
||||
@@ -42,17 +42,14 @@ internal class CBZip2InputStream : Stream
|
||||
private static void Cadvise()
|
||||
{
|
||||
//System.out.Println("CRC Error");
|
||||
throw new InvalidOperationException("BZip2 error");
|
||||
//throw new CCoruptionError();
|
||||
}
|
||||
|
||||
private static void BadBGLengths() => Cadvise();
|
||||
|
||||
private static void BitStreamEOF() => Cadvise();
|
||||
|
||||
private static void CompressedStreamEOF()
|
||||
{
|
||||
throw new InvalidOperationException("BZip2 compressed file ends unexpectedly");
|
||||
}
|
||||
private static void CompressedStreamEOF() => Cadvise();
|
||||
|
||||
private void MakeMaps()
|
||||
{
|
||||
|
||||
@@ -1,746 +0,0 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
|
||||
namespace SharpCompress.Compressors.Explode;
|
||||
|
||||
public class ExplodeStream : Stream
|
||||
{
|
||||
private const int INVALID_CODE = 99;
|
||||
private const int WSIZE = 64 * 1024;
|
||||
|
||||
private readonly long unCompressedSize;
|
||||
private readonly int compressedSize;
|
||||
private readonly HeaderFlags generalPurposeBitFlag;
|
||||
private readonly Stream inStream;
|
||||
|
||||
private huftNode[]? hufLiteralCodeTable; /* literal code table */
|
||||
private huftNode[] hufLengthCodeTable = []; /* length code table */
|
||||
private huftNode[] hufDistanceCodeTable = []; /* distance code table */
|
||||
|
||||
private int bitsForLiteralCodeTable;
|
||||
private int bitsForLengthCodeTable;
|
||||
private int bitsForDistanceCodeTable;
|
||||
private int numOfUncodedLowerDistanceBits; /* number of uncoded lower distance bits */
|
||||
|
||||
private ulong bitBuffer;
|
||||
private int bitBufferCount;
|
||||
|
||||
private readonly byte[] windowsBuffer;
|
||||
private uint maskForLiteralCodeTable;
|
||||
private uint maskForLengthCodeTable;
|
||||
private uint maskForDistanceCodeTable;
|
||||
private uint maskForDistanceLowBits;
|
||||
private long outBytesCount;
|
||||
|
||||
private int windowIndex;
|
||||
private int distance;
|
||||
private int length;
|
||||
|
||||
internal ExplodeStream(
|
||||
Stream inStr,
|
||||
long compressedSize,
|
||||
long uncompressedSize,
|
||||
HeaderFlags generalPurposeBitFlag
|
||||
)
|
||||
{
|
||||
inStream = inStr;
|
||||
this.compressedSize = (int)compressedSize;
|
||||
unCompressedSize = (long)uncompressedSize;
|
||||
this.generalPurposeBitFlag = generalPurposeBitFlag;
|
||||
explode_SetTables();
|
||||
|
||||
windowsBuffer = new byte[WSIZE];
|
||||
explode_var_init();
|
||||
}
|
||||
|
||||
public override void Flush()
|
||||
{
|
||||
throw new NotImplementedException();
|
||||
}
|
||||
|
||||
public override long Seek(long offset, SeekOrigin origin)
|
||||
{
|
||||
throw new NotImplementedException();
|
||||
}
|
||||
|
||||
public override void SetLength(long value)
|
||||
{
|
||||
throw new NotImplementedException();
|
||||
}
|
||||
|
||||
public override void Write(byte[] buffer, int offset, int count)
|
||||
{
|
||||
throw new NotImplementedException();
|
||||
}
|
||||
|
||||
public override bool CanRead => true;
|
||||
public override bool CanSeek => false;
|
||||
public override bool CanWrite => false;
|
||||
public override long Length => unCompressedSize;
|
||||
public override long Position
|
||||
{
|
||||
get => outBytesCount;
|
||||
set { }
|
||||
}
|
||||
|
||||
static uint[] mask_bits = new uint[]
|
||||
{
|
||||
0x0000,
|
||||
0x0001,
|
||||
0x0003,
|
||||
0x0007,
|
||||
0x000f,
|
||||
0x001f,
|
||||
0x003f,
|
||||
0x007f,
|
||||
0x00ff,
|
||||
0x01ff,
|
||||
0x03ff,
|
||||
0x07ff,
|
||||
0x0fff,
|
||||
0x1fff,
|
||||
0x3fff,
|
||||
0x7fff,
|
||||
0xffff
|
||||
};
|
||||
|
||||
/* Tables for length and distance */
|
||||
static int[] cplen2 = new int[]
|
||||
{
|
||||
2,
|
||||
3,
|
||||
4,
|
||||
5,
|
||||
6,
|
||||
7,
|
||||
8,
|
||||
9,
|
||||
10,
|
||||
11,
|
||||
12,
|
||||
13,
|
||||
14,
|
||||
15,
|
||||
16,
|
||||
17,
|
||||
18,
|
||||
19,
|
||||
20,
|
||||
21,
|
||||
22,
|
||||
23,
|
||||
24,
|
||||
25,
|
||||
26,
|
||||
27,
|
||||
28,
|
||||
29,
|
||||
30,
|
||||
31,
|
||||
32,
|
||||
33,
|
||||
34,
|
||||
35,
|
||||
36,
|
||||
37,
|
||||
38,
|
||||
39,
|
||||
40,
|
||||
41,
|
||||
42,
|
||||
43,
|
||||
44,
|
||||
45,
|
||||
46,
|
||||
47,
|
||||
48,
|
||||
49,
|
||||
50,
|
||||
51,
|
||||
52,
|
||||
53,
|
||||
54,
|
||||
55,
|
||||
56,
|
||||
57,
|
||||
58,
|
||||
59,
|
||||
60,
|
||||
61,
|
||||
62,
|
||||
63,
|
||||
64,
|
||||
65
|
||||
};
|
||||
|
||||
static int[] cplen3 = new int[]
|
||||
{
|
||||
3,
|
||||
4,
|
||||
5,
|
||||
6,
|
||||
7,
|
||||
8,
|
||||
9,
|
||||
10,
|
||||
11,
|
||||
12,
|
||||
13,
|
||||
14,
|
||||
15,
|
||||
16,
|
||||
17,
|
||||
18,
|
||||
19,
|
||||
20,
|
||||
21,
|
||||
22,
|
||||
23,
|
||||
24,
|
||||
25,
|
||||
26,
|
||||
27,
|
||||
28,
|
||||
29,
|
||||
30,
|
||||
31,
|
||||
32,
|
||||
33,
|
||||
34,
|
||||
35,
|
||||
36,
|
||||
37,
|
||||
38,
|
||||
39,
|
||||
40,
|
||||
41,
|
||||
42,
|
||||
43,
|
||||
44,
|
||||
45,
|
||||
46,
|
||||
47,
|
||||
48,
|
||||
49,
|
||||
50,
|
||||
51,
|
||||
52,
|
||||
53,
|
||||
54,
|
||||
55,
|
||||
56,
|
||||
57,
|
||||
58,
|
||||
59,
|
||||
60,
|
||||
61,
|
||||
62,
|
||||
63,
|
||||
64,
|
||||
65,
|
||||
66
|
||||
};
|
||||
|
||||
static int[] extra = new int[]
|
||||
{
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
8
|
||||
};
|
||||
|
||||
static int[] cpdist4 = new int[]
|
||||
{
|
||||
1,
|
||||
65,
|
||||
129,
|
||||
193,
|
||||
257,
|
||||
321,
|
||||
385,
|
||||
449,
|
||||
513,
|
||||
577,
|
||||
641,
|
||||
705,
|
||||
769,
|
||||
833,
|
||||
897,
|
||||
961,
|
||||
1025,
|
||||
1089,
|
||||
1153,
|
||||
1217,
|
||||
1281,
|
||||
1345,
|
||||
1409,
|
||||
1473,
|
||||
1537,
|
||||
1601,
|
||||
1665,
|
||||
1729,
|
||||
1793,
|
||||
1857,
|
||||
1921,
|
||||
1985,
|
||||
2049,
|
||||
2113,
|
||||
2177,
|
||||
2241,
|
||||
2305,
|
||||
2369,
|
||||
2433,
|
||||
2497,
|
||||
2561,
|
||||
2625,
|
||||
2689,
|
||||
2753,
|
||||
2817,
|
||||
2881,
|
||||
2945,
|
||||
3009,
|
||||
3073,
|
||||
3137,
|
||||
3201,
|
||||
3265,
|
||||
3329,
|
||||
3393,
|
||||
3457,
|
||||
3521,
|
||||
3585,
|
||||
3649,
|
||||
3713,
|
||||
3777,
|
||||
3841,
|
||||
3905,
|
||||
3969,
|
||||
4033
|
||||
};
|
||||
|
||||
static int[] cpdist8 = new int[]
|
||||
{
|
||||
1,
|
||||
129,
|
||||
257,
|
||||
385,
|
||||
513,
|
||||
641,
|
||||
769,
|
||||
897,
|
||||
1025,
|
||||
1153,
|
||||
1281,
|
||||
1409,
|
||||
1537,
|
||||
1665,
|
||||
1793,
|
||||
1921,
|
||||
2049,
|
||||
2177,
|
||||
2305,
|
||||
2433,
|
||||
2561,
|
||||
2689,
|
||||
2817,
|
||||
2945,
|
||||
3073,
|
||||
3201,
|
||||
3329,
|
||||
3457,
|
||||
3585,
|
||||
3713,
|
||||
3841,
|
||||
3969,
|
||||
4097,
|
||||
4225,
|
||||
4353,
|
||||
4481,
|
||||
4609,
|
||||
4737,
|
||||
4865,
|
||||
4993,
|
||||
5121,
|
||||
5249,
|
||||
5377,
|
||||
5505,
|
||||
5633,
|
||||
5761,
|
||||
5889,
|
||||
6017,
|
||||
6145,
|
||||
6273,
|
||||
6401,
|
||||
6529,
|
||||
6657,
|
||||
6785,
|
||||
6913,
|
||||
7041,
|
||||
7169,
|
||||
7297,
|
||||
7425,
|
||||
7553,
|
||||
7681,
|
||||
7809,
|
||||
7937,
|
||||
8065
|
||||
};
|
||||
|
||||
private int get_tree(int[] arrBitLengths, int numberExpected)
|
||||
/* Get the bit lengths for a code representation from the compressed
|
||||
stream. If get_tree() returns 4, then there is an error in the data.
|
||||
Otherwise zero is returned. */
|
||||
{
|
||||
/* get bit lengths */
|
||||
int inIndex = inStream.ReadByte() + 1; /* length/count pairs to read */
|
||||
int outIndex = 0; /* next code */
|
||||
do
|
||||
{
|
||||
int nextByte = inStream.ReadByte();
|
||||
int bitLengthOfCodes = (nextByte & 0xf) + 1; /* bits in code (1..16) */
|
||||
int numOfCodes = ((nextByte & 0xf0) >> 4) + 1; /* codes with those bits (1..16) */
|
||||
if (outIndex + numOfCodes > numberExpected)
|
||||
return 4; /* don't overflow arrBitLengths[] */
|
||||
do
|
||||
{
|
||||
arrBitLengths[outIndex++] = bitLengthOfCodes;
|
||||
} while ((--numOfCodes) != 0);
|
||||
} while ((--inIndex) != 0);
|
||||
|
||||
return outIndex != numberExpected ? 4 : 0; /* should have read numberExpected of them */
|
||||
}
|
||||
|
||||
private int explode_SetTables()
|
||||
{
|
||||
int returnCode; /* return codes */
|
||||
int[] arrBitLengthsForCodes = new int[256]; /* bit lengths for codes */
|
||||
|
||||
bitsForLiteralCodeTable = 0; /* bits for tb */
|
||||
bitsForLengthCodeTable = 7;
|
||||
bitsForDistanceCodeTable = (compressedSize) > 200000 ? 8 : 7;
|
||||
|
||||
if ((generalPurposeBitFlag & HeaderFlags.Bit2) != 0)
|
||||
/* With literal tree--minimum match length is 3 */
|
||||
{
|
||||
bitsForLiteralCodeTable = 9; /* base table size for literals */
|
||||
if ((returnCode = get_tree(arrBitLengthsForCodes, 256)) != 0)
|
||||
return returnCode;
|
||||
|
||||
if (
|
||||
(
|
||||
returnCode = HuftTree.huftbuid(
|
||||
arrBitLengthsForCodes,
|
||||
256,
|
||||
256,
|
||||
[],
|
||||
[],
|
||||
out hufLiteralCodeTable,
|
||||
ref bitsForLiteralCodeTable
|
||||
)
|
||||
) != 0
|
||||
)
|
||||
return returnCode;
|
||||
|
||||
if ((returnCode = get_tree(arrBitLengthsForCodes, 64)) != 0)
|
||||
return returnCode;
|
||||
|
||||
if (
|
||||
(
|
||||
returnCode = HuftTree.huftbuid(
|
||||
arrBitLengthsForCodes,
|
||||
64,
|
||||
0,
|
||||
cplen3,
|
||||
extra,
|
||||
out hufLengthCodeTable,
|
||||
ref bitsForLengthCodeTable
|
||||
)
|
||||
) != 0
|
||||
)
|
||||
return returnCode;
|
||||
}
|
||||
else
|
||||
/* No literal tree--minimum match length is 2 */
|
||||
{
|
||||
if ((returnCode = get_tree(arrBitLengthsForCodes, 64)) != 0)
|
||||
return returnCode;
|
||||
|
||||
hufLiteralCodeTable = null;
|
||||
|
||||
if (
|
||||
(
|
||||
returnCode = HuftTree.huftbuid(
|
||||
arrBitLengthsForCodes,
|
||||
64,
|
||||
0,
|
||||
cplen2,
|
||||
extra,
|
||||
out hufLengthCodeTable,
|
||||
ref bitsForLengthCodeTable
|
||||
)
|
||||
) != 0
|
||||
)
|
||||
return returnCode;
|
||||
}
|
||||
|
||||
if ((returnCode = get_tree(arrBitLengthsForCodes, 64)) != 0)
|
||||
return (int)returnCode;
|
||||
|
||||
if ((generalPurposeBitFlag & HeaderFlags.Bit1) != 0) /* true if 8K */
|
||||
{
|
||||
numOfUncodedLowerDistanceBits = 7;
|
||||
returnCode = HuftTree.huftbuid(
|
||||
arrBitLengthsForCodes,
|
||||
64,
|
||||
0,
|
||||
cpdist8,
|
||||
extra,
|
||||
out hufDistanceCodeTable,
|
||||
ref bitsForDistanceCodeTable
|
||||
);
|
||||
}
|
||||
else /* else 4K */
|
||||
{
|
||||
numOfUncodedLowerDistanceBits = 6;
|
||||
returnCode = HuftTree.huftbuid(
|
||||
arrBitLengthsForCodes,
|
||||
64,
|
||||
0,
|
||||
cpdist4,
|
||||
extra,
|
||||
out hufDistanceCodeTable,
|
||||
ref bitsForDistanceCodeTable
|
||||
);
|
||||
}
|
||||
|
||||
return returnCode;
|
||||
}
|
||||
|
||||
private void NeedBits(int numberOfBits)
|
||||
{
|
||||
while (bitBufferCount < (numberOfBits))
|
||||
{
|
||||
bitBuffer |= (uint)inStream.ReadByte() << bitBufferCount;
|
||||
bitBufferCount += 8;
|
||||
}
|
||||
}
|
||||
|
||||
private void DumpBits(int numberOfBits)
|
||||
{
|
||||
bitBuffer >>= numberOfBits;
|
||||
bitBufferCount -= numberOfBits;
|
||||
}
|
||||
|
||||
int DecodeHuft(huftNode[] htab, int bits, uint mask, out huftNode huftPointer, out int e)
|
||||
{
|
||||
NeedBits(bits);
|
||||
|
||||
int tabOffset = (int)(~bitBuffer & mask);
|
||||
huftPointer = htab[tabOffset];
|
||||
|
||||
while (true)
|
||||
{
|
||||
DumpBits(huftPointer.NumberOfBitsUsed);
|
||||
e = huftPointer.NumberOfExtraBits;
|
||||
if (e <= 32)
|
||||
break;
|
||||
if (e == INVALID_CODE)
|
||||
return 1;
|
||||
|
||||
e &= 31;
|
||||
NeedBits(e);
|
||||
|
||||
tabOffset = (int)(~bitBuffer & mask_bits[e]);
|
||||
huftPointer = huftPointer.ChildNodes[tabOffset];
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
private void explode_var_init()
|
||||
{
|
||||
/* explode the coded data */
|
||||
bitBuffer = 0;
|
||||
bitBufferCount = 0;
|
||||
maskForLiteralCodeTable = mask_bits[bitsForLiteralCodeTable]; //only used in explode_lit
|
||||
maskForLengthCodeTable = mask_bits[bitsForLengthCodeTable];
|
||||
maskForDistanceCodeTable = mask_bits[bitsForDistanceCodeTable];
|
||||
maskForDistanceLowBits = mask_bits[numOfUncodedLowerDistanceBits];
|
||||
outBytesCount = 0;
|
||||
|
||||
windowIndex = 0; /* initialize bit buffer, window */
|
||||
}
|
||||
|
||||
public override int Read(byte[] buffer, int offset, int count)
|
||||
{
|
||||
int countIndex = 0;
|
||||
while (countIndex < count && outBytesCount < unCompressedSize) /* do until unCompressedSize bytes uncompressed */
|
||||
{
|
||||
if (length == 0)
|
||||
{
|
||||
NeedBits(1);
|
||||
bool literal = (bitBuffer & 1) == 1;
|
||||
DumpBits(1);
|
||||
|
||||
huftNode huftPointer;
|
||||
if (literal) /* then literal--decode it */
|
||||
{
|
||||
byte nextByte;
|
||||
if (hufLiteralCodeTable != null)
|
||||
{
|
||||
/* get coded literal */
|
||||
if (
|
||||
DecodeHuft(
|
||||
hufLiteralCodeTable,
|
||||
bitsForLiteralCodeTable,
|
||||
maskForLiteralCodeTable,
|
||||
out huftPointer,
|
||||
out _
|
||||
) != 0
|
||||
)
|
||||
throw new Exception("Error decoding literal value");
|
||||
|
||||
nextByte = (byte)huftPointer.Value;
|
||||
}
|
||||
else
|
||||
{
|
||||
NeedBits(8);
|
||||
nextByte = (byte)bitBuffer;
|
||||
DumpBits(8);
|
||||
}
|
||||
|
||||
buffer[offset + (countIndex++)] = nextByte;
|
||||
windowsBuffer[windowIndex++] = nextByte;
|
||||
outBytesCount++;
|
||||
|
||||
if (windowIndex == WSIZE)
|
||||
windowIndex = 0;
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
NeedBits(numOfUncodedLowerDistanceBits); /* get distance low bits */
|
||||
distance = (int)(bitBuffer & maskForDistanceLowBits);
|
||||
DumpBits(numOfUncodedLowerDistanceBits);
|
||||
|
||||
/* get coded distance high bits */
|
||||
if (
|
||||
DecodeHuft(
|
||||
hufDistanceCodeTable,
|
||||
bitsForDistanceCodeTable,
|
||||
maskForDistanceCodeTable,
|
||||
out huftPointer,
|
||||
out _
|
||||
) != 0
|
||||
)
|
||||
throw new Exception("Error decoding distance high bits");
|
||||
|
||||
distance = windowIndex - (distance + huftPointer.Value); /* construct offset */
|
||||
|
||||
/* get coded length */
|
||||
if (
|
||||
DecodeHuft(
|
||||
hufLengthCodeTable,
|
||||
bitsForLengthCodeTable,
|
||||
maskForLengthCodeTable,
|
||||
out huftPointer,
|
||||
out int extraBitLength
|
||||
) != 0
|
||||
)
|
||||
throw new Exception("Error decoding coded length");
|
||||
|
||||
length = huftPointer.Value;
|
||||
|
||||
if (extraBitLength != 0) /* get length extra bits */
|
||||
{
|
||||
NeedBits(8);
|
||||
length += (int)(bitBuffer & 0xff);
|
||||
DumpBits(8);
|
||||
}
|
||||
|
||||
if (length > (unCompressedSize - outBytesCount))
|
||||
length = (int)(unCompressedSize - outBytesCount);
|
||||
|
||||
distance &= WSIZE - 1;
|
||||
}
|
||||
|
||||
while (length != 0 && countIndex < count)
|
||||
{
|
||||
byte nextByte = windowsBuffer[distance++];
|
||||
buffer[offset + (countIndex++)] = nextByte;
|
||||
windowsBuffer[windowIndex++] = nextByte;
|
||||
outBytesCount++;
|
||||
|
||||
if (distance == WSIZE)
|
||||
distance = 0;
|
||||
|
||||
if (windowIndex == WSIZE)
|
||||
windowIndex = 0;
|
||||
|
||||
length--;
|
||||
}
|
||||
}
|
||||
|
||||
return countIndex;
|
||||
}
|
||||
}
|
||||
@@ -1,269 +0,0 @@
|
||||
/*
|
||||
* This code has been converted to C# based on the original huft_tree code found in
|
||||
* inflate.c -- by Mark Adler version c17e, 30 Mar 2007
|
||||
*/
|
||||
|
||||
namespace SharpCompress.Compressors.Explode;
|
||||
|
||||
public class huftNode
|
||||
{
|
||||
public int NumberOfExtraBits; /* number of extra bits or operation */
|
||||
public int NumberOfBitsUsed; /* number of bits in this code or subcode */
|
||||
public int Value; /* literal, length base, or distance base */
|
||||
public huftNode[] ChildNodes = []; /* next level of table */
|
||||
}
|
||||
|
||||
public static class HuftTree
|
||||
{
|
||||
private const int INVALID_CODE = 99;
|
||||
|
||||
/* If BMAX needs to be larger than 16, then h and x[] should be ulg. */
|
||||
private const int BMAX = 16; /* maximum bit length of any code (16 for explode) */
|
||||
private const int N_MAX = 288; /* maximum number of codes in any set */
|
||||
|
||||
public static int huftbuid(
|
||||
int[] arrBitLengthForCodes,
|
||||
int numberOfCodes,
|
||||
int numberOfSimpleValueCodes,
|
||||
int[] arrBaseValuesForNonSimpleCodes,
|
||||
int[] arrExtraBitsForNonSimpleCodes,
|
||||
out huftNode[] outHufTable,
|
||||
ref int outBitsForTable
|
||||
)
|
||||
/* Given a list of code lengths and a maximum table size, make a set of
|
||||
tables to decode that set of codes. Return zero on success, one if
|
||||
the given code set is incomplete (the tables are still built in this
|
||||
case), two if the input is invalid (all zero length codes or an
|
||||
oversubscribed set of lengths), and three if not enough memory.
|
||||
The code with value 256 is special, and the tables are constructed
|
||||
so that no bits beyond that code are fetched when that code is
|
||||
decoded. */
|
||||
{
|
||||
outHufTable = [];
|
||||
|
||||
/* Generate counts for each bit length */
|
||||
int lengthOfEOBcode = numberOfCodes > 256 ? arrBitLengthForCodes[256] : BMAX; /* set length of EOB code, if any */
|
||||
|
||||
int[] arrBitLengthCount = new int[BMAX + 1];
|
||||
for (int i = 0; i < BMAX + 1; i++)
|
||||
arrBitLengthCount[i] = 0;
|
||||
|
||||
int pIndex = 0;
|
||||
int counterCurrentCode = numberOfCodes;
|
||||
do
|
||||
{
|
||||
arrBitLengthCount[arrBitLengthForCodes[pIndex]]++;
|
||||
pIndex++; /* assume all entries <= BMAX */
|
||||
} while ((--counterCurrentCode) != 0);
|
||||
|
||||
if (arrBitLengthCount[0] == numberOfCodes) /* null input--all zero length codes */
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* Find minimum and maximum length, bound *outBitsForTable by those */
|
||||
int counter;
|
||||
for (counter = 1; counter <= BMAX; counter++)
|
||||
if (arrBitLengthCount[counter] != 0)
|
||||
break;
|
||||
|
||||
int numberOfBitsInCurrentCode = counter; /* minimum code length */
|
||||
if (outBitsForTable < counter)
|
||||
outBitsForTable = counter;
|
||||
|
||||
for (counterCurrentCode = BMAX; counterCurrentCode != 0; counterCurrentCode--)
|
||||
if (arrBitLengthCount[counterCurrentCode] != 0)
|
||||
break;
|
||||
|
||||
int maximumCodeLength = counterCurrentCode; /* maximum code length */
|
||||
if (outBitsForTable > counterCurrentCode)
|
||||
outBitsForTable = counterCurrentCode;
|
||||
|
||||
/* Adjust last length count to fill out codes, if needed */
|
||||
int numberOfDummyCodesAdded;
|
||||
for (
|
||||
numberOfDummyCodesAdded = 1 << counter;
|
||||
counter < counterCurrentCode;
|
||||
counter++, numberOfDummyCodesAdded <<= 1
|
||||
)
|
||||
if ((numberOfDummyCodesAdded -= arrBitLengthCount[counter]) < 0)
|
||||
return 2; /* bad input: more codes than bits */
|
||||
|
||||
if ((numberOfDummyCodesAdded -= arrBitLengthCount[counterCurrentCode]) < 0)
|
||||
return 2;
|
||||
|
||||
arrBitLengthCount[counterCurrentCode] += numberOfDummyCodesAdded;
|
||||
|
||||
/* Generate starting offsets into the value table for each length */
|
||||
int[] bitOffset = new int[BMAX + 1];
|
||||
bitOffset[1] = 0;
|
||||
counter = 0;
|
||||
pIndex = 1;
|
||||
int xIndex = 2;
|
||||
while ((--counterCurrentCode) != 0)
|
||||
{ /* note that i == g from above */
|
||||
bitOffset[xIndex++] = (counter += arrBitLengthCount[pIndex++]);
|
||||
}
|
||||
|
||||
/* Make a table of values in order of bit lengths */
|
||||
int[] arrValuesInOrderOfBitLength = new int[N_MAX];
|
||||
for (int i = 0; i < N_MAX; i++)
|
||||
arrValuesInOrderOfBitLength[i] = 0;
|
||||
|
||||
pIndex = 0;
|
||||
counterCurrentCode = 0;
|
||||
do
|
||||
{
|
||||
if ((counter = arrBitLengthForCodes[pIndex++]) != 0)
|
||||
arrValuesInOrderOfBitLength[bitOffset[counter]++] = counterCurrentCode;
|
||||
} while (++counterCurrentCode < numberOfCodes);
|
||||
|
||||
numberOfCodes = bitOffset[maximumCodeLength]; /* set numberOfCodes to length of v */
|
||||
|
||||
/* Generate the Huffman codes and for each, make the table entries */
|
||||
bitOffset[0] = counterCurrentCode = 0; /* first Huffman code is zero */
|
||||
pIndex = 0; /* grab values in bit order */
|
||||
int tableLevel = -1; /* no tables yet--level -1 */
|
||||
int bitsBeforeThisTable = 0;
|
||||
int[] arrLX = new int[BMAX + 1];
|
||||
int stackOfBitsPerTable = 1; /* stack of bits per table */
|
||||
arrLX[stackOfBitsPerTable - 1] = 0; /* no bits decoded yet */
|
||||
|
||||
huftNode[][] arrHufTableStack = new huftNode[BMAX][];
|
||||
huftNode[] pointerToCurrentTable = [];
|
||||
int numberOfEntriesInCurrentTable = 0;
|
||||
|
||||
bool first = true;
|
||||
|
||||
/* go through the bit lengths (k already is bits in shortest code) */
|
||||
for (; numberOfBitsInCurrentCode <= maximumCodeLength; numberOfBitsInCurrentCode++)
|
||||
{
|
||||
int counterForCodes = arrBitLengthCount[numberOfBitsInCurrentCode];
|
||||
while ((counterForCodes--) != 0)
|
||||
{
|
||||
/* here i is the Huffman code of length k bits for value *p */
|
||||
/* make tables up to required level */
|
||||
while (
|
||||
numberOfBitsInCurrentCode
|
||||
> bitsBeforeThisTable + arrLX[stackOfBitsPerTable + tableLevel]
|
||||
)
|
||||
{
|
||||
bitsBeforeThisTable += arrLX[stackOfBitsPerTable + (tableLevel++)]; /* add bits already decoded */
|
||||
|
||||
/* compute minimum size table less than or equal to *outBitsForTable bits */
|
||||
numberOfEntriesInCurrentTable =
|
||||
(numberOfEntriesInCurrentTable = maximumCodeLength - bitsBeforeThisTable)
|
||||
> outBitsForTable
|
||||
? outBitsForTable
|
||||
: numberOfEntriesInCurrentTable; /* upper limit */
|
||||
int fBitCounter1 =
|
||||
1 << (counter = numberOfBitsInCurrentCode - bitsBeforeThisTable);
|
||||
if (fBitCounter1 > counterForCodes + 1) /* try a k-w bit table */
|
||||
{ /* too few codes for k-w bit table */
|
||||
fBitCounter1 -= counterForCodes + 1; /* deduct codes from patterns left */
|
||||
xIndex = numberOfBitsInCurrentCode;
|
||||
while (++counter < numberOfEntriesInCurrentTable) /* try smaller tables up to z bits */
|
||||
{
|
||||
if ((fBitCounter1 <<= 1) <= arrBitLengthCount[++xIndex])
|
||||
break; /* enough codes to use up j bits */
|
||||
fBitCounter1 -= arrBitLengthCount[xIndex]; /* else deduct codes from patterns */
|
||||
}
|
||||
}
|
||||
if (
|
||||
bitsBeforeThisTable + counter > lengthOfEOBcode
|
||||
&& bitsBeforeThisTable < lengthOfEOBcode
|
||||
)
|
||||
counter = lengthOfEOBcode - bitsBeforeThisTable; /* make EOB code end at table */
|
||||
|
||||
numberOfEntriesInCurrentTable = 1 << counter; /* table entries for j-bit table */
|
||||
arrLX[stackOfBitsPerTable + tableLevel] = counter; /* set table size in stack */
|
||||
|
||||
/* allocate and link in new table */
|
||||
pointerToCurrentTable = new huftNode[numberOfEntriesInCurrentTable];
|
||||
|
||||
// set the pointer, pointed to by *outHufTable to the second huft in pointertoCurrentTable
|
||||
if (first)
|
||||
{
|
||||
outHufTable = pointerToCurrentTable; /* link to list for huft_free() */
|
||||
first = false;
|
||||
}
|
||||
|
||||
arrHufTableStack[tableLevel] = pointerToCurrentTable; /* table starts after link */
|
||||
|
||||
/* connect to last table, if there is one */
|
||||
if (tableLevel != 0)
|
||||
{
|
||||
bitOffset[tableLevel] = counterCurrentCode; /* save pattern for backing up */
|
||||
|
||||
huftNode vHuft = new huftNode
|
||||
{
|
||||
NumberOfBitsUsed = arrLX[stackOfBitsPerTable + tableLevel - 1], /* bits to dump before this table */
|
||||
NumberOfExtraBits = 32 + counter, /* bits in this table */
|
||||
ChildNodes = pointerToCurrentTable /* pointer to this table */
|
||||
};
|
||||
|
||||
counter =
|
||||
(counterCurrentCode & ((1 << bitsBeforeThisTable) - 1))
|
||||
>> (bitsBeforeThisTable - arrLX[stackOfBitsPerTable + tableLevel - 1]);
|
||||
arrHufTableStack[tableLevel - 1][counter] = vHuft; /* connect to last table */
|
||||
}
|
||||
}
|
||||
|
||||
/* set up table entry in r */
|
||||
huftNode vHuft1 = new huftNode
|
||||
{
|
||||
NumberOfBitsUsed = numberOfBitsInCurrentCode - bitsBeforeThisTable
|
||||
};
|
||||
|
||||
if (pIndex >= numberOfCodes)
|
||||
vHuft1.NumberOfExtraBits = INVALID_CODE; /* out of values--invalid code */
|
||||
else if (arrValuesInOrderOfBitLength[pIndex] < numberOfSimpleValueCodes)
|
||||
{
|
||||
vHuft1.NumberOfExtraBits = (
|
||||
arrValuesInOrderOfBitLength[pIndex] < 256 ? 32 : 31
|
||||
); /* 256 is end-of-block code */
|
||||
vHuft1.Value = arrValuesInOrderOfBitLength[pIndex++]; /* simple code is just the value */
|
||||
}
|
||||
else
|
||||
{
|
||||
vHuft1.NumberOfExtraBits = arrExtraBitsForNonSimpleCodes[
|
||||
arrValuesInOrderOfBitLength[pIndex] - numberOfSimpleValueCodes
|
||||
]; /* non-simple--look up in lists */
|
||||
vHuft1.Value = arrBaseValuesForNonSimpleCodes[
|
||||
arrValuesInOrderOfBitLength[pIndex++] - numberOfSimpleValueCodes
|
||||
];
|
||||
}
|
||||
|
||||
/* fill code-like entries with r */
|
||||
int fBitCounter2 = 1 << (numberOfBitsInCurrentCode - bitsBeforeThisTable);
|
||||
for (
|
||||
counter = counterCurrentCode >> bitsBeforeThisTable;
|
||||
counter < numberOfEntriesInCurrentTable;
|
||||
counter += fBitCounter2
|
||||
)
|
||||
pointerToCurrentTable[counter] = vHuft1;
|
||||
|
||||
/* backwards increment the k-bit code i */
|
||||
for (
|
||||
counter = 1 << (numberOfBitsInCurrentCode - 1);
|
||||
(counterCurrentCode & counter) != 0;
|
||||
counter >>= 1
|
||||
)
|
||||
counterCurrentCode ^= counter;
|
||||
counterCurrentCode ^= counter;
|
||||
|
||||
/* backup over finished tables */
|
||||
while (
|
||||
(counterCurrentCode & ((1 << bitsBeforeThisTable) - 1)) != bitOffset[tableLevel]
|
||||
)
|
||||
bitsBeforeThisTable -= arrLX[stackOfBitsPerTable + (--tableLevel)];
|
||||
}
|
||||
}
|
||||
|
||||
/* return actual size of base table */
|
||||
outBitsForTable = arrLX[stackOfBitsPerTable];
|
||||
|
||||
/* Return true (1) if we were given an incomplete table */
|
||||
return (numberOfDummyCodesAdded != 0 && maximumCodeLength != 1) ? 1 : 0;
|
||||
}
|
||||
}
|
||||
@@ -1,63 +0,0 @@
using System;
using System.Buffers.Binary;
using System.IO;

namespace SharpCompress.Compressors.Filters;

internal class BCJFilterARM64 : Filter
{
    private int _pos;

    public BCJFilterARM64(bool isEncoder, Stream baseStream)
        : base(isEncoder, baseStream, 8) => _pos = 0;

    protected override int Transform(byte[] buffer, int offset, int count)
    {
        var end = offset + count - 4;
        int i;

        for (i = offset; i <= end; i += 4)
        {
            uint pc = (uint)(_pos + i - offset);
            uint instr = BinaryPrimitives.ReadUInt32LittleEndian(
                new ReadOnlySpan<byte>(buffer, i, 4)
            );

            if ((instr >> 26) == 0x25)
            {
                uint src = instr;
                instr = 0x94000000;

                pc >>= 2;
                if (!_isEncoder)
                    pc = 0U - pc;

                instr |= (src + pc) & 0x03FFFFFF;
                BinaryPrimitives.WriteUInt32LittleEndian(new Span<byte>(buffer, i, 4), instr);
            }
            else if ((instr & 0x9F000000) == 0x90000000)
            {
                uint src = ((instr >> 29) & 3) | ((instr >> 3) & 0x001FFFFC);

                if (((src + 0x00020000) & 0x001C0000) != 0)
                    continue;

                instr &= 0x9000001F;

                pc >>= 12;
                if (!_isEncoder)
                    pc = 0U - pc;

                uint dest = src + pc;
                instr |= (dest & 3) << 29;
                instr |= (dest & 0x0003FFFC) << 3;
                instr |= (0U - (dest & 0x00020000)) & 0x00E00000;
                BinaryPrimitives.WriteUInt32LittleEndian(new Span<byte>(buffer, i, 4), instr);
            }
        }

        i -= offset;
        _pos += i;
        return i;
    }
}
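Note: the deleted BCJFilterARM64 above converts AArch64 branch targets between position-relative and absolute form so that repeated call sites compress better. A hedged, self-contained illustration of the BL case only follows; the type and method names are invented for this sketch and are not part of SharpCompress.

using System;
using System.Buffers.Binary;

internal static class Arm64BlRewriteSketch
{
    // Illustrative only: rewrites the 26-bit immediate of one AArch64 BL instruction
    // the same way the Transform method above does for a single 4-byte word.
    // 'pos' is the stream position of the instruction; 'encode' selects the direction.
    public static void RewriteBl(byte[] buffer, int index, uint pos, bool encode)
    {
        var instr = BinaryPrimitives.ReadUInt32LittleEndian(buffer.AsSpan(index, 4));
        if ((instr >> 26) != 0x25)
        {
            return; // not a BL instruction
        }

        var pc = pos >> 2; // word-granular program counter
        if (!encode)
        {
            pc = 0U - pc; // decoding subtracts what encoding added
        }

        var rewritten = 0x94000000u | ((instr + pc) & 0x03FFFFFF);
        BinaryPrimitives.WriteUInt32LittleEndian(buffer.AsSpan(index, 4), rewritten);
    }
}

Decoding applies the same transform with the offset negated, which is why the filter only needs the single _isEncoder flag.
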
@@ -1,210 +0,0 @@
|
||||
using System;
|
||||
using System.Buffers.Binary;
|
||||
using System.IO;
|
||||
|
||||
namespace SharpCompress.Compressors.Filters;
|
||||
|
||||
internal class BCJFilterRISCV : Filter
|
||||
{
|
||||
private int _pos;
|
||||
|
||||
public BCJFilterRISCV(bool isEncoder, Stream baseStream)
|
||||
: base(isEncoder, baseStream, 8) => _pos = 0;
|
||||
|
||||
private int Decode(byte[] buffer, int offset, int count)
|
||||
{
|
||||
if (count < 8)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
var end = offset + count - 8;
|
||||
int i;
|
||||
for (i = offset; i <= end; i += 2)
|
||||
{
|
||||
uint inst = buffer[i];
|
||||
if (inst == 0xEF)
|
||||
{
|
||||
uint b1 = buffer[i + 1];
|
||||
if ((b1 & 0x0D) != 0)
|
||||
continue;
|
||||
|
||||
uint b2 = buffer[i + 2];
|
||||
uint b3 = buffer[i + 3];
|
||||
uint pc = (uint)(_pos + i);
|
||||
|
||||
uint addr = ((b1 & 0xF0) << 13) | (b2 << 9) | (b3 << 1);
|
||||
|
||||
addr -= pc;
|
||||
|
||||
buffer[i + 1] = (byte)((b1 & 0x0F) | ((addr >> 8) & 0xF0));
|
||||
|
||||
buffer[i + 2] = (byte)(
|
||||
((addr >> 16) & 0x0F) | ((addr >> 7) & 0x10) | ((addr << 4) & 0xE0)
|
||||
);
|
||||
|
||||
buffer[i + 3] = (byte)(((addr >> 4) & 0x7F) | ((addr >> 13) & 0x80));
|
||||
|
||||
i += 4 - 2;
|
||||
}
|
||||
else if ((inst & 0x7F) == 0x17)
|
||||
{
|
||||
uint inst2 = 0;
|
||||
inst |= (uint)buffer[i + 1] << 8;
|
||||
inst |= (uint)buffer[i + 2] << 16;
|
||||
inst |= (uint)buffer[i + 3] << 24;
|
||||
|
||||
if ((inst & 0xE80) != 0)
|
||||
{
|
||||
inst2 = BinaryPrimitives.ReadUInt32LittleEndian(
|
||||
new ReadOnlySpan<byte>(buffer, i + 4, 4)
|
||||
);
|
||||
if (((((inst) << 8) ^ (inst2)) & 0xF8003) != 3)
|
||||
{
|
||||
i += 6 - 2;
|
||||
continue;
|
||||
}
|
||||
uint addr = inst & 0xFFFFF000;
|
||||
addr += inst2 >> 20;
|
||||
|
||||
inst = 0x17 | (2 << 7) | (inst2 << 12);
|
||||
inst2 = addr;
|
||||
}
|
||||
else
|
||||
{
|
||||
uint inst2_rs1 = inst >> 27;
|
||||
if ((uint)(((inst) - 0x3117) << 18) >= ((inst2_rs1) & 0x1D))
|
||||
{
|
||||
i += 4 - 2;
|
||||
continue;
|
||||
}
|
||||
|
||||
uint addr = BinaryPrimitives.ReadUInt32BigEndian(
|
||||
new ReadOnlySpan<byte>(buffer, i + 4, 4)
|
||||
);
|
||||
|
||||
addr -= (uint)(_pos + i);
|
||||
|
||||
inst2 = (inst >> 12) | (addr << 20);
|
||||
|
||||
inst = 0x17 | (inst2_rs1 << 7) | ((addr + 0x800) & 0xFFFFF000);
|
||||
}
|
||||
BinaryPrimitives.WriteUInt32LittleEndian(new Span<byte>(buffer, i, 4), inst);
|
||||
BinaryPrimitives.WriteUInt32LittleEndian(new Span<byte>(buffer, i + 4, 4), inst2);
|
||||
|
||||
i += 8 - 2;
|
||||
}
|
||||
}
|
||||
i -= offset;
|
||||
_pos += i;
|
||||
return i;
|
||||
}
|
||||
|
||||
private int Encode(byte[] buffer, int offset, int count)
|
||||
{
|
||||
if (count < 8)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
var end = offset + count - 8;
|
||||
int i;
|
||||
for (i = offset; i <= end; i += 2)
|
||||
{
|
||||
uint inst = buffer[i];
|
||||
if (inst == 0xEF)
|
||||
{
|
||||
uint b1 = buffer[i + 1];
|
||||
if ((b1 & 0x0D) != 0)
|
||||
continue;
|
||||
|
||||
uint b2 = buffer[i + 2];
|
||||
uint b3 = buffer[i + 3];
|
||||
uint pc = (uint)(_pos + i);
|
||||
|
||||
uint addr =
|
||||
((b1 & 0xF0) << 8)
|
||||
| ((b2 & 0x0F) << 16)
|
||||
| ((b2 & 0x10) << 7)
|
||||
| ((b2 & 0xE0) >> 4)
|
||||
| ((b3 & 0x7F) << 4)
|
||||
| ((b3 & 0x80) << 13);
|
||||
|
||||
addr += pc;
|
||||
|
||||
buffer[i + 1] = (byte)((b1 & 0x0F) | ((addr >> 13) & 0xF0));
|
||||
|
||||
buffer[i + 2] = (byte)(addr >> 9);
|
||||
|
||||
buffer[i + 3] = (byte)(addr >> 1);
|
||||
|
||||
i += 4 - 2;
|
||||
}
|
||||
else if ((inst & 0x7F) == 0x17)
|
||||
{
|
||||
inst |= (uint)buffer[i + 1] << 8;
|
||||
inst |= (uint)buffer[i + 2] << 16;
|
||||
inst |= (uint)buffer[i + 3] << 24;
|
||||
|
||||
if ((inst & 0xE80) != 0)
|
||||
{
|
||||
uint inst2 = BinaryPrimitives.ReadUInt32LittleEndian(
|
||||
new ReadOnlySpan<byte>(buffer, i + 4, 4)
|
||||
);
|
||||
if (((((inst) << 8) ^ (inst2)) & 0xF8003) != 3)
|
||||
{
|
||||
i += 6 - 2;
|
||||
continue;
|
||||
}
|
||||
uint addr = inst & 0xFFFFF000;
|
||||
addr += (inst2 >> 20) - ((inst2 >> 19) & 0x1000);
|
||||
|
||||
addr += (uint)(_pos + i);
|
||||
inst = 0x17 | (2 << 7) | (inst2 << 12);
|
||||
|
||||
BinaryPrimitives.WriteUInt32LittleEndian(new Span<byte>(buffer, i, 4), inst);
|
||||
BinaryPrimitives.WriteUInt32BigEndian(new Span<byte>(buffer, i + 4, 4), addr);
|
||||
}
|
||||
else
|
||||
{
|
||||
uint fake_rs1 = inst >> 27;
|
||||
if ((uint)(((inst) - 0x3117) << 18) >= ((fake_rs1) & 0x1D))
|
||||
{
|
||||
i += 4 - 2;
|
||||
continue;
|
||||
}
|
||||
|
||||
uint fake_addr = BinaryPrimitives.ReadUInt32LittleEndian(
|
||||
new ReadOnlySpan<byte>(buffer, i + 4, 4)
|
||||
);
|
||||
|
||||
uint fake_inst2 = (inst >> 12) | (fake_addr << 20);
|
||||
|
||||
inst = 0x17 | (fake_rs1 << 7) | (fake_addr & 0xFFFFF000);
|
||||
|
||||
BinaryPrimitives.WriteUInt32LittleEndian(new Span<byte>(buffer, i, 4), inst);
|
||||
BinaryPrimitives.WriteUInt32LittleEndian(
|
||||
new Span<byte>(buffer, i + 4, 4),
|
||||
fake_inst2
|
||||
);
|
||||
}
|
||||
i += 8 - 2;
|
||||
}
|
||||
}
|
||||
i -= offset;
|
||||
_pos += i;
|
||||
return i;
|
||||
}
|
||||
|
||||
protected override int Transform(byte[] buffer, int offset, int count)
|
||||
{
|
||||
if (_isEncoder)
|
||||
{
|
||||
return Encode(buffer, offset, count);
|
||||
}
|
||||
else
|
||||
{
|
||||
return Decode(buffer, offset, count);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -20,8 +20,7 @@ internal sealed class AesDecoderStream : DecoderStream2

    public AesDecoderStream(Stream input, byte[] info, IPasswordProvider pass, long limit)
    {
        var password = pass.CryptoGetTextPassword();
        if (password == null)
        if (pass.CryptoGetTextPassword() == null)
        {
            throw new SharpCompress.Common.CryptographicException(
                "Encrypted 7Zip archive has no password specified."
@@ -38,8 +37,8 @@ internal sealed class AesDecoderStream : DecoderStream2

        Init(info, out var numCyclesPower, out var salt, out var seed);

        var passwordBytes = Encoding.Unicode.GetBytes(password);
        var key = InitKey(numCyclesPower, salt, passwordBytes);
        var password = Encoding.Unicode.GetBytes(pass.CryptoGetTextPassword());
        var key = InitKey(numCyclesPower, salt, password);
        if (key == null)
        {
            throw new InvalidOperationException("Initialized with null key");
@@ -208,6 +207,28 @@ internal sealed class AesDecoderStream : DecoderStream2
        }
        else
        {
#if NETSTANDARD2_0
            using var sha = IncrementalHash.CreateHash(HashAlgorithmName.SHA256);
            var counter = new byte[8];
            var numRounds = 1L << mNumCyclesPower;
            for (long round = 0; round < numRounds; round++)
            {
                sha.AppendData(salt, 0, salt.Length);
                sha.AppendData(pass, 0, pass.Length);
                sha.AppendData(counter, 0, 8);

                // This mirrors the counter so we don't have to convert long to byte[] each round.
                // (It also ensures the counter is little endian, which BitConverter does not.)
                for (var i = 0; i < 8; i++)
                {
                    if (++counter[i] != 0)
                    {
                        break;
                    }
                }
            }
            return sha.GetHashAndReset();
#else
            using var sha = SHA256.Create();
            var counter = new byte[8];
            var numRounds = 1L << mNumCyclesPower;
@@ -230,6 +251,7 @@ internal sealed class AesDecoderStream : DecoderStream2

            sha.TransformFinalBlock(counter, 0, 0);
            return sha.Hash;
#endif
        }
    }

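Note: the NETSTANDARD2_0 branch added above is the 7z AES key derivation — SHA-256 over salt, UTF-16LE password bytes and an 8-byte little-endian counter, repeated 2^numCyclesPower times. A minimal standalone sketch of that derivation follows; the class and method names are illustrative, not the library's API.

using System.Security.Cryptography;
using System.Text;

internal static class SevenZipKeyDerivationSketch
{
    // Derives the 32-byte AES key the same way the loop above does
    // (assumes 0 <= numCyclesPower < 64).
    public static byte[] DeriveKey(byte[] salt, string password, int numCyclesPower)
    {
        var pass = Encoding.Unicode.GetBytes(password);
        using var sha = IncrementalHash.CreateHash(HashAlgorithmName.SHA256);
        var counter = new byte[8];
        var rounds = 1L << numCyclesPower;
        for (long round = 0; round < rounds; round++)
        {
            sha.AppendData(salt, 0, salt.Length);
            sha.AppendData(pass, 0, pass.Length);
            sha.AppendData(counter, 0, 8);

            // Increment the little-endian counter in place.
            for (var i = 0; i < 8; i++)
            {
                if (++counter[i] != 0)
                {
                    break;
                }
            }
        }
        return sha.GetHashAndReset();
    }
}
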
@@ -63,18 +63,18 @@ public sealed class LZipStream : Stream
            var crc32Stream = (Crc32Stream)_stream;
            crc32Stream.WrappedStream.Dispose();
            crc32Stream.Dispose();
            var compressedCount = _countingWritableSubStream.NotNull().Count;
            var compressedCount = _countingWritableSubStream!.Count;

            Span<byte> intBuf = stackalloc byte[8];
            BinaryPrimitives.WriteUInt32LittleEndian(intBuf, crc32Stream.Crc);
            _countingWritableSubStream?.Write(intBuf.Slice(0, 4));
            _countingWritableSubStream.Write(intBuf.Slice(0, 4));

            BinaryPrimitives.WriteInt64LittleEndian(intBuf, _writeCount);
            _countingWritableSubStream?.Write(intBuf);
            _countingWritableSubStream.Write(intBuf);

            //total with headers
            BinaryPrimitives.WriteUInt64LittleEndian(intBuf, compressedCount + 6 + 20);
            _countingWritableSubStream?.Write(intBuf);
            _countingWritableSubStream.Write(intBuf);
        }
        _finished = true;
    }

@@ -25,8 +25,6 @@ internal static class DecoderRegistry
    private const uint K_ARM = 0x03030501;
    private const uint K_ARMT = 0x03030701;
    private const uint K_SPARC = 0x03030805;
    private const uint K_ARM64 = 0x0A;
    private const uint K_RISCV = 0x0B;
    private const uint K_DEFLATE = 0x040108;
    private const uint K_B_ZIP2 = 0x040202;
    private const uint K_ZSTD = 0x4F71101;
@@ -68,10 +66,6 @@ internal static class DecoderRegistry
                return new BCJFilterARMT(false, inStreams.Single());
            case K_SPARC:
                return new BCJFilterSPARC(false, inStreams.Single());
            case K_ARM64:
                return new BCJFilterARM64(false, inStreams.Single());
            case K_RISCV:
                return new BCJFilterRISCV(false, inStreams.Single());
            case K_B_ZIP2:
                return new BZip2Stream(inStreams.Single(), CompressionMode.Decompress, true);
            case K_PPMD:

@@ -2,5 +2,5 @@ namespace SharpCompress.Compressors.LZMA.Utilites;

internal interface IPasswordProvider
{
    string? CryptoGetTextPassword();
    string CryptoGetTextPassword();
}

@@ -530,6 +530,7 @@ internal partial class Unpack
        {
            case FILTER_E8:
            case FILTER_E8E9:

            {
                var FileOffset = (uint)WrittenFileSize;

@@ -568,6 +569,7 @@ internal partial class Unpack
            }
                return SrcData;
            case FILTER_ARM:

            {
                var FileOffset = (uint)WrittenFileSize;
                // DataSize is unsigned, so we use "CurPos+3" and not "DataSize-3"

@@ -228,6 +228,7 @@ internal sealed class RarVM : BitInput
|
||||
break;
|
||||
|
||||
case VMCommands.VM_CMP:
|
||||
|
||||
{
|
||||
var value1 = (VMFlags)GetValue(cmd.IsByteMode, Mem, op1);
|
||||
var result = value1 - GetValue(cmd.IsByteMode, Mem, op2);
|
||||
@@ -246,6 +247,7 @@ internal sealed class RarVM : BitInput
|
||||
break;
|
||||
|
||||
case VMCommands.VM_CMPB:
|
||||
|
||||
{
|
||||
var value1 = (VMFlags)GetValue(true, Mem, op1);
|
||||
var result = value1 - GetValue(true, Mem, op2);
|
||||
@@ -263,6 +265,7 @@ internal sealed class RarVM : BitInput
|
||||
break;
|
||||
|
||||
case VMCommands.VM_CMPD:
|
||||
|
||||
{
|
||||
var value1 = (VMFlags)GetValue(false, Mem, op1);
|
||||
var result = value1 - GetValue(false, Mem, op2);
|
||||
@@ -280,6 +283,7 @@ internal sealed class RarVM : BitInput
|
||||
break;
|
||||
|
||||
case VMCommands.VM_ADD:
|
||||
|
||||
{
|
||||
var value1 = GetValue(cmd.IsByteMode, Mem, op1);
|
||||
var result = (int)(
|
||||
@@ -347,6 +351,7 @@ internal sealed class RarVM : BitInput
|
||||
break;
|
||||
|
||||
case VMCommands.VM_SUB:
|
||||
|
||||
{
|
||||
var value1 = GetValue(cmd.IsByteMode, Mem, op1);
|
||||
var result = (int)(
|
||||
@@ -406,6 +411,7 @@ internal sealed class RarVM : BitInput
|
||||
break;
|
||||
|
||||
case VMCommands.VM_INC:
|
||||
|
||||
{
|
||||
var result = (int)(GetValue(cmd.IsByteMode, Mem, op1) & (0xFFffFFffL + 1L));
|
||||
if (cmd.IsByteMode)
|
||||
@@ -434,6 +440,7 @@ internal sealed class RarVM : BitInput
|
||||
break;
|
||||
|
||||
case VMCommands.VM_DEC:
|
||||
|
||||
{
|
||||
var result = (int)(GetValue(cmd.IsByteMode, Mem, op1) & (0xFFffFFff - 1));
|
||||
SetValue(cmd.IsByteMode, Mem, op1, result);
|
||||
@@ -456,6 +463,7 @@ internal sealed class RarVM : BitInput
|
||||
continue;
|
||||
|
||||
case VMCommands.VM_XOR:
|
||||
|
||||
{
|
||||
var result =
|
||||
GetValue(cmd.IsByteMode, Mem, op1) ^ GetValue(cmd.IsByteMode, Mem, op2);
|
||||
@@ -467,6 +475,7 @@ internal sealed class RarVM : BitInput
|
||||
break;
|
||||
|
||||
case VMCommands.VM_AND:
|
||||
|
||||
{
|
||||
var result =
|
||||
GetValue(cmd.IsByteMode, Mem, op1) & GetValue(cmd.IsByteMode, Mem, op2);
|
||||
@@ -478,6 +487,7 @@ internal sealed class RarVM : BitInput
|
||||
break;
|
||||
|
||||
case VMCommands.VM_OR:
|
||||
|
||||
{
|
||||
var result =
|
||||
GetValue(cmd.IsByteMode, Mem, op1) | GetValue(cmd.IsByteMode, Mem, op2);
|
||||
@@ -489,6 +499,7 @@ internal sealed class RarVM : BitInput
|
||||
break;
|
||||
|
||||
case VMCommands.VM_TEST:
|
||||
|
||||
{
|
||||
var result =
|
||||
GetValue(cmd.IsByteMode, Mem, op1) & GetValue(cmd.IsByteMode, Mem, op2);
|
||||
@@ -567,6 +578,7 @@ internal sealed class RarVM : BitInput
|
||||
break;
|
||||
|
||||
case VMCommands.VM_SHL:
|
||||
|
||||
{
|
||||
var value1 = GetValue(cmd.IsByteMode, Mem, op1);
|
||||
var value2 = GetValue(cmd.IsByteMode, Mem, op2);
|
||||
@@ -584,6 +596,7 @@ internal sealed class RarVM : BitInput
|
||||
break;
|
||||
|
||||
case VMCommands.VM_SHR:
|
||||
|
||||
{
|
||||
var value1 = GetValue(cmd.IsByteMode, Mem, op1);
|
||||
var value2 = GetValue(cmd.IsByteMode, Mem, op2);
|
||||
@@ -597,6 +610,7 @@ internal sealed class RarVM : BitInput
|
||||
break;
|
||||
|
||||
case VMCommands.VM_SAR:
|
||||
|
||||
{
|
||||
var value1 = GetValue(cmd.IsByteMode, Mem, op1);
|
||||
var value2 = GetValue(cmd.IsByteMode, Mem, op2);
|
||||
@@ -610,6 +624,7 @@ internal sealed class RarVM : BitInput
|
||||
break;
|
||||
|
||||
case VMCommands.VM_NEG:
|
||||
|
||||
{
|
||||
var result = -GetValue(cmd.IsByteMode, Mem, op1);
|
||||
flags = (VMFlags)(
|
||||
@@ -630,6 +645,7 @@ internal sealed class RarVM : BitInput
|
||||
break;
|
||||
|
||||
case VMCommands.VM_PUSHA:
|
||||
|
||||
{
|
||||
for (int i = 0, SP = R[7] - 4; i < regCount; i++, SP -= 4)
|
||||
{
|
||||
@@ -640,6 +656,7 @@ internal sealed class RarVM : BitInput
|
||||
break;
|
||||
|
||||
case VMCommands.VM_POPA:
|
||||
|
||||
{
|
||||
for (int i = 0, SP = R[7]; i < regCount; i++, SP += 4)
|
||||
{
|
||||
@@ -667,6 +684,7 @@ internal sealed class RarVM : BitInput
|
||||
break;
|
||||
|
||||
case VMCommands.VM_XCHG:
|
||||
|
||||
{
|
||||
var value1 = GetValue(cmd.IsByteMode, Mem, op1);
|
||||
SetValue(cmd.IsByteMode, Mem, op1, GetValue(cmd.IsByteMode, Mem, op2));
|
||||
@@ -675,6 +693,7 @@ internal sealed class RarVM : BitInput
|
||||
break;
|
||||
|
||||
case VMCommands.VM_MUL:
|
||||
|
||||
{
|
||||
var result = (int)(
|
||||
(
|
||||
@@ -688,6 +707,7 @@ internal sealed class RarVM : BitInput
|
||||
break;
|
||||
|
||||
case VMCommands.VM_DIV:
|
||||
|
||||
{
|
||||
var divider = GetValue(cmd.IsByteMode, Mem, op2);
|
||||
if (divider != 0)
|
||||
@@ -699,6 +719,7 @@ internal sealed class RarVM : BitInput
|
||||
break;
|
||||
|
||||
case VMCommands.VM_ADC:
|
||||
|
||||
{
|
||||
var value1 = GetValue(cmd.IsByteMode, Mem, op1);
|
||||
var FC = (int)(flags & VMFlags.VM_FC);
|
||||
@@ -728,6 +749,7 @@ internal sealed class RarVM : BitInput
|
||||
break;
|
||||
|
||||
case VMCommands.VM_SBB:
|
||||
|
||||
{
|
||||
var value1 = GetValue(cmd.IsByteMode, Mem, op1);
|
||||
var FC = (int)(flags & VMFlags.VM_FC);
|
||||
@@ -1134,6 +1156,7 @@ internal sealed class RarVM : BitInput
|
||||
{
|
||||
case VMStandardFilters.VMSF_E8:
|
||||
case VMStandardFilters.VMSF_E8E9:
|
||||
|
||||
{
|
||||
var dataSize = R[4];
|
||||
long fileOffset = R[6] & unchecked((int)0xFFffFFff);
|
||||
@@ -1188,6 +1211,7 @@ internal sealed class RarVM : BitInput
|
||||
break;
|
||||
|
||||
case VMStandardFilters.VMSF_ITANIUM:
|
||||
|
||||
{
|
||||
var dataSize = R[4];
|
||||
long fileOffset = R[6] & unchecked((int)0xFFffFFff);
|
||||
@@ -1245,6 +1269,7 @@ internal sealed class RarVM : BitInput
|
||||
break;
|
||||
|
||||
case VMStandardFilters.VMSF_DELTA:
|
||||
|
||||
{
|
||||
var dataSize = R[4] & unchecked((int)0xFFffFFff);
|
||||
var channels = R[0] & unchecked((int)0xFFffFFff);
|
||||
@@ -1275,6 +1300,7 @@ internal sealed class RarVM : BitInput
|
||||
break;
|
||||
|
||||
case VMStandardFilters.VMSF_RGB:
|
||||
|
||||
{
|
||||
// byte *SrcData=Mem,*DestData=SrcData+DataSize;
|
||||
int dataSize = R[4],
|
||||
@@ -1340,6 +1366,7 @@ internal sealed class RarVM : BitInput
|
||||
break;
|
||||
|
||||
case VMStandardFilters.VMSF_AUDIO:
|
||||
|
||||
{
|
||||
int dataSize = R[4],
|
||||
channels = R[0];
|
||||
@@ -1470,6 +1497,7 @@ internal sealed class RarVM : BitInput
|
||||
break;
|
||||
|
||||
case VMStandardFilters.VMSF_UPCASE:
|
||||
|
||||
{
|
||||
int dataSize = R[4],
|
||||
srcPos = 0,
|
||||
|
||||
@@ -1,249 +0,0 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
|
||||
namespace SharpCompress.Compressors.Reduce;
|
||||
|
||||
public class ReduceStream : Stream
|
||||
{
|
||||
private readonly long unCompressedSize;
|
||||
private readonly long compressedSize;
|
||||
private readonly Stream inStream;
|
||||
|
||||
private long inByteCount;
|
||||
private const int EOF = 1234;
|
||||
|
||||
private readonly int factor;
|
||||
private readonly int distanceMask;
|
||||
private readonly int lengthMask;
|
||||
|
||||
private long outBytesCount;
|
||||
|
||||
private readonly byte[] windowsBuffer;
|
||||
private int windowIndex;
|
||||
private int length;
|
||||
private int distance;
|
||||
|
||||
public ReduceStream(Stream inStr, long compsize, long unCompSize, int factor)
|
||||
{
|
||||
inStream = inStr;
|
||||
compressedSize = compsize;
|
||||
unCompressedSize = unCompSize;
|
||||
inByteCount = 0;
|
||||
outBytesCount = 0;
|
||||
|
||||
this.factor = factor;
|
||||
distanceMask = (int)mask_bits[factor] << 8;
|
||||
lengthMask = 0xff >> factor;
|
||||
|
||||
windowIndex = 0;
|
||||
length = 0;
|
||||
distance = 0;
|
||||
|
||||
windowsBuffer = new byte[WSIZE];
|
||||
|
||||
outByte = 0;
|
||||
|
||||
LoadBitLengthTable();
|
||||
LoadNextByteTable();
|
||||
}
|
||||
|
||||
public override void Flush()
|
||||
{
|
||||
throw new NotImplementedException();
|
||||
}
|
||||
|
||||
public override long Seek(long offset, SeekOrigin origin)
|
||||
{
|
||||
throw new NotImplementedException();
|
||||
}
|
||||
|
||||
public override void SetLength(long value)
|
||||
{
|
||||
throw new NotImplementedException();
|
||||
}
|
||||
|
||||
public override void Write(byte[] buffer, int offset, int count)
|
||||
{
|
||||
throw new NotImplementedException();
|
||||
}
|
||||
|
||||
public override bool CanRead => true;
|
||||
public override bool CanSeek => false;
|
||||
public override bool CanWrite => false;
|
||||
public override long Length => unCompressedSize;
|
||||
public override long Position
|
||||
{
|
||||
get => outBytesCount;
|
||||
set { }
|
||||
}
|
||||
|
||||
private const int RunLengthCode = 144;
|
||||
private const int WSIZE = 0x4000;
|
||||
|
||||
private readonly uint[] mask_bits = new uint[]
|
||||
{
|
||||
0x0000,
|
||||
0x0001,
|
||||
0x0003,
|
||||
0x0007,
|
||||
0x000f,
|
||||
0x001f,
|
||||
0x003f,
|
||||
0x007f,
|
||||
0x00ff,
|
||||
0x01ff,
|
||||
0x03ff,
|
||||
0x07ff,
|
||||
0x0fff,
|
||||
0x1fff,
|
||||
0x3fff,
|
||||
0x7fff,
|
||||
0xffff
|
||||
};
|
||||
|
||||
private int bitBufferCount;
|
||||
private ulong bitBuffer;
|
||||
|
||||
private int NEXTBYTE()
|
||||
{
|
||||
if (inByteCount == compressedSize)
|
||||
return EOF;
|
||||
inByteCount++;
|
||||
return inStream.ReadByte();
|
||||
}
|
||||
|
||||
private void READBITS(int nbits, out byte zdest)
|
||||
{
|
||||
if (nbits > bitBufferCount)
|
||||
{
|
||||
int temp;
|
||||
while (bitBufferCount <= 8 * (int)(4 - 1) && (temp = NEXTBYTE()) != EOF)
|
||||
{
|
||||
bitBuffer |= (ulong)temp << bitBufferCount;
|
||||
bitBufferCount += 8;
|
||||
}
|
||||
}
|
||||
zdest = (byte)(bitBuffer & (ulong)mask_bits[nbits]);
|
||||
bitBuffer >>= nbits;
|
||||
bitBufferCount -= nbits;
|
||||
}
|
||||
|
||||
private byte[] bitCountTable = [];
|
||||
|
||||
private void LoadBitLengthTable()
|
||||
{
|
||||
byte[] bitPos = { 0, 2, 4, 8, 16, 32, 64, 128, 255 };
|
||||
bitCountTable = new byte[256];
|
||||
|
||||
for (byte i = 1; i <= 8; i++)
|
||||
{
|
||||
int vMin = bitPos[i - 1] + 1;
|
||||
int vMax = bitPos[i];
|
||||
for (int j = vMin; j <= vMax; j++)
|
||||
{
|
||||
bitCountTable[j] = i;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private byte[][] nextByteTable = [];
|
||||
|
||||
private void LoadNextByteTable()
|
||||
{
|
||||
nextByteTable = new byte[256][];
|
||||
for (int x = 255; x >= 0; x--)
|
||||
{
|
||||
READBITS(6, out byte Slen);
|
||||
nextByteTable[x] = new byte[Slen];
|
||||
for (int i = 0; i < Slen; i++)
|
||||
{
|
||||
READBITS(8, out nextByteTable[x][i]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private byte outByte;
|
||||
|
||||
private byte GetNextByte()
|
||||
{
|
||||
if (nextByteTable[outByte].Length == 0)
|
||||
{
|
||||
READBITS(8, out outByte);
|
||||
return outByte;
|
||||
}
|
||||
READBITS(1, out byte nextBit);
|
||||
if (nextBit == 1)
|
||||
{
|
||||
READBITS(8, out outByte);
|
||||
return outByte;
|
||||
}
|
||||
READBITS(bitCountTable[nextByteTable[outByte].Length], out byte nextByteIndex);
|
||||
outByte = nextByteTable[outByte][nextByteIndex];
|
||||
return outByte;
|
||||
}
|
||||
|
||||
public override int Read(byte[] buffer, int offset, int count)
|
||||
{
|
||||
int countIndex = 0;
|
||||
while (countIndex < count && outBytesCount < unCompressedSize)
|
||||
{
|
||||
if (length == 0)
|
||||
{
|
||||
byte nextByte = GetNextByte();
|
||||
if (nextByte != RunLengthCode)
|
||||
{
|
||||
buffer[offset + (countIndex++)] = nextByte;
|
||||
windowsBuffer[windowIndex++] = nextByte;
|
||||
outBytesCount++;
|
||||
if (windowIndex == WSIZE)
|
||||
windowIndex = 0;
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
nextByte = GetNextByte();
|
||||
if (nextByte == 0)
|
||||
{
|
||||
buffer[offset + (countIndex++)] = RunLengthCode;
|
||||
windowsBuffer[windowIndex++] = RunLengthCode;
|
||||
outBytesCount++;
|
||||
if (windowIndex == WSIZE)
|
||||
windowIndex = 0;
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
int lengthDistanceByte = nextByte;
|
||||
length = lengthDistanceByte & lengthMask;
|
||||
if (length == lengthMask)
|
||||
{
|
||||
length += GetNextByte();
|
||||
}
|
||||
length += 3;
|
||||
|
||||
int distanceHighByte = (lengthDistanceByte << factor) & distanceMask;
|
||||
distance = windowIndex - (distanceHighByte + GetNextByte() + 1);
|
||||
|
||||
distance &= WSIZE - 1;
|
||||
}
|
||||
|
||||
while (length != 0 && countIndex < count)
|
||||
{
|
||||
byte nextByte = windowsBuffer[distance++];
|
||||
buffer[offset + (countIndex++)] = nextByte;
|
||||
windowsBuffer[windowIndex++] = nextByte;
|
||||
outBytesCount++;
|
||||
|
||||
if (distance == WSIZE)
|
||||
distance = 0;
|
||||
|
||||
if (windowIndex == WSIZE)
|
||||
windowIndex = 0;
|
||||
|
||||
length--;
|
||||
}
|
||||
}
|
||||
|
||||
return countIndex;
|
||||
}
|
||||
}
|
||||
@@ -11,8 +11,8 @@ public class SourceStream : Stream
    private long _prevSize;
    private readonly List<FileInfo> _files;
    private readonly List<Stream> _streams;
    private readonly Func<int, FileInfo?>? _getFilePart;
    private readonly Func<int, Stream?>? _getStreamPart;
    private readonly Func<int, FileInfo?> _getFilePart;
    private readonly Func<int, Stream?> _getStreamPart;
    private int _stream;

    public SourceStream(FileInfo file, Func<int, FileInfo?> getPart, ReaderOptions options)
@@ -38,8 +38,8 @@ public class SourceStream : Stream
        if (!IsFileMode)
        {
            _streams.Add(stream!);
            _getStreamPart = getStreamPart;
            _getFilePart = _ => null;
            _getStreamPart = getStreamPart!;
            _getFilePart = _ => null!;
            if (stream is FileStream fileStream)
            {
                _files.Add(new FileInfo(fileStream.Name));
@@ -49,8 +49,8 @@ public class SourceStream : Stream
        {
            _files.Add(file!);
            _streams.Add(_files[0].OpenRead());
            _getFilePart = getFilePart;
            _getStreamPart = _ => null;
            _getFilePart = getFilePart!;
            _getStreamPart = _ => null!;
        }
        _stream = 0;
        _prevSize = 0;
@@ -78,7 +78,7 @@ public class SourceStream : Stream
    {
        if (IsFileMode)
        {
            var f = _getFilePart.NotNull("GetFilePart is null")(_streams.Count);
            var f = _getFilePart(_streams.Count);
            if (f == null)
            {
                _stream = _streams.Count - 1;
@@ -90,7 +90,7 @@ public class SourceStream : Stream
        }
        else
        {
            var s = _getStreamPart.NotNull("GetStreamPart is null")(_streams.Count);
            var s = _getStreamPart(_streams.Count);
            if (s == null)
            {
                _stream = _streams.Count - 1;

@@ -1,65 +0,0 @@
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Runtime.CompilerServices;

namespace SharpCompress;

public static class NotNullExtensions
{
    public static IEnumerable<T> Empty<T>(this IEnumerable<T>? source) =>
        source ?? Enumerable.Empty<T>();

    public static IEnumerable<T> Empty<T>(this T? source)
    {
        if (source is null)
        {
            return Enumerable.Empty<T>();
        }
        return source.AsEnumerable();
    }

#if NETFRAMEWORK || NETSTANDARD
    public static T NotNull<T>(this T? obj, string? message = null)
        where T : class
    {
        if (obj is null)
        {
            throw new ArgumentNullException(message ?? "Value is null");
        }
        return obj;
    }

    public static T NotNull<T>(this T? obj, string? message = null)
        where T : struct
    {
        if (obj is null)
        {
            throw new ArgumentNullException(message ?? "Value is null");
        }
        return obj.Value;
    }
#else

    public static T NotNull<T>(
        [NotNull] this T? obj,
        [CallerArgumentExpression(nameof(obj))] string? paramName = null
    )
        where T : class
    {
        ArgumentNullException.ThrowIfNull(obj, paramName);
        return obj;
    }

    public static T NotNull<T>(
        [NotNull] this T? obj,
        [CallerArgumentExpression(nameof(obj))] string? paramName = null
    )
        where T : struct
    {
        ArgumentNullException.ThrowIfNull(obj, paramName);
        return obj.Value;
    }
#endif
}
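Note: this change deletes the NotNullExtensions helpers and switches call sites (for example `_countingWritableSubStream.NotNull().Count` in LZipStream above) to the `!` null-forgiving operator. A small hypothetical caller, for illustration only, showing the difference in failure behaviour:

using System.IO;
using SharpCompress;

internal sealed class NotNullUsageSketch
{
    private Stream? _stream;

    // With the extension method: a missing value fails fast with an ArgumentNullException
    // carrying a readable message instead of a NullReferenceException somewhere later.
    public long LengthViaNotNull() => _stream.NotNull("stream not initialized").Length;

    // After this diff: the compiler warning is silenced with the null-forgiving operator,
    // and a missing value surfaces as a NullReferenceException at the point of use.
    public long LengthViaBang() => _stream!.Length;
}
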
@@ -13,9 +13,9 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
|
||||
where TEntry : Entry
|
||||
where TVolume : Volume
|
||||
{
|
||||
private bool _completed;
|
||||
private IEnumerator<TEntry>? _entriesForCurrentReadStream;
|
||||
private bool _wroteCurrentEntry;
|
||||
private bool completed;
|
||||
private IEnumerator<TEntry>? entriesForCurrentReadStream;
|
||||
private bool wroteCurrentEntry;
|
||||
|
||||
public event EventHandler<ReaderExtractionEventArgs<IEntry>>? EntryExtractionProgress;
|
||||
|
||||
@@ -35,18 +35,18 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
|
||||
/// <summary>
|
||||
/// Current volume that the current entry resides in
|
||||
/// </summary>
|
||||
public abstract TVolume? Volume { get; }
|
||||
public abstract TVolume Volume { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Current file entry
|
||||
/// </summary>
|
||||
public TEntry Entry => _entriesForCurrentReadStream.NotNull().Current;
|
||||
public TEntry Entry => entriesForCurrentReadStream!.Current;
|
||||
|
||||
#region IDisposable Members
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
_entriesForCurrentReadStream?.Dispose();
|
||||
entriesForCurrentReadStream?.Dispose();
|
||||
Volume?.Dispose();
|
||||
}
|
||||
|
||||
@@ -61,7 +61,7 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
|
||||
/// </summary>
|
||||
public void Cancel()
|
||||
{
|
||||
if (!_completed)
|
||||
if (!completed)
|
||||
{
|
||||
Cancelled = true;
|
||||
}
|
||||
@@ -69,7 +69,7 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
|
||||
|
||||
public bool MoveToNextEntry()
|
||||
{
|
||||
if (_completed)
|
||||
if (completed)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
@@ -77,27 +77,27 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
|
||||
{
|
||||
throw new ReaderCancelledException("Reader has been cancelled.");
|
||||
}
|
||||
if (_entriesForCurrentReadStream is null)
|
||||
if (entriesForCurrentReadStream is null)
|
||||
{
|
||||
return LoadStreamForReading(RequestInitialStream());
|
||||
}
|
||||
if (!_wroteCurrentEntry)
|
||||
if (!wroteCurrentEntry)
|
||||
{
|
||||
SkipEntry();
|
||||
}
|
||||
_wroteCurrentEntry = false;
|
||||
wroteCurrentEntry = false;
|
||||
if (NextEntryForCurrentStream())
|
||||
{
|
||||
return true;
|
||||
}
|
||||
_completed = true;
|
||||
completed = true;
|
||||
return false;
|
||||
}
|
||||
|
||||
protected bool LoadStreamForReading(Stream stream)
|
||||
{
|
||||
_entriesForCurrentReadStream?.Dispose();
|
||||
if (stream is null || !stream.CanRead)
|
||||
entriesForCurrentReadStream?.Dispose();
|
||||
if ((stream is null) || (!stream.CanRead))
|
||||
{
|
||||
throw new MultipartStreamRequiredException(
|
||||
"File is split into multiple archives: '"
|
||||
@@ -105,15 +105,13 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
|
||||
+ "'. A new readable stream is required. Use Cancel if it was intended."
|
||||
);
|
||||
}
|
||||
_entriesForCurrentReadStream = GetEntries(stream).GetEnumerator();
|
||||
return _entriesForCurrentReadStream.MoveNext();
|
||||
entriesForCurrentReadStream = GetEntries(stream).GetEnumerator();
|
||||
return entriesForCurrentReadStream.MoveNext();
|
||||
}
|
||||
|
||||
protected virtual Stream RequestInitialStream() =>
|
||||
Volume.NotNull("Volume isn't loaded.").Stream;
|
||||
protected virtual Stream RequestInitialStream() => Volume.Stream;
|
||||
|
||||
internal virtual bool NextEntryForCurrentStream() =>
|
||||
_entriesForCurrentReadStream.NotNull().MoveNext();
|
||||
internal virtual bool NextEntryForCurrentStream() => entriesForCurrentReadStream!.MoveNext();
|
||||
|
||||
protected abstract IEnumerable<TEntry> GetEntries(Stream stream);
|
||||
|
||||
@@ -151,7 +149,7 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
|
||||
|
||||
public void WriteEntryTo(Stream writableStream)
|
||||
{
|
||||
if (_wroteCurrentEntry)
|
||||
if (wroteCurrentEntry)
|
||||
{
|
||||
throw new ArgumentException("WriteEntryTo or OpenEntryStream can only be called once.");
|
||||
}
|
||||
@@ -168,7 +166,7 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
|
||||
}
|
||||
|
||||
Write(writableStream);
|
||||
_wroteCurrentEntry = true;
|
||||
wroteCurrentEntry = true;
|
||||
}
|
||||
|
||||
internal void Write(Stream writeStream)
|
||||
@@ -180,12 +178,12 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
|
||||
|
||||
public EntryStream OpenEntryStream()
|
||||
{
|
||||
if (_wroteCurrentEntry)
|
||||
if (wroteCurrentEntry)
|
||||
{
|
||||
throw new ArgumentException("WriteEntryTo or OpenEntryStream can only be called once.");
|
||||
}
|
||||
var stream = GetEntryStream();
|
||||
_wroteCurrentEntry = true;
|
||||
wroteCurrentEntry = true;
|
||||
return stream;
|
||||
}
|
||||
|
||||
|
||||
@@ -7,8 +7,8 @@ namespace SharpCompress.Readers.GZip;
|
||||
|
||||
public class GZipReader : AbstractReader<GZipEntry, GZipVolume>
|
||||
{
|
||||
private GZipReader(Stream stream, ReaderOptions options)
|
||||
: base(options, ArchiveType.GZip) => Volume = new GZipVolume(stream, options, 0);
|
||||
internal GZipReader(Stream stream, ReaderOptions options)
|
||||
: base(options, ArchiveType.GZip) => Volume = new GZipVolume(stream, options);
|
||||
|
||||
public override GZipVolume Volume { get; }
|
||||
|
||||
|
||||
@@ -17,7 +17,7 @@ internal class MultiVolumeRarReader : RarReader
|
||||
internal MultiVolumeRarReader(IEnumerable<Stream> streams, ReaderOptions options)
|
||||
: base(options) => this.streams = streams.GetEnumerator();
|
||||
|
||||
protected override void ValidateArchive(RarVolume archive) { }
|
||||
internal override void ValidateArchive(RarVolume archive) { }
|
||||
|
||||
protected override Stream RequestInitialStream()
|
||||
{
|
||||
|
||||
@@ -14,16 +14,16 @@ namespace SharpCompress.Readers.Rar;
|
||||
public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
|
||||
{
|
||||
private RarVolume? volume;
|
||||
private Lazy<IRarUnpack> UnpackV2017 { get; } =
|
||||
internal Lazy<IRarUnpack> UnpackV2017 { get; } =
|
||||
new(() => new Compressors.Rar.UnpackV2017.Unpack());
|
||||
private Lazy<IRarUnpack> UnpackV1 { get; } = new(() => new Compressors.Rar.UnpackV1.Unpack());
|
||||
internal Lazy<IRarUnpack> UnpackV1 { get; } = new(() => new Compressors.Rar.UnpackV1.Unpack());
|
||||
|
||||
internal RarReader(ReaderOptions options)
|
||||
: base(options, ArchiveType.Rar) { }
|
||||
|
||||
protected abstract void ValidateArchive(RarVolume archive);
|
||||
internal abstract void ValidateArchive(RarVolume archive);
|
||||
|
||||
public override RarVolume? Volume => volume;
|
||||
public override RarVolume Volume => volume!;
|
||||
|
||||
/// <summary>
|
||||
/// Opens a RarReader for Non-seeking usage with a single volume
|
||||
@@ -51,7 +51,7 @@ public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
|
||||
|
||||
protected override IEnumerable<RarReaderEntry> GetEntries(Stream stream)
|
||||
{
|
||||
volume = new RarReaderVolume(stream, Options, 0);
|
||||
volume = new RarReaderVolume(stream, Options);
|
||||
foreach (var fp in volume.ReadFileParts())
|
||||
{
|
||||
ValidateArchive(volume);
|
||||
|
||||
@@ -8,7 +8,7 @@ namespace SharpCompress.Readers.Rar;
|
||||
|
||||
public class RarReaderVolume : RarVolume
|
||||
{
|
||||
internal RarReaderVolume(Stream stream, ReaderOptions options, int index)
|
||||
internal RarReaderVolume(Stream stream, ReaderOptions options, int index = 0)
|
||||
: base(StreamingMode.Streaming, stream, options, index) { }
|
||||
|
||||
internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader) =>
|
||||
|
||||
@@ -11,7 +11,7 @@ internal class SingleVolumeRarReader : RarReader
|
||||
internal SingleVolumeRarReader(Stream stream, ReaderOptions options)
|
||||
: base(options) => this.stream = stream;
|
||||
|
||||
protected override void ValidateArchive(RarVolume archive)
|
||||
internal override void ValidateArchive(RarVolume archive)
|
||||
{
|
||||
if (archive.IsMultiVolume)
|
||||
{
|
||||
|
||||
@@ -69,6 +69,7 @@ public class ZipReader : AbstractReader<ZipEntry, ZipVolume>
|
||||
switch (h.ZipHeaderType)
|
||||
{
|
||||
case ZipHeaderType.LocalEntry:
|
||||
|
||||
{
|
||||
yield return new ZipEntry(
|
||||
new StreamingZipFilePart((LocalEntryHeader)h, stream)
|
||||
|
||||
@@ -2,11 +2,11 @@
|
||||
<PropertyGroup>
|
||||
<AssemblyTitle>SharpCompress - Pure C# Decompression/Compression</AssemblyTitle>
|
||||
<NeutralLanguage>en-US</NeutralLanguage>
|
||||
<VersionPrefix>0.38.0</VersionPrefix>
|
||||
<AssemblyVersion>0.38.0</AssemblyVersion>
|
||||
<FileVersion>0.38.0</FileVersion>
|
||||
<VersionPrefix>0.36.0</VersionPrefix>
|
||||
<AssemblyVersion>0.36.0</AssemblyVersion>
|
||||
<FileVersion>0.36.0</FileVersion>
|
||||
<Authors>Adam Hathcock</Authors>
|
||||
<TargetFrameworks>net462;netstandard2.0;netstandard2.1;net6.0;net8.0</TargetFrameworks>
|
||||
<TargetFrameworks>net462;netstandard2.0;netstandard2.1;net6.0;net7.0;net8.0</TargetFrameworks>
|
||||
<AssemblyName>SharpCompress</AssemblyName>
|
||||
<AssemblyOriginatorKeyFile>../../SharpCompress.snk</AssemblyOriginatorKeyFile>
|
||||
<SignAssembly>true</SignAssembly>
|
||||
@@ -17,37 +17,33 @@
|
||||
<Copyright>Copyright (c) 2014 Adam Hathcock</Copyright>
|
||||
<GenerateAssemblyTitleAttribute>false</GenerateAssemblyTitleAttribute>
|
||||
<GenerateAssemblyProductAttribute>false</GenerateAssemblyProductAttribute>
|
||||
<Description>SharpCompress is a compression library for NET Standard 2.0/NET Standard 2.1/NET 6.0/NET 8.0 that can unrar, decompress 7zip, decompress xz, zip/unzip, tar/untar lzip/unlzip, bzip2/unbzip2 and gzip/ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip is implemented.</Description>
|
||||
<Description>SharpCompress is a compression library for NET Standard 2.0/2.1/NET 6.0/NET 7.0 that can unrar, decompress 7zip, decompress xz, zip/unzip, tar/untar lzip/unlzip, bzip2/unbzip2 and gzip/ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip is implemented.</Description>
|
||||
<PublishRepositoryUrl>true</PublishRepositoryUrl>
|
||||
<IncludeSymbols>true</IncludeSymbols>
|
||||
<SymbolPackageFormat>snupkg</SymbolPackageFormat>
|
||||
<IsTrimmable>true</IsTrimmable>
|
||||
<LangVersion>latest</LangVersion>
|
||||
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
|
||||
<PackageReadmeFile>README.md</PackageReadmeFile>
|
||||
<ContinuousIntegrationBuild>true</ContinuousIntegrationBuild>
|
||||
<EmbedUntrackedSources>true</EmbedUntrackedSources>
|
||||
<AllowedOutputExtensionsInPackageBuildOutputFolder>$(AllowedOutputExtensionsInPackageBuildOutputFolder);.pdb</AllowedOutputExtensionsInPackageBuildOutputFolder>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition=" '$(TargetFramework)' == 'net8.0' ">
|
||||
<IsTrimmable>true</IsTrimmable>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<PackageReference Include="ZstdSharp.Port" />
|
||||
</ItemGroup>
|
||||
<ItemGroup Condition=" '$(TargetFramework)' == 'netstandard2.1' ">
|
||||
<PackageReference Include="Microsoft.Bcl.AsyncInterfaces" />
|
||||
<PackageReference Include="System.Text.Encoding.CodePages" />
|
||||
<ItemGroup>
|
||||
<Compile Remove="Compressors\Lzw\LzwException.cs" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.SourceLink.GitHub" Version="8.0.0" PrivateAssets="All" />
|
||||
<PackageReference Include="ZstdSharp.Port" Version="0.7.4" />
|
||||
</ItemGroup>
|
||||
<ItemGroup Condition=" '$(TargetFramework)' == 'netstandard2.1' ">
|
||||
<PackageReference Include="System.Text.Encoding.CodePages" Version="8.0.0" />
|
||||
</ItemGroup>
|
||||
<ItemGroup Condition=" '$(TargetFramework)' == 'netstandard2.0' ">
|
||||
<PackageReference Include="System.Text.Encoding.CodePages" Version="8.0.0" />
|
||||
<PackageReference Include="System.Memory" Version="4.5.5" />
|
||||
</ItemGroup>
|
||||
<ItemGroup Condition=" '$(TargetFramework)' == 'netstandard2.0' ">
|
||||
<PackageReference Include="Microsoft.Bcl.AsyncInterfaces" />
|
||||
<PackageReference Include="System.Text.Encoding.CodePages" />
|
||||
<PackageReference Include="System.Memory" />
|
||||
</ItemGroup>
|
||||
<ItemGroup Condition=" '$(VersionlessImplicitFrameworkDefine)' == 'NETFRAMEWORK' ">
|
||||
<PackageReference Include="Microsoft.Bcl.AsyncInterfaces" />
|
||||
<PackageReference Include="System.Text.Encoding.CodePages" />
|
||||
<PackageReference Include="System.Memory" />
|
||||
<PackageReference Include="System.Text.Encoding.CodePages" Version="8.0.0" />
|
||||
<PackageReference Include="System.Memory" Version="4.5.5" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<None Include="..\..\README.md" Pack="true" PackagePath="\" />
|
||||
|
||||
@@ -2,7 +2,8 @@ using System;
|
||||
using System.Buffers;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress;
|
||||
@@ -278,42 +279,8 @@ public static class Utility
|
||||
long total = 0;
|
||||
while (ReadTransferBlock(source, array, out var count))
|
||||
{
|
||||
destination.Write(array, 0, count);
|
||||
total += count;
|
||||
}
|
||||
return total;
|
||||
}
|
||||
finally
|
||||
{
|
||||
ArrayPool<byte>.Shared.Return(array);
|
||||
}
|
||||
}
|
||||
|
||||
public static long TransferTo(this Stream source, Stream destination, long maxLength)
|
||||
{
|
||||
var array = GetTransferByteArray();
|
||||
var maxReadSize = array.Length;
|
||||
try
|
||||
{
|
||||
long total = 0;
|
||||
var remaining = maxLength;
|
||||
if (remaining < maxReadSize)
|
||||
{
|
||||
maxReadSize = (int)remaining;
|
||||
}
|
||||
while (ReadTransferBlock(source, array, maxReadSize, out var count))
|
||||
{
|
||||
destination.Write(array, 0, count);
|
||||
total += count;
|
||||
if (remaining - count < 0)
|
||||
{
|
||||
break;
|
||||
}
|
||||
remaining -= count;
|
||||
if (remaining < maxReadSize)
|
||||
{
|
||||
maxReadSize = (int)remaining;
|
||||
}
|
||||
}
|
||||
return total;
|
||||
}
|
||||
@@ -353,16 +320,6 @@ public static class Utility
|
||||
private static bool ReadTransferBlock(Stream source, byte[] array, out int count) =>
|
||||
(count = source.Read(array, 0, array.Length)) != 0;
|
||||
|
||||
private static bool ReadTransferBlock(Stream source, byte[] array, int size, out int count)
|
||||
{
|
||||
if (size > array.Length)
|
||||
{
|
||||
size = array.Length;
|
||||
}
|
||||
count = source.Read(array, 0, size);
|
||||
return count != 0;
|
||||
}
|
||||
|
||||
private static byte[] GetTransferByteArray() => ArrayPool<byte>.Shared.Rent(81920);
|
||||
|
||||
public static bool ReadFully(this Stream stream, byte[] buffer)
|
||||
@@ -436,16 +393,9 @@ public static class Utility
|
||||
buffer[offset + 3] = (byte)number;
|
||||
}
|
||||
|
||||
public static string ReplaceInvalidFileNameChars(string fileName)
|
||||
{
|
||||
var invalidChars = new HashSet<char>(Path.GetInvalidFileNameChars());
|
||||
var sb = new StringBuilder(fileName.Length);
|
||||
foreach (var c in fileName)
|
||||
{
|
||||
var newChar = invalidChars.Contains(c) ? '_' : c;
|
||||
sb.Append(newChar);
|
||||
}
|
||||
|
||||
return sb.ToString();
|
||||
}
|
||||
public static async ValueTask WriteAsync(
|
||||
this Stream stream,
|
||||
byte[] bytes,
|
||||
CancellationToken cancellationToken
|
||||
) => await stream.WriteAsync(bytes, 0, bytes.Length, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
@@ -1,26 +1,42 @@
|
||||
using System;
|
||||
#nullable disable
|
||||
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
|
||||
namespace SharpCompress.Writers;
|
||||
|
||||
#pragma warning disable CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider declaring as nullable.
|
||||
public abstract class AbstractWriter(ArchiveType type, WriterOptions writerOptions) : IWriter
|
||||
public abstract class AbstractWriter : IWriter
|
||||
{
|
||||
private bool _isDisposed;
|
||||
|
||||
//always initializes the stream
|
||||
protected AbstractWriter(ArchiveType type, WriterOptions writerOptions)
|
||||
{
|
||||
WriterType = type;
|
||||
WriterOptions = writerOptions;
|
||||
}
|
||||
|
||||
protected void InitializeStream(Stream stream) => OutputStream = stream;
|
||||
protected void InitalizeStream(Stream stream) => OutputStream = stream;
|
||||
|
||||
protected Stream OutputStream { get; private set; }
|
||||
#pragma warning restore CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider declaring as nullable.
|
||||
|
||||
public ArchiveType WriterType { get; } = type;
|
||||
public ArchiveType WriterType { get; }
|
||||
|
||||
protected WriterOptions WriterOptions { get; } = writerOptions;
|
||||
protected WriterOptions WriterOptions { get; }
|
||||
|
||||
public abstract void Write(string filename, Stream source, DateTime? modificationTime);
|
||||
#if !NETFRAMEWORK && !NETSTANDARD2_0
|
||||
public abstract ValueTask WriteAsync(
|
||||
string filename,
|
||||
Stream source,
|
||||
DateTime? modificationTime,
|
||||
CancellationToken cancellationToken
|
||||
);
|
||||
|
||||
public abstract ValueTask DisposeAsync();
|
||||
#endif
|
||||
|
||||
protected virtual void Dispose(bool isDisposing)
|
||||
{
|
||||
|
||||
@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
@@ -18,7 +20,7 @@ public sealed class GZipWriter : AbstractWriter
{
destination = NonDisposingStream.Create(destination);
}
InitializeStream(
InitalizeStream(
new GZipStream(
destination,
CompressionMode.Compress,
@@ -50,4 +52,15 @@ public sealed class GZipWriter : AbstractWriter
source.TransferTo(stream);
_wroteToStream = true;
}

#if !NETFRAMEWORK && !NETSTANDARD2_0
public override ValueTask DisposeAsync() => throw new NotImplementedException();

public override ValueTask WriteAsync(
string filename,
Stream source,
DateTime? modificationTime,
CancellationToken cancellationToken
) => throw new NotImplementedException();
#endif
}
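A hedged usage sketch of the synchronous writer surface touched by this hunk, going through WriterFactory rather than the concrete type; the factory and option names follow the public SharpCompress API as I understand it and should be verified against the release in use.

using System;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Writers;

// Writes a single entry to a .gz stream via the factory. The async overloads added in
// this diff throw NotImplementedException for GZip, so only the sync path is shown.
using var output = File.Create("file.txt.gz");
using var writer = WriterFactory.Open(output, ArchiveType.GZip, new WriterOptions(CompressionType.GZip));
using var input = File.OpenRead("file.txt");
writer.Write("file.txt", input, DateTime.Now);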
@@ -1,11 +1,24 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;

namespace SharpCompress.Writers;

public interface IWriter : IDisposable
#if !NETFRAMEWORK && !NETSTANDARD2_0
, IAsyncDisposable
#endif
{
ArchiveType WriterType { get; }
void Write(string filename, Stream source, DateTime? modificationTime);
#if !NETFRAMEWORK && !NETSTANDARD2_0
ValueTask WriteAsync(
string filename,
Stream source,
DateTime? modificationTime,
CancellationToken cancellationToken
);
#endif
}
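To illustrate the conditional async surface added to IWriter above, a minimal no-op implementation sketch; the NullWriter name is made up for illustration only.

using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Writers;

public sealed class NullWriter : IWriter
{
    public ArchiveType WriterType => ArchiveType.Zip;

    public void Write(string filename, Stream source, DateTime? modificationTime) { }

    public void Dispose() { }

#if !NETFRAMEWORK && !NETSTANDARD2_0
    // The async members only exist on targets where IAsyncDisposable is available.
    public ValueTask WriteAsync(
        string filename,
        Stream source,
        DateTime? modificationTime,
        CancellationToken cancellationToken
    ) => ValueTask.CompletedTask;

    public ValueTask DisposeAsync() => ValueTask.CompletedTask;
#endif
}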
@@ -1,5 +1,7 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Tar.Headers;
|
||||
using SharpCompress.Compressors;
|
||||
@@ -32,16 +34,19 @@ public class TarWriter : AbstractWriter
|
||||
case CompressionType.None:
|
||||
break;
|
||||
case CompressionType.BZip2:
|
||||
|
||||
{
|
||||
destination = new BZip2Stream(destination, CompressionMode.Compress, false);
|
||||
}
|
||||
break;
|
||||
case CompressionType.GZip:
|
||||
|
||||
{
|
||||
destination = new GZipStream(destination, CompressionMode.Compress);
|
||||
}
|
||||
break;
|
||||
case CompressionType.LZip:
|
||||
|
||||
{
|
||||
destination = new LZipStream(destination, CompressionMode.Compress);
|
||||
}
|
||||
@@ -53,7 +58,7 @@ public class TarWriter : AbstractWriter
|
||||
);
|
||||
}
|
||||
}
|
||||
InitializeStream(destination);
|
||||
InitalizeStream(destination);
|
||||
}
|
||||
|
||||
public override void Write(string filename, Stream source, DateTime? modificationTime) =>
|
||||
@@ -87,7 +92,8 @@ public class TarWriter : AbstractWriter
|
||||
header.Name = NormalizeFilename(filename);
|
||||
header.Size = realSize;
|
||||
header.Write(OutputStream);
|
||||
size = source.TransferTo(OutputStream, realSize);
|
||||
|
||||
size = source.TransferTo(OutputStream);
|
||||
PadTo512(size.Value);
|
||||
}
|
||||
|
||||
@@ -122,4 +128,15 @@ public class TarWriter : AbstractWriter
|
||||
}
|
||||
base.Dispose(isDisposing);
|
||||
}
|
||||
|
||||
#if !NETFRAMEWORK && !NETSTANDARD2_0
|
||||
public override ValueTask DisposeAsync() => throw new NotImplementedException();
|
||||
|
||||
public override ValueTask WriteAsync(
|
||||
string filename,
|
||||
Stream source,
|
||||
DateTime? modificationTime,
|
||||
CancellationToken cancellationToken
|
||||
) => throw new NotImplementedException();
|
||||
#endif
|
||||
}
|
||||
|
||||
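For context on the PadTo512 call in the TarWriter hunk above: ustar entry data is always padded to a 512-byte block boundary. A standalone sketch of that padding step; the method body here is an assumption based on the tar format rather than on code shown in this diff.

using System.IO;

static class TarPaddingSketch
{
    // Pads the output so the bytes written for an entry end on a 512-byte boundary.
    public static void PadTo512(Stream output, long bytesWritten)
    {
        var remainder = (int)(bytesWritten % 512);
        if (remainder == 0)
        {
            return;
        }
        var padding = new byte[512 - remainder];
        output.Write(padding, 0, padding.Length);
    }
}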
src/SharpCompress/Writers/Zip/ZipWriter.Legacy.cs (new file, 560 lines)
@@ -0,0 +1,560 @@
|
||||
#if NETFRAMEWORK || NETSTANDARD2_0
|
||||
using System;
|
||||
using System.Buffers.Binary;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Zip;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
using SharpCompress.Compressors;
|
||||
using SharpCompress.Compressors.BZip2;
|
||||
using SharpCompress.Compressors.Deflate;
|
||||
using SharpCompress.Compressors.LZMA;
|
||||
using SharpCompress.Compressors.PPMd;
|
||||
using SharpCompress.IO;
|
||||
|
||||
namespace SharpCompress.Writers.Zip;
|
||||
|
||||
public class ZipWriter : AbstractWriter
|
||||
{
|
||||
private readonly CompressionType compressionType;
|
||||
private readonly CompressionLevel compressionLevel;
|
||||
private readonly List<ZipCentralDirectoryEntry> entries = new();
|
||||
private readonly string zipComment;
|
||||
private long streamPosition;
|
||||
private PpmdProperties? ppmdProps;
|
||||
private readonly bool isZip64;
|
||||
|
||||
public ZipWriter(Stream destination, ZipWriterOptions zipWriterOptions)
|
||||
: base(ArchiveType.Zip, zipWriterOptions)
|
||||
{
|
||||
zipComment = zipWriterOptions.ArchiveComment ?? string.Empty;
|
||||
isZip64 = zipWriterOptions.UseZip64;
|
||||
if (destination.CanSeek)
|
||||
{
|
||||
streamPosition = destination.Position;
|
||||
}
|
||||
|
||||
compressionType = zipWriterOptions.CompressionType;
|
||||
compressionLevel = zipWriterOptions.DeflateCompressionLevel;
|
||||
|
||||
if (WriterOptions.LeaveStreamOpen)
|
||||
{
|
||||
destination = NonDisposingStream.Create(destination);
|
||||
}
|
||||
InitalizeStream(destination);
|
||||
}
|
||||
|
||||
private PpmdProperties PpmdProperties => ppmdProps ??= new PpmdProperties();
|
||||
|
||||
protected override void Dispose(bool isDisposing)
|
||||
{
|
||||
if (isDisposing)
|
||||
{
|
||||
ulong size = 0;
|
||||
foreach (var entry in entries)
|
||||
{
|
||||
size += entry.Write(OutputStream);
|
||||
}
|
||||
WriteEndRecord(size);
|
||||
}
|
||||
base.Dispose(isDisposing);
|
||||
}
|
||||
|
||||
private static ZipCompressionMethod ToZipCompressionMethod(CompressionType compressionType) =>
|
||||
compressionType switch
|
||||
{
|
||||
CompressionType.None => ZipCompressionMethod.None,
|
||||
CompressionType.Deflate => ZipCompressionMethod.Deflate,
|
||||
CompressionType.BZip2 => ZipCompressionMethod.BZip2,
|
||||
CompressionType.LZMA => ZipCompressionMethod.LZMA,
|
||||
CompressionType.PPMd => ZipCompressionMethod.PPMd,
|
||||
_ => throw new InvalidFormatException("Invalid compression method: " + compressionType)
|
||||
};
|
||||
|
||||
public override void Write(string entryPath, Stream source, DateTime? modificationTime) =>
|
||||
Write(
|
||||
entryPath,
|
||||
source,
|
||||
new ZipWriterEntryOptions() { ModificationDateTime = modificationTime }
|
||||
);
|
||||
|
||||
public void Write(string entryPath, Stream source, ZipWriterEntryOptions zipWriterEntryOptions)
|
||||
{
|
||||
using var output = WriteToStream(entryPath, zipWriterEntryOptions);
|
||||
source.TransferTo(output);
|
||||
}
|
||||
|
||||
public Stream WriteToStream(string entryPath, ZipWriterEntryOptions options)
|
||||
{
|
||||
var compression = ToZipCompressionMethod(options.CompressionType ?? compressionType);
|
||||
|
||||
entryPath = NormalizeFilename(entryPath);
|
||||
options.ModificationDateTime ??= DateTime.Now;
|
||||
options.EntryComment ??= string.Empty;
|
||||
var entry = new ZipCentralDirectoryEntry(
|
||||
compression,
|
||||
entryPath,
|
||||
(ulong)streamPosition,
|
||||
WriterOptions.ArchiveEncoding
|
||||
)
|
||||
{
|
||||
Comment = options.EntryComment,
|
||||
ModificationTime = options.ModificationDateTime
|
||||
};
|
||||
|
||||
// Use the archive default setting for zip64 and allow overrides
|
||||
var useZip64 = isZip64;
|
||||
if (options.EnableZip64.HasValue)
|
||||
{
|
||||
useZip64 = options.EnableZip64.Value;
|
||||
}
|
||||
|
||||
var headersize = (uint)WriteHeader(entryPath, options, entry, useZip64);
|
||||
streamPosition += headersize;
|
||||
return new ZipWritingStream(
|
||||
this,
|
||||
OutputStream,
|
||||
entry,
|
||||
compression,
|
||||
options.DeflateCompressionLevel ?? compressionLevel
|
||||
);
|
||||
}
|
||||
|
||||
private string NormalizeFilename(string filename)
|
||||
{
|
||||
filename = filename.Replace('\\', '/');
|
||||
|
||||
var pos = filename.IndexOf(':');
|
||||
if (pos >= 0)
|
||||
{
|
||||
filename = filename.Remove(0, pos + 1);
|
||||
}
|
||||
|
||||
return filename.Trim('/');
|
||||
}
|
||||
|
||||
private int WriteHeader(
|
||||
string filename,
|
||||
ZipWriterEntryOptions zipWriterEntryOptions,
|
||||
ZipCentralDirectoryEntry entry,
|
||||
bool useZip64
|
||||
)
|
||||
{
|
||||
// We err on the side of caution until the zip specification clarifies how to support this
|
||||
if (!OutputStream.CanSeek && useZip64)
|
||||
{
|
||||
throw new NotSupportedException(
|
||||
"Zip64 extensions are not supported on non-seekable streams"
|
||||
);
|
||||
}
|
||||
|
||||
var explicitZipCompressionInfo = ToZipCompressionMethod(
|
||||
zipWriterEntryOptions.CompressionType ?? compressionType
|
||||
);
|
||||
var encodedFilename = WriterOptions.ArchiveEncoding.Encode(filename);
|
||||
|
||||
Span<byte> intBuf = stackalloc byte[4];
|
||||
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, ZipHeaderFactory.ENTRY_HEADER_BYTES);
|
||||
OutputStream.Write(intBuf);
|
||||
if (explicitZipCompressionInfo == ZipCompressionMethod.Deflate)
|
||||
{
|
||||
if (OutputStream.CanSeek && useZip64)
|
||||
{
|
||||
OutputStream.Write(stackalloc byte[] { 45, 0 }); //smallest allowed version for zip64
|
||||
}
|
||||
else
|
||||
{
|
||||
OutputStream.Write(stackalloc byte[] { 20, 0 }); //older version which is more compatible
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
OutputStream.Write(stackalloc byte[] { 63, 0 }); //version says we used PPMd or LZMA
|
||||
}
|
||||
var flags = Equals(WriterOptions.ArchiveEncoding.GetEncoding(), Encoding.UTF8)
|
||||
? HeaderFlags.Efs
|
||||
: 0;
|
||||
if (!OutputStream.CanSeek)
|
||||
{
|
||||
flags |= HeaderFlags.UsePostDataDescriptor;
|
||||
|
||||
if (explicitZipCompressionInfo == ZipCompressionMethod.LZMA)
|
||||
{
|
||||
flags |= HeaderFlags.Bit1; // eos marker
|
||||
}
|
||||
}
|
||||
|
||||
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)flags);
|
||||
OutputStream.Write(intBuf.Slice(0, 2));
|
||||
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)explicitZipCompressionInfo);
|
||||
OutputStream.Write(intBuf.Slice(0, 2)); // zipping method
|
||||
BinaryPrimitives.WriteUInt32LittleEndian(
|
||||
intBuf,
|
||||
zipWriterEntryOptions.ModificationDateTime.DateTimeToDosTime()
|
||||
);
|
||||
OutputStream.Write(intBuf);
|
||||
|
||||
// zipping date and time
|
||||
OutputStream.Write(stackalloc byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 });
|
||||
|
||||
// unused CRC, un/compressed size, updated later
|
||||
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)encodedFilename.Length);
|
||||
OutputStream.Write(intBuf.Slice(0, 2)); // filename length
|
||||
|
||||
var extralength = 0;
|
||||
if (OutputStream.CanSeek && useZip64)
|
||||
{
|
||||
extralength = 2 + 2 + 8 + 8;
|
||||
}
|
||||
|
||||
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)extralength);
|
||||
OutputStream.Write(intBuf.Slice(0, 2)); // extra length
|
||||
OutputStream.Write(encodedFilename, 0, encodedFilename.Length);
|
||||
|
||||
if (extralength != 0)
|
||||
{
|
||||
OutputStream.Write(new byte[extralength], 0, extralength); // reserve space for zip64 data
|
||||
entry.Zip64HeaderOffset = (ushort)(6 + 2 + 2 + 4 + 12 + 2 + 2 + encodedFilename.Length);
|
||||
}
|
||||
|
||||
return 6 + 2 + 2 + 4 + 12 + 2 + 2 + encodedFilename.Length + extralength;
|
||||
}
|
||||
|
||||
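The return value of WriteHeader above is the size of the local file header just written: a 30-byte fixed part plus the variable-length name and extra field. Spelled out, following the grouping used in the expression:

// 4 (signature "PK\x03\x04") + 2 (version needed)        =  6
// + 2 (general purpose flags) + 2 (compression method)   = 10
// + 4 (DOS modification date/time)                       = 14
// + 12 (CRC-32, compressed size, uncompressed size)      = 26
// + 2 (file name length) + 2 (extra field length)        = 30
const int FixedLocalHeaderSize = 6 + 2 + 2 + 4 + 12 + 2 + 2; // = 30 bytes
// bytes written = FixedLocalHeaderSize + encodedFilename.Length + extralength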
private void WriteFooter(uint crc, uint compressed, uint uncompressed)
|
||||
{
|
||||
Span<byte> intBuf = stackalloc byte[4];
|
||||
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, crc);
|
||||
OutputStream.Write(intBuf);
|
||||
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, compressed);
|
||||
OutputStream.Write(intBuf);
|
||||
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, uncompressed);
|
||||
OutputStream.Write(intBuf);
|
||||
}
|
||||
|
||||
private void WriteEndRecord(ulong size)
|
||||
{
|
||||
var zip64EndOfCentralDirectoryNeeded =
|
||||
entries.Count > ushort.MaxValue
|
||||
|| streamPosition >= uint.MaxValue
|
||||
|| size >= uint.MaxValue;
|
||||
|
||||
var sizevalue = size >= uint.MaxValue ? uint.MaxValue : (uint)size;
|
||||
var streampositionvalue =
|
||||
streamPosition >= uint.MaxValue ? uint.MaxValue : (uint)streamPosition;
|
||||
|
||||
Span<byte> intBuf = stackalloc byte[8];
|
||||
if (zip64EndOfCentralDirectoryNeeded)
|
||||
{
|
||||
var recordlen = 2 + 2 + 4 + 4 + 8 + 8 + 8 + 8;
|
||||
|
||||
// Write zip64 end of central directory record
|
||||
OutputStream.Write(stackalloc byte[] { 80, 75, 6, 6 });
|
||||
|
||||
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)recordlen);
|
||||
OutputStream.Write(intBuf); // Size of zip64 end of central directory record
|
||||
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 45);
|
||||
OutputStream.Write(intBuf.Slice(0, 2)); // Made by
|
||||
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 45);
|
||||
OutputStream.Write(intBuf.Slice(0, 2)); // Version needed
|
||||
|
||||
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 0);
|
||||
OutputStream.Write(intBuf.Slice(0, 4)); // Disk number
|
||||
OutputStream.Write(intBuf.Slice(0, 4)); // Central dir disk
|
||||
|
||||
// TODO: entries.Count is int, so max 2^31 files
|
||||
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)entries.Count);
|
||||
OutputStream.Write(intBuf); // Entries in this disk
|
||||
OutputStream.Write(intBuf); // Total entries
|
||||
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, size);
|
||||
OutputStream.Write(intBuf); // Central Directory size
|
||||
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)streamPosition);
|
||||
OutputStream.Write(intBuf); // Disk offset
|
||||
|
||||
// Write zip64 end of central directory locator
|
||||
OutputStream.Write(stackalloc byte[] { 80, 75, 6, 7 });
|
||||
|
||||
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 0);
|
||||
OutputStream.Write(intBuf.Slice(0, 4)); // Entry disk
|
||||
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)streamPosition + size);
|
||||
OutputStream.Write(intBuf); // Offset to the zip64 central directory
|
||||
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 1);
|
||||
OutputStream.Write(intBuf.Slice(0, 4)); // Number of disks
|
||||
|
||||
streamPosition += 4 + 8 + recordlen + (4 + 4 + 8 + 4);
|
||||
}
|
||||
|
||||
// Write normal end of central directory record
|
||||
OutputStream.Write(stackalloc byte[] { 80, 75, 5, 6, 0, 0, 0, 0 });
|
||||
BinaryPrimitives.WriteUInt16LittleEndian(
|
||||
intBuf,
|
||||
(ushort)(entries.Count < 0xFFFF ? entries.Count : 0xFFFF)
|
||||
);
|
||||
OutputStream.Write(intBuf.Slice(0, 2));
|
||||
OutputStream.Write(intBuf.Slice(0, 2));
|
||||
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, sizevalue);
|
||||
OutputStream.Write(intBuf.Slice(0, 4));
|
||||
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, streampositionvalue);
|
||||
OutputStream.Write(intBuf.Slice(0, 4));
|
||||
var encodedComment = WriterOptions.ArchiveEncoding.Encode(zipComment);
|
||||
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)encodedComment.Length);
|
||||
OutputStream.Write(intBuf.Slice(0, 2));
|
||||
OutputStream.Write(encodedComment, 0, encodedComment.Length);
|
||||
}
|
||||
|
||||
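The zip64 decision in WriteEndRecord above comes down to field widths: the classic end-of-central-directory record stores the entry count in 16 bits and the central-directory size and offset in 32 bits, so exceeding any of them forces the zip64 record and locator to be written first. A condensed sketch of that predicate:

// Mirrors the zip64EndOfCentralDirectoryNeeded expression above.
static bool NeedsZip64(int entryCount, ulong centralDirectorySize, long centralDirectoryOffset) =>
    entryCount > ushort.MaxValue
    || centralDirectoryOffset >= uint.MaxValue
    || centralDirectorySize >= uint.MaxValue;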
#region Nested type: ZipWritingStream
|
||||
|
||||
internal class ZipWritingStream : Stream
|
||||
{
|
||||
private readonly CRC32 crc = new();
|
||||
private readonly ZipCentralDirectoryEntry entry;
|
||||
private readonly Stream originalStream;
|
||||
private readonly Stream writeStream;
|
||||
private readonly ZipWriter writer;
|
||||
private readonly ZipCompressionMethod zipCompressionMethod;
|
||||
private readonly CompressionLevel compressionLevel;
|
||||
private CountingWritableSubStream? counting;
|
||||
private ulong decompressed;
|
||||
|
||||
// Flag to prevent throwing exceptions on Dispose
|
||||
private bool _limitsExceeded;
|
||||
private bool isDisposed;
|
||||
|
||||
internal ZipWritingStream(
|
||||
ZipWriter writer,
|
||||
Stream originalStream,
|
||||
ZipCentralDirectoryEntry entry,
|
||||
ZipCompressionMethod zipCompressionMethod,
|
||||
CompressionLevel compressionLevel
|
||||
)
|
||||
{
|
||||
this.writer = writer;
|
||||
this.originalStream = originalStream;
|
||||
this.entry = entry;
|
||||
this.zipCompressionMethod = zipCompressionMethod;
|
||||
this.compressionLevel = compressionLevel;
|
||||
writeStream = GetWriteStream(originalStream);
|
||||
}
|
||||
|
||||
public override bool CanRead => false;
|
||||
|
||||
public override bool CanSeek => false;
|
||||
|
||||
public override bool CanWrite => true;
|
||||
|
||||
public override long Length => throw new NotSupportedException();
|
||||
|
||||
public override long Position
|
||||
{
|
||||
get => throw new NotSupportedException();
|
||||
set => throw new NotSupportedException();
|
||||
}
|
||||
|
||||
private Stream GetWriteStream(Stream writeStream)
|
||||
{
|
||||
counting = new CountingWritableSubStream(writeStream);
|
||||
Stream output = counting;
|
||||
switch (zipCompressionMethod)
|
||||
{
|
||||
case ZipCompressionMethod.None:
|
||||
{
|
||||
return output;
|
||||
}
|
||||
case ZipCompressionMethod.Deflate:
|
||||
{
|
||||
return new DeflateStream(counting, CompressionMode.Compress, compressionLevel);
|
||||
}
|
||||
case ZipCompressionMethod.BZip2:
|
||||
{
|
||||
return new BZip2Stream(counting, CompressionMode.Compress, false);
|
||||
}
|
||||
case ZipCompressionMethod.LZMA:
|
||||
{
|
||||
counting.WriteByte(9);
|
||||
counting.WriteByte(20);
|
||||
counting.WriteByte(5);
|
||||
counting.WriteByte(0);
|
||||
|
||||
var lzmaStream = new LzmaStream(
|
||||
new LzmaEncoderProperties(!originalStream.CanSeek),
|
||||
false,
|
||||
counting
|
||||
);
|
||||
counting.Write(lzmaStream.Properties, 0, lzmaStream.Properties.Length);
|
||||
return lzmaStream;
|
||||
}
|
||||
case ZipCompressionMethod.PPMd:
|
||||
{
|
||||
counting.Write(writer.PpmdProperties.Properties, 0, 2);
|
||||
return new PpmdStream(writer.PpmdProperties, counting, true);
|
||||
}
|
||||
default:
|
||||
{
|
||||
throw new NotSupportedException("CompressionMethod: " + zipCompressionMethod);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected override void Dispose(bool disposing)
|
||||
{
|
||||
if (isDisposed)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
isDisposed = true;
|
||||
|
||||
base.Dispose(disposing);
|
||||
if (disposing)
|
||||
{
|
||||
writeStream.Dispose();
|
||||
|
||||
if (_limitsExceeded)
|
||||
{
|
||||
// We have written invalid data into the archive,
|
||||
// so we destroy it now, instead of allowing the user to continue
|
||||
// with a defunct archive
|
||||
originalStream.Dispose();
|
||||
return;
|
||||
}
|
||||
|
||||
entry.Crc = (uint)crc.Crc32Result;
|
||||
entry.Compressed = counting!.Count;
|
||||
entry.Decompressed = decompressed;
|
||||
|
||||
var zip64 =
|
||||
entry.Compressed >= uint.MaxValue || entry.Decompressed >= uint.MaxValue;
|
||||
var compressedvalue = zip64 ? uint.MaxValue : (uint)counting.Count;
|
||||
var decompressedvalue = zip64 ? uint.MaxValue : (uint)entry.Decompressed;
|
||||
|
||||
if (originalStream.CanSeek)
|
||||
{
|
||||
originalStream.Position = (long)(entry.HeaderOffset + 6);
|
||||
originalStream.WriteByte(0);
|
||||
|
||||
if (counting.Count == 0 && entry.Decompressed == 0)
|
||||
{
|
||||
// set compression to STORED for zero byte files (no compression data)
|
||||
originalStream.Position = (long)(entry.HeaderOffset + 8);
|
||||
originalStream.WriteByte(0);
|
||||
originalStream.WriteByte(0);
|
||||
}
|
||||
|
||||
originalStream.Position = (long)(entry.HeaderOffset + 14);
|
||||
|
||||
writer.WriteFooter(entry.Crc, compressedvalue, decompressedvalue);
|
||||
|
||||
// Ideally, we should not throw from Dispose()
|
||||
// We should not get here as the Write call checks the limits
|
||||
if (zip64 && entry.Zip64HeaderOffset == 0)
|
||||
{
|
||||
throw new NotSupportedException(
|
||||
"Attempted to write a stream that is larger than 4GiB without setting the zip64 option"
|
||||
);
|
||||
}
|
||||
|
||||
// If we have pre-allocated space for zip64 data,
|
||||
// fill it out, even if it is not required
|
||||
if (entry.Zip64HeaderOffset != 0)
|
||||
{
|
||||
originalStream.Position = (long)(
|
||||
entry.HeaderOffset + entry.Zip64HeaderOffset
|
||||
);
|
||||
Span<byte> intBuf = stackalloc byte[8];
|
||||
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 0x0001);
|
||||
originalStream.Write(intBuf.Slice(0, 2));
|
||||
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 8 + 8);
|
||||
originalStream.Write(intBuf.Slice(0, 2));
|
||||
|
||||
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, entry.Decompressed);
|
||||
originalStream.Write(intBuf);
|
||||
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, entry.Compressed);
|
||||
originalStream.Write(intBuf);
|
||||
}
|
||||
|
||||
originalStream.Position = writer.streamPosition + (long)entry.Compressed;
|
||||
writer.streamPosition += (long)entry.Compressed;
|
||||
}
|
||||
else
|
||||
{
|
||||
// We have a streaming archive, so we should add a post-data-descriptor,
|
||||
// but we cannot as it does not hold the zip64 values
|
||||
// Throwing an exception until the zip specification is clarified
|
||||
|
||||
// Ideally, we should not throw from Dispose()
|
||||
// We should not get here as the Write call checks the limits
|
||||
if (zip64)
|
||||
{
|
||||
throw new NotSupportedException(
|
||||
"Streams larger than 4GiB are not supported for non-seekable streams"
|
||||
);
|
||||
}
|
||||
|
||||
Span<byte> intBuf = stackalloc byte[4];
|
||||
BinaryPrimitives.WriteUInt32LittleEndian(
|
||||
intBuf,
|
||||
ZipHeaderFactory.POST_DATA_DESCRIPTOR
|
||||
);
|
||||
originalStream.Write(intBuf);
|
||||
writer.WriteFooter(entry.Crc, compressedvalue, decompressedvalue);
|
||||
writer.streamPosition += (long)entry.Compressed + 16;
|
||||
}
|
||||
writer.entries.Add(entry);
|
||||
}
|
||||
}
|
||||
|
||||
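In the non-seekable branch of Dispose above, the entry body is followed by a 16-byte post-data descriptor: a 4-byte signature (ZipHeaderFactory.POST_DATA_DESCRIPTOR, conventionally 0x08074B50) and then the CRC-32, compressed size and uncompressed size written by WriteFooter, which is why streamPosition advances by entry.Compressed + 16. A standalone sketch of that record, with the signature value assumed from the zip specification:

using System;
using System.Buffers.Binary;
using System.IO;

static class DataDescriptorSketch
{
    // Writes the classic (non-zip64) 4 + 4 + 4 + 4 = 16 byte data descriptor.
    public static void Write(Stream output, uint crc32, uint compressedSize, uint uncompressedSize)
    {
        Span<byte> buffer = stackalloc byte[16];
        BinaryPrimitives.WriteUInt32LittleEndian(buffer, 0x08074B50u); // "PK\x07\x08"
        BinaryPrimitives.WriteUInt32LittleEndian(buffer.Slice(4), crc32);
        BinaryPrimitives.WriteUInt32LittleEndian(buffer.Slice(8), compressedSize);
        BinaryPrimitives.WriteUInt32LittleEndian(buffer.Slice(12), uncompressedSize);
        output.Write(buffer);
    }
}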
public override void Flush() => writeStream.Flush();
|
||||
|
||||
public override int Read(byte[] buffer, int offset, int count) =>
|
||||
throw new NotSupportedException();
|
||||
|
||||
public override long Seek(long offset, SeekOrigin origin) =>
|
||||
throw new NotSupportedException();
|
||||
|
||||
public override void SetLength(long value) => throw new NotSupportedException();
|
||||
|
||||
public override void Write(byte[] buffer, int offset, int count)
|
||||
{
|
||||
// We check the limits first, because we can keep the archive consistent
|
||||
// if we can prevent the writes from happening
|
||||
if (entry.Zip64HeaderOffset == 0)
|
||||
{
|
||||
// Pre-check, the counting.Count is not exact, as we do not know the size before having actually compressed it
|
||||
if (
|
||||
_limitsExceeded
|
||||
|| ((decompressed + (uint)count) > uint.MaxValue)
|
||||
|| (counting!.Count + (uint)count) > uint.MaxValue
|
||||
)
|
||||
{
|
||||
throw new NotSupportedException(
|
||||
"Attempted to write a stream that is larger than 4GiB without setting the zip64 option"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
decompressed += (uint)count;
|
||||
crc.SlurpBlock(buffer, offset, count);
|
||||
writeStream.Write(buffer, offset, count);
|
||||
|
||||
if (entry.Zip64HeaderOffset == 0)
|
||||
{
|
||||
// Post-check, this is accurate
|
||||
if ((decompressed > uint.MaxValue) || counting!.Count > uint.MaxValue)
|
||||
{
|
||||
// We have written the data, so the archive is now broken
|
||||
// Throwing the exception here, allows us to avoid
|
||||
// throwing an exception in Dispose() which is discouraged
|
||||
// as it can mask other errors
|
||||
_limitsExceeded = true;
|
||||
throw new NotSupportedException(
|
||||
"Attempted to write a stream that is larger than 4GiB without setting the zip64 option"
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#endregion Nested type: ZipWritingStream
|
||||
}
|
||||
#endif
|
||||
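Between the legacy file above and the modern file below, the public surface is the same synchronous API plus the new async overloads. A hedged usage sketch of the streaming entry API; the ZipWriterOptions and ZipWriterEntryOptions constructor shapes are assumptions to be checked against the released package.

using System.IO;
using System.Text;
using SharpCompress.Common;
using SharpCompress.Writers.Zip;

using var zip = File.Create("out.zip");
using var writer = new ZipWriter(zip, new ZipWriterOptions(CompressionType.Deflate));

// WriteToStream returns a ZipWritingStream; disposing it finalizes the entry
// (CRC, sizes and, on seekable output, the patched local header).
using (var entry = writer.WriteToStream("docs/readme.txt", new ZipWriterEntryOptions()))
{
    var payload = Encoding.UTF8.GetBytes("hello zip");
    entry.Write(payload, 0, payload.Length);
}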
@@ -1,8 +1,12 @@
|
||||
#if !NETFRAMEWORK && !NETSTANDARD2_0
|
||||
using System;
|
||||
using System.Buffers;
|
||||
using System.Buffers.Binary;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Zip;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
@@ -17,6 +21,8 @@ namespace SharpCompress.Writers.Zip;
|
||||
|
||||
public class ZipWriter : AbstractWriter
|
||||
{
|
||||
private static readonly byte[] ZIP64eND_OFdIRECTORY = [80, 75, 6, 6];
|
||||
private static readonly byte[] END_OFdIRECTORY = [80, 75, 6, 7];
|
||||
private readonly CompressionType compressionType;
|
||||
private readonly CompressionLevel compressionLevel;
|
||||
private readonly List<ZipCentralDirectoryEntry> entries = new();
|
||||
@@ -24,6 +30,7 @@ public class ZipWriter : AbstractWriter
|
||||
private long streamPosition;
|
||||
private PpmdProperties? ppmdProps;
|
||||
private readonly bool isZip64;
|
||||
private bool isDisposed;
|
||||
|
||||
public ZipWriter(Stream destination, ZipWriterOptions zipWriterOptions)
|
||||
: base(ArchiveType.Zip, zipWriterOptions)
|
||||
@@ -42,14 +49,29 @@ public class ZipWriter : AbstractWriter
|
||||
{
|
||||
destination = NonDisposingStream.Create(destination);
|
||||
}
|
||||
InitializeStream(destination);
|
||||
InitalizeStream(destination);
|
||||
}
|
||||
|
||||
private PpmdProperties PpmdProperties => ppmdProps ??= new PpmdProperties();
|
||||
|
||||
public override async ValueTask DisposeAsync()
|
||||
{
|
||||
if (isDisposed)
|
||||
{
|
||||
return;
|
||||
}
|
||||
ulong size = 0;
|
||||
foreach (var entry in entries)
|
||||
{
|
||||
size += entry.Write(OutputStream);
|
||||
}
|
||||
await WriteEndRecordAsync(size, CancellationToken.None).ConfigureAwait(false);
|
||||
isDisposed = true;
|
||||
}
|
||||
|
||||
protected override void Dispose(bool isDisposing)
|
||||
{
|
||||
if (isDisposing && OutputStream is not null)
|
||||
if (isDisposing)
|
||||
{
|
||||
ulong size = 0;
|
||||
foreach (var entry in entries)
|
||||
@@ -61,8 +83,9 @@ public class ZipWriter : AbstractWriter
|
||||
base.Dispose(isDisposing);
|
||||
}
|
||||
|
||||
private static ZipCompressionMethod ToZipCompressionMethod(CompressionType compressionType) =>
|
||||
compressionType switch
|
||||
private static ZipCompressionMethod ToZipCompressionMethod(CompressionType compressionType)
|
||||
{
|
||||
return compressionType switch
|
||||
{
|
||||
CompressionType.None => ZipCompressionMethod.None,
|
||||
CompressionType.Deflate => ZipCompressionMethod.Deflate,
|
||||
@@ -71,6 +94,7 @@ public class ZipWriter : AbstractWriter
|
||||
CompressionType.PPMd => ZipCompressionMethod.PPMd,
|
||||
_ => throw new InvalidFormatException("Invalid compression method: " + compressionType)
|
||||
};
|
||||
}
|
||||
|
||||
public override void Write(string entryPath, Stream source, DateTime? modificationTime) =>
|
||||
Write(
|
||||
@@ -85,6 +109,34 @@ public class ZipWriter : AbstractWriter
|
||||
source.TransferTo(output);
|
||||
}
|
||||
|
||||
public override async ValueTask WriteAsync(
|
||||
string entryPath,
|
||||
Stream source,
|
||||
DateTime? modificationTime,
|
||||
CancellationToken cancellationToken
|
||||
) =>
|
||||
await WriteAsync(
|
||||
entryPath,
|
||||
source,
|
||||
new ZipWriterEntryOptions() { ModificationDateTime = modificationTime },
|
||||
cancellationToken
|
||||
);
|
||||
|
||||
public async ValueTask WriteAsync(
|
||||
string entryPath,
|
||||
Stream source,
|
||||
ZipWriterEntryOptions zipWriterEntryOptions,
|
||||
CancellationToken cancellationToken
|
||||
)
|
||||
{
|
||||
await using var output = await WriteToStreamAsync(
|
||||
entryPath,
|
||||
zipWriterEntryOptions,
|
||||
cancellationToken
|
||||
);
|
||||
await source.CopyToAsync(output, cancellationToken);
|
||||
}
|
||||
|
||||
public Stream WriteToStream(string entryPath, ZipWriterEntryOptions options)
|
||||
{
|
||||
var compression = ToZipCompressionMethod(options.CompressionType ?? compressionType);
|
||||
@@ -114,7 +166,49 @@ public class ZipWriter : AbstractWriter
|
||||
streamPosition += headersize;
|
||||
return new ZipWritingStream(
|
||||
this,
|
||||
OutputStream.NotNull(),
|
||||
OutputStream,
|
||||
entry,
|
||||
compression,
|
||||
options.DeflateCompressionLevel ?? compressionLevel
|
||||
);
|
||||
}
|
||||
|
||||
public async ValueTask<Stream> WriteToStreamAsync(
|
||||
string entryPath,
|
||||
ZipWriterEntryOptions options,
|
||||
CancellationToken cancellationToken
|
||||
)
|
||||
{
|
||||
var compression = ToZipCompressionMethod(options.CompressionType ?? compressionType);
|
||||
|
||||
entryPath = NormalizeFilename(entryPath);
|
||||
options.ModificationDateTime ??= DateTime.Now;
|
||||
options.EntryComment ??= string.Empty;
|
||||
var entry = new ZipCentralDirectoryEntry(
|
||||
compression,
|
||||
entryPath,
|
||||
(ulong)streamPosition,
|
||||
WriterOptions.ArchiveEncoding
|
||||
)
|
||||
{
|
||||
Comment = options.EntryComment,
|
||||
ModificationTime = options.ModificationDateTime
|
||||
};
|
||||
|
||||
// Use the archive default setting for zip64 and allow overrides
|
||||
var useZip64 = isZip64;
|
||||
if (options.EnableZip64.HasValue)
|
||||
{
|
||||
useZip64 = options.EnableZip64.Value;
|
||||
}
|
||||
|
||||
var headersize = (uint)
|
||||
await WriteHeaderAsync(entryPath, options, entry, useZip64, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
streamPosition += headersize;
|
||||
return new ZipWritingStream(
|
||||
this,
|
||||
OutputStream,
|
||||
entry,
|
||||
compression,
|
||||
options.DeflateCompressionLevel ?? compressionLevel
|
||||
@@ -221,6 +315,105 @@ public class ZipWriter : AbstractWriter
|
||||
return 6 + 2 + 2 + 4 + 12 + 2 + 2 + encodedFilename.Length + extralength;
|
||||
}
|
||||
|
||||
private async ValueTask<int> WriteHeaderAsync(
|
||||
string filename,
|
||||
ZipWriterEntryOptions zipWriterEntryOptions,
|
||||
ZipCentralDirectoryEntry entry,
|
||||
bool useZip64,
|
||||
CancellationToken cancellationToken
|
||||
)
|
||||
{
|
||||
// We err on the side of caution until the zip specification clarifies how to support this
|
||||
if (!OutputStream.CanSeek && useZip64)
|
||||
{
|
||||
throw new NotSupportedException(
|
||||
"Zip64 extensions are not supported on non-seekable streams"
|
||||
);
|
||||
}
|
||||
|
||||
var explicitZipCompressionInfo = ToZipCompressionMethod(
|
||||
zipWriterEntryOptions.CompressionType ?? compressionType
|
||||
);
|
||||
var encodedFilename = WriterOptions.ArchiveEncoding.Encode(filename);
|
||||
|
||||
var intBuf = ArrayPool<byte>.Shared.Rent(4);
|
||||
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, ZipHeaderFactory.ENTRY_HEADER_BYTES);
|
||||
await OutputStream.WriteAsync(intBuf, 0, 4, cancellationToken).ConfigureAwait(false);
|
||||
if (explicitZipCompressionInfo == ZipCompressionMethod.Deflate)
|
||||
{
|
||||
if (OutputStream.CanSeek && useZip64)
|
||||
{
|
||||
await OutputStream
|
||||
.WriteAsync([45, 0], 0, 2, cancellationToken)
|
||||
.ConfigureAwait(false); //smallest allowed version for zip64
|
||||
}
|
||||
else
|
||||
{
|
||||
await OutputStream
|
||||
.WriteAsync([20, 0], 0, 2, cancellationToken)
|
||||
.ConfigureAwait(false); //older version which is more compatible
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
await OutputStream.WriteAsync([63, 0], 0, 2, cancellationToken).ConfigureAwait(false); //version says we used PPMd or LZMA
|
||||
}
|
||||
var flags = Equals(WriterOptions.ArchiveEncoding.GetEncoding(), Encoding.UTF8)
|
||||
? HeaderFlags.Efs
|
||||
: 0;
|
||||
if (!OutputStream.CanSeek)
|
||||
{
|
||||
flags |= HeaderFlags.UsePostDataDescriptor;
|
||||
|
||||
if (explicitZipCompressionInfo == ZipCompressionMethod.LZMA)
|
||||
{
|
||||
flags |= HeaderFlags.Bit1; // eos marker
|
||||
}
|
||||
}
|
||||
|
||||
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)flags);
|
||||
await OutputStream.WriteAsync(intBuf, 0, 2, cancellationToken).ConfigureAwait(false);
|
||||
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)explicitZipCompressionInfo);
|
||||
|
||||
await OutputStream.WriteAsync(intBuf, 0, 2, cancellationToken).ConfigureAwait(false); // zipping method
|
||||
BinaryPrimitives.WriteUInt32LittleEndian(
|
||||
intBuf,
|
||||
zipWriterEntryOptions.ModificationDateTime.DateTimeToDosTime()
|
||||
);
|
||||
await OutputStream.WriteAsync(intBuf, 0, 4, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
// zipping date and time
|
||||
await OutputStream
|
||||
.WriteAsync([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
// unused CRC, un/compressed size, updated later
|
||||
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)encodedFilename.Length);
|
||||
|
||||
await OutputStream.WriteAsync(intBuf, 0, 2, cancellationToken).ConfigureAwait(false); // filename length
|
||||
|
||||
var extralength = 0;
|
||||
if (OutputStream.CanSeek && useZip64)
|
||||
{
|
||||
extralength = 2 + 2 + 8 + 8;
|
||||
}
|
||||
|
||||
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)extralength);
|
||||
await OutputStream.WriteAsync(intBuf, 0, 2, cancellationToken).ConfigureAwait(false); // extra length
|
||||
await OutputStream.WriteAsync(encodedFilename, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
if (extralength != 0)
|
||||
{
|
||||
await OutputStream
|
||||
.WriteAsync(new byte[extralength], cancellationToken)
|
||||
.ConfigureAwait(false); // reserve space for zip64 data
|
||||
entry.Zip64HeaderOffset = (ushort)(6 + 2 + 2 + 4 + 12 + 2 + 2 + encodedFilename.Length);
|
||||
}
|
||||
|
||||
ArrayPool<byte>.Shared.Return(intBuf);
|
||||
return 6 + 2 + 2 + 4 + 12 + 2 + 2 + encodedFilename.Length + extralength;
|
||||
}
|
||||
|
||||
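The async header writer above rents a scratch buffer from ArrayPool because stackalloc spans cannot live across await points. A minimal sketch of that rent/write/return pattern; wrapping the return in try/finally is a defensive variant for illustration, not something this diff itself does.

using System.Buffers;
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

static class AsyncWriteSketch
{
    // Writes one little-endian uint using a pooled buffer instead of stackalloc.
    public static async ValueTask WriteUInt32Async(Stream output, uint value, CancellationToken cancellationToken)
    {
        var buffer = ArrayPool<byte>.Shared.Rent(4);
        try
        {
            BinaryPrimitives.WriteUInt32LittleEndian(buffer, value);
            await output.WriteAsync(buffer, 0, 4, cancellationToken).ConfigureAwait(false);
        }
        finally
        {
            ArrayPool<byte>.Shared.Return(buffer);
        }
    }
}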
private void WriteFooter(uint crc, uint compressed, uint uncompressed)
|
||||
{
|
||||
Span<byte> intBuf = stackalloc byte[4];
|
||||
@@ -232,6 +425,23 @@ public class ZipWriter : AbstractWriter
|
||||
OutputStream.Write(intBuf);
|
||||
}
|
||||
|
||||
private async ValueTask WriteFooterAsync(
|
||||
uint crc,
|
||||
uint compressed,
|
||||
uint uncompressed,
|
||||
CancellationToken cancellationToken
|
||||
)
|
||||
{
|
||||
var intBuf = ArrayPool<byte>.Shared.Rent(4);
|
||||
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, crc);
|
||||
await OutputStream.WriteAsync(intBuf, cancellationToken).ConfigureAwait(false);
|
||||
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, compressed);
|
||||
await OutputStream.WriteAsync(intBuf, cancellationToken).ConfigureAwait(false);
|
||||
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, uncompressed);
|
||||
await OutputStream.WriteAsync(intBuf, cancellationToken).ConfigureAwait(false);
|
||||
ArrayPool<byte>.Shared.Return(intBuf);
|
||||
}
|
||||
|
||||
private void WriteEndRecord(ulong size)
|
||||
{
|
||||
var zip64EndOfCentralDirectoryNeeded =
|
||||
@@ -302,6 +512,82 @@ public class ZipWriter : AbstractWriter
|
||||
OutputStream.Write(encodedComment, 0, encodedComment.Length);
|
||||
}
|
||||
|
||||
private async ValueTask WriteEndRecordAsync(ulong size, CancellationToken cancellationToken)
|
||||
{
|
||||
var zip64EndOfCentralDirectoryNeeded =
|
||||
entries.Count > ushort.MaxValue
|
||||
|| streamPosition >= uint.MaxValue
|
||||
|| size >= uint.MaxValue;
|
||||
|
||||
var sizevalue = size >= uint.MaxValue ? uint.MaxValue : (uint)size;
|
||||
var streampositionvalue =
|
||||
streamPosition >= uint.MaxValue ? uint.MaxValue : (uint)streamPosition;
|
||||
|
||||
var intBuf = ArrayPool<byte>.Shared.Rent(8);
|
||||
if (zip64EndOfCentralDirectoryNeeded)
|
||||
{
|
||||
var recordlen = 2 + 2 + 4 + 4 + 8 + 8 + 8 + 8;
|
||||
|
||||
// Write zip64 end of central directory record
|
||||
await OutputStream
|
||||
.WriteAsync(ZIP64eND_OFdIRECTORY, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)recordlen);
|
||||
await OutputStream.WriteAsync(intBuf, 0, 8, cancellationToken).ConfigureAwait(false); // Size of zip64 end of central directory record
|
||||
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 45);
|
||||
await OutputStream.WriteAsync(intBuf, 0, 2, cancellationToken).ConfigureAwait(false); // Made by
|
||||
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 45);
|
||||
|
||||
await OutputStream.WriteAsync(intBuf, 0, 2, cancellationToken).ConfigureAwait(false); // Version needed
|
||||
|
||||
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 0);
|
||||
await OutputStream.WriteAsync(intBuf, 0, 4, cancellationToken).ConfigureAwait(false); // Disk number
|
||||
await OutputStream.WriteAsync(intBuf, 0, 4, cancellationToken).ConfigureAwait(false); // Central dir disk
|
||||
|
||||
// TODO: entries.Count is int, so max 2^31 files
|
||||
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)entries.Count);
|
||||
await OutputStream.WriteAsync(intBuf, 0, 8, cancellationToken).ConfigureAwait(false); // Entries in this disk
|
||||
await OutputStream.WriteAsync(intBuf, 0, 8, cancellationToken).ConfigureAwait(false); // Total entries
|
||||
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, size);
|
||||
await OutputStream.WriteAsync(intBuf, 0, 8, cancellationToken).ConfigureAwait(false); // Central Directory size
|
||||
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)streamPosition);
|
||||
await OutputStream.WriteAsync(intBuf, 0, 8, cancellationToken).ConfigureAwait(false); // Disk offset
|
||||
|
||||
// Write zip64 end of central directory locator
|
||||
OutputStream.Write(stackalloc byte[] { 80, 75, 6, 7 });
|
||||
|
||||
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 0);
|
||||
await OutputStream.WriteAsync(intBuf, 0, 4, cancellationToken).ConfigureAwait(false); // Entry disk
|
||||
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)streamPosition + size);
|
||||
await OutputStream.WriteAsync(intBuf, 0, 8, cancellationToken).ConfigureAwait(false); // Offset to the zip64 central directory
|
||||
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 1);
|
||||
await OutputStream.WriteAsync(intBuf, 0, 4, cancellationToken).ConfigureAwait(false); // Number of disks
|
||||
|
||||
streamPosition += 4 + 8 + recordlen + (4 + 4 + 8 + 4);
|
||||
}
|
||||
|
||||
// Write normal end of central directory record
|
||||
OutputStream.Write(END_OFdIRECTORY);
|
||||
BinaryPrimitives.WriteUInt16LittleEndian(
|
||||
intBuf,
|
||||
(ushort)(entries.Count < 0xFFFF ? entries.Count : 0xFFFF)
|
||||
);
|
||||
await OutputStream.WriteAsync(intBuf, 0, 2, cancellationToken).ConfigureAwait(false);
|
||||
await OutputStream.WriteAsync(intBuf, 0, 2, cancellationToken).ConfigureAwait(false);
|
||||
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, sizevalue);
|
||||
await OutputStream.WriteAsync(intBuf, 0, 4, cancellationToken).ConfigureAwait(false);
|
||||
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, streampositionvalue);
|
||||
await OutputStream.WriteAsync(intBuf, 0, 4, cancellationToken).ConfigureAwait(false);
|
||||
var encodedComment = WriterOptions.ArchiveEncoding.Encode(zipComment);
|
||||
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)encodedComment.Length);
|
||||
await OutputStream.WriteAsync(intBuf, 0, 2, cancellationToken).ConfigureAwait(false);
|
||||
await OutputStream
|
||||
.WriteAsync(encodedComment, 0, encodedComment.Length, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
ArrayPool<byte>.Shared.Return(intBuf);
|
||||
}
|
||||
|
||||
#region Nested type: ZipWritingStream
|
||||
|
||||
internal class ZipWritingStream : Stream
|
||||
@@ -328,7 +614,6 @@ public class ZipWriter : AbstractWriter
|
||||
CompressionLevel compressionLevel
|
||||
)
|
||||
{
|
||||
this.writer = writer;
|
||||
this.originalStream = originalStream;
|
||||
this.writer = writer;
|
||||
this.entry = entry;
|
||||
@@ -396,6 +681,131 @@ public class ZipWriter : AbstractWriter
|
||||
}
|
||||
}
|
||||
|
||||
public override async ValueTask DisposeAsync()
|
||||
{
|
||||
if (isDisposed)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
isDisposed = true;
|
||||
|
||||
await writeStream.DisposeAsync();
|
||||
|
||||
if (limitsExceeded)
|
||||
{
|
||||
// We have written invalid data into the archive,
|
||||
// so we destroy it now, instead of allowing the user to continue
|
||||
// with a defunct archive
|
||||
await originalStream.DisposeAsync();
|
||||
return;
|
||||
}
|
||||
|
||||
entry.Crc = (uint)crc.Crc32Result;
|
||||
entry.Compressed = counting!.Count;
|
||||
entry.Decompressed = decompressed;
|
||||
|
||||
var zip64 = entry.Compressed >= uint.MaxValue || entry.Decompressed >= uint.MaxValue;
|
||||
var compressedvalue = zip64 ? uint.MaxValue : (uint)counting.Count;
|
||||
var decompressedvalue = zip64 ? uint.MaxValue : (uint)entry.Decompressed;
|
||||
|
||||
if (originalStream.CanSeek)
|
||||
{
|
||||
originalStream.Position = (long)(entry.HeaderOffset + 6);
|
||||
originalStream.WriteByte(0);
|
||||
|
||||
if (counting.Count == 0 && entry.Decompressed == 0)
|
||||
{
|
||||
// set compression to STORED for zero byte files (no compression data)
|
||||
originalStream.Position = (long)(entry.HeaderOffset + 8);
|
||||
originalStream.WriteByte(0);
|
||||
originalStream.WriteByte(0);
|
||||
}
|
||||
|
||||
originalStream.Position = (long)(entry.HeaderOffset + 14);
|
||||
|
||||
await writer.WriteFooterAsync(
|
||||
entry.Crc,
|
||||
compressedvalue,
|
||||
decompressedvalue,
|
||||
CancellationToken.None
|
||||
);
|
||||
|
||||
// Ideally, we should not throw from Dispose()
|
||||
// We should not get here as the Write call checks the limits
|
||||
if (zip64 && entry.Zip64HeaderOffset == 0)
|
||||
{
|
||||
throw new NotSupportedException(
|
||||
"Attempted to write a stream that is larger than 4GiB without setting the zip64 option"
|
||||
);
|
||||
}
|
||||
|
||||
// If we have pre-allocated space for zip64 data,
|
||||
// fill it out, even if it is not required
|
||||
if (entry.Zip64HeaderOffset != 0)
|
||||
{
|
||||
originalStream.Position = (long)(entry.HeaderOffset + entry.Zip64HeaderOffset);
|
||||
var intBuf = ArrayPool<byte>.Shared.Rent(8);
|
||||
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 0x0001);
|
||||
await originalStream
|
||||
.WriteAsync(intBuf, 0, 2, CancellationToken.None)
|
||||
.ConfigureAwait(false);
|
||||
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 8 + 8);
|
||||
await originalStream
|
||||
.WriteAsync(intBuf, 0, 2, CancellationToken.None)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, entry.Decompressed);
|
||||
await originalStream
|
||||
.WriteAsync(intBuf, CancellationToken.None)
|
||||
.ConfigureAwait(false);
|
||||
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, entry.Compressed);
|
||||
await originalStream
|
||||
.WriteAsync(intBuf, CancellationToken.None)
|
||||
.ConfigureAwait(false);
|
||||
ArrayPool<byte>.Shared.Return(intBuf);
|
||||
}
|
||||
|
||||
originalStream.Position = writer.streamPosition + (long)entry.Compressed;
|
||||
writer.streamPosition += (long)entry.Compressed;
|
||||
}
|
||||
else
|
||||
{
|
||||
// We have a streaming archive, so we should add a post-data-descriptor,
|
||||
// but we cannot as it does not hold the zip64 values
|
||||
// Throwing an exception until the zip specification is clarified
|
||||
|
||||
// Ideally, we should not throw from Dispose()
|
||||
// We should not get here as the Write call checks the limits
|
||||
if (zip64)
|
||||
{
|
||||
throw new NotSupportedException(
|
||||
"Streams larger than 4GiB are not supported for non-seekable streams"
|
||||
);
|
||||
}
|
||||
|
||||
var intBuf = ArrayPool<byte>.Shared.Rent(4);
|
||||
BinaryPrimitives.WriteUInt32LittleEndian(
|
||||
intBuf,
|
||||
ZipHeaderFactory.POST_DATA_DESCRIPTOR
|
||||
);
|
||||
await originalStream
|
||||
.WriteAsync(intBuf, CancellationToken.None)
|
||||
.ConfigureAwait(false);
|
||||
await writer
|
||||
.WriteFooterAsync(
|
||||
entry.Crc,
|
||||
compressedvalue,
|
||||
decompressedvalue,
|
||||
CancellationToken.None
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
writer.streamPosition += (long)entry.Compressed + 16;
|
||||
ArrayPool<byte>.Shared.Return(intBuf);
|
||||
}
|
||||
writer.entries.Add(entry);
|
||||
}
|
||||
|
||||
protected override void Dispose(bool disposing)
|
||||
{
|
||||
if (isDisposed)
|
||||
@@ -419,14 +829,13 @@ public class ZipWriter : AbstractWriter
|
||||
return;
|
||||
}
|
||||
|
||||
var countingCount = counting?.Count ?? 0;
|
||||
entry.Crc = (uint)crc.Crc32Result;
|
||||
entry.Compressed = countingCount;
|
||||
entry.Compressed = counting!.Count;
|
||||
entry.Decompressed = decompressed;
|
||||
|
||||
var zip64 =
|
||||
entry.Compressed >= uint.MaxValue || entry.Decompressed >= uint.MaxValue;
|
||||
var compressedvalue = zip64 ? uint.MaxValue : (uint)countingCount;
|
||||
var compressedvalue = zip64 ? uint.MaxValue : (uint)counting.Count;
|
||||
var decompressedvalue = zip64 ? uint.MaxValue : (uint)entry.Decompressed;
|
||||
|
||||
if (originalStream.CanSeek)
|
||||
@@ -434,7 +843,7 @@ public class ZipWriter : AbstractWriter
|
||||
originalStream.Position = (long)(entry.HeaderOffset + 6);
|
||||
originalStream.WriteByte(0);
|
||||
|
||||
if (countingCount == 0 && entry.Decompressed == 0)
|
||||
if (counting.Count == 0 && entry.Decompressed == 0)
|
||||
{
|
||||
// set compression to STORED for zero byte files (no compression data)
|
||||
originalStream.Position = (long)(entry.HeaderOffset + 8);
|
||||
@@ -521,12 +930,11 @@ public class ZipWriter : AbstractWriter
|
||||
// if we can prevent the writes from happening
|
||||
if (entry.Zip64HeaderOffset == 0)
|
||||
{
|
||||
var countingCount = counting?.Count ?? 0;
|
||||
// Pre-check, the counting.Count is not exact, as we do not know the size before having actually compressed it
|
||||
if (
|
||||
limitsExceeded
|
||||
|| ((decompressed + (uint)count) > uint.MaxValue)
|
||||
|| (countingCount + (uint)count) > uint.MaxValue
|
||||
|| (counting!.Count + (uint)count) > uint.MaxValue
|
||||
)
|
||||
{
|
||||
throw new NotSupportedException(
|
||||
@@ -541,9 +949,8 @@ public class ZipWriter : AbstractWriter
|
||||
|
||||
if (entry.Zip64HeaderOffset == 0)
|
||||
{
|
||||
var countingCount = counting?.Count ?? 0;
|
||||
// Post-check, this is accurate
|
||||
if ((decompressed > uint.MaxValue) || countingCount > uint.MaxValue)
|
||||
if ((decompressed > uint.MaxValue) || counting!.Count > uint.MaxValue)
|
||||
{
|
||||
// We have written the data, so the archive is now broken
|
||||
// Throwing the exception here, allows us to avoid
|
||||
@@ -560,3 +967,4 @@ public class ZipWriter : AbstractWriter
|
||||
|
||||
#endregion Nested type: ZipWritingStream
|
||||
}
|
||||
#endif
|
||||
|
||||
@@ -1,338 +0,0 @@
|
||||
{
|
||||
"version": 2,
|
||||
"dependencies": {
|
||||
".NETFramework,Version=v4.6.2": {
|
||||
"Microsoft.Bcl.AsyncInterfaces": {
|
||||
"type": "Direct",
|
||||
"requested": "[8.0.0, )",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "3WA9q9yVqJp222P3x1wYIGDAkpjAku0TMUaaQV22g6L67AI0LdOIrVS7Ht2vJfLHGSPVuqN94vIr15qn+HEkHw==",
|
||||
"dependencies": {
|
||||
"System.Threading.Tasks.Extensions": "4.5.4"
|
||||
}
|
||||
},
|
||||
"Microsoft.NETFramework.ReferenceAssemblies": {
|
||||
"type": "Direct",
|
||||
"requested": "[1.0.3, )",
|
||||
"resolved": "1.0.3",
|
||||
"contentHash": "vUc9Npcs14QsyOD01tnv/m8sQUnGTGOw1BCmKcv77LBJY7OxhJ+zJF7UD/sCL3lYNFuqmQEVlkfS4Quif6FyYg==",
|
||||
"dependencies": {
|
||||
"Microsoft.NETFramework.ReferenceAssemblies.net462": "1.0.3"
|
||||
}
|
||||
},
|
||||
"Microsoft.SourceLink.GitHub": {
|
||||
"type": "Direct",
|
||||
"requested": "[8.0.0, )",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
|
||||
"dependencies": {
|
||||
"Microsoft.Build.Tasks.Git": "8.0.0",
|
||||
"Microsoft.SourceLink.Common": "8.0.0"
|
||||
}
|
||||
},
|
||||
"System.Memory": {
|
||||
"type": "Direct",
|
||||
"requested": "[4.5.5, )",
|
||||
"resolved": "4.5.5",
|
||||
"contentHash": "XIWiDvKPXaTveaB7HVganDlOCRoj03l+jrwNvcge/t8vhGYKvqV+dMv6G4SAX2NoNmN0wZfVPTAlFwZcZvVOUw==",
|
||||
"dependencies": {
|
||||
"System.Buffers": "4.5.1",
|
||||
"System.Numerics.Vectors": "4.5.0",
|
||||
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
|
||||
}
|
||||
},
|
||||
"System.Text.Encoding.CodePages": {
|
||||
"type": "Direct",
|
||||
"requested": "[8.0.0, )",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "OZIsVplFGaVY90G2SbpgU7EnCoOO5pw1t4ic21dBF3/1omrJFpAGoNAVpPyMVOC90/hvgkGG3VFqR13YgZMQfg==",
|
||||
"dependencies": {
|
||||
"System.Memory": "4.5.5",
|
||||
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
|
||||
}
|
||||
},
|
||||
"ZstdSharp.Port": {
|
||||
"type": "Direct",
|
||||
"requested": "[0.8.1, )",
|
||||
"resolved": "0.8.1",
|
||||
"contentHash": "19tNz33kn2EkyViFXuxfVn338UJaRmkwBphVqP2dVJIYQUQgFrgG5h061mxkRRg1Ax6r+6WOj1FxaFZ5qaWqqg==",
|
||||
"dependencies": {
|
||||
"Microsoft.Bcl.AsyncInterfaces": "5.0.0",
|
||||
"System.Memory": "4.5.5"
|
||||
}
|
||||
},
|
||||
"Microsoft.Build.Tasks.Git": {
|
||||
"type": "Transitive",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
|
||||
},
|
||||
"Microsoft.NETFramework.ReferenceAssemblies.net462": {
|
||||
"type": "Transitive",
|
||||
"resolved": "1.0.3",
|
||||
"contentHash": "IzAV30z22ESCeQfxP29oVf4qEo8fBGXLXSU6oacv/9Iqe6PzgHDKCaWfwMBak7bSJQM0F5boXWoZS+kChztRIQ=="
|
||||
},
|
||||
"Microsoft.SourceLink.Common": {
|
||||
"type": "Transitive",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
|
||||
},
|
||||
"System.Buffers": {
|
||||
"type": "Transitive",
|
||||
"resolved": "4.5.1",
|
||||
"contentHash": "Rw7ijyl1qqRS0YQD/WycNst8hUUMgrMH4FCn1nNm27M4VxchZ1js3fVjQaANHO5f3sN4isvP4a+Met9Y4YomAg=="
|
||||
},
|
||||
"System.Numerics.Vectors": {
|
||||
"type": "Transitive",
|
||||
"resolved": "4.5.0",
|
||||
"contentHash": "QQTlPTl06J/iiDbJCiepZ4H//BVraReU4O4EoRw1U02H5TLUIT7xn3GnDp9AXPSlJUDyFs4uWjWafNX6WrAojQ=="
|
||||
},
|
||||
"System.Runtime.CompilerServices.Unsafe": {
|
||||
"type": "Transitive",
|
||||
"resolved": "6.0.0",
|
||||
"contentHash": "/iUeP3tq1S0XdNNoMz5C9twLSrM/TH+qElHkXWaPvuNOt+99G75NrV0OS2EqHx5wMN7popYjpc8oTjC1y16DLg=="
|
||||
},
|
||||
"System.Threading.Tasks.Extensions": {
|
||||
"type": "Transitive",
|
||||
"resolved": "4.5.4",
|
||||
"contentHash": "zteT+G8xuGu6mS+mzDzYXbzS7rd3K6Fjb9RiZlYlJPam2/hU7JCBZBVEcywNuR+oZ1ncTvc/cq0faRr3P01OVg==",
|
||||
"dependencies": {
|
||||
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
|
||||
}
|
||||
}
|
||||
},
|
||||
".NETStandard,Version=v2.0": {
|
||||
"Microsoft.Bcl.AsyncInterfaces": {
|
||||
"type": "Direct",
|
||||
"requested": "[8.0.0, )",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "3WA9q9yVqJp222P3x1wYIGDAkpjAku0TMUaaQV22g6L67AI0LdOIrVS7Ht2vJfLHGSPVuqN94vIr15qn+HEkHw==",
|
||||
"dependencies": {
|
||||
"System.Threading.Tasks.Extensions": "4.5.4"
|
||||
}
|
||||
},
|
||||
"Microsoft.SourceLink.GitHub": {
|
||||
"type": "Direct",
|
||||
"requested": "[8.0.0, )",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
|
||||
"dependencies": {
|
||||
"Microsoft.Build.Tasks.Git": "8.0.0",
|
||||
"Microsoft.SourceLink.Common": "8.0.0"
|
||||
}
|
||||
},
|
||||
"NETStandard.Library": {
|
||||
"type": "Direct",
|
||||
"requested": "[2.0.3, )",
|
||||
"resolved": "2.0.3",
|
||||
"contentHash": "st47PosZSHrjECdjeIzZQbzivYBJFv6P2nv4cj2ypdI204DO+vZ7l5raGMiX4eXMJ53RfOIg+/s4DHVZ54Nu2A==",
|
||||
"dependencies": {
|
||||
"Microsoft.NETCore.Platforms": "1.1.0"
|
||||
}
|
||||
},
|
||||
"System.Memory": {
|
||||
"type": "Direct",
|
||||
"requested": "[4.5.5, )",
|
||||
"resolved": "4.5.5",
|
||||
"contentHash": "XIWiDvKPXaTveaB7HVganDlOCRoj03l+jrwNvcge/t8vhGYKvqV+dMv6G4SAX2NoNmN0wZfVPTAlFwZcZvVOUw==",
|
||||
"dependencies": {
|
||||
"System.Buffers": "4.5.1",
|
||||
"System.Numerics.Vectors": "4.4.0",
|
||||
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
|
||||
}
|
||||
},
|
||||
"System.Text.Encoding.CodePages": {
|
||||
"type": "Direct",
|
||||
"requested": "[8.0.0, )",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "OZIsVplFGaVY90G2SbpgU7EnCoOO5pw1t4ic21dBF3/1omrJFpAGoNAVpPyMVOC90/hvgkGG3VFqR13YgZMQfg==",
|
||||
"dependencies": {
|
||||
"System.Memory": "4.5.5",
|
||||
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
|
||||
}
|
||||
},
|
||||
"ZstdSharp.Port": {
|
||||
"type": "Direct",
|
||||
"requested": "[0.8.1, )",
|
||||
"resolved": "0.8.1",
|
||||
"contentHash": "19tNz33kn2EkyViFXuxfVn338UJaRmkwBphVqP2dVJIYQUQgFrgG5h061mxkRRg1Ax6r+6WOj1FxaFZ5qaWqqg==",
|
||||
"dependencies": {
|
||||
"Microsoft.Bcl.AsyncInterfaces": "5.0.0",
|
||||
"System.Memory": "4.5.5"
|
||||
}
|
||||
},
|
||||
"Microsoft.Build.Tasks.Git": {
|
||||
"type": "Transitive",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
|
||||
},
|
||||
"Microsoft.NETCore.Platforms": {
|
||||
"type": "Transitive",
|
||||
"resolved": "1.1.0",
|
||||
"contentHash": "kz0PEW2lhqygehI/d6XsPCQzD7ff7gUJaVGPVETX611eadGsA3A877GdSlU0LRVMCTH/+P3o2iDTak+S08V2+A=="
|
||||
},
|
||||
"Microsoft.SourceLink.Common": {
|
||||
"type": "Transitive",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
|
||||
},
|
||||
"System.Buffers": {
|
||||
"type": "Transitive",
|
||||
"resolved": "4.5.1",
|
||||
"contentHash": "Rw7ijyl1qqRS0YQD/WycNst8hUUMgrMH4FCn1nNm27M4VxchZ1js3fVjQaANHO5f3sN4isvP4a+Met9Y4YomAg=="
|
||||
},
|
||||
"System.Numerics.Vectors": {
|
||||
"type": "Transitive",
|
||||
"resolved": "4.4.0",
|
||||
"contentHash": "UiLzLW+Lw6HLed1Hcg+8jSRttrbuXv7DANVj0DkL9g6EnnzbL75EB7EWsw5uRbhxd/4YdG8li5XizGWepmG3PQ=="
|
||||
},
|
||||
"System.Runtime.CompilerServices.Unsafe": {
|
||||
"type": "Transitive",
|
||||
"resolved": "6.0.0",
|
||||
"contentHash": "/iUeP3tq1S0XdNNoMz5C9twLSrM/TH+qElHkXWaPvuNOt+99G75NrV0OS2EqHx5wMN7popYjpc8oTjC1y16DLg=="
|
||||
},
|
||||
"System.Threading.Tasks.Extensions": {
|
||||
"type": "Transitive",
|
||||
"resolved": "4.5.4",
|
||||
"contentHash": "zteT+G8xuGu6mS+mzDzYXbzS7rd3K6Fjb9RiZlYlJPam2/hU7JCBZBVEcywNuR+oZ1ncTvc/cq0faRr3P01OVg==",
|
||||
"dependencies": {
|
||||
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
|
||||
}
|
||||
}
|
||||
},
|
||||
".NETStandard,Version=v2.1": {
|
||||
"Microsoft.Bcl.AsyncInterfaces": {
|
||||
"type": "Direct",
|
||||
"requested": "[8.0.0, )",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "3WA9q9yVqJp222P3x1wYIGDAkpjAku0TMUaaQV22g6L67AI0LdOIrVS7Ht2vJfLHGSPVuqN94vIr15qn+HEkHw=="
|
||||
},
|
||||
"Microsoft.SourceLink.GitHub": {
|
||||
"type": "Direct",
|
||||
"requested": "[8.0.0, )",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
|
||||
"dependencies": {
|
||||
"Microsoft.Build.Tasks.Git": "8.0.0",
|
||||
"Microsoft.SourceLink.Common": "8.0.0"
|
||||
}
|
||||
},
|
||||
"System.Text.Encoding.CodePages": {
|
||||
"type": "Direct",
|
||||
"requested": "[8.0.0, )",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "OZIsVplFGaVY90G2SbpgU7EnCoOO5pw1t4ic21dBF3/1omrJFpAGoNAVpPyMVOC90/hvgkGG3VFqR13YgZMQfg==",
|
||||
"dependencies": {
|
||||
"System.Memory": "4.5.5",
|
||||
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
|
||||
}
|
||||
},
|
||||
"ZstdSharp.Port": {
|
||||
"type": "Direct",
|
||||
"requested": "[0.8.1, )",
|
||||
"resolved": "0.8.1",
|
||||
"contentHash": "19tNz33kn2EkyViFXuxfVn338UJaRmkwBphVqP2dVJIYQUQgFrgG5h061mxkRRg1Ax6r+6WOj1FxaFZ5qaWqqg==",
|
||||
"dependencies": {
|
||||
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
|
||||
}
|
||||
},
|
||||
"Microsoft.Build.Tasks.Git": {
|
||||
"type": "Transitive",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
|
||||
},
|
||||
"Microsoft.SourceLink.Common": {
|
||||
"type": "Transitive",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
|
||||
},
|
||||
"System.Buffers": {
|
||||
"type": "Transitive",
|
||||
"resolved": "4.5.1",
|
||||
"contentHash": "Rw7ijyl1qqRS0YQD/WycNst8hUUMgrMH4FCn1nNm27M4VxchZ1js3fVjQaANHO5f3sN4isvP4a+Met9Y4YomAg=="
|
||||
},
|
||||
"System.Numerics.Vectors": {
|
||||
"type": "Transitive",
|
||||
"resolved": "4.4.0",
|
||||
"contentHash": "UiLzLW+Lw6HLed1Hcg+8jSRttrbuXv7DANVj0DkL9g6EnnzbL75EB7EWsw5uRbhxd/4YdG8li5XizGWepmG3PQ=="
|
||||
},
|
||||
"System.Runtime.CompilerServices.Unsafe": {
|
||||
"type": "Transitive",
|
||||
"resolved": "6.0.0",
|
||||
"contentHash": "/iUeP3tq1S0XdNNoMz5C9twLSrM/TH+qElHkXWaPvuNOt+99G75NrV0OS2EqHx5wMN7popYjpc8oTjC1y16DLg=="
|
||||
},
|
||||
"System.Memory": {
|
||||
"type": "CentralTransitive",
|
||||
"requested": "[4.5.5, )",
|
||||
"resolved": "4.5.5",
|
||||
"contentHash": "XIWiDvKPXaTveaB7HVganDlOCRoj03l+jrwNvcge/t8vhGYKvqV+dMv6G4SAX2NoNmN0wZfVPTAlFwZcZvVOUw==",
|
||||
"dependencies": {
|
||||
"System.Buffers": "4.5.1",
|
||||
"System.Numerics.Vectors": "4.4.0",
|
||||
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
|
||||
}
|
||||
}
|
||||
},
|
||||
"net6.0": {
|
||||
"Microsoft.SourceLink.GitHub": {
|
||||
"type": "Direct",
|
||||
"requested": "[8.0.0, )",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
|
||||
"dependencies": {
|
||||
"Microsoft.Build.Tasks.Git": "8.0.0",
|
||||
"Microsoft.SourceLink.Common": "8.0.0"
|
||||
}
|
||||
},
|
||||
"ZstdSharp.Port": {
|
||||
"type": "Direct",
|
||||
"requested": "[0.8.1, )",
|
||||
"resolved": "0.8.1",
|
||||
"contentHash": "19tNz33kn2EkyViFXuxfVn338UJaRmkwBphVqP2dVJIYQUQgFrgG5h061mxkRRg1Ax6r+6WOj1FxaFZ5qaWqqg=="
|
||||
},
|
||||
"Microsoft.Build.Tasks.Git": {
|
||||
"type": "Transitive",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
|
||||
},
|
||||
"Microsoft.SourceLink.Common": {
|
||||
"type": "Transitive",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
|
||||
}
|
||||
},
|
||||
"net8.0": {
|
||||
"Microsoft.NET.ILLink.Tasks": {
|
||||
"type": "Direct",
|
||||
"requested": "[8.0.7, )",
|
||||
"resolved": "8.0.7",
|
||||
"contentHash": "iI52ptEKby2ymQ6B7h4TWbFmm85T4VvLgc/HvS45Yr3lgi4IIFbQtjON3bQbX/Vc94jXNSLvrDOp5Kh7SJyFYQ=="
|
||||
},
|
||||
"Microsoft.SourceLink.GitHub": {
|
||||
"type": "Direct",
|
||||
"requested": "[8.0.0, )",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
|
||||
"dependencies": {
|
||||
"Microsoft.Build.Tasks.Git": "8.0.0",
|
||||
"Microsoft.SourceLink.Common": "8.0.0"
|
||||
}
|
||||
},
|
||||
"ZstdSharp.Port": {
|
||||
"type": "Direct",
|
||||
"requested": "[0.8.1, )",
|
||||
"resolved": "0.8.1",
|
||||
"contentHash": "19tNz33kn2EkyViFXuxfVn338UJaRmkwBphVqP2dVJIYQUQgFrgG5h061mxkRRg1Ax6r+6WOj1FxaFZ5qaWqqg=="
|
||||
},
|
||||
"Microsoft.Build.Tasks.Git": {
|
||||
"type": "Transitive",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
|
||||
},
|
||||
"Microsoft.SourceLink.Common": {
|
||||
"type": "Transitive",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -73,45 +73,27 @@ public class ArchiveTests : ReaderTests
        }
    }

    protected void ArchiveStreamRead(string testArchive, ReaderOptions? readerOptions = null) =>
        ArchiveStreamRead(ArchiveFactory.AutoFactory, testArchive, readerOptions);

    protected void ArchiveStreamRead(
        IArchiveFactory archiveFactory,
        string testArchive,
        ReaderOptions? readerOptions = null
    )
    protected void ArchiveStreamRead(string testArchive, ReaderOptions? readerOptions = null)
    {
        testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
        ArchiveStreamRead(archiveFactory, readerOptions, testArchive);
        ArchiveStreamRead(readerOptions, testArchive);
    }

    protected void ArchiveStreamRead(
        ReaderOptions? readerOptions = null,
        params string[] testArchives
    ) => ArchiveStreamRead(ArchiveFactory.AutoFactory, readerOptions, testArchives);

    protected void ArchiveStreamRead(
        IArchiveFactory archiveFactory,
        ReaderOptions? readerOptions = null,
        params string[] testArchives
    ) =>
        ArchiveStreamRead(
            archiveFactory,
            readerOptions,
            testArchives.Select(x => Path.Combine(TEST_ARCHIVES_PATH, x))
        );

    protected void ArchiveStreamRead(
        IArchiveFactory archiveFactory,
        ReaderOptions? readerOptions,
        IEnumerable<string> testArchives
    )
    protected void ArchiveStreamRead(ReaderOptions? readerOptions, IEnumerable<string> testArchives)
    {
        foreach (var path in testArchives)
        {
            using (var stream = NonDisposingStream.Create(File.OpenRead(path), true))
            using (var archive = archiveFactory.Open(stream, readerOptions))
            using (var archive = ArchiveFactory.Open(stream, readerOptions))
            {
                try
                {
@@ -236,14 +218,10 @@ public class ArchiveTests : ReaderTests
        }
    }

    protected void ArchiveFileRead(
        IArchiveFactory archiveFactory,
        string testArchive,
        ReaderOptions? readerOptions = null
    )
    protected void ArchiveFileRead(string testArchive, ReaderOptions? readerOptions = null)
    {
        testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
        using (var archive = archiveFactory.Open(new FileInfo(testArchive), readerOptions))
        using (var archive = ArchiveFactory.Open(testArchive, readerOptions))
        {
            foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
            {
@@ -256,19 +234,18 @@ public class ArchiveTests : ReaderTests
        VerifyFiles();
    }

    protected void ArchiveFileRead(string testArchive, ReaderOptions? readerOptions = null) =>
        ArchiveFileRead(ArchiveFactory.AutoFactory, testArchive, readerOptions);

    protected void ArchiveFileSkip(
        string testArchive,
        string fileOrder,
        ReaderOptions? readerOptions = null
    )
    {
        if (!Environment.OSVersion.IsWindows())
#if !NETFRAMEWORK
        if (!OperatingSystem.IsWindows())
        {
            fileOrder = fileOrder.Replace('\\', '/');
        }
#endif
        var expected = new Stack<string>(fileOrder.Split(' '));
        testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
        using var archive = ArchiveFactory.Open(testArchive, readerOptions);

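For reference, a minimal sketch of the stream-based ArchiveFactory usage that these test helpers wrap, following the documented SharpCompress pattern; the archive path and output directory below are placeholders, not values taken from this change.

using System.IO;
using System.Linq;
using SharpCompress.Archives; // ArchiveFactory, IArchiveEntry extension methods
using SharpCompress.Common; // ExtractionOptions

internal static class ArchiveStreamReadSample
{
    private static void Main()
    {
        // Open the archive from a stream; the format is auto-detected, as in ArchiveStreamRead.
        using var stream = File.OpenRead("test.zip"); // placeholder path
        using var archive = ArchiveFactory.Open(stream);
        foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
        {
            // Extract each file entry, preserving its relative path inside the archive.
            entry.WriteToDirectory(
                "extracted", // placeholder output directory
                new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
            );
        }
    }
}
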
@@ -1,21 +0,0 @@
using System;
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
using Xunit;

namespace SharpCompress.Test.BZip2;

public class BZip2ReaderTests : ReaderTests
{
    [Fact]
    public void BZip2_Reader_Factory()
    {
        Stream stream = new MemoryStream(
            new byte[] { 0x42, 0x5a, 0x68, 0x34, 0x31, 0x41, 0x59, 0x26, 0x53, 0x59, 0x35 }
        );
        Assert.Throws(typeof(InvalidOperationException), () => ReaderFactory.Open(stream));
    }
}
@@ -4,7 +4,6 @@ using System.Linq;
using SharpCompress.Archives;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Tar;
using SharpCompress.Common;
using Xunit;

namespace SharpCompress.Test.GZip;
@@ -20,7 +19,7 @@ public class GZipArchiveTests : ArchiveTests
        using (var archive = ArchiveFactory.Open(stream))
        {
            var entry = archive.Entries.First();
            entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
            entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key));

            var size = entry.Size;
            var scratch = new FileInfo(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar"));
@@ -42,7 +41,7 @@ public class GZipArchiveTests : ArchiveTests
        using (var archive = GZipArchive.Open(stream))
        {
            var entry = archive.Entries.First();
            entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
            entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key));

            var size = entry.Size;
            var scratch = new FileInfo(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar"));
@@ -95,7 +94,6 @@ public class GZipArchiveTests : ArchiveTests
            using (var entryStream = archiveEntry.OpenEntryStream())
            {
                var result = TarArchive.IsTarFile(entryStream);
                Assert.True(result);
            }
            Assert.Equal(size, tarStream.Length);
            using (var entryStream = archiveEntry.OpenEntryStream())
@@ -107,7 +105,7 @@ public class GZipArchiveTests : ArchiveTests
    }

    [Fact]
    public void TestGzCrcWithMostSignificantBitNotNegative()
    public void TestGzCrcWithMostSignificaltBitNotNegative()
    {
        using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"));
        using var archive = GZipArchive.Open(stream);
@@ -117,12 +115,4 @@ public class GZipArchiveTests : ArchiveTests
            Assert.InRange(entry.Crc, 0L, 0xFFFFFFFFL);
        }
    }

    [Fact]
    public void TestGzArchiveTypeGzip()
    {
        using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"));
        using var archive = GZipArchive.Open(stream);
        Assert.Equal(archive.Type, ArchiveType.GZip);
    }
}

@@ -1,11 +0,0 @@
using System;

namespace SharpCompress.Test;

public static class OperatingSystemExtensions
{
    public static bool IsWindows(this OperatingSystem os) =>
        os.Platform == PlatformID.Win32NT
        || os.Platform == PlatformID.Win32Windows
        || os.Platform == PlatformID.Win32S;
}
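
The two platform checks that appear on either side of this diff are equivalent at runtime; the sketch below contrasts them and is standalone, not taken from the repository. OperatingSystem.IsWindows() exists only on .NET 5 and later, which is why the test change guards it with #if !NETFRAMEWORK.

using System;

internal static class PlatformCheckSample
{
    // Same PlatformID comparison the OperatingSystemExtensions helper performs.
    public static bool IsWindowsViaOsVersion() =>
        Environment.OSVersion.Platform == PlatformID.Win32NT
        || Environment.OSVersion.Platform == PlatformID.Win32Windows
        || Environment.OSVersion.Platform == PlatformID.Win32S;

    // BCL check shown on the other side of the diff; falls back to the
    // PlatformID comparison when targeting .NET Framework.
    public static bool IsWindowsViaBcl() =>
#if !NETFRAMEWORK
        OperatingSystem.IsWindows();
#else
        IsWindowsViaOsVersion();
#endif
}
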
@@ -209,7 +209,7 @@ public class RarReaderTests : ReaderTests
        {
            Assert.Equal(CompressionType.Rar, reader.Entry.CompressionType);
            using var entryStream = reader.OpenEntryStream();
            var file = Path.GetFileName(reader.Entry.Key).NotNull();
            var file = Path.GetFileName(reader.Entry.Key);
            var folder =
                Path.GetDirectoryName(reader.Entry.Key)
                ?? throw new ArgumentNullException();
@@ -293,7 +293,7 @@ public class RarReaderTests : ReaderTests
        using var reader = ReaderFactory.Open(stream, new ReaderOptions { LookForHeader = true });
        while (reader.MoveToNextEntry())
        {
            if (reader.Entry.Key.NotNull().Contains("jpg"))
            if (reader.Entry.Key.Contains("jpg"))
            {
                Assert.Equal(CompressionType.Rar, reader.Entry.CompressionType);
                reader.WriteEntryToDirectory(
@@ -316,7 +316,7 @@ public class RarReaderTests : ReaderTests
        using var reader = ReaderFactory.Open(stream, new ReaderOptions { LookForHeader = true });
        while (reader.MoveToNextEntry())
        {
            if (reader.Entry.Key.NotNull().Contains("jpg"))
            if (reader.Entry.Key.Contains("jpg"))
            {
                Assert.Equal(CompressionType.Rar, reader.Entry.CompressionType);
                reader.WriteEntryToDirectory(

Some files were not shown because too many files have changed in this diff.