Compare commits

..

5 Commits

Author SHA1 Message Date
Adam Hathcock
f298ad3322 more reverts 2024-03-29 16:05:20 +00:00
Adam Hathcock
69872dd9e7 split files 2024-03-29 16:00:05 +00:00
Adam Hathcock
92174f49ae revert naming changes 2024-03-29 15:53:11 +00:00
Adam Hathcock
c39a155c8f Merge branch 'master' into async-2
# Conflicts:
#	src/SharpCompress/Writers/Zip/ZipWriter.cs
2024-03-29 15:27:43 +00:00
Adam Hathcock
e5944cf72c add writer support for async 2024-03-12 15:40:29 +00:00
191 changed files with 1947 additions and 3767 deletions

View File

@@ -3,11 +3,10 @@
"isRoot": true,
"tools": {
"csharpier": {
"version": "0.30.6",
"version": "0.27.3",
"commands": [
"dotnet-csharpier"
],
"rollForward": false
]
}
}
}

View File

@@ -70,7 +70,7 @@ indent_style = tab
[*.{cs,csx,cake,vb,vbx}]
# Default Severity for all .NET Code Style rules below
dotnet_analyzer_diagnostic.severity = silent
dotnet_analyzer_diagnostic.severity = warning
##########################################
# File Header (Uncomment to support file headers)
@@ -269,8 +269,6 @@ dotnet_diagnostic.CA1305.severity = suggestion
dotnet_diagnostic.CA1307.severity = suggestion
dotnet_diagnostic.CA1309.severity = suggestion
dotnet_diagnostic.CA1310.severity = error
dotnet_diagnostic.CA1507.severity = suggestion
dotnet_diagnostic.CA1513.severity = suggestion
dotnet_diagnostic.CA1707.severity = suggestion
dotnet_diagnostic.CA1708.severity = suggestion
dotnet_diagnostic.CA1711.severity = suggestion
@@ -288,7 +286,6 @@ dotnet_diagnostic.CA1834.severity = error
dotnet_diagnostic.CA1845.severity = suggestion
dotnet_diagnostic.CA1848.severity = suggestion
dotnet_diagnostic.CA1852.severity = suggestion
dotnet_diagnostic.CA1860.severity = silent
dotnet_diagnostic.CA2016.severity = suggestion
dotnet_diagnostic.CA2201.severity = error
dotnet_diagnostic.CA2206.severity = error
@@ -306,12 +303,13 @@ dotnet_diagnostic.CS1998.severity = error
dotnet_diagnostic.CS8602.severity = error
dotnet_diagnostic.CS8604.severity = error
dotnet_diagnostic.CS8618.severity = error
dotnet_diagnostic.CS0618.severity = suggestion
dotnet_diagnostic.CS0618.severity = error
dotnet_diagnostic.CS1998.severity = error
dotnet_diagnostic.CS4014.severity = error
dotnet_diagnostic.CS8600.severity = error
dotnet_diagnostic.CS8603.severity = error
dotnet_diagnostic.CS8625.severity = error
dotnet_diagnostic.CS8981.severity = suggestion
dotnet_diagnostic.BL0005.severity = suggestion
@@ -320,7 +318,7 @@ dotnet_diagnostic.MVC1000.severity = suggestion
dotnet_diagnostic.RZ10012.severity = error
dotnet_diagnostic.IDE0004.severity = error # redundant cast
dotnet_diagnostic.IDE0005.severity = suggestion
dotnet_diagnostic.IDE0005.severity = error
dotnet_diagnostic.IDE0007.severity = error # Use var
dotnet_diagnostic.IDE0011.severity = error # Use braces on if statements
dotnet_diagnostic.IDE0010.severity = silent # populate switch
@@ -331,7 +329,7 @@ dotnet_diagnostic.IDE0023.severity = suggestion # use expression body for operat
dotnet_diagnostic.IDE0024.severity = silent # expression body for operators
dotnet_diagnostic.IDE0025.severity = suggestion # use expression body for properties
dotnet_diagnostic.IDE0027.severity = suggestion # Use expression body for accessors
dotnet_diagnostic.IDE0028.severity = silent # expression body for accessors
dotnet_diagnostic.IDE0028.severity = silent
dotnet_diagnostic.IDE0032.severity = suggestion # Use auto property
dotnet_diagnostic.IDE0033.severity = error # prefer tuple name
dotnet_diagnostic.IDE0037.severity = suggestion # simplify anonymous type
@@ -339,7 +337,7 @@ dotnet_diagnostic.IDE0040.severity = error # modifiers required
dotnet_diagnostic.IDE0041.severity = error # simplify null
dotnet_diagnostic.IDE0042.severity = error # deconstruct variable
dotnet_diagnostic.IDE0044.severity = suggestion # make field only when possible
dotnet_diagnostic.IDE0047.severity = suggestion # parameter name
dotnet_diagnostic.IDE0047.severity = suggestion # paratemeter name
dotnet_diagnostic.IDE0051.severity = error # unused field
dotnet_diagnostic.IDE0052.severity = error # unused member
dotnet_diagnostic.IDE0053.severity = suggestion # lambda not needed
@@ -353,20 +351,11 @@ dotnet_diagnostic.IDE0066.severity = suggestion # switch expression
dotnet_diagnostic.IDE0072.severity = suggestion # Populate switch - forces population of all cases even when default specified
dotnet_diagnostic.IDE0078.severity = suggestion # use pattern matching
dotnet_diagnostic.IDE0090.severity = suggestion # new can be simplified
dotnet_diagnostic.IDE0130.severity = suggestion # namespace folder structure
dotnet_diagnostic.IDE0130.severity = error # namespace folder structure
dotnet_diagnostic.IDE0160.severity = silent # Use block namespaces ARE NOT required
dotnet_diagnostic.IDE0161.severity = error # Please use file namespaces
dotnet_diagnostic.IDE0200.severity = suggestion # lambda not needed
dotnet_diagnostic.IDE1006.severity = suggestion # Naming rule violation: These words cannot contain lower case characters
dotnet_diagnostic.IDE0260.severity = suggestion # Use pattern matching
dotnet_diagnostic.IDE0270.severity = suggestion # Null check simplifcation
dotnet_diagnostic.IDE0290.severity = error # Primary Constructor
dotnet_diagnostic.IDE0300.severity = suggestion # Collection
dotnet_diagnostic.IDE0305.severity = suggestion # Collection ToList
dotnet_diagnostic.NX0001.severity = error
dotnet_diagnostic.NX0002.severity = silent
dotnet_diagnostic.NX0003.severity = silent
##########################################
# Styles

View File

@@ -10,7 +10,5 @@
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<RunAnalyzersDuringLiveAnalysis>False</RunAnalyzersDuringLiveAnalysis>
<RunAnalyzersDuringBuild>False</RunAnalyzersDuringBuild>
<ManagePackageVersionsCentrally>true</ManagePackageVersionsCentrally>
<RestorePackagesWithLockFile>true</RestorePackagesWithLockFile>
</PropertyGroup>
</Project>

View File

@@ -1,19 +0,0 @@
<Project>
<ItemGroup>
<PackageVersion Include="Bullseye" Version="5.0.0" />
<PackageVersion Include="FluentAssertions" Version="7.0.0" />
<PackageVersion Include="Glob" Version="1.1.9" />
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="8.0.0" />
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="17.12.0" />
<PackageVersion Include="Mono.Posix.NETStandard" Version="1.0.0" />
<PackageVersion Include="SimpleExec" Version="12.0.0" />
<PackageVersion Include="System.Buffers" Version="4.6.0" />
<PackageVersion Include="System.Memory" Version="4.6.0" />
<PackageVersion Include="System.Text.Encoding.CodePages" Version="8.0.0" />
<PackageVersion Include="xunit" Version="2.9.3" />
<PackageVersion Include="xunit.runner.visualstudio" Version="3.0.1" />
<PackageVersion Include="xunit.SkippableFact" Version="1.5.23" />
<PackageVersion Include="ZstdSharp.Port" Version="0.8.4" />
<GlobalPackageReference Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
</ItemGroup>
</Project>

View File

@@ -11,7 +11,7 @@
| Archive Format | Compression Format(s) | Compress/Decompress | Archive API | Reader API | Writer API |
| ---------------------- | ------------------------------------------------- | ------------------- | --------------- | ---------- | ------------- |
| Rar | Rar | Decompress (1) | RarArchive | RarReader | N/A |
| Zip (2) | None, Shrink, Reduce, Implode, DEFLATE, Deflate64, BZip2, LZMA/LZMA2, PPMd | Both | ZipArchive | ZipReader | ZipWriter |
| Zip (2) | None, DEFLATE, Deflate64, BZip2, LZMA/LZMA2, PPMd | Both | ZipArchive | ZipReader | ZipWriter |
| Tar | None | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.GZip | DEFLATE | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.BZip2 | BZip2 | Both | TarArchive | TarReader | TarWriter (3) |

View File

@@ -1,9 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<packageSourceMapping>
<!-- key value for <packageSource> should match key values from <packageSources> element -->
<packageSource key="nuget.org">
<package pattern="*" />
</packageSource>
</packageSourceMapping>
</configuration>

View File

@@ -1,12 +1,12 @@
# SharpCompress
SharpCompress is a compression library in pure C# for .NET Framework 4.62, .NET Standard 2.1, .NET 6.0 and NET 8.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
SharpCompress is a compression library in pure C# for .NET Standard 2.0, 2.1, .NET Core 3.1 and .NET 5.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).
GitHub Actions Build -
[![SharpCompress](https://github.com/adamhathcock/sharpcompress/actions/workflows/dotnetcore.yml/badge.svg)](https://github.com/adamhathcock/sharpcompress/actions/workflows/dotnetcore.yml)
[![Static Badge](https://img.shields.io/badge/API%20Docs-DNDocs-190088?logo=readme&logoColor=white)](https://dndocs.com/d/sharpcompress/api/index.html)
[![Static Badge](https://img.shields.io/badge/API%20Documentation-RobiniaDocs-43bc00?logo=readme&logoColor=white)](https://www.robiniadocs.com/d/sharpcompress/api/SharpCompress.html)
## Need Help?

View File

@@ -17,9 +17,6 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Config", "Config", "{CDB425
ProjectSection(SolutionItems) = preProject
Directory.Build.props = Directory.Build.props
global.json = global.json
.editorconfig = .editorconfig
Directory.Packages.props = Directory.Packages.props
NuGet.config = NuGet.config
EndProjectSection
EndProject
Global

View File

@@ -79,10 +79,6 @@
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateStaticFields/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateStaticReadonly/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=StaticReadonly/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=15b5b1f1_002D457c_002D4ca6_002Db278_002D5615aedc07d3/@EntryIndexedValue">&lt;Policy&gt;&lt;Descriptor Staticness="Static" AccessRightKinds="Private" Description="Static readonly fields (private)"&gt;&lt;ElementKinds&gt;&lt;Kind Name="READONLY_FIELD" /&gt;&lt;/ElementKinds&gt;&lt;/Descriptor&gt;&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;&lt;/Policy&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=4a98fdf6_002D7d98_002D4f5a_002Dafeb_002Dea44ad98c70c/@EntryIndexedValue">&lt;Policy&gt;&lt;Descriptor Staticness="Instance" AccessRightKinds="Private" Description="Instance fields (private)"&gt;&lt;ElementKinds&gt;&lt;Kind Name="FIELD" /&gt;&lt;Kind Name="READONLY_FIELD" /&gt;&lt;/ElementKinds&gt;&lt;/Descriptor&gt;&lt;Policy Inspect="True" Prefix="_" Suffix="" Style="aaBb" /&gt;&lt;/Policy&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=c873eafb_002Dd57f_002D481d_002D8c93_002D77f6863c2f88/@EntryIndexedValue">&lt;Policy&gt;&lt;Descriptor Staticness="Static" AccessRightKinds="Protected, ProtectedInternal, Internal, Public, PrivateProtected" Description="Static readonly fields (not private)"&gt;&lt;ElementKinds&gt;&lt;Kind Name="READONLY_FIELD" /&gt;&lt;/ElementKinds&gt;&lt;/Descriptor&gt;&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;&lt;/Policy&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=f9fce829_002De6f4_002D4cb2_002D80f1_002D5497c44f51df/@EntryIndexedValue">&lt;Policy&gt;&lt;Descriptor Staticness="Static" AccessRightKinds="Private" Description="Static fields (private)"&gt;&lt;ElementKinds&gt;&lt;Kind Name="FIELD" /&gt;&lt;/ElementKinds&gt;&lt;/Descriptor&gt;&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;&lt;/Policy&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FCONSTANT/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FFUNCTION/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FVARIABLE/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
@@ -131,7 +127,6 @@
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002ECSharpPlaceAttributeOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateBlankLinesAroundFieldToBlankLinesAroundProperty/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateThisQualifierSettings/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EPredefinedNamingRulesToUserRulesUpgrade/@EntryIndexedValue">True</s:Boolean>
<s:String x:Key="/Default/Environment/UnitTesting/UnitTestSessionStore/Sessions/=6af8f80e_002D9fdd_002D4223_002D8e02_002D473db916f9b2/@EntryIndexedValue">&lt;SessionState ContinuousTestingIsOn="False" ContinuousTestingMode="0" FrameworkVersion="{x:Null}" IsLocked="False" Name="All tests from Solution" PlatformMonoPreference="{x:Null}" PlatformType="{x:Null}" xmlns="urn:schemas-jetbrains-com:jetbrains-ut-session" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"&gt;
&lt;Solution /&gt;
&lt;/SessionState&gt;</s:String></wpf:ResourceDictionary>

View File

@@ -27,7 +27,7 @@ To deal with the "correct" rules as well as the expectations of users, I've deci
To be explicit though, consider always using the overloads that use `ReaderOptions` or `WriterOptions` and explicitly set `LeaveStreamOpen` the way you want.
If using Compression Stream classes directly and you don't want the wrapped stream to be closed. Use the `NonDisposingStream` as a wrapper to prevent the stream being disposed. The change in 0.21 simplified a lot even though the usage is a bit more convoluted.
If using Compression Stream classes directly and you don't want the wrapped stream to be closed. Use the `NonDisposingStream` as a wrapped to prevent the stream being disposed. The change in 0.21 simplified a lot even though the usage is a bit more convoluted.
## Samples

View File

@@ -2,13 +2,13 @@
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net8.0</TargetFramework>
<TargetFramework>net7.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Bullseye" />
<PackageReference Include="Glob" />
<PackageReference Include="SimpleExec" />
<PackageReference Include="Bullseye" Version="4.2.1" />
<PackageReference Include="Glob" Version="1.1.9" />
<PackageReference Include="SimpleExec" Version="11.0.0" />
</ItemGroup>
</Project>

View File

@@ -1,45 +0,0 @@
{
"version": 2,
"dependencies": {
"net8.0": {
"Bullseye": {
"type": "Direct",
"requested": "[5.0.0, )",
"resolved": "5.0.0",
"contentHash": "bqyt+m17ym+5aN45C5oZRAjuLDt8jKiCm/ys1XfymIXSkrTFwvI/QsbY3ucPSHDz7SF7uON7B57kXFv5H2k1ew=="
},
"Glob": {
"type": "Direct",
"requested": "[1.1.9, )",
"resolved": "1.1.9",
"contentHash": "AfK5+ECWYTP7G3AAdnU8IfVj+QpGjrh9GC2mpdcJzCvtQ4pnerAGwHsxJ9D4/RnhDUz2DSzd951O/lQjQby2Sw=="
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "8.0.0",
"Microsoft.SourceLink.Common": "8.0.0"
}
},
"SimpleExec": {
"type": "Direct",
"requested": "[12.0.0, )",
"resolved": "12.0.0",
"contentHash": "ptxlWtxC8vM6Y6e3h9ZTxBBkOWnWrm/Sa1HT+2i1xcXY3Hx2hmKDZP5RShPf8Xr9D+ivlrXNy57ktzyH8kyt+Q=="
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
}
}
}
}

View File

@@ -141,7 +141,7 @@ internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/bl
4,
3,
2,
1, // tap2
1 // tap2
};
#endif

View File

@@ -12,35 +12,39 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
where TEntry : IArchiveEntry
where TVolume : IVolume
{
private readonly LazyReadOnlyCollection<TVolume> _lazyVolumes;
private readonly LazyReadOnlyCollection<TEntry> _lazyEntries;
private bool _disposed;
private readonly SourceStream? _sourceStream;
private readonly LazyReadOnlyCollection<TVolume> lazyVolumes;
private readonly LazyReadOnlyCollection<TEntry> lazyEntries;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionBegin;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionEnd;
public event EventHandler<CompressedBytesReadEventArgs>? CompressedBytesRead;
public event EventHandler<FilePartExtractionBeginEventArgs>? FilePartExtractionBegin;
protected ReaderOptions ReaderOptions { get; }
internal AbstractArchive(ArchiveType type, SourceStream sourceStream)
private bool disposed;
protected SourceStream SrcStream;
internal AbstractArchive(ArchiveType type, SourceStream srcStream)
{
Type = type;
ReaderOptions = sourceStream.ReaderOptions;
_sourceStream = sourceStream;
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(_sourceStream));
_lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
ReaderOptions = srcStream.ReaderOptions;
SrcStream = srcStream;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(SrcStream));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
}
#nullable disable
internal AbstractArchive(ArchiveType type)
{
Type = type;
ReaderOptions = new();
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
_lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
}
#nullable enable
public ArchiveType Type { get; }
void IArchiveExtractionListener.FireEntryExtractionBegin(IArchiveEntry entry) =>
@@ -61,12 +65,12 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive.
/// </summary>
public virtual ICollection<TEntry> Entries => _lazyEntries;
public virtual ICollection<TEntry> Entries => lazyEntries;
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveVolumes across the one or many parts of the RarArchive.
/// </summary>
public ICollection<TVolume> Volumes => _lazyVolumes;
public ICollection<TVolume> Volumes => lazyVolumes;
/// <summary>
/// The total size of the files compressed in the archive.
@@ -80,29 +84,29 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
public virtual long TotalUncompressSize =>
Entries.Aggregate(0L, (total, cf) => total + cf.Size);
protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream sourceStream);
protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream srcStream);
protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);
IEnumerable<IArchiveEntry> IArchive.Entries => Entries.Cast<IArchiveEntry>();
IEnumerable<IVolume> IArchive.Volumes => _lazyVolumes.Cast<IVolume>();
IEnumerable<IVolume> IArchive.Volumes => lazyVolumes.Cast<IVolume>();
public virtual void Dispose()
{
if (!_disposed)
if (!disposed)
{
_lazyVolumes.ForEach(v => v.Dispose());
_lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
_sourceStream?.Dispose();
lazyVolumes.ForEach(v => v.Dispose());
lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
SrcStream?.Dispose();
_disposed = true;
disposed = true;
}
}
void IArchiveExtractionListener.EnsureEntriesLoaded()
{
_lazyEntries.EnsureFullyLoaded();
_lazyVolumes.EnsureFullyLoaded();
lazyEntries.EnsureFullyLoaded();
lazyVolumes.EnsureFullyLoaded();
}
void IExtractionListener.FireCompressedBytesRead(

View File

@@ -41,8 +41,8 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
internal AbstractWritableArchive(ArchiveType type)
: base(type) { }
internal AbstractWritableArchive(ArchiveType type, SourceStream sourceStream)
: base(type, sourceStream) { }
internal AbstractWritableArchive(ArchiveType type, SourceStream srcStream)
: base(type, srcStream) { }
public override ICollection<TEntry> Entries
{
@@ -120,10 +120,6 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
{
foreach (var path in Entries.Select(x => x.Key))
{
if (path is null)
{
continue;
}
var p = path.Replace('/', '\\');
if (p.Length > 0 && p[0] == '\\')
{

View File

@@ -189,10 +189,9 @@ public static class ArchiveFactory
foreach (var factory in Factory.Factories)
{
var isArchive = factory.IsArchive(stream);
stream.Position = startPosition;
if (isArchive)
if (factory.IsArchive(stream, null))
{
type = factory.KnownArchiveType;
return true;
@@ -240,6 +239,4 @@ public static class ArchiveFactory
}
}
}
public static IArchiveFactory AutoFactory { get; } = new AutoArchiveFactory();
}

View File

@@ -1,27 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
class AutoArchiveFactory : IArchiveFactory
{
public string Name => nameof(AutoArchiveFactory);
public ArchiveType? KnownArchiveType => null;
public IEnumerable<string> GetSupportedExtensions() => throw new NotSupportedException();
public bool IsArchive(Stream stream, string? password = null) =>
throw new NotSupportedException();
public FileInfo? GetFilePart(int index, FileInfo part1) => throw new NotSupportedException();
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
ArchiveFactory.Open(stream, readerOptions);
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
ArchiveFactory.Open(fileInfo, readerOptions);
}

View File

@@ -90,7 +90,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
{
stream.CheckNotNull(nameof(stream));
return new GZipArchive(
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
);
}
@@ -99,14 +99,16 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="sourceStream"></param>
private GZipArchive(SourceStream sourceStream)
: base(ArchiveType.GZip, sourceStream) { }
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal GZipArchive(SourceStream srcStream)
: base(ArchiveType.Tar, srcStream) { }
protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream sourceStream)
protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream srcStream)
{
sourceStream.LoadAllParts();
return sourceStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, 0));
srcStream.LoadAllParts();
var idx = 0;
return srcStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, idx++));
}
public static bool IsGZipFile(string filePath) => IsGZipFile(new FileInfo(filePath));
@@ -182,11 +184,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
writer.Write(
entry.Key.NotNull("Entry Key is null"),
entryStream,
entry.LastModifiedTime
);
writer.Write(entry.Key, entryStream, entry.LastModifiedTime);
}
}

View File

@@ -6,7 +6,7 @@ namespace SharpCompress.Archives.GZip;
public class GZipArchiveEntry : GZipEntry, IArchiveEntry
{
internal GZipArchiveEntry(GZipArchive archive, GZipFilePart? part)
internal GZipArchiveEntry(GZipArchive archive, GZipFilePart part)
: base(part) => Archive = archive;
public virtual Stream OpenEntryStream()

View File

@@ -1,3 +1,5 @@
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
@@ -30,7 +32,7 @@ internal sealed class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArch
public override long Crc => 0;
public override string? Key { get; }
public override string Key { get; }
public override long CompressedSize => 0;

View File

@@ -17,11 +17,15 @@ public static class IArchiveEntryExtensions
streamListener.EnsureEntriesLoaded();
streamListener.FireEntryExtractionBegin(archiveEntry);
streamListener.FireFilePartExtractionBegin(
archiveEntry.Key ?? "Key",
archiveEntry.Key,
archiveEntry.Size,
archiveEntry.CompressedSize
);
var entryStream = archiveEntry.OpenEntryStream();
if (entryStream is null)
{
return;
}
using (entryStream)
{
using Stream s = new ListeningStream(streamListener, entryStream);

View File

@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Archives;
@@ -54,26 +55,14 @@ public static class IArchiveExtensions
var entry = entries.Entry;
if (entry.IsDirectory)
{
var dirPath = Path.Combine(destination, entry.Key.NotNull("Entry Key is null"));
if (
Path.GetDirectoryName(dirPath + "/") is { } emptyDirectory
&& seenDirectories.Add(dirPath)
)
{
Directory.CreateDirectory(emptyDirectory);
}
continue;
}
// Create each directory if not already created
var path = Path.Combine(destination, entry.Key.NotNull("Entry Key is null"));
if (Path.GetDirectoryName(path) is { } directory)
// Create each directory
var path = Path.Combine(destination, entry.Key);
if (Path.GetDirectoryName(path) is { } directory && seenDirectories.Add(path))
{
if (!Directory.Exists(directory) && !seenDirectories.Contains(directory))
{
Directory.CreateDirectory(directory);
seenDirectories.Add(directory);
}
Directory.CreateDirectory(directory);
}
// Write file

View File

@@ -1,5 +1,4 @@
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using SharpCompress.Common.Rar;
@@ -14,7 +13,7 @@ namespace SharpCompress.Archives.Rar;
/// </summary>
internal class FileInfoRarArchiveVolume : RarVolume
{
internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options, int index)
internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options, int index = 0)
: base(StreamingMode.Seekable, fileInfo.OpenRead(), FixOptions(options), index)
{
FileInfo = fileInfo;

View File

@@ -14,7 +14,6 @@ namespace SharpCompress.Archives.Rar;
public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
{
private bool _disposed;
internal Lazy<IRarUnpack> UnpackV2017 { get; } =
new(() => new Compressors.Rar.UnpackV2017.Unpack());
internal Lazy<IRarUnpack> UnpackV1 { get; } = new(() => new Compressors.Rar.UnpackV1.Unpack());
@@ -22,47 +21,35 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="sourceStream"></param>
private RarArchive(SourceStream sourceStream)
: base(ArchiveType.Rar, sourceStream) { }
public override void Dispose()
{
if (!_disposed)
{
if (UnpackV1.IsValueCreated && UnpackV1.Value is IDisposable unpackV1)
{
unpackV1.Dispose();
}
_disposed = true;
base.Dispose();
}
}
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal RarArchive(SourceStream srcStream)
: base(ArchiveType.Rar, srcStream) { }
protected override IEnumerable<RarArchiveEntry> LoadEntries(IEnumerable<RarVolume> volumes) =>
RarArchiveEntryFactory.GetEntries(this, volumes, ReaderOptions);
protected override IEnumerable<RarVolume> LoadVolumes(SourceStream sourceStream)
protected override IEnumerable<RarVolume> LoadVolumes(SourceStream srcStream)
{
sourceStream.LoadAllParts(); //request all streams
var streams = sourceStream.Streams.ToArray();
var i = 0;
SrcStream.LoadAllParts(); //request all streams
var streams = SrcStream.Streams.ToArray();
var idx = 0;
if (streams.Length > 1 && IsRarFile(streams[1], ReaderOptions)) //test part 2 - true = multipart not split
{
sourceStream.IsVolumes = true;
SrcStream.IsVolumes = true;
streams[1].Position = 0;
sourceStream.Position = 0;
SrcStream.Position = 0;
return sourceStream.Streams.Select(a => new StreamRarArchiveVolume(
return srcStream.Streams.Select(a => new StreamRarArchiveVolume(
a,
ReaderOptions,
i++
idx++
));
}
//split mode or single file
return new StreamRarArchiveVolume(sourceStream, ReaderOptions, i++).AsEnumerable();
else //split mode or single file
{
return new StreamRarArchiveVolume(SrcStream, ReaderOptions, idx++).AsEnumerable();
}
}
protected override IReader CreateReaderForSolidExtraction()
@@ -121,7 +108,7 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
public static RarArchive Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull(nameof(stream));
return new RarArchive(new SourceStream(stream, _ => null, options ?? new ReaderOptions()));
return new RarArchive(new SourceStream(stream, i => null, options ?? new ReaderOptions()));
}
/// <summary>

View File

@@ -6,8 +6,8 @@ namespace SharpCompress.Archives.Rar;
internal class SeekableFilePart : RarFilePart
{
private readonly Stream _stream;
private readonly string? _password;
private readonly Stream stream;
private readonly string? password;
internal SeekableFilePart(
MarkHeader mh,
@@ -18,27 +18,27 @@ internal class SeekableFilePart : RarFilePart
)
: base(mh, fh, index)
{
_stream = stream;
_password = password;
this.stream = stream;
this.password = password;
}
internal override Stream GetCompressedStream()
{
_stream.Position = FileHeader.DataStartPosition;
stream.Position = FileHeader.DataStartPosition;
if (FileHeader.R4Salt != null)
{
var cryptKey = new CryptKey3(_password!);
return new RarCryptoWrapper(_stream, FileHeader.R4Salt, cryptKey);
var cryptKey = new CryptKey3(password!);
return new RarCryptoWrapper(stream, FileHeader.R4Salt, cryptKey);
}
if (FileHeader.Rar5CryptoInfo != null)
{
var cryptKey = new CryptKey5(_password!, FileHeader.Rar5CryptoInfo);
return new RarCryptoWrapper(_stream, FileHeader.Rar5CryptoInfo.Salt, cryptKey);
var cryptKey = new CryptKey5(password!, FileHeader.Rar5CryptoInfo);
return new RarCryptoWrapper(stream, FileHeader.Rar5CryptoInfo.Salt, cryptKey);
}
return _stream;
return stream;
}
internal override string FilePartName => "Unknown Stream - File Entry: " + FileHeader.FileName;

View File

@@ -9,7 +9,7 @@ namespace SharpCompress.Archives.Rar;
internal class StreamRarArchiveVolume : RarVolume
{
internal StreamRarArchiveVolume(Stream stream, ReaderOptions options, int index)
internal StreamRarArchiveVolume(Stream stream, ReaderOptions options, int index = 0)
: base(StreamingMode.Seekable, stream, options, index) { }
internal override IEnumerable<RarFilePart> ReadFileParts() => GetVolumeFileParts();

View File

@@ -1,3 +1,5 @@
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
@@ -12,14 +14,14 @@ namespace SharpCompress.Archives.SevenZip;
public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVolume>
{
private ArchiveDatabase? _database;
private ArchiveDatabase database;
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
public static SevenZipArchive Open(string filePath, ReaderOptions readerOptions = null)
{
filePath.CheckNotNullOrEmpty("filePath");
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
@@ -30,7 +32,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
{
fileInfo.CheckNotNull("fileInfo");
return new SevenZipArchive(
@@ -49,7 +51,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
ReaderOptions readerOptions = null
)
{
fileInfos.CheckNotNull(nameof(fileInfos));
@@ -70,7 +72,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(
IEnumerable<Stream> streams,
ReaderOptions? readerOptions = null
ReaderOptions readerOptions = null
)
{
streams.CheckNotNull(nameof(streams));
@@ -89,25 +91,27 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
public static SevenZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
{
stream.CheckNotNull("stream");
return new SevenZipArchive(
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
);
}
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="sourceStream"></param>
private SevenZipArchive(SourceStream sourceStream)
: base(ArchiveType.SevenZip, sourceStream) { }
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal SevenZipArchive(SourceStream srcStream)
: base(ArchiveType.SevenZip, srcStream) { }
protected override IEnumerable<SevenZipVolume> LoadVolumes(SourceStream sourceStream)
protected override IEnumerable<SevenZipVolume> LoadVolumes(SourceStream srcStream)
{
sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams
return new SevenZipVolume(sourceStream, ReaderOptions, 0).AsEnumerable(); //simple single volume or split, multivolume not supported
SrcStream.LoadAllParts(); //request all streams
var idx = 0;
return new SevenZipVolume(srcStream, ReaderOptions, idx++).AsEnumerable(); //simple single volume or split, multivolume not supported
}
public static bool IsSevenZipFile(string filePath) => IsSevenZipFile(new FileInfo(filePath));
@@ -131,17 +135,13 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
{
var stream = volumes.Single().Stream;
LoadFactory(stream);
if (_database is null)
var entries = new SevenZipArchiveEntry[database._files.Count];
for (var i = 0; i < database._files.Count; i++)
{
return Enumerable.Empty<SevenZipArchiveEntry>();
}
var entries = new SevenZipArchiveEntry[_database._files.Count];
for (var i = 0; i < _database._files.Count; i++)
{
var file = _database._files[i];
var file = database._files[i];
entries[i] = new SevenZipArchiveEntry(
this,
new SevenZipFilePart(stream, _database, i, file, ReaderOptions.ArchiveEncoding)
new SevenZipFilePart(stream, database, i, file, ReaderOptions.ArchiveEncoding)
);
}
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
@@ -159,12 +159,12 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
private void LoadFactory(Stream stream)
{
if (_database is null)
if (database is null)
{
stream.Position = 0;
var reader = new ArchiveReader();
reader.Open(stream, lookForHeader: ReaderOptions.LookForHeader);
_database = reader.ReadDatabase(new PasswordProvider(ReaderOptions.Password));
reader.Open(stream);
database = reader.ReadDatabase(new PasswordProvider(ReaderOptions.Password));
}
}
@@ -180,14 +180,14 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
}
}
private static ReadOnlySpan<byte> Signature =>
private static ReadOnlySpan<byte> SIGNATURE =>
new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };
private static bool SignatureMatch(Stream stream)
{
var reader = new BinaryReader(stream);
ReadOnlySpan<byte> signatureBytes = reader.ReadBytes(6);
return signatureBytes.SequenceEqual(Signature);
return signatureBytes.SequenceEqual(SIGNATURE);
}
protected override IReader CreateReaderForSolidExtraction() =>
@@ -196,24 +196,30 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
public override bool IsSolid =>
Entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder).Count() > 1;
public override long TotalSize =>
_database?._packSizes.Aggregate(0L, (total, packSize) => total + packSize) ?? 0;
public override long TotalSize
{
get
{
var i = Entries.Count;
return database._packSizes.Aggregate(0L, (total, packSize) => total + packSize);
}
}
private sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
{
private readonly SevenZipArchive _archive;
private CFolder? _currentFolder;
private Stream? _currentStream;
private CFileItem? _currentItem;
private readonly SevenZipArchive archive;
private CFolder currentFolder;
private Stream currentStream;
private CFileItem currentItem;
internal SevenZipReader(ReaderOptions readerOptions, SevenZipArchive archive)
: base(readerOptions, ArchiveType.SevenZip) => this._archive = archive;
: base(readerOptions, ArchiveType.SevenZip) => this.archive = archive;
public override SevenZipVolume Volume => _archive.Volumes.Single();
public override SevenZipVolume Volume => archive.Volumes.Single();
protected override IEnumerable<SevenZipEntry> GetEntries(Stream stream)
{
var entries = _archive.Entries.ToList();
var entries = archive.Entries.ToList();
stream.Position = 0;
foreach (var dir in entries.Where(x => x.IsDirectory))
{
@@ -223,42 +229,37 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder)
)
{
_currentFolder = group.Key;
currentFolder = group.Key;
if (group.Key is null)
{
_currentStream = Stream.Null;
currentStream = Stream.Null;
}
else
{
_currentStream = _archive._database?.GetFolderStream(
currentStream = archive.database.GetFolderStream(
stream,
_currentFolder,
currentFolder,
new PasswordProvider(Options.Password)
);
}
foreach (var entry in group)
{
_currentItem = entry.FilePart.Header;
currentItem = entry.FilePart.Header;
yield return entry;
}
}
}
protected override EntryStream GetEntryStream() =>
CreateEntryStream(
new ReadOnlySubStream(
_currentStream.NotNull("currentStream is not null"),
_currentItem?.Size ?? 0
)
);
CreateEntryStream(new ReadOnlySubStream(currentStream, currentItem.Size));
}
private class PasswordProvider : IPasswordProvider
{
private readonly string? _password;
private readonly string _password;
public PasswordProvider(string? password) => _password = password;
public PasswordProvider(string password) => _password = password;
public string? CryptoGetTextPassword() => _password;
public string CryptoGetTextPassword() => _password;
}
}

View File

@@ -114,7 +114,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
var tarHeader = new TarHeader(new ArchiveEncoding());
var readSucceeded = tarHeader.Read(new BinaryReader(stream));
var isEmptyArchive =
tarHeader.Name?.Length == 0
tarHeader.Name.Length == 0
&& tarHeader.Size == 0
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
return readSucceeded || isEmptyArchive;
@@ -123,20 +123,22 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
return false;
}
protected override IEnumerable<TarVolume> LoadVolumes(SourceStream sourceStream)
protected override IEnumerable<TarVolume> LoadVolumes(SourceStream srcStream)
{
sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams
return new TarVolume(sourceStream, ReaderOptions, 1).AsEnumerable(); //simple single volume or split, multivolume not supported
SrcStream.LoadAllParts(); //request all streams
var idx = 0;
return new TarVolume(srcStream, ReaderOptions, idx++).AsEnumerable(); //simple single volume or split, multivolume not supported
}
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="sourceStream"></param>
private TarArchive(SourceStream sourceStream)
: base(ArchiveType.Tar, sourceStream) { }
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal TarArchive(SourceStream srcStream)
: base(ArchiveType.Tar, srcStream) { }
private TarArchive()
internal TarArchive()
: base(ArchiveType.Tar) { }
protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
@@ -190,10 +192,6 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
);
}
}
else
{
throw new IncompleteArchiveException("Failed to read TAR header");
}
}
}
@@ -227,12 +225,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
writer.Write(
entry.Key.NotNull("Entry Key is null"),
entryStream,
entry.LastModifiedTime,
entry.Size
);
writer.Write(entry.Key, entryStream, entry.LastModifiedTime, entry.Size);
}
}

View File

@@ -7,7 +7,7 @@ namespace SharpCompress.Archives.Tar;
public class TarArchiveEntry : TarEntry, IArchiveEntry
{
internal TarArchiveEntry(TarArchive archive, TarFilePart? part, CompressionType compressionType)
internal TarArchiveEntry(TarArchive archive, TarFilePart part, CompressionType compressionType)
: base(part, compressionType) => Archive = archive;
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream();

View File

@@ -1,3 +1,5 @@
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;

View File

@@ -16,7 +16,10 @@ namespace SharpCompress.Archives.Zip;
public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
{
private readonly SeekableZipHeaderFactory? headerFactory;
#nullable disable
private readonly SeekableZipHeaderFactory headerFactory;
#nullable enable
/// <summary>
/// Gets or sets the compression level applied to files added to the archive,
@@ -27,13 +30,13 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="sourceStream"></param>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal ZipArchive(SourceStream sourceStream)
: base(ArchiveType.Zip, sourceStream) =>
internal ZipArchive(SourceStream srcStream)
: base(ArchiveType.Zip, srcStream) =>
headerFactory = new SeekableZipHeaderFactory(
sourceStream.ReaderOptions.Password,
sourceStream.ReaderOptions.ArchiveEncoding
srcStream.ReaderOptions.Password,
srcStream.ReaderOptions.ArchiveEncoding
);
/// <summary>
@@ -186,21 +189,21 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
}
}
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream stream)
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream srcStream)
{
stream.LoadAllParts(); //request all streams
stream.Position = 0;
SrcStream.LoadAllParts(); //request all streams
SrcStream.Position = 0;
var streams = stream.Streams.ToList();
var streams = SrcStream.Streams.ToList();
var idx = 0;
if (streams.Count() > 1) //test part 2 - true = multipart not split
if (streams.Count > 1) //test part 2 - true = multipart not split
{
streams[1].Position += 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
var isZip = IsZipFile(streams[1], ReaderOptions.Password);
streams[1].Position -= 4;
if (isZip)
{
stream.IsVolumes = true;
SrcStream.IsVolumes = true;
var tmp = streams[0]; //arcs as zip, z01 ... swap the zip the end
streams.RemoveAt(0);
@@ -212,7 +215,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
}
//split mode or single file
return new ZipVolume(stream, ReaderOptions, idx++).AsEnumerable();
return new ZipVolume(SrcStream, ReaderOptions, idx++).AsEnumerable();
}
internal ZipArchive()
@@ -221,13 +224,14 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
{
var vols = volumes.ToArray();
foreach (var h in headerFactory.NotNull().ReadSeekableHeader(vols.Last().Stream))
foreach (var h in headerFactory.ReadSeekableHeader(vols.Last().Stream))
{
if (h != null)
{
switch (h.ZipHeaderType)
{
case ZipHeaderType.DirectoryEntry:
{
var deh = (DirectoryEntryHeader)h;
Stream s;
@@ -250,14 +254,14 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
yield return new ZipArchiveEntry(
this,
new SeekableZipFilePart(headerFactory.NotNull(), deh, s)
new SeekableZipFilePart(headerFactory, deh, s)
);
}
break;
case ZipHeaderType.DirectoryEnd:
{
var bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
vols.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
volumes.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
yield break;
}
}
@@ -278,11 +282,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
writer.Write(
entry.Key.NotNull("Entry Key is null"),
entryStream,
entry.LastModifiedTime
);
writer.Write(entry.Key, entryStream, entry.LastModifiedTime);
}
}

View File

@@ -1,7 +1,3 @@
using System;
using System.Runtime.CompilerServices;
[assembly: CLSCompliant(true)]
[assembly: InternalsVisibleTo(
"SharpCompress.Test,PublicKey=0024000004800000940000000602000000240000525341310004000001000100158bebf1433f76dffc356733c138babea7a47536c65ed8009b16372c6f4edbb20554db74a62687f56b97c20a6ce8c4b123280279e33c894e7b3aa93ab3c573656fde4db576cfe07dba09619ead26375b25d2c4a8e43f7be257d712b0dd2eb546f67adb09281338618a58ac834fc038dd7e2740a7ab3591826252e4f4516306dc"
)]

View File

@@ -0,0 +1,33 @@
using System.Buffers;
namespace SharpCompress;
internal static class BufferPool
{
/// <summary>
/// gets a buffer from the pool
/// </summary>
/// <param name="bufferSize">size of the buffer</param>
/// <returns>the buffer</returns>
public static byte[] Rent(int bufferSize)
{
#if NETCOREAPP || NETSTANDARD2_1_OR_GREATER
return ArrayPool<byte>.Shared.Rent(bufferSize);
#else
return new byte[bufferSize];
#endif
}
/// <summary>
/// returns a buffer to the pool
/// </summary>
/// <param name="buffer">the buffer to return</param>
public static void Return(byte[] buffer)
{
#if NETCOREAPP || NETSTANDARD2_1_OR_GREATER
ArrayPool<byte>.Shared.Return(buffer);
#else
// no-op
#endif
}
}

View File

@@ -8,12 +8,12 @@ public class ArchiveEncoding
/// <summary>
/// Default encoding to use when archive format doesn't specify one.
/// </summary>
public Encoding? Default { get; set; }
public Encoding Default { get; set; }
/// <summary>
/// ArchiveEncoding used by encryption schemes which don't comply with RFC 2898.
/// </summary>
public Encoding? Password { get; set; }
public Encoding Password { get; set; }
/// <summary>
/// Set this encoding when you want to force it for all encoding operations.
@@ -50,8 +50,6 @@ public class ArchiveEncoding
public Encoding GetEncoding() => Forced ?? Default ?? Encoding.UTF8;
public Encoding GetPasswordEncoding() => Password ?? Encoding.UTF8;
public Func<byte[], int, int, string> GetDecoder() =>
CustomDecoder ?? ((bytes, index, count) => GetEncoding().GetString(bytes, index, count));
}

View File

@@ -6,5 +6,5 @@ public enum ArchiveType
Zip,
Tar,
SevenZip,
GZip,
GZip
}

View File

@@ -16,10 +16,5 @@ public enum CompressionType
Unknown,
Deflate64,
Shrink,
Lzw,
Reduce1,
Reduce2,
Reduce3,
Reduce4,
Explode,
Lzw
}

View File

@@ -14,7 +14,7 @@ public abstract class Entry : IEntry
/// <summary>
/// The string key of the file internal to the Archive.
/// </summary>
public abstract string? Key { get; }
public abstract string Key { get; }
/// <summary>
/// The target of a symlink entry internal to the Archive. Will be null if not a symlink.
@@ -71,11 +71,11 @@ public abstract class Entry : IEntry
/// </summary>
public abstract bool IsSplitAfter { get; }
public int VolumeIndexFirst => Parts.FirstOrDefault()?.Index ?? 0;
public int VolumeIndexLast => Parts.LastOrDefault()?.Index ?? 0;
public int VolumeIndexFirst => Parts?.FirstOrDefault()?.Index ?? 0;
public int VolumeIndexLast => Parts?.LastOrDefault()?.Index ?? 0;
/// <inheritdoc/>
public override string ToString() => Key ?? "Entry";
public override string ToString() => Key;
internal abstract IEnumerable<FilePart> Parts { get; }

View File

@@ -36,12 +36,10 @@ internal static class ExtractionMethods
options ??= new ExtractionOptions() { Overwrite = true };
var file = Path.GetFileName(entry.Key.NotNull("Entry Key is null")).NotNull("File is null");
file = Utility.ReplaceInvalidFileNameChars(file);
var file = Path.GetFileName(entry.Key);
if (options.ExtractFullPath)
{
var folder = Path.GetDirectoryName(entry.Key.NotNull("Entry Key is null"))
.NotNull("Directory is null");
var folder = Path.GetDirectoryName(entry.Key)!;
var destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));
if (!Directory.Exists(destdir))

View File

@@ -8,7 +8,7 @@ public abstract class FilePart
internal ArchiveEncoding ArchiveEncoding { get; }
internal abstract string? FilePartName { get; }
internal abstract string FilePartName { get; }
public int Index { get; set; }
internal abstract Stream GetCompressedStream();

View File

@@ -6,23 +6,23 @@ namespace SharpCompress.Common.GZip;
public class GZipEntry : Entry
{
private readonly GZipFilePart? _filePart;
private readonly GZipFilePart _filePart;
internal GZipEntry(GZipFilePart? filePart) => _filePart = filePart;
internal GZipEntry(GZipFilePart filePart) => _filePart = filePart;
public override CompressionType CompressionType => CompressionType.GZip;
public override long Crc => _filePart?.Crc ?? 0;
public override long Crc => _filePart.Crc ?? 0;
public override string? Key => _filePart?.FilePartName;
public override string Key => _filePart.FilePartName;
public override string? LinkTarget => null;
public override long CompressedSize => 0;
public override long Size => _filePart?.UncompressedSize ?? 0;
public override long Size => _filePart.UncompressedSize ?? 0;
public override DateTime? LastModifiedTime => _filePart?.DateModified;
public override DateTime? LastModifiedTime => _filePart.DateModified;
public override DateTime? CreatedTime => null;
@@ -36,7 +36,7 @@ public class GZipEntry : Entry
public override bool IsSplitAfter => false;
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
internal static IEnumerable<GZipEntry> GetEntries(Stream stream, OptionsBase options)
{

View File

@@ -34,7 +34,7 @@ internal sealed class GZipFilePart : FilePart
internal uint? Crc { get; private set; }
internal uint? UncompressedSize { get; private set; }
internal override string? FilePartName => _name;
internal override string FilePartName => _name!;
internal override Stream GetCompressedStream() =>
new DeflateStream(_stream, CompressionMode.Decompress, CompressionLevel.Default);

View File

@@ -5,7 +5,7 @@ namespace SharpCompress.Common.GZip;
public class GZipVolume : Volume
{
public GZipVolume(Stream stream, ReaderOptions? options, int index)
public GZipVolume(Stream stream, ReaderOptions options, int index = 0)
: base(stream, options, index) { }
public GZipVolume(FileInfo fileInfo, ReaderOptions options)

View File

@@ -9,7 +9,7 @@ public interface IEntry
long CompressedSize { get; }
long Crc { get; }
DateTime? CreatedTime { get; }
string? Key { get; }
string Key { get; }
string? LinkTarget { get; }
bool IsDirectory { get; }
bool IsEncrypted { get; }

View File

@@ -6,5 +6,5 @@ public interface IVolume : IDisposable
{
int Index { get; }
string? FileName { get; }
string FileName { get; }
}

View File

@@ -121,6 +121,7 @@ internal class FileHeader : RarHeader
switch (type)
{
case FHEXTRA_CRYPT: // file encryption
{
Rar5CryptoInfo = new Rar5CryptoInfo(reader, true);
@@ -131,6 +132,7 @@ internal class FileHeader : RarHeader
}
break;
case FHEXTRA_HASH:
{
const uint FHEXTRA_HASH_BLAKE2 = 0x0;
// const uint HASH_BLAKE2 = 0x03;
@@ -144,6 +146,7 @@ internal class FileHeader : RarHeader
}
break;
case FHEXTRA_HTIME: // file time
{
var flags = reader.ReadRarVIntUInt16();
var isWindowsTime = (flags & 1) == 0;
@@ -168,6 +171,7 @@ internal class FileHeader : RarHeader
// }
// break;
case FHEXTRA_REDIR: // file system redirection
{
RedirType = reader.ReadRarVIntByte();
RedirFlags = reader.ReadRarVIntByte();
@@ -280,6 +284,7 @@ internal class FileHeader : RarHeader
switch (HeaderCode)
{
case HeaderCodeV.RAR4_FILE_HEADER:
{
if (HasFlag(FileFlagsV4.UNICODE))
{
@@ -306,6 +311,7 @@ internal class FileHeader : RarHeader
}
break;
case HeaderCodeV.RAR4_NEW_SUB_HEADER:
{
var datasize = HeaderSize - newLhdSize - nameSize;
if (HasFlag(FileFlagsV4.SALT))

View File

@@ -13,7 +13,7 @@ public enum HeaderType : byte
Sign,
NewSub,
EndArchive,
Crypt,
Crypt
}
internal static class HeaderCodeV

View File

@@ -98,11 +98,13 @@ public class RarHeaderFactory
switch (StreamingMode)
{
case StreamingMode.Seekable:
{
reader.BaseStream.Position += ph.DataSize;
}
break;
case StreamingMode.Streaming:
{
reader.BaseStream.Skip(ph.DataSize);
}
@@ -144,12 +146,14 @@ public class RarHeaderFactory
switch (StreamingMode)
{
case StreamingMode.Seekable:
{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:
{
var ms = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
if (fh.R4Salt is null && fh.Rar5CryptoInfo is null)
@@ -200,12 +204,14 @@ public class RarHeaderFactory
switch (StreamingMode)
{
case StreamingMode.Seekable:
{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:
{
//skip the data because it's useless?
reader.BaseStream.Skip(fh.CompressedSize);

View File

@@ -70,11 +70,11 @@ internal sealed class RarCryptoWrapper : Stream
protected override void Dispose(bool disposing)
{
if (disposing)
if (_rijndael != null)
{
_rijndael.Dispose();
_rijndael = null!;
}
base.Dispose(disposing);
}
}

View File

@@ -25,7 +25,7 @@ public abstract class RarEntry : Entry
/// <summary>
/// The path of the file internal to the Rar Archive.
/// </summary>
public override string? Key => FileHeader.FileName;
public override string Key => FileHeader.FileName;
public override string? LinkTarget => null;

View File

@@ -15,14 +15,17 @@ namespace SharpCompress.Common.Rar;
public abstract class RarVolume : Volume
{
private readonly RarHeaderFactory _headerFactory;
private int _maxCompressionAlgorithm;
internal int _maxCompressionAlgorithm;
internal RarVolume(StreamingMode mode, Stream stream, ReaderOptions options, int index)
internal RarVolume(StreamingMode mode, Stream stream, ReaderOptions options, int index = 0)
: base(stream, options, index) => _headerFactory = new RarHeaderFactory(mode, options);
private ArchiveHeader? ArchiveHeader { get; set; }
#nullable disable
internal ArchiveHeader ArchiveHeader { get; private set; }
private StreamingMode Mode => _headerFactory.StreamingMode;
#nullable enable
internal StreamingMode Mode => _headerFactory.StreamingMode;
internal abstract IEnumerable<RarFilePart> ReadFileParts();
@@ -36,16 +39,19 @@ public abstract class RarVolume : Volume
switch (header.HeaderType)
{
case HeaderType.Mark:
{
lastMarkHeader = (MarkHeader)header;
}
break;
case HeaderType.Archive:
{
ArchiveHeader = (ArchiveHeader)header;
}
break;
case HeaderType.File:
{
var fh = (FileHeader)header;
if (_maxCompressionAlgorithm < fh.CompressionAlgorithm)
@@ -57,12 +63,14 @@ public abstract class RarVolume : Volume
}
break;
case HeaderType.Service:
{
var fh = (FileHeader)header;
if (fh.FileName == "CMT")
{
var part = CreateFilePart(lastMarkHeader!, fh);
var buffer = new byte[fh.CompressedSize];
fh.PackedStream.Read(buffer, 0, buffer.Length);
part.GetCompressedStream().Read(buffer, 0, buffer.Length);
Comment = Encoding.UTF8.GetString(buffer, 0, buffer.Length - 1);
}
}
@@ -97,7 +105,7 @@ public abstract class RarVolume : Volume
get
{
EnsureArchiveHeaderLoaded();
return ArchiveHeader?.IsFirstVolume ?? false;
return ArchiveHeader.IsFirstVolume;
}
}
@@ -109,7 +117,7 @@ public abstract class RarVolume : Volume
get
{
EnsureArchiveHeaderLoaded();
return ArchiveHeader?.IsVolume ?? false;
return ArchiveHeader.IsVolume;
}
}
@@ -122,7 +130,7 @@ public abstract class RarVolume : Volume
get
{
EnsureArchiveHeaderLoaded();
return ArchiveHeader?.IsSolid ?? false;
return ArchiveHeader.IsSolid;
}
}

View File

@@ -35,7 +35,7 @@ internal class ArchiveDatabase
_packSizes.Clear();
_packCrCs.Clear();
_folders.Clear();
_numUnpackStreamsVector = null;
_numUnpackStreamsVector = null!;
_files.Clear();
_packStreamStartPositions.Clear();

View File

@@ -1220,46 +1220,23 @@ internal class ArchiveReader
#region Public Methods
public void Open(Stream stream, bool lookForHeader)
public void Open(Stream stream)
{
Close();
_streamOrigin = stream.Position;
_streamEnding = stream.Length;
var canScan = lookForHeader ? 0x80000 - 20 : 0;
while (true)
// TODO: Check Signature!
_header = new byte[0x20];
for (var offset = 0; offset < 0x20; )
{
// TODO: Check Signature!
_header = new byte[0x20];
for (var offset = 0; offset < 0x20; )
var delta = stream.Read(_header, offset, 0x20 - offset);
if (delta == 0)
{
var delta = stream.Read(_header, offset, 0x20 - offset);
if (delta == 0)
{
throw new EndOfStreamException();
}
offset += delta;
throw new EndOfStreamException();
}
if (
!lookForHeader
|| _header
.AsSpan(0, length: 6)
.SequenceEqual<byte>([0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C])
)
{
break;
}
if (canScan == 0)
{
throw new InvalidFormatException("Unable to find 7z signature");
}
canScan--;
stream.Position = ++_streamOrigin;
offset += delta;
}
_stream = stream;

View File

@@ -13,7 +13,7 @@ public class SevenZipEntry : Entry
public override long Crc => FilePart.Header.Crc ?? 0;
public override string? Key => FilePart.Header.Name;
public override string Key => FilePart.Header.Name;
public override string? LinkTarget => null;

View File

@@ -41,7 +41,7 @@ internal class SevenZipFilePart : FilePart
{
if (!Header.HasStream)
{
throw new InvalidOperationException("File does not have a stream.");
return null!;
}
var folderStream = _database.GetFolderStream(_stream, Folder!, _database.PasswordProvider);
@@ -73,24 +73,34 @@ internal class SevenZipFilePart : FilePart
private const uint K_PPMD = 0x030401;
private const uint K_B_ZIP2 = 0x040202;
private CompressionType GetCompression()
internal CompressionType GetCompression()
{
if (Header.IsDir)
{
return CompressionType.None;
}
var coder = Folder.NotNull()._coders.First();
return coder._methodId._id switch
var coder = Folder!._coders.First();
switch (coder._methodId._id)
{
K_LZMA or K_LZMA2 => CompressionType.LZMA,
K_PPMD => CompressionType.PPMd,
K_B_ZIP2 => CompressionType.BZip2,
_ => throw new NotImplementedException(),
};
case K_LZMA:
case K_LZMA2:
{
return CompressionType.LZMA;
}
case K_PPMD:
{
return CompressionType.PPMd;
}
case K_B_ZIP2:
{
return CompressionType.BZip2;
}
default:
throw new NotImplementedException();
}
}
internal bool IsEncrypted =>
!Header.IsDir
&& Folder?._coders.FindIndex(c => c._methodId._id == CMethodId.K_AES_ID) != -1;
Header.IsDir
? false
: Folder!._coders.FindIndex(c => c._methodId._id == CMethodId.K_AES_ID) != -1;
}

View File

@@ -14,5 +14,5 @@ internal enum EntryType : byte
LongName = (byte)'L',
SparseFile = (byte)'S',
VolumeHeader = (byte)'V',
GlobalExtendedHeader = (byte)'g',
GlobalExtendedHeader = (byte)'g'
}

View File

@@ -1,3 +1,5 @@
#nullable disable
using System;
using System.Buffers.Binary;
using System.IO;
@@ -11,8 +13,8 @@ internal sealed class TarHeader
public TarHeader(ArchiveEncoding archiveEncoding) => ArchiveEncoding = archiveEncoding;
internal string? Name { get; set; }
internal string? LinkName { get; set; }
internal string Name { get; set; }
internal string LinkName { get; set; }
internal long Mode { get; set; }
internal long UserId { get; set; }
@@ -20,7 +22,7 @@ internal sealed class TarHeader
internal long Size { get; set; }
internal DateTime LastModifiedTime { get; set; }
internal EntryType EntryType { get; set; }
internal Stream? PackedStream { get; set; }
internal Stream PackedStream { get; set; }
internal ArchiveEncoding ArchiveEncoding { get; }
internal const int BLOCK_SIZE = 512;
@@ -34,9 +36,7 @@ internal sealed class TarHeader
WriteOctalBytes(0, buffer, 116, 8); // group ID
//ArchiveEncoding.UTF8.GetBytes("magic").CopyTo(buffer, 257);
var nameByteCount = ArchiveEncoding
.GetEncoding()
.GetByteCount(Name.NotNull("Name is null"));
var nameByteCount = ArchiveEncoding.GetEncoding().GetByteCount(Name);
if (nameByteCount > 100)
{
// Set mock filename and filetype to indicate the next block is the actual name of the file
@@ -46,7 +46,7 @@ internal sealed class TarHeader
}
else
{
WriteStringBytes(ArchiveEncoding.Encode(Name.NotNull("Name is null")), buffer, 100);
WriteStringBytes(ArchiveEncoding.Encode(Name), buffer, 100);
WriteOctalBytes(Size, buffer, 124, 12);
var time = (long)(LastModifiedTime.ToUniversalTime() - EPOCH).TotalSeconds;
WriteOctalBytes(time, buffer, 136, 12);
@@ -77,7 +77,7 @@ internal sealed class TarHeader
//
// and then infinite recursion is occured in WriteLongFilenameHeader because truncated.Length is 102.
Name = ArchiveEncoding.Decode(
ArchiveEncoding.Encode(Name.NotNull("Name is null")),
ArchiveEncoding.Encode(Name),
0,
100 - ArchiveEncoding.GetEncoding().GetMaxByteCount(1)
);
@@ -87,7 +87,7 @@ internal sealed class TarHeader
private void WriteLongFilenameHeader(Stream output)
{
var nameBytes = ArchiveEncoding.Encode(Name.NotNull("Name is null"));
var nameBytes = ArchiveEncoding.Encode(Name);
output.Write(nameBytes, 0, nameBytes.Length);
// pad to multiple of BlockSize bytes, and make sure a terminating null is added
@@ -101,85 +101,57 @@ internal sealed class TarHeader
internal bool Read(BinaryReader reader)
{
string? longName = null;
string? longLinkName = null;
var hasLongValue = true;
byte[] buffer;
EntryType entryType;
do
{
buffer = ReadBlock(reader);
if (buffer.Length == 0)
{
return false;
}
entryType = ReadEntryType(buffer);
// LongName and LongLink headers can follow each other and need
// to apply to the header that follows them.
if (entryType == EntryType.LongName)
{
longName = ReadLongName(reader, buffer);
continue;
}
else if (entryType == EntryType.LongLink)
{
longLinkName = ReadLongName(reader, buffer);
continue;
}
hasLongValue = false;
} while (hasLongValue);
// Check header checksum
if (!checkChecksum(buffer))
var buffer = ReadBlock(reader);
if (buffer.Length == 0)
{
return false;
}
Name = longName ?? ArchiveEncoding.Decode(buffer, 0, 100).TrimNulls();
EntryType = entryType;
Size = ReadSize(buffer);
// for symlinks, additionally read the linkname
if (entryType == EntryType.SymLink || entryType == EntryType.HardLink)
if (ReadEntryType(buffer) == EntryType.SymLink)
{
LinkName = longLinkName ?? ArchiveEncoding.Decode(buffer, 157, 100).TrimNulls();
LinkName = ArchiveEncoding.Decode(buffer, 157, 100).TrimNulls();
}
Mode = ReadAsciiInt64Base8(buffer, 100, 7);
if (ReadEntryType(buffer) == EntryType.LongName)
{
Name = ReadLongName(reader, buffer);
buffer = ReadBlock(reader);
}
else
{
Name = ArchiveEncoding.Decode(buffer, 0, 100).TrimNulls();
}
if (entryType == EntryType.Directory)
EntryType = ReadEntryType(buffer);
Size = ReadSize(buffer);
Mode = ReadAsciiInt64Base8(buffer, 100, 7);
if (EntryType == EntryType.Directory)
{
Mode |= 0b1_000_000_000;
}
UserId = ReadAsciiInt64Base8oldGnu(buffer, 108, 7);
GroupId = ReadAsciiInt64Base8oldGnu(buffer, 116, 7);
var unixTimeStamp = ReadAsciiInt64Base8(buffer, 136, 11);
LastModifiedTime = EPOCH.AddSeconds(unixTimeStamp).ToLocalTime();
Magic = ArchiveEncoding.Decode(buffer, 257, 6).TrimNulls();
if (!string.IsNullOrEmpty(Magic) && "ustar".Equals(Magic))
{
var namePrefix = ArchiveEncoding.Decode(buffer, 345, 157).TrimNulls();
var namePrefix = ArchiveEncoding.Decode(buffer, 345, 157);
namePrefix = namePrefix.TrimNulls();
if (!string.IsNullOrEmpty(namePrefix))
{
Name = namePrefix + "/" + Name;
}
}
if (entryType != EntryType.LongName && Name.Length == 0)
if (EntryType != EntryType.LongName && Name.Length == 0)
{
return false;
}
return true;
}
@@ -314,45 +286,9 @@ internal sealed class TarHeader
(byte)' ',
(byte)' ',
(byte)' ',
(byte)' ',
(byte)' '
};
internal static bool checkChecksum(byte[] buf)
{
const int eightSpacesChksum = 256;
var buffer = new Span<byte>(buf).Slice(0, 512);
int posix_sum = eightSpacesChksum;
int sun_sum = eightSpacesChksum;
foreach (byte b in buffer)
{
posix_sum += b;
sun_sum += unchecked((sbyte)b);
}
// Special case, empty file header
if (posix_sum == eightSpacesChksum)
{
return true;
}
// Remove current checksum from calculation
foreach (byte b in buffer.Slice(148, 8))
{
posix_sum -= b;
sun_sum -= unchecked((sbyte)b);
}
// Read and compare checksum for header
var crc = ReadAsciiInt64Base8(buf, 148, 7);
if (crc != posix_sum && crc != sun_sum)
{
return false;
}
return true;
}
internal static int RecalculateChecksum(byte[] buf)
{
// Set default value for checksum. That is 8 spaces.
@@ -387,5 +323,5 @@ internal sealed class TarHeader
public long? DataStartPosition { get; set; }
public string? Magic { get; set; }
public string Magic { get; set; }
}

View File

@@ -1,3 +1,5 @@
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
@@ -8,9 +10,9 @@ namespace SharpCompress.Common.Tar;
public class TarEntry : Entry
{
private readonly TarFilePart? _filePart;
private readonly TarFilePart _filePart;
internal TarEntry(TarFilePart? filePart, CompressionType type)
internal TarEntry(TarFilePart filePart, CompressionType type)
{
_filePart = filePart;
CompressionType = type;
@@ -20,15 +22,15 @@ public class TarEntry : Entry
public override long Crc => 0;
public override string? Key => _filePart?.Header.Name;
public override string Key => _filePart.Header.Name;
public override string? LinkTarget => _filePart?.Header.LinkName;
public override string LinkTarget => _filePart.Header.LinkName;
public override long CompressedSize => _filePart?.Header.Size ?? 0;
public override long CompressedSize => _filePart.Header.Size;
public override long Size => _filePart?.Header.Size ?? 0;
public override long Size => _filePart.Header.Size;
public override DateTime? LastModifiedTime => _filePart?.Header.LastModifiedTime;
public override DateTime? LastModifiedTime => _filePart.Header.LastModifiedTime;
public override DateTime? CreatedTime => null;
@@ -38,17 +40,17 @@ public class TarEntry : Entry
public override bool IsEncrypted => false;
public override bool IsDirectory => _filePart?.Header.EntryType == EntryType.Directory;
public override bool IsDirectory => _filePart.Header.EntryType == EntryType.Directory;
public override bool IsSplitAfter => false;
public long Mode => _filePart?.Header.Mode ?? 0;
public long Mode => _filePart.Header.Mode;
public long UserID => _filePart?.Header.UserId ?? 0;
public long UserID => _filePart.Header.UserId;
public long GroupId => _filePart?.Header.GroupId ?? 0;
public long GroupId => _filePart.Header.GroupId;
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
internal static IEnumerable<TarEntry> GetEntries(
StreamingMode mode,
@@ -57,17 +59,17 @@ public class TarEntry : Entry
ArchiveEncoding archiveEncoding
)
{
foreach (var header in TarHeaderFactory.ReadHeader(mode, stream, archiveEncoding))
foreach (var h in TarHeaderFactory.ReadHeader(mode, stream, archiveEncoding))
{
if (header != null)
if (h != null)
{
if (mode == StreamingMode.Seekable)
{
yield return new TarEntry(new TarFilePart(header, stream), compressionType);
yield return new TarEntry(new TarFilePart(h, stream), compressionType);
}
else
{
yield return new TarEntry(new TarFilePart(header, null), compressionType);
yield return new TarEntry(new TarFilePart(h, null), compressionType);
}
}
else

View File

@@ -5,9 +5,9 @@ namespace SharpCompress.Common.Tar;
internal sealed class TarFilePart : FilePart
{
private readonly Stream? _seekableStream;
private readonly Stream _seekableStream;
internal TarFilePart(TarHeader header, Stream? seekableStream)
internal TarFilePart(TarHeader header, Stream seekableStream)
: base(header.ArchiveEncoding)
{
_seekableStream = seekableStream;
@@ -16,16 +16,16 @@ internal sealed class TarFilePart : FilePart
internal TarHeader Header { get; }
internal override string? FilePartName => Header?.Name;
internal override string FilePartName => Header.Name;
internal override Stream GetCompressedStream()
{
if (_seekableStream != null)
{
_seekableStream.Position = Header.DataStartPosition ?? 0;
_seekableStream.Position = Header.DataStartPosition!.Value;
return new TarReadOnlySubStream(_seekableStream, Header.Size);
}
return Header.PackedStream.NotNull();
return Header.PackedStream;
}
internal override Stream? GetRawStream() => null;

View File

@@ -28,6 +28,7 @@ internal static class TarHeaderFactory
switch (mode)
{
case StreamingMode.Seekable:
{
header.DataStartPosition = reader.BaseStream.Position;
@@ -36,6 +37,7 @@ internal static class TarHeaderFactory
}
break;
case StreamingMode.Streaming:
{
header.PackedStream = new TarReadOnlySubStream(stream, header.Size);
}

View File

@@ -9,11 +9,11 @@ public abstract class Volume : IVolume
{
private readonly Stream _actualStream;
internal Volume(Stream stream, ReaderOptions? readerOptions, int index = 0)
internal Volume(Stream stream, ReaderOptions readerOptions, int index = 0)
{
Index = index;
ReaderOptions = readerOptions ?? new ReaderOptions();
if (ReaderOptions.LeaveStreamOpen)
ReaderOptions = readerOptions;
if (readerOptions.LeaveStreamOpen)
{
stream = NonDisposingStream.Create(stream);
}
@@ -32,7 +32,7 @@ public abstract class Volume : IVolume
public virtual int Index { get; internal set; }
public string? FileName => (_actualStream as FileStream)?.Name;
public string FileName => (_actualStream as FileStream)?.Name!;
/// <summary>
/// RarArchive is part of a multi-part archive.

View File

@@ -13,5 +13,5 @@ internal enum HeaderFlags : ushort
EnhancedDeflate = 16,
//Bit 11: Language encoding flag
Efs = 2048,
Efs = 2048
}

View File

@@ -14,7 +14,7 @@ internal enum ExtraDataType : ushort
// -Info-ZIP Unicode Path Extra Field
UnicodePathExtraField = 0x7075,
Zip64ExtendedInformationExtraField = 0x0001,
UnixTimeExtraField = 0x5455,
UnixTimeExtraField = 0x5455
}
internal class ExtraData
@@ -166,10 +166,10 @@ internal sealed class UnixTimeExtraField : ExtraData
return Tuple.Create<DateTime?, DateTime?, DateTime?>(null, null, null);
}
var flags = (RecordedTimeFlag)DataBytes[0];
var isModifiedTimeSpecified = flags.HasFlag(RecordedTimeFlag.LastModified);
var isLastAccessTimeSpecified = flags.HasFlag(RecordedTimeFlag.LastAccessed);
var isCreationTimeSpecified = flags.HasFlag(RecordedTimeFlag.Created);
var flags = DataBytes[0];
var isModifiedTimeSpecified = (flags & 0x01) == 1;
var isLastAccessTimeSpecified = (flags & 0x02) == 1;
var isCreationTimeSpecified = (flags & 0x04) == 1;
var currentIndex = 1;
DateTime? modifiedTime = null;
DateTime? lastAccessTime = null;
@@ -189,7 +189,7 @@ internal sealed class UnixTimeExtraField : ExtraData
{
if (currentIndex + 4 > DataBytes.Length)
{
return Tuple.Create<DateTime?, DateTime?, DateTime?>(null, null, null);
throw new ArchiveException("Invalid UnicodeExtraTime field");
}
var lastAccessEpochTime = BinaryPrimitives.ReadInt32LittleEndian(
@@ -206,7 +206,7 @@ internal sealed class UnixTimeExtraField : ExtraData
{
if (currentIndex + 4 > DataBytes.Length)
{
return Tuple.Create<DateTime?, DateTime?, DateTime?>(null, null, null);
throw new ArchiveException("Invalid UnicodeExtraTime field");
}
var creationTimeEpochTime = BinaryPrimitives.ReadInt32LittleEndian(
@@ -222,15 +222,6 @@ internal sealed class UnixTimeExtraField : ExtraData
return Tuple.Create(modifiedTime, lastAccessTime, creationTime);
}
}
[Flags]
private enum RecordedTimeFlag
{
None = 0,
LastModified = 1,
LastAccessed = 2,
Created = 4,
}
}
internal static class LocalEntryHeaderExtraFactory
@@ -238,14 +229,11 @@ internal static class LocalEntryHeaderExtraFactory
internal static ExtraData Create(ExtraDataType type, ushort length, byte[] extraData) =>
type switch
{
ExtraDataType.UnicodePathExtraField => new ExtraUnicodePathExtraField(
type,
length,
extraData
),
ExtraDataType.Zip64ExtendedInformationExtraField =>
new Zip64ExtendedInformationExtraField(type, length, extraData),
ExtraDataType.UnicodePathExtraField
=> new ExtraUnicodePathExtraField(type, length, extraData),
ExtraDataType.Zip64ExtendedInformationExtraField
=> new Zip64ExtendedInformationExtraField(type, length, extraData),
ExtraDataType.UnixTimeExtraField => new UnixTimeExtraField(type, length, extraData),
_ => new ExtraData(type, length, extraData),
_ => new ExtraData(type, length, extraData)
};
}

View File

@@ -1,3 +1,5 @@
#nullable disable
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
@@ -18,21 +20,21 @@ internal abstract class ZipFileEntry : ZipHeader
{
get
{
if (Name?.EndsWith('/') ?? false)
if (Name.EndsWith('/'))
{
return true;
}
//.NET Framework 4.5 : System.IO.Compression::CreateFromDirectory() probably writes backslashes to headers
return CompressedSize == 0 && UncompressedSize == 0 && (Name?.EndsWith('\\') ?? false);
return CompressedSize == 0 && UncompressedSize == 0 && Name.EndsWith('\\');
}
}
internal Stream? PackedStream { get; set; }
internal Stream PackedStream { get; set; }
internal ArchiveEncoding ArchiveEncoding { get; }
internal string? Name { get; set; }
internal string Name { get; set; }
internal HeaderFlags Flags { get; set; }
@@ -46,7 +48,7 @@ internal abstract class ZipFileEntry : ZipHeader
internal List<ExtraData> Extra { get; set; }
public string? Password { get; set; }
public string Password { get; set; }
internal PkwareTraditionalEncryptionData ComposeEncryptionData(Stream archiveStream)
{
@@ -63,7 +65,7 @@ internal abstract class ZipFileEntry : ZipHeader
return encryptionData;
}
internal WinzipAesEncryptionData? WinzipAesEncryptionData { get; set; }
internal WinzipAesEncryptionData WinzipAesEncryptionData { get; set; }
/// <summary>
/// The last modified date as read from the Local or Central Directory header.
@@ -117,7 +119,7 @@ internal abstract class ZipFileEntry : ZipHeader
}
}
internal ZipFilePart? Part { get; set; }
internal ZipFilePart Part { get; set; }
internal bool IsZip64 => CompressedSize >= uint.MaxValue;
}

View File

@@ -8,5 +8,5 @@ internal enum ZipHeaderType
DirectoryEnd,
Split,
Zip64DirectoryEnd,
Zip64DirectoryEndLocator,
Zip64DirectoryEndLocator
}

View File

@@ -6,7 +6,7 @@ namespace SharpCompress.Common.Zip;
internal enum CryptoMode
{
Encrypt,
Decrypt,
Decrypt
}
internal class PkwareTraditionalCryptoStream : Stream

View File

@@ -103,7 +103,7 @@ internal class PkwareTraditionalEncryptionData
internal byte[] StringToByteArray(string value)
{
var a = _archiveEncoding.GetPasswordEncoding().GetBytes(value);
var a = _archiveEncoding.Password.GetBytes(value);
return a;
}

View File

@@ -42,16 +42,16 @@ internal class SeekableZipFilePart : ZipFilePart
protected override Stream CreateBaseStream()
{
BaseStream.Position = Header.DataStartPosition.NotNull();
BaseStream.Position = Header.DataStartPosition!.Value;
if (
(Header.CompressedSize == 0)
&& FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor)
&& _directoryEntryHeader.HasData
&& (_directoryEntryHeader.CompressedSize != 0)
&& (_directoryEntryHeader?.HasData == true)
&& (_directoryEntryHeader?.CompressedSize != 0)
)
{
return new ReadOnlySubStream(BaseStream, _directoryEntryHeader.CompressedSize);
return new ReadOnlySubStream(BaseStream, _directoryEntryHeader!.CompressedSize);
}
return BaseStream;

View File

@@ -13,7 +13,7 @@ internal sealed class StreamingZipFilePart : ZipFilePart
internal StreamingZipFilePart(ZipFileEntry header, Stream stream)
: base(header, stream) { }
protected override Stream CreateBaseStream() => Header.PackedStream.NotNull();
protected override Stream CreateBaseStream() => Header.PackedStream;
internal override Stream GetCompressedStream()
{

View File

@@ -36,19 +36,16 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
uint headerBytes = 0;
if (
_lastEntryHeader != null
&& FlagUtility.HasFlag(_lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor)
&& (
FlagUtility.HasFlag(_lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor)
|| _lastEntryHeader.IsZip64
)
)
{
if (_lastEntryHeader.Part is null)
{
continue;
}
reader = ((StreamingZipFilePart)_lastEntryHeader.Part).FixStreamedFileLocation(
ref rewindableStream
);
var pos = rewindableStream.CanSeek ? (long?)rewindableStream.Position : null;
var crc = reader.ReadUInt32();
if (crc == POST_DATA_DESCRIPTOR)
{
@@ -81,60 +78,6 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
_lastEntryHeader.DataStartPosition = pos - _lastEntryHeader.CompressedSize;
}
}
else if (_lastEntryHeader != null && _lastEntryHeader.IsZip64)
{
if (_lastEntryHeader.Part is null)
continue;
reader = ((StreamingZipFilePart)_lastEntryHeader.Part).FixStreamedFileLocation(
ref rewindableStream
);
var pos = rewindableStream.CanSeek ? (long?)rewindableStream.Position : null;
headerBytes = reader.ReadUInt32();
var version = reader.ReadUInt16();
var flags = (HeaderFlags)reader.ReadUInt16();
var compressionMethod = (ZipCompressionMethod)reader.ReadUInt16();
var lastModifiedDate = reader.ReadUInt16();
var lastModifiedTime = reader.ReadUInt16();
var crc = reader.ReadUInt32();
if (crc == POST_DATA_DESCRIPTOR)
{
crc = reader.ReadUInt32();
}
_lastEntryHeader.Crc = crc;
// The DataDescriptor can be either 64bit or 32bit
var compressed_size = reader.ReadUInt32();
var uncompressed_size = reader.ReadUInt32();
// Check if we have header or 64bit DataDescriptor
var test_header = !(headerBytes == 0x04034b50 || headerBytes == 0x02014b50);
var test_64bit = ((long)uncompressed_size << 32) | compressed_size;
if (test_64bit == _lastEntryHeader.CompressedSize && test_header)
{
_lastEntryHeader.UncompressedSize =
((long)reader.ReadUInt32() << 32) | headerBytes;
headerBytes = reader.ReadUInt32();
}
else
{
_lastEntryHeader.UncompressedSize = uncompressed_size;
}
if (pos.HasValue)
{
_lastEntryHeader.DataStartPosition = pos - _lastEntryHeader.CompressedSize;
// 4 = First 4 bytes of the entry header (i.e. 50 4B 03 04)
rewindableStream.Position = pos.Value + 4;
}
}
else
{
headerBytes = reader.ReadUInt32();

View File

@@ -1,3 +1,5 @@
#nullable disable
using System;
using System.Buffers.Binary;
using System.Security.Cryptography;
@@ -8,7 +10,12 @@ internal class WinzipAesEncryptionData
{
private const int RFC2898_ITERATIONS = 1000;
private readonly byte[] _salt;
private readonly WinzipAesKeySize _keySize;
private readonly byte[] _passwordVerifyValue;
private readonly string _password;
private byte[] _generatedVerifyValue;
internal WinzipAesEncryptionData(
WinzipAesKeySize keySize,
@@ -18,28 +25,10 @@ internal class WinzipAesEncryptionData
)
{
_keySize = keySize;
#if NETFRAMEWORK || NETSTANDARD2_0
var rfc2898 = new Rfc2898DeriveBytes(password, salt, RFC2898_ITERATIONS);
#else
var rfc2898 = new Rfc2898DeriveBytes(
password,
salt,
RFC2898_ITERATIONS,
HashAlgorithmName.SHA1
);
#endif
KeyBytes = rfc2898.GetBytes(KeySizeInBytes); // 16 or 24 or 32 ???
IvBytes = rfc2898.GetBytes(KeySizeInBytes);
var generatedVerifyValue = rfc2898.GetBytes(2);
var verify = BinaryPrimitives.ReadInt16LittleEndian(passwordVerifyValue);
var generated = BinaryPrimitives.ReadInt16LittleEndian(generatedVerifyValue);
if (verify != generated)
{
throw new InvalidFormatException("bad password");
}
_salt = salt;
_passwordVerifyValue = passwordVerifyValue;
_password = password;
Initialize();
}
internal byte[] IvBytes { get; set; }
@@ -56,4 +45,32 @@ internal class WinzipAesEncryptionData
WinzipAesKeySize.KeySize256 => 32,
_ => throw new InvalidOperationException(),
};
private void Initialize()
{
#if NETFRAMEWORK || NETSTANDARD2_0
var rfc2898 = new Rfc2898DeriveBytes(_password, _salt, RFC2898_ITERATIONS);
#else
var rfc2898 = new Rfc2898DeriveBytes(
_password,
_salt,
RFC2898_ITERATIONS,
HashAlgorithmName.SHA1
);
#endif
KeyBytes = rfc2898.GetBytes(KeySizeInBytes); // 16 or 24 or 32 ???
IvBytes = rfc2898.GetBytes(KeySizeInBytes);
_generatedVerifyValue = rfc2898.GetBytes(2);
var verify = BinaryPrimitives.ReadInt16LittleEndian(_passwordVerifyValue);
if (_password != null)
{
var generated = BinaryPrimitives.ReadInt16LittleEndian(_generatedVerifyValue);
if (verify != generated)
{
throw new InvalidFormatException("bad password");
}
}
}
}

View File

@@ -4,5 +4,5 @@ internal enum WinzipAesKeySize
{
KeySize128 = 1,
KeySize192 = 2,
KeySize256 = 3,
KeySize256 = 3
}

View File

@@ -4,11 +4,6 @@ internal enum ZipCompressionMethod
{
None = 0,
Shrink = 1,
Reduce1 = 2,
Reduce2 = 3,
Reduce3 = 4,
Reduce4 = 5,
Explode = 6,
Deflate = 8,
Deflate64 = 9,
BZip2 = 12,
@@ -16,5 +11,5 @@ internal enum ZipCompressionMethod
ZStd = 93,
Xz = 95,
PPMd = 98,
WinzipAes = 0x63, //http://www.winzip.com/aes_info.htm
WinzipAes = 0x63 //http://www.winzip.com/aes_info.htm
}

View File

@@ -1,38 +1,29 @@
#nullable disable
using System;
using System.Collections.Generic;
using System.Linq;
using SharpCompress.Common.Zip.Headers;
namespace SharpCompress.Common.Zip;
public class ZipEntry : Entry
{
private readonly ZipFilePart? _filePart;
private readonly ZipFilePart _filePart;
internal ZipEntry(ZipFilePart? filePart)
internal ZipEntry(ZipFilePart filePart)
{
if (filePart == null)
if (filePart != null)
{
return;
_filePart = filePart;
LastModifiedTime = Utility.DosDateToDateTime(
filePart.Header.LastModifiedDate,
filePart.Header.LastModifiedTime
);
}
_filePart = filePart;
LastModifiedTime = Utility.DosDateToDateTime(
filePart.Header.LastModifiedDate,
filePart.Header.LastModifiedTime
);
var times =
filePart.Header.Extra.FirstOrDefault(header =>
header.GetType() == typeof(UnixTimeExtraField)
) as UnixTimeExtraField;
LastAccessedTime = times?.UnicodeTimes.Item2;
CreatedTime = times?.UnicodeTimes.Item3;
}
public override CompressionType CompressionType =>
_filePart?.Header.CompressionMethod switch
_filePart.Header.CompressionMethod switch
{
ZipCompressionMethod.BZip2 => CompressionType.BZip2,
ZipCompressionMethod.Deflate => CompressionType.Deflate,
@@ -41,46 +32,33 @@ public class ZipEntry : Entry
ZipCompressionMethod.PPMd => CompressionType.PPMd,
ZipCompressionMethod.None => CompressionType.None,
ZipCompressionMethod.Shrink => CompressionType.Shrink,
ZipCompressionMethod.Reduce1 => CompressionType.Reduce1,
ZipCompressionMethod.Reduce2 => CompressionType.Reduce2,
ZipCompressionMethod.Reduce3 => CompressionType.Reduce3,
ZipCompressionMethod.Reduce4 => CompressionType.Reduce4,
ZipCompressionMethod.Explode => CompressionType.Explode,
_ => CompressionType.Unknown,
_ => CompressionType.Unknown
};
public override long Crc => _filePart?.Header.Crc ?? 0;
public override long Crc => _filePart.Header.Crc;
public override string? Key => _filePart?.Header.Name;
public override string Key => _filePart.Header.Name;
public override string? LinkTarget => null;
public override string LinkTarget => null;
public override long CompressedSize => _filePart?.Header.CompressedSize ?? 0;
public override long CompressedSize => _filePart.Header.CompressedSize;
public override long Size => _filePart?.Header.UncompressedSize ?? 0;
public override long Size => _filePart.Header.UncompressedSize;
public override DateTime? LastModifiedTime { get; }
/// <inheritdoc/>
/// <remarks>
/// The returned time is UTC, not local.
/// </remarks>
public override DateTime? CreatedTime { get; }
public override DateTime? CreatedTime => null;
/// <inheritdoc/>
/// <remarks>
/// The returned time is UTC, not local.
/// </remarks>
public override DateTime? LastAccessedTime { get; }
public override DateTime? LastAccessedTime => null;
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted =>
FlagUtility.HasFlag(_filePart?.Header.Flags ?? HeaderFlags.None, HeaderFlags.Encrypted);
FlagUtility.HasFlag(_filePart.Header.Flags, HeaderFlags.Encrypted);
public override bool IsDirectory => _filePart?.Header.IsDirectory ?? false;
public override bool IsDirectory => _filePart.Header.IsDirectory;
public override bool IsSplitAfter => false;
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
}

View File

@@ -7,10 +7,8 @@ using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Compressors.Deflate64;
using SharpCompress.Compressors.Explode;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.PPMd;
using SharpCompress.Compressors.Reduce;
using SharpCompress.Compressors.Shrink;
using SharpCompress.Compressors.Xz;
using SharpCompress.IO;
@@ -31,7 +29,7 @@ internal abstract class ZipFilePart : FilePart
internal Stream BaseStream { get; }
internal ZipFileEntry Header { get; set; }
internal override string? FilePartName => Header.Name;
internal override string FilePartName => Header.Name;
internal override Stream GetCompressedStream()
{
@@ -91,32 +89,6 @@ internal abstract class ZipFilePart : FilePart
Header.UncompressedSize
);
}
case ZipCompressionMethod.Reduce1:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 1);
}
case ZipCompressionMethod.Reduce2:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 2);
}
case ZipCompressionMethod.Reduce3:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 3);
}
case ZipCompressionMethod.Reduce4:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 4);
}
case ZipCompressionMethod.Explode:
{
return new ExplodeStream(
stream,
Header.CompressedSize,
Header.UncompressedSize,
Header.Flags
);
}
case ZipCompressionMethod.Deflate:
{
return new DeflateStream(stream, CompressionMode.Decompress);
@@ -231,10 +203,6 @@ internal abstract class ZipFilePart : FilePart
{
case ZipCompressionMethod.None:
case ZipCompressionMethod.Shrink:
case ZipCompressionMethod.Reduce1:
case ZipCompressionMethod.Reduce2:
case ZipCompressionMethod.Reduce3:
case ZipCompressionMethod.Reduce4:
case ZipCompressionMethod.Deflate:
case ZipCompressionMethod.Deflate64:
case ZipCompressionMethod.BZip2:

View File

@@ -55,13 +55,7 @@ internal class ZipHeaderFactory
}
case POST_DATA_DESCRIPTOR:
{
if (
_lastEntryHeader != null
&& FlagUtility.HasFlag(
_lastEntryHeader.NotNull().Flags,
HeaderFlags.UsePostDataDescriptor
)
)
if (FlagUtility.HasFlag(_lastEntryHeader!.Flags, HeaderFlags.UsePostDataDescriptor))
{
_lastEntryHeader.Crc = reader.ReadUInt32();
_lastEntryHeader.CompressedSize = zip64

View File

@@ -555,6 +555,6 @@ internal class BZip2Constants
858,
364,
936,
638,
638
};
}

View File

@@ -69,7 +69,7 @@ public sealed class BZip2Stream : Stream
public override void SetLength(long value) => stream.SetLength(value);
#if !NETFRAMEWORK&& !NETSTANDARD2_0
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override int Read(Span<byte> buffer) => stream.Read(buffer);

View File

@@ -1,4 +1,4 @@
#nullable disable
#nullable disable
using System;
using System.IO;
@@ -42,17 +42,14 @@ internal class CBZip2InputStream : Stream
private static void Cadvise()
{
//System.out.Println("CRC Error");
throw new InvalidOperationException("BZip2 error");
//throw new CCoruptionError();
}
private static void BadBGLengths() => Cadvise();
private static void BitStreamEOF() => Cadvise();
private static void CompressedStreamEOF()
{
throw new InvalidOperationException("BZip2 compressed file ends unexpectedly");
}
private static void CompressedStreamEOF() => Cadvise();
private void MakeMaps()
{

View File

@@ -1829,7 +1829,7 @@ internal sealed class CBZip2OutputStream : Stream
88573,
265720,
797161,
2391484,
2391484
};
private void AllocateCompressStructures()

View File

@@ -288,7 +288,7 @@ internal class CRC
unchecked((int)0xbcb4666d),
unchecked((int)0xb8757bda),
unchecked((int)0xb5365d03),
unchecked((int)0xb1f740b4),
unchecked((int)0xb1f740b4)
};
public CRC() => InitialiseCRC();

View File

@@ -3,5 +3,5 @@ namespace SharpCompress.Compressors;
public enum CompressionMode
{
Compress = 0,
Decompress = 1,
Decompress = 1
}

View File

@@ -106,7 +106,7 @@ internal sealed partial class DeflateManager
5,
5,
5,
0,
0
};
// extra bits for each distance code
@@ -141,7 +141,7 @@ internal sealed partial class DeflateManager
12,
12,
13,
13,
13
};
internal enum BlockState
@@ -149,14 +149,14 @@ internal sealed partial class DeflateManager
NeedMore = 0, // block not completed, need more input or more output
BlockDone, // block flush performed
FinishStarted, // finish started, need only more output at next deflate
FinishDone, // finish done, accept no more input or output
FinishDone // finish done, accept no more input or output
}
internal enum DeflateFlavor
{
Store,
Fast,
Slow,
Slow
}
private const int MEM_LEVEL_MAX = 9;
@@ -214,7 +214,7 @@ internal sealed partial class DeflateManager
new Config(8, 16, 128, 128, DeflateFlavor.Slow),
new Config(8, 32, 128, 256, DeflateFlavor.Slow),
new Config(32, 128, 258, 1024, DeflateFlavor.Slow),
new Config(32, 258, 258, 4096, DeflateFlavor.Slow),
new Config(32, 258, 258, 4096, DeflateFlavor.Slow)
};
private static readonly Config[] Table;
@@ -233,7 +233,7 @@ internal sealed partial class DeflateManager
"insufficient memory",
"buffer error",
"incompatible version",
"",
""
};
// preset dictionary flag in zlib header
@@ -1793,7 +1793,7 @@ internal sealed partial class DeflateManager
DeflateFlavor.Store => DeflateNone,
DeflateFlavor.Fast => DeflateFast,
DeflateFlavor.Slow => DeflateSlow,
_ => DeflateFunction,
_ => DeflateFunction
};
internal int SetParams(CompressionLevel level, CompressionStrategy strategy)

View File

@@ -39,5 +39,5 @@ public enum FlushType
Full,
/// <summary>Signals the end of the compression/decompression stream.</summary>
Finish,
Finish
}

View File

@@ -1615,7 +1615,7 @@ internal sealed class InfTree
79,
0,
9,
255,
255
};
//UPGRADE_NOTE: Final was removed from the declaration of 'fixed_td'. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
@@ -1716,7 +1716,7 @@ internal sealed class InfTree
193,
192,
5,
24577,
24577
};
// Tables for deflate from PKZIP's appnote.txt.
@@ -1753,7 +1753,7 @@ internal sealed class InfTree
227,
258,
0,
0,
0
};
// see note #13 above about 258
@@ -1790,7 +1790,7 @@ internal sealed class InfTree
5,
0,
112,
112,
112
};
//UPGRADE_NOTE: Final was removed from the declaration of 'cpdist'. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
@@ -1825,7 +1825,7 @@ internal sealed class InfTree
8193,
12289,
16385,
24577,
24577
};
//UPGRADE_NOTE: Final was removed from the declaration of 'cpdext'. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
@@ -1860,7 +1860,7 @@ internal sealed class InfTree
12,
12,
13,
13,
13
};
// If BMAX needs to be larger than 16, then h and x[] should be uLong.

View File

@@ -93,7 +93,7 @@ internal sealed class InflateBlocks
2,
14,
1,
15,
15
};
internal ZlibCodec _codec; // pointer back to this zlib stream
@@ -815,7 +815,7 @@ internal sealed class InflateBlocks
CODES = 6, // processing fixed or dynamic block
DRY = 7, // output remaining window bytes
DONE = 8, // finished last block, done
BAD = 9, // ot a data error--stuck here
BAD = 9 // ot a data error--stuck here
}
#endregion
@@ -842,7 +842,7 @@ internal static class InternalInflateConstants
0x00001fff,
0x00003fff,
0x00007fff,
0x0000ffff,
0x0000ffff
};
}
@@ -2054,7 +2054,7 @@ internal sealed class InflateManager
CHECK2 = 10, // two check bytes to go
CHECK1 = 11, // one check byte to go
DONE = 12, // finished check, done
BAD = 13, // got an error--stay here
BAD = 13 // got an error--stay here
}
#endregion

View File

@@ -95,7 +95,7 @@ internal sealed partial class DeflateManager
2,
14,
1,
15,
15
};
// The lengths of the bit length codes are sent in order of decreasing
@@ -618,7 +618,7 @@ internal sealed partial class DeflateManager
29,
29,
29,
29,
29
};
internal static readonly sbyte[] LengthCode =
@@ -878,7 +878,7 @@ internal sealed partial class DeflateManager
27,
27,
27,
28,
28
};
internal static readonly int[] LengthBase =
@@ -911,7 +911,7 @@ internal sealed partial class DeflateManager
160,
192,
224,
0,
0
};
internal static readonly int[] DistanceBase =
@@ -945,7 +945,7 @@ internal sealed partial class DeflateManager
8192,
12288,
16384,
24576,
24576
};
internal short[] dyn_tree; // the dynamic tree

View File

@@ -143,7 +143,7 @@ public enum CompressionLevel
/// <summary>
/// A synonym for BestCompression.
/// </summary>
Level9 = BestCompression,
Level9 = BestCompression
}
/// <summary>
@@ -171,7 +171,7 @@ public enum CompressionStrategy
/// Using <c>HuffmanOnly</c> will force the compressor to do Huffman encoding only, with no
/// string matching.
/// </summary>
HuffmanOnly = 2,
HuffmanOnly = 2
}
/// <summary>
@@ -859,7 +859,7 @@ internal sealed class StaticTree
99,
8,
227,
8,
8
};
internal static readonly short[] distTreeCodes =
@@ -923,7 +923,7 @@ internal sealed class StaticTree
7,
5,
23,
5,
5
};
// extra bits for each bit length code
@@ -947,7 +947,7 @@ internal sealed class StaticTree
0,
2,
3,
7,
7
};
internal static readonly StaticTree Literals;

View File

@@ -39,7 +39,7 @@ internal enum ZlibStreamFlavor
{
ZLIB = 1950,
DEFLATE = 1951,
GZIP = 1952,
GZIP = 1952
}
internal class ZlibBaseStream : Stream
@@ -655,6 +655,6 @@ internal class ZlibBaseStream : Stream
{
Writer,
Reader,
Undefined,
Undefined
}
}

View File

@@ -8,5 +8,5 @@ internal enum BlockType
{
Uncompressed = 0,
Static = 1,
Dynamic = 2,
Dynamic = 2
}

View File

@@ -111,7 +111,7 @@ internal static class FastEncoderStatics
0x7e,
0x7c,
0x1f,
0x3f,
0x3f
};
internal static ReadOnlySpan<byte> B_FINAL_FAST_ENCODER_TREE_STRUCTURE_DATA =>
@@ -214,7 +214,7 @@ internal static class FastEncoderStatics
0x7e,
0x7c,
0x1f,
0x3f,
0x3f
};
// Output a currentMatch with length matchLen (>= MIN_MATCH) and displacement matchPos
@@ -762,7 +762,7 @@ internal static class FastEncoderStatics
0x0039e7f1,
0x003be7f1,
0x003de7f1,
0x000047eb,
0x000047eb
};
internal static readonly uint[] FAST_ENCODER_DISTANCE_CODE_INFO =
@@ -798,7 +798,7 @@ internal static class FastEncoderStatics
0x000007d5,
0x000017d5,
0x00000000,
0x00000100,
0x00000100
};
internal static readonly uint[] BIT_MASK =
@@ -818,7 +818,7 @@ internal static class FastEncoderStatics
4095,
8191,
16383,
32767,
32767
};
internal static readonly byte[] EXTRA_LENGTH_BITS =
{
@@ -850,7 +850,7 @@ internal static class FastEncoderStatics
5,
5,
5,
0,
0
};
internal static readonly byte[] EXTRA_DISTANCE_BITS =
{
@@ -885,7 +885,7 @@ internal static class FastEncoderStatics
13,
13,
0,
0,
0
};
internal const int NUM_CHARS = 256;
internal const int NUM_LENGTH_BASE_CODES = 29;

View File

@@ -70,7 +70,7 @@ internal sealed class InflaterManaged
5,
5,
5,
16,
16
};
// The base length for length code 257 - 285.
@@ -105,7 +105,7 @@ internal sealed class InflaterManaged
163,
195,
227,
3,
3
};
// The base distance for distance code 0 - 31
@@ -143,7 +143,7 @@ internal sealed class InflaterManaged
16385,
24577,
32769,
49153,
49153
};
// code lengths for code length alphabet is stored in following order
@@ -184,7 +184,7 @@ internal sealed class InflaterManaged
0x07,
0x17,
0x0f,
0x1f,
0x1f
};
private readonly OutputWindow _output;

View File

@@ -37,5 +37,5 @@ internal enum InflaterState
ReadingFooter = 22,
VerifyingFooter = 23,
Done = 24, // Finished
Done = 24 // Finished
}

View File

@@ -8,5 +8,5 @@ internal enum MatchState
{
HasSymbol = 1,
HasMatch = 2,
HasSymbolAndMatch = 3,
HasSymbolAndMatch = 3
}

View File

@@ -1,746 +0,0 @@
using System;
using System.IO;
using SharpCompress.Common.Zip.Headers;
namespace SharpCompress.Compressors.Explode;
/// <summary>
/// Read-only decompression stream for the legacy PKZIP "implode" method
/// (decompression is historically called "explode"). Decodes Huffman-coded
/// literal/length/distance symbols against a sliding window of up to 64 KiB.
/// The general-purpose bit flags select the variant: Bit2 => a literal tree is
/// present (minimum match length 3); Bit1 => 8 KiB window (7 uncoded low
/// distance bits) instead of 4 KiB (6 bits).
/// </summary>
public class ExplodeStream : Stream
{
    // Sentinel stored in huftNode.NumberOfExtraBits for an invalid table entry.
    private const int INVALID_CODE = 99;

    // Sliding-window buffer size; must be a power of two (distance is masked
    // with WSIZE - 1 in Read()).
    private const int WSIZE = 64 * 1024;

    private readonly long unCompressedSize;
    private readonly int compressedSize;
    private readonly HeaderFlags generalPurposeBitFlag;
    private readonly Stream inStream;

    // Decode tables built by explode_SetTables(); the literal table is null
    // when the entry was compressed without a literal tree (Bit2 clear).
    private huftNode[]? hufLiteralCodeTable; /* literal code table */
    private huftNode[] hufLengthCodeTable = []; /* length code table */
    private huftNode[] hufDistanceCodeTable = []; /* distance code table */

    // Number of bits used to index the first level of each decode table.
    private int bitsForLiteralCodeTable;
    private int bitsForLengthCodeTable;
    private int bitsForDistanceCodeTable;
    private int numOfUncodedLowerDistanceBits; /* number of uncoded lower distance bits */

    // LSB-first bit buffer: bits are consumed from the low end (see NeedBits /
    // DumpBits), so bitBuffer holds bitBufferCount valid bits.
    private ulong bitBuffer;
    private int bitBufferCount;

    private readonly byte[] windowsBuffer; // sliding window of recent output
    private uint maskForLiteralCodeTable;
    private uint maskForLengthCodeTable;
    private uint maskForDistanceCodeTable;
    private uint maskForDistanceLowBits;
    private long outBytesCount; // total uncompressed bytes produced so far
    private int windowIndex; // next write position in windowsBuffer
    private int distance; // current copy source position / pending match state
    private int length; // remaining bytes of the current match (0 = none)

    /// <summary>
    /// Builds the Huffman decode tables from the head of <paramref name="inStr"/>
    /// and prepares decoding state. Note: explode_SetTables() reads from the
    /// stream immediately, so <paramref name="inStr"/> must be positioned at the
    /// start of the compressed data.
    /// </summary>
    internal ExplodeStream(
        Stream inStr,
        long compressedSize,
        long uncompressedSize,
        HeaderFlags generalPurposeBitFlag
    )
    {
        inStream = inStr;
        this.compressedSize = (int)compressedSize;
        unCompressedSize = (long)uncompressedSize;
        this.generalPurposeBitFlag = generalPurposeBitFlag;
        explode_SetTables();
        windowsBuffer = new byte[WSIZE];
        explode_var_init();
    }

    public override void Flush()
    {
        throw new NotImplementedException();
    }

    public override long Seek(long offset, SeekOrigin origin)
    {
        throw new NotImplementedException();
    }

    public override void SetLength(long value)
    {
        throw new NotImplementedException();
    }

    public override void Write(byte[] buffer, int offset, int count)
    {
        throw new NotImplementedException();
    }

    public override bool CanRead => true;
    public override bool CanSeek => false;
    public override bool CanWrite => false;
    public override long Length => unCompressedSize;

    // Position reports bytes decompressed; the setter is a deliberate no-op
    // because the stream cannot seek.
    public override long Position
    {
        get => outBytesCount;
        set { }
    }

    // mask_bits[n] has the n lowest bits set (n = 0..16); used to extract
    // n-bit fields from the bit buffer.
    static uint[] mask_bits = new uint[]
    {
        0x0000, 0x0001, 0x0003, 0x0007, 0x000f, 0x001f, 0x003f, 0x007f,
        0x00ff, 0x01ff, 0x03ff, 0x07ff, 0x0fff, 0x1fff, 0x3fff, 0x7fff,
        0xffff,
    };

    /* Tables for length and distance */

    // Match-length base values when there is no literal tree (minimum match 2).
    static int[] cplen2 = new int[]
    {
        2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
        18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33,
        34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49,
        50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65,
    };

    // Match-length base values when a literal tree is present (minimum match 3).
    static int[] cplen3 = new int[]
    {
        3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
        19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34,
        35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50,
        51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66,
    };

    // Extra bits per length code: only the last code (64th) reads 8 extra bits.
    static int[] extra = new int[]
    {
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        8,
    };

    // Distance base values for the 4 KiB window variant (steps of 64).
    static int[] cpdist4 = new int[]
    {
        1, 65, 129, 193, 257, 321, 385, 449, 513, 577, 641, 705, 769, 833, 897, 961,
        1025, 1089, 1153, 1217, 1281, 1345, 1409, 1473, 1537, 1601, 1665, 1729, 1793, 1857, 1921, 1985,
        2049, 2113, 2177, 2241, 2305, 2369, 2433, 2497, 2561, 2625, 2689, 2753, 2817, 2881, 2945, 3009,
        3073, 3137, 3201, 3265, 3329, 3393, 3457, 3521, 3585, 3649, 3713, 3777, 3841, 3905, 3969, 4033,
    };

    // Distance base values for the 8 KiB window variant (steps of 128).
    static int[] cpdist8 = new int[]
    {
        1, 129, 257, 385, 513, 641, 769, 897, 1025, 1153, 1281, 1409, 1537, 1665, 1793, 1921,
        2049, 2177, 2305, 2433, 2561, 2689, 2817, 2945, 3073, 3201, 3329, 3457, 3585, 3713, 3841, 3969,
        4097, 4225, 4353, 4481, 4609, 4737, 4865, 4993, 5121, 5249, 5377, 5505, 5633, 5761, 5889, 6017,
        6145, 6273, 6401, 6529, 6657, 6785, 6913, 7041, 7169, 7297, 7425, 7553, 7681, 7809, 7937, 8065,
    };

    private int get_tree(int[] arrBitLengths, int numberExpected)
    /* Get the bit lengths for a code representation from the compressed
       stream. If get_tree() returns 4, then there is an error in the data.
       Otherwise zero is returned. */
    {
        /* get bit lengths */
        // The stream encodes the code lengths as run-length pairs: each byte
        // packs (bit length - 1) in its low nibble and (run count - 1) in its
        // high nibble; the first byte is (number of pairs - 1).
        int inIndex = inStream.ReadByte() + 1; /* length/count pairs to read */
        int outIndex = 0; /* next code */
        do
        {
            int nextByte = inStream.ReadByte();
            int bitLengthOfCodes = (nextByte & 0xf) + 1; /* bits in code (1..16) */
            int numOfCodes = ((nextByte & 0xf0) >> 4) + 1; /* codes with those bits (1..16) */
            if (outIndex + numOfCodes > numberExpected)
                return 4; /* don't overflow arrBitLengths[] */
            do
            {
                arrBitLengths[outIndex++] = bitLengthOfCodes;
            } while ((--numOfCodes) != 0);
        } while ((--inIndex) != 0);
        return outIndex != numberExpected ? 4 : 0; /* should have read numberExpected of them */
    }

    // Reads the code-length tables from the stream and builds the literal,
    // length, and distance Huffman decode tables via HuftTree.huftbuid.
    // Returns 0 on success, or the non-zero error code from get_tree/huftbuid.
    private int explode_SetTables()
    {
        int returnCode; /* return codes */
        int[] arrBitLengthsForCodes = new int[256]; /* bit lengths for codes */
        bitsForLiteralCodeTable = 0; /* bits for tb */
        bitsForLengthCodeTable = 7;
        // Larger entries get a wider first-level distance table (speed/size trade).
        bitsForDistanceCodeTable = (compressedSize) > 200000 ? 8 : 7;

        if ((generalPurposeBitFlag & HeaderFlags.Bit2) != 0)
        /* With literal tree--minimum match length is 3 */
        {
            bitsForLiteralCodeTable = 9; /* base table size for literals */
            if ((returnCode = get_tree(arrBitLengthsForCodes, 256)) != 0)
                return returnCode;
            if (
                (
                    returnCode = HuftTree.huftbuid(
                        arrBitLengthsForCodes,
                        256,
                        256,
                        [],
                        [],
                        out hufLiteralCodeTable,
                        ref bitsForLiteralCodeTable
                    )
                ) != 0
            )
                return returnCode;
            if ((returnCode = get_tree(arrBitLengthsForCodes, 64)) != 0)
                return returnCode;
            if (
                (
                    returnCode = HuftTree.huftbuid(
                        arrBitLengthsForCodes,
                        64,
                        0,
                        cplen3,
                        extra,
                        out hufLengthCodeTable,
                        ref bitsForLengthCodeTable
                    )
                ) != 0
            )
                return returnCode;
        }
        else
        /* No literal tree--minimum match length is 2 */
        {
            if ((returnCode = get_tree(arrBitLengthsForCodes, 64)) != 0)
                return returnCode;
            hufLiteralCodeTable = null;
            if (
                (
                    returnCode = HuftTree.huftbuid(
                        arrBitLengthsForCodes,
                        64,
                        0,
                        cplen2,
                        extra,
                        out hufLengthCodeTable,
                        ref bitsForLengthCodeTable
                    )
                ) != 0
            )
                return returnCode;
        }
        if ((returnCode = get_tree(arrBitLengthsForCodes, 64)) != 0)
            return (int)returnCode;
        if ((generalPurposeBitFlag & HeaderFlags.Bit1) != 0) /* true if 8K */
        {
            numOfUncodedLowerDistanceBits = 7;
            returnCode = HuftTree.huftbuid(
                arrBitLengthsForCodes,
                64,
                0,
                cpdist8,
                extra,
                out hufDistanceCodeTable,
                ref bitsForDistanceCodeTable
            );
        }
        else /* else 4K */
        {
            numOfUncodedLowerDistanceBits = 6;
            returnCode = HuftTree.huftbuid(
                arrBitLengthsForCodes,
                64,
                0,
                cpdist4,
                extra,
                out hufDistanceCodeTable,
                ref bitsForDistanceCodeTable
            );
        }
        return returnCode;
    }

    // Ensures at least numberOfBits valid bits are in bitBuffer, pulling whole
    // bytes from the stream (LSB-first). NOTE(review): ReadByte() returns -1 at
    // EOF, which would be OR-ed into the buffer as 0xFF bits — truncated input
    // is not detected here; confirm upstream guarantees enough data.
    private void NeedBits(int numberOfBits)
    {
        while (bitBufferCount < (numberOfBits))
        {
            bitBuffer |= (uint)inStream.ReadByte() << bitBufferCount;
            bitBufferCount += 8;
        }
    }

    // Discards numberOfBits consumed bits from the low end of the buffer.
    private void DumpBits(int numberOfBits)
    {
        bitBuffer >>= numberOfBits;
        bitBufferCount -= numberOfBits;
    }

    // Decodes one Huffman symbol from htab, following sub-table links as
    // needed. On return huftPointer is the matched entry and e its extra-bit
    // count. Returns 0 on success, 1 on an invalid code. Bits are indexed with
    // the COMPLEMENT of the buffer (~bitBuffer), matching how the tables were
    // built from inflate.c.
    int DecodeHuft(huftNode[] htab, int bits, uint mask, out huftNode huftPointer, out int e)
    {
        NeedBits(bits);
        int tabOffset = (int)(~bitBuffer & mask);
        huftPointer = htab[tabOffset];
        while (true)
        {
            DumpBits(huftPointer.NumberOfBitsUsed);
            e = huftPointer.NumberOfExtraBits;
            if (e <= 32)
                break; // direct symbol (e <= 32 means literal/length/base entry)
            if (e == INVALID_CODE)
                return 1;
            // e > 32: link to a sub-table; low 5 bits give its index width.
            e &= 31;
            NeedBits(e);
            tabOffset = (int)(~bitBuffer & mask_bits[e]);
            huftPointer = huftPointer.ChildNodes[tabOffset];
        }
        return 0;
    }

    // Resets the bit buffer, window position and byte counter, and caches the
    // bit masks for the table widths chosen in explode_SetTables().
    private void explode_var_init()
    {
        /* explode the coded data */
        bitBuffer = 0;
        bitBufferCount = 0;
        maskForLiteralCodeTable = mask_bits[bitsForLiteralCodeTable]; //only used in explode_lit
        maskForLengthCodeTable = mask_bits[bitsForLengthCodeTable];
        maskForDistanceCodeTable = mask_bits[bitsForDistanceCodeTable];
        maskForDistanceLowBits = mask_bits[numOfUncodedLowerDistanceBits];
        outBytesCount = 0;
        windowIndex = 0; /* initialize bit buffer, window */
    }

    /// <summary>
    /// Decompresses up to <paramref name="count"/> bytes into
    /// <paramref name="buffer"/>. Each step reads one flag bit: 1 = literal
    /// byte (Huffman-coded or raw 8 bits depending on the variant),
    /// 0 = a (distance, length) match copied from the sliding window.
    /// A match interrupted by a full output buffer resumes on the next call
    /// via the persistent <c>length</c>/<c>distance</c> fields.
    /// </summary>
    public override int Read(byte[] buffer, int offset, int count)
    {
        int countIndex = 0;
        while (countIndex < count && outBytesCount < unCompressedSize) /* do until unCompressedSize bytes uncompressed */
        {
            if (length == 0) // no match in progress: decode the next symbol
            {
                NeedBits(1);
                bool literal = (bitBuffer & 1) == 1;
                DumpBits(1);
                huftNode huftPointer;
                if (literal) /* then literal--decode it */
                {
                    byte nextByte;
                    if (hufLiteralCodeTable != null)
                    {
                        /* get coded literal */
                        if (
                            DecodeHuft(
                                hufLiteralCodeTable,
                                bitsForLiteralCodeTable,
                                maskForLiteralCodeTable,
                                out huftPointer,
                                out _
                            ) != 0
                        )
                            throw new Exception("Error decoding literal value");
                        nextByte = (byte)huftPointer.Value;
                    }
                    else
                    {
                        // No literal tree: literals are stored as raw bytes.
                        NeedBits(8);
                        nextByte = (byte)bitBuffer;
                        DumpBits(8);
                    }
                    buffer[offset + (countIndex++)] = nextByte;
                    windowsBuffer[windowIndex++] = nextByte;
                    outBytesCount++;
                    if (windowIndex == WSIZE)
                        windowIndex = 0;
                    continue;
                }

                NeedBits(numOfUncodedLowerDistanceBits); /* get distance low bits */
                distance = (int)(bitBuffer & maskForDistanceLowBits);
                DumpBits(numOfUncodedLowerDistanceBits);

                /* get coded distance high bits */
                if (
                    DecodeHuft(
                        hufDistanceCodeTable,
                        bitsForDistanceCodeTable,
                        maskForDistanceCodeTable,
                        out huftPointer,
                        out _
                    ) != 0
                )
                    throw new Exception("Error decoding distance high bits");
                distance = windowIndex - (distance + huftPointer.Value); /* construct offset */

                /* get coded length */
                if (
                    DecodeHuft(
                        hufLengthCodeTable,
                        bitsForLengthCodeTable,
                        maskForLengthCodeTable,
                        out huftPointer,
                        out int extraBitLength
                    ) != 0
                )
                    throw new Exception("Error decoding coded length");
                length = huftPointer.Value;
                if (extraBitLength != 0) /* get length extra bits */
                {
                    NeedBits(8);
                    length += (int)(bitBuffer & 0xff);
                    DumpBits(8);
                }
                // Clamp a match that would run past the declared output size.
                if (length > (unCompressedSize - outBytesCount))
                    length = (int)(unCompressedSize - outBytesCount);
                distance &= WSIZE - 1; // wrap the copy source into the window
            }

            // Copy the pending match byte-by-byte (source may overlap dest in
            // the window, which is intentional for run-length style matches).
            while (length != 0 && countIndex < count)
            {
                byte nextByte = windowsBuffer[distance++];
                buffer[offset + (countIndex++)] = nextByte;
                windowsBuffer[windowIndex++] = nextByte;
                outBytesCount++;
                if (distance == WSIZE)
                    distance = 0;
                if (windowIndex == WSIZE)
                    windowIndex = 0;
                length--;
            }
        }
        return countIndex;
    }
}

View File

@@ -1,269 +0,0 @@
/*
* This code has been converted to C# based on the original huft_tree code found in
* inflate.c -- by Mark Adler version c17e, 30 Mar 2007
*/
namespace SharpCompress.Compressors.Explode;
/// <summary>
/// One entry of a Huffman decode table built by <see cref="HuftTree.huftbuid"/>.
/// An entry is either a decoded symbol (<see cref="Value"/> with
/// <see cref="NumberOfExtraBits"/> &lt;= 32), a link to a sub-table
/// (<see cref="NumberOfExtraBits"/> &gt; 32, children in
/// <see cref="ChildNodes"/>), or invalid (extra bits == 99).
/// NOTE(review): lower-case type name mirrors the original C "huft" struct
/// from inflate.c; kept as-is since callers reference it by this name.
/// </summary>
public class huftNode
{
    public int NumberOfExtraBits; /* number of extra bits or operation */
    public int NumberOfBitsUsed; /* number of bits in this code or subcode */
    public int Value; /* literal, length base, or distance base */
    public huftNode[] ChildNodes = []; /* next level of table */
}
/// <summary>
/// Builds linked Huffman decode tables from code bit-lengths. This is a C#
/// port of huft_build from Mark Adler's inflate.c (c17e, 30 Mar 2007); the
/// original variable roles are noted inline (b, n, s, d, e, t, m).
/// NOTE(review): "huftbuid" looks like a typo of "huftbuild" but is the name
/// callers use, so it is preserved.
/// </summary>
public static class HuftTree
{
    private const int INVALID_CODE = 99;

    /* If BMAX needs to be larger than 16, then h and x[] should be ulg. */
    private const int BMAX = 16; /* maximum bit length of any code (16 for explode) */
    private const int N_MAX = 288; /* maximum number of codes in any set */

    /// <summary>
    /// Given a list of code lengths and a maximum table size, make a set of
    /// tables to decode that set of codes. Return zero on success, one if
    /// the given code set is incomplete (the tables are still built in this
    /// case), two if the input is invalid (all zero length codes or an
    /// oversubscribed set of lengths), and three if not enough memory.
    /// The code with value 256 is special, and the tables are constructed
    /// so that no bits beyond that code are fetched when that code is decoded.
    /// </summary>
    /// <param name="arrBitLengthForCodes">Code lengths in bits, one per code ("b").</param>
    /// <param name="numberOfCodes">Number of codes ("n", &lt;= N_MAX).</param>
    /// <param name="numberOfSimpleValueCodes">Number of simple-valued codes (0..s-1) ("s").</param>
    /// <param name="arrBaseValuesForNonSimpleCodes">Base values for non-simple codes ("d").</param>
    /// <param name="arrExtraBitsForNonSimpleCodes">Extra bits for non-simple codes ("e").</param>
    /// <param name="outHufTable">Resulting root decode table ("t").</param>
    /// <param name="outBitsForTable">In: requested root-table index bits; out: actual ("m").</param>
    public static int huftbuid(
        int[] arrBitLengthForCodes,
        int numberOfCodes,
        int numberOfSimpleValueCodes,
        int[] arrBaseValuesForNonSimpleCodes,
        int[] arrExtraBitsForNonSimpleCodes,
        out huftNode[] outHufTable,
        ref int outBitsForTable
    )
    {
        outHufTable = [];

        /* Generate counts for each bit length */
        int lengthOfEOBcode = numberOfCodes > 256 ? arrBitLengthForCodes[256] : BMAX; /* set length of EOB code, if any */
        int[] arrBitLengthCount = new int[BMAX + 1];
        for (int i = 0; i < BMAX + 1; i++)
            arrBitLengthCount[i] = 0;
        int pIndex = 0;
        int counterCurrentCode = numberOfCodes;
        do
        {
            arrBitLengthCount[arrBitLengthForCodes[pIndex]]++;
            pIndex++; /* assume all entries <= BMAX */
        } while ((--counterCurrentCode) != 0);
        if (arrBitLengthCount[0] == numberOfCodes) /* null input--all zero length codes */
        {
            return 0;
        }

        /* Find minimum and maximum length, bound *outBitsForTable by those */
        int counter;
        for (counter = 1; counter <= BMAX; counter++)
            if (arrBitLengthCount[counter] != 0)
                break;
        int numberOfBitsInCurrentCode = counter; /* minimum code length */
        if (outBitsForTable < counter)
            outBitsForTable = counter;
        for (counterCurrentCode = BMAX; counterCurrentCode != 0; counterCurrentCode--)
            if (arrBitLengthCount[counterCurrentCode] != 0)
                break;
        int maximumCodeLength = counterCurrentCode; /* maximum code length */
        if (outBitsForTable > counterCurrentCode)
            outBitsForTable = counterCurrentCode;

        /* Adjust last length count to fill out codes, if needed */
        int numberOfDummyCodesAdded;
        for (
            numberOfDummyCodesAdded = 1 << counter;
            counter < counterCurrentCode;
            counter++, numberOfDummyCodesAdded <<= 1
        )
            if ((numberOfDummyCodesAdded -= arrBitLengthCount[counter]) < 0)
                return 2; /* bad input: more codes than bits */
        if ((numberOfDummyCodesAdded -= arrBitLengthCount[counterCurrentCode]) < 0)
            return 2;
        arrBitLengthCount[counterCurrentCode] += numberOfDummyCodesAdded;

        /* Generate starting offsets into the value table for each length */
        int[] bitOffset = new int[BMAX + 1];
        bitOffset[1] = 0;
        counter = 0;
        pIndex = 1;
        int xIndex = 2;
        while ((--counterCurrentCode) != 0)
        { /* note that i == g from above */
            bitOffset[xIndex++] = (counter += arrBitLengthCount[pIndex++]);
        }

        /* Make a table of values in order of bit lengths */
        int[] arrValuesInOrderOfBitLength = new int[N_MAX];
        for (int i = 0; i < N_MAX; i++)
            arrValuesInOrderOfBitLength[i] = 0;
        pIndex = 0;
        counterCurrentCode = 0;
        do
        {
            if ((counter = arrBitLengthForCodes[pIndex++]) != 0)
                arrValuesInOrderOfBitLength[bitOffset[counter]++] = counterCurrentCode;
        } while (++counterCurrentCode < numberOfCodes);
        numberOfCodes = bitOffset[maximumCodeLength]; /* set numberOfCodes to length of v */

        /* Generate the Huffman codes and for each, make the table entries */
        bitOffset[0] = counterCurrentCode = 0; /* first Huffman code is zero */
        pIndex = 0; /* grab values in bit order */
        int tableLevel = -1; /* no tables yet--level -1 */
        int bitsBeforeThisTable = 0;
        int[] arrLX = new int[BMAX + 1];
        int stackOfBitsPerTable = 1; /* stack of bits per table */
        arrLX[stackOfBitsPerTable - 1] = 0; /* no bits decoded yet */
        huftNode[][] arrHufTableStack = new huftNode[BMAX][];
        huftNode[] pointerToCurrentTable = [];
        int numberOfEntriesInCurrentTable = 0;
        bool first = true;

        /* go through the bit lengths (k already is bits in shortest code) */
        for (; numberOfBitsInCurrentCode <= maximumCodeLength; numberOfBitsInCurrentCode++)
        {
            int counterForCodes = arrBitLengthCount[numberOfBitsInCurrentCode];
            while ((counterForCodes--) != 0)
            {
                /* here i is the Huffman code of length k bits for value *p */
                /* make tables up to required level */
                while (
                    numberOfBitsInCurrentCode
                    > bitsBeforeThisTable + arrLX[stackOfBitsPerTable + tableLevel]
                )
                {
                    bitsBeforeThisTable += arrLX[stackOfBitsPerTable + (tableLevel++)]; /* add bits already decoded */

                    /* compute minimum size table less than or equal to *outBitsForTable bits */
                    numberOfEntriesInCurrentTable =
                        (numberOfEntriesInCurrentTable = maximumCodeLength - bitsBeforeThisTable)
                        > outBitsForTable
                            ? outBitsForTable
                            : numberOfEntriesInCurrentTable; /* upper limit */
                    int fBitCounter1 =
                        1 << (counter = numberOfBitsInCurrentCode - bitsBeforeThisTable);
                    if (fBitCounter1 > counterForCodes + 1) /* try a k-w bit table */
                    { /* too few codes for k-w bit table */
                        fBitCounter1 -= counterForCodes + 1; /* deduct codes from patterns left */
                        xIndex = numberOfBitsInCurrentCode;
                        while (++counter < numberOfEntriesInCurrentTable) /* try smaller tables up to z bits */
                        {
                            if ((fBitCounter1 <<= 1) <= arrBitLengthCount[++xIndex])
                                break; /* enough codes to use up j bits */
                            fBitCounter1 -= arrBitLengthCount[xIndex]; /* else deduct codes from patterns */
                        }
                    }
                    if (
                        bitsBeforeThisTable + counter > lengthOfEOBcode
                        && bitsBeforeThisTable < lengthOfEOBcode
                    )
                        counter = lengthOfEOBcode - bitsBeforeThisTable; /* make EOB code end at table */
                    numberOfEntriesInCurrentTable = 1 << counter; /* table entries for j-bit table */
                    arrLX[stackOfBitsPerTable + tableLevel] = counter; /* set table size in stack */

                    /* allocate and link in new table */
                    pointerToCurrentTable = new huftNode[numberOfEntriesInCurrentTable];
                    // set the pointer, pointed to by *outHufTable to the second huft in pointertoCurrentTable
                    if (first)
                    {
                        outHufTable = pointerToCurrentTable; /* link to list for huft_free() */
                        first = false;
                    }
                    arrHufTableStack[tableLevel] = pointerToCurrentTable; /* table starts after link */

                    /* connect to last table, if there is one */
                    if (tableLevel != 0)
                    {
                        bitOffset[tableLevel] = counterCurrentCode; /* save pattern for backing up */
                        huftNode vHuft = new huftNode
                        {
                            NumberOfBitsUsed = arrLX[stackOfBitsPerTable + tableLevel - 1], /* bits to dump before this table */
                            NumberOfExtraBits = 32 + counter, /* bits in this table */
                            ChildNodes = pointerToCurrentTable, /* pointer to this table */
                        };
                        counter =
                            (counterCurrentCode & ((1 << bitsBeforeThisTable) - 1))
                            >> (bitsBeforeThisTable - arrLX[stackOfBitsPerTable + tableLevel - 1]);
                        arrHufTableStack[tableLevel - 1][counter] = vHuft; /* connect to last table */
                    }
                }

                /* set up table entry in r */
                huftNode vHuft1 = new huftNode
                {
                    NumberOfBitsUsed = numberOfBitsInCurrentCode - bitsBeforeThisTable,
                };
                if (pIndex >= numberOfCodes)
                    vHuft1.NumberOfExtraBits = INVALID_CODE; /* out of values--invalid code */
                else if (arrValuesInOrderOfBitLength[pIndex] < numberOfSimpleValueCodes)
                {
                    vHuft1.NumberOfExtraBits = (
                        arrValuesInOrderOfBitLength[pIndex] < 256 ? 32 : 31
                    ); /* 256 is end-of-block code */
                    vHuft1.Value = arrValuesInOrderOfBitLength[pIndex++]; /* simple code is just the value */
                }
                else
                {
                    vHuft1.NumberOfExtraBits = arrExtraBitsForNonSimpleCodes[
                        arrValuesInOrderOfBitLength[pIndex] - numberOfSimpleValueCodes
                    ]; /* non-simple--look up in lists */
                    vHuft1.Value = arrBaseValuesForNonSimpleCodes[
                        arrValuesInOrderOfBitLength[pIndex++] - numberOfSimpleValueCodes
                    ];
                }

                /* fill code-like entries with r */
                int fBitCounter2 = 1 << (numberOfBitsInCurrentCode - bitsBeforeThisTable);
                for (
                    counter = counterCurrentCode >> bitsBeforeThisTable;
                    counter < numberOfEntriesInCurrentTable;
                    counter += fBitCounter2
                )
                    pointerToCurrentTable[counter] = vHuft1;

                /* backwards increment the k-bit code i */
                for (
                    counter = 1 << (numberOfBitsInCurrentCode - 1);
                    (counterCurrentCode & counter) != 0;
                    counter >>= 1
                )
                    counterCurrentCode ^= counter;
                counterCurrentCode ^= counter;

                /* backup over finished tables */
                while (
                    (counterCurrentCode & ((1 << bitsBeforeThisTable) - 1)) != bitOffset[tableLevel]
                )
                    bitsBeforeThisTable -= arrLX[stackOfBitsPerTable + (--tableLevel)];
            }
        }

        /* return actual size of base table */
        outBitsForTable = arrLX[stackOfBitsPerTable];

        /* Return true (1) if we were given an incomplete table */
        return (numberOfDummyCodesAdded != 0 && maximumCodeLength != 1) ? 1 : 0;
    }
}

View File

@@ -13,7 +13,7 @@ internal class BCJFilter : Filter
true,
false,
false,
false,
false
};
private static readonly int[] MASK_TO_BIT_NUMBER = { 0, 1, 2, 2, 3, 3, 3, 3 };

View File

@@ -1,63 +0,0 @@
using System;
using System.Buffers.Binary;
using System.IO;
namespace SharpCompress.Compressors.Filters;
/// <summary>
/// ARM64 branch-conversion (BCJ) filter: rewrites PC-relative branch targets
/// to absolute form (encode) or back (decode) so that identical call targets
/// produce identical bytes, improving downstream compression. Mirrors the
/// ARM64 filter from the xz/LZMA family (two instruction forms are handled:
/// the 26-bit-immediate branch written as 0x94000000, and the 0x9xxxxxxx
/// ADRP-style form — NOTE(review): opcode naming inferred from the masks;
/// confirm against the xz arm64 filter source).
/// </summary>
internal class BCJFilterARM64 : Filter
{
    // Absolute position of the start of the current buffer within the stream;
    // combined with the in-buffer offset to form the instruction's PC.
    private int _pos;

    public BCJFilterARM64(bool isEncoder, Stream baseStream)
        : base(isEncoder, baseStream, 8) => _pos = 0;

    /// <summary>
    /// Processes 4-byte-aligned instruction words in place and returns the
    /// number of bytes fully filtered (the &lt;4 trailing bytes are left for
    /// the next call). The same routine handles both directions; only the
    /// sign of the PC adjustment differs (negated when decoding).
    /// </summary>
    protected override int Transform(byte[] buffer, int offset, int count)
    {
        var end = offset + count - 4;
        int i;
        for (i = offset; i <= end; i += 4)
        {
            uint pc = (uint)(_pos + i - offset);
            uint instr = BinaryPrimitives.ReadUInt32LittleEndian(
                new ReadOnlySpan<byte>(buffer, i, 4)
            );
            if ((instr >> 26) == 0x25)
            {
                // Branch with a 26-bit word-offset immediate: add/subtract the
                // word-aligned PC to toggle relative <-> absolute.
                uint src = instr;
                instr = 0x94000000;

                pc >>= 2;
                if (!_isEncoder)
                    pc = 0U - pc;

                instr |= (src + pc) & 0x03FFFFFF;
                BinaryPrimitives.WriteUInt32LittleEndian(new Span<byte>(buffer, i, 4), instr);
            }
            else if ((instr & 0x9F000000) == 0x90000000)
            {
                // Page-relative form: immediate is split across bits 29-30
                // (low 2 bits) and 5-23 (high bits); PC is page (4 KiB) based.
                uint src = ((instr >> 29) & 3) | ((instr >> 3) & 0x001FFFFC);

                // Only convert targets within +/-512 MiB of the PC; others are
                // left untouched so the transform stays reversible.
                if (((src + 0x00020000) & 0x001C0000) != 0)
                    continue;

                instr &= 0x9000001F;

                pc >>= 12;
                if (!_isEncoder)
                    pc = 0U - pc;

                uint dest = src + pc;
                instr |= (dest & 3) << 29;
                instr |= (dest & 0x0003FFFC) << 3;
                instr |= (0U - (dest & 0x00020000)) & 0x00E00000; // sign-extend bit 17
                BinaryPrimitives.WriteUInt32LittleEndian(new Span<byte>(buffer, i, 4), instr);
            }
        }

        i -= offset;
        _pos += i; // advance the stream position by the bytes consumed
        return i;
    }
}

View File

@@ -39,7 +39,7 @@ internal class BCJFilterIA64 : Filter
4,
4,
0,
0,
0
};
public BCJFilterIA64(bool isEncoder, Stream baseStream)

View File

@@ -1,210 +0,0 @@
using System;
using System.Buffers.Binary;
using System.IO;
namespace SharpCompress.Compressors.Filters;
/// <summary>
/// RISC-V branch-conversion (BCJ) filter: rewrites PC-relative jump/address
/// instructions so equal targets encode to equal bytes, improving compression.
/// Handles 2-byte-aligned streams and two patterns: JAL-with-ra (first byte
/// 0xEF) and AUIPC pairs (low 7 bits 0x17). Mirrors the RISC-V filter from
/// xz — NOTE(review): instruction naming inferred from the opcode masks;
/// confirm against the xz riscv filter source.
/// </summary>
internal class BCJFilterRISCV : Filter
{
    // Absolute stream position of the start of the current buffer; added to
    // the in-buffer index to form each instruction's PC.
    private int _pos;

    public BCJFilterRISCV(bool isEncoder, Stream baseStream)
        : base(isEncoder, baseStream, 8) => _pos = 0;

    // Inverse transform: converts the absolute addresses written by Encode()
    // back to PC-relative form. Returns the number of bytes fully processed
    // (trailing <8 bytes are deferred to the next call).
    private int Decode(byte[] buffer, int offset, int count)
    {
        if (count < 8)
        {
            return 0;
        }

        var end = offset + count - 8;
        int i;
        for (i = offset; i <= end; i += 2) // RISC-V C-extension: 2-byte alignment
        {
            uint inst = buffer[i];

            if (inst == 0xEF)
            {
                // JAL-style: reassemble the scrambled 20-bit immediate, make it
                // PC-relative again, and re-scatter it into the encoding.
                uint b1 = buffer[i + 1];

                // Only the variant Encode() produces (see its bit layout).
                if ((b1 & 0x0D) != 0)
                    continue;

                uint b2 = buffer[i + 2];
                uint b3 = buffer[i + 3];
                uint pc = (uint)(_pos + i);

                uint addr = ((b1 & 0xF0) << 13) | (b2 << 9) | (b3 << 1);

                addr -= pc;

                buffer[i + 1] = (byte)((b1 & 0x0F) | ((addr >> 8) & 0xF0));

                buffer[i + 2] = (byte)(
                    ((addr >> 16) & 0x0F) | ((addr >> 7) & 0x10) | ((addr << 4) & 0xE0)
                );

                buffer[i + 3] = (byte)(((addr >> 4) & 0x7F) | ((addr >> 13) & 0x80));

                i += 4 - 2; // consumed a 4-byte instruction
            }
            else if ((inst & 0x7F) == 0x17)
            {
                // AUIPC-style pair: the filter packed state across this word
                // and the following 4 bytes; undo the packing.
                uint inst2 = 0;

                inst |= (uint)buffer[i + 1] << 8;
                inst |= (uint)buffer[i + 2] << 16;
                inst |= (uint)buffer[i + 3] << 24;

                if ((inst & 0xE80) != 0)
                {
                    // AUIPC + load/store-like pair form.
                    inst2 = BinaryPrimitives.ReadUInt32LittleEndian(
                        new ReadOnlySpan<byte>(buffer, i + 4, 4)
                    );

                    // Register fields must match between the pair or we skip.
                    if (((((inst) << 8) ^ (inst2)) & 0xF8003) != 3)
                    {
                        i += 6 - 2;
                        continue;
                    }

                    uint addr = inst & 0xFFFFF000;
                    addr += inst2 >> 20;

                    inst = 0x17 | (2 << 7) | (inst2 << 12);
                    inst2 = addr;
                }
                else
                {
                    // Special AUIPC form: the absolute address was stored
                    // big-endian in the following word by Encode().
                    uint inst2_rs1 = inst >> 27;

                    if ((uint)(((inst) - 0x3117) << 18) >= ((inst2_rs1) & 0x1D))
                    {
                        i += 4 - 2;
                        continue;
                    }

                    uint addr = BinaryPrimitives.ReadUInt32BigEndian(
                        new ReadOnlySpan<byte>(buffer, i + 4, 4)
                    );

                    addr -= (uint)(_pos + i);

                    inst2 = (inst >> 12) | (addr << 20);
                    inst = 0x17 | (inst2_rs1 << 7) | ((addr + 0x800) & 0xFFFFF000);
                }

                BinaryPrimitives.WriteUInt32LittleEndian(new Span<byte>(buffer, i, 4), inst);
                BinaryPrimitives.WriteUInt32LittleEndian(new Span<byte>(buffer, i + 4, 4), inst2);

                i += 8 - 2; // consumed the full 8-byte pair
            }
        }

        i -= offset;
        _pos += i;
        return i;
    }

    // Forward transform: converts PC-relative targets to absolute addresses.
    // Exactly mirrors Decode() (each branch here is the inverse of the
    // corresponding branch there). Returns bytes fully processed.
    private int Encode(byte[] buffer, int offset, int count)
    {
        if (count < 8)
        {
            return 0;
        }

        var end = offset + count - 8;
        int i;
        for (i = offset; i <= end; i += 2)
        {
            uint inst = buffer[i];

            if (inst == 0xEF)
            {
                // JAL-style: gather the scattered immediate, add the PC, and
                // store it in the filter's regular layout.
                uint b1 = buffer[i + 1];

                if ((b1 & 0x0D) != 0)
                    continue;

                uint b2 = buffer[i + 2];
                uint b3 = buffer[i + 3];
                uint pc = (uint)(_pos + i);

                uint addr =
                    ((b1 & 0xF0) << 8)
                    | ((b2 & 0x0F) << 16)
                    | ((b2 & 0x10) << 7)
                    | ((b2 & 0xE0) >> 4)
                    | ((b3 & 0x7F) << 4)
                    | ((b3 & 0x80) << 13);

                addr += pc;

                buffer[i + 1] = (byte)((b1 & 0x0F) | ((addr >> 13) & 0xF0));
                buffer[i + 2] = (byte)(addr >> 9);
                buffer[i + 3] = (byte)(addr >> 1);

                i += 4 - 2;
            }
            else if ((inst & 0x7F) == 0x17)
            {
                inst |= (uint)buffer[i + 1] << 8;
                inst |= (uint)buffer[i + 2] << 16;
                inst |= (uint)buffer[i + 3] << 24;

                if ((inst & 0xE80) != 0)
                {
                    // AUIPC + pair form: fold the low immediate into the high
                    // part, add the PC, and write the address big-endian so
                    // Decode() can recognize and reverse it.
                    uint inst2 = BinaryPrimitives.ReadUInt32LittleEndian(
                        new ReadOnlySpan<byte>(buffer, i + 4, 4)
                    );

                    if (((((inst) << 8) ^ (inst2)) & 0xF8003) != 3)
                    {
                        i += 6 - 2;
                        continue;
                    }

                    uint addr = inst & 0xFFFFF000;
                    addr += (inst2 >> 20) - ((inst2 >> 19) & 0x1000); // sign-adjust low 12 bits

                    addr += (uint)(_pos + i);

                    inst = 0x17 | (2 << 7) | (inst2 << 12);

                    BinaryPrimitives.WriteUInt32LittleEndian(new Span<byte>(buffer, i, 4), inst);
                    BinaryPrimitives.WriteUInt32BigEndian(new Span<byte>(buffer, i + 4, 4), addr);
                }
                else
                {
                    // Special AUIPC form produced by a previous Encode pass;
                    // restore the original instruction pair.
                    uint fake_rs1 = inst >> 27;

                    if ((uint)(((inst) - 0x3117) << 18) >= ((fake_rs1) & 0x1D))
                    {
                        i += 4 - 2;
                        continue;
                    }

                    uint fake_addr = BinaryPrimitives.ReadUInt32LittleEndian(
                        new ReadOnlySpan<byte>(buffer, i + 4, 4)
                    );

                    uint fake_inst2 = (inst >> 12) | (fake_addr << 20);
                    inst = 0x17 | (fake_rs1 << 7) | (fake_addr & 0xFFFFF000);

                    BinaryPrimitives.WriteUInt32LittleEndian(new Span<byte>(buffer, i, 4), inst);
                    BinaryPrimitives.WriteUInt32LittleEndian(
                        new Span<byte>(buffer, i + 4, 4),
                        fake_inst2
                    );
                }

                i += 8 - 2;
            }
        }

        i -= offset;
        _pos += i;
        return i;
    }

    // Dispatches to Encode or Decode based on the direction chosen at
    // construction time.
    protected override int Transform(byte[] buffer, int offset, int count)
    {
        if (_isEncoder)
        {
            return Encode(buffer, offset, count);
        }
        else
        {
            return Decode(buffer, offset, count);
        }
    }
}

View File

@@ -20,8 +20,7 @@ internal sealed class AesDecoderStream : DecoderStream2
public AesDecoderStream(Stream input, byte[] info, IPasswordProvider pass, long limit)
{
var password = pass.CryptoGetTextPassword();
if (password == null)
if (pass.CryptoGetTextPassword() == null)
{
throw new SharpCompress.Common.CryptographicException(
"Encrypted 7Zip archive has no password specified."
@@ -38,8 +37,8 @@ internal sealed class AesDecoderStream : DecoderStream2
Init(info, out var numCyclesPower, out var salt, out var seed);
var passwordBytes = Encoding.Unicode.GetBytes(password);
var key = InitKey(numCyclesPower, salt, passwordBytes);
var password = Encoding.Unicode.GetBytes(pass.CryptoGetTextPassword());
var key = InitKey(numCyclesPower, salt, password);
if (key == null)
{
throw new InvalidOperationException("Initialized with null key");
@@ -208,6 +207,28 @@ internal sealed class AesDecoderStream : DecoderStream2
}
else
{
#if NETSTANDARD2_0
using var sha = IncrementalHash.CreateHash(HashAlgorithmName.SHA256);
var counter = new byte[8];
var numRounds = 1L << mNumCyclesPower;
for (long round = 0; round < numRounds; round++)
{
sha.AppendData(salt, 0, salt.Length);
sha.AppendData(pass, 0, pass.Length);
sha.AppendData(counter, 0, 8);
// This mirrors the counter so we don't have to convert long to byte[] each round.
// (It also ensures the counter is little endian, which BitConverter does not.)
for (var i = 0; i < 8; i++)
{
if (++counter[i] != 0)
{
break;
}
}
}
return sha.GetHashAndReset();
#else
using var sha = SHA256.Create();
var counter = new byte[8];
var numRounds = 1L << mNumCyclesPower;
@@ -230,6 +251,7 @@ internal sealed class AesDecoderStream : DecoderStream2
sha.TransformFinalBlock(counter, 0, 0);
return sha.Hash;
#endif
}
}

Some files were not shown because too many files have changed in this diff Show More