Compare commits

..

41 Commits

Author SHA1 Message Date
Adam Hathcock
6fc4b045fd mark for 0.37.2 2024-04-27 09:34:32 +01:00
Adam Hathcock
446852c7d0 really fix source link and central usage 2024-04-27 09:34:05 +01:00
Adam Hathcock
c635f00899 mark as 0.37.1 2024-04-27 09:12:17 +01:00
Adam Hathcock
1393629bc5 Mark sourcelink as PrivateAssets="All" 2024-04-27 06:15:29 +01:00
Adam Hathcock
49ce17b759 update zstdsharp.port and net8 is only trimmable 2024-04-25 08:35:52 +01:00
Adam Hathcock
74888021c8 Merge pull request #835 from Blokyk/fix-736
Prevent infinite loop when reading corrupted archive
2024-04-24 09:20:44 +01:00
Adam Hathcock
9483856439 fmt 2024-04-24 09:17:42 +01:00
blokyk
dbbc7c8132 fix(tar): prevent infinite loop when reading corrupted archive 2024-04-24 03:13:13 +02:00
Adam Hathcock
b203d165f5 Mark for 0.37.0 2024-04-23 10:25:32 +01:00
Adam Hathcock
c695e1136d Merge pull request #828 from adamhathcock/remove-netstandard20
Remove ~netstandard20~ just net7.0
2024-04-23 10:18:24 +01:00
Adam Hathcock
d847202308 add back net standard 2.0 2024-04-23 09:59:30 +01:00
Adam Hathcock
9d24e0a4b8 set package locks and central management 2024-04-23 09:37:25 +01:00
Adam Hathcock
745fe1eb9f references 2024-04-23 09:28:33 +01:00
Adam Hathcock
3e52b85e9d Merge remote-tracking branch 'origin/master' into remove-netstandard20
# Conflicts:
#	.config/dotnet-tools.json
2024-04-23 09:26:44 +01:00
Adam Hathcock
d26f020b50 Merge pull request #832 from adamhathcock/remove-ignored-nulls
Remove ignored nulls
2024-04-23 09:25:08 +01:00
Adam Hathcock
095b5f702c get rid of another null! 2024-04-23 09:20:20 +01:00
Adam Hathcock
9622853b8d fix and fmt 2024-04-23 09:16:05 +01:00
Adam Hathcock
b94e75fabe try to fix more tests 2024-04-23 09:06:49 +01:00
Adam Hathcock
23dd041e2e fix some tests 2024-04-23 08:52:10 +01:00
Adam Hathcock
c73ca21b4d fmt 2024-04-22 15:19:05 +01:00
Adam Hathcock
7ebdc85ad2 more null clean up 2024-04-22 15:17:24 +01:00
Adam Hathcock
99e2c8c90d more clean up 2024-04-22 15:10:22 +01:00
Adam Hathcock
f24bfdf945 fix tests 2024-04-22 14:57:08 +01:00
Adam Hathcock
7963233702 add missing usings 2024-04-22 14:18:41 +01:00
Adam Hathcock
b550df2038 get rid of more ! and update csharpier 2024-04-22 14:17:08 +01:00
Adam Hathcock
fb55624f5f add more null handling 2024-04-18 14:25:10 +01:00
Adam Hathcock
e96366f489 Entry can be null and remove other ! usages 2024-04-18 13:24:03 +01:00
Adam Hathcock
900190cf54 Merge pull request #829 from NeuroXiq/patch-1
Update README.md - Change API Docs to DNDocs
2024-04-15 08:14:16 +01:00
Marek Węglarz
2af744b474 Update README.md 2024-04-15 04:28:15 +02:00
Adam Hathcock
11153084e2 update csharpier 2024-04-11 15:47:39 +01:00
Adam Hathcock
4b9c814bfc remove .netstandard 2.0 and clean up 2024-04-11 15:46:43 +01:00
Adam Hathcock
1b5d3a3b6e Merge pull request #825 from adamhathcock/tar-corruption
Fix tar corruption when sizes mismatch
2024-04-11 13:11:29 +01:00
Adam Hathcock
373637e6a7 more logic fixes 2024-04-11 09:05:45 +01:00
Adam Hathcock
cb223217c1 actually, transfer block is different than overall transfer 2024-04-10 16:12:01 +01:00
Adam Hathcock
eab97a3f8b calculate remaining afterwards 2024-04-10 08:53:20 +01:00
Adam Hathcock
fdfaa8ab45 add max transfer size to tar 2024-04-09 15:35:15 +01:00
Adam Hathcock
2321d9dbee fix patch 2024-04-09 08:56:15 +01:00
Adam Hathcock
bf74dd887a Fix tar corruption when sizes mismatch 2024-04-09 08:19:23 +01:00
Adam Hathcock
3612035894 Merge pull request #823 from klimatr26/new-7z-filters
Add support for 7z ARM64 and RISCV filters
2024-04-08 09:56:07 +01:00
Adam Hathcock
6553e9b0cd formatting 2024-04-08 09:50:37 +01:00
klimatr26
09f2410170 Add support for 7z ARM64 and RISCV filters 2024-04-05 15:00:43 -05:00
100 changed files with 1655 additions and 1585 deletions

View File

@@ -3,7 +3,7 @@
"isRoot": true,
"tools": {
"csharpier": {
"version": "0.27.3",
"version": "0.28.1",
"commands": [
"dotnet-csharpier"
]

View File

@@ -70,7 +70,7 @@ indent_style = tab
[*.{cs,csx,cake,vb,vbx}]
# Default Severity for all .NET Code Style rules below
dotnet_analyzer_diagnostic.severity = warning
dotnet_analyzer_diagnostic.severity = silent
##########################################
# File Header (Uncomment to support file headers)
@@ -269,6 +269,8 @@ dotnet_diagnostic.CA1305.severity = suggestion
dotnet_diagnostic.CA1307.severity = suggestion
dotnet_diagnostic.CA1309.severity = suggestion
dotnet_diagnostic.CA1310.severity = error
dotnet_diagnostic.CA1507.severity = suggestion
dotnet_diagnostic.CA1513.severity = suggestion
dotnet_diagnostic.CA1707.severity = suggestion
dotnet_diagnostic.CA1708.severity = suggestion
dotnet_diagnostic.CA1711.severity = suggestion
@@ -286,6 +288,7 @@ dotnet_diagnostic.CA1834.severity = error
dotnet_diagnostic.CA1845.severity = suggestion
dotnet_diagnostic.CA1848.severity = suggestion
dotnet_diagnostic.CA1852.severity = suggestion
dotnet_diagnostic.CA1860.severity = silent
dotnet_diagnostic.CA2016.severity = suggestion
dotnet_diagnostic.CA2201.severity = error
dotnet_diagnostic.CA2206.severity = error
@@ -303,13 +306,12 @@ dotnet_diagnostic.CS1998.severity = error
dotnet_diagnostic.CS8602.severity = error
dotnet_diagnostic.CS8604.severity = error
dotnet_diagnostic.CS8618.severity = error
dotnet_diagnostic.CS0618.severity = error
dotnet_diagnostic.CS0618.severity = suggestion
dotnet_diagnostic.CS1998.severity = error
dotnet_diagnostic.CS4014.severity = error
dotnet_diagnostic.CS8600.severity = error
dotnet_diagnostic.CS8603.severity = error
dotnet_diagnostic.CS8625.severity = error
dotnet_diagnostic.CS8981.severity = suggestion
dotnet_diagnostic.BL0005.severity = suggestion
@@ -318,7 +320,7 @@ dotnet_diagnostic.MVC1000.severity = suggestion
dotnet_diagnostic.RZ10012.severity = error
dotnet_diagnostic.IDE0004.severity = error # redundant cast
dotnet_diagnostic.IDE0005.severity = error
dotnet_diagnostic.IDE0005.severity = suggestion
dotnet_diagnostic.IDE0007.severity = error # Use var
dotnet_diagnostic.IDE0011.severity = error # Use braces on if statements
dotnet_diagnostic.IDE0010.severity = silent # populate switch
@@ -329,7 +331,7 @@ dotnet_diagnostic.IDE0023.severity = suggestion # use expression body for operat
dotnet_diagnostic.IDE0024.severity = silent # expression body for operators
dotnet_diagnostic.IDE0025.severity = suggestion # use expression body for properties
dotnet_diagnostic.IDE0027.severity = suggestion # Use expression body for accessors
dotnet_diagnostic.IDE0028.severity = silent
dotnet_diagnostic.IDE0028.severity = silent # expression body for accessors
dotnet_diagnostic.IDE0032.severity = suggestion # Use auto property
dotnet_diagnostic.IDE0033.severity = error # prefer tuple name
dotnet_diagnostic.IDE0037.severity = suggestion # simplify anonymous type
@@ -337,7 +339,7 @@ dotnet_diagnostic.IDE0040.severity = error # modifiers required
dotnet_diagnostic.IDE0041.severity = error # simplify null
dotnet_diagnostic.IDE0042.severity = error # deconstruct variable
dotnet_diagnostic.IDE0044.severity = suggestion # make field only when possible
dotnet_diagnostic.IDE0047.severity = suggestion # paratemeter name
dotnet_diagnostic.IDE0047.severity = suggestion # parameter name
dotnet_diagnostic.IDE0051.severity = error # unused field
dotnet_diagnostic.IDE0052.severity = error # unused member
dotnet_diagnostic.IDE0053.severity = suggestion # lambda not needed
@@ -351,11 +353,20 @@ dotnet_diagnostic.IDE0066.severity = suggestion # switch expression
dotnet_diagnostic.IDE0072.severity = suggestion # Populate switch - forces population of all cases even when default specified
dotnet_diagnostic.IDE0078.severity = suggestion # use pattern matching
dotnet_diagnostic.IDE0090.severity = suggestion # new can be simplified
dotnet_diagnostic.IDE0130.severity = error # namespace folder structure
dotnet_diagnostic.IDE0130.severity = suggestion # namespace folder structure
dotnet_diagnostic.IDE0160.severity = silent # Use block namespaces ARE NOT required
dotnet_diagnostic.IDE0161.severity = error # Please use file namespaces
dotnet_diagnostic.IDE0200.severity = suggestion # lambda not needed
dotnet_diagnostic.IDE1006.severity = suggestion # Naming rule violation: These words cannot contain lower case characters
dotnet_diagnostic.IDE0260.severity = suggestion # Use pattern matching
dotnet_diagnostic.IDE0270.severity = suggestion # Null check simplifcation
dotnet_diagnostic.IDE0290.severity = error # Primary Constructor
dotnet_diagnostic.IDE0300.severity = suggestion # Collection
dotnet_diagnostic.IDE0305.severity = suggestion # Collection ToList
dotnet_diagnostic.NX0001.severity = error
dotnet_diagnostic.NX0002.severity = silent
dotnet_diagnostic.NX0003.severity = silent
##########################################
# Styles

View File

@@ -10,5 +10,7 @@
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<RunAnalyzersDuringLiveAnalysis>False</RunAnalyzersDuringLiveAnalysis>
<RunAnalyzersDuringBuild>False</RunAnalyzersDuringBuild>
<ManagePackageVersionsCentrally>true</ManagePackageVersionsCentrally>
<RestorePackagesWithLockFile>true</RestorePackagesWithLockFile>
</PropertyGroup>
</Project>

18
Directory.Packages.props Normal file
View File

@@ -0,0 +1,18 @@
<Project>
<ItemGroup>
<PackageVersion Include="Bullseye" Version="5.0.0" />
<PackageVersion Include="FluentAssertions" Version="6.12.0" />
<PackageVersion Include="Glob" Version="1.1.9" />
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="8.0.0" />
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="17.9.0" />
<PackageVersion Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
<PackageVersion Include="Mono.Posix.NETStandard" Version="1.0.0" />
<PackageVersion Include="SimpleExec" Version="12.0.0" />
<PackageVersion Include="System.Memory" Version="4.5.5" />
<PackageVersion Include="System.Text.Encoding.CodePages" Version="8.0.0" />
<PackageVersion Include="xunit" Version="2.7.1" />
<PackageVersion Include="xunit.runner.visualstudio" Version="2.5.8" />
<PackageVersion Include="xunit.SkippableFact" Version="1.4.13" />
<PackageVersion Include="ZstdSharp.Port" Version="0.8.0" />
</ItemGroup>
</Project>

9
NuGet.config Normal file
View File

@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<packageSourceMapping>
<!-- key value for <packageSource> should match key values from <packageSources> element -->
<packageSource key="nuget.org">
<package pattern="*" />
</packageSource>
</packageSourceMapping>
</configuration>

View File

@@ -1,12 +1,12 @@
# SharpCompress
SharpCompress is a compression library in pure C# for .NET Standard 2.0, 2.1, .NET Core 3.1 and .NET 5.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
SharpCompress is a compression library in pure C# for .NET Framework 4.62, .NET Standard 2.1, .NET 6.0 and NET 8.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).
GitHub Actions Build -
[![SharpCompress](https://github.com/adamhathcock/sharpcompress/actions/workflows/dotnetcore.yml/badge.svg)](https://github.com/adamhathcock/sharpcompress/actions/workflows/dotnetcore.yml)
[![Static Badge](https://img.shields.io/badge/API%20Documentation-RobiniaDocs-43bc00?logo=readme&logoColor=white)](https://www.robiniadocs.com/d/sharpcompress/api/SharpCompress.html)
[![Static Badge](https://img.shields.io/badge/API%20Docs-DNDocs-190088?logo=readme&logoColor=white)](https://dndocs.com/d/sharpcompress/api/index.html)
## Need Help?

View File

@@ -17,6 +17,9 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Config", "Config", "{CDB425
ProjectSection(SolutionItems) = preProject
Directory.Build.props = Directory.Build.props
global.json = global.json
.editorconfig = .editorconfig
Directory.Packages.props = Directory.Packages.props
NuGet.config = NuGet.config
EndProjectSection
EndProject
Global

View File

@@ -79,6 +79,10 @@
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateStaticFields/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateStaticReadonly/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=StaticReadonly/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=15b5b1f1_002D457c_002D4ca6_002Db278_002D5615aedc07d3/@EntryIndexedValue">&lt;Policy&gt;&lt;Descriptor Staticness="Static" AccessRightKinds="Private" Description="Static readonly fields (private)"&gt;&lt;ElementKinds&gt;&lt;Kind Name="READONLY_FIELD" /&gt;&lt;/ElementKinds&gt;&lt;/Descriptor&gt;&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;&lt;/Policy&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=4a98fdf6_002D7d98_002D4f5a_002Dafeb_002Dea44ad98c70c/@EntryIndexedValue">&lt;Policy&gt;&lt;Descriptor Staticness="Instance" AccessRightKinds="Private" Description="Instance fields (private)"&gt;&lt;ElementKinds&gt;&lt;Kind Name="FIELD" /&gt;&lt;Kind Name="READONLY_FIELD" /&gt;&lt;/ElementKinds&gt;&lt;/Descriptor&gt;&lt;Policy Inspect="True" Prefix="_" Suffix="" Style="aaBb" /&gt;&lt;/Policy&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=c873eafb_002Dd57f_002D481d_002D8c93_002D77f6863c2f88/@EntryIndexedValue">&lt;Policy&gt;&lt;Descriptor Staticness="Static" AccessRightKinds="Protected, ProtectedInternal, Internal, Public, PrivateProtected" Description="Static readonly fields (not private)"&gt;&lt;ElementKinds&gt;&lt;Kind Name="READONLY_FIELD" /&gt;&lt;/ElementKinds&gt;&lt;/Descriptor&gt;&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;&lt;/Policy&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=f9fce829_002De6f4_002D4cb2_002D80f1_002D5497c44f51df/@EntryIndexedValue">&lt;Policy&gt;&lt;Descriptor Staticness="Static" AccessRightKinds="Private" Description="Static fields (private)"&gt;&lt;ElementKinds&gt;&lt;Kind Name="FIELD" /&gt;&lt;/ElementKinds&gt;&lt;/Descriptor&gt;&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;&lt;/Policy&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FCONSTANT/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FFUNCTION/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FVARIABLE/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
@@ -127,6 +131,7 @@
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002ECSharpPlaceAttributeOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateBlankLinesAroundFieldToBlankLinesAroundProperty/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateThisQualifierSettings/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EPredefinedNamingRulesToUserRulesUpgrade/@EntryIndexedValue">True</s:Boolean>
<s:String x:Key="/Default/Environment/UnitTesting/UnitTestSessionStore/Sessions/=6af8f80e_002D9fdd_002D4223_002D8e02_002D473db916f9b2/@EntryIndexedValue">&lt;SessionState ContinuousTestingIsOn="False" ContinuousTestingMode="0" FrameworkVersion="{x:Null}" IsLocked="False" Name="All tests from Solution" PlatformMonoPreference="{x:Null}" PlatformType="{x:Null}" xmlns="urn:schemas-jetbrains-com:jetbrains-ut-session" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"&gt;
&lt;Solution /&gt;
&lt;/SessionState&gt;</s:String></wpf:ResourceDictionary>

View File

@@ -2,13 +2,13 @@
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net7.0</TargetFramework>
<TargetFramework>net8.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Bullseye" Version="4.2.1" />
<PackageReference Include="Glob" Version="1.1.9" />
<PackageReference Include="SimpleExec" Version="11.0.0" />
<PackageReference Include="Bullseye" />
<PackageReference Include="Glob" />
<PackageReference Include="SimpleExec" />
</ItemGroup>
</Project>

25
build/packages.lock.json Normal file
View File

@@ -0,0 +1,25 @@
{
"version": 2,
"dependencies": {
"net8.0": {
"Bullseye": {
"type": "Direct",
"requested": "[5.0.0, )",
"resolved": "5.0.0",
"contentHash": "bqyt+m17ym+5aN45C5oZRAjuLDt8jKiCm/ys1XfymIXSkrTFwvI/QsbY3ucPSHDz7SF7uON7B57kXFv5H2k1ew=="
},
"Glob": {
"type": "Direct",
"requested": "[1.1.9, )",
"resolved": "1.1.9",
"contentHash": "AfK5+ECWYTP7G3AAdnU8IfVj+QpGjrh9GC2mpdcJzCvtQ4pnerAGwHsxJ9D4/RnhDUz2DSzd951O/lQjQby2Sw=="
},
"SimpleExec": {
"type": "Direct",
"requested": "[12.0.0, )",
"resolved": "12.0.0",
"contentHash": "ptxlWtxC8vM6Y6e3h9ZTxBBkOWnWrm/Sa1HT+2i1xcXY3Hx2hmKDZP5RShPf8Xr9D+ivlrXNy57ktzyH8kyt+Q=="
}
}
}
}

View File

@@ -12,39 +12,35 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
where TEntry : IArchiveEntry
where TVolume : IVolume
{
private readonly LazyReadOnlyCollection<TVolume> lazyVolumes;
private readonly LazyReadOnlyCollection<TEntry> lazyEntries;
private readonly LazyReadOnlyCollection<TVolume> _lazyVolumes;
private readonly LazyReadOnlyCollection<TEntry> _lazyEntries;
private bool _disposed;
private readonly SourceStream? _sourceStream;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionBegin;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionEnd;
public event EventHandler<CompressedBytesReadEventArgs>? CompressedBytesRead;
public event EventHandler<FilePartExtractionBeginEventArgs>? FilePartExtractionBegin;
protected ReaderOptions ReaderOptions { get; }
private bool disposed;
protected SourceStream SrcStream;
internal AbstractArchive(ArchiveType type, SourceStream srcStream)
internal AbstractArchive(ArchiveType type, SourceStream sourceStream)
{
Type = type;
ReaderOptions = srcStream.ReaderOptions;
SrcStream = srcStream;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(SrcStream));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
ReaderOptions = sourceStream.ReaderOptions;
_sourceStream = sourceStream;
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(_sourceStream));
_lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
}
#nullable disable
internal AbstractArchive(ArchiveType type)
{
Type = type;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
ReaderOptions = new();
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
_lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
}
#nullable enable
public ArchiveType Type { get; }
void IArchiveExtractionListener.FireEntryExtractionBegin(IArchiveEntry entry) =>
@@ -65,12 +61,12 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive.
/// </summary>
public virtual ICollection<TEntry> Entries => lazyEntries;
public virtual ICollection<TEntry> Entries => _lazyEntries;
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveVolumes across the one or many parts of the RarArchive.
/// </summary>
public ICollection<TVolume> Volumes => lazyVolumes;
public ICollection<TVolume> Volumes => _lazyVolumes;
/// <summary>
/// The total size of the files compressed in the archive.
@@ -84,29 +80,29 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
public virtual long TotalUncompressSize =>
Entries.Aggregate(0L, (total, cf) => total + cf.Size);
protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream srcStream);
protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream sourceStream);
protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);
IEnumerable<IArchiveEntry> IArchive.Entries => Entries.Cast<IArchiveEntry>();
IEnumerable<IVolume> IArchive.Volumes => lazyVolumes.Cast<IVolume>();
IEnumerable<IVolume> IArchive.Volumes => _lazyVolumes.Cast<IVolume>();
public virtual void Dispose()
{
if (!disposed)
if (!_disposed)
{
lazyVolumes.ForEach(v => v.Dispose());
lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
SrcStream?.Dispose();
_lazyVolumes.ForEach(v => v.Dispose());
_lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
_sourceStream?.Dispose();
disposed = true;
_disposed = true;
}
}
void IArchiveExtractionListener.EnsureEntriesLoaded()
{
lazyEntries.EnsureFullyLoaded();
lazyVolumes.EnsureFullyLoaded();
_lazyEntries.EnsureFullyLoaded();
_lazyVolumes.EnsureFullyLoaded();
}
void IExtractionListener.FireCompressedBytesRead(

View File

@@ -41,8 +41,8 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
internal AbstractWritableArchive(ArchiveType type)
: base(type) { }
internal AbstractWritableArchive(ArchiveType type, SourceStream srcStream)
: base(type, srcStream) { }
internal AbstractWritableArchive(ArchiveType type, SourceStream sourceStream)
: base(type, sourceStream) { }
public override ICollection<TEntry> Entries
{
@@ -120,6 +120,10 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
{
foreach (var path in Entries.Select(x => x.Key))
{
if (path is null)
{
continue;
}
var p = path.Replace('/', '\\');
if (p.Length > 0 && p[0] == '\\')
{

View File

@@ -90,7 +90,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
{
stream.CheckNotNull(nameof(stream));
return new GZipArchive(
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
);
}
@@ -99,16 +99,14 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal GZipArchive(SourceStream srcStream)
: base(ArchiveType.Tar, srcStream) { }
/// <param name="sourceStream"></param>
private GZipArchive(SourceStream sourceStream)
: base(ArchiveType.Tar, sourceStream) { }
protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream srcStream)
protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream sourceStream)
{
srcStream.LoadAllParts();
var idx = 0;
return srcStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, idx++));
sourceStream.LoadAllParts();
return sourceStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, 0));
}
public static bool IsGZipFile(string filePath) => IsGZipFile(new FileInfo(filePath));
@@ -184,7 +182,11 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
writer.Write(entry.Key, entryStream, entry.LastModifiedTime);
writer.Write(
entry.Key.NotNull("Entry Key is null"),
entryStream,
entry.LastModifiedTime
);
}
}

View File

@@ -6,7 +6,7 @@ namespace SharpCompress.Archives.GZip;
public class GZipArchiveEntry : GZipEntry, IArchiveEntry
{
internal GZipArchiveEntry(GZipArchive archive, GZipFilePart part)
internal GZipArchiveEntry(GZipArchive archive, GZipFilePart? part)
: base(part) => Archive = archive;
public virtual Stream OpenEntryStream()

View File

@@ -1,5 +1,3 @@
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
@@ -32,7 +30,7 @@ internal sealed class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArch
public override long Crc => 0;
public override string Key { get; }
public override string? Key { get; }
public override long CompressedSize => 0;

View File

@@ -17,15 +17,11 @@ public static class IArchiveEntryExtensions
streamListener.EnsureEntriesLoaded();
streamListener.FireEntryExtractionBegin(archiveEntry);
streamListener.FireFilePartExtractionBegin(
archiveEntry.Key,
archiveEntry.Key ?? "Key",
archiveEntry.Size,
archiveEntry.CompressedSize
);
var entryStream = archiveEntry.OpenEntryStream();
if (entryStream is null)
{
return;
}
using (entryStream)
{
using Stream s = new ListeningStream(streamListener, entryStream);

View File

@@ -3,7 +3,6 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Archives;
@@ -59,7 +58,7 @@ public static class IArchiveExtensions
}
// Create each directory
var path = Path.Combine(destination, entry.Key);
var path = Path.Combine(destination, entry.Key.NotNull("Entry Key is null"));
if (Path.GetDirectoryName(path) is { } directory && seenDirectories.Add(path))
{
Directory.CreateDirectory(directory);

View File

@@ -13,7 +13,7 @@ namespace SharpCompress.Archives.Rar;
/// </summary>
internal class FileInfoRarArchiveVolume : RarVolume
{
internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options, int index = 0)
internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options, int index)
: base(StreamingMode.Seekable, fileInfo.OpenRead(), FixOptions(options), index)
{
FileInfo = fileInfo;

View File

@@ -21,35 +21,33 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal RarArchive(SourceStream srcStream)
: base(ArchiveType.Rar, srcStream) { }
/// <param name="sourceStream"></param>
private RarArchive(SourceStream sourceStream)
: base(ArchiveType.Rar, sourceStream) { }
protected override IEnumerable<RarArchiveEntry> LoadEntries(IEnumerable<RarVolume> volumes) =>
RarArchiveEntryFactory.GetEntries(this, volumes, ReaderOptions);
protected override IEnumerable<RarVolume> LoadVolumes(SourceStream srcStream)
protected override IEnumerable<RarVolume> LoadVolumes(SourceStream sourceStream)
{
SrcStream.LoadAllParts(); //request all streams
var streams = SrcStream.Streams.ToArray();
var idx = 0;
sourceStream.LoadAllParts(); //request all streams
var streams = sourceStream.Streams.ToArray();
var i = 0;
if (streams.Length > 1 && IsRarFile(streams[1], ReaderOptions)) //test part 2 - true = multipart not split
{
SrcStream.IsVolumes = true;
sourceStream.IsVolumes = true;
streams[1].Position = 0;
SrcStream.Position = 0;
sourceStream.Position = 0;
return srcStream.Streams.Select(a => new StreamRarArchiveVolume(
return sourceStream.Streams.Select(a => new StreamRarArchiveVolume(
a,
ReaderOptions,
idx++
i++
));
}
else //split mode or single file
{
return new StreamRarArchiveVolume(SrcStream, ReaderOptions, idx++).AsEnumerable();
}
//split mode or single file
return new StreamRarArchiveVolume(sourceStream, ReaderOptions, i++).AsEnumerable();
}
protected override IReader CreateReaderForSolidExtraction()
@@ -108,7 +106,7 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
public static RarArchive Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull(nameof(stream));
return new RarArchive(new SourceStream(stream, i => null, options ?? new ReaderOptions()));
return new RarArchive(new SourceStream(stream, _ => null, options ?? new ReaderOptions()));
}
/// <summary>

View File

@@ -6,8 +6,8 @@ namespace SharpCompress.Archives.Rar;
internal class SeekableFilePart : RarFilePart
{
private readonly Stream stream;
private readonly string? password;
private readonly Stream _stream;
private readonly string? _password;
internal SeekableFilePart(
MarkHeader mh,
@@ -18,27 +18,27 @@ internal class SeekableFilePart : RarFilePart
)
: base(mh, fh, index)
{
this.stream = stream;
this.password = password;
_stream = stream;
_password = password;
}
internal override Stream GetCompressedStream()
{
stream.Position = FileHeader.DataStartPosition;
_stream.Position = FileHeader.DataStartPosition;
if (FileHeader.R4Salt != null)
{
var cryptKey = new CryptKey3(password!);
return new RarCryptoWrapper(stream, FileHeader.R4Salt, cryptKey);
var cryptKey = new CryptKey3(_password!);
return new RarCryptoWrapper(_stream, FileHeader.R4Salt, cryptKey);
}
if (FileHeader.Rar5CryptoInfo != null)
{
var cryptKey = new CryptKey5(password!, FileHeader.Rar5CryptoInfo);
return new RarCryptoWrapper(stream, FileHeader.Rar5CryptoInfo.Salt, cryptKey);
var cryptKey = new CryptKey5(_password!, FileHeader.Rar5CryptoInfo);
return new RarCryptoWrapper(_stream, FileHeader.Rar5CryptoInfo.Salt, cryptKey);
}
return stream;
return _stream;
}
internal override string FilePartName => "Unknown Stream - File Entry: " + FileHeader.FileName;

View File

@@ -9,7 +9,7 @@ namespace SharpCompress.Archives.Rar;
internal class StreamRarArchiveVolume : RarVolume
{
internal StreamRarArchiveVolume(Stream stream, ReaderOptions options, int index = 0)
internal StreamRarArchiveVolume(Stream stream, ReaderOptions options, int index)
: base(StreamingMode.Seekable, stream, options, index) { }
internal override IEnumerable<RarFilePart> ReadFileParts() => GetVolumeFileParts();

View File

@@ -1,5 +1,3 @@
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
@@ -14,14 +12,14 @@ namespace SharpCompress.Archives.SevenZip;
public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVolume>
{
private ArchiveDatabase database;
private ArchiveDatabase? _database;
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(string filePath, ReaderOptions readerOptions = null)
public static SevenZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.CheckNotNullOrEmpty("filePath");
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
@@ -32,7 +30,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull("fileInfo");
return new SevenZipArchive(
@@ -51,7 +49,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(
IEnumerable<FileInfo> fileInfos,
ReaderOptions readerOptions = null
ReaderOptions? readerOptions = null
)
{
fileInfos.CheckNotNull(nameof(fileInfos));
@@ -72,7 +70,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(
IEnumerable<Stream> streams,
ReaderOptions readerOptions = null
ReaderOptions? readerOptions = null
)
{
streams.CheckNotNull(nameof(streams));
@@ -91,27 +89,25 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
public static SevenZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull("stream");
return new SevenZipArchive(
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
);
}
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal SevenZipArchive(SourceStream srcStream)
: base(ArchiveType.SevenZip, srcStream) { }
/// <param name="sourceStream"></param>
private SevenZipArchive(SourceStream sourceStream)
: base(ArchiveType.SevenZip, sourceStream) { }
protected override IEnumerable<SevenZipVolume> LoadVolumes(SourceStream srcStream)
protected override IEnumerable<SevenZipVolume> LoadVolumes(SourceStream sourceStream)
{
SrcStream.LoadAllParts(); //request all streams
var idx = 0;
return new SevenZipVolume(srcStream, ReaderOptions, idx++).AsEnumerable(); //simple single volume or split, multivolume not supported
sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams
return new SevenZipVolume(sourceStream, ReaderOptions, 0).AsEnumerable(); //simple single volume or split, multivolume not supported
}
public static bool IsSevenZipFile(string filePath) => IsSevenZipFile(new FileInfo(filePath));
@@ -135,13 +131,17 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
{
var stream = volumes.Single().Stream;
LoadFactory(stream);
var entries = new SevenZipArchiveEntry[database._files.Count];
for (var i = 0; i < database._files.Count; i++)
if (_database is null)
{
var file = database._files[i];
return Enumerable.Empty<SevenZipArchiveEntry>();
}
var entries = new SevenZipArchiveEntry[_database._files.Count];
for (var i = 0; i < _database._files.Count; i++)
{
var file = _database._files[i];
entries[i] = new SevenZipArchiveEntry(
this,
new SevenZipFilePart(stream, database, i, file, ReaderOptions.ArchiveEncoding)
new SevenZipFilePart(stream, _database, i, file, ReaderOptions.ArchiveEncoding)
);
}
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
@@ -159,12 +159,12 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
private void LoadFactory(Stream stream)
{
if (database is null)
if (_database is null)
{
stream.Position = 0;
var reader = new ArchiveReader();
reader.Open(stream);
database = reader.ReadDatabase(new PasswordProvider(ReaderOptions.Password));
_database = reader.ReadDatabase(new PasswordProvider(ReaderOptions.Password));
}
}
@@ -180,14 +180,14 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
}
}
private static ReadOnlySpan<byte> SIGNATURE =>
private static ReadOnlySpan<byte> Signature =>
new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };
private static bool SignatureMatch(Stream stream)
{
var reader = new BinaryReader(stream);
ReadOnlySpan<byte> signatureBytes = reader.ReadBytes(6);
return signatureBytes.SequenceEqual(SIGNATURE);
return signatureBytes.SequenceEqual(Signature);
}
protected override IReader CreateReaderForSolidExtraction() =>
@@ -196,30 +196,24 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
public override bool IsSolid =>
Entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder).Count() > 1;
public override long TotalSize
{
get
{
var i = Entries.Count;
return database._packSizes.Aggregate(0L, (total, packSize) => total + packSize);
}
}
public override long TotalSize =>
_database?._packSizes.Aggregate(0L, (total, packSize) => total + packSize) ?? 0;
private sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
{
private readonly SevenZipArchive archive;
private CFolder currentFolder;
private Stream currentStream;
private CFileItem currentItem;
private readonly SevenZipArchive _archive;
private CFolder? _currentFolder;
private Stream? _currentStream;
private CFileItem? _currentItem;
internal SevenZipReader(ReaderOptions readerOptions, SevenZipArchive archive)
: base(readerOptions, ArchiveType.SevenZip) => this.archive = archive;
: base(readerOptions, ArchiveType.SevenZip) => this._archive = archive;
public override SevenZipVolume Volume => archive.Volumes.Single();
public override SevenZipVolume Volume => _archive.Volumes.Single();
protected override IEnumerable<SevenZipEntry> GetEntries(Stream stream)
{
var entries = archive.Entries.ToList();
var entries = _archive.Entries.ToList();
stream.Position = 0;
foreach (var dir in entries.Where(x => x.IsDirectory))
{
@@ -229,37 +223,42 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder)
)
{
currentFolder = group.Key;
_currentFolder = group.Key;
if (group.Key is null)
{
currentStream = Stream.Null;
_currentStream = Stream.Null;
}
else
{
currentStream = archive.database.GetFolderStream(
_currentStream = _archive._database?.GetFolderStream(
stream,
currentFolder,
_currentFolder,
new PasswordProvider(Options.Password)
);
}
foreach (var entry in group)
{
currentItem = entry.FilePart.Header;
_currentItem = entry.FilePart.Header;
yield return entry;
}
}
}
protected override EntryStream GetEntryStream() =>
CreateEntryStream(new ReadOnlySubStream(currentStream, currentItem.Size));
CreateEntryStream(
new ReadOnlySubStream(
_currentStream.NotNull("currentStream is not null"),
_currentItem?.Size ?? 0
)
);
}
private class PasswordProvider : IPasswordProvider
{
private readonly string _password;
private readonly string? _password;
public PasswordProvider(string password) => _password = password;
public PasswordProvider(string? password) => _password = password;
public string CryptoGetTextPassword() => _password;
public string? CryptoGetTextPassword() => _password;
}
}

View File

@@ -114,7 +114,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
var tarHeader = new TarHeader(new ArchiveEncoding());
var readSucceeded = tarHeader.Read(new BinaryReader(stream));
var isEmptyArchive =
tarHeader.Name.Length == 0
tarHeader.Name?.Length == 0
&& tarHeader.Size == 0
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
return readSucceeded || isEmptyArchive;
@@ -123,22 +123,20 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
return false;
}
protected override IEnumerable<TarVolume> LoadVolumes(SourceStream srcStream)
protected override IEnumerable<TarVolume> LoadVolumes(SourceStream sourceStream)
{
SrcStream.LoadAllParts(); //request all streams
var idx = 0;
return new TarVolume(srcStream, ReaderOptions, idx++).AsEnumerable(); //simple single volume or split, multivolume not supported
sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams
return new TarVolume(sourceStream, ReaderOptions, 1).AsEnumerable(); //simple single volume or split, multivolume not supported
}
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal TarArchive(SourceStream srcStream)
: base(ArchiveType.Tar, srcStream) { }
/// <param name="sourceStream"></param>
private TarArchive(SourceStream sourceStream)
: base(ArchiveType.Tar, sourceStream) { }
internal TarArchive()
private TarArchive()
: base(ArchiveType.Tar) { }
protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
@@ -192,6 +190,10 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
);
}
}
else
{
throw new IncompleteArchiveException("Failed to read TAR header");
}
}
}
@@ -225,7 +227,12 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
writer.Write(entry.Key, entryStream, entry.LastModifiedTime, entry.Size);
writer.Write(
entry.Key.NotNull("Entry Key is null"),
entryStream,
entry.LastModifiedTime,
entry.Size
);
}
}

View File

@@ -7,7 +7,7 @@ namespace SharpCompress.Archives.Tar;
public class TarArchiveEntry : TarEntry, IArchiveEntry
{
internal TarArchiveEntry(TarArchive archive, TarFilePart part, CompressionType compressionType)
internal TarArchiveEntry(TarArchive archive, TarFilePart? part, CompressionType compressionType)
: base(part, compressionType) => Archive = archive;
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream();

View File

@@ -1,5 +1,3 @@
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;

View File

@@ -16,10 +16,7 @@ namespace SharpCompress.Archives.Zip;
public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
{
#nullable disable
private readonly SeekableZipHeaderFactory headerFactory;
#nullable enable
private readonly SeekableZipHeaderFactory? headerFactory;
/// <summary>
/// Gets or sets the compression level applied to files added to the archive,
@@ -30,13 +27,13 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="srcStream"></param>
/// <param name="sourceStream"></param>
/// <param name="options"></param>
internal ZipArchive(SourceStream srcStream)
: base(ArchiveType.Zip, srcStream) =>
internal ZipArchive(SourceStream sourceStream)
: base(ArchiveType.Zip, sourceStream) =>
headerFactory = new SeekableZipHeaderFactory(
srcStream.ReaderOptions.Password,
srcStream.ReaderOptions.ArchiveEncoding
sourceStream.ReaderOptions.Password,
sourceStream.ReaderOptions.ArchiveEncoding
);
/// <summary>
@@ -189,21 +186,21 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
}
}
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream srcStream)
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream stream)
{
SrcStream.LoadAllParts(); //request all streams
SrcStream.Position = 0;
stream.LoadAllParts(); //request all streams
stream.Position = 0;
var streams = SrcStream.Streams.ToList();
var streams = stream.Streams.ToList();
var idx = 0;
if (streams.Count > 1) //test part 2 - true = multipart not split
if (streams.Count() > 1) //test part 2 - true = multipart not split
{
streams[1].Position += 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
var isZip = IsZipFile(streams[1], ReaderOptions.Password);
streams[1].Position -= 4;
if (isZip)
{
SrcStream.IsVolumes = true;
stream.IsVolumes = true;
var tmp = streams[0]; //arcs as zip, z01 ... swap the zip the end
streams.RemoveAt(0);
@@ -215,7 +212,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
}
//split mode or single file
return new ZipVolume(SrcStream, ReaderOptions, idx++).AsEnumerable();
return new ZipVolume(stream, ReaderOptions, idx++).AsEnumerable();
}
internal ZipArchive()
@@ -224,14 +221,13 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
{
var vols = volumes.ToArray();
foreach (var h in headerFactory.ReadSeekableHeader(vols.Last().Stream))
foreach (var h in headerFactory.NotNull().ReadSeekableHeader(vols.Last().Stream))
{
if (h != null)
{
switch (h.ZipHeaderType)
{
case ZipHeaderType.DirectoryEntry:
{
var deh = (DirectoryEntryHeader)h;
Stream s;
@@ -254,14 +250,14 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
yield return new ZipArchiveEntry(
this,
new SeekableZipFilePart(headerFactory, deh, s)
new SeekableZipFilePart(headerFactory.NotNull(), deh, s)
);
}
break;
case ZipHeaderType.DirectoryEnd:
{
var bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
volumes.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
vols.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
yield break;
}
}
@@ -282,7 +278,11 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
writer.Write(entry.Key, entryStream, entry.LastModifiedTime);
writer.Write(
entry.Key.NotNull("Entry Key is null"),
entryStream,
entry.LastModifiedTime
);
}
}

View File

@@ -8,12 +8,12 @@ public class ArchiveEncoding
/// <summary>
/// Default encoding to use when archive format doesn't specify one.
/// </summary>
public Encoding Default { get; set; }
public Encoding? Default { get; set; }
/// <summary>
/// ArchiveEncoding used by encryption schemes which don't comply with RFC 2898.
/// </summary>
public Encoding Password { get; set; }
public Encoding? Password { get; set; }
/// <summary>
/// Set this encoding when you want to force it for all encoding operations.
@@ -50,6 +50,8 @@ public class ArchiveEncoding
public Encoding GetEncoding() => Forced ?? Default ?? Encoding.UTF8;
public Encoding GetPasswordEncoding() => Password ?? Encoding.UTF8;
public Func<byte[], int, int, string> GetDecoder() =>
CustomDecoder ?? ((bytes, index, count) => GetEncoding().GetString(bytes, index, count));
}

View File

@@ -14,7 +14,7 @@ public abstract class Entry : IEntry
/// <summary>
/// The string key of the file internal to the Archive.
/// </summary>
public abstract string Key { get; }
public abstract string? Key { get; }
/// <summary>
/// The target of a symlink entry internal to the Archive. Will be null if not a symlink.
@@ -71,11 +71,11 @@ public abstract class Entry : IEntry
/// </summary>
public abstract bool IsSplitAfter { get; }
public int VolumeIndexFirst => Parts?.FirstOrDefault()?.Index ?? 0;
public int VolumeIndexLast => Parts?.LastOrDefault()?.Index ?? 0;
public int VolumeIndexFirst => Parts.FirstOrDefault()?.Index ?? 0;
public int VolumeIndexLast => Parts.LastOrDefault()?.Index ?? 0;
/// <inheritdoc/>
public override string ToString() => Key;
public override string ToString() => Key ?? "Entry";
internal abstract IEnumerable<FilePart> Parts { get; }

View File

@@ -36,10 +36,11 @@ internal static class ExtractionMethods
options ??= new ExtractionOptions() { Overwrite = true };
var file = Path.GetFileName(entry.Key);
var file = Path.GetFileName(entry.Key.NotNull("Entry Key is null")).NotNull("File is null");
if (options.ExtractFullPath)
{
var folder = Path.GetDirectoryName(entry.Key)!;
var folder = Path.GetDirectoryName(entry.Key.NotNull("Entry Key is null"))
.NotNull("Directory is null");
var destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));
if (!Directory.Exists(destdir))

View File

@@ -8,7 +8,7 @@ public abstract class FilePart
internal ArchiveEncoding ArchiveEncoding { get; }
internal abstract string FilePartName { get; }
internal abstract string? FilePartName { get; }
public int Index { get; set; }
internal abstract Stream GetCompressedStream();

View File

@@ -6,23 +6,23 @@ namespace SharpCompress.Common.GZip;
public class GZipEntry : Entry
{
private readonly GZipFilePart _filePart;
private readonly GZipFilePart? _filePart;
internal GZipEntry(GZipFilePart filePart) => _filePart = filePart;
internal GZipEntry(GZipFilePart? filePart) => _filePart = filePart;
public override CompressionType CompressionType => CompressionType.GZip;
public override long Crc => _filePart.Crc ?? 0;
public override long Crc => _filePart?.Crc ?? 0;
public override string Key => _filePart.FilePartName;
public override string? Key => _filePart?.FilePartName;
public override string? LinkTarget => null;
public override long CompressedSize => 0;
public override long Size => _filePart.UncompressedSize ?? 0;
public override long Size => _filePart?.UncompressedSize ?? 0;
public override DateTime? LastModifiedTime => _filePart.DateModified;
public override DateTime? LastModifiedTime => _filePart?.DateModified;
public override DateTime? CreatedTime => null;
@@ -36,7 +36,7 @@ public class GZipEntry : Entry
public override bool IsSplitAfter => false;
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
internal static IEnumerable<GZipEntry> GetEntries(Stream stream, OptionsBase options)
{

View File

@@ -34,7 +34,7 @@ internal sealed class GZipFilePart : FilePart
internal uint? Crc { get; private set; }
internal uint? UncompressedSize { get; private set; }
internal override string FilePartName => _name!;
internal override string? FilePartName => _name;
internal override Stream GetCompressedStream() =>
new DeflateStream(_stream, CompressionMode.Decompress, CompressionLevel.Default);

View File

@@ -5,7 +5,7 @@ namespace SharpCompress.Common.GZip;
public class GZipVolume : Volume
{
public GZipVolume(Stream stream, ReaderOptions options, int index = 0)
public GZipVolume(Stream stream, ReaderOptions? options, int index)
: base(stream, options, index) { }
public GZipVolume(FileInfo fileInfo, ReaderOptions options)

View File

@@ -9,7 +9,7 @@ public interface IEntry
long CompressedSize { get; }
long Crc { get; }
DateTime? CreatedTime { get; }
string Key { get; }
string? Key { get; }
string? LinkTarget { get; }
bool IsDirectory { get; }
bool IsEncrypted { get; }

View File

@@ -6,5 +6,5 @@ public interface IVolume : IDisposable
{
int Index { get; }
string FileName { get; }
string? FileName { get; }
}

View File

@@ -121,7 +121,6 @@ internal class FileHeader : RarHeader
switch (type)
{
case FHEXTRA_CRYPT: // file encryption
{
Rar5CryptoInfo = new Rar5CryptoInfo(reader, true);
@@ -132,7 +131,6 @@ internal class FileHeader : RarHeader
}
break;
case FHEXTRA_HASH:
{
const uint FHEXTRA_HASH_BLAKE2 = 0x0;
// const uint HASH_BLAKE2 = 0x03;
@@ -146,7 +144,6 @@ internal class FileHeader : RarHeader
}
break;
case FHEXTRA_HTIME: // file time
{
var flags = reader.ReadRarVIntUInt16();
var isWindowsTime = (flags & 1) == 0;
@@ -171,7 +168,6 @@ internal class FileHeader : RarHeader
// }
// break;
case FHEXTRA_REDIR: // file system redirection
{
RedirType = reader.ReadRarVIntByte();
RedirFlags = reader.ReadRarVIntByte();
@@ -284,7 +280,6 @@ internal class FileHeader : RarHeader
switch (HeaderCode)
{
case HeaderCodeV.RAR4_FILE_HEADER:
{
if (HasFlag(FileFlagsV4.UNICODE))
{
@@ -311,7 +306,6 @@ internal class FileHeader : RarHeader
}
break;
case HeaderCodeV.RAR4_NEW_SUB_HEADER:
{
var datasize = HeaderSize - newLhdSize - nameSize;
if (HasFlag(FileFlagsV4.SALT))

View File

@@ -98,13 +98,11 @@ public class RarHeaderFactory
switch (StreamingMode)
{
case StreamingMode.Seekable:
{
reader.BaseStream.Position += ph.DataSize;
}
break;
case StreamingMode.Streaming:
{
reader.BaseStream.Skip(ph.DataSize);
}
@@ -146,14 +144,12 @@ public class RarHeaderFactory
switch (StreamingMode)
{
case StreamingMode.Seekable:
{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:
{
var ms = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
if (fh.R4Salt is null && fh.Rar5CryptoInfo is null)
@@ -204,14 +200,12 @@ public class RarHeaderFactory
switch (StreamingMode)
{
case StreamingMode.Seekable:
{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:
{
//skip the data because it's useless?
reader.BaseStream.Skip(fh.CompressedSize);

View File

@@ -70,11 +70,11 @@ internal sealed class RarCryptoWrapper : Stream
protected override void Dispose(bool disposing)
{
if (_rijndael != null)
if (disposing)
{
_rijndael.Dispose();
_rijndael = null!;
}
base.Dispose(disposing);
}
}

View File

@@ -25,7 +25,7 @@ public abstract class RarEntry : Entry
/// <summary>
/// The path of the file internal to the Rar Archive.
/// </summary>
public override string Key => FileHeader.FileName;
public override string? Key => FileHeader.FileName;
public override string? LinkTarget => null;

View File

@@ -15,17 +15,14 @@ namespace SharpCompress.Common.Rar;
public abstract class RarVolume : Volume
{
private readonly RarHeaderFactory _headerFactory;
internal int _maxCompressionAlgorithm;
private int _maxCompressionAlgorithm;
internal RarVolume(StreamingMode mode, Stream stream, ReaderOptions options, int index = 0)
internal RarVolume(StreamingMode mode, Stream stream, ReaderOptions options, int index)
: base(stream, options, index) => _headerFactory = new RarHeaderFactory(mode, options);
#nullable disable
internal ArchiveHeader ArchiveHeader { get; private set; }
private ArchiveHeader? ArchiveHeader { get; set; }
#nullable enable
internal StreamingMode Mode => _headerFactory.StreamingMode;
private StreamingMode Mode => _headerFactory.StreamingMode;
internal abstract IEnumerable<RarFilePart> ReadFileParts();
@@ -39,19 +36,16 @@ public abstract class RarVolume : Volume
switch (header.HeaderType)
{
case HeaderType.Mark:
{
lastMarkHeader = (MarkHeader)header;
}
break;
case HeaderType.Archive:
{
ArchiveHeader = (ArchiveHeader)header;
}
break;
case HeaderType.File:
{
var fh = (FileHeader)header;
if (_maxCompressionAlgorithm < fh.CompressionAlgorithm)
@@ -63,7 +57,6 @@ public abstract class RarVolume : Volume
}
break;
case HeaderType.Service:
{
var fh = (FileHeader)header;
if (fh.FileName == "CMT")
@@ -105,7 +98,7 @@ public abstract class RarVolume : Volume
get
{
EnsureArchiveHeaderLoaded();
return ArchiveHeader.IsFirstVolume;
return ArchiveHeader?.IsFirstVolume ?? false;
}
}
@@ -117,7 +110,7 @@ public abstract class RarVolume : Volume
get
{
EnsureArchiveHeaderLoaded();
return ArchiveHeader.IsVolume;
return ArchiveHeader?.IsVolume ?? false;
}
}
@@ -130,7 +123,7 @@ public abstract class RarVolume : Volume
get
{
EnsureArchiveHeaderLoaded();
return ArchiveHeader.IsSolid;
return ArchiveHeader?.IsSolid ?? false;
}
}

View File

@@ -35,7 +35,7 @@ internal class ArchiveDatabase
_packSizes.Clear();
_packCrCs.Clear();
_folders.Clear();
_numUnpackStreamsVector = null!;
_numUnpackStreamsVector = null;
_files.Clear();
_packStreamStartPositions.Clear();

View File

@@ -13,7 +13,7 @@ public class SevenZipEntry : Entry
public override long Crc => FilePart.Header.Crc ?? 0;
public override string Key => FilePart.Header.Name;
public override string? Key => FilePart.Header.Name;
public override string? LinkTarget => null;

View File

@@ -41,7 +41,7 @@ internal class SevenZipFilePart : FilePart
{
if (!Header.HasStream)
{
return null!;
throw new InvalidOperationException("File does not have a stream.");
}
var folderStream = _database.GetFolderStream(_stream, Folder!, _database.PasswordProvider);
@@ -73,34 +73,24 @@ internal class SevenZipFilePart : FilePart
private const uint K_PPMD = 0x030401;
private const uint K_B_ZIP2 = 0x040202;
internal CompressionType GetCompression()
private CompressionType GetCompression()
{
if (Header.IsDir)
return CompressionType.None;
var coder = Folder!._coders.First();
switch (coder._methodId._id)
{
case K_LZMA:
case K_LZMA2:
{
return CompressionType.LZMA;
}
case K_PPMD:
{
return CompressionType.PPMd;
}
case K_B_ZIP2:
{
return CompressionType.BZip2;
}
default:
throw new NotImplementedException();
return CompressionType.None;
}
var coder = Folder.NotNull()._coders.First();
return coder._methodId._id switch
{
K_LZMA or K_LZMA2 => CompressionType.LZMA,
K_PPMD => CompressionType.PPMd,
K_B_ZIP2 => CompressionType.BZip2,
_ => throw new NotImplementedException()
};
}
internal bool IsEncrypted =>
Header.IsDir
? false
: Folder!._coders.FindIndex(c => c._methodId._id == CMethodId.K_AES_ID) != -1;
!Header.IsDir
&& Folder?._coders.FindIndex(c => c._methodId._id == CMethodId.K_AES_ID) != -1;
}

View File

@@ -1,5 +1,3 @@
#nullable disable
using System;
using System.Buffers.Binary;
using System.IO;
@@ -13,8 +11,8 @@ internal sealed class TarHeader
public TarHeader(ArchiveEncoding archiveEncoding) => ArchiveEncoding = archiveEncoding;
internal string Name { get; set; }
internal string LinkName { get; set; }
internal string? Name { get; set; }
internal string? LinkName { get; set; }
internal long Mode { get; set; }
internal long UserId { get; set; }
@@ -22,7 +20,7 @@ internal sealed class TarHeader
internal long Size { get; set; }
internal DateTime LastModifiedTime { get; set; }
internal EntryType EntryType { get; set; }
internal Stream PackedStream { get; set; }
internal Stream? PackedStream { get; set; }
internal ArchiveEncoding ArchiveEncoding { get; }
internal const int BLOCK_SIZE = 512;
@@ -36,7 +34,9 @@ internal sealed class TarHeader
WriteOctalBytes(0, buffer, 116, 8); // group ID
//ArchiveEncoding.UTF8.GetBytes("magic").CopyTo(buffer, 257);
var nameByteCount = ArchiveEncoding.GetEncoding().GetByteCount(Name);
var nameByteCount = ArchiveEncoding
.GetEncoding()
.GetByteCount(Name.NotNull("Name is null"));
if (nameByteCount > 100)
{
// Set mock filename and filetype to indicate the next block is the actual name of the file
@@ -46,7 +46,7 @@ internal sealed class TarHeader
}
else
{
WriteStringBytes(ArchiveEncoding.Encode(Name), buffer, 100);
WriteStringBytes(ArchiveEncoding.Encode(Name.NotNull("Name is null")), buffer, 100);
WriteOctalBytes(Size, buffer, 124, 12);
var time = (long)(LastModifiedTime.ToUniversalTime() - EPOCH).TotalSeconds;
WriteOctalBytes(time, buffer, 136, 12);
@@ -77,7 +77,7 @@ internal sealed class TarHeader
//
// and then infinite recursion is occured in WriteLongFilenameHeader because truncated.Length is 102.
Name = ArchiveEncoding.Decode(
ArchiveEncoding.Encode(Name),
ArchiveEncoding.Encode(Name.NotNull("Name is null")),
0,
100 - ArchiveEncoding.GetEncoding().GetMaxByteCount(1)
);
@@ -87,7 +87,7 @@ internal sealed class TarHeader
private void WriteLongFilenameHeader(Stream output)
{
var nameBytes = ArchiveEncoding.Encode(Name);
var nameBytes = ArchiveEncoding.Encode(Name.NotNull("Name is null"));
output.Write(nameBytes, 0, nameBytes.Length);
// pad to multiple of BlockSize bytes, and make sure a terminating null is added
@@ -323,5 +323,5 @@ internal sealed class TarHeader
public long? DataStartPosition { get; set; }
public string Magic { get; set; }
public string? Magic { get; set; }
}

View File

@@ -1,5 +1,3 @@
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
@@ -10,9 +8,9 @@ namespace SharpCompress.Common.Tar;
public class TarEntry : Entry
{
private readonly TarFilePart _filePart;
private readonly TarFilePart? _filePart;
internal TarEntry(TarFilePart filePart, CompressionType type)
internal TarEntry(TarFilePart? filePart, CompressionType type)
{
_filePart = filePart;
CompressionType = type;
@@ -22,15 +20,15 @@ public class TarEntry : Entry
public override long Crc => 0;
public override string Key => _filePart.Header.Name;
public override string? Key => _filePart?.Header.Name;
public override string LinkTarget => _filePart.Header.LinkName;
public override string? LinkTarget => _filePart?.Header.LinkName;
public override long CompressedSize => _filePart.Header.Size;
public override long CompressedSize => _filePart?.Header.Size ?? 0;
public override long Size => _filePart.Header.Size;
public override long Size => _filePart?.Header.Size ?? 0;
public override DateTime? LastModifiedTime => _filePart.Header.LastModifiedTime;
public override DateTime? LastModifiedTime => _filePart?.Header.LastModifiedTime;
public override DateTime? CreatedTime => null;
@@ -40,17 +38,17 @@ public class TarEntry : Entry
public override bool IsEncrypted => false;
public override bool IsDirectory => _filePart.Header.EntryType == EntryType.Directory;
public override bool IsDirectory => _filePart?.Header.EntryType == EntryType.Directory;
public override bool IsSplitAfter => false;
public long Mode => _filePart.Header.Mode;
public long Mode => _filePart?.Header.Mode ?? 0;
public long UserID => _filePart.Header.UserId;
public long UserID => _filePart?.Header.UserId ?? 0;
public long GroupId => _filePart.Header.GroupId;
public long GroupId => _filePart?.Header.GroupId ?? 0;
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
internal static IEnumerable<TarEntry> GetEntries(
StreamingMode mode,
@@ -59,17 +57,17 @@ public class TarEntry : Entry
ArchiveEncoding archiveEncoding
)
{
foreach (var h in TarHeaderFactory.ReadHeader(mode, stream, archiveEncoding))
foreach (var header in TarHeaderFactory.ReadHeader(mode, stream, archiveEncoding))
{
if (h != null)
if (header != null)
{
if (mode == StreamingMode.Seekable)
{
yield return new TarEntry(new TarFilePart(h, stream), compressionType);
yield return new TarEntry(new TarFilePart(header, stream), compressionType);
}
else
{
yield return new TarEntry(new TarFilePart(h, null), compressionType);
yield return new TarEntry(new TarFilePart(header, null), compressionType);
}
}
else

View File

@@ -5,9 +5,9 @@ namespace SharpCompress.Common.Tar;
internal sealed class TarFilePart : FilePart
{
private readonly Stream _seekableStream;
private readonly Stream? _seekableStream;
internal TarFilePart(TarHeader header, Stream seekableStream)
internal TarFilePart(TarHeader header, Stream? seekableStream)
: base(header.ArchiveEncoding)
{
_seekableStream = seekableStream;
@@ -16,16 +16,16 @@ internal sealed class TarFilePart : FilePart
internal TarHeader Header { get; }
internal override string FilePartName => Header.Name;
internal override string? FilePartName => Header?.Name;
internal override Stream GetCompressedStream()
{
if (_seekableStream != null)
{
_seekableStream.Position = Header.DataStartPosition!.Value;
_seekableStream.Position = Header.DataStartPosition ?? 0;
return new TarReadOnlySubStream(_seekableStream, Header.Size);
}
return Header.PackedStream;
return Header.PackedStream.NotNull();
}
internal override Stream? GetRawStream() => null;

View File

@@ -28,7 +28,6 @@ internal static class TarHeaderFactory
switch (mode)
{
case StreamingMode.Seekable:
{
header.DataStartPosition = reader.BaseStream.Position;
@@ -37,7 +36,6 @@ internal static class TarHeaderFactory
}
break;
case StreamingMode.Streaming:
{
header.PackedStream = new TarReadOnlySubStream(stream, header.Size);
}

View File

@@ -9,11 +9,11 @@ public abstract class Volume : IVolume
{
private readonly Stream _actualStream;
internal Volume(Stream stream, ReaderOptions readerOptions, int index = 0)
internal Volume(Stream stream, ReaderOptions? readerOptions, int index = 0)
{
Index = index;
ReaderOptions = readerOptions;
if (readerOptions.LeaveStreamOpen)
ReaderOptions = readerOptions ?? new ReaderOptions();
if (ReaderOptions.LeaveStreamOpen)
{
stream = NonDisposingStream.Create(stream);
}
@@ -32,7 +32,7 @@ public abstract class Volume : IVolume
public virtual int Index { get; internal set; }
public string FileName => (_actualStream as FileStream)?.Name!;
public string? FileName => (_actualStream as FileStream)?.Name;
/// <summary>
/// RarArchive is part of a multi-part archive.

View File

@@ -1,5 +1,3 @@
#nullable disable
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
@@ -20,21 +18,21 @@ internal abstract class ZipFileEntry : ZipHeader
{
get
{
if (Name.EndsWith('/'))
if (Name?.EndsWith('/') ?? false)
{
return true;
}
//.NET Framework 4.5 : System.IO.Compression::CreateFromDirectory() probably writes backslashes to headers
return CompressedSize == 0 && UncompressedSize == 0 && Name.EndsWith('\\');
return CompressedSize == 0 && UncompressedSize == 0 && (Name?.EndsWith('\\') ?? false);
}
}
internal Stream PackedStream { get; set; }
internal Stream? PackedStream { get; set; }
internal ArchiveEncoding ArchiveEncoding { get; }
internal string Name { get; set; }
internal string? Name { get; set; }
internal HeaderFlags Flags { get; set; }
@@ -48,7 +46,7 @@ internal abstract class ZipFileEntry : ZipHeader
internal List<ExtraData> Extra { get; set; }
public string Password { get; set; }
public string? Password { get; set; }
internal PkwareTraditionalEncryptionData ComposeEncryptionData(Stream archiveStream)
{
@@ -65,7 +63,7 @@ internal abstract class ZipFileEntry : ZipHeader
return encryptionData;
}
internal WinzipAesEncryptionData WinzipAesEncryptionData { get; set; }
internal WinzipAesEncryptionData? WinzipAesEncryptionData { get; set; }
/// <summary>
/// The last modified date as read from the Local or Central Directory header.
@@ -119,7 +117,7 @@ internal abstract class ZipFileEntry : ZipHeader
}
}
internal ZipFilePart Part { get; set; }
internal ZipFilePart? Part { get; set; }
internal bool IsZip64 => CompressedSize >= uint.MaxValue;
}

View File

@@ -103,7 +103,7 @@ internal class PkwareTraditionalEncryptionData
internal byte[] StringToByteArray(string value)
{
var a = _archiveEncoding.Password.GetBytes(value);
var a = _archiveEncoding.GetPasswordEncoding().GetBytes(value);
return a;
}

View File

@@ -42,16 +42,16 @@ internal class SeekableZipFilePart : ZipFilePart
protected override Stream CreateBaseStream()
{
BaseStream.Position = Header.DataStartPosition!.Value;
BaseStream.Position = Header.DataStartPosition.NotNull();
if (
(Header.CompressedSize == 0)
&& FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor)
&& (_directoryEntryHeader?.HasData == true)
&& (_directoryEntryHeader?.CompressedSize != 0)
&& _directoryEntryHeader.HasData
&& (_directoryEntryHeader.CompressedSize != 0)
)
{
return new ReadOnlySubStream(BaseStream, _directoryEntryHeader!.CompressedSize);
return new ReadOnlySubStream(BaseStream, _directoryEntryHeader.CompressedSize);
}
return BaseStream;

View File

@@ -13,7 +13,7 @@ internal sealed class StreamingZipFilePart : ZipFilePart
internal StreamingZipFilePart(ZipFileEntry header, Stream stream)
: base(header, stream) { }
protected override Stream CreateBaseStream() => Header.PackedStream;
protected override Stream CreateBaseStream() => Header.PackedStream.NotNull();
internal override Stream GetCompressedStream()
{

View File

@@ -42,6 +42,10 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
)
)
{
if (_lastEntryHeader.Part is null)
{
continue;
}
reader = ((StreamingZipFilePart)_lastEntryHeader.Part).FixStreamedFileLocation(
ref rewindableStream
);

View File

@@ -1,5 +1,3 @@
#nullable disable
using System;
using System.Buffers.Binary;
using System.Security.Cryptography;
@@ -10,12 +8,7 @@ internal class WinzipAesEncryptionData
{
private const int RFC2898_ITERATIONS = 1000;
private readonly byte[] _salt;
private readonly WinzipAesKeySize _keySize;
private readonly byte[] _passwordVerifyValue;
private readonly string _password;
private byte[] _generatedVerifyValue;
internal WinzipAesEncryptionData(
WinzipAesKeySize keySize,
@@ -25,10 +18,28 @@ internal class WinzipAesEncryptionData
)
{
_keySize = keySize;
_salt = salt;
_passwordVerifyValue = passwordVerifyValue;
_password = password;
Initialize();
#if NETFRAMEWORK || NETSTANDARD2_0
var rfc2898 = new Rfc2898DeriveBytes(password, salt, RFC2898_ITERATIONS);
#else
var rfc2898 = new Rfc2898DeriveBytes(
password,
salt,
RFC2898_ITERATIONS,
HashAlgorithmName.SHA1
);
#endif
KeyBytes = rfc2898.GetBytes(KeySizeInBytes); // 16 or 24 or 32 ???
IvBytes = rfc2898.GetBytes(KeySizeInBytes);
var generatedVerifyValue = rfc2898.GetBytes(2);
var verify = BinaryPrimitives.ReadInt16LittleEndian(passwordVerifyValue);
var generated = BinaryPrimitives.ReadInt16LittleEndian(generatedVerifyValue);
if (verify != generated)
{
throw new InvalidFormatException("bad password");
}
}
internal byte[] IvBytes { get; set; }
@@ -45,32 +56,4 @@ internal class WinzipAesEncryptionData
WinzipAesKeySize.KeySize256 => 32,
_ => throw new InvalidOperationException(),
};
private void Initialize()
{
#if NETFRAMEWORK || NETSTANDARD2_0
var rfc2898 = new Rfc2898DeriveBytes(_password, _salt, RFC2898_ITERATIONS);
#else
var rfc2898 = new Rfc2898DeriveBytes(
_password,
_salt,
RFC2898_ITERATIONS,
HashAlgorithmName.SHA1
);
#endif
KeyBytes = rfc2898.GetBytes(KeySizeInBytes); // 16 or 24 or 32 ???
IvBytes = rfc2898.GetBytes(KeySizeInBytes);
_generatedVerifyValue = rfc2898.GetBytes(2);
var verify = BinaryPrimitives.ReadInt16LittleEndian(_passwordVerifyValue);
if (_password != null)
{
var generated = BinaryPrimitives.ReadInt16LittleEndian(_generatedVerifyValue);
if (verify != generated)
{
throw new InvalidFormatException("bad password");
}
}
}
}

View File

@@ -1,5 +1,3 @@
#nullable disable
using System;
using System.Collections.Generic;
using SharpCompress.Common.Zip.Headers;
@@ -8,22 +6,23 @@ namespace SharpCompress.Common.Zip;
public class ZipEntry : Entry
{
private readonly ZipFilePart _filePart;
private readonly ZipFilePart? _filePart;
internal ZipEntry(ZipFilePart filePart)
internal ZipEntry(ZipFilePart? filePart)
{
if (filePart != null)
if (filePart == null)
{
_filePart = filePart;
LastModifiedTime = Utility.DosDateToDateTime(
filePart.Header.LastModifiedDate,
filePart.Header.LastModifiedTime
);
return;
}
_filePart = filePart;
LastModifiedTime = Utility.DosDateToDateTime(
filePart.Header.LastModifiedDate,
filePart.Header.LastModifiedTime
);
}
public override CompressionType CompressionType =>
_filePart.Header.CompressionMethod switch
_filePart?.Header.CompressionMethod switch
{
ZipCompressionMethod.BZip2 => CompressionType.BZip2,
ZipCompressionMethod.Deflate => CompressionType.Deflate,
@@ -35,15 +34,15 @@ public class ZipEntry : Entry
_ => CompressionType.Unknown
};
public override long Crc => _filePart.Header.Crc;
public override long Crc => _filePart?.Header.Crc ?? 0;
public override string Key => _filePart.Header.Name;
public override string? Key => _filePart?.Header.Name;
public override string LinkTarget => null;
public override string? LinkTarget => null;
public override long CompressedSize => _filePart.Header.CompressedSize;
public override long CompressedSize => _filePart?.Header.CompressedSize ?? 0;
public override long Size => _filePart.Header.UncompressedSize;
public override long Size => _filePart?.Header.UncompressedSize ?? 0;
public override DateTime? LastModifiedTime { get; }
@@ -54,11 +53,11 @@ public class ZipEntry : Entry
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted =>
FlagUtility.HasFlag(_filePart.Header.Flags, HeaderFlags.Encrypted);
FlagUtility.HasFlag(_filePart?.Header.Flags ?? HeaderFlags.None, HeaderFlags.Encrypted);
public override bool IsDirectory => _filePart.Header.IsDirectory;
public override bool IsDirectory => _filePart?.Header.IsDirectory ?? false;
public override bool IsSplitAfter => false;
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
}

View File

@@ -29,7 +29,7 @@ internal abstract class ZipFilePart : FilePart
internal Stream BaseStream { get; }
internal ZipFileEntry Header { get; set; }
internal override string FilePartName => Header.Name;
internal override string? FilePartName => Header.Name;
internal override Stream GetCompressedStream()
{

View File

@@ -55,7 +55,13 @@ internal class ZipHeaderFactory
}
case POST_DATA_DESCRIPTOR:
{
if (FlagUtility.HasFlag(_lastEntryHeader!.Flags, HeaderFlags.UsePostDataDescriptor))
if (
FlagUtility.HasFlag(
_lastEntryHeader.NotNull().Flags,
HeaderFlags.UsePostDataDescriptor
)
&& _lastEntryHeader != null
)
{
_lastEntryHeader.Crc = reader.ReadUInt32();
_lastEntryHeader.CompressedSize = zip64

View File

@@ -69,7 +69,7 @@ public sealed class BZip2Stream : Stream
public override void SetLength(long value) => stream.SetLength(value);
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !NETFRAMEWORK&& !NETSTANDARD2_0
public override int Read(Span<byte> buffer) => stream.Read(buffer);

View File

@@ -0,0 +1,63 @@
using System;
using System.Buffers.Binary;
using System.IO;
namespace SharpCompress.Compressors.Filters;
internal class BCJFilterARM64 : Filter
{
private int _pos;
public BCJFilterARM64(bool isEncoder, Stream baseStream)
: base(isEncoder, baseStream, 8) => _pos = 0;
protected override int Transform(byte[] buffer, int offset, int count)
{
var end = offset + count - 4;
int i;
for (i = offset; i <= end; i += 4)
{
uint pc = (uint)(_pos + i - offset);
uint instr = BinaryPrimitives.ReadUInt32LittleEndian(
new ReadOnlySpan<byte>(buffer, i, 4)
);
if ((instr >> 26) == 0x25)
{
uint src = instr;
instr = 0x94000000;
pc >>= 2;
if (!_isEncoder)
pc = 0U - pc;
instr |= (src + pc) & 0x03FFFFFF;
BinaryPrimitives.WriteUInt32LittleEndian(new Span<byte>(buffer, i, 4), instr);
}
else if ((instr & 0x9F000000) == 0x90000000)
{
uint src = ((instr >> 29) & 3) | ((instr >> 3) & 0x001FFFFC);
if (((src + 0x00020000) & 0x001C0000) != 0)
continue;
instr &= 0x9000001F;
pc >>= 12;
if (!_isEncoder)
pc = 0U - pc;
uint dest = src + pc;
instr |= (dest & 3) << 29;
instr |= (dest & 0x0003FFFC) << 3;
instr |= (0U - (dest & 0x00020000)) & 0x00E00000;
BinaryPrimitives.WriteUInt32LittleEndian(new Span<byte>(buffer, i, 4), instr);
}
}
i -= offset;
_pos += i;
return i;
}
}

View File

@@ -0,0 +1,210 @@
using System;
using System.Buffers.Binary;
using System.IO;
namespace SharpCompress.Compressors.Filters;
internal class BCJFilterRISCV : Filter
{
private int _pos;
public BCJFilterRISCV(bool isEncoder, Stream baseStream)
: base(isEncoder, baseStream, 8) => _pos = 0;
private int Decode(byte[] buffer, int offset, int count)
{
if (count < 8)
{
return 0;
}
var end = offset + count - 8;
int i;
for (i = offset; i <= end; i += 2)
{
uint inst = buffer[i];
if (inst == 0xEF)
{
uint b1 = buffer[i + 1];
if ((b1 & 0x0D) != 0)
continue;
uint b2 = buffer[i + 2];
uint b3 = buffer[i + 3];
uint pc = (uint)(_pos + i);
uint addr = ((b1 & 0xF0) << 13) | (b2 << 9) | (b3 << 1);
addr -= pc;
buffer[i + 1] = (byte)((b1 & 0x0F) | ((addr >> 8) & 0xF0));
buffer[i + 2] = (byte)(
((addr >> 16) & 0x0F) | ((addr >> 7) & 0x10) | ((addr << 4) & 0xE0)
);
buffer[i + 3] = (byte)(((addr >> 4) & 0x7F) | ((addr >> 13) & 0x80));
i += 4 - 2;
}
else if ((inst & 0x7F) == 0x17)
{
uint inst2 = 0;
inst |= (uint)buffer[i + 1] << 8;
inst |= (uint)buffer[i + 2] << 16;
inst |= (uint)buffer[i + 3] << 24;
if ((inst & 0xE80) != 0)
{
inst2 = BinaryPrimitives.ReadUInt32LittleEndian(
new ReadOnlySpan<byte>(buffer, i + 4, 4)
);
if (((((inst) << 8) ^ (inst2)) & 0xF8003) != 3)
{
i += 6 - 2;
continue;
}
uint addr = inst & 0xFFFFF000;
addr += inst2 >> 20;
inst = 0x17 | (2 << 7) | (inst2 << 12);
inst2 = addr;
}
else
{
uint inst2_rs1 = inst >> 27;
if ((uint)(((inst) - 0x3117) << 18) >= ((inst2_rs1) & 0x1D))
{
i += 4 - 2;
continue;
}
uint addr = BinaryPrimitives.ReadUInt32BigEndian(
new ReadOnlySpan<byte>(buffer, i + 4, 4)
);
addr -= (uint)(_pos + i);
inst2 = (inst >> 12) | (addr << 20);
inst = 0x17 | (inst2_rs1 << 7) | ((addr + 0x800) & 0xFFFFF000);
}
BinaryPrimitives.WriteUInt32LittleEndian(new Span<byte>(buffer, i, 4), inst);
BinaryPrimitives.WriteUInt32LittleEndian(new Span<byte>(buffer, i + 4, 4), inst2);
i += 8 - 2;
}
}
i -= offset;
_pos += i;
return i;
}
private int Encode(byte[] buffer, int offset, int count)
{
if (count < 8)
{
return 0;
}
var end = offset + count - 8;
int i;
for (i = offset; i <= end; i += 2)
{
uint inst = buffer[i];
if (inst == 0xEF)
{
uint b1 = buffer[i + 1];
if ((b1 & 0x0D) != 0)
continue;
uint b2 = buffer[i + 2];
uint b3 = buffer[i + 3];
uint pc = (uint)(_pos + i);
uint addr =
((b1 & 0xF0) << 8)
| ((b2 & 0x0F) << 16)
| ((b2 & 0x10) << 7)
| ((b2 & 0xE0) >> 4)
| ((b3 & 0x7F) << 4)
| ((b3 & 0x80) << 13);
addr += pc;
buffer[i + 1] = (byte)((b1 & 0x0F) | ((addr >> 13) & 0xF0));
buffer[i + 2] = (byte)(addr >> 9);
buffer[i + 3] = (byte)(addr >> 1);
i += 4 - 2;
}
else if ((inst & 0x7F) == 0x17)
{
inst |= (uint)buffer[i + 1] << 8;
inst |= (uint)buffer[i + 2] << 16;
inst |= (uint)buffer[i + 3] << 24;
if ((inst & 0xE80) != 0)
{
uint inst2 = BinaryPrimitives.ReadUInt32LittleEndian(
new ReadOnlySpan<byte>(buffer, i + 4, 4)
);
if (((((inst) << 8) ^ (inst2)) & 0xF8003) != 3)
{
i += 6 - 2;
continue;
}
uint addr = inst & 0xFFFFF000;
addr += (inst2 >> 20) - ((inst2 >> 19) & 0x1000);
addr += (uint)(_pos + i);
inst = 0x17 | (2 << 7) | (inst2 << 12);
BinaryPrimitives.WriteUInt32LittleEndian(new Span<byte>(buffer, i, 4), inst);
BinaryPrimitives.WriteUInt32BigEndian(new Span<byte>(buffer, i + 4, 4), addr);
}
else
{
uint fake_rs1 = inst >> 27;
if ((uint)(((inst) - 0x3117) << 18) >= ((fake_rs1) & 0x1D))
{
i += 4 - 2;
continue;
}
uint fake_addr = BinaryPrimitives.ReadUInt32LittleEndian(
new ReadOnlySpan<byte>(buffer, i + 4, 4)
);
uint fake_inst2 = (inst >> 12) | (fake_addr << 20);
inst = 0x17 | (fake_rs1 << 7) | (fake_addr & 0xFFFFF000);
BinaryPrimitives.WriteUInt32LittleEndian(new Span<byte>(buffer, i, 4), inst);
BinaryPrimitives.WriteUInt32LittleEndian(
new Span<byte>(buffer, i + 4, 4),
fake_inst2
);
}
i += 8 - 2;
}
}
i -= offset;
_pos += i;
return i;
}
protected override int Transform(byte[] buffer, int offset, int count)
{
if (_isEncoder)
{
return Encode(buffer, offset, count);
}
else
{
return Decode(buffer, offset, count);
}
}
}

View File

@@ -20,7 +20,8 @@ internal sealed class AesDecoderStream : DecoderStream2
public AesDecoderStream(Stream input, byte[] info, IPasswordProvider pass, long limit)
{
if (pass.CryptoGetTextPassword() == null)
var password = pass.CryptoGetTextPassword();
if (password == null)
{
throw new SharpCompress.Common.CryptographicException(
"Encrypted 7Zip archive has no password specified."
@@ -37,8 +38,8 @@ internal sealed class AesDecoderStream : DecoderStream2
Init(info, out var numCyclesPower, out var salt, out var seed);
var password = Encoding.Unicode.GetBytes(pass.CryptoGetTextPassword());
var key = InitKey(numCyclesPower, salt, password);
var passwordBytes = Encoding.Unicode.GetBytes(password);
var key = InitKey(numCyclesPower, salt, passwordBytes);
if (key == null)
{
throw new InvalidOperationException("Initialized with null key");
@@ -207,28 +208,6 @@ internal sealed class AesDecoderStream : DecoderStream2
}
else
{
#if NETSTANDARD2_0
using var sha = IncrementalHash.CreateHash(HashAlgorithmName.SHA256);
var counter = new byte[8];
var numRounds = 1L << mNumCyclesPower;
for (long round = 0; round < numRounds; round++)
{
sha.AppendData(salt, 0, salt.Length);
sha.AppendData(pass, 0, pass.Length);
sha.AppendData(counter, 0, 8);
// This mirrors the counter so we don't have to convert long to byte[] each round.
// (It also ensures the counter is little endian, which BitConverter does not.)
for (var i = 0; i < 8; i++)
{
if (++counter[i] != 0)
{
break;
}
}
}
return sha.GetHashAndReset();
#else
using var sha = SHA256.Create();
var counter = new byte[8];
var numRounds = 1L << mNumCyclesPower;
@@ -251,7 +230,6 @@ internal sealed class AesDecoderStream : DecoderStream2
sha.TransformFinalBlock(counter, 0, 0);
return sha.Hash;
#endif
}
}

View File

@@ -63,18 +63,18 @@ public sealed class LZipStream : Stream
var crc32Stream = (Crc32Stream)_stream;
crc32Stream.WrappedStream.Dispose();
crc32Stream.Dispose();
var compressedCount = _countingWritableSubStream!.Count;
var compressedCount = _countingWritableSubStream.NotNull().Count;
Span<byte> intBuf = stackalloc byte[8];
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, crc32Stream.Crc);
_countingWritableSubStream.Write(intBuf.Slice(0, 4));
_countingWritableSubStream?.Write(intBuf.Slice(0, 4));
BinaryPrimitives.WriteInt64LittleEndian(intBuf, _writeCount);
_countingWritableSubStream.Write(intBuf);
_countingWritableSubStream?.Write(intBuf);
//total with headers
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, compressedCount + 6 + 20);
_countingWritableSubStream.Write(intBuf);
_countingWritableSubStream?.Write(intBuf);
}
_finished = true;
}

View File

@@ -25,6 +25,8 @@ internal static class DecoderRegistry
private const uint K_ARM = 0x03030501;
private const uint K_ARMT = 0x03030701;
private const uint K_SPARC = 0x03030805;
private const uint K_ARM64 = 0x0A;
private const uint K_RISCV = 0x0B;
private const uint K_DEFLATE = 0x040108;
private const uint K_B_ZIP2 = 0x040202;
private const uint K_ZSTD = 0x4F71101;
@@ -66,6 +68,10 @@ internal static class DecoderRegistry
return new BCJFilterARMT(false, inStreams.Single());
case K_SPARC:
return new BCJFilterSPARC(false, inStreams.Single());
case K_ARM64:
return new BCJFilterARM64(false, inStreams.Single());
case K_RISCV:
return new BCJFilterRISCV(false, inStreams.Single());
case K_B_ZIP2:
return new BZip2Stream(inStreams.Single(), CompressionMode.Decompress, true);
case K_PPMD:

View File

@@ -2,5 +2,5 @@ namespace SharpCompress.Compressors.LZMA.Utilites;
internal interface IPasswordProvider
{
string CryptoGetTextPassword();
string? CryptoGetTextPassword();
}

View File

@@ -530,7 +530,6 @@ internal partial class Unpack
{
case FILTER_E8:
case FILTER_E8E9:
{
var FileOffset = (uint)WrittenFileSize;
@@ -569,7 +568,6 @@ internal partial class Unpack
}
return SrcData;
case FILTER_ARM:
{
var FileOffset = (uint)WrittenFileSize;
// DataSize is unsigned, so we use "CurPos+3" and not "DataSize-3"

View File

@@ -228,7 +228,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_CMP:
{
var value1 = (VMFlags)GetValue(cmd.IsByteMode, Mem, op1);
var result = value1 - GetValue(cmd.IsByteMode, Mem, op2);
@@ -247,7 +246,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_CMPB:
{
var value1 = (VMFlags)GetValue(true, Mem, op1);
var result = value1 - GetValue(true, Mem, op2);
@@ -265,7 +263,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_CMPD:
{
var value1 = (VMFlags)GetValue(false, Mem, op1);
var result = value1 - GetValue(false, Mem, op2);
@@ -283,7 +280,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_ADD:
{
var value1 = GetValue(cmd.IsByteMode, Mem, op1);
var result = (int)(
@@ -351,7 +347,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_SUB:
{
var value1 = GetValue(cmd.IsByteMode, Mem, op1);
var result = (int)(
@@ -411,7 +406,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_INC:
{
var result = (int)(GetValue(cmd.IsByteMode, Mem, op1) & (0xFFffFFffL + 1L));
if (cmd.IsByteMode)
@@ -440,7 +434,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_DEC:
{
var result = (int)(GetValue(cmd.IsByteMode, Mem, op1) & (0xFFffFFff - 1));
SetValue(cmd.IsByteMode, Mem, op1, result);
@@ -463,7 +456,6 @@ internal sealed class RarVM : BitInput
continue;
case VMCommands.VM_XOR:
{
var result =
GetValue(cmd.IsByteMode, Mem, op1) ^ GetValue(cmd.IsByteMode, Mem, op2);
@@ -475,7 +467,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_AND:
{
var result =
GetValue(cmd.IsByteMode, Mem, op1) & GetValue(cmd.IsByteMode, Mem, op2);
@@ -487,7 +478,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_OR:
{
var result =
GetValue(cmd.IsByteMode, Mem, op1) | GetValue(cmd.IsByteMode, Mem, op2);
@@ -499,7 +489,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_TEST:
{
var result =
GetValue(cmd.IsByteMode, Mem, op1) & GetValue(cmd.IsByteMode, Mem, op2);
@@ -578,7 +567,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_SHL:
{
var value1 = GetValue(cmd.IsByteMode, Mem, op1);
var value2 = GetValue(cmd.IsByteMode, Mem, op2);
@@ -596,7 +584,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_SHR:
{
var value1 = GetValue(cmd.IsByteMode, Mem, op1);
var value2 = GetValue(cmd.IsByteMode, Mem, op2);
@@ -610,7 +597,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_SAR:
{
var value1 = GetValue(cmd.IsByteMode, Mem, op1);
var value2 = GetValue(cmd.IsByteMode, Mem, op2);
@@ -624,7 +610,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_NEG:
{
var result = -GetValue(cmd.IsByteMode, Mem, op1);
flags = (VMFlags)(
@@ -645,7 +630,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_PUSHA:
{
for (int i = 0, SP = R[7] - 4; i < regCount; i++, SP -= 4)
{
@@ -656,7 +640,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_POPA:
{
for (int i = 0, SP = R[7]; i < regCount; i++, SP += 4)
{
@@ -684,7 +667,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_XCHG:
{
var value1 = GetValue(cmd.IsByteMode, Mem, op1);
SetValue(cmd.IsByteMode, Mem, op1, GetValue(cmd.IsByteMode, Mem, op2));
@@ -693,7 +675,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_MUL:
{
var result = (int)(
(
@@ -707,7 +688,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_DIV:
{
var divider = GetValue(cmd.IsByteMode, Mem, op2);
if (divider != 0)
@@ -719,7 +699,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_ADC:
{
var value1 = GetValue(cmd.IsByteMode, Mem, op1);
var FC = (int)(flags & VMFlags.VM_FC);
@@ -749,7 +728,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_SBB:
{
var value1 = GetValue(cmd.IsByteMode, Mem, op1);
var FC = (int)(flags & VMFlags.VM_FC);
@@ -1156,7 +1134,6 @@ internal sealed class RarVM : BitInput
{
case VMStandardFilters.VMSF_E8:
case VMStandardFilters.VMSF_E8E9:
{
var dataSize = R[4];
long fileOffset = R[6] & unchecked((int)0xFFffFFff);
@@ -1211,7 +1188,6 @@ internal sealed class RarVM : BitInput
break;
case VMStandardFilters.VMSF_ITANIUM:
{
var dataSize = R[4];
long fileOffset = R[6] & unchecked((int)0xFFffFFff);
@@ -1269,7 +1245,6 @@ internal sealed class RarVM : BitInput
break;
case VMStandardFilters.VMSF_DELTA:
{
var dataSize = R[4] & unchecked((int)0xFFffFFff);
var channels = R[0] & unchecked((int)0xFFffFFff);
@@ -1300,7 +1275,6 @@ internal sealed class RarVM : BitInput
break;
case VMStandardFilters.VMSF_RGB:
{
// byte *SrcData=Mem,*DestData=SrcData+DataSize;
int dataSize = R[4],
@@ -1366,7 +1340,6 @@ internal sealed class RarVM : BitInput
break;
case VMStandardFilters.VMSF_AUDIO:
{
int dataSize = R[4],
channels = R[0];
@@ -1497,7 +1470,6 @@ internal sealed class RarVM : BitInput
break;
case VMStandardFilters.VMSF_UPCASE:
{
int dataSize = R[4],
srcPos = 0,

View File

@@ -11,8 +11,8 @@ public class SourceStream : Stream
private long _prevSize;
private readonly List<FileInfo> _files;
private readonly List<Stream> _streams;
private readonly Func<int, FileInfo?> _getFilePart;
private readonly Func<int, Stream?> _getStreamPart;
private readonly Func<int, FileInfo?>? _getFilePart;
private readonly Func<int, Stream?>? _getStreamPart;
private int _stream;
public SourceStream(FileInfo file, Func<int, FileInfo?> getPart, ReaderOptions options)
@@ -38,8 +38,8 @@ public class SourceStream : Stream
if (!IsFileMode)
{
_streams.Add(stream!);
_getStreamPart = getStreamPart!;
_getFilePart = _ => null!;
_getStreamPart = getStreamPart;
_getFilePart = _ => null;
if (stream is FileStream fileStream)
{
_files.Add(new FileInfo(fileStream.Name));
@@ -49,8 +49,8 @@ public class SourceStream : Stream
{
_files.Add(file!);
_streams.Add(_files[0].OpenRead());
_getFilePart = getFilePart!;
_getStreamPart = _ => null!;
_getFilePart = getFilePart;
_getStreamPart = _ => null;
}
_stream = 0;
_prevSize = 0;
@@ -78,7 +78,7 @@ public class SourceStream : Stream
{
if (IsFileMode)
{
var f = _getFilePart(_streams.Count);
var f = _getFilePart.NotNull("GetFilePart is null")(_streams.Count);
if (f == null)
{
_stream = _streams.Count - 1;
@@ -90,7 +90,7 @@ public class SourceStream : Stream
}
else
{
var s = _getStreamPart(_streams.Count);
var s = _getStreamPart.NotNull("GetStreamPart is null")(_streams.Count);
if (s == null)
{
_stream = _streams.Count - 1;

View File

@@ -0,0 +1,65 @@
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Runtime.CompilerServices;
namespace SharpCompress;
public static class NotNullExtensions
{
public static IEnumerable<T> Empty<T>(this IEnumerable<T>? source) =>
source ?? Enumerable.Empty<T>();
public static IEnumerable<T> Empty<T>(this T? source)
{
if (source is null)
{
return Enumerable.Empty<T>();
}
return source.AsEnumerable();
}
#if NETFRAMEWORK || NETSTANDARD
public static T NotNull<T>(this T? obj, string? message = null)
where T : class
{
if (obj is null)
{
throw new ArgumentNullException(message ?? "Value is null");
}
return obj;
}
public static T NotNull<T>(this T? obj, string? message = null)
where T : struct
{
if (obj is null)
{
throw new ArgumentNullException(message ?? "Value is null");
}
return obj.Value;
}
#else
public static T NotNull<T>(
[NotNull] this T? obj,
[CallerArgumentExpression(nameof(obj))] string? paramName = null
)
where T : class
{
ArgumentNullException.ThrowIfNull(obj, paramName);
return obj;
}
public static T NotNull<T>(
[NotNull] this T? obj,
[CallerArgumentExpression(nameof(obj))] string? paramName = null
)
where T : struct
{
ArgumentNullException.ThrowIfNull(obj, paramName);
return obj.Value;
}
#endif
}

View File

@@ -13,9 +13,9 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
where TEntry : Entry
where TVolume : Volume
{
private bool completed;
private IEnumerator<TEntry>? entriesForCurrentReadStream;
private bool wroteCurrentEntry;
private bool _completed;
private IEnumerator<TEntry>? _entriesForCurrentReadStream;
private bool _wroteCurrentEntry;
public event EventHandler<ReaderExtractionEventArgs<IEntry>>? EntryExtractionProgress;
@@ -35,18 +35,18 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
/// <summary>
/// Current volume that the current entry resides in
/// </summary>
public abstract TVolume Volume { get; }
public abstract TVolume? Volume { get; }
/// <summary>
/// Current file entry
/// </summary>
public TEntry Entry => entriesForCurrentReadStream!.Current;
public TEntry Entry => _entriesForCurrentReadStream.NotNull().Current;
#region IDisposable Members
public void Dispose()
{
entriesForCurrentReadStream?.Dispose();
_entriesForCurrentReadStream?.Dispose();
Volume?.Dispose();
}
@@ -61,7 +61,7 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
/// </summary>
public void Cancel()
{
if (!completed)
if (!_completed)
{
Cancelled = true;
}
@@ -69,7 +69,7 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
public bool MoveToNextEntry()
{
if (completed)
if (_completed)
{
return false;
}
@@ -77,27 +77,27 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
{
throw new ReaderCancelledException("Reader has been cancelled.");
}
if (entriesForCurrentReadStream is null)
if (_entriesForCurrentReadStream is null)
{
return LoadStreamForReading(RequestInitialStream());
}
if (!wroteCurrentEntry)
if (!_wroteCurrentEntry)
{
SkipEntry();
}
wroteCurrentEntry = false;
_wroteCurrentEntry = false;
if (NextEntryForCurrentStream())
{
return true;
}
completed = true;
_completed = true;
return false;
}
protected bool LoadStreamForReading(Stream stream)
{
entriesForCurrentReadStream?.Dispose();
if ((stream is null) || (!stream.CanRead))
_entriesForCurrentReadStream?.Dispose();
if (stream is null || !stream.CanRead)
{
throw new MultipartStreamRequiredException(
"File is split into multiple archives: '"
@@ -105,13 +105,15 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
+ "'. A new readable stream is required. Use Cancel if it was intended."
);
}
entriesForCurrentReadStream = GetEntries(stream).GetEnumerator();
return entriesForCurrentReadStream.MoveNext();
_entriesForCurrentReadStream = GetEntries(stream).GetEnumerator();
return _entriesForCurrentReadStream.MoveNext();
}
protected virtual Stream RequestInitialStream() => Volume.Stream;
protected virtual Stream RequestInitialStream() =>
Volume.NotNull("Volume isn't loaded.").Stream;
internal virtual bool NextEntryForCurrentStream() => entriesForCurrentReadStream!.MoveNext();
internal virtual bool NextEntryForCurrentStream() =>
_entriesForCurrentReadStream.NotNull().MoveNext();
protected abstract IEnumerable<TEntry> GetEntries(Stream stream);
@@ -149,7 +151,7 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
public void WriteEntryTo(Stream writableStream)
{
if (wroteCurrentEntry)
if (_wroteCurrentEntry)
{
throw new ArgumentException("WriteEntryTo or OpenEntryStream can only be called once.");
}
@@ -166,7 +168,7 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
}
Write(writableStream);
wroteCurrentEntry = true;
_wroteCurrentEntry = true;
}
internal void Write(Stream writeStream)
@@ -178,12 +180,12 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
public EntryStream OpenEntryStream()
{
if (wroteCurrentEntry)
if (_wroteCurrentEntry)
{
throw new ArgumentException("WriteEntryTo or OpenEntryStream can only be called once.");
}
var stream = GetEntryStream();
wroteCurrentEntry = true;
_wroteCurrentEntry = true;
return stream;
}

View File

@@ -7,8 +7,8 @@ namespace SharpCompress.Readers.GZip;
public class GZipReader : AbstractReader<GZipEntry, GZipVolume>
{
internal GZipReader(Stream stream, ReaderOptions options)
: base(options, ArchiveType.GZip) => Volume = new GZipVolume(stream, options);
private GZipReader(Stream stream, ReaderOptions options)
: base(options, ArchiveType.GZip) => Volume = new GZipVolume(stream, options, 0);
public override GZipVolume Volume { get; }

View File

@@ -17,7 +17,7 @@ internal class MultiVolumeRarReader : RarReader
internal MultiVolumeRarReader(IEnumerable<Stream> streams, ReaderOptions options)
: base(options) => this.streams = streams.GetEnumerator();
internal override void ValidateArchive(RarVolume archive) { }
protected override void ValidateArchive(RarVolume archive) { }
protected override Stream RequestInitialStream()
{

View File

@@ -14,16 +14,16 @@ namespace SharpCompress.Readers.Rar;
public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
{
private RarVolume? volume;
internal Lazy<IRarUnpack> UnpackV2017 { get; } =
private Lazy<IRarUnpack> UnpackV2017 { get; } =
new(() => new Compressors.Rar.UnpackV2017.Unpack());
internal Lazy<IRarUnpack> UnpackV1 { get; } = new(() => new Compressors.Rar.UnpackV1.Unpack());
private Lazy<IRarUnpack> UnpackV1 { get; } = new(() => new Compressors.Rar.UnpackV1.Unpack());
internal RarReader(ReaderOptions options)
: base(options, ArchiveType.Rar) { }
internal abstract void ValidateArchive(RarVolume archive);
protected abstract void ValidateArchive(RarVolume archive);
public override RarVolume Volume => volume!;
public override RarVolume? Volume => volume;
/// <summary>
/// Opens a RarReader for Non-seeking usage with a single volume
@@ -51,7 +51,7 @@ public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
protected override IEnumerable<RarReaderEntry> GetEntries(Stream stream)
{
volume = new RarReaderVolume(stream, Options);
volume = new RarReaderVolume(stream, Options, 0);
foreach (var fp in volume.ReadFileParts())
{
ValidateArchive(volume);

View File

@@ -8,7 +8,7 @@ namespace SharpCompress.Readers.Rar;
public class RarReaderVolume : RarVolume
{
internal RarReaderVolume(Stream stream, ReaderOptions options, int index = 0)
internal RarReaderVolume(Stream stream, ReaderOptions options, int index)
: base(StreamingMode.Streaming, stream, options, index) { }
internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader) =>

View File

@@ -11,7 +11,7 @@ internal class SingleVolumeRarReader : RarReader
internal SingleVolumeRarReader(Stream stream, ReaderOptions options)
: base(options) => this.stream = stream;
internal override void ValidateArchive(RarVolume archive)
protected override void ValidateArchive(RarVolume archive)
{
if (archive.IsMultiVolume)
{

View File

@@ -69,7 +69,6 @@ public class ZipReader : AbstractReader<ZipEntry, ZipVolume>
switch (h.ZipHeaderType)
{
case ZipHeaderType.LocalEntry:
{
yield return new ZipEntry(
new StreamingZipFilePart((LocalEntryHeader)h, stream)

View File

@@ -2,11 +2,11 @@
<PropertyGroup>
<AssemblyTitle>SharpCompress - Pure C# Decompression/Compression</AssemblyTitle>
<NeutralLanguage>en-US</NeutralLanguage>
<VersionPrefix>0.36.0</VersionPrefix>
<AssemblyVersion>0.36.0</AssemblyVersion>
<FileVersion>0.36.0</FileVersion>
<VersionPrefix>0.37.2</VersionPrefix>
<AssemblyVersion>0.37.2</AssemblyVersion>
<FileVersion>0.37.2</FileVersion>
<Authors>Adam Hathcock</Authors>
<TargetFrameworks>net462;netstandard2.0;netstandard2.1;net6.0;net7.0;net8.0</TargetFrameworks>
<TargetFrameworks>net462;netstandard2.0;netstandard2.1;net6.0;net8.0</TargetFrameworks>
<AssemblyName>SharpCompress</AssemblyName>
<AssemblyOriginatorKeyFile>../../SharpCompress.snk</AssemblyOriginatorKeyFile>
<SignAssembly>true</SignAssembly>
@@ -17,33 +17,37 @@
<Copyright>Copyright (c) 2014 Adam Hathcock</Copyright>
<GenerateAssemblyTitleAttribute>false</GenerateAssemblyTitleAttribute>
<GenerateAssemblyProductAttribute>false</GenerateAssemblyProductAttribute>
<Description>SharpCompress is a compression library for NET Standard 2.0/2.1/NET 6.0/NET 7.0 that can unrar, decompress 7zip, decompress xz, zip/unzip, tar/untar lzip/unlzip, bzip2/unbzip2 and gzip/ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip is implemented.</Description>
<Description>SharpCompress is a compression library for NET Standard 2.0/NET Standard 2.1/NET 6.0/NET 8.0 that can unrar, decompress 7zip, decompress xz, zip/unzip, tar/untar lzip/unlzip, bzip2/unbzip2 and gzip/ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip is implemented.</Description>
<PublishRepositoryUrl>true</PublishRepositoryUrl>
<IncludeSymbols>true</IncludeSymbols>
<SymbolPackageFormat>snupkg</SymbolPackageFormat>
<IsTrimmable>true</IsTrimmable>
<LangVersion>latest</LangVersion>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<PackageReadmeFile>README.md</PackageReadmeFile>
</PropertyGroup>
<ItemGroup>
<Compile Remove="Compressors\Lzw\LzwException.cs" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.SourceLink.GitHub" Version="8.0.0" PrivateAssets="All" />
<PackageReference Include="ZstdSharp.Port" Version="0.7.4" />
</ItemGroup>
<PropertyGroup Condition=" '$(TargetFramework)' == 'net8.0' ">
<IsTrimmable>true</IsTrimmable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.SourceLink.GitHub">
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="ZstdSharp.Port" />
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'netstandard2.1' ">
<PackageReference Include="System.Text.Encoding.CodePages" Version="8.0.0" />
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'netstandard2.0' ">
<PackageReference Include="System.Text.Encoding.CodePages" Version="8.0.0" />
<PackageReference Include="System.Memory" Version="4.5.5" />
<PackageReference Include="Microsoft.Bcl.AsyncInterfaces" />
<PackageReference Include="System.Text.Encoding.CodePages" />
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'netstandard2.0' ">
<PackageReference Include="Microsoft.Bcl.AsyncInterfaces" />
<PackageReference Include="System.Text.Encoding.CodePages" />
<PackageReference Include="System.Memory" />
</ItemGroup>
<ItemGroup Condition=" '$(VersionlessImplicitFrameworkDefine)' == 'NETFRAMEWORK' ">
<PackageReference Include="System.Text.Encoding.CodePages" Version="8.0.0" />
<PackageReference Include="System.Memory" Version="4.5.5" />
<PackageReference Include="Microsoft.Bcl.AsyncInterfaces" />
<PackageReference Include="System.Text.Encoding.CodePages" />
<PackageReference Include="System.Memory" />
</ItemGroup>
<ItemGroup>
<None Include="..\..\README.md" Pack="true" PackagePath="\" />

View File

@@ -2,8 +2,6 @@ using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Readers;
namespace SharpCompress;
@@ -279,8 +277,42 @@ public static class Utility
long total = 0;
while (ReadTransferBlock(source, array, out var count))
{
total += count;
destination.Write(array, 0, count);
total += count;
}
return total;
}
finally
{
ArrayPool<byte>.Shared.Return(array);
}
}
public static long TransferTo(this Stream source, Stream destination, long maxLength)
{
var array = GetTransferByteArray();
var maxReadSize = array.Length;
try
{
long total = 0;
var remaining = maxLength;
if (remaining < maxReadSize)
{
maxReadSize = (int)remaining;
}
while (ReadTransferBlock(source, array, maxReadSize, out var count))
{
destination.Write(array, 0, count);
total += count;
if (remaining - count < 0)
{
break;
}
remaining -= count;
if (remaining < maxReadSize)
{
maxReadSize = (int)remaining;
}
}
return total;
}
@@ -320,6 +352,16 @@ public static class Utility
// Reads one chunk from <paramref name="source"/>, filling up to the buffer's full capacity.
// Returns false exactly when the stream reports end-of-data (a zero-byte read).
private static bool ReadTransferBlock(Stream source, byte[] array, out int count)
{
    count = source.Read(array, 0, array.Length);
    return count != 0;
}
// Reads at most <paramref name="size"/> bytes (clamped to the buffer's capacity)
// from <paramref name="source"/> into <paramref name="array"/>.
// Returns false exactly when the stream is exhausted (a zero-byte read).
private static bool ReadTransferBlock(Stream source, byte[] array, int size, out int count)
{
    var limit = Math.Min(size, array.Length);
    count = source.Read(array, 0, limit);
    return count != 0;
}
private static byte[] GetTransferByteArray() => ArrayPool<byte>.Shared.Rent(81920);
public static bool ReadFully(this Stream stream, byte[] buffer)
@@ -392,10 +434,4 @@ public static class Utility
buffer[offset + 2] = (byte)(number >> 8);
buffer[offset + 3] = (byte)number;
}
public static async ValueTask WriteAsync(
this Stream stream,
byte[] bytes,
CancellationToken cancellationToken
) => await stream.WriteAsync(bytes, 0, bytes.Length, cancellationToken).ConfigureAwait(false);
}

View File

@@ -1,42 +1,26 @@
#nullable disable
using System;
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Writers;
public abstract class AbstractWriter : IWriter
#pragma warning disable CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider declaring as nullable.
public abstract class AbstractWriter(ArchiveType type, WriterOptions writerOptions) : IWriter
{
private bool _isDisposed;
protected AbstractWriter(ArchiveType type, WriterOptions writerOptions)
{
WriterType = type;
WriterOptions = writerOptions;
}
//always initializes the stream
protected void InitalizeStream(Stream stream) => OutputStream = stream;
protected void InitializeStream(Stream stream) => OutputStream = stream;
protected Stream OutputStream { get; private set; }
#pragma warning restore CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider declaring as nullable.
public ArchiveType WriterType { get; }
public ArchiveType WriterType { get; } = type;
protected WriterOptions WriterOptions { get; }
protected WriterOptions WriterOptions { get; } = writerOptions;
public abstract void Write(string filename, Stream source, DateTime? modificationTime);
#if !NETFRAMEWORK && !NETSTANDARD2_0
public abstract ValueTask WriteAsync(
string filename,
Stream source,
DateTime? modificationTime,
CancellationToken cancellationToken
);
public abstract ValueTask DisposeAsync();
#endif
protected virtual void Dispose(bool isDisposing)
{

View File

@@ -1,7 +1,5 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
@@ -20,7 +18,7 @@ public sealed class GZipWriter : AbstractWriter
{
destination = NonDisposingStream.Create(destination);
}
InitalizeStream(
InitializeStream(
new GZipStream(
destination,
CompressionMode.Compress,
@@ -52,15 +50,4 @@ public sealed class GZipWriter : AbstractWriter
source.TransferTo(stream);
_wroteToStream = true;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override ValueTask DisposeAsync() => throw new NotImplementedException();
public override ValueTask WriteAsync(
string filename,
Stream source,
DateTime? modificationTime,
CancellationToken cancellationToken
) => throw new NotImplementedException();
#endif
}

View File

@@ -1,24 +1,11 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Writers;
public interface IWriter : IDisposable
#if !NETFRAMEWORK && !NETSTANDARD2_0
, IAsyncDisposable
#endif
{
ArchiveType WriterType { get; }
void Write(string filename, Stream source, DateTime? modificationTime);
#if !NETFRAMEWORK && !NETSTANDARD2_0
ValueTask WriteAsync(
string filename,
Stream source,
DateTime? modificationTime,
CancellationToken cancellationToken
);
#endif
}

View File

@@ -1,7 +1,5 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors;
@@ -34,19 +32,16 @@ public class TarWriter : AbstractWriter
case CompressionType.None:
break;
case CompressionType.BZip2:
{
destination = new BZip2Stream(destination, CompressionMode.Compress, false);
}
break;
case CompressionType.GZip:
{
destination = new GZipStream(destination, CompressionMode.Compress);
}
break;
case CompressionType.LZip:
{
destination = new LZipStream(destination, CompressionMode.Compress);
}
@@ -58,7 +53,7 @@ public class TarWriter : AbstractWriter
);
}
}
InitalizeStream(destination);
InitializeStream(destination);
}
public override void Write(string filename, Stream source, DateTime? modificationTime) =>
@@ -92,8 +87,7 @@ public class TarWriter : AbstractWriter
header.Name = NormalizeFilename(filename);
header.Size = realSize;
header.Write(OutputStream);
size = source.TransferTo(OutputStream);
size = source.TransferTo(OutputStream, realSize);
PadTo512(size.Value);
}
@@ -128,15 +122,4 @@ public class TarWriter : AbstractWriter
}
base.Dispose(isDisposing);
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override ValueTask DisposeAsync() => throw new NotImplementedException();
public override ValueTask WriteAsync(
string filename,
Stream source,
DateTime? modificationTime,
CancellationToken cancellationToken
) => throw new NotImplementedException();
#endif
}

View File

@@ -1,560 +0,0 @@
#if NETFRAMEWORK || NETSTANDARD2_0
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Text;
using SharpCompress.Common;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.PPMd;
using SharpCompress.IO;
namespace SharpCompress.Writers.Zip;
// Synchronous zip writer used on the legacy targets (this file is compiled only
// under NETFRAMEWORK / NETSTANDARD2_0, per the surrounding #if). Emits local file
// headers and entry data as entries are written, then the central directory and
// end-of-central-directory record(s) on Dispose.
public class ZipWriter : AbstractWriter
{
    private readonly CompressionType compressionType; // archive-wide default compression method
    private readonly CompressionLevel compressionLevel; // archive-wide default deflate level
    private readonly List<ZipCentralDirectoryEntry> entries = new(); // completed entries, written out as the central directory on Dispose
    private readonly string zipComment;
    private long streamPosition; // running offset into the archive, used for header offsets
    private PpmdProperties? ppmdProps; // created lazily via the PpmdProperties property
    private readonly bool isZip64; // archive default for zip64 extensions (per-entry override possible)
// Captures archive-wide defaults from the options, records the starting offset
// when the destination is seekable, and optionally shields the destination
// stream from disposal before handing it to the base writer.
public ZipWriter(Stream destination, ZipWriterOptions zipWriterOptions)
    : base(ArchiveType.Zip, zipWriterOptions)
{
    zipComment = zipWriterOptions.ArchiveComment ?? string.Empty;
    isZip64 = zipWriterOptions.UseZip64;
    if (destination.CanSeek)
    {
        // Start offset bookkeeping from wherever the caller positioned the stream.
        streamPosition = destination.Position;
    }
    compressionType = zipWriterOptions.CompressionType;
    compressionLevel = zipWriterOptions.DeflateCompressionLevel;
    if (WriterOptions.LeaveStreamOpen)
    {
        // Wrap so disposing this writer does not dispose the caller's stream.
        destination = NonDisposingStream.Create(destination);
    }
    InitalizeStream(destination);
}
private PpmdProperties PpmdProperties => ppmdProps ??= new PpmdProperties();
// On managed dispose: writes every buffered central-directory entry, then the
// end-of-central-directory record(s), before the base class closes the stream.
protected override void Dispose(bool isDisposing)
{
    if (isDisposing)
    {
        ulong size = 0;
        foreach (var entry in entries)
        {
            // Accumulate the central directory's total byte size for the end record.
            size += entry.Write(OutputStream);
        }
        WriteEndRecord(size);
    }
    base.Dispose(isDisposing);
}
// Maps the library-wide CompressionType onto the zip-format method id.
// Types the zip format does not support here are rejected with InvalidFormatException.
private static ZipCompressionMethod ToZipCompressionMethod(CompressionType compressionType)
{
    switch (compressionType)
    {
        case CompressionType.None:
            return ZipCompressionMethod.None;
        case CompressionType.Deflate:
            return ZipCompressionMethod.Deflate;
        case CompressionType.BZip2:
            return ZipCompressionMethod.BZip2;
        case CompressionType.LZMA:
            return ZipCompressionMethod.LZMA;
        case CompressionType.PPMd:
            return ZipCompressionMethod.PPMd;
        default:
            throw new InvalidFormatException("Invalid compression method: " + compressionType);
    }
}
// IWriter entry point: forwards to the zip-specific overload, carrying only the
// modification time in the per-entry options.
public override void Write(string entryPath, Stream source, DateTime? modificationTime)
{
    var entryOptions = new ZipWriterEntryOptions() { ModificationDateTime = modificationTime };
    Write(entryPath, source, entryOptions);
}
// Writes one complete entry: opens the per-entry compression stream, copies the
// source payload through it, and disposes it so the entry's footer is finalised.
public void Write(string entryPath, Stream source, ZipWriterEntryOptions zipWriterEntryOptions)
{
    using (var output = WriteToStream(entryPath, zipWriterEntryOptions))
    {
        source.TransferTo(output);
    }
}
// Emits the local file header for a new entry and returns a writable stream that
// compresses into the archive. Disposing the returned stream finalises the entry
// (footer/CRC patching happens in ZipWritingStream.Dispose).
public Stream WriteToStream(string entryPath, ZipWriterEntryOptions options)
{
    var compression = ToZipCompressionMethod(options.CompressionType ?? compressionType);
    entryPath = NormalizeFilename(entryPath);
    options.ModificationDateTime ??= DateTime.Now;
    options.EntryComment ??= string.Empty;
    var entry = new ZipCentralDirectoryEntry(
        compression,
        entryPath,
        (ulong)streamPosition, // current offset becomes this entry's header offset
        WriterOptions.ArchiveEncoding
    )
    {
        Comment = options.EntryComment,
        ModificationTime = options.ModificationDateTime
    };

    // Use the archive default setting for zip64 and allow overrides
    var useZip64 = isZip64;
    if (options.EnableZip64.HasValue)
    {
        useZip64 = options.EnableZip64.Value;
    }

    var headersize = (uint)WriteHeader(entryPath, options, entry, useZip64);
    streamPosition += headersize;
    return new ZipWritingStream(
        this,
        OutputStream,
        entry,
        compression,
        options.DeflateCompressionLevel ?? compressionLevel
    );
}
// Canonicalises an entry path for the zip format: forward slashes only, any
// drive/volume prefix (everything up to and including the first ':') removed,
// and no leading or trailing separators.
private string NormalizeFilename(string filename)
{
    var normalized = filename.Replace('\\', '/');
    var colonIndex = normalized.IndexOf(':');
    if (colonIndex >= 0)
    {
        normalized = normalized.Remove(0, colonIndex + 1);
    }
    return normalized.Trim('/');
}
// Writes the zip local file header for one entry and returns its total byte
// length. Also records the zip64 extra-field offset on the entry when space for
// it is pre-allocated, so Dispose can patch real sizes in later.
private int WriteHeader(
    string filename,
    ZipWriterEntryOptions zipWriterEntryOptions,
    ZipCentralDirectoryEntry entry,
    bool useZip64
)
{
    // We err on the side of caution until the zip specification clarifies how to support this
    if (!OutputStream.CanSeek && useZip64)
    {
        throw new NotSupportedException(
            "Zip64 extensions are not supported on non-seekable streams"
        );
    }

    var explicitZipCompressionInfo = ToZipCompressionMethod(
        zipWriterEntryOptions.CompressionType ?? compressionType
    );
    var encodedFilename = WriterOptions.ArchiveEncoding.Encode(filename);

    // Local file header signature.
    Span<byte> intBuf = stackalloc byte[4];
    BinaryPrimitives.WriteUInt32LittleEndian(intBuf, ZipHeaderFactory.ENTRY_HEADER_BYTES);
    OutputStream.Write(intBuf);
    // "Version needed to extract" field, chosen per compression/zip64 use.
    if (explicitZipCompressionInfo == ZipCompressionMethod.Deflate)
    {
        if (OutputStream.CanSeek && useZip64)
        {
            OutputStream.Write(stackalloc byte[] { 45, 0 }); //smallest allowed version for zip64
        }
        else
        {
            OutputStream.Write(stackalloc byte[] { 20, 0 }); //older version which is more compatible
        }
    }
    else
    {
        OutputStream.Write(stackalloc byte[] { 63, 0 }); //version says we used PPMd or LZMA
    }
    var flags = Equals(WriterOptions.ArchiveEncoding.GetEncoding(), Encoding.UTF8)
        ? HeaderFlags.Efs
        : 0;
    if (!OutputStream.CanSeek)
    {
        // Non-seekable output: sizes/CRC cannot be patched in place, so they
        // follow the data in a post-data descriptor instead.
        flags |= HeaderFlags.UsePostDataDescriptor;
        if (explicitZipCompressionInfo == ZipCompressionMethod.LZMA)
        {
            flags |= HeaderFlags.Bit1; // eos marker
        }
    }

    BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)flags);
    OutputStream.Write(intBuf.Slice(0, 2));
    BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)explicitZipCompressionInfo);
    OutputStream.Write(intBuf.Slice(0, 2)); // zipping method
    BinaryPrimitives.WriteUInt32LittleEndian(
        intBuf,
        zipWriterEntryOptions.ModificationDateTime.DateTimeToDosTime()
    );
    OutputStream.Write(intBuf);

    // zipping date and time
    OutputStream.Write(stackalloc byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 });

    // unused CRC, un/compressed size, updated later
    BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)encodedFilename.Length);
    OutputStream.Write(intBuf.Slice(0, 2)); // filename length

    var extralength = 0;
    if (OutputStream.CanSeek && useZip64)
    {
        // zip64 extra field: tag(2) + size(2) + uncompressed(8) + compressed(8).
        extralength = 2 + 2 + 8 + 8;
    }

    BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)extralength);
    OutputStream.Write(intBuf.Slice(0, 2)); // extra length
    OutputStream.Write(encodedFilename, 0, encodedFilename.Length);

    if (extralength != 0)
    {
        OutputStream.Write(new byte[extralength], 0, extralength); // reserve space for zip64 data
        entry.Zip64HeaderOffset = (ushort)(6 + 2 + 2 + 4 + 12 + 2 + 2 + encodedFilename.Length);
    }

    // Fixed header bytes + filename + any reserved zip64 extra field.
    return 6 + 2 + 2 + 4 + 12 + 2 + 2 + encodedFilename.Length + extralength;
}
// Writes the 12-byte CRC / compressed-size / uncompressed-size trio, in that
// order, little-endian. Used both for in-place header patching and for the
// streaming post-data descriptor.
private void WriteFooter(uint crc, uint compressed, uint uncompressed)
{
    Span<byte> buffer = stackalloc byte[4];

    BinaryPrimitives.WriteUInt32LittleEndian(buffer, crc);
    OutputStream.Write(buffer);

    BinaryPrimitives.WriteUInt32LittleEndian(buffer, compressed);
    OutputStream.Write(buffer);

    BinaryPrimitives.WriteUInt32LittleEndian(buffer, uncompressed);
    OutputStream.Write(buffer);
}
// Writes the end-of-central-directory record, preceded by the zip64 EOCD record
// and locator when any count/offset/size exceeds the classic 16/32-bit limits.
// `size` is the total byte size of the central directory already written.
private void WriteEndRecord(ulong size)
{
    var zip64EndOfCentralDirectoryNeeded =
        entries.Count > ushort.MaxValue
        || streamPosition >= uint.MaxValue
        || size >= uint.MaxValue;

    // Overflowing fields are clamped to the sentinel 0xFFFFFFFF per the zip spec.
    var sizevalue = size >= uint.MaxValue ? uint.MaxValue : (uint)size;
    var streampositionvalue =
        streamPosition >= uint.MaxValue ? uint.MaxValue : (uint)streamPosition;
    Span<byte> intBuf = stackalloc byte[8];
    if (zip64EndOfCentralDirectoryNeeded)
    {
        var recordlen = 2 + 2 + 4 + 4 + 8 + 8 + 8 + 8;

        // Write zip64 end of central directory record
        OutputStream.Write(stackalloc byte[] { 80, 75, 6, 6 });

        BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)recordlen);
        OutputStream.Write(intBuf); // Size of zip64 end of central directory record
        BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 45);
        OutputStream.Write(intBuf.Slice(0, 2)); // Made by
        BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 45);
        OutputStream.Write(intBuf.Slice(0, 2)); // Version needed

        BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 0);
        OutputStream.Write(intBuf.Slice(0, 4)); // Disk number
        OutputStream.Write(intBuf.Slice(0, 4)); // Central dir disk

        // TODO: entries.Count is int, so max 2^31 files
        BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)entries.Count);
        OutputStream.Write(intBuf); // Entries in this disk
        OutputStream.Write(intBuf); // Total entries
        BinaryPrimitives.WriteUInt64LittleEndian(intBuf, size);
        OutputStream.Write(intBuf); // Central Directory size
        BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)streamPosition);
        OutputStream.Write(intBuf); // Disk offset

        // Write zip64 end of central directory locator
        OutputStream.Write(stackalloc byte[] { 80, 75, 6, 7 });

        BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 0);
        OutputStream.Write(intBuf.Slice(0, 4)); // Entry disk
        BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)streamPosition + size);
        OutputStream.Write(intBuf); // Offset to the zip64 central directory
        BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 1);
        OutputStream.Write(intBuf.Slice(0, 4)); // Number of disks

        streamPosition += 4 + 8 + recordlen + (4 + 4 + 8 + 4);
    }

    // Write normal end of central directory record
    OutputStream.Write(stackalloc byte[] { 80, 75, 5, 6, 0, 0, 0, 0 });
    BinaryPrimitives.WriteUInt16LittleEndian(
        intBuf,
        (ushort)(entries.Count < 0xFFFF ? entries.Count : 0xFFFF)
    );
    OutputStream.Write(intBuf.Slice(0, 2)); // entries on this disk
    OutputStream.Write(intBuf.Slice(0, 2)); // entries total
    BinaryPrimitives.WriteUInt32LittleEndian(intBuf, sizevalue);
    OutputStream.Write(intBuf.Slice(0, 4));
    BinaryPrimitives.WriteUInt32LittleEndian(intBuf, streampositionvalue);
    OutputStream.Write(intBuf.Slice(0, 4));
    var encodedComment = WriterOptions.ArchiveEncoding.Encode(zipComment);
    BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)encodedComment.Length);
    OutputStream.Write(intBuf.Slice(0, 2));
    OutputStream.Write(encodedComment, 0, encodedComment.Length);
}
#region Nested type: ZipWritingStream
// Write-only stream returned by WriteToStream: payload written here is CRC'd,
// compressed, and counted; disposing it finalises the entry's sizes and footer.
internal class ZipWritingStream : Stream
{
    private readonly CRC32 crc = new(); // running CRC-32 of the uncompressed payload
    private readonly ZipCentralDirectoryEntry entry;
    private readonly Stream originalStream; // the archive stream itself (used for header patching)
    private readonly Stream writeStream; // compression stream the payload flows through
    private readonly ZipWriter writer;
    private readonly ZipCompressionMethod zipCompressionMethod;
    private readonly CompressionLevel compressionLevel;
    private CountingWritableSubStream? counting; // counts compressed bytes actually emitted
    private ulong decompressed; // uncompressed bytes written so far

    // Flag to prevent throwing exceptions on Dispose
    private bool _limitsExceeded;
    private bool isDisposed;
// Captures the entry's bookkeeping objects and immediately builds the
// compression pipeline (which may write a method-specific preamble).
internal ZipWritingStream(
    ZipWriter writer,
    Stream originalStream,
    ZipCentralDirectoryEntry entry,
    ZipCompressionMethod zipCompressionMethod,
    CompressionLevel compressionLevel
)
{
    this.writer = writer;
    this.originalStream = originalStream;
    this.entry = entry;
    this.zipCompressionMethod = zipCompressionMethod;
    this.compressionLevel = compressionLevel;
    writeStream = GetWriteStream(originalStream);
}
// Write-only stream: reading and seeking are unsupported by design.
public override bool CanRead => false;
public override bool CanSeek => false;
public override bool CanWrite => true;
public override long Length => throw new NotSupportedException();

public override long Position
{
    get => throw new NotSupportedException();
    set => throw new NotSupportedException();
}
// Builds the compression pipeline for this entry: a byte-counting wrapper (so
// the compressed size is known at Dispose time) topped by the method-specific
// compressor. LZMA and PPMd also emit their property preambles here.
private Stream GetWriteStream(Stream writeStream)
{
    counting = new CountingWritableSubStream(writeStream);
    Stream output = counting;
    switch (zipCompressionMethod)
    {
        case ZipCompressionMethod.None:
        {
            // STORED: payload goes straight through the counter.
            return output;
        }
        case ZipCompressionMethod.Deflate:
        {
            return new DeflateStream(counting, CompressionMode.Compress, compressionLevel);
        }
        case ZipCompressionMethod.BZip2:
        {
            return new BZip2Stream(counting, CompressionMode.Compress, false);
        }
        case ZipCompressionMethod.LZMA:
        {
            // 4-byte preamble written before the encoder properties —
            // NOTE(review): presumably the zip-LZMA version/size header; keep as-is.
            counting.WriteByte(9);
            counting.WriteByte(20);
            counting.WriteByte(5);
            counting.WriteByte(0);

            var lzmaStream = new LzmaStream(
                new LzmaEncoderProperties(!originalStream.CanSeek),
                false,
                counting
            );
            counting.Write(lzmaStream.Properties, 0, lzmaStream.Properties.Length);
            return lzmaStream;
        }
        case ZipCompressionMethod.PPMd:
        {
            // PPMd stores a 2-byte properties preamble before the data.
            counting.Write(writer.PpmdProperties.Properties, 0, 2);
            return new PpmdStream(writer.PpmdProperties, counting, true);
        }
        default:
        {
            throw new NotSupportedException("CompressionMethod: " + zipCompressionMethod);
        }
    }
}
// Finalises the entry: flushes/disposes the compressor, computes final CRC and
// sizes, then either patches the local header in place (seekable output) or
// appends a post-data descriptor (streaming output). Registers the completed
// entry with the writer's central directory list.
protected override void Dispose(bool disposing)
{
    if (isDisposed)
    {
        return;
    }

    isDisposed = true;

    base.Dispose(disposing);
    if (disposing)
    {
        writeStream.Dispose();

        if (_limitsExceeded)
        {
            // We have written invalid data into the archive,
            // so we destroy it now, instead of allowing the user to continue
            // with a defunct archive
            originalStream.Dispose();
            return;
        }

        entry.Crc = (uint)crc.Crc32Result;
        entry.Compressed = counting!.Count;
        entry.Decompressed = decompressed;

        var zip64 =
            entry.Compressed >= uint.MaxValue || entry.Decompressed >= uint.MaxValue;
        var compressedvalue = zip64 ? uint.MaxValue : (uint)counting.Count;
        var decompressedvalue = zip64 ? uint.MaxValue : (uint)entry.Decompressed;

        if (originalStream.CanSeek)
        {
            // Clear the post-data-descriptor flag byte at header offset +6.
            originalStream.Position = (long)(entry.HeaderOffset + 6);
            originalStream.WriteByte(0);

            if (counting.Count == 0 && entry.Decompressed == 0)
            {
                // set compression to STORED for zero byte files (no compression data)
                originalStream.Position = (long)(entry.HeaderOffset + 8);
                originalStream.WriteByte(0);
                originalStream.WriteByte(0);
            }

            // Patch CRC + sizes at header offset +14.
            originalStream.Position = (long)(entry.HeaderOffset + 14);

            writer.WriteFooter(entry.Crc, compressedvalue, decompressedvalue);

            // Ideally, we should not throw from Dispose()
            // We should not get here as the Write call checks the limits
            if (zip64 && entry.Zip64HeaderOffset == 0)
            {
                throw new NotSupportedException(
                    "Attempted to write a stream that is larger than 4GiB without setting the zip64 option"
                );
            }

            // If we have pre-allocated space for zip64 data,
            // fill it out, even if it is not required
            if (entry.Zip64HeaderOffset != 0)
            {
                originalStream.Position = (long)(
                    entry.HeaderOffset + entry.Zip64HeaderOffset
                );
                Span<byte> intBuf = stackalloc byte[8];
                BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 0x0001);
                originalStream.Write(intBuf.Slice(0, 2));
                BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 8 + 8);
                originalStream.Write(intBuf.Slice(0, 2));

                BinaryPrimitives.WriteUInt64LittleEndian(intBuf, entry.Decompressed);
                originalStream.Write(intBuf);
                BinaryPrimitives.WriteUInt64LittleEndian(intBuf, entry.Compressed);
                originalStream.Write(intBuf);
            }

            // Return to the end of the written data and advance the writer's offset.
            originalStream.Position = writer.streamPosition + (long)entry.Compressed;
            writer.streamPosition += (long)entry.Compressed;
        }
        else
        {
            // We have a streaming archive, so we should add a post-data-descriptor,
            // but we cannot as it does not hold the zip64 values
            // Throwing an exception until the zip specification is clarified

            // Ideally, we should not throw from Dispose()
            // We should not get here as the Write call checks the limits
            if (zip64)
            {
                throw new NotSupportedException(
                    "Streams larger than 4GiB are not supported for non-seekable streams"
                );
            }

            Span<byte> intBuf = stackalloc byte[4];
            BinaryPrimitives.WriteUInt32LittleEndian(
                intBuf,
                ZipHeaderFactory.POST_DATA_DESCRIPTOR
            );
            originalStream.Write(intBuf);
            writer.WriteFooter(entry.Crc, compressedvalue, decompressedvalue);
            // Descriptor is signature(4) + CRC/sizes(12) = 16 bytes.
            writer.streamPosition += (long)entry.Compressed + 16;
        }
        writer.entries.Add(entry);
    }
}
// Flush forwards to the compression pipeline; all read/seek operations are
// unsupported on this write-only stream.
public override void Flush() => writeStream.Flush();

public override int Read(byte[] buffer, int offset, int count) =>
    throw new NotSupportedException();

public override long Seek(long offset, SeekOrigin origin) =>
    throw new NotSupportedException();

public override void SetLength(long value) => throw new NotSupportedException();
// Writes payload bytes: enforces the 4 GiB non-zip64 limit (before and after the
// write), updates the running CRC and uncompressed count, and forwards the data
// to the compression pipeline. Zip64HeaderOffset != 0 means zip64 space was
// reserved, so no limit applies.
public override void Write(byte[] buffer, int offset, int count)
{
    // We check the limits first, because we can keep the archive consistent
    // if we can prevent the writes from happening
    if (entry.Zip64HeaderOffset == 0)
    {
        // Pre-check, the counting.Count is not exact, as we do not know the size before having actually compressed it
        if (
            _limitsExceeded
            || ((decompressed + (uint)count) > uint.MaxValue)
            || (counting!.Count + (uint)count) > uint.MaxValue
        )
        {
            throw new NotSupportedException(
                "Attempted to write a stream that is larger than 4GiB without setting the zip64 option"
            );
        }
    }

    decompressed += (uint)count;
    crc.SlurpBlock(buffer, offset, count);
    writeStream.Write(buffer, offset, count);

    if (entry.Zip64HeaderOffset == 0)
    {
        // Post-check, this is accurate
        if ((decompressed > uint.MaxValue) || counting!.Count > uint.MaxValue)
        {
            // We have written the data, so the archive is now broken
            // Throwing the exception here, allows us to avoid
            // throwing an exception in Dispose() which is discouraged
            // as it can mask other errors
            _limitsExceeded = true;
            throw new NotSupportedException(
                "Attempted to write a stream that is larger than 4GiB without setting the zip64 option"
            );
        }
    }
}
}
#endregion Nested type: ZipWritingStream
}
#endif

View File

@@ -1,12 +1,8 @@
#if !NETFRAMEWORK && !NETSTANDARD2_0
using System;
using System.Buffers;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
@@ -21,8 +17,6 @@ namespace SharpCompress.Writers.Zip;
public class ZipWriter : AbstractWriter
{
private static readonly byte[] ZIP64eND_OFdIRECTORY = [80, 75, 6, 6];
private static readonly byte[] END_OFdIRECTORY = [80, 75, 6, 7];
private readonly CompressionType compressionType;
private readonly CompressionLevel compressionLevel;
private readonly List<ZipCentralDirectoryEntry> entries = new();
@@ -30,7 +24,6 @@ public class ZipWriter : AbstractWriter
private long streamPosition;
private PpmdProperties? ppmdProps;
private readonly bool isZip64;
private bool isDisposed;
public ZipWriter(Stream destination, ZipWriterOptions zipWriterOptions)
: base(ArchiveType.Zip, zipWriterOptions)
@@ -49,29 +42,14 @@ public class ZipWriter : AbstractWriter
{
destination = NonDisposingStream.Create(destination);
}
InitalizeStream(destination);
InitializeStream(destination);
}
private PpmdProperties PpmdProperties => ppmdProps ??= new PpmdProperties();
public override async ValueTask DisposeAsync()
{
if (isDisposed)
{
return;
}
ulong size = 0;
foreach (var entry in entries)
{
size += entry.Write(OutputStream);
}
await WriteEndRecordAsync(size, CancellationToken.None).ConfigureAwait(false);
isDisposed = true;
}
protected override void Dispose(bool isDisposing)
{
if (isDisposing)
if (isDisposing && OutputStream is not null)
{
ulong size = 0;
foreach (var entry in entries)
@@ -83,9 +61,8 @@ public class ZipWriter : AbstractWriter
base.Dispose(isDisposing);
}
private static ZipCompressionMethod ToZipCompressionMethod(CompressionType compressionType)
{
return compressionType switch
private static ZipCompressionMethod ToZipCompressionMethod(CompressionType compressionType) =>
compressionType switch
{
CompressionType.None => ZipCompressionMethod.None,
CompressionType.Deflate => ZipCompressionMethod.Deflate,
@@ -94,7 +71,6 @@ public class ZipWriter : AbstractWriter
CompressionType.PPMd => ZipCompressionMethod.PPMd,
_ => throw new InvalidFormatException("Invalid compression method: " + compressionType)
};
}
public override void Write(string entryPath, Stream source, DateTime? modificationTime) =>
Write(
@@ -109,34 +85,6 @@ public class ZipWriter : AbstractWriter
source.TransferTo(output);
}
public override async ValueTask WriteAsync(
string entryPath,
Stream source,
DateTime? modificationTime,
CancellationToken cancellationToken
) =>
await WriteAsync(
entryPath,
source,
new ZipWriterEntryOptions() { ModificationDateTime = modificationTime },
cancellationToken
);
public async ValueTask WriteAsync(
string entryPath,
Stream source,
ZipWriterEntryOptions zipWriterEntryOptions,
CancellationToken cancellationToken
)
{
await using var output = await WriteToStreamAsync(
entryPath,
zipWriterEntryOptions,
cancellationToken
);
await source.CopyToAsync(output, cancellationToken);
}
public Stream WriteToStream(string entryPath, ZipWriterEntryOptions options)
{
var compression = ToZipCompressionMethod(options.CompressionType ?? compressionType);
@@ -166,49 +114,7 @@ public class ZipWriter : AbstractWriter
streamPosition += headersize;
return new ZipWritingStream(
this,
OutputStream,
entry,
compression,
options.DeflateCompressionLevel ?? compressionLevel
);
}
public async ValueTask<Stream> WriteToStreamAsync(
string entryPath,
ZipWriterEntryOptions options,
CancellationToken cancellationToken
)
{
var compression = ToZipCompressionMethod(options.CompressionType ?? compressionType);
entryPath = NormalizeFilename(entryPath);
options.ModificationDateTime ??= DateTime.Now;
options.EntryComment ??= string.Empty;
var entry = new ZipCentralDirectoryEntry(
compression,
entryPath,
(ulong)streamPosition,
WriterOptions.ArchiveEncoding
)
{
Comment = options.EntryComment,
ModificationTime = options.ModificationDateTime
};
// Use the archive default setting for zip64 and allow overrides
var useZip64 = isZip64;
if (options.EnableZip64.HasValue)
{
useZip64 = options.EnableZip64.Value;
}
var headersize = (uint)
await WriteHeaderAsync(entryPath, options, entry, useZip64, cancellationToken)
.ConfigureAwait(false);
streamPosition += headersize;
return new ZipWritingStream(
this,
OutputStream,
OutputStream.NotNull(),
entry,
compression,
options.DeflateCompressionLevel ?? compressionLevel
@@ -315,105 +221,6 @@ public class ZipWriter : AbstractWriter
return 6 + 2 + 2 + 4 + 12 + 2 + 2 + encodedFilename.Length + extralength;
}
private async ValueTask<int> WriteHeaderAsync(
string filename,
ZipWriterEntryOptions zipWriterEntryOptions,
ZipCentralDirectoryEntry entry,
bool useZip64,
CancellationToken cancellationToken
)
{
// We err on the side of caution until the zip specification clarifies how to support this
if (!OutputStream.CanSeek && useZip64)
{
throw new NotSupportedException(
"Zip64 extensions are not supported on non-seekable streams"
);
}
var explicitZipCompressionInfo = ToZipCompressionMethod(
zipWriterEntryOptions.CompressionType ?? compressionType
);
var encodedFilename = WriterOptions.ArchiveEncoding.Encode(filename);
var intBuf = ArrayPool<byte>.Shared.Rent(4);
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, ZipHeaderFactory.ENTRY_HEADER_BYTES);
await OutputStream.WriteAsync(intBuf, 0, 4, cancellationToken).ConfigureAwait(false);
if (explicitZipCompressionInfo == ZipCompressionMethod.Deflate)
{
if (OutputStream.CanSeek && useZip64)
{
await OutputStream
.WriteAsync([45, 0], 0, 2, cancellationToken)
.ConfigureAwait(false); //smallest allowed version for zip64
}
else
{
await OutputStream
.WriteAsync([20, 0], 0, 2, cancellationToken)
.ConfigureAwait(false); //older version which is more compatible
}
}
else
{
await OutputStream.WriteAsync([63, 0], 0, 2, cancellationToken).ConfigureAwait(false); //version says we used PPMd or LZMA
}
var flags = Equals(WriterOptions.ArchiveEncoding.GetEncoding(), Encoding.UTF8)
? HeaderFlags.Efs
: 0;
if (!OutputStream.CanSeek)
{
flags |= HeaderFlags.UsePostDataDescriptor;
if (explicitZipCompressionInfo == ZipCompressionMethod.LZMA)
{
flags |= HeaderFlags.Bit1; // eos marker
}
}
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)flags);
await OutputStream.WriteAsync(intBuf, 0, 2, cancellationToken).ConfigureAwait(false);
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)explicitZipCompressionInfo);
await OutputStream.WriteAsync(intBuf, 0, 2, cancellationToken).ConfigureAwait(false); // zipping method
BinaryPrimitives.WriteUInt32LittleEndian(
intBuf,
zipWriterEntryOptions.ModificationDateTime.DateTimeToDosTime()
);
await OutputStream.WriteAsync(intBuf, 0, 4, cancellationToken).ConfigureAwait(false);
// zipping date and time
await OutputStream
.WriteAsync([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], cancellationToken)
.ConfigureAwait(false);
// unused CRC, un/compressed size, updated later
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)encodedFilename.Length);
await OutputStream.WriteAsync(intBuf, 0, 2, cancellationToken).ConfigureAwait(false); // filename length
var extralength = 0;
if (OutputStream.CanSeek && useZip64)
{
extralength = 2 + 2 + 8 + 8;
}
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)extralength);
await OutputStream.WriteAsync(intBuf, 0, 2, cancellationToken).ConfigureAwait(false); // extra length
await OutputStream.WriteAsync(encodedFilename, cancellationToken).ConfigureAwait(false);
if (extralength != 0)
{
await OutputStream
.WriteAsync(new byte[extralength], cancellationToken)
.ConfigureAwait(false); // reserve space for zip64 data
entry.Zip64HeaderOffset = (ushort)(6 + 2 + 2 + 4 + 12 + 2 + 2 + encodedFilename.Length);
}
ArrayPool<byte>.Shared.Return(intBuf);
return 6 + 2 + 2 + 4 + 12 + 2 + 2 + encodedFilename.Length + extralength;
}
private void WriteFooter(uint crc, uint compressed, uint uncompressed)
{
Span<byte> intBuf = stackalloc byte[4];
@@ -425,23 +232,6 @@ public class ZipWriter : AbstractWriter
OutputStream.Write(intBuf);
}
private async ValueTask WriteFooterAsync(
uint crc,
uint compressed,
uint uncompressed,
CancellationToken cancellationToken
)
{
var intBuf = ArrayPool<byte>.Shared.Rent(4);
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, crc);
await OutputStream.WriteAsync(intBuf, cancellationToken).ConfigureAwait(false);
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, compressed);
await OutputStream.WriteAsync(intBuf, cancellationToken).ConfigureAwait(false);
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, uncompressed);
await OutputStream.WriteAsync(intBuf, cancellationToken).ConfigureAwait(false);
ArrayPool<byte>.Shared.Return(intBuf);
}
private void WriteEndRecord(ulong size)
{
var zip64EndOfCentralDirectoryNeeded =
@@ -512,82 +302,6 @@ public class ZipWriter : AbstractWriter
OutputStream.Write(encodedComment, 0, encodedComment.Length);
}
private async ValueTask WriteEndRecordAsync(ulong size, CancellationToken cancellationToken)
{
var zip64EndOfCentralDirectoryNeeded =
entries.Count > ushort.MaxValue
|| streamPosition >= uint.MaxValue
|| size >= uint.MaxValue;
var sizevalue = size >= uint.MaxValue ? uint.MaxValue : (uint)size;
var streampositionvalue =
streamPosition >= uint.MaxValue ? uint.MaxValue : (uint)streamPosition;
var intBuf = ArrayPool<byte>.Shared.Rent(8);
if (zip64EndOfCentralDirectoryNeeded)
{
var recordlen = 2 + 2 + 4 + 4 + 8 + 8 + 8 + 8;
// Write zip64 end of central directory record
await OutputStream
.WriteAsync(ZIP64eND_OFdIRECTORY, cancellationToken)
.ConfigureAwait(false);
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)recordlen);
await OutputStream.WriteAsync(intBuf, 0, 8, cancellationToken).ConfigureAwait(false); // Size of zip64 end of central directory record
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 45);
await OutputStream.WriteAsync(intBuf, 0, 2, cancellationToken).ConfigureAwait(false); // Made by
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 45);
await OutputStream.WriteAsync(intBuf, 0, 2, cancellationToken).ConfigureAwait(false); // Version needed
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 0);
await OutputStream.WriteAsync(intBuf, 0, 4, cancellationToken).ConfigureAwait(false); // Disk number
await OutputStream.WriteAsync(intBuf, 0, 4, cancellationToken).ConfigureAwait(false); // Central dir disk
// TODO: entries.Count is int, so max 2^31 files
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)entries.Count);
await OutputStream.WriteAsync(intBuf, 0, 8, cancellationToken).ConfigureAwait(false); // Entries in this disk
await OutputStream.WriteAsync(intBuf, 0, 8, cancellationToken).ConfigureAwait(false); // Total entries
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, size);
await OutputStream.WriteAsync(intBuf, 0, 8, cancellationToken).ConfigureAwait(false); // Central Directory size
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)streamPosition);
await OutputStream.WriteAsync(intBuf, 0, 8, cancellationToken).ConfigureAwait(false); // Disk offset
// Write zip64 end of central directory locator
OutputStream.Write(stackalloc byte[] { 80, 75, 6, 7 });
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 0);
await OutputStream.WriteAsync(intBuf, 0, 4, cancellationToken).ConfigureAwait(false); // Entry disk
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)streamPosition + size);
await OutputStream.WriteAsync(intBuf, 0, 8, cancellationToken).ConfigureAwait(false); // Offset to the zip64 central directory
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 1);
await OutputStream.WriteAsync(intBuf, 0, 4, cancellationToken).ConfigureAwait(false); // Number of disks
streamPosition += 4 + 8 + recordlen + (4 + 4 + 8 + 4);
}
// Write normal end of central directory record
OutputStream.Write(END_OFdIRECTORY);
BinaryPrimitives.WriteUInt16LittleEndian(
intBuf,
(ushort)(entries.Count < 0xFFFF ? entries.Count : 0xFFFF)
);
await OutputStream.WriteAsync(intBuf, 0, 2, cancellationToken).ConfigureAwait(false);
await OutputStream.WriteAsync(intBuf, 0, 2, cancellationToken).ConfigureAwait(false);
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, sizevalue);
await OutputStream.WriteAsync(intBuf, 0, 4, cancellationToken).ConfigureAwait(false);
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, streampositionvalue);
await OutputStream.WriteAsync(intBuf, 0, 4, cancellationToken).ConfigureAwait(false);
var encodedComment = WriterOptions.ArchiveEncoding.Encode(zipComment);
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)encodedComment.Length);
await OutputStream.WriteAsync(intBuf, 0, 2, cancellationToken).ConfigureAwait(false);
await OutputStream
.WriteAsync(encodedComment, 0, encodedComment.Length, cancellationToken)
.ConfigureAwait(false);
ArrayPool<byte>.Shared.Return(intBuf);
}
#region Nested type: ZipWritingStream
internal class ZipWritingStream : Stream
@@ -614,6 +328,7 @@ public class ZipWriter : AbstractWriter
CompressionLevel compressionLevel
)
{
this.writer = writer;
this.originalStream = originalStream;
this.writer = writer;
this.entry = entry;
@@ -681,131 +396,6 @@ public class ZipWriter : AbstractWriter
}
}
public override async ValueTask DisposeAsync()
{
if (isDisposed)
{
return;
}
isDisposed = true;
await writeStream.DisposeAsync();
if (limitsExceeded)
{
// We have written invalid data into the archive,
// so we destroy it now, instead of allowing the user to continue
// with a defunct archive
await originalStream.DisposeAsync();
return;
}
entry.Crc = (uint)crc.Crc32Result;
entry.Compressed = counting!.Count;
entry.Decompressed = decompressed;
var zip64 = entry.Compressed >= uint.MaxValue || entry.Decompressed >= uint.MaxValue;
var compressedvalue = zip64 ? uint.MaxValue : (uint)counting.Count;
var decompressedvalue = zip64 ? uint.MaxValue : (uint)entry.Decompressed;
if (originalStream.CanSeek)
{
originalStream.Position = (long)(entry.HeaderOffset + 6);
originalStream.WriteByte(0);
if (counting.Count == 0 && entry.Decompressed == 0)
{
// set compression to STORED for zero byte files (no compression data)
originalStream.Position = (long)(entry.HeaderOffset + 8);
originalStream.WriteByte(0);
originalStream.WriteByte(0);
}
originalStream.Position = (long)(entry.HeaderOffset + 14);
await writer.WriteFooterAsync(
entry.Crc,
compressedvalue,
decompressedvalue,
CancellationToken.None
);
// Ideally, we should not throw from Dispose()
// We should not get here as the Write call checks the limits
if (zip64 && entry.Zip64HeaderOffset == 0)
{
throw new NotSupportedException(
"Attempted to write a stream that is larger than 4GiB without setting the zip64 option"
);
}
// If we have pre-allocated space for zip64 data,
// fill it out, even if it is not required
if (entry.Zip64HeaderOffset != 0)
{
originalStream.Position = (long)(entry.HeaderOffset + entry.Zip64HeaderOffset);
var intBuf = ArrayPool<byte>.Shared.Rent(8);
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 0x0001);
await originalStream
.WriteAsync(intBuf, 0, 2, CancellationToken.None)
.ConfigureAwait(false);
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 8 + 8);
await originalStream
.WriteAsync(intBuf, 0, 2, CancellationToken.None)
.ConfigureAwait(false);
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, entry.Decompressed);
await originalStream
.WriteAsync(intBuf, CancellationToken.None)
.ConfigureAwait(false);
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, entry.Compressed);
await originalStream
.WriteAsync(intBuf, CancellationToken.None)
.ConfigureAwait(false);
ArrayPool<byte>.Shared.Return(intBuf);
}
originalStream.Position = writer.streamPosition + (long)entry.Compressed;
writer.streamPosition += (long)entry.Compressed;
}
else
{
// We have a streaming archive, so we should add a post-data-descriptor,
// but we cannot as it does not hold the zip64 values
// Throwing an exception until the zip specification is clarified
// Ideally, we should not throw from Dispose()
// We should not get here as the Write call checks the limits
if (zip64)
{
throw new NotSupportedException(
"Streams larger than 4GiB are not supported for non-seekable streams"
);
}
var intBuf = ArrayPool<byte>.Shared.Rent(4);
BinaryPrimitives.WriteUInt32LittleEndian(
intBuf,
ZipHeaderFactory.POST_DATA_DESCRIPTOR
);
await originalStream
.WriteAsync(intBuf, CancellationToken.None)
.ConfigureAwait(false);
await writer
.WriteFooterAsync(
entry.Crc,
compressedvalue,
decompressedvalue,
CancellationToken.None
)
.ConfigureAwait(false);
writer.streamPosition += (long)entry.Compressed + 16;
ArrayPool<byte>.Shared.Return(intBuf);
}
writer.entries.Add(entry);
}
protected override void Dispose(bool disposing)
{
if (isDisposed)
@@ -829,13 +419,14 @@ public class ZipWriter : AbstractWriter
return;
}
var countingCount = counting?.Count ?? 0;
entry.Crc = (uint)crc.Crc32Result;
entry.Compressed = counting!.Count;
entry.Compressed = countingCount;
entry.Decompressed = decompressed;
var zip64 =
entry.Compressed >= uint.MaxValue || entry.Decompressed >= uint.MaxValue;
var compressedvalue = zip64 ? uint.MaxValue : (uint)counting.Count;
var compressedvalue = zip64 ? uint.MaxValue : (uint)countingCount;
var decompressedvalue = zip64 ? uint.MaxValue : (uint)entry.Decompressed;
if (originalStream.CanSeek)
@@ -843,7 +434,7 @@ public class ZipWriter : AbstractWriter
originalStream.Position = (long)(entry.HeaderOffset + 6);
originalStream.WriteByte(0);
if (counting.Count == 0 && entry.Decompressed == 0)
if (countingCount == 0 && entry.Decompressed == 0)
{
// set compression to STORED for zero byte files (no compression data)
originalStream.Position = (long)(entry.HeaderOffset + 8);
@@ -930,11 +521,12 @@ public class ZipWriter : AbstractWriter
// if we can prevent the writes from happening
if (entry.Zip64HeaderOffset == 0)
{
var countingCount = counting?.Count ?? 0;
// Pre-check, the counting.Count is not exact, as we do not know the size before having actually compressed it
if (
limitsExceeded
|| ((decompressed + (uint)count) > uint.MaxValue)
|| (counting!.Count + (uint)count) > uint.MaxValue
|| (countingCount + (uint)count) > uint.MaxValue
)
{
throw new NotSupportedException(
@@ -949,8 +541,9 @@ public class ZipWriter : AbstractWriter
if (entry.Zip64HeaderOffset == 0)
{
var countingCount = counting?.Count ?? 0;
// Post-check, this is accurate
if ((decompressed > uint.MaxValue) || counting!.Count > uint.MaxValue)
if ((decompressed > uint.MaxValue) || countingCount > uint.MaxValue)
{
// We have written the data, so the archive is now broken
// Throwing the exception here, allows us to avoid
@@ -967,4 +560,3 @@ public class ZipWriter : AbstractWriter
#endregion Nested type: ZipWritingStream
}
#endif

View File

@@ -0,0 +1,338 @@
{
"version": 2,
"dependencies": {
".NETFramework,Version=v4.6.2": {
"Microsoft.Bcl.AsyncInterfaces": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "3WA9q9yVqJp222P3x1wYIGDAkpjAku0TMUaaQV22g6L67AI0LdOIrVS7Ht2vJfLHGSPVuqN94vIr15qn+HEkHw==",
"dependencies": {
"System.Threading.Tasks.Extensions": "4.5.4"
}
},
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",
"requested": "[1.0.3, )",
"resolved": "1.0.3",
"contentHash": "vUc9Npcs14QsyOD01tnv/m8sQUnGTGOw1BCmKcv77LBJY7OxhJ+zJF7UD/sCL3lYNFuqmQEVlkfS4Quif6FyYg==",
"dependencies": {
"Microsoft.NETFramework.ReferenceAssemblies.net462": "1.0.3"
}
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "8.0.0",
"Microsoft.SourceLink.Common": "8.0.0"
}
},
"System.Memory": {
"type": "Direct",
"requested": "[4.5.5, )",
"resolved": "4.5.5",
"contentHash": "XIWiDvKPXaTveaB7HVganDlOCRoj03l+jrwNvcge/t8vhGYKvqV+dMv6G4SAX2NoNmN0wZfVPTAlFwZcZvVOUw==",
"dependencies": {
"System.Buffers": "4.5.1",
"System.Numerics.Vectors": "4.5.0",
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
}
},
"System.Text.Encoding.CodePages": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "OZIsVplFGaVY90G2SbpgU7EnCoOO5pw1t4ic21dBF3/1omrJFpAGoNAVpPyMVOC90/hvgkGG3VFqR13YgZMQfg==",
"dependencies": {
"System.Memory": "4.5.5",
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
}
},
"ZstdSharp.Port": {
"type": "Direct",
"requested": "[0.8.0, )",
"resolved": "0.8.0",
"contentHash": "Z62eNBIu8E8YtbqlMy57tK3dV1+m2b9NhPeaYovB5exmLKvrGCqOhJTzrEUH5VyUWU6vwX3c1XHJGhW5HVs8dA==",
"dependencies": {
"Microsoft.Bcl.AsyncInterfaces": "5.0.0",
"System.Memory": "4.5.5"
}
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net462": {
"type": "Transitive",
"resolved": "1.0.3",
"contentHash": "IzAV30z22ESCeQfxP29oVf4qEo8fBGXLXSU6oacv/9Iqe6PzgHDKCaWfwMBak7bSJQM0F5boXWoZS+kChztRIQ=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
},
"System.Buffers": {
"type": "Transitive",
"resolved": "4.5.1",
"contentHash": "Rw7ijyl1qqRS0YQD/WycNst8hUUMgrMH4FCn1nNm27M4VxchZ1js3fVjQaANHO5f3sN4isvP4a+Met9Y4YomAg=="
},
"System.Numerics.Vectors": {
"type": "Transitive",
"resolved": "4.5.0",
"contentHash": "QQTlPTl06J/iiDbJCiepZ4H//BVraReU4O4EoRw1U02H5TLUIT7xn3GnDp9AXPSlJUDyFs4uWjWafNX6WrAojQ=="
},
"System.Runtime.CompilerServices.Unsafe": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "/iUeP3tq1S0XdNNoMz5C9twLSrM/TH+qElHkXWaPvuNOt+99G75NrV0OS2EqHx5wMN7popYjpc8oTjC1y16DLg=="
},
"System.Threading.Tasks.Extensions": {
"type": "Transitive",
"resolved": "4.5.4",
"contentHash": "zteT+G8xuGu6mS+mzDzYXbzS7rd3K6Fjb9RiZlYlJPam2/hU7JCBZBVEcywNuR+oZ1ncTvc/cq0faRr3P01OVg==",
"dependencies": {
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
}
}
},
".NETStandard,Version=v2.0": {
"Microsoft.Bcl.AsyncInterfaces": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "3WA9q9yVqJp222P3x1wYIGDAkpjAku0TMUaaQV22g6L67AI0LdOIrVS7Ht2vJfLHGSPVuqN94vIr15qn+HEkHw==",
"dependencies": {
"System.Threading.Tasks.Extensions": "4.5.4"
}
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "8.0.0",
"Microsoft.SourceLink.Common": "8.0.0"
}
},
"NETStandard.Library": {
"type": "Direct",
"requested": "[2.0.3, )",
"resolved": "2.0.3",
"contentHash": "st47PosZSHrjECdjeIzZQbzivYBJFv6P2nv4cj2ypdI204DO+vZ7l5raGMiX4eXMJ53RfOIg+/s4DHVZ54Nu2A==",
"dependencies": {
"Microsoft.NETCore.Platforms": "1.1.0"
}
},
"System.Memory": {
"type": "Direct",
"requested": "[4.5.5, )",
"resolved": "4.5.5",
"contentHash": "XIWiDvKPXaTveaB7HVganDlOCRoj03l+jrwNvcge/t8vhGYKvqV+dMv6G4SAX2NoNmN0wZfVPTAlFwZcZvVOUw==",
"dependencies": {
"System.Buffers": "4.5.1",
"System.Numerics.Vectors": "4.4.0",
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
}
},
"System.Text.Encoding.CodePages": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "OZIsVplFGaVY90G2SbpgU7EnCoOO5pw1t4ic21dBF3/1omrJFpAGoNAVpPyMVOC90/hvgkGG3VFqR13YgZMQfg==",
"dependencies": {
"System.Memory": "4.5.5",
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
}
},
"ZstdSharp.Port": {
"type": "Direct",
"requested": "[0.8.0, )",
"resolved": "0.8.0",
"contentHash": "Z62eNBIu8E8YtbqlMy57tK3dV1+m2b9NhPeaYovB5exmLKvrGCqOhJTzrEUH5VyUWU6vwX3c1XHJGhW5HVs8dA==",
"dependencies": {
"Microsoft.Bcl.AsyncInterfaces": "5.0.0",
"System.Memory": "4.5.5"
}
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
},
"Microsoft.NETCore.Platforms": {
"type": "Transitive",
"resolved": "1.1.0",
"contentHash": "kz0PEW2lhqygehI/d6XsPCQzD7ff7gUJaVGPVETX611eadGsA3A877GdSlU0LRVMCTH/+P3o2iDTak+S08V2+A=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
},
"System.Buffers": {
"type": "Transitive",
"resolved": "4.5.1",
"contentHash": "Rw7ijyl1qqRS0YQD/WycNst8hUUMgrMH4FCn1nNm27M4VxchZ1js3fVjQaANHO5f3sN4isvP4a+Met9Y4YomAg=="
},
"System.Numerics.Vectors": {
"type": "Transitive",
"resolved": "4.4.0",
"contentHash": "UiLzLW+Lw6HLed1Hcg+8jSRttrbuXv7DANVj0DkL9g6EnnzbL75EB7EWsw5uRbhxd/4YdG8li5XizGWepmG3PQ=="
},
"System.Runtime.CompilerServices.Unsafe": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "/iUeP3tq1S0XdNNoMz5C9twLSrM/TH+qElHkXWaPvuNOt+99G75NrV0OS2EqHx5wMN7popYjpc8oTjC1y16DLg=="
},
"System.Threading.Tasks.Extensions": {
"type": "Transitive",
"resolved": "4.5.4",
"contentHash": "zteT+G8xuGu6mS+mzDzYXbzS7rd3K6Fjb9RiZlYlJPam2/hU7JCBZBVEcywNuR+oZ1ncTvc/cq0faRr3P01OVg==",
"dependencies": {
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
}
}
},
".NETStandard,Version=v2.1": {
"Microsoft.Bcl.AsyncInterfaces": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "3WA9q9yVqJp222P3x1wYIGDAkpjAku0TMUaaQV22g6L67AI0LdOIrVS7Ht2vJfLHGSPVuqN94vIr15qn+HEkHw=="
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "8.0.0",
"Microsoft.SourceLink.Common": "8.0.0"
}
},
"System.Text.Encoding.CodePages": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "OZIsVplFGaVY90G2SbpgU7EnCoOO5pw1t4ic21dBF3/1omrJFpAGoNAVpPyMVOC90/hvgkGG3VFqR13YgZMQfg==",
"dependencies": {
"System.Memory": "4.5.5",
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
}
},
"ZstdSharp.Port": {
"type": "Direct",
"requested": "[0.8.0, )",
"resolved": "0.8.0",
"contentHash": "Z62eNBIu8E8YtbqlMy57tK3dV1+m2b9NhPeaYovB5exmLKvrGCqOhJTzrEUH5VyUWU6vwX3c1XHJGhW5HVs8dA==",
"dependencies": {
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
}
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
},
"System.Buffers": {
"type": "Transitive",
"resolved": "4.5.1",
"contentHash": "Rw7ijyl1qqRS0YQD/WycNst8hUUMgrMH4FCn1nNm27M4VxchZ1js3fVjQaANHO5f3sN4isvP4a+Met9Y4YomAg=="
},
"System.Numerics.Vectors": {
"type": "Transitive",
"resolved": "4.4.0",
"contentHash": "UiLzLW+Lw6HLed1Hcg+8jSRttrbuXv7DANVj0DkL9g6EnnzbL75EB7EWsw5uRbhxd/4YdG8li5XizGWepmG3PQ=="
},
"System.Runtime.CompilerServices.Unsafe": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "/iUeP3tq1S0XdNNoMz5C9twLSrM/TH+qElHkXWaPvuNOt+99G75NrV0OS2EqHx5wMN7popYjpc8oTjC1y16DLg=="
},
"System.Memory": {
"type": "CentralTransitive",
"requested": "[4.5.5, )",
"resolved": "4.5.5",
"contentHash": "XIWiDvKPXaTveaB7HVganDlOCRoj03l+jrwNvcge/t8vhGYKvqV+dMv6G4SAX2NoNmN0wZfVPTAlFwZcZvVOUw==",
"dependencies": {
"System.Buffers": "4.5.1",
"System.Numerics.Vectors": "4.4.0",
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
}
}
},
"net6.0": {
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "8.0.0",
"Microsoft.SourceLink.Common": "8.0.0"
}
},
"ZstdSharp.Port": {
"type": "Direct",
"requested": "[0.8.0, )",
"resolved": "0.8.0",
"contentHash": "Z62eNBIu8E8YtbqlMy57tK3dV1+m2b9NhPeaYovB5exmLKvrGCqOhJTzrEUH5VyUWU6vwX3c1XHJGhW5HVs8dA=="
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
}
},
"net8.0": {
"Microsoft.NET.ILLink.Tasks": {
"type": "Direct",
"requested": "[8.0.4, )",
"resolved": "8.0.4",
"contentHash": "PZb5nfQ+U19nhnmnR9T1jw+LTmozhuG2eeuzuW5A7DqxD/UXW2ucjmNJqnqOuh8rdPzM3MQXoF8AfFCedJdCUw=="
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "8.0.0",
"Microsoft.SourceLink.Common": "8.0.0"
}
},
"ZstdSharp.Port": {
"type": "Direct",
"requested": "[0.8.0, )",
"resolved": "0.8.0",
"contentHash": "Z62eNBIu8E8YtbqlMy57tK3dV1+m2b9NhPeaYovB5exmLKvrGCqOhJTzrEUH5VyUWU6vwX3c1XHJGhW5HVs8dA=="
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
}
}
}
}

View File

@@ -240,12 +240,10 @@ public class ArchiveTests : ReaderTests
ReaderOptions? readerOptions = null
)
{
#if !NETFRAMEWORK
if (!OperatingSystem.IsWindows())
if (!Environment.OSVersion.IsWindows())
{
fileOrder = fileOrder.Replace('\\', '/');
}
#endif
var expected = new Stack<string>(fileOrder.Split(' '));
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using var archive = ArchiveFactory.Open(testArchive, readerOptions);

View File

@@ -19,7 +19,7 @@ public class GZipArchiveTests : ArchiveTests
using (var archive = ArchiveFactory.Open(stream))
{
var entry = archive.Entries.First();
entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key));
entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
var size = entry.Size;
var scratch = new FileInfo(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar"));
@@ -41,7 +41,7 @@ public class GZipArchiveTests : ArchiveTests
using (var archive = GZipArchive.Open(stream))
{
var entry = archive.Entries.First();
entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key));
entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
var size = entry.Size;
var scratch = new FileInfo(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar"));
@@ -94,6 +94,7 @@ public class GZipArchiveTests : ArchiveTests
using (var entryStream = archiveEntry.OpenEntryStream())
{
var result = TarArchive.IsTarFile(entryStream);
Assert.True(result);
}
Assert.Equal(size, tarStream.Length);
using (var entryStream = archiveEntry.OpenEntryStream())

View File

@@ -0,0 +1,11 @@
using System;
namespace SharpCompress.Test;
public static class OperatingSystemExtensions
{
public static bool IsWindows(this OperatingSystem os) =>
os.Platform == PlatformID.Win32NT
|| os.Platform == PlatformID.Win32Windows
|| os.Platform == PlatformID.Win32S;
}

View File

@@ -209,7 +209,7 @@ public class RarReaderTests : ReaderTests
{
Assert.Equal(CompressionType.Rar, reader.Entry.CompressionType);
using var entryStream = reader.OpenEntryStream();
var file = Path.GetFileName(reader.Entry.Key);
var file = Path.GetFileName(reader.Entry.Key).NotNull();
var folder =
Path.GetDirectoryName(reader.Entry.Key)
?? throw new ArgumentNullException();
@@ -293,7 +293,7 @@ public class RarReaderTests : ReaderTests
using var reader = ReaderFactory.Open(stream, new ReaderOptions { LookForHeader = true });
while (reader.MoveToNextEntry())
{
if (reader.Entry.Key.Contains("jpg"))
if (reader.Entry.Key.NotNull().Contains("jpg"))
{
Assert.Equal(CompressionType.Rar, reader.Entry.CompressionType);
reader.WriteEntryToDirectory(
@@ -316,7 +316,7 @@ public class RarReaderTests : ReaderTests
using var reader = ReaderFactory.Open(stream, new ReaderOptions { LookForHeader = true });
while (reader.MoveToNextEntry())
{
if (reader.Entry.Key.Contains("jpg"))
if (reader.Entry.Key.NotNull().Contains("jpg"))
{
Assert.Equal(CompressionType.Rar, reader.Entry.CompressionType);
reader.WriteEntryToDirectory(

View File

@@ -77,12 +77,10 @@ public abstract class ReaderTests : TestBase
ReaderOptions? options = null
)
{
#if !NETFRAMEWORK
if (!OperatingSystem.IsWindows())
if (!Environment.OSVersion.IsWindows())
{
fileOrder = fileOrder.Replace('\\', '/');
}
#endif
var expected = new Stack<string>(fileOrder.Split(' '));
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);

View File

@@ -162,6 +162,12 @@ public class SevenZipArchiveTests : ArchiveTests
[Fact]
public void SevenZipArchive_SPARC_FileRead() => ArchiveFileRead("7Zip.SPARC.7z");
[Fact]
public void SevenZipArchive_ARM64_FileRead() => ArchiveFileRead("7Zip.ARM64.7z");
[Fact]
public void SevenZipArchive_RISCV_FileRead() => ArchiveFileRead("7Zip.RISCV.7z");
[Fact]
public void SevenZipArchive_Filters_FileRead() => ArchiveFileRead("7Zip.Filters.7z");
@@ -176,7 +182,7 @@ public class SevenZipArchiveTests : ArchiveTests
using (var archive = SevenZipArchive.Open(stream))
{
var entry = archive.Entries.First();
entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key));
entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
var size = entry.Size;
var scratch = new FileInfo(Path.Combine(SCRATCH_FILES_PATH, "7Zip.Tar.tar"));

View File

@@ -8,16 +8,13 @@
<ProjectReference Include="..\..\src\SharpCompress\SharpCompress.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.8.0" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.5.5">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="xunit" Version="2.6.3" />
<PackageReference Include="Xunit.SkippableFact" Version="1.4.13" />
<PackageReference Include="FluentAssertions" />
<PackageReference Include="Microsoft.NET.Test.Sdk" />
<PackageReference Include="xunit.runner.visualstudio" />
<PackageReference Include="xunit" />
<PackageReference Include="Xunit.SkippableFact" />
</ItemGroup>
<ItemGroup Condition=" '$(VersionlessImplicitFrameworkDefine)' != 'NETFRAMEWORK' ">
<PackageReference Include="Mono.Posix.NETStandard" Version="1.0.0" />
<PackageReference Include="Mono.Posix.NETStandard" />
</ItemGroup>
</Project>

View File

@@ -195,7 +195,7 @@ public class TarArchiveTests : ArchiveTests
using (var archive = TarArchive.Open(unmodified))
{
var entry = archive.Entries.Single(x =>
x.Key.EndsWith("jpg", StringComparison.OrdinalIgnoreCase)
x.Key.NotNull().EndsWith("jpg", StringComparison.OrdinalIgnoreCase)
);
archive.RemoveEntry(entry);
archive.SaveTo(scratchPath, CompressionType.None);

View File

@@ -79,7 +79,7 @@ public class TarReaderTests : ReaderTests
{
Directory.CreateDirectory(destdir);
}
var destinationFileName = Path.Combine(destdir, file);
var destinationFileName = Path.Combine(destdir, file.NotNull());
using var fs = File.OpenWrite(destinationFileName);
entryStream.TransferTo(fs);
@@ -105,7 +105,7 @@ public class TarReaderTests : ReaderTests
{
if (!reader.Entry.IsDirectory)
{
filePaths.Add(reader.Entry.Key);
filePaths.Add(reader.Entry.Key.NotNull("Entry Key is null"));
}
}
}
@@ -135,7 +135,7 @@ public class TarReaderTests : ReaderTests
Assert.Equal(CompressionType.BZip2, reader.Entry.CompressionType);
using var entryStream = reader.OpenEntryStream();
entryStream.SkipEntry();
names.Add(reader.Entry.Key);
names.Add(reader.Entry.Key.NotNull());
}
}
Assert.Equal(3, names.Count);
@@ -183,6 +183,21 @@ public class TarReaderTests : ReaderTests
Assert.Throws<IncompleteArchiveException>(() => reader.MoveToNextEntry());
}
[Fact]
public void Tar_Corrupted()
{
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "TarCorrupted.tar");
using Stream stream = File.OpenRead(archiveFullPath);
using var reader = ReaderFactory.Open(stream);
var memoryStream = new MemoryStream();
Assert.True(reader.MoveToNextEntry());
Assert.True(reader.MoveToNextEntry());
reader.WriteEntryTo(memoryStream);
stream.Close();
Assert.Throws<IncompleteArchiveException>(() => reader.MoveToNextEntry());
}
#if !NETFRAMEWORK
[Fact]
public void Tar_GZip_With_Symlink_Entries()
@@ -224,7 +239,7 @@ public class TarReaderTests : ReaderTests
{
if (reader.Entry.LinkTarget != null)
{
var path = Path.Combine(SCRATCH_FILES_PATH, reader.Entry.Key);
var path = Path.Combine(SCRATCH_FILES_PATH, reader.Entry.Key.NotNull());
var link = new Mono.Unix.UnixSymbolicLinkInfo(path);
if (link.HasContents)
{

View File

@@ -216,8 +216,8 @@ public class TestBase : IDisposable
while (archive1.MoveToNextEntry())
{
Assert.True(archive2.MoveToNextEntry());
archive1Entries.Add(archive1.Entry.Key);
archive2Entries.Add(archive2.Entry.Key);
archive1Entries.Add(archive1.Entry.Key.NotNull());
archive2Entries.Add(archive2.Entry.Key.NotNull());
}
Assert.False(archive2.MoveToNextEntry());
}

View File

@@ -193,7 +193,7 @@ public class ZipArchiveTests : ArchiveTests
using (var archive = ZipArchive.Open(unmodified))
{
var entry = archive.Entries.Single(x =>
x.Key.EndsWith("jpg", StringComparison.OrdinalIgnoreCase)
x.Key.NotNull().EndsWith("jpg", StringComparison.OrdinalIgnoreCase)
);
archive.RemoveEntry(entry);
@@ -249,16 +249,18 @@ public class ZipArchiveTests : ArchiveTests
var scratchPath = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.noEmptyDirs.zip");
using var vfs = (ZipArchive)ArchiveFactory.Open(scratchPath);
var e = vfs.Entries.First(v => v.Key.EndsWith("jpg", StringComparison.OrdinalIgnoreCase));
var e = vfs.Entries.First(v =>
v.Key.NotNull().EndsWith("jpg", StringComparison.OrdinalIgnoreCase)
);
vfs.RemoveEntry(e);
Assert.Null(
vfs.Entries.FirstOrDefault(v =>
v.Key.EndsWith("jpg", StringComparison.OrdinalIgnoreCase)
v.Key.NotNull().EndsWith("jpg", StringComparison.OrdinalIgnoreCase)
)
);
Assert.Null(
((IArchive)vfs).Entries.FirstOrDefault(v =>
v.Key.EndsWith("jpg", StringComparison.OrdinalIgnoreCase)
v.Key.NotNull().EndsWith("jpg", StringComparison.OrdinalIgnoreCase)
)
);
}
@@ -394,12 +396,12 @@ public class ZipArchiveTests : ArchiveTests
archive.AddAllFromDirectory(SCRATCH_FILES_PATH);
archive.RemoveEntry(
archive.Entries.Single(x =>
x.Key.EndsWith("jpg", StringComparison.OrdinalIgnoreCase)
x.Key.NotNull().EndsWith("jpg", StringComparison.OrdinalIgnoreCase)
)
);
Assert.Null(
archive.Entries.FirstOrDefault(x =>
x.Key.EndsWith("jpg", StringComparison.OrdinalIgnoreCase)
x.Key.NotNull().EndsWith("jpg", StringComparison.OrdinalIgnoreCase)
)
);
}
@@ -646,7 +648,7 @@ public class ZipArchiveTests : ArchiveTests
Assert.Equal(199, len1);
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !NETFRAMEWORK
var len2 = 0;
var buffer2 = new byte[firstEntry.Size + 256];

View File

@@ -0,0 +1,368 @@
{
"version": 2,
"dependencies": {
".NETFramework,Version=v4.6.2": {
"FluentAssertions": {
"type": "Direct",
"requested": "[6.12.0, )",
"resolved": "6.12.0",
"contentHash": "ZXhHT2YwP9lajrwSKbLlFqsmCCvFJMoRSK9t7sImfnCyd0OB3MhgxdoMcVqxbq1iyxD6mD2fiackWmBb7ayiXQ==",
"dependencies": {
"System.Threading.Tasks.Extensions": "4.5.0"
}
},
"Microsoft.NET.Test.Sdk": {
"type": "Direct",
"requested": "[17.9.0, )",
"resolved": "17.9.0",
"contentHash": "7GUNAUbJYn644jzwLm5BD3a2p9C1dmP8Hr6fDPDxgItQk9hBs1Svdxzz07KQ/UphMSmgza9AbijBJGmw5D658A==",
"dependencies": {
"Microsoft.CodeCoverage": "17.9.0"
}
},
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",
"requested": "[1.0.3, )",
"resolved": "1.0.3",
"contentHash": "vUc9Npcs14QsyOD01tnv/m8sQUnGTGOw1BCmKcv77LBJY7OxhJ+zJF7UD/sCL3lYNFuqmQEVlkfS4Quif6FyYg==",
"dependencies": {
"Microsoft.NETFramework.ReferenceAssemblies.net462": "1.0.3"
}
},
"xunit": {
"type": "Direct",
"requested": "[2.7.1, )",
"resolved": "2.7.1",
"contentHash": "9o050yCLzPvHxbrAkaHnI1j+YfPvRw+/ncvKbTfzIhO4JvQA0rPgoICJxXHMkscfgXmLFwZ8107ehnMUVzE23A==",
"dependencies": {
"xunit.analyzers": "1.12.0",
"xunit.assert": "2.7.1",
"xunit.core": "[2.7.1]"
}
},
"xunit.runner.visualstudio": {
"type": "Direct",
"requested": "[2.5.8, )",
"resolved": "2.5.8",
"contentHash": "ZJTm71neOfZcUnqdyY0A0Qgcg1162DoOq6+VpCCsOaD9rwCK5alcjOEHeu17sEekzq4qNv3kyelx6lUMsAt/eA==",
"dependencies": {
"Microsoft.TestPlatform.ObjectModel": "17.9.0"
}
},
"Xunit.SkippableFact": {
"type": "Direct",
"requested": "[1.4.13, )",
"resolved": "1.4.13",
"contentHash": "IyzZNvJEtXGlXrzxDiSbtH5Lyxf4iJdRQADuyjGdDf00LjXRLJwIoezQNFhFGKTMtvk8IIgaSHxW4mAV4O7b8A==",
"dependencies": {
"Validation": "2.4.18",
"xunit.extensibility.execution": "2.4.0"
}
},
"Microsoft.CodeCoverage": {
"type": "Transitive",
"resolved": "17.9.0",
"contentHash": "RGD37ZSrratfScYXm7M0HjvxMxZyWZL4jm+XgMZbkIY1UPgjUpbNA/t+WTGj/rC/0Hm9A3IrH3ywbKZkOCnoZA=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net462": {
"type": "Transitive",
"resolved": "1.0.3",
"contentHash": "IzAV30z22ESCeQfxP29oVf4qEo8fBGXLXSU6oacv/9Iqe6PzgHDKCaWfwMBak7bSJQM0F5boXWoZS+kChztRIQ=="
},
"Microsoft.TestPlatform.ObjectModel": {
"type": "Transitive",
"resolved": "17.9.0",
"contentHash": "1ilw/8vgmjLyKU+2SKXKXaOqpYFJCQfGqGz+x0cosl981VzjrY74Sv6qAJv+neZMZ9ZMxF3ArN6kotaQ4uvEBw==",
"dependencies": {
"System.Reflection.Metadata": "1.6.0"
}
},
"System.Buffers": {
"type": "Transitive",
"resolved": "4.5.1",
"contentHash": "Rw7ijyl1qqRS0YQD/WycNst8hUUMgrMH4FCn1nNm27M4VxchZ1js3fVjQaANHO5f3sN4isvP4a+Met9Y4YomAg=="
},
"System.Collections.Immutable": {
"type": "Transitive",
"resolved": "1.5.0",
"contentHash": "EXKiDFsChZW0RjrZ4FYHu9aW6+P4MCgEDCklsVseRfhoO0F+dXeMSsMRAlVXIo06kGJ/zv+2w1a2uc2+kxxSaQ=="
},
"System.Numerics.Vectors": {
"type": "Transitive",
"resolved": "4.5.0",
"contentHash": "QQTlPTl06J/iiDbJCiepZ4H//BVraReU4O4EoRw1U02H5TLUIT7xn3GnDp9AXPSlJUDyFs4uWjWafNX6WrAojQ=="
},
"System.Reflection.Metadata": {
"type": "Transitive",
"resolved": "1.6.0",
"contentHash": "COC1aiAJjCoA5GBF+QKL2uLqEBew4JsCkQmoHKbN3TlOZKa2fKLz5CpiRQKDz0RsAOEGsVKqOD5bomsXq/4STQ==",
"dependencies": {
"System.Collections.Immutable": "1.5.0"
}
},
"System.Runtime.CompilerServices.Unsafe": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "/iUeP3tq1S0XdNNoMz5C9twLSrM/TH+qElHkXWaPvuNOt+99G75NrV0OS2EqHx5wMN7popYjpc8oTjC1y16DLg=="
},
"System.Threading.Tasks.Extensions": {
"type": "Transitive",
"resolved": "4.5.4",
"contentHash": "zteT+G8xuGu6mS+mzDzYXbzS7rd3K6Fjb9RiZlYlJPam2/hU7JCBZBVEcywNuR+oZ1ncTvc/cq0faRr3P01OVg==",
"dependencies": {
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
}
},
"Validation": {
"type": "Transitive",
"resolved": "2.4.18",
"contentHash": "NfvWJ1QeuZ1FQCkqgXTu1cOkRkbNCfxs4Tat+abXLwom6OXbULVhRGp34BTvVB4XPxj6VIAl7KfLfStXMt/Ehw=="
},
"xunit.abstractions": {
"type": "Transitive",
"resolved": "2.0.3",
"contentHash": "pot1I4YOxlWjIb5jmwvvQNbTrZ3lJQ+jUGkGjWE3hEFM0l5gOnBWS+H3qsex68s5cO52g+44vpGzhAt+42vwKg=="
},
"xunit.analyzers": {
"type": "Transitive",
"resolved": "1.12.0",
"contentHash": "w23LH3aXade2WXKvXi0oA/uV15fpgUMjsPq1x91iQckzgWApgAiijNHmfFQtqNPm41wfrdbRl7nSJRd0yux/dw=="
},
"xunit.assert": {
"type": "Transitive",
"resolved": "2.7.1",
"contentHash": "JqvXS4yX+PtJn5BuqoKkYav7I0g4nXcxRbGTomDwVQjFccOdyfYKpuPOHX/DqrPCcL+MIHrGVdP3bveUXlvdnA=="
},
"xunit.core": {
"type": "Transitive",
"resolved": "2.7.1",
"contentHash": "hcODgS+RXdjmXq0zQzmRbERQgY+bAGGx1bdH3370t/8CTGmIEU2qAc1dQAjIRpARsacR0cj6LLJDUF5BNQNKTQ==",
"dependencies": {
"xunit.extensibility.core": "[2.7.1]",
"xunit.extensibility.execution": "[2.7.1]"
}
},
"xunit.extensibility.core": {
"type": "Transitive",
"resolved": "2.7.1",
"contentHash": "yLX4XlBFkvNYCzf+DEzlNk45KsSlu9W93IJHBmtUP96qZ9XyRYDFlwMj6BCcOhDKVNrZxSM8bqu4F/Qud4ehxA==",
"dependencies": {
"xunit.abstractions": "2.0.3"
}
},
"xunit.extensibility.execution": {
"type": "Transitive",
"resolved": "2.7.1",
"contentHash": "ei3dKF8agj4UKkJ6KkaZ5/Gcif3if6hBsyzegIQJonZDSKZFvb0AyKtyOhDfggBaXVL5iXZExITdRkfjC95yhw==",
"dependencies": {
"xunit.extensibility.core": "[2.7.1]"
}
},
"sharpcompress": {
"type": "Project",
"dependencies": {
"Microsoft.Bcl.AsyncInterfaces": "[8.0.0, )",
"System.Memory": "[4.5.5, )",
"System.Text.Encoding.CodePages": "[8.0.0, )",
"ZstdSharp.Port": "[0.8.0, )"
}
},
"Microsoft.Bcl.AsyncInterfaces": {
"type": "CentralTransitive",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "3WA9q9yVqJp222P3x1wYIGDAkpjAku0TMUaaQV22g6L67AI0LdOIrVS7Ht2vJfLHGSPVuqN94vIr15qn+HEkHw==",
"dependencies": {
"System.Threading.Tasks.Extensions": "4.5.4"
}
},
"System.Memory": {
"type": "CentralTransitive",
"requested": "[4.5.5, )",
"resolved": "4.5.5",
"contentHash": "XIWiDvKPXaTveaB7HVganDlOCRoj03l+jrwNvcge/t8vhGYKvqV+dMv6G4SAX2NoNmN0wZfVPTAlFwZcZvVOUw==",
"dependencies": {
"System.Buffers": "4.5.1",
"System.Numerics.Vectors": "4.5.0",
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
}
},
"System.Text.Encoding.CodePages": {
"type": "CentralTransitive",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "OZIsVplFGaVY90G2SbpgU7EnCoOO5pw1t4ic21dBF3/1omrJFpAGoNAVpPyMVOC90/hvgkGG3VFqR13YgZMQfg==",
"dependencies": {
"System.Memory": "4.5.5",
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
}
},
"ZstdSharp.Port": {
"type": "CentralTransitive",
"requested": "[0.8.0, )",
"resolved": "0.8.0",
"contentHash": "Z62eNBIu8E8YtbqlMy57tK3dV1+m2b9NhPeaYovB5exmLKvrGCqOhJTzrEUH5VyUWU6vwX3c1XHJGhW5HVs8dA==",
"dependencies": {
"Microsoft.Bcl.AsyncInterfaces": "5.0.0",
"System.Memory": "4.5.5"
}
}
},
"net8.0": {
"FluentAssertions": {
"type": "Direct",
"requested": "[6.12.0, )",
"resolved": "6.12.0",
"contentHash": "ZXhHT2YwP9lajrwSKbLlFqsmCCvFJMoRSK9t7sImfnCyd0OB3MhgxdoMcVqxbq1iyxD6mD2fiackWmBb7ayiXQ==",
"dependencies": {
"System.Configuration.ConfigurationManager": "4.4.0"
}
},
"Microsoft.NET.Test.Sdk": {
"type": "Direct",
"requested": "[17.9.0, )",
"resolved": "17.9.0",
"contentHash": "7GUNAUbJYn644jzwLm5BD3a2p9C1dmP8Hr6fDPDxgItQk9hBs1Svdxzz07KQ/UphMSmgza9AbijBJGmw5D658A==",
"dependencies": {
"Microsoft.CodeCoverage": "17.9.0",
"Microsoft.TestPlatform.TestHost": "17.9.0"
}
},
"Mono.Posix.NETStandard": {
"type": "Direct",
"requested": "[1.0.0, )",
"resolved": "1.0.0",
"contentHash": "vSN/L1uaVwKsiLa95bYu2SGkF0iY3xMblTfxc8alSziPuVfJpj3geVqHGAA75J7cZkMuKpFVikz82Lo6y6LLdA=="
},
"xunit": {
"type": "Direct",
"requested": "[2.7.1, )",
"resolved": "2.7.1",
"contentHash": "9o050yCLzPvHxbrAkaHnI1j+YfPvRw+/ncvKbTfzIhO4JvQA0rPgoICJxXHMkscfgXmLFwZ8107ehnMUVzE23A==",
"dependencies": {
"xunit.analyzers": "1.12.0",
"xunit.assert": "2.7.1",
"xunit.core": "[2.7.1]"
}
},
"xunit.runner.visualstudio": {
"type": "Direct",
"requested": "[2.5.8, )",
"resolved": "2.5.8",
"contentHash": "ZJTm71neOfZcUnqdyY0A0Qgcg1162DoOq6+VpCCsOaD9rwCK5alcjOEHeu17sEekzq4qNv3kyelx6lUMsAt/eA=="
},
"Xunit.SkippableFact": {
"type": "Direct",
"requested": "[1.4.13, )",
"resolved": "1.4.13",
"contentHash": "IyzZNvJEtXGlXrzxDiSbtH5Lyxf4iJdRQADuyjGdDf00LjXRLJwIoezQNFhFGKTMtvk8IIgaSHxW4mAV4O7b8A==",
"dependencies": {
"Validation": "2.4.18",
"xunit.extensibility.execution": "2.4.0"
}
},
"Microsoft.CodeCoverage": {
"type": "Transitive",
"resolved": "17.9.0",
"contentHash": "RGD37ZSrratfScYXm7M0HjvxMxZyWZL4jm+XgMZbkIY1UPgjUpbNA/t+WTGj/rC/0Hm9A3IrH3ywbKZkOCnoZA=="
},
"Microsoft.TestPlatform.ObjectModel": {
"type": "Transitive",
"resolved": "17.9.0",
"contentHash": "1ilw/8vgmjLyKU+2SKXKXaOqpYFJCQfGqGz+x0cosl981VzjrY74Sv6qAJv+neZMZ9ZMxF3ArN6kotaQ4uvEBw==",
"dependencies": {
"System.Reflection.Metadata": "1.6.0"
}
},
"Microsoft.TestPlatform.TestHost": {
"type": "Transitive",
"resolved": "17.9.0",
"contentHash": "Spmg7Wx49Ya3SxBjyeAR+nQpjMTKZwTwpZ7KyeOTIqI/WHNPnBU4HUvl5kuHPQAwGWqMy4FGZja1HvEwvoaDiA==",
"dependencies": {
"Microsoft.TestPlatform.ObjectModel": "17.9.0",
"Newtonsoft.Json": "13.0.1"
}
},
"Newtonsoft.Json": {
"type": "Transitive",
"resolved": "13.0.1",
"contentHash": "ppPFpBcvxdsfUonNcvITKqLl3bqxWbDCZIzDWHzjpdAHRFfZe0Dw9HmA0+za13IdyrgJwpkDTDA9fHaxOrt20A=="
},
"System.Configuration.ConfigurationManager": {
"type": "Transitive",
"resolved": "4.4.0",
"contentHash": "gWwQv/Ug1qWJmHCmN17nAbxJYmQBM/E94QxKLksvUiiKB1Ld3Sc/eK1lgmbSjDFxkQhVuayI/cGFZhpBSodLrg==",
"dependencies": {
"System.Security.Cryptography.ProtectedData": "4.4.0"
}
},
"System.Reflection.Metadata": {
"type": "Transitive",
"resolved": "1.6.0",
"contentHash": "COC1aiAJjCoA5GBF+QKL2uLqEBew4JsCkQmoHKbN3TlOZKa2fKLz5CpiRQKDz0RsAOEGsVKqOD5bomsXq/4STQ=="
},
"System.Security.Cryptography.ProtectedData": {
"type": "Transitive",
"resolved": "4.4.0",
"contentHash": "cJV7ScGW7EhatRsjehfvvYVBvtiSMKgN8bOVI0bQhnF5bU7vnHVIsH49Kva7i7GWaWYvmEzkYVk1TC+gZYBEog=="
},
"Validation": {
"type": "Transitive",
"resolved": "2.4.18",
"contentHash": "NfvWJ1QeuZ1FQCkqgXTu1cOkRkbNCfxs4Tat+abXLwom6OXbULVhRGp34BTvVB4XPxj6VIAl7KfLfStXMt/Ehw=="
},
"xunit.abstractions": {
"type": "Transitive",
"resolved": "2.0.3",
"contentHash": "pot1I4YOxlWjIb5jmwvvQNbTrZ3lJQ+jUGkGjWE3hEFM0l5gOnBWS+H3qsex68s5cO52g+44vpGzhAt+42vwKg=="
},
"xunit.analyzers": {
"type": "Transitive",
"resolved": "1.12.0",
"contentHash": "w23LH3aXade2WXKvXi0oA/uV15fpgUMjsPq1x91iQckzgWApgAiijNHmfFQtqNPm41wfrdbRl7nSJRd0yux/dw=="
},
"xunit.assert": {
"type": "Transitive",
"resolved": "2.7.1",
"contentHash": "JqvXS4yX+PtJn5BuqoKkYav7I0g4nXcxRbGTomDwVQjFccOdyfYKpuPOHX/DqrPCcL+MIHrGVdP3bveUXlvdnA=="
},
"xunit.core": {
"type": "Transitive",
"resolved": "2.7.1",
"contentHash": "hcODgS+RXdjmXq0zQzmRbERQgY+bAGGx1bdH3370t/8CTGmIEU2qAc1dQAjIRpARsacR0cj6LLJDUF5BNQNKTQ==",
"dependencies": {
"xunit.extensibility.core": "[2.7.1]",
"xunit.extensibility.execution": "[2.7.1]"
}
},
"xunit.extensibility.core": {
"type": "Transitive",
"resolved": "2.7.1",
"contentHash": "yLX4XlBFkvNYCzf+DEzlNk45KsSlu9W93IJHBmtUP96qZ9XyRYDFlwMj6BCcOhDKVNrZxSM8bqu4F/Qud4ehxA==",
"dependencies": {
"xunit.abstractions": "2.0.3"
}
},
"xunit.extensibility.execution": {
"type": "Transitive",
"resolved": "2.7.1",
"contentHash": "ei3dKF8agj4UKkJ6KkaZ5/Gcif3if6hBsyzegIQJonZDSKZFvb0AyKtyOhDfggBaXVL5iXZExITdRkfjC95yhw==",
"dependencies": {
"xunit.extensibility.core": "[2.7.1]"
}
},
"sharpcompress": {
"type": "Project",
"dependencies": {
"ZstdSharp.Port": "[0.8.0, )"
}
},
"ZstdSharp.Port": {
"type": "CentralTransitive",
"requested": "[0.8.0, )",
"resolved": "0.8.0",
"contentHash": "Z62eNBIu8E8YtbqlMy57tK3dV1+m2b9NhPeaYovB5exmLKvrGCqOhJTzrEUH5VyUWU6vwX3c1XHJGhW5HVs8dA=="
}
}
}
}

Binary file not shown.

Binary file not shown.

Binary file not shown.