Adds support for bzip2 (UDBZ) and zlib (UDZO) compressed UDIFs.
This commit is contained in:
2016-09-10 02:15:01 +01:00
parent 11b6c094c6
commit 1db5530cb2
4 changed files with 66 additions and 8 deletions

View File

@@ -34,6 +34,9 @@
<Reference Include="plist-cil">
<HintPath>..\packages\plist-cil.1.14\lib\plist-cil.dll</HintPath>
</Reference>
<Reference Include="SharpCompress">
<HintPath>..\packages\SharpCompress.0.12.4\lib\net40\SharpCompress.dll</HintPath>
</Reference>
</ItemGroup>
<ItemGroup>
<Compile Include="Properties\AssemblyInfo.cs" />

View File

@@ -40,6 +40,8 @@ using DiscImageChef.CommonTypes;
using DiscImageChef.Console;
using DiscImageChef.ImagePlugins;
using DiscImageChef.Filters;
using SharpCompress.Compressor.Deflate;
using SharpCompress.Compressor.BZip2;
namespace DiscImageChef.DiscImages
{
@@ -146,9 +148,12 @@ namespace DiscImageChef.DiscImages
Dictionary<ulong, BlockChunk> chunks;
Dictionary<ulong, byte[]> sectorCache;
Dictionary<ulong, byte[]> chunkCache;
const uint MaxCacheSize = 16777216;
const uint sectorSize = 512;
uint maxCachedSectors = MaxCacheSize / sectorSize;
uint currentChunkCacheSize;
uint buffersize;
Stream imageStream;
@@ -356,6 +361,7 @@ namespace DiscImageChef.DiscImages
bChnk.type = ChunkType_Copy;
ImageInfo.sectors = footer.sectorCount;
chunks.Add(bChnk.sector, bChnk);
buffersize = 2048 * sectorSize;
fakeBlockChunks = true;
}
@@ -411,6 +417,8 @@ namespace DiscImageChef.DiscImages
if(blkxList.Count == 0)
throw new ImageNotSupportedException("Could not retrieve block chunks. Please fill an issue and send it to us.");
buffersize = 0;
foreach(byte[] blkxBytes in blkxList)
{
BlockHeader bHdr = new BlockHeader();
@@ -437,6 +445,9 @@ namespace DiscImageChef.DiscImages
DicConsole.DebugWriteLine("UDIF plugin", "bHdr.chunks = {0}", bHdr.chunks);
DicConsole.DebugWriteLine("UDIF plugin", "bHdr.reservedChk is empty? = {0}", ArrayHelpers.ArrayIsNullOrEmpty(bHdr.reservedChk));
if(bHdr.buffers > buffersize)
buffersize = bHdr.buffers * sectorSize;
for(int i = 0; i < bHdr.chunks; i++)
{
BlockChunk bChnk = new BlockChunk();
@@ -474,10 +485,6 @@ namespace DiscImageChef.DiscImages
throw new ImageNotSupportedException("Chunks compressed with LZH are not yet supported.");
if((bChnk.type == ChunkType_ADC))
throw new ImageNotSupportedException("Chunks compressed with ADC are not yet supported.");
if((bChnk.type == ChunkType_Zlib))
throw new ImageNotSupportedException("Chunks compressed with zlib are not yet supported.");
if((bChnk.type == ChunkType_Bzip))
throw new ImageNotSupportedException("Chunks compressed with bzip2 are not yet supported.");
if((bChnk.type == ChunkType_LZFSE))
throw new ImageNotSupportedException("Chunks compressed with lzfse are not yet supported.");
@@ -492,7 +499,9 @@ namespace DiscImageChef.DiscImages
}
sectorCache = new Dictionary<ulong, byte[]>();
imageStream = stream;
chunkCache = new Dictionary<ulong, byte[]>();
currentChunkCacheSize = 0;
imageStream = stream;
ImageInfo.imageCreationTime = imageFilter.GetCreationTime();
ImageInfo.imageLastModificationTime = imageFilter.GetLastWriteTime();
@@ -538,6 +547,51 @@ namespace DiscImageChef.DiscImages
if(!chunkFound)
throw new ArgumentOutOfRangeException(nameof(sectorAddress), string.Format("Sector address {0} not found", sectorAddress));
if((currentChunk.type & ChunkType_CompressedMask) == ChunkType_CompressedMask)
{
byte[] buffer;
if(!chunkCache.TryGetValue(chunkStartSector, out buffer))
{
byte[] cmpBuffer = new byte[currentChunk.length];
imageStream.Seek((long)currentChunk.offset, SeekOrigin.Begin);
imageStream.Read(cmpBuffer, 0, cmpBuffer.Length);
MemoryStream cmpMs = new MemoryStream(cmpBuffer);
Stream decStream;
if(currentChunk.type == ChunkType_Zlib)
decStream = new ZlibStream(cmpMs, SharpCompress.Compressor.CompressionMode.Decompress);
else if(currentChunk.type == ChunkType_Bzip)
decStream = new BZip2Stream(cmpMs, SharpCompress.Compressor.CompressionMode.Decompress);
else
throw new ImageNotSupportedException(string.Format("Unsupported chunk type 0x{0:X8} found", currentChunk.type));
byte[] tmpBuffer = new byte[buffersize];
int realSize = decStream.Read(tmpBuffer, 0, (int)buffersize);
buffer = new byte[realSize];
Array.Copy(tmpBuffer, 0, buffer, 0, realSize);
tmpBuffer = null;
if(currentChunkCacheSize + realSize > MaxCacheSize)
{
chunkCache.Clear();
currentChunkCacheSize = 0;
}
chunkCache.Add(chunkStartSector, buffer);
currentChunkCacheSize += (uint)realSize;
}
sector = new byte[sectorSize];
Array.Copy(buffer, relOff, sector, 0, sectorSize);
if(sectorCache.Count >= maxCachedSectors)
sectorCache.Clear();
sectorCache.Add(sectorAddress, sector);
return sector;
}
if(currentChunk.type == ChunkType_NoCopy || currentChunk.type == ChunkType_Zero)
{
sector = new byte[sectorSize];

View File

@@ -1,4 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<packages>
<package id="plist-cil" version="1.14" targetFramework="net40" />
<package id="SharpCompress" version="0.12.4" targetFramework="net40" />
</packages>

6
TODO
View File

@@ -64,13 +64,13 @@ VMDK plugin:
--- Add support for encrypted extents
UDIF plugin:
--- Add support for compressed chunks
--- Add support for chunks compressed with ADC, RLE, LZH or KenCode
NDIF plugin:
--- Add support for compressed chunks
--- Add support for chunks compressed with ADC, RLE, LZH or KenCode
DART plugin:
--- Add support for compressed chunks
--- Add support for chunks compressed with RLE or LZH
Filters:
--- Add support for XZ compressed files