//
// Author:
//       Natalia Portillo <claunia@claunia.com>
//
// Copyright (c) 2017, © Claunia.com
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in
// the documentation and/or other materials provided with the distribution.
// * Neither the name of the [ORGANIZATION] nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Text;
using System.Threading;
using System.Xml.Serialization;
using DiscImageChef.Checksums;
using Ionic.Zip;
using Newtonsoft.Json;
using Schemas;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Compressors.LZMA;

namespace osrepodbmgr.Core
{
    public static partial class Workers
    {
        public static void CompressFiles()
        {
            try
            {
                if(string.IsNullOrWhiteSpace(Context.DbInfo.Developer))
                {
                    Failed?.Invoke("Developer cannot be empty");
                    return;
                }

                if(string.IsNullOrWhiteSpace(Context.DbInfo.Product))
                {
                    Failed?.Invoke("Product cannot be empty");
                    return;
                }

                if(string.IsNullOrWhiteSpace(Context.DbInfo.Version))
                {
                    Failed?.Invoke("Version cannot be empty");
                    return;
                }
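
                // The destination path inside the repository encodes the OS metadata:
                // developer/product/version[/languages][/architecture][/oem][/for <machine>],
                // with the file name built below from the format and the content flags.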
                string destinationFolder = "";
                destinationFolder = Path.Combine(destinationFolder, Context.DbInfo.Developer);
                destinationFolder = Path.Combine(destinationFolder, Context.DbInfo.Product);
                destinationFolder = Path.Combine(destinationFolder, Context.DbInfo.Version);
                if(!string.IsNullOrWhiteSpace(Context.DbInfo.Languages))
                    destinationFolder = Path.Combine(destinationFolder, Context.DbInfo.Languages);
                if(!string.IsNullOrWhiteSpace(Context.DbInfo.Architecture))
                    destinationFolder = Path.Combine(destinationFolder, Context.DbInfo.Architecture);
                if(Context.DbInfo.Oem) destinationFolder = Path.Combine(destinationFolder, "oem");
                if(!string.IsNullOrWhiteSpace(Context.DbInfo.Machine))
                    destinationFolder = Path.Combine(destinationFolder, "for " + Context.DbInfo.Machine);

                string destinationFile = "";
                if(!string.IsNullOrWhiteSpace(Context.DbInfo.Format))
                    destinationFile += "[" + Context.DbInfo.Format + "]";
                if(Context.DbInfo.Files)
                {
                    if(destinationFile != "") destinationFile += "_";
                    destinationFile += "files";
                }
                if(Context.DbInfo.Netinstall)
                {
                    if(destinationFile != "") destinationFile += "_";
                    destinationFile += "netinstall";
                }
                if(Context.DbInfo.Source)
                {
                    if(destinationFile != "") destinationFile += "_";
                    destinationFile += "source";
                }
                if(Context.DbInfo.Update)
                {
                    if(destinationFile != "") destinationFile += "_";
                    destinationFile += "update";
                }
                if(Context.DbInfo.Upgrade)
                {
                    if(destinationFile != "") destinationFile += "_";
                    destinationFile += "upgrade";
                }
                if(!string.IsNullOrWhiteSpace(Context.DbInfo.Description))
                {
                    if(destinationFile != "") destinationFile += "_";
                    destinationFile += Context.DbInfo.Description;
                }
                else if(destinationFile == "")
                    destinationFile = "archive";

                string destination = Path.Combine(destinationFolder, destinationFile) + ".zip";
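
                // The MDID identifying this OS in the database is the MD5 hash of the destination
                // path string itself, not of any file contents.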
                Md5Context md5 = new Md5Context();
                md5.Init();
                byte[] tmp;
                string mdid = md5.Data(Encoding.UTF8.GetBytes(destination), out tmp);
                Console.WriteLine("MDID: {0}", mdid);

                if(dbCore.DbOps.ExistsOs(mdid))
                {
                    if(File.Exists(destination))
                    {
                        Failed?.Invoke("OS already exists.");
                        return;
                    }

                    Failed?.Invoke("OS already exists in the database but not in the repository, check for inconsistencies.");
                    return;
                }

                if(File.Exists(destination))
                {
                    Failed?.Invoke("OS already exists in the repository but not in the database, check for inconsistencies.");
                    return;
                }

                Context.DbInfo.Mdid = mdid;

                string filesPath;
                if(!string.IsNullOrEmpty(Context.TmpFolder) && Directory.Exists(Context.TmpFolder))
                    filesPath = Context.TmpFolder;
                else filesPath = Context.Path;

                string extension = null;
                switch(Settings.Current.CompressionAlgorithm)
                {
                    case AlgoEnum.GZip:
                        extension = ".gz";
                        break;
                    case AlgoEnum.BZip2:
                        extension = ".bz2";
                        break;
                    case AlgoEnum.LZMA:
                        extension = ".lzma";
                        break;
                    case AlgoEnum.LZip:
                        extension = ".lz";
                        break;
                }

                long totalSize = 0, currentSize = 0;
                foreach(KeyValuePair<string, DbOsFile> file in Context.Hashes) totalSize += file.Value.Length;

#if DEBUG
                stopwatch.Restart();
#endif
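                // Each file is stored once in the repository under a path built from the first five
                // hex digits of its SHA256 (one directory level per digit); files already present
                // are skipped and only counted towards progress.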
                foreach(KeyValuePair<string, DbOsFile> file in Context.Hashes)
                {
                    UpdateProgress?.Invoke("Compressing...", file.Value.Path, currentSize, totalSize);

                    destinationFolder = Path.Combine(Settings.Current.RepositoryPath, file.Value.Sha256[0].ToString(),
                                                     file.Value.Sha256[1].ToString(), file.Value.Sha256[2].ToString(),
                                                     file.Value.Sha256[3].ToString(), file.Value.Sha256[4].ToString());
                    Directory.CreateDirectory(destinationFolder);

                    destinationFile = Path.Combine(destinationFolder, file.Value.Sha256 + extension);

                    if(!File.Exists(destinationFile))
                    {
                        FileStream inFs = new FileStream(Path.Combine(filesPath, file.Value.Path), FileMode.Open,
                                                         FileAccess.Read);
                        FileStream outFs = new FileStream(destinationFile, FileMode.CreateNew, FileAccess.Write);
                        Stream zStream = null;

                        switch(Settings.Current.CompressionAlgorithm)
                        {
                            case AlgoEnum.GZip:
                                zStream = new GZipStream(outFs, CompressionMode.Compress,
                                                         CompressionLevel.BestCompression);
                                break;
                            case AlgoEnum.BZip2:
                                zStream = new BZip2Stream(outFs, CompressionMode.Compress);
                                break;
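                            // Raw LZMA is not self-describing, so the encoder properties and the
                            // uncompressed length (8 bytes) are written ahead of the compressed
                            // data; Zf_HandleOpen() reads them back in the same order.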
                            case AlgoEnum.LZMA:
                                zStream = new LzmaStream(new LzmaEncoderProperties(), false, outFs);
                                outFs.Write(((LzmaStream)zStream).Properties, 0,
                                            ((LzmaStream)zStream).Properties.Length);
                                outFs.Write(BitConverter.GetBytes(inFs.Length), 0, 8);
                                break;
                            case AlgoEnum.LZip:
                                zStream = new LZipStream(outFs, CompressionMode.Compress);
                                break;
                        }

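                        // Copy in BUFFER_SIZE chunks, then a final partial buffer for the remainder,
                        // reporting per-file and overall progress along the way.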
                        byte[] buffer = new byte[BUFFER_SIZE];

                        while(inFs.Position + BUFFER_SIZE <= inFs.Length)
                        {
                            UpdateProgress2?.Invoke($"{inFs.Position / (double)inFs.Length:P}",
                                                    $"{inFs.Position} / {inFs.Length} bytes", inFs.Position,
                                                    inFs.Length);
                            UpdateProgress?.Invoke("Compressing...", file.Value.Path, currentSize, totalSize);

                            inFs.Read(buffer, 0, buffer.Length);
                            zStream.Write(buffer, 0, buffer.Length);
                            currentSize += buffer.Length;
                        }

                        buffer = new byte[inFs.Length - inFs.Position];
                        UpdateProgress2?.Invoke($"{inFs.Position / (double)inFs.Length:P}",
                                                $"{inFs.Position} / {inFs.Length} bytes", inFs.Position, inFs.Length);
                        UpdateProgress?.Invoke("Compressing...", file.Value.Path, currentSize, totalSize);

                        inFs.Read(buffer, 0, buffer.Length);
                        zStream.Write(buffer, 0, buffer.Length);
                        currentSize += buffer.Length;

                        UpdateProgress2?.Invoke($"{inFs.Length / (double)inFs.Length:P}", "Finishing...", inFs.Length,
                                                inFs.Length);

                        inFs.Close();
                        zStream.Close();
                        outFs.Dispose();
                    }
                    else currentSize += file.Value.Length;
                }
#if DEBUG
                stopwatch.Stop();
                Console.WriteLine("Core.CompressFiles(): Took {0} seconds to compress files",
                                  stopwatch.Elapsed.TotalSeconds);
#endif

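                // If CICM metadata was provided, serialize it as both XML and JSON and store the
                // two files under metadata/, sharded by the first five characters of the MDID,
                // mirroring the layout used for file contents.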
                if(Context.Metadata != null)
                {
                    MemoryStream xms = new MemoryStream();
                    XmlSerializer xs = new XmlSerializer(typeof(CICMMetadataType));
                    xs.Serialize(xms, Context.Metadata);
                    xms.Position = 0;

                    JsonSerializer js = new JsonSerializer
                    {
                        Formatting = Formatting.Indented,
                        NullValueHandling = NullValueHandling.Ignore
                    };

                    MemoryStream jms = new MemoryStream();
                    StreamWriter sw = new StreamWriter(jms, Encoding.UTF8, 1048576, true);
                    js.Serialize(sw, Context.Metadata, typeof(CICMMetadataType));
                    sw.Close();
                    jms.Position = 0;

                    destinationFolder = Path.Combine(Settings.Current.RepositoryPath, "metadata", mdid[0].ToString(),
                                                     mdid[1].ToString(), mdid[2].ToString(), mdid[3].ToString(),
                                                     mdid[4].ToString());
                    Directory.CreateDirectory(destinationFolder);

                    FileStream xfs = new FileStream(Path.Combine(destinationFolder, mdid + ".xml"), FileMode.CreateNew,
                                                    FileAccess.Write);
                    xms.CopyTo(xfs);
                    xfs.Close();

                    FileStream jfs = new FileStream(Path.Combine(destinationFolder, mdid + ".json"),
                                                    FileMode.CreateNew, FileAccess.Write);
                    jms.CopyTo(jfs);
                    jfs.Close();

                    xms.Position = 0;
                    jms.Position = 0;
                }

                FinishedWithText?.Invoke($"Correctly added operating system with MDID {mdid}");
            }
            catch(ThreadAbortException) { }
            catch(Exception ex)
            {
                if(Debugger.IsAttached) throw;
                Failed?.Invoke($"Exception {ex.Message}\n{ex.InnerException}");
#if DEBUG
                Console.WriteLine("Exception {0}\n{1}", ex.Message, ex.InnerException);
#endif
            }
        }

        public static void OpenArchive()
        {
            if(!Context.UnarUsable)
            {
                Failed?.Invoke("The UnArchiver is not correctly installed");
                return;
            }

            if(!File.Exists(Context.Path))
            {
                Failed?.Invoke("Specified file cannot be found");
                return;
            }

            try
            {
                string unarFolder = Path.GetDirectoryName(Settings.Current.UnArchiverPath);
                string extension = Path.GetExtension(Settings.Current.UnArchiverPath);
                string unarfilename = Path.GetFileNameWithoutExtension(Settings.Current.UnArchiverPath);
                string lsarfilename = unarfilename?.Replace("unar", "lsar");
                string lsarPath = Path.Combine(unarFolder, lsarfilename + extension);

#if DEBUG
                stopwatch.Restart();
#endif
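                // lsar (The UnArchiver's listing tool, found next to unar) is asked for a JSON
                // listing with -j; its output is parsed below to count entries and detect the format.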
                Process lsarProcess = new Process
                {
                    StartInfo =
                    {
                        FileName = lsarPath,
                        CreateNoWindow = true,
                        RedirectStandardOutput = true,
                        UseShellExecute = false,
                        Arguments = $"-j \"\"\"{Context.Path}\"\"\""
                    }
                };

                lsarProcess.Start();
                string lsarOutput = lsarProcess.StandardOutput.ReadToEnd();
                lsarProcess.WaitForExit();
#if DEBUG
                stopwatch.Stop();
                Console.WriteLine("Core.OpenArchive(): Took {0} seconds to list archive contents",
                                  stopwatch.Elapsed.TotalSeconds);
                stopwatch.Restart();
#endif
                long counter = 0;
                string format = null;
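
                // Walk the lsar JSON token by token: each "XADFileName" property counts one archive
                // entry, and the value following "lsarFormatName" gives the archive format.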
                JsonTextReader jsReader = new JsonTextReader(new StringReader(lsarOutput));
                while(jsReader.Read())
                    switch(jsReader.TokenType)
                    {
                        case JsonToken.PropertyName when jsReader.Value != null &&
                                                         jsReader.Value.ToString() == "XADFileName":
                            counter++;
                            break;
                        case JsonToken.PropertyName when jsReader.Value != null &&
                                                         jsReader.Value.ToString() == "lsarFormatName":
                            jsReader.Read();
                            if(jsReader.TokenType == JsonToken.String && jsReader.Value != null)
                                format = jsReader.Value.ToString();
                            break;
                    }
#if DEBUG
                stopwatch.Stop();
                Console.WriteLine("Core.OpenArchive(): Took {0} seconds to process archive contents",
                                  stopwatch.Elapsed.TotalSeconds);
#endif

                Context.UnzipWithUnAr = false;
                Context.ArchiveFormat = format;
                Context.NoFilesInArchive = counter;

                if(string.IsNullOrEmpty(format))
                {
                    Failed?.Invoke("File not recognized as an archive");
                    return;
                }

                if(counter == 0)
                {
                    Failed?.Invoke("Archive contains no files");
                    return;
                }

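                // ZIP archives are normally handled with DotNetZip, but ZIPs written by Mac OS X
                // include a __MACOSX folder whose resource forks only The UnArchiver restores
                // properly, so those get flagged for extraction with unar instead.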
                if(Context.ArchiveFormat == "Zip")
                {
                    Context.UnzipWithUnAr = false;

                    if(Context.UsableDotNetZip)
                    {
#if DEBUG
                        stopwatch.Restart();
#endif
                        ZipFile zf = ZipFile.Read(Context.Path, new ReadOptions { Encoding = Encoding.UTF8 });
                        foreach(ZipEntry ze in zf)
                        {
                            // ZIPs created with Mac OS X need to be extracted with The UnArchiver to get the correct resource fork structure
                            if(!ze.FileName.StartsWith("__MACOSX", StringComparison.CurrentCulture)) continue;

                            Context.UnzipWithUnAr = true;
                            break;
                        }
#if DEBUG
                        stopwatch.Stop();
                        Console.WriteLine("Core.OpenArchive(): Took {0} seconds to navigate in search of Mac OS X metadata",
                                          stopwatch.Elapsed.TotalSeconds);
#endif
                    }
                }

                Finished?.Invoke();
            }
            catch(ThreadAbortException) { }
            catch(Exception ex)
            {
                if(Debugger.IsAttached) throw;
                Failed?.Invoke($"Exception {ex.Message}\n{ex.InnerException}");
#if DEBUG
                Console.WriteLine("Exception {0}\n{1}", ex.Message, ex.InnerException);
#endif
            }
        }

        public static void ExtractArchive()
        {
            if(!File.Exists(Context.Path))
            {
                Failed?.Invoke("Specified file cannot be found");
                return;
            }

            if(!Directory.Exists(Settings.Current.TemporaryFolder))
            {
                Failed?.Invoke("Temporary folder cannot be found");
                return;
            }

            string tmpFolder = Context.UserExtracting
                                   ? Context.TmpFolder
                                   : Path.Combine(Settings.Current.TemporaryFolder, Path.GetRandomFileName());

            try
            {
                Directory.CreateDirectory(tmpFolder);
                Context.TmpFolder = tmpFolder;
            }
            catch(ThreadAbortException) { }
            catch(Exception)
            {
                if(Debugger.IsAttached) throw;
                Failed?.Invoke("Cannot create temporary folder");
            }

            try
            {
                // If it's a ZIP file not created by Mac OS X, use DotNetZip to decompress it (unar mishandles or corrupts certain ZIP features)
                if(Context.ArchiveFormat == "Zip" && !Context.UnzipWithUnAr && Context.UsableDotNetZip)
                    try
                    {
#if DEBUG
                        stopwatch.Restart();
#endif
                        ZipFile zf = ZipFile.Read(Context.Path, new ReadOptions { Encoding = Encoding.UTF8 });
                        zf.ExtractExistingFile = ExtractExistingFileAction.OverwriteSilently;
                        zf.ExtractProgress += Zf_ExtractProgress;
                        zipCounter = 0;
                        zipCurrentEntryName = "";
                        zf.ExtractAll(tmpFolder);
                    }
                    catch(ThreadAbortException) { }
                    catch(Exception ex)
                    {
                        if(Debugger.IsAttached) throw;
                        Failed?.Invoke($"Exception {ex.Message}\n{ex.InnerException}");
#if DEBUG
                        Console.WriteLine("Exception {0}\n{1}", ex.Message, ex.InnerException);
#endif
                    }
                else
                {
                    if(!Context.UnarUsable)
                    {
                        Failed?.Invoke("The UnArchiver is not correctly installed");
                        return;
                    }

#if DEBUG
                    stopwatch.Restart();
#endif
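                    // The unar flags used here appear to be: -o output directory, -D don't wrap the
                    // output in an extra directory, -r rename on collision, -k hidden keep resource
                    // forks as hidden files; the tripled quotes seem to be a workaround for argument
                    // quoting when launching the process under Mono.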
                    Context.UnarProcess = new Process
                    {
                        StartInfo =
                        {
                            FileName = Settings.Current.UnArchiverPath,
                            CreateNoWindow = true,
                            RedirectStandardOutput = true,
                            UseShellExecute = false,
                            Arguments =
                                $"-o \"\"\"{tmpFolder}\"\"\" -r -D -k hidden \"\"\"{Context.Path}\"\"\""
                        }
                    };

                    long counter = 0;
                    Context.UnarProcess.OutputDataReceived += (sender, e) =>
                    {
                        counter++;
                        UpdateProgress2?.Invoke("", e.Data, counter, Context.NoFilesInArchive);
                    };
                    Context.UnarProcess.Start();
                    Context.UnarProcess.BeginOutputReadLine();
                    Context.UnarProcess.WaitForExit();
                    Context.UnarProcess.Close();
                    Context.UnarProcess = null;
#if DEBUG
                    stopwatch.Stop();
                    Console.WriteLine("Core.ExtractArchive(): Took {0} seconds to extract archive contents using UnAr",
                                      stopwatch.Elapsed.TotalSeconds);
#endif

                    Finished?.Invoke();
                }
            }
            catch(ThreadAbortException) { }
            catch(Exception ex)
            {
                if(Debugger.IsAttached) throw;
                Failed?.Invoke($"Exception {ex.Message}\n{ex.InnerException}");
#if DEBUG
                Console.WriteLine("Exception {0}\n{1}", ex.Message, ex.InnerException);
#endif
            }
        }

        static void Zf_ExtractProgress(object sender, ExtractProgressEventArgs e)
        {
            if(e.CurrentEntry != null && e.CurrentEntry.FileName != zipCurrentEntryName)
            {
                zipCurrentEntryName = e.CurrentEntry.FileName;
                zipCounter++;
            }

            if(UpdateProgress != null && e.CurrentEntry != null && e.EntriesTotal > 0)
                UpdateProgress("Extracting...", e.CurrentEntry.FileName, zipCounter, e.EntriesTotal);
            if(UpdateProgress2 != null && e.TotalBytesToTransfer > 0)
                UpdateProgress2($"{e.BytesTransferred / (double)e.TotalBytesToTransfer:P}",
                                $"{e.BytesTransferred} / {e.TotalBytesToTransfer}", e.BytesTransferred,
                                e.TotalBytesToTransfer);

            if(e.EventType != ZipProgressEventType.Extracting_AfterExtractAll || Finished == null) return;

#if DEBUG
            stopwatch.Stop();
            Console.WriteLine("Core.Zf_ExtractProgress(): Took {0} seconds to extract archive contents using DotNetZip",
                              stopwatch.Elapsed.TotalSeconds);
#endif
            Finished();
        }

        public static void CompressTo()
        {
            try
            {
                if(string.IsNullOrWhiteSpace(Context.Path))
                {
                    Failed?.Invoke("Destination cannot be empty");
                    return;
                }

                if(Directory.Exists(Context.Path))
                {
                    Failed?.Invoke("Destination cannot be a folder");
                    return;
                }

                if(Context.DbInfo.Id == 0)
                {
                    Failed?.Invoke("Operating system must be set");
                    return;
                }

                if(dbCore.DbOps.HasSymlinks(Context.DbInfo.Id))
                {
                    Failed?.Invoke("Cannot store symbolic links in ZIP files");
                    return;
                }

                if(!Context.UsableDotNetZip)
                {
                    Failed?.Invoke("Cannot create ZIP files");
                    return;
                }

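                // DotNetZip options: best Deflate compression, timestamps emitted in both Unix and
                // Windows formats, Zip64 only when the archive needs it, and entries sorted before
                // saving (presumably for deterministic output).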
                ZipFile zf = new ZipFile(Context.Path, Encoding.UTF8)
                {
                    CompressionLevel = Ionic.Zlib.CompressionLevel.BestCompression,
                    CompressionMethod = CompressionMethod.Deflate,
                    EmitTimesInUnixFormatWhenSaving = true,
                    EmitTimesInWindowsFormatWhenSaving = true,
                    UseZip64WhenSaving = Zip64Option.AsNecessary,
                    SortEntriesBeforeSaving = true
                };

                zf.SaveProgress += Zf_SaveProgress;

                UpdateProgress?.Invoke("", "Asking DB for files...", 1, 100);
                dbCore.DbOps.GetAllFilesInOs(out List<DbOsFile> files, Context.DbInfo.Id);

                UpdateProgress?.Invoke("", "Asking DB for folders...", 2, 100);
                dbCore.DbOps.GetAllFolders(out List<DbFolder> folders, Context.DbInfo.Id);

                UpdateProgress?.Invoke("", "Creating folders...", 3, 100);

#if DEBUG
                stopwatch.Restart();
#endif
                long counter = 0;
                foreach(DbFolder folder in folders)
                {
                    UpdateProgress2?.Invoke("", folder.Path, counter, folders.Count);

                    ZipEntry zd = zf.AddDirectoryByName(folder.Path);
                    zd.Attributes = folder.Attributes;
                    zd.CreationTime = folder.CreationTimeUtc;
                    zd.AccessedTime = folder.LastAccessTimeUtc;
                    zd.LastModified = folder.LastWriteTimeUtc;
                    zd.ModifiedTime = folder.LastWriteTimeUtc;
                    counter++;
                }
#if DEBUG
                stopwatch.Stop();
                Console.WriteLine("Core.CompressTo(): Took {0} seconds to add folders to ZIP",
                                  stopwatch.Elapsed.TotalSeconds);
#endif

                counter = 3;
                Context.Hashes = new Dictionary<string, DbOsFile>();
#if DEBUG
                stopwatch.Restart();
#endif
                foreach(DbOsFile file in files)
                {
                    UpdateProgress?.Invoke("", $"Adding {file.Path}...", counter, 3 + files.Count);

                    Context.Hashes.Add(file.Path, file);

                    ZipEntry zi = zf.AddEntry(file.Path, Zf_HandleOpen, Zf_HandleClose);
                    zi.Attributes = file.Attributes;
                    zi.CreationTime = file.CreationTimeUtc;
                    zi.AccessedTime = file.LastAccessTimeUtc;
                    zi.LastModified = file.LastWriteTimeUtc;
                    zi.ModifiedTime = file.LastWriteTimeUtc;
                    counter++;
                }
#if DEBUG
                stopwatch.Stop();
                Console.WriteLine("Core.CompressTo(): Took {0} seconds to add files to ZIP",
                                  stopwatch.Elapsed.TotalSeconds);
                stopwatch.Restart();
#endif
                zipCounter = 0;
                zipCurrentEntryName = "";
                zf.Save();
            }
            catch(ThreadAbortException) { }
            catch(Exception ex)
            {
                if(Debugger.IsAttached) throw;
                Failed?.Invoke($"Exception {ex.Message}\n{ex.InnerException}");
#if DEBUG
                Console.WriteLine("Exception {0}\n{1}", ex.Message, ex.InnerException);
#endif
            }
        }

        static Stream Zf_HandleOpen(string entryName)
        {
            DbOsFile file;
            if(!Context.Hashes.TryGetValue(entryName, out file))
                if(!Context.Hashes.TryGetValue(entryName.Replace('/', '\\'), out file))
                    throw new ArgumentException("Cannot find requested zip entry in hashes dictionary");

            // Special case for empty files, as SharpCompress seems to crash when trying to decompress them from LZMA.
            if(file.Length == 0) return new MemoryStream();

            Stream zStream = null;
            string repoPath;
            AlgoEnum algorithm;
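
            // Probe the SHA256-sharded repository path for each known extension
            // (.gz, .bz2, .lzma, .lz) to find out how this file was stored.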
            if(File.Exists(Path.Combine(Settings.Current.RepositoryPath, file.Sha256[0].ToString(),
                                        file.Sha256[1].ToString(), file.Sha256[2].ToString(),
                                        file.Sha256[3].ToString(), file.Sha256[4].ToString(),
                                        file.Sha256 + ".gz")))
            {
                repoPath = Path.Combine(Settings.Current.RepositoryPath, file.Sha256[0].ToString(),
                                        file.Sha256[1].ToString(), file.Sha256[2].ToString(),
                                        file.Sha256[3].ToString(), file.Sha256[4].ToString(),
                                        file.Sha256 + ".gz");
                algorithm = AlgoEnum.GZip;
            }
            else if(File.Exists(Path.Combine(Settings.Current.RepositoryPath, file.Sha256[0].ToString(),
                                             file.Sha256[1].ToString(), file.Sha256[2].ToString(),
                                             file.Sha256[3].ToString(), file.Sha256[4].ToString(),
                                             file.Sha256 + ".bz2")))
            {
                repoPath = Path.Combine(Settings.Current.RepositoryPath, file.Sha256[0].ToString(),
                                        file.Sha256[1].ToString(), file.Sha256[2].ToString(),
                                        file.Sha256[3].ToString(), file.Sha256[4].ToString(),
                                        file.Sha256 + ".bz2");
                algorithm = AlgoEnum.BZip2;
            }
            else if(File.Exists(Path.Combine(Settings.Current.RepositoryPath, file.Sha256[0].ToString(),
                                             file.Sha256[1].ToString(), file.Sha256[2].ToString(),
                                             file.Sha256[3].ToString(), file.Sha256[4].ToString(),
                                             file.Sha256 + ".lzma")))
            {
                repoPath = Path.Combine(Settings.Current.RepositoryPath, file.Sha256[0].ToString(),
                                        file.Sha256[1].ToString(), file.Sha256[2].ToString(),
                                        file.Sha256[3].ToString(), file.Sha256[4].ToString(),
                                        file.Sha256 + ".lzma");
                algorithm = AlgoEnum.LZMA;
            }
            else if(File.Exists(Path.Combine(Settings.Current.RepositoryPath, file.Sha256[0].ToString(),
                                             file.Sha256[1].ToString(), file.Sha256[2].ToString(),
                                             file.Sha256[3].ToString(), file.Sha256[4].ToString(),
                                             file.Sha256 + ".lz")))
            {
                repoPath = Path.Combine(Settings.Current.RepositoryPath, file.Sha256[0].ToString(),
                                        file.Sha256[1].ToString(), file.Sha256[2].ToString(),
                                        file.Sha256[3].ToString(), file.Sha256[4].ToString(),
                                        file.Sha256 + ".lz");
                algorithm = AlgoEnum.LZip;
            }
            else
                throw new ArgumentException($"Cannot find file with hash {file.Sha256} in the repository");

            FileStream inFs = new FileStream(repoPath, FileMode.Open, FileAccess.Read);

            switch(algorithm)
            {
                case AlgoEnum.GZip:
                    zStream = new GZipStream(inFs, CompressionMode.Decompress);
                    break;
                case AlgoEnum.BZip2:
                    zStream = new BZip2Stream(inFs, CompressionMode.Decompress);
                    break;
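                // Reverse of the header written by CompressFiles(): read the 5-byte LZMA encoder
                // properties, skip the 8-byte uncompressed length, and hand the remaining
                // (length - 13) bytes to LzmaStream together with the known output length.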
                case AlgoEnum.LZMA:
                    byte[] properties = new byte[5];
                    inFs.Read(properties, 0, 5);
                    inFs.Seek(8, SeekOrigin.Current);
                    zStream = new LzmaStream(properties, inFs, inFs.Length - 13, file.Length);
                    break;
                case AlgoEnum.LZip:
                    zStream = new LZipStream(inFs, CompressionMode.Decompress);
                    break;
            }

            return zStream;
        }

        static void Zf_HandleClose(string entryName, Stream stream)
        {
            stream.Close();
        }

        static void Zf_SaveProgress(object sender, SaveProgressEventArgs e)
        {
            if(e.CurrentEntry != null && e.CurrentEntry.FileName != zipCurrentEntryName)
            {
                zipCurrentEntryName = e.CurrentEntry.FileName;
                zipCounter++;
            }

            if(UpdateProgress != null && e.CurrentEntry != null && e.EntriesTotal > 0)
                UpdateProgress("Compressing...", e.CurrentEntry.FileName, zipCounter, e.EntriesTotal);
            if(UpdateProgress2 != null && e.TotalBytesToTransfer > 0)
                UpdateProgress2($"{e.BytesTransferred / (double)e.TotalBytesToTransfer:P}",
                                $"{e.BytesTransferred} / {e.TotalBytesToTransfer}", e.BytesTransferred,
                                e.TotalBytesToTransfer);

            switch(e.EventType)
            {
                case ZipProgressEventType.Error_Saving:
                    Failed?.Invoke("An error occurred creating the ZIP file.");
                    break;
                case ZipProgressEventType.Saving_Completed when Finished != null:
#if DEBUG
                    stopwatch.Stop();
                    Console.WriteLine("Core.Zf_SaveProgress(): Took {0} seconds to compress files to ZIP",
                                      stopwatch.Elapsed.TotalSeconds);
#endif
                    Finished();
                    break;
            }
        }
    }
}