diff --git a/DiscImageChef.Core/Devices/Dumping/ATA.cs b/DiscImageChef.Core/Devices/Dumping/ATA.cs
index c05eedfe..8e532248 100644
--- a/DiscImageChef.Core/Devices/Dumping/ATA.cs
+++ b/DiscImageChef.Core/Devices/Dumping/ATA.cs
@@ -82,7 +82,7 @@ namespace DiscImageChef.Core.Devices.Dumping
                                   string outputPrefix, string outputPath,
                                   Dictionary<string, string> formatOptions, CICMMetadataType preSidecar, uint skip,
-                                  bool nometadata)
+                                  bool nometadata, bool notrim)
         {
             bool aborted;

@@ -238,6 +238,7 @@ namespace DiscImageChef.Core.Devices.Dumping
             ibgLog = new IbgLog(outputPrefix + ".ibg", ATA_PROFILE);

             if(resume.NextBlock > 0) dumpLog.WriteLine("Resuming from block {0}.", resume.NextBlock);
+            bool newTrim = false;

             start = DateTime.UtcNow;
             for(ulong i = resume.NextBlock; i < blocks; i += blocksToRead)
@@ -282,7 +283,8 @@ namespace DiscImageChef.Core.Devices.Dumping
                     outputPlugin.WriteSectors(new byte[blockSize * skip], i, skip);
                     imageWriteDuration += (DateTime.Now - writeStart).TotalSeconds;
                     dumpLog.WriteLine("Skipping {0} blocks from errored block {1}.", skip, i);
-                    i += skip - blocksToRead;
+                    i += skip - blocksToRead;
+                    newTrim = true;
                 }

                 double newSpeed =
@@ -303,6 +305,41 @@ namespace DiscImageChef.Core.Devices.Dumping
             dumpLog.WriteLine("Average write speed {0:F3} KiB/sec.",
                               (double)blockSize * (double)(blocks + 1) / 1024 / imageWriteDuration);

+            #region Trimming
+            if(resume.BadBlocks.Count > 0 && !aborted && !notrim && newTrim)
+            {
+                start = DateTime.UtcNow;
+                dumpLog.WriteLine("Trimming bad sectors");
+
+                ulong[] tmpArray = resume.BadBlocks.ToArray();
+                foreach(ulong badSector in tmpArray)
+                {
+                    if(aborted)
+                    {
+                        currentTry.Extents = ExtentsConverter.ToMetadata(extents);
+                        dumpLog.WriteLine("Aborted!");
+                        break;
+                    }
+
+                    DicConsole.Write("\rTrimming sector {0}", badSector);
+
+                    bool error = ataReader.ReadBlock(out cmdBuf, badSector, out duration);
+
+                    totalDuration += duration;
+
+                    if(error) continue;
+
+                    resume.BadBlocks.Remove(badSector);
+                    extents.Add(badSector);
+                    outputPlugin.WriteSector(cmdBuf, badSector);
+                }
+
+                DicConsole.WriteLine();
+                end = DateTime.UtcNow;
+                dumpLog.WriteLine("Trimming finished in {0} seconds.", (end - start).TotalSeconds);
+            }
+            #endregion Trimming
+
             #region Error handling
             if(resume.BadBlocks.Count > 0 && !aborted && retryPasses > 0)
             {
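Every backend touched by this patch gains the same trim pass right after the main dump loop: when at least one block was skipped because of a read error (newTrim) and the user did not disable it (notrim), every sector still listed in resume.BadBlocks is re-read one at a time; sectors that now read cleanly are dropped from the bad-block list, added to the dumped extents and written to the image, while the rest are left for the retry passes. What follows is a minimal, self-contained C# sketch of that pattern, not the project's API: the TryReadSector delegate and the plain List<ulong> stand in for the per-device readers (ataReader.ReadBlock, scsiReader.ReadBlock, dev.ReadCd, dev.Read, dev.Read12) and for DiscImageChef's Resume, ExtentsULong and IWritableImage types, so every name below is illustrative only.

using System;
using System.Collections.Generic;

// Hypothetical stand-in for the per-device single-sector read used by the real trim pass.
delegate bool TryReadSector(ulong sector, out byte[] data);

static class TrimPassSketch
{
    // Re-reads every sector still marked bad, one at a time; sectors that now read fine
    // are removed from the bad list and handed to the image writer.
    public static void Trim(List<ulong> badBlocks, TryReadSector read,
                            Action<ulong, byte[]> writeSector, bool notrim, bool newTrim)
    {
        if(badBlocks.Count == 0 || notrim || !newTrim) return; // same gate as the hunks above

        foreach(ulong badSector in badBlocks.ToArray())        // snapshot: the list shrinks inside the loop
        {
            Console.Write("\rTrimming sector {0}", badSector);

            if(!read(badSector, out byte[] data)) continue;    // still unreadable, leave it for the retry passes

            badBlocks.Remove(badSector);                       // readable now, so no longer bad
            writeSector(badSector, data);                      // and it goes straight into the image
        }

        Console.WriteLine();
    }

    static void Main()
    {
        var bad = new List<ulong> { 10, 11, 12 };
        // Fake reader for the example: pretend only sector 11 stays unreadable.
        Trim(bad, (ulong s, out byte[] d) => { d = new byte[512]; return s != 11; },
             (s, d) => { /* the real code calls outputPlugin.WriteSector here */ },
             notrim: false, newTrim: true);
        Console.WriteLine("Still bad: {0}", string.Join(", ", bad)); // prints "Still bad: 11"
    }
}

Iterating over a snapshot (resume.BadBlocks.ToArray()) is what lets the real code call Remove on the live list mid-loop without invalidating the enumeration.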
diff --git a/DiscImageChef.Core/Devices/Dumping/CompactDisc.cs b/DiscImageChef.Core/Devices/Dumping/CompactDisc.cs
index 3f107213..ff33ab32 100644
--- a/DiscImageChef.Core/Devices/Dumping/CompactDisc.cs
+++ b/DiscImageChef.Core/Devices/Dumping/CompactDisc.cs
@@ -91,7 +91,7 @@ namespace DiscImageChef.Core.Devices.Dumping
                                   string outputPrefix, string outputPath,
                                   Dictionary<string, string> formatOptions, CICMMetadataType preSidecar, uint skip,
-                                  bool nometadata)
+                                  bool nometadata, bool notrim)
         {
             uint subSize;
             DateTime start;
@@ -783,6 +783,7 @@ namespace DiscImageChef.Core.Devices.Dumping
             double imageWriteDuration = 0;

             if(skip < blocksToRead) skip = blocksToRead;
+            bool newTrim = false;

             // Start reading
             start = DateTime.UtcNow;
@@ -872,7 +873,8 @@ namespace DiscImageChef.Core.Devices.Dumping
                     ibgLog.Write(i, 0);

                     dumpLog.WriteLine("Skipping {0} blocks from errored block {1}.", skip, i);
-                    i += skip - blocksToRead;
+                    i += skip - blocksToRead;
+                    newTrim = true;
                 }

                 double newSpeed =
@@ -894,6 +896,60 @@ namespace DiscImageChef.Core.Devices.Dumping
             dumpLog.WriteLine("Average write speed {0:F3} KiB/sec.",
                               (double)blockSize * (double)(blocks + 1) / 1024 / imageWriteDuration);

+            #region Compact Disc Error trimming
+            if(resume.BadBlocks.Count > 0 && !aborted && !notrim && newTrim)
+            {
+                start = DateTime.UtcNow;
+                dumpLog.WriteLine("Trimming bad sectors");
+
+                ulong[] tmpArray = resume.BadBlocks.ToArray();
+                foreach(ulong badSector in tmpArray)
+                {
+                    if(aborted)
+                    {
+                        currentTry.Extents = ExtentsConverter.ToMetadata(extents);
+                        dumpLog.WriteLine("Aborted!");
+                        break;
+                    }
+
+                    DicConsole.Write("\rTrimming sector {0}", badSector);
+
+                    if(readcd)
+                    {
+                        sense = true;
+                        sense = dev.ReadCd(out readBuffer, out senseBuf, (uint)badSector, blockSize, 1,
+                                           MmcSectorTypes.AllTypes, false, false, true, MmcHeaderCodes.AllHeaders, true,
+                                           true, MmcErrorField.None, supportedSubchannel, dev.Timeout,
+                                           out double cmdDuration);
+                        totalDuration += cmdDuration;
+                    }
+
+                    if(sense || dev.Error) continue;
+
+                    if(!sense && !dev.Error)
+                    {
+                        resume.BadBlocks.Remove(badSector);
+                        extents.Add(badSector);
+                    }
+
+                    if(supportedSubchannel != MmcSubchannel.None)
+                    {
+                        byte[] data = new byte[SECTOR_SIZE];
+                        byte[] sub  = new byte[subSize];
+                        Array.Copy(readBuffer, 0, data, 0, SECTOR_SIZE);
+                        Array.Copy(readBuffer, SECTOR_SIZE, sub, 0, subSize);
+                        outputPlugin.WriteSectorLong(data, badSector);
+                        outputPlugin.WriteSectorTag(sub, badSector, SectorTagType.CdSectorSubchannel);
+                    }
+                    else outputPlugin.WriteSectorLong(readBuffer, badSector);
+                }
+
+                DicConsole.WriteLine();
+                end = DateTime.UtcNow;
+                dumpLog.WriteLine("Trimming finished in {0} seconds.", (end - start).TotalSeconds);
+            }
+            #endregion Compact Disc Error trimming
+
             #region Compact Disc Error handling
             // TODO: Pass 0 should be called differently, splitting, or something like that, because we are just
             // separating skipped good sectors from really bad sectors and it's getting too chatty on log there...
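In the Compact Disc variant the single READ CD buffer carries the 2,352-byte raw sector followed by the subchannel, so the trimming hunk splits it with two Array.Copy calls before writing the long sector and its subchannel tag separately. Below is a small sketch of just that split, assuming the 96-byte raw P-W subchannel; the real subSize depends on which MmcSubchannel mode the drive supports (16 bytes for a Q-only subchannel), so both constants here are illustrative.

using System;

static class CdTrimBufferSplit
{
    const int SectorSize = 2352; // raw CD sector, same role as SECTOR_SIZE in the diff
    const int SubSize    = 96;   // assumed raw P-W subchannel; a Q-only subchannel would be 16

    // Splits the buffer returned by READ CD into main-channel data and subchannel,
    // mirroring the Array.Copy pair in the trimming hunk above.
    public static (byte[] data, byte[] sub) Split(byte[] readBuffer)
    {
        if(readBuffer == null || readBuffer.Length < SectorSize + SubSize)
            throw new ArgumentException("Buffer does not contain a full sector plus subchannel.");

        byte[] data = new byte[SectorSize];
        byte[] sub  = new byte[SubSize];
        Array.Copy(readBuffer, 0,          data, 0, SectorSize);
        Array.Copy(readBuffer, SectorSize, sub,  0, SubSize);
        return (data, sub);
    }

    static void Main()
    {
        (byte[] data, byte[] sub) = Split(new byte[SectorSize + SubSize]);
        Console.WriteLine("{0} bytes of user data, {1} bytes of subchannel", data.Length, sub.Length);
    }
}

When no subchannel was requested the whole buffer is the sector and the else branch writes it as-is, which is why the split only happens under supportedSubchannel != MmcSubchannel.None.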
diff --git a/DiscImageChef.Core/Devices/Dumping/MMC.cs b/DiscImageChef.Core/Devices/Dumping/MMC.cs
index 2eba8c42..702e8582 100644
--- a/DiscImageChef.Core/Devices/Dumping/MMC.cs
+++ b/DiscImageChef.Core/Devices/Dumping/MMC.cs
@@ -75,16 +75,16 @@ namespace DiscImageChef.Core.Devices.Dumping
         ///     Path to output file
         ///     Formats to pass to output file plugin
         ///     If trying to dump GOD or WOD, or XGDs without a Kreon drive
-        internal static void Dump(Device dev, string devicePath, IWritableImage outputPlugin, ushort retryPasses,
-                                  bool force, bool dumpRaw, bool persistent, bool stopOnError,
-                                  ref MediaType dskType,
-                                  ref
-                                      Resume resume, ref DumpLog dumpLog, bool dumpLeadIn,
-                                  Encoding encoding,
-                                  string
-                                      outputPrefix, string outputPath, Dictionary<string, string> formatOptions,
-                                  CICMMetadataType
-                                      preSidecar, uint skip, bool nometadata)
+        internal static void Dump(Device dev, string devicePath,
+                                  IWritableImage outputPlugin, ushort retryPasses,
+                                  bool force, bool dumpRaw,
+                                  bool persistent, bool stopOnError, ref MediaType dskType,
+                                  ref Resume resume, ref DumpLog dumpLog,
+                                  bool dumpLeadIn, Encoding encoding,
+                                  string outputPrefix, string outputPath,
+                                  Dictionary<string, string> formatOptions,
+                                  CICMMetadataType preSidecar, uint skip,
+                                  bool nometadata, bool notrim)
         {
             bool sense;
             ulong blocks;
@@ -200,12 +200,12 @@ namespace DiscImageChef.Core.Devices.Dumping
             {
                 CompactDisc.Dump(dev, devicePath, outputPlugin, retryPasses, force, dumpRaw, persistent, stopOnError,
                                  ref dskType, ref resume, ref dumpLog, dumpLeadIn, encoding, outputPrefix, outputPath,
-                                 formatOptions, preSidecar, skip, nometadata);
+                                 formatOptions, preSidecar, skip, nometadata, notrim);
                 return;
             }

             Reader scsiReader = new Reader(dev, dev.Timeout, null, dumpRaw);
-            blocks = scsiReader.GetDeviceBlocks();
+            blocks = scsiReader.GetDeviceBlocks();
             dumpLog.WriteLine("Device reports disc has {0} blocks", blocks);
             Dictionary<MediaTagType, byte[]> mediaTags = new Dictionary<MediaTagType, byte[]>();
@@ -219,7 +219,7 @@ namespace DiscImageChef.Core.Devices.Dumping
                 if(!sense)
                 {
                     PFI.PhysicalFormatInformation? nintendoPfi = PFI.Decode(cmdBuf);
-                    if(nintendoPfi != null)
+                    if(nintendoPfi != null)
                         if(nintendoPfi.Value.DiskCategory == DiskCategory.Nintendo &&
                            nintendoPfi.Value.PartVersion == 15)
                         {
@@ -253,7 +253,7 @@ namespace DiscImageChef.Core.Devices.Dumping
                     if(!sense)
                         if(PFI.Decode(cmdBuf).HasValue)
                         {
-                            tmpBuf = new byte[cmdBuf.Length - 4];
+                            tmpBuf = new byte[cmdBuf.Length - 4];
                             Array.Copy(cmdBuf, 4, tmpBuf, 0, cmdBuf.Length - 4);
                             mediaTags.Add(MediaTagType.DVD_PFI, tmpBuf);
@@ -352,7 +352,7 @@ namespace DiscImageChef.Core.Devices.Dumping
                         if(cmdBuf.Length == 2052)
                         {
-                            tmpBuf = new byte[cmdBuf.Length - 4];
+                            tmpBuf = new byte[cmdBuf.Length - 4];
                             Array.Copy(cmdBuf, 4, tmpBuf, 0, cmdBuf.Length - 4);
                             mediaTags.Add(MediaTagType.DVD_DMI, tmpBuf);
                         }
@@ -374,7 +374,7 @@ namespace DiscImageChef.Core.Devices.Dumping
                     if(!sense)
                         if(CSS_CPRM.DecodeLeadInCopyright(cmdBuf).HasValue)
                         {
-                            tmpBuf = new byte[cmdBuf.Length - 4];
+                            tmpBuf = new byte[cmdBuf.Length - 4];
                             Array.Copy(cmdBuf, 4, tmpBuf, 0, cmdBuf.Length - 4);
                             mediaTags.Add(MediaTagType.DVD_CMI, tmpBuf);
                         }
@@ -392,7 +392,7 @@ namespace DiscImageChef.Core.Devices.Dumping
                                                   MmcDiscStructureFormat.BurstCuttingArea, 0, dev.Timeout, out _);
                     if(!sense)
                     {
-                        tmpBuf = new byte[cmdBuf.Length - 4];
+                        tmpBuf = new byte[cmdBuf.Length - 4];
                         Array.Copy(cmdBuf, 4, tmpBuf, 0, cmdBuf.Length - 4);
                         mediaTags.Add(MediaTagType.DVD_BCA, tmpBuf);
                     }
@@ -409,7 +409,7 @@ namespace DiscImageChef.Core.Devices.Dumping
                     if(!sense)
                         if(DDS.Decode(cmdBuf).HasValue)
                         {
-                            tmpBuf = new byte[cmdBuf.Length - 4];
+                            tmpBuf = new byte[cmdBuf.Length - 4];
                             Array.Copy(cmdBuf, 4, tmpBuf, 0, cmdBuf.Length - 4);
                             mediaTags.Add(MediaTagType.DVDRAM_DDS, tmpBuf);
                         }
@@ -421,7 +421,7 @@ namespace DiscImageChef.Core.Devices.Dumping
                     if(!sense)
                         if(Spare.Decode(cmdBuf).HasValue)
                         {
-                            tmpBuf = new byte[cmdBuf.Length - 4];
+                            tmpBuf = new byte[cmdBuf.Length - 4];
                             Array.Copy(cmdBuf, 4, tmpBuf, 0, cmdBuf.Length - 4);
                             mediaTags.Add(MediaTagType.DVDRAM_SpareArea, tmpBuf);
                         }
@@ -437,7 +437,7 @@ namespace DiscImageChef.Core.Devices.Dumping
                                                   MmcDiscStructureFormat.PreRecordedInfo, 0, dev.Timeout, out _);
                     if(!sense)
                     {
-                        tmpBuf = new byte[cmdBuf.Length - 4];
+                        tmpBuf = new byte[cmdBuf.Length - 4];
                         Array.Copy(cmdBuf, 4, tmpBuf, 0, cmdBuf.Length - 4);
                         mediaTags.Add(MediaTagType.DVDR_PreRecordedInfo, tmpBuf);
                     }
@@ -457,7 +457,7 @@ namespace DiscImageChef.Core.Devices.Dumping
                                                   MmcDiscStructureFormat.DvdrMediaIdentifier, 0, dev.Timeout, out _);
                     if(!sense)
                     {
-                        tmpBuf = new byte[cmdBuf.Length - 4];
+                        tmpBuf = new byte[cmdBuf.Length - 4];
                         Array.Copy(cmdBuf, 4, tmpBuf, 0, cmdBuf.Length - 4);
                         mediaTags.Add(MediaTagType.DVDR_MediaIdentifier, tmpBuf);
                     }
@@ -468,7 +468,7 @@ namespace DiscImageChef.Core.Devices.Dumping
                                                   out _);
                     if(!sense)
                     {
-                        tmpBuf = new byte[cmdBuf.Length - 4];
+                        tmpBuf = new byte[cmdBuf.Length - 4];
                         Array.Copy(cmdBuf, 4, tmpBuf, 0, cmdBuf.Length - 4);
                         mediaTags.Add(MediaTagType.DVDR_PFI, tmpBuf);
                     }
@@ -486,7 +486,7 @@ namespace DiscImageChef.Core.Devices.Dumping
                                                   MmcDiscStructureFormat.Adip, 0, dev.Timeout, out _);
                     if(!sense)
                     {
-                        tmpBuf = new byte[cmdBuf.Length - 4];
+                        tmpBuf = new byte[cmdBuf.Length - 4];
                         Array.Copy(cmdBuf, 4, tmpBuf, 0, cmdBuf.Length - 4);
                         mediaTags.Add(MediaTagType.DVD_ADIP, tmpBuf);
                     }
@@ -496,7 +496,7 @@ namespace DiscImageChef.Core.Devices.Dumping
                                                   MmcDiscStructureFormat.Dcb, 0, dev.Timeout, out _);
                     if(!sense)
                     {
-                        tmpBuf = new byte[cmdBuf.Length - 4];
+                        tmpBuf = new byte[cmdBuf.Length - 4];
                         Array.Copy(cmdBuf, 4, tmpBuf, 0, cmdBuf.Length - 4);
                         mediaTags.Add(MediaTagType.DCB, tmpBuf);
                     }
@@ -512,7 +512,7 @@ namespace DiscImageChef.Core.Devices.Dumping
                                                   out _);
                     if(!sense)
                     {
-                        tmpBuf = new byte[cmdBuf.Length - 4];
+                        tmpBuf = new byte[cmdBuf.Length - 4];
                         Array.Copy(cmdBuf, 4, tmpBuf, 0, cmdBuf.Length - 4);
                         mediaTags.Add(MediaTagType.HDDVD_CPI, tmpBuf);
                     }
@@ -532,7 +532,7 @@ namespace DiscImageChef.Core.Devices.Dumping
                     if(!sense)
                         if(DI.Decode(cmdBuf).HasValue)
                         {
-                            tmpBuf = new byte[cmdBuf.Length - 4];
+                            tmpBuf = new byte[cmdBuf.Length - 4];
                             Array.Copy(cmdBuf, 4, tmpBuf, 0, cmdBuf.Length - 4);
                             mediaTags.Add(MediaTagType.BD_DI, tmpBuf);
                         }
@@ -561,7 +561,7 @@ namespace DiscImageChef.Core.Devices.Dumping
                                                   MmcDiscStructureFormat.BdBurstCuttingArea, 0, dev.Timeout, out _);
                     if(!sense)
                     {
-                        tmpBuf = new byte[cmdBuf.Length - 4];
+                        tmpBuf = new byte[cmdBuf.Length - 4];
                         Array.Copy(cmdBuf, 4, tmpBuf, 0, cmdBuf.Length - 4);
                         mediaTags.Add(MediaTagType.BD_BCA, tmpBuf);
                     }
@@ -579,7 +579,7 @@ namespace DiscImageChef.Core.Devices.Dumping
                                                   MmcDiscStructureFormat.BdDds, 0, dev.Timeout, out _);
                     if(!sense)
                     {
-                        tmpBuf = new byte[cmdBuf.Length - 4];
+                        tmpBuf = new byte[cmdBuf.Length - 4];
                         Array.Copy(cmdBuf, 4, tmpBuf, 0, cmdBuf.Length - 4);
                         mediaTags.Add(MediaTagType.BD_DDS, tmpBuf);
                     }
@@ -589,7 +589,7 @@ namespace DiscImageChef.Core.Devices.Dumping
                                                   MmcDiscStructureFormat.BdSpareAreaInformation, 0, dev.Timeout, out _);
                     if(!sense)
                     {
-                        tmpBuf = new byte[cmdBuf.Length - 4];
+                        tmpBuf = new byte[cmdBuf.Length - 4];
                         Array.Copy(cmdBuf, 4, tmpBuf, 0, cmdBuf.Length - 4);
                         mediaTags.Add(MediaTagType.BD_SpareArea, tmpBuf);
                     }
@@ -602,13 +602,13 @@ namespace DiscImageChef.Core.Devices.Dumping
             {
                 Xgd.Dump(dev, devicePath, outputPlugin, retryPasses, force, dumpRaw, persistent, stopOnError, mediaTags,
                          ref dskType, ref resume, ref dumpLog, encoding, outputPrefix, outputPath, formatOptions,
-                         preSidecar, skip, nometadata);
+                         preSidecar, skip, nometadata, notrim);

                 return;
             }

             Sbc.Dump(dev, devicePath, outputPlugin, retryPasses, force, dumpRaw, persistent, stopOnError, mediaTags,
                      ref dskType, true, ref resume, ref dumpLog, encoding, outputPrefix, outputPath, formatOptions,
-                     preSidecar, skip, nometadata);
+                     preSidecar, skip, nometadata, notrim);
         }

         internal static void AddMediaTagToSidecar(string outputPath,
@@ -729,8 +729,8 @@ namespace DiscImageChef.Core.Devices.Dumping
                     {
                         new XboxSecuritySectorsType
                         {
-                            RequestNumber = 0,
-                            RequestVersion = 1,
+                            RequestNumber = 0,
+                            RequestVersion = 1,
                             SecuritySectors = new DumpType
                             {
                                 Image = outputPath,
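Almost every READ DISC STRUCTURE hunk above repeats one idiom: the reply begins with a 4-byte header (a 2-byte data length plus two reserved bytes) that is stripped before the payload is stored as a media tag. StripStructureHeader below is not a DiscImageChef helper, only a sketch of that recurring Array.Copy(cmdBuf, 4, ...) step:

using System;

static class DiscStructureSketch
{
    // READ DISC STRUCTURE replies begin with a 4-byte header (2-byte length + 2 reserved bytes);
    // the dumping code drops it before adding the descriptor to the media-tag dictionary.
    public static byte[] StripStructureHeader(byte[] cmdBuf)
    {
        if(cmdBuf == null || cmdBuf.Length <= 4) return Array.Empty<byte>();

        byte[] tmpBuf = new byte[cmdBuf.Length - 4];
        Array.Copy(cmdBuf, 4, tmpBuf, 0, cmdBuf.Length - 4);
        return tmpBuf;
    }

    static void Main()
    {
        byte[] fakeReply = new byte[2052];                          // e.g. a PFI/DMI-sized response
        Console.WriteLine(StripStructureHeader(fakeReply).Length);  // 2048-byte payload
    }
}

That header also explains the cmdBuf.Length == 2052 check in the DMI hunk: 2048 bytes of structure plus the 4-byte header.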
diff --git a/DiscImageChef.Core/Devices/Dumping/NVMe.cs b/DiscImageChef.Core/Devices/Dumping/NVMe.cs
index 52dac8b3..da2e07b7 100644
--- a/DiscImageChef.Core/Devices/Dumping/NVMe.cs
+++ b/DiscImageChef.Core/Devices/Dumping/NVMe.cs
@@ -50,7 +50,7 @@ namespace DiscImageChef.Core.Devices.Dumping
                                   DumpLog dumpLog, Encoding encoding, string outputPrefix, string outputPath,
                                   Dictionary<string, string>
-                                      formatOptions, CICMMetadataType preSidecar, uint skip, bool nometadata)
+                                      formatOptions, CICMMetadataType preSidecar, uint skip, bool nometadata, bool notrim)
         {
             throw new NotImplementedException("NVMe devices not yet supported.");
         }
diff --git a/DiscImageChef.Core/Devices/Dumping/SBC.cs b/DiscImageChef.Core/Devices/Dumping/SBC.cs
index 77286360..bc656d56 100644
--- a/DiscImageChef.Core/Devices/Dumping/SBC.cs
+++ b/DiscImageChef.Core/Devices/Dumping/SBC.cs
@@ -87,7 +87,7 @@ namespace DiscImageChef.Core.Devices.Dumping
                                   Encoding encoding, string outputPrefix, string outputPath,
                                   Dictionary<string, string>
                                       formatOptions, CICMMetadataType preSidecar, uint skip,
-                                  bool nometadata)
+                                  bool nometadata, bool notrim)
         {
             bool sense;
             ulong blocks;
@@ -347,6 +347,7 @@ namespace DiscImageChef.Core.Devices.Dumping
                     throw new InvalidOperationException("Could not process resume file, not continuing...");

             if(resume.NextBlock > 0) dumpLog.WriteLine("Resuming from block {0}.", resume.NextBlock);
+            bool newTrim = false;

             for(ulong i = resume.NextBlock; i < blocks; i += blocksToRead)
             {
@@ -396,7 +397,8 @@ namespace DiscImageChef.Core.Devices.Dumping
                     ibgLog.Write(i, 0);

                     dumpLog.WriteLine("Skipping {0} blocks from errored block {1}.", skip, i);
-                    i += skip - blocksToRead;
+                    i += skip - blocksToRead;
+                    newTrim = true;
                 }

                 double newSpeed =
@@ -417,6 +419,39 @@ namespace DiscImageChef.Core.Devices.Dumping
             dumpLog.WriteLine("Average write speed {0:F3} KiB/sec.",
                               (double)blockSize * (double)(blocks + 1) / 1024 / imageWriteDuration);

+            #region Trimming
+            if(resume.BadBlocks.Count > 0 && !aborted && !notrim && newTrim)
+            {
+                start = DateTime.UtcNow;
+                dumpLog.WriteLine("Trimming bad sectors");
+
+                ulong[] tmpArray = resume.BadBlocks.ToArray();
+                foreach(ulong badSector in tmpArray)
+                {
+                    if(aborted)
+                    {
+                        currentTry.Extents = ExtentsConverter.ToMetadata(extents);
+                        dumpLog.WriteLine("Aborted!");
+                        break;
+                    }
+
+                    DicConsole.Write("\rTrimming sector {0}", badSector);
+
+                    sense = scsiReader.ReadBlock(out readBuffer, badSector, out double cmdDuration);
+
+                    if(sense || dev.Error) continue;
+
+                    resume.BadBlocks.Remove(badSector);
+                    extents.Add(badSector);
+                    outputPlugin.WriteSector(readBuffer, badSector);
+                }
+
+                DicConsole.WriteLine();
+                end = DateTime.UtcNow;
+                dumpLog.WriteLine("Trimming finished in {0} seconds.", (end - start).TotalSeconds);
+            }
+            #endregion Trimming
+
             #region Error handling
             if(resume.BadBlocks.Count > 0 && !aborted && retryPasses > 0)
             {
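ATA.cs and SBC.cs end up with nearly identical trim loops because both talk to the drive through the Reader helper (ataReader/scsiReader), which hides the command set behind a block count and a single-block read. The interface below is hypothetical, the real Reader is a concrete class; it only illustrates the seam the dump and trim passes rely on, including the convention that the read call reports true on error.

// Hypothetical abstraction sketching the role the Reader helper plays for the dump and
// trim loops: size the device once, then read one block at a time.
public interface IMediaReader
{
    ulong GetDeviceBlocks();                                              // total addressable blocks
    bool  ReadBlock(out byte[] buffer, ulong block, out double duration); // returns true on error
}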
diff --git a/DiscImageChef.Core/Devices/Dumping/SCSI.cs b/DiscImageChef.Core/Devices/Dumping/SCSI.cs
index 347d5816..c52be83d 100644
--- a/DiscImageChef.Core/Devices/Dumping/SCSI.cs
+++ b/DiscImageChef.Core/Devices/Dumping/SCSI.cs
@@ -70,16 +70,15 @@ namespace DiscImageChef.Core.Devices.Dumping
         ///     Path to output file
         ///     Formats to pass to output file plugin
         ///     If you asked to dump long sectors from a SCSI Streaming device
-        public static void Dump(Device dev, string devicePath, IWritableImage outputPlugin, ushort retryPasses,
-                                bool force, bool dumpRaw, bool persistent, bool stopOnError,
-                                ref Resume resume,
-                                ref
-                                    DumpLog dumpLog, bool dumpLeadIn, Encoding encoding,
-                                string outputPrefix,
-                                string
-                                    outputPath, Dictionary<string, string> formatOptions,
-                                CICMMetadataType
-                                    preSidecar, uint skip, bool nometadata)
+        public static void Dump(Device dev, string devicePath,
+                                IWritableImage outputPlugin, ushort retryPasses,
+                                bool force, bool dumpRaw,
+                                bool persistent, bool stopOnError, ref Resume resume,
+                                ref DumpLog dumpLog, bool dumpLeadIn,
+                                Encoding encoding, string outputPrefix,
+                                string outputPath, Dictionary<string, string> formatOptions,
+                                CICMMetadataType preSidecar, uint skip, bool nometadata,
+                                bool notrim)
         {
             MediaType dskType = MediaType.Unknown;
             int resets = 0;
@@ -212,12 +211,12 @@ namespace DiscImageChef.Core.Devices.Dumping
                 case PeripheralDeviceTypes.MultiMediaDevice:
                     Mmc.Dump(dev, devicePath, outputPlugin, retryPasses, force, dumpRaw, persistent, stopOnError,
                              ref dskType, ref resume, ref dumpLog, dumpLeadIn, encoding, outputPrefix, outputPath,
-                             formatOptions, preSidecar, skip, nometadata);
+                             formatOptions, preSidecar, skip, nometadata, notrim);
                     return;
                 default:
                     Sbc.Dump(dev, devicePath, outputPlugin, retryPasses, force, dumpRaw, persistent, stopOnError, null,
                              ref dskType, false, ref resume, ref dumpLog, encoding, outputPrefix, outputPath,
-                             formatOptions, preSidecar, skip, nometadata);
+                             formatOptions, preSidecar, skip, nometadata, notrim);
                     break;
             }
         }
diff --git a/DiscImageChef.Core/Devices/Dumping/SecureDigital.cs b/DiscImageChef.Core/Devices/Dumping/SecureDigital.cs
index 45710f18..d8f5559d 100644
--- a/DiscImageChef.Core/Devices/Dumping/SecureDigital.cs
+++ b/DiscImageChef.Core/Devices/Dumping/SecureDigital.cs
@@ -80,7 +80,7 @@ namespace DiscImageChef.Core.Devices.Dumping
                                   string outputPrefix, string outputPath,
                                   Dictionary<string, string> formatOptions, CICMMetadataType preSidecar, uint skip,
-                                  bool nometadata)
+                                  bool nometadata, bool notrim)
         {
             bool aborted;

@@ -280,6 +280,7 @@ namespace DiscImageChef.Core.Devices.Dumping

             start = DateTime.UtcNow;
             double imageWriteDuration = 0;
+            bool newTrim = false;

             for(ulong i = resume.NextBlock; i < blocks; i += blocksToRead)
             {
@@ -324,7 +325,8 @@ namespace DiscImageChef.Core.Devices.Dumping
                     outputPlugin.WriteSectors(new byte[blockSize * skip], i, skip);
                     imageWriteDuration += (DateTime.Now - writeStart).TotalSeconds;
                     dumpLog.WriteLine("Skipping {0} blocks from errored block {1}.", skip, i);
-                    i += skip - blocksToRead;
+                    i += skip - blocksToRead;
+                    newTrim = true;
                 }

                 double newSpeed =
@@ -345,6 +347,42 @@ namespace DiscImageChef.Core.Devices.Dumping
             dumpLog.WriteLine("Average write speed {0:F3} KiB/sec.",
                               (double)blockSize * (double)(blocks + 1) / 1024 / imageWriteDuration);

+            #region Trimming
+            if(resume.BadBlocks.Count > 0 && !aborted && !notrim && newTrim)
+            {
+                start = DateTime.UtcNow;
+                dumpLog.WriteLine("Trimming bad sectors");
+
+                ulong[] tmpArray = resume.BadBlocks.ToArray();
+                foreach(ulong badSector in tmpArray)
+                {
+                    if(aborted)
+                    {
+                        currentTry.Extents = ExtentsConverter.ToMetadata(extents);
+                        dumpLog.WriteLine("Aborted!");
+                        break;
+                    }
+
+                    DicConsole.Write("\rTrimming sector {0}", badSector);
+
+                    error = dev.Read(out cmdBuf, out _, (uint)badSector, blockSize, 1, byteAddressed, TIMEOUT,
+                                     out duration);
+
+                    totalDuration += duration;
+
+                    if(error) continue;
+
+                    resume.BadBlocks.Remove(badSector);
+                    extents.Add(badSector);
+                    outputPlugin.WriteSector(cmdBuf, badSector);
+                }
+
+                DicConsole.WriteLine();
+                end = DateTime.UtcNow;
+                dumpLog.WriteLine("Trimming finished in {0} seconds.", (end - start).TotalSeconds);
+            }
+            #endregion Trimming
+
             #region Error handling
             if(resume.BadBlocks.Count > 0 && !aborted && retryPasses > 0)
             {
diff --git a/DiscImageChef.Core/Devices/Dumping/XGD.cs b/DiscImageChef.Core/Devices/Dumping/XGD.cs
index f7f8ed99..e59ae7e5 100644
--- a/DiscImageChef.Core/Devices/Dumping/XGD.cs
+++ b/DiscImageChef.Core/Devices/Dumping/XGD.cs
@@ -90,7 +90,7 @@ namespace DiscImageChef.Core.Devices.Dumping
                                   string outputPrefix, string outputPath,
                                   Dictionary<string, string> formatOptions, CICMMetadataType preSidecar, uint skip,
-                                  bool nometadata)
+                                  bool nometadata, bool notrim)
         {
             bool sense;
             ulong blocks;
@@ -377,6 +377,7 @@ namespace DiscImageChef.Core.Devices.Dumping
             ulong currentSector = resume.NextBlock;

             if(resume.NextBlock > 0) dumpLog.WriteLine("Resuming from block {0}.", resume.NextBlock);
+            bool newTrim = false;

             dumpLog.WriteLine("Reading game partition.");
             for(int e = 0; e <= 16; e++)
@@ -473,6 +474,7 @@ namespace DiscImageChef.Core.Devices.Dumping
                                                                        StringSplitOptions
                                                                            .RemoveEmptyEntries);
                         foreach(string senseLine in senseLines) dumpLog.WriteLine(senseLine);
+                        newTrim = true;
                     }

                     double newSpeed =
@@ -649,6 +651,41 @@ namespace DiscImageChef.Core.Devices.Dumping
             dumpLog.WriteLine("Average write speed {0:F3} KiB/sec.",
                               (double)BLOCK_SIZE * (double)(blocks + 1) / 1024 / imageWriteDuration);

+            #region Trimming
+            if(resume.BadBlocks.Count > 0 && !aborted && !notrim && newTrim)
+            {
+                start = DateTime.UtcNow;
+                dumpLog.WriteLine("Trimming bad sectors");
+
+                ulong[] tmpArray = resume.BadBlocks.ToArray();
+                foreach(ulong badSector in tmpArray)
+                {
+                    if(aborted)
+                    {
+                        currentTry.Extents = ExtentsConverter.ToMetadata(extents);
+                        dumpLog.WriteLine("Aborted!");
+                        break;
+                    }
+
+                    DicConsole.Write("\rTrimming sector {0}", badSector);
+
+                    sense = dev.Read12(out readBuffer, out senseBuf, 0, false, false, false, false, (uint)badSector,
+                                       BLOCK_SIZE, 0, 1, false, dev.Timeout, out cmdDuration);
+
+                    totalDuration += cmdDuration;
+
+                    if(sense || dev.Error) continue;
+
+                    resume.BadBlocks.Remove(badSector);
+                    extents.Add(badSector);
+                    outputPlugin.WriteSector(readBuffer, badSector);
+                }
+
+                DicConsole.WriteLine();
+                end = DateTime.UtcNow;
+                dumpLog.WriteLine("Trimming finished in {0} seconds.", (end - start).TotalSeconds);
+            }
+            #endregion Trimming
+
             #region Error handling
             if(resume.BadBlocks.Count > 0 && !aborted && retryPasses > 0)
             {
diff --git a/DiscImageChef.Server/docs/Changelog.md b/DiscImageChef.Server/docs/Changelog.md
deleted file mode 120000
index 639240cb..00000000
--- a/DiscImageChef.Server/docs/Changelog.md
+++ /dev/null
@@ -1 +0,0 @@
-../../Changelog.md
\ No newline at end of file
diff --git a/DiscImageChef.Server/docs/DONATING.md b/DiscImageChef.Server/docs/DONATING.md
deleted file mode 120000
index 3e419693..00000000
--- a/DiscImageChef.Server/docs/DONATING.md
+++ /dev/null
@@ -1 +0,0 @@
-../../DONATING.md
\ No newline at end of file
diff --git a/DiscImageChef.Server/docs/README.md b/DiscImageChef.Server/docs/README.md
deleted file mode 120000
index fe840054..00000000
--- a/DiscImageChef.Server/docs/README.md
+++ /dev/null
@@ -1 +0,0 @@
-../../README.md
\ No newline at end of file
diff --git a/DiscImageChef.Server/docs/TODO.md b/DiscImageChef.Server/docs/TODO.md
deleted file mode 120000
index 25307576..00000000
--- a/DiscImageChef.Server/docs/TODO.md
+++ /dev/null
@@ -1 +0,0 @@
-../../TODO.md
\ No newline at end of file
diff --git a/DiscImageChef/Commands/DumpMedia.cs b/DiscImageChef/Commands/DumpMedia.cs
index 6dcc3b6c..5def5fa8 100644
--- a/DiscImageChef/Commands/DumpMedia.cs
+++ b/DiscImageChef/Commands/DumpMedia.cs
@@ -121,7 +121,7 @@ namespace DiscImageChef.Commands
                 try
                 {
                     StreamReader sr = new StreamReader(outputPrefix + ".resume.xml");
-                    resume = (Resume)xs.Deserialize(sr);
+                    resume = (Resume)xs.Deserialize(sr);
                     sr.Close();
                 }
                 catch
@@ -143,7 +143,7 @@ namespace DiscImageChef.Commands
                 try
                 {
                     StreamReader sr = new StreamReader(options.CicmXml);
-                    sidecar = (CICMMetadataType)sidecarXs.Deserialize(sr);
+                    sidecar = (CICMMetadataType)sidecarXs.Deserialize(sr);
                     sr.Close();
                 }
                 catch
@@ -207,26 +207,28 @@ namespace DiscImageChef.Commands
                 case DeviceType.ATA:
                     Ata.Dump(dev, options.DevicePath, outputFormat, options.RetryPasses, options.Force, options.Raw,
                              options.Persistent, options.StopOnError, ref resume, ref dumpLog, encoding, outputPrefix,
-                             options.OutputFile, parsedOptions, sidecar, (uint)options.Skip, options.NoMetadata);
+                             options.OutputFile, parsedOptions, sidecar, (uint)options.Skip, options.NoMetadata,
+                             options.NoTrim);
                     break;
                 case DeviceType.MMC:
                 case DeviceType.SecureDigital:
                     SecureDigital.Dump(dev, options.DevicePath, outputFormat, options.RetryPasses, options.Force,
                                        options.Raw, options.Persistent, options.StopOnError, ref resume, ref dumpLog,
-                                       encoding, outputPrefix, options.OutputFile, parsedOptions, sidecar, (uint)options.Skip,
-                                       options.NoMetadata);
+                                       encoding, outputPrefix, options.OutputFile, parsedOptions, sidecar,
+                                       (uint)options.Skip, options.NoMetadata, options.NoTrim);
                     break;
                 case DeviceType.NVMe:
                     NvMe.Dump(dev, options.DevicePath, outputFormat, options.RetryPasses, options.Force, options.Raw,
                               options.Persistent, options.StopOnError, ref resume, ref dumpLog, encoding, outputPrefix,
-                              options.OutputFile, parsedOptions, sidecar, (uint)options.Skip, options.NoMetadata);
+                              options.OutputFile, parsedOptions, sidecar, (uint)options.Skip, options.NoMetadata,
+                              options.NoTrim);
                     break;
                 case DeviceType.ATAPI:
                 case DeviceType.SCSI:
                     Scsi.Dump(dev, options.DevicePath, outputFormat, options.RetryPasses, options.Force, options.Raw,
                               options.Persistent, options.StopOnError, ref resume, ref dumpLog, options.LeadIn,
                               encoding, outputPrefix, options.OutputFile, parsedOptions, sidecar, (uint)options.Skip,
-                              options.NoMetadata);
+                              options.NoMetadata, options.NoTrim);
                     break;
                 default:
                     dumpLog.WriteLine("Unknown device type.");
@@ -242,7 +244,7 @@ namespace DiscImageChef.Commands
                 if(File.Exists(outputPrefix + ".resume.xml")) File.Delete(outputPrefix + ".resume.xml");

                 FileStream fs = new FileStream(outputPrefix + ".resume.xml", FileMode.Create, FileAccess.ReadWrite);
-                xs = new XmlSerializer(resume.GetType());
+                xs = new XmlSerializer(resume.GetType());
                 xs.Serialize(fs, resume);
                 fs.Close();
             }
If not present, will try to detect it from output image extension.")] public string OutputFormat { get; set; } @@ -314,6 +312,9 @@ namespace DiscImageChef [Option("no-metadata", Default = false, HelpText = "Disables creating CICM XML sidecar.")] public bool NoMetadata { get; set; } + + [Option("no-trim", Default = false, HelpText = "Disables trimming errored from skipped sectors.")] + public bool NoTrim { get; set; } } [Verb("device-report", HelpText = "Tests the device capabilities and creates an XML report of them.")] @@ -386,7 +387,7 @@ namespace DiscImageChef public string OutputFile { get; set; } [Option('p', "format", Default = null, - HelpText = + HelpText = "Format of the output image, as plugin name or plugin id. If not present, will try to detect it from output image extension.")] public string OutputFormat { get; set; } @@ -394,7 +395,7 @@ namespace DiscImageChef public int Count { get; set; } [Option('f', "force", Default = false, - HelpText = + HelpText = "Continue conversion even if sector or media tags will be lost in the process.")] public bool Force { get; set; } @@ -417,11 +418,11 @@ namespace DiscImageChef [Option("media-sequence", Default = 0, HelpText = "Number in sequence for the media represented by the image")] public int MediaSequence { get; set; } [Option("media-lastsequence", Default = 0, - HelpText = + HelpText = "Last media of the sequence the media represented by the image corresponds to")] public int LastMediaSequence { get; set; } [Option("drive-manufacturer", Default = null, - HelpText = + HelpText = "Manufacturer of the drive used to read the media represented by the image")] public string DriveManufacturer { get; set; } [Option("drive-model", Default = null, @@ -431,7 +432,7 @@ namespace DiscImageChef HelpText = "Serial number of the drive used to read the media represented by the image")] public string DriveSerialNumber { get; set; } [Option("drive-revision", Default = null, - HelpText = + HelpText = "Firmware revision of the drive used to read the media represented by the image")] public string DriveFirmwareRevision { get; set; }