Merge pull request #926 from aaru-dps/fakeshemp/merge-flux

Add flux merging
This commit is contained in:
2026-04-04 20:00:06 +01:00
committed by GitHub
12 changed files with 440 additions and 102 deletions

View File

@@ -9,6 +9,24 @@ namespace Aaru.Core.Image;
public sealed partial class Merger
{
/// <summary>
/// Single extent covering sectors <c>0 .. sectorCount - 1</c>, or an empty list when
/// <paramref name="sectorCount" /> is zero (avoids <c>0 - 1</c> unsigned underflow in callers).
/// </summary>
/// <param name="sectorCount">Total number of sectors in the image; may be zero for flux-only images.</param>
/// <returns>A list with one extent spanning the whole image, or an empty list for zero sectors.</returns>
static List<Extent> CreateDefaultDumpExtents(ulong sectorCount)
{
    // Guard first: "sectorCount - 1" on ulong would wrap to ulong.MaxValue when sectorCount is 0.
    if(sectorCount == 0) return [];

    // Collection expression for consistency with the call sites in this class.
    return
    [
        new Extent
        {
            Start = 0,
            End   = sectorCount - 1
        }
    ];
}
List<ulong> CalculateSectorsToCopy(IMediaImage primaryImage, IMediaImage secondaryImage, Resume primaryResume,
Resume secondaryResume, List<ulong> overrideSectorsList)
{
@@ -16,14 +34,7 @@ public sealed partial class Merger
[
new DumpHardware
{
Extents =
[
new Extent
{
Start = 0,
End = primaryImage.Info.Sectors - 1
}
]
Extents = CreateDefaultDumpExtents(primaryImage.Info.Sectors)
}
];
@@ -32,14 +43,7 @@ public sealed partial class Merger
[
new DumpHardware
{
Extents =
[
new Extent
{
Start = 0,
End = secondaryImage.Info.Sectors - 1
}
]
Extents = CreateDefaultDumpExtents(secondaryImage.Info.Sectors)
}
];
@@ -93,14 +97,7 @@ public sealed partial class Merger
[
new DumpHardware
{
Extents =
[
new Extent
{
Start = 0,
End = primaryImage.Info.Sectors - 1
}
]
Extents = CreateDefaultDumpExtents(primaryImage.Info.Sectors)
}
];
@@ -109,14 +106,7 @@ public sealed partial class Merger
[
new DumpHardware
{
Extents =
[
new Extent
{
Start = 0,
End = secondaryImage.Info.Sectors - 1
}
]
Extents = CreateDefaultDumpExtents(secondaryImage.Info.Sectors)
}
];

View File

@@ -1,52 +1,140 @@
using System.Collections.Generic;
using System.Linq;
using Aaru.CommonTypes.Enums;
using Aaru.CommonTypes.Interfaces;
using Aaru.CommonTypes.Structs;
namespace Aaru.Core.Image;
public sealed partial class Merger
{
// TODO: Should we return error any time?
// TODO: Add progress reporting
ErrorNumber CopyFlux(IFluxImage inputFlux, IWritableFluxImage outputFlux)
ErrorNumber MergeFlux(IFluxImage primaryFlux, IFluxImage secondaryFlux, IWritableFluxImage outputFlux)
{
for(ushort track = 0; track < inputFlux.Info.Cylinders; track++)
ErrorNumber error = primaryFlux.GetAllFluxCaptures(out List<FluxCapture> primaryCaptures);
if(error != ErrorNumber.NoError) return error;
if(primaryCaptures is null) primaryCaptures = new List<FluxCapture>();
List<FluxCapture> secondaryCaptures = new List<FluxCapture>();
if(secondaryFlux != null)
{
for(uint head = 0; head < inputFlux.Info.Heads; head++)
error = secondaryFlux.GetAllFluxCaptures(out secondaryCaptures);
if(error != ErrorNumber.NoError) return error;
if(secondaryCaptures is null) secondaryCaptures = new List<FluxCapture>();
}
Dictionary<(uint Head, ushort Track, byte SubTrack), List<FluxCapture>> primaryByLocation =
GroupFluxCapturesByLocation(primaryCaptures);
Dictionary<(uint Head, ushort Track, byte SubTrack), List<FluxCapture>> secondaryByLocation =
GroupFluxCapturesByLocation(secondaryCaptures);
HashSet<(uint Head, ushort Track, byte SubTrack)> allKeys = new HashSet<(uint Head, ushort Track, byte SubTrack)>();
foreach((uint Head, ushort Track, byte SubTrack) key in primaryByLocation.Keys) allKeys.Add(key);
foreach((uint Head, ushort Track, byte SubTrack) key in secondaryByLocation.Keys) allKeys.Add(key);
List<(uint Head, ushort Track, byte SubTrack)> sortedKeys =
allKeys.OrderBy(static k => k.Track).ThenBy(static k => k.Head).ThenBy(static k => k.SubTrack).ToList();
foreach((uint Head, ushort Track, byte SubTrack) key in sortedKeys)
{
List<FluxCapture> primaryGroup =
primaryByLocation.TryGetValue(key, out List<FluxCapture> pg) ? pg : new List<FluxCapture>();
List<FluxCapture> secondaryGroup =
secondaryByLocation.TryGetValue(key, out List<FluxCapture> sg) ? sg : new List<FluxCapture>();
primaryGroup.Sort((FluxCapture a, FluxCapture b) => a.CaptureIndex.CompareTo(b.CaptureIndex));
secondaryGroup.Sort((FluxCapture a, FluxCapture b) => a.CaptureIndex.CompareTo(b.CaptureIndex));
uint outputIndex = 0;
foreach(FluxCapture capture in primaryGroup)
{
ErrorNumber error = inputFlux.SubTrackLength(head, track, out byte subTrackLen);
error = primaryFlux.ReadFluxCapture(capture.Head,
capture.Track,
capture.SubTrack,
capture.CaptureIndex,
out ulong indexResolution,
out ulong dataResolution,
out byte[] indexBuffer,
out byte[] dataBuffer);
if(error != ErrorNumber.NoError) continue;
if(error != ErrorNumber.NoError) return error;
for(byte subTrackIndex = 0; subTrackIndex < subTrackLen; subTrackIndex++)
error = outputFlux.WriteFluxCapture(indexResolution,
dataResolution,
indexBuffer,
dataBuffer,
capture.Head,
capture.Track,
capture.SubTrack,
outputIndex);
if(error != ErrorNumber.NoError) return error;
outputIndex++;
}
if(secondaryFlux != null)
{
foreach(FluxCapture capture in secondaryGroup)
{
error = inputFlux.CapturesLength(head, track, subTrackIndex, out uint capturesLen);
error = secondaryFlux.ReadFluxCapture(capture.Head,
capture.Track,
capture.SubTrack,
capture.CaptureIndex,
out ulong indexResolution,
out ulong dataResolution,
out byte[] indexBuffer,
out byte[] dataBuffer);
if(error != ErrorNumber.NoError) continue;
if(error != ErrorNumber.NoError) return error;
for(uint captureIndex = 0; captureIndex < capturesLen; captureIndex++)
{
inputFlux.ReadFluxCapture(head,
track,
subTrackIndex,
captureIndex,
out ulong indexResolution,
out ulong dataResolution,
out byte[] indexBuffer,
out byte[] dataBuffer);
error = outputFlux.WriteFluxCapture(indexResolution,
dataResolution,
indexBuffer,
dataBuffer,
capture.Head,
capture.Track,
capture.SubTrack,
outputIndex);
outputFlux.WriteFluxCapture(indexResolution,
dataResolution,
indexBuffer,
dataBuffer,
head,
track,
subTrackIndex,
captureIndex);
}
if(error != ErrorNumber.NoError) return error;
outputIndex++;
}
}
}
return ErrorNumber.NoError;
}
}
/// <summary>
/// Buckets flux captures by their physical location (head, track, sub-track) so that captures for the same
/// location can be merged and renumbered together.
/// </summary>
/// <param name="captures">Flat list of captures, in any order.</param>
/// <returns>Dictionary keyed by (Head, Track, SubTrack); each value lists every capture at that location.</returns>
static Dictionary<(uint Head, ushort Track, byte SubTrack), List<FluxCapture>> GroupFluxCapturesByLocation(
    List<FluxCapture> captures)
{
    var grouped = new Dictionary<(uint Head, ushort Track, byte SubTrack), List<FluxCapture>>();

    foreach(FluxCapture capture in captures)
    {
        (uint Head, ushort Track, byte SubTrack) location = (capture.Head, capture.Track, capture.SubTrack);

        if(grouped.TryGetValue(location, out List<FluxCapture> bucket))
            bucket.Add(capture);
        else
            grouped[location] = new List<FluxCapture>
            {
                capture
            };
    }

    return grouped;
}
}

View File

@@ -220,12 +220,15 @@ public sealed partial class Merger
InitProgress?.Invoke();
PulseProgress?.Invoke(UI.Calculating_sectors_to_merge);
List<ulong> sectorsToCopyFromSecondImage =
CalculateSectorsToCopy(primaryImage, secondaryImage, primaryResume, secondaryResume, overrideSectorsList);
List<ulong> sectorsToCopyFromSecondImage =
CalculateSectorsToCopy(primaryImage, secondaryImage, primaryResume, secondaryResume, overrideSectorsList);
EndProgress?.Invoke();
if(sectorsToCopyFromSecondImage.Count == 0)
// Flux images might contain no decoded data, which results in a sector count of 0. We allow this if the image contains flux.
var containsFlux = primaryImage is IFluxImage || secondaryImage is IFluxImage;
if(sectorsToCopyFromSecondImage.Count == 0 && !containsFlux)
{
StoppingErrorMessage
?.Invoke(UI.No_sectors_to_merge__output_image_will_be_identical_to_primary_image_not_continuing);
@@ -344,10 +347,15 @@ public sealed partial class Merger
if(errno != ErrorNumber.NoError) return errno;
if(primaryImage is IFluxImage inputFlux && outputFormat is IWritableFluxImage outputFlux)
if(primaryImage is IFluxImage primaryFlux && outputFormat is IWritableFluxImage outputFlux)
{
UpdateStatus?.Invoke(UI.Flux_data_will_be_copied_as_is_from_primary_image);
errno = CopyFlux(inputFlux, outputFlux);
IFluxImage secondaryFlux = secondaryImage as IFluxImage;
UpdateStatus?.Invoke(secondaryFlux != null
? UI.Flux_merge_primary_then_secondary_appended
: UI.Flux_merge_primary_flux_only_secondary_not_flux_image);
errno = MergeFlux(primaryFlux, secondaryFlux, outputFlux);
if(errno != ErrorNumber.NoError) return errno;
}

View File

@@ -59,7 +59,6 @@ public sealed partial class A2R : IWritableFluxImage
// Offset from the start of the current RWCP to the next capture
uint _currentCaptureOffset = 16;
uint _currentResolution;
bool _firstCaptureProcessed;
// 53 = A2R header, INFO header, INFO data
long _currentRwcpStart = 53;

View File

@@ -35,6 +35,7 @@ using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using Aaru.CommonTypes;
using Aaru.CommonTypes.Enums;
namespace Aaru.Images;
@@ -139,13 +140,133 @@ public sealed partial class A2R
{
tick += b;
data.Add(tick);
tick = 0;
}
}
return data;
}
/// <summary>
/// Converts cumulative index signal times from <paramref name="indexResolution" /> tick units to
/// <paramref name="dataResolution" /> tick units. A2R RWCP stores one picoseconds-per-tick for both index and flux
/// (the data resolution); index samples from sources with a different index clock must be rescaled.
/// </summary>
/// <param name="cumulativeIndexTicks">Cumulative absolute times in index-tick units (same encoding as <see cref="FluxRepresentationsToUInt32List" />).</param>
/// <param name="indexResolution">Picoseconds per tick for the index stream.</param>
/// <param name="dataResolution">Picoseconds per tick for the RWCP chunk (flux stream).</param>
/// <param name="cumulativeDataTicks">Cumulative absolute times in data-tick units; empty on error.</param>
/// <returns><see cref="ErrorNumber.NoError" /> or <see cref="ErrorNumber.InvalidArgument" /> if scaling is impossible.</returns>
static ErrorNumber ScaleCumulativeIndexTicksToDataResolution(List<uint> cumulativeIndexTicks,
                                                             ulong indexResolution, ulong dataResolution,
                                                             out List<uint> cumulativeDataTicks)
{
    cumulativeDataTicks = new List<uint>();

    if(cumulativeIndexTicks is null || cumulativeIndexTicks.Count == 0) return ErrorNumber.NoError;

    if(dataResolution == 0) return ErrorNumber.InvalidArgument;

    // Fast path: same clock, no rescaling needed.
    if(indexResolution == dataResolution)
    {
        cumulativeDataTicks.AddRange(cumulativeIndexTicks);

        return ErrorNumber.NoError;
    }

    // Single pass (the previous deferred LINQ query was enumerated twice, recomputing every
    // scaling). Build into a local list so the out parameter stays empty on failure.
    var scaledList = new List<uint>(cumulativeIndexTicks.Count);

    foreach(uint tick in cumulativeIndexTicks)
    {
        // Reject multiplications that would wrap ulong before the division can bring them back down.
        if(tick != 0 && indexResolution > ulong.MaxValue / tick) return ErrorNumber.InvalidArgument;

        ulong scaled = tick * indexResolution / dataResolution;

        if(scaled > uint.MaxValue) return ErrorNumber.InvalidArgument;

        scaledList.Add((uint)scaled);
    }

    cumulativeDataTicks = scaledList;

    return ErrorNumber.NoError;
}
/// <summary>
/// Decodes the internal flux data buffer into per-interval tick lengths (data resolution ticks between transitions).
/// </summary>
/// <param name="data">Encoded flux buffer; may be null or empty.</param>
/// <returns>List of tick deltas between successive transitions; empty for a null/empty buffer.</returns>
static List<uint> FluxDataBufferToIntervals(byte[] data)
{
    var intervals = new List<uint>();

    if(data is null || data.Length == 0) return intervals;

    // The decoded values are cumulative absolute times; convert to successive differences.
    uint last = 0;

    foreach(uint cumulative in FluxRepresentationsToUInt32List(data))
    {
        intervals.Add(cumulative - last);
        last = cumulative;
    }

    return intervals;
}
/// <summary>
/// Drops <paramref name="trimTicks" /> data ticks from the start of the flux stream and re-encodes the remainder.
/// </summary>
/// <param name="data">Encoded flux buffer to trim.</param>
/// <param name="trimTicks">Number of leading data ticks to remove.</param>
/// <param name="trimmed">Re-encoded buffer without the leading ticks; empty on error or empty input.</param>
/// <returns>
/// <see cref="ErrorNumber.NoError" /> on success; <see cref="ErrorNumber.InvalidArgument" /> if the trim would
/// consume the whole stream or the input is empty while a trim was requested.
/// </returns>
static ErrorNumber TrimFluxDataLeadingTicks(byte[] data, uint trimTicks, out byte[] trimmed)
{
    trimmed = Array.Empty<byte>();

    bool noData = data is null || data.Length == 0;

    // Nothing to trim: hand back an untouched copy (or nothing for an empty stream).
    if(trimTicks == 0)
    {
        if(!noData) trimmed = (byte[])data.Clone();

        return ErrorNumber.NoError;
    }

    if(noData) return ErrorNumber.InvalidArgument;

    List<uint> intervals = FluxDataBufferToIntervals(data);

    // A trim longer than the whole stream is inconsistent input.
    ulong available = 0;
    foreach(uint interval in intervals) available += interval;

    if(trimTicks > available) return ErrorNumber.InvalidArgument;

    var   kept   = new List<uint>();
    ulong toDrop = trimTicks;

    foreach(uint interval in intervals)
    {
        if(toDrop == 0)
        {
            // Past the trim point: keep intervals unchanged.
            kept.Add(interval);

            continue;
        }

        if(toDrop >= interval)
        {
            // Interval consumed entirely by the trim.
            toDrop -= interval;

            continue;
        }

        // Trim ends inside this interval: keep the remainder.
        kept.Add(interval - (uint)toDrop);
        toDrop = 0;
    }

    if(kept.Count == 0) return ErrorNumber.InvalidArgument;

    var encoded = new List<byte>();
    foreach(uint interval in kept) encoded.AddRange(UInt32ToFluxRepresentation(interval));

    trimmed = encoded.ToArray();

    return ErrorNumber.NoError;
}
/// <summary>
/// Determines if a flux capture is a "timing" capture (~1.25 revolutions) or "xtiming" capture (2.25+ revolutions).
/// Per A2R 3.x spec: timing captures are quick flux captures (~1.25 revolutions), xtiming captures are extended

View File

@@ -61,6 +61,100 @@ public sealed partial class A2R
return ErrorNumber.WriteError;
}
if(dataResolution == 0)
{
ErrorMessage = Localization.A2R_could_not_scale_index_signals_to_data_resolution;
return ErrorNumber.InvalidArgument;
}
List<uint> rawCumulative = FluxRepresentationsToUInt32List(indexBuffer ?? Array.Empty<byte>());
if(rawCumulative.Count == 0)
{
ErrorMessage = Localization.A2R_cannot_index_align_without_index_signals;
return ErrorNumber.InvalidArgument;
}
bool startsAtIndex = rawCumulative[0] == 0;
List<uint> a2RIndices;
byte[] alignedData;
byte synchronizedFlag;
if(startsAtIndex)
{
rawCumulative.RemoveAt(0);
ErrorNumber scaleError =
ScaleCumulativeIndexTicksToDataResolution(rawCumulative, indexResolution, dataResolution, out a2RIndices);
if(scaleError != ErrorNumber.NoError)
{
ErrorMessage = Localization.A2R_could_not_scale_index_signals_to_data_resolution;
return scaleError;
}
if(dataBuffer is null || dataBuffer.Length == 0)
alignedData = Array.Empty<byte>();
else
{
alignedData = new byte[dataBuffer.Length];
Array.Copy(dataBuffer, alignedData, dataBuffer.Length);
}
synchronizedFlag = 1;
}
else
{
AaruLogging.Debug(MODULE_NAME, "Capture does not start at index, scaling index signals to data resolution. Some data will be lost.");
ErrorNumber scaleError =
ScaleCumulativeIndexTicksToDataResolution(rawCumulative, indexResolution, dataResolution,
out List<uint> scaled);
if(scaleError != ErrorNumber.NoError)
{
ErrorMessage = Localization.A2R_could_not_scale_index_signals_to_data_resolution;
return scaleError;
}
uint t0 = scaled[0];
AaruLogging.Debug(MODULE_NAME, "Trimmed {0} ticks from start of data", t0);
ErrorNumber trimError = TrimFluxDataLeadingTicks(dataBuffer ?? Array.Empty<byte>(), t0, out alignedData);
if(trimError != ErrorNumber.NoError)
{
ErrorMessage = Localization.A2R_index_alignment_trim_removed_entire_capture;
return trimError;
}
a2RIndices = new List<uint>();
foreach(uint t in scaled)
{
if(t < t0)
{
ErrorMessage = Localization.A2R_index_alignment_trim_removed_entire_capture;
return ErrorNumber.InvalidArgument;
}
a2RIndices.Add(t - t0);
}
while(a2RIndices.Count > 0 && a2RIndices[0] == 0)
a2RIndices.RemoveAt(0);
synchronizedFlag = 1;
}
_infoChunkV3.synchronized = (byte)Math.Max(_infoChunkV3.synchronized, synchronizedFlag);
// Per A2R 3.x spec: An RWCP chunk can only have one capture resolution per chunk.
// If the resolution changes, we need to create a new RWCP chunk.
@@ -90,7 +184,7 @@ public sealed partial class A2R
// Per A2R 3.x spec: Capture type 1 = timing (~1.25 revolutions), 3 = xtiming (2.25+ revolutions)
// Type 2 = bits (legacy, deprecated)
_writingStream.WriteByte(IsCaptureTypeTiming(dataResolution, dataBuffer) ? (byte)1 : (byte)3);
_writingStream.WriteByte(IsCaptureTypeTiming(dataResolution, alignedData) ? (byte)1 : (byte)3);
// Per A2R 3.x spec: Location uses formula ((cylinder << 1) + side) for most drive types
// For quarter-step drives (SS 5.25 @ 0.25 step), location is in halfphases
@@ -101,34 +195,15 @@ public sealed partial class A2R
0,
2);
// Per A2R 3.x spec: Index signals are absolute timings from start of track
// If capture starts at index signal, that signal should not be included in the array
List<uint> a2RIndices = FluxRepresentationsToUInt32List(indexBuffer);
// Per A2R 3.x spec: synchronized indicates if cross-track sync/index was used during imaging
// If the first index signal is 0, the capture started at index, so synchronized should be 1
if(!_firstCaptureProcessed && a2RIndices.Count > 0)
{
_infoChunkV3.synchronized = a2RIndices[0] == 0 ? (byte)1 : (byte)0;
_firstCaptureProcessed = true;
}
if(a2RIndices.Count > 0 && a2RIndices[0] == 0) a2RIndices.RemoveAt(0);
_writingStream.WriteByte((byte)a2RIndices.Count);
long previousIndex = 0;
foreach(uint cumulativeTicks in a2RIndices)
_writingStream.Write(BitConverter.GetBytes(cumulativeTicks), 0, 4);
foreach(uint index in a2RIndices)
{
_writingStream.Write(BitConverter.GetBytes(index + previousIndex), 0, 4);
previousIndex += index;
}
_writingStream.Write(BitConverter.GetBytes(alignedData.Length), 0, 4);
_writingStream.Write(alignedData, 0, alignedData.Length);
_writingStream.Write(BitConverter.GetBytes(dataBuffer.Length), 0, 4);
_writingStream.Write(dataBuffer, 0, dataBuffer.Length);
_currentCaptureOffset += (uint)(9 + a2RIndices.Count * 4 + dataBuffer.Length);
_currentCaptureOffset += (uint)(9 + a2RIndices.Count * 4 + alignedData.Length);
return ErrorNumber.NoError;
}
@@ -159,7 +234,6 @@ public sealed partial class A2R
IsWriting = true;
ErrorMessage = null;
_firstCaptureProcessed = false;
// Per A2R 3.x spec: File header is 8 bytes
// Bytes 0-3: "A2R3" (0x33523241 little-endian) - version 3.x
@@ -254,7 +328,6 @@ public sealed partial class A2R
// Will be set based on first capture's index signals in WriteFluxCapture
// Default to 0 (will be updated when first capture is written)
_infoChunkV3.synchronized = 0;
_firstCaptureProcessed = false;
// Per A2R 3.x spec: hardSectorCount indicates number of hard sectors (0 = soft sectored)
// Default to 0 (soft sectored) as most floppies are soft sectored

View File

@@ -340,6 +340,8 @@ public sealed partial class HxCStream
AaruLogging.Debug(MODULE_NAME, "Decoded {0} flux pulses (expected {1})", pulses.Length, numberOfPulses);
if(pulses.Length != numberOfPulses) return ErrorNumber.InvalidArgument;
fluxPulses.AddRange(pulses);
packetOffset += Marshal.SizeOf<HxCStreamPackedStreamHeader>() + streamHeader.packedSize;
@@ -381,6 +383,12 @@ public sealed partial class HxCStream
{
IoStreamState previousState = DecodeIoStreamValue(ioStream[0]);
bool oldIndex = previousState.IndexSignal;
bool startsAtIndex = previousState.IndexSignal;
if(startsAtIndex)
{
indexPositions.Add(0);
}
uint totalTicks = 0;
int pulseIndex = 0;
@@ -557,7 +565,7 @@ public sealed partial class HxCStream
if(capture == null) return ErrorNumber.OutOfRange;
var tmpBuffer = new List<byte> { 0 };
var tmpBuffer = new List<byte>();
uint previousTicks = 0;
foreach(uint indexPos in capture.indexPositions)

View File

@@ -600,6 +600,24 @@ namespace Aaru.Images {
}
}
/// <summary>
///   Looks up a localized string similar to Could not scale index signal timings to the flux capture resolution (invalid resolution or overflow)..
/// </summary>
internal static string A2R_could_not_scale_index_signals_to_data_resolution {
get {
return ResourceManager.GetString("A2R_could_not_scale_index_signals_to_data_resolution", resourceCulture);
}
}
/// <summary>
///   Looks up a localized string similar to Cannot write A2R capture: no index signals were provided; index-aligned flux requires at least one index pulse..
/// </summary>
internal static string A2R_cannot_index_align_without_index_signals {
get {
return ResourceManager.GetString("A2R_cannot_index_align_without_index_signals", resourceCulture);
}
}
/// <summary>
///   Looks up a localized string similar to Cannot write A2R capture: aligning flux to the first index pulse would remove the entire stream (inconsistent index vs flux timing)..
/// </summary>
internal static string A2R_index_alignment_trim_removed_entire_capture {
get {
return ResourceManager.GetString("A2R_index_alignment_trim_removed_entire_capture", resourceCulture);
}
}
internal static string Tried_to_write_past_image_size {
get {
return ResourceManager.GetString("Tried_to_write_past_image_size", resourceCulture);

View File

@@ -302,6 +302,15 @@
</data>
<data name="Tried_to_write_on_a_non_writable_image" xml:space="preserve">
<value>Tried to write on a non-writable image</value>
</data>
<data name="A2R_could_not_scale_index_signals_to_data_resolution" xml:space="preserve">
<value>Could not scale index signal timings to the flux capture resolution (invalid resolution or overflow).</value>
</data>
<data name="A2R_cannot_index_align_without_index_signals" xml:space="preserve">
<value>Cannot write A2R capture: no index signals were provided; index-aligned flux requires at least one index pulse.</value>
</data>
<data name="A2R_index_alignment_trim_removed_entire_capture" xml:space="preserve">
<value>Cannot write A2R capture: aligning flux to the first index pulse would remove the entire stream (inconsistent index vs flux timing).</value>
</data>
<data name="Tried_to_write_past_image_size" xml:space="preserve">
<value>Tried to write past image size</value>

View File

@@ -5115,6 +5115,24 @@ namespace Aaru.Localization {
return ResourceManager.GetString("Flux_data_will_be_copied_as_is_from_primary_image", resourceCulture);
}
}
/// <summary>
/// Looks up a localized string similar to Merging flux: primary captures first, then secondary captures appended with renumbered indices...
/// </summary>
public static string Flux_merge_primary_then_secondary_appended {
get {
return ResourceManager.GetString("Flux_merge_primary_then_secondary_appended", resourceCulture);
}
}
/// <summary>
/// Looks up a localized string similar to Secondary image is not a flux image; copying flux captures from the primary image only...
/// </summary>
public static string Flux_merge_primary_flux_only_secondary_not_flux_image {
get {
return ResourceManager.GetString("Flux_merge_primary_flux_only_secondary_not_flux_image", resourceCulture);
}
}
/// <summary>
/// Looks up a localized string similar to Force geometry, only supported in not tape block media. Specify as C/H/S..

View File

@@ -5452,8 +5452,11 @@ Probadores:
<data name="Will_copy_0_sectors_from_secondary_image" xml:space="preserve">
<value>[slateblue1]Se copiarán [teal]{0}[/] sectores de la imagen secundaria.[/]</value>
</data>
<data name="Flux_data_will_be_copied_as_is_from_primary_image" xml:space="preserve">
<value>[slateblue1]Los datos de flujo se copiarán tal cual de la imagen primaria...[/]</value>
<data name="Flux_merge_primary_then_secondary_appended" xml:space="preserve">
<value>[slateblue1]Combinando flujo: primero capturas de la imagen primaria, luego las de la secundaria con índices renumerados...[/]</value>
</data>
<data name="Flux_merge_primary_flux_only_secondary_not_flux_image" xml:space="preserve">
<value>[slateblue1]La imagen secundaria no es de flujo; copiando solo las capturas de flujo de la imagen primaria...[/]</value>
</data>
<data name="Copying_file_0_of_partition_1" xml:space="preserve">
<value>[slateblue1]Copiando fichero [lime]{0}[/] de partición [teal]{1}[/]...[/]</value>

View File

@@ -5536,8 +5536,11 @@ Do you want to continue?</value>
<data name="Will_copy_0_sectors_from_secondary_image" xml:space="preserve">
<value>[slateblue1]Will copy [teal]{0}[/] sectors from secondary image.[/]</value>
</data>
<data name="Flux_data_will_be_copied_as_is_from_primary_image" xml:space="preserve">
<value>[slateblue1]Flux data will be copied as-is from primary image...[/]</value>
<data name="Flux_merge_primary_then_secondary_appended" xml:space="preserve">
<value>[slateblue1]Merging flux: primary captures first, then secondary captures appended with renumbered indices...[/]</value>
</data>
<data name="Flux_merge_primary_flux_only_secondary_not_flux_image" xml:space="preserve">
<value>[slateblue1]Secondary image is not a flux image; copying flux captures from the primary image only...[/]</value>
</data>
<data name="Copying_file_0_of_partition_1" xml:space="preserve">
<value>[slateblue1]Copying file [lime]{0}[/] of partition [teal]{1}[/]...[/]</value>