Add Hashfile deserialization test, fix issues

This commit is contained in:
Matt Nadareski
2023-07-13 16:26:03 -04:00
parent 3ac1e7ce4e
commit fcd95a6e60
5 changed files with 220 additions and 1 deletion

View File

@@ -27,7 +27,7 @@ namespace SabreTools.Models.DosCenter
#region DO NOT USE IN PRODUCTION
/// <remarks>Should be empty</remarks>
public string[] ADDITIONAL_ELEMENTS { get; set; }
public string[]? ADDITIONAL_ELEMENTS { get; set; }
#endregion
}

View File

@@ -16,5 +16,12 @@ namespace SabreTools.Models.Hashfile
public SHA512[]? SHA512 { get; set; }
public SpamSum[]? SpamSum { get; set; }
#region DO NOT USE IN PRODUCTION
/// <remarks>Should be empty</remarks>
public string[]? ADDITIONAL_ELEMENTS { get; set; }
#endregion
}
}

View File

@@ -0,0 +1,163 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SabreTools.Core;
namespace SabreTools.Serialization
{
/// <summary>
/// Serializer for hashfile variants
/// </summary>
public class Hashfile
{
/// <summary>
/// Deserializes a hashfile variant to the defined type
/// </summary>
/// <param name="path">Path to the file to deserialize</param>
/// <param name="hash">Hash corresponding to the hashfile variant</param>
/// <returns>Deserialized data on success, null on failure</returns>
public static Models.Hashfile.Hashfile? Deserialize(string path, Hash hash)
{
try
{
using var stream = PathProcessor.OpenStream(path);
return Deserialize(stream, hash);
}
catch
{
// TODO: Handle logging the exception
return default;
}
}
/// <summary>
/// Deserializes a hashfile variant in a stream to the defined type
/// </summary>
/// <param name="stream">Stream to deserialize</param>
/// <param name="hash">Hash corresponding to the hashfile variant</param>
/// <returns>Deserialized data on success, null on failure</returns>
public static Models.Hashfile.Hashfile? Deserialize(Stream? stream, Hash hash)
{
try
{
// If the stream is null
if (stream == null)
return default;
// Setup the reader and output
var reader = new StreamReader(stream);
var dat = new Models.Hashfile.Hashfile();
var additional = new List<string>();
// Loop through the rows and parse out values
var hashes = new List<object>();
while (!reader.EndOfStream)
{
// Read and split the line
string? line = reader.ReadLine();
string[]? lineParts = line?.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
if (lineParts == null || lineParts.Length != 2)
{
additional.Add(line);
continue;
}
// Parse the line into a hash
switch (hash)
{
case Hash.CRC:
var sfv = new Models.Hashfile.SFV
{
File = lineParts[0],
Hash = lineParts[1],
};
hashes.Add(sfv);
break;
case Hash.MD5:
var md5 = new Models.Hashfile.MD5
{
Hash = lineParts[0],
File = lineParts[1],
};
hashes.Add(md5);
break;
case Hash.SHA1:
var sha1 = new Models.Hashfile.SHA1
{
Hash = lineParts[0],
File = lineParts[1],
};
hashes.Add(sha1);
break;
case Hash.SHA256:
var sha256 = new Models.Hashfile.SHA256
{
Hash = lineParts[0],
File = lineParts[1],
};
hashes.Add(sha256);
break;
case Hash.SHA384:
var sha384 = new Models.Hashfile.SHA384
{
Hash = lineParts[0],
File = lineParts[1],
};
hashes.Add(sha384);
break;
case Hash.SHA512:
var sha512 = new Models.Hashfile.SHA512
{
Hash = lineParts[0],
File = lineParts[1],
};
hashes.Add(sha512);
break;
case Hash.SpamSum:
var spamSum = new Models.Hashfile.SpamSum
{
Hash = lineParts[0],
File = lineParts[1],
};
hashes.Add(spamSum);
break;
}
}
// Assign the hashes to the hashfile and return
switch (hash)
{
case Hash.CRC:
dat.SFV = hashes.Cast<Models.Hashfile.SFV>().ToArray();
break;
case Hash.MD5:
dat.MD5 = hashes.Cast<Models.Hashfile.MD5>().ToArray();
break;
case Hash.SHA1:
dat.SHA1 = hashes.Cast<Models.Hashfile.SHA1>().ToArray();
break;
case Hash.SHA256:
dat.SHA256 = hashes.Cast<Models.Hashfile.SHA256>().ToArray();
break;
case Hash.SHA384:
dat.SHA384 = hashes.Cast<Models.Hashfile.SHA384>().ToArray();
break;
case Hash.SHA512:
dat.SHA512 = hashes.Cast<Models.Hashfile.SHA512>().ToArray();
break;
case Hash.SpamSum:
dat.SpamSum = hashes.Cast<Models.Hashfile.SpamSum>().ToArray();
break;
}
dat.ADDITIONAL_ELEMENTS = additional.ToArray();
return dat;
}
catch
{
// TODO: Handle logging the exception
return default;
}
}
}
}

View File

@@ -6,6 +6,7 @@
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\SabreTools.Core\SabreTools.Core.csproj" />
<ProjectReference Include="..\SabreTools.IO\SabreTools.IO.csproj" />
<ProjectReference Include="..\SabreTools.Models\SabreTools.Models.csproj" />
</ItemGroup>

View File

@@ -1,4 +1,5 @@
using System;
using SabreTools.Core;
using Xunit;
namespace SabreTools.Test.Parser
@@ -74,6 +75,53 @@ namespace SabreTools.Test.Parser
}
}
[Theory]
[InlineData("test-sfv.sfv", Hash.CRC)]
[InlineData("test-md5.md5", Hash.MD5)]
[InlineData("test-sha1.sha1", Hash.SHA1)]
[InlineData("test-sha256.sha256", Hash.SHA256)]
[InlineData("test-sha384.sha384", Hash.SHA384)]
[InlineData("test-sha512.sha512", Hash.SHA512)]
[InlineData("test-spamsum.spamsum", Hash.SpamSum)]
public void HashfileDeserializeTest(string file, Hash hash)
{
// Open the file for reading
string filename = System.IO.Path.Combine(Environment.CurrentDirectory, "TestData", file);
// Deserialize the file
var dat = Serialization.Hashfile.Deserialize(filename, hash);
// Validate the values
Assert.NotNull(dat);
switch (hash)
{
case Hash.CRC:
Assert.Single(dat.SFV);
break;
case Hash.MD5:
Assert.Single(dat.MD5);
break;
case Hash.SHA1:
Assert.Single(dat.SHA1);
break;
case Hash.SHA256:
Assert.Single(dat.SHA256);
break;
case Hash.SHA384:
Assert.Single(dat.SHA384);
break;
case Hash.SHA512:
Assert.Single(dat.SHA512);
break;
case Hash.SpamSum:
Assert.Single(dat.SpamSum);
break;
default:
throw new ArgumentOutOfRangeException(nameof(hash));
}
}
[Fact]
public void ListxmlDeserializeTest()
{