Compare commits

..

56 Commits
1.3.9 ... 1.6.4

Author SHA1 Message Date
Matt Nadareski
0bc869543a Bump version 2024-12-31 21:10:46 -05:00
Matt Nadareski
aa7d513d2c Add Ring Perfect Audio Offset pseudo-tag 2024-12-31 21:09:11 -05:00
Matt Nadareski
3d35129529 Update copyright 2024-12-30 21:27:26 -05:00
Matt Nadareski
ec563938ba Remove unnecessary action step 2024-12-30 21:26:16 -05:00
Matt Nadareski
f0f3a1a194 Bump version 2024-12-28 13:52:24 -05:00
Deterous
55f5262198 Add new Protection pseudo site code (#9)
* Add Protection pseudo site tag

* Use new sitecode in redumplib
2024-12-27 12:33:35 -05:00
Matt Nadareski
1d247b1f6f Use string comparison on tab replacement when possible 2024-12-25 22:13:26 -05:00
Matt Nadareski
32c57736ae Duplicate write offset field for convenience (fixes #8) 2024-12-25 22:03:55 -05:00
Deterous
8ab312ba8b Convert <Tab> (#7) 2024-12-24 21:07:51 -05:00
Matt Nadareski
3ea01ca933 Ensure .NET versions are installed for testing 2024-12-19 10:52:22 -05:00
Matt Nadareski
27d99f7429 Bump version 2024-12-16 14:35:02 -05:00
Matt Nadareski
8b147f2041 Change empty language list message 2024-12-16 14:33:22 -05:00
Matt Nadareski
9c7a143d52 Add to publish scripts, not rolling build 2024-12-16 14:26:55 -05:00
Matt Nadareski
30bbef7bba Add RedumpTool as a non-building component 2024-12-16 14:23:43 -05:00
Matt Nadareski
17da564b00 Fix old .NET support 2024-12-16 14:22:07 -05:00
Matt Nadareski
073176cccb Update Models to 1.5.8 2024-12-16 14:21:53 -05:00
Matt Nadareski
0434e63e34 Allow symbols to be packed 2024-12-16 14:21:34 -05:00
Matt Nadareski
2b75eb44cd Use publish script and update README 2024-12-06 11:34:34 -05:00
Matt Nadareski
10eecc866e Bump version 2024-12-05 22:11:48 -05:00
Matt Nadareski
84fa2f93ea Fix consecutive empty line logic 2024-12-05 21:15:26 -05:00
Matt Nadareski
5a92c0fc98 Bump version 2024-12-01 22:58:47 -05:00
Matt Nadareski
4ffc1b3160 Fix multi-newline formatting, add tests 2024-12-01 22:44:12 -05:00
Matt Nadareski
ffa8f2b16e Update ToDiscType and add tests 2024-12-01 22:31:19 -05:00
Matt Nadareski
70e3e074cc Update some extensions, update tests 2024-12-01 22:14:49 -05:00
Matt Nadareski
4858b4e459 None of these are TODOs on my part 2024-12-01 21:32:45 -05:00
Matt Nadareski
9495cd32c7 Handle some TODO items 2024-12-01 21:31:58 -05:00
Matt Nadareski
071571870e Add ToYesNo tests 2024-12-01 21:20:47 -05:00
Matt Nadareski
f03cd40181 Use automatic system name mapping 2024-12-01 21:14:08 -05:00
Matt Nadareski
ea51726645 Fill out more tests 2024-12-01 21:09:15 -05:00
Matt Nadareski
f0633d5aa7 Framework only matters for executable 2024-11-30 21:39:44 -05:00
Matt Nadareski
4c076aec0c Update packages 2024-11-30 21:38:41 -05:00
Matt Nadareski
2ec9d6a4a0 Use more targeted library for old .NET 2024-11-18 19:53:44 -05:00
Matt Nadareski
415b488005 Bump version 2024-11-14 22:23:20 -05:00
Matt Nadareski
5d300c9975 Make download helpers public for ease 2024-11-14 22:22:52 -05:00
Matt Nadareski
304236774f Bump version 2024-11-13 01:54:40 -05:00
Matt Nadareski
9924289c48 Fix casting issues 2024-11-13 01:54:24 -05:00
Matt Nadareski
240eb74ead Bump version 2024-11-13 01:30:42 -05:00
Matt Nadareski
a64b109d2c Remove unncessary Linq usage 2024-11-13 01:29:36 -05:00
Matt Nadareski
3e0f9b5410 Add .NET 9 to target frameworks 2024-11-13 00:59:20 -05:00
Matt Nadareski
668be418ac Bump version 2024-10-18 12:30:30 -04:00
Matt Nadareski
7d184a634e Always return ID list, if possible 2024-10-18 12:26:36 -04:00
Matt Nadareski
67aed0899d Don't null foreign title if missing 2024-10-18 11:58:29 -04:00
Matt Nadareski
9fbaf1a187 Bump version 2024-10-04 01:42:58 -04:00
Matt Nadareski
fe8686a2bb Allow forward slashes in queries sometimes 2024-10-04 01:41:05 -04:00
Matt Nadareski
652270c8c7 Add publish scripts 2024-10-01 13:56:40 -04:00
Matt Nadareski
905d8a94fb Bump version 2024-10-01 13:55:33 -04:00
Matt Nadareski
3ee8416695 Remove unnecessary tuples 2024-10-01 13:53:03 -04:00
Matt Nadareski
49fa06da55 Remove threading bridge package (unused) 2024-10-01 04:27:31 -04:00
Matt Nadareski
70e29afd89 Remove Linq requirement from old .NET 2024-10-01 04:25:35 -04:00
Matt Nadareski
2a402a53db Remove ValueTuple packages (usused) 2024-10-01 03:21:07 -04:00
Matt Nadareski
66bb3b75b2 Bump version 2024-07-24 11:05:25 -04:00
Matt Nadareski
9d7d46673a Fix deserializing submission from file 2024-07-23 22:18:06 -04:00
Matt Nadareski
16d196c902 Bump version 2024-07-16 14:13:28 -04:00
Matt Nadareski
c93da92f19 Add new helper class for site interaction 2024-07-16 14:12:28 -04:00
Matt Nadareski
a219d0c5de Add some client helper classes 2024-07-16 14:07:18 -04:00
Matt Nadareski
02e6f0e85f Port tests from MPF 2024-07-16 13:08:56 -04:00
45 changed files with 6831 additions and 1871 deletions

View File

@@ -1,4 +1,4 @@
name: Nuget Pack
name: Build and Test
on:
push:
@@ -12,29 +12,26 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 8.0.x
- name: Restore dependencies
run: dotnet restore
- name: Pack
run: dotnet pack
dotnet-version: |
6.0.x
8.0.x
9.0.x
- name: Upload build
uses: actions/upload-artifact@v4
with:
name: 'Nuget Package'
path: 'SabreTools.RedumpLib/bin/Release/*.nupkg'
- name: Run tests
run: dotnet test
- name: Run publish script
run: ./publish-nix.sh -d
- name: Upload to rolling
uses: ncipollo/release-action@v1.14.0
with:
allowUpdates: True
artifacts: 'SabreTools.RedumpLib/bin/Release/*.nupkg'
artifacts: "*.nupkg,*.snupkg"
body: 'Last built commit: ${{ github.sha }}'
name: 'Rolling Release'
prerelease: True

View File

@@ -11,7 +11,13 @@ jobs:
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 8.0.x
dotnet-version: |
6.0.x
8.0.x
9.0.x
- name: Build
run: dotnet build
run: dotnet build
- name: Run tests
run: dotnet test

View File

@@ -1,5 +1,13 @@
# SabreTools.RedumpLib
[![Build and Test](https://github.com/SabreTools/SabreTools.RedumpLib/actions/workflows/build_and_test.yml/badge.svg)](https://github.com/SabreTools/SabreTools.RedumpLib/actions/workflows/build_and_test.yml)
This library comprises interaction logic for [Redump](http://redump.org/). Because there is no formal API for the site, this library interacts with the site through normal HTTP methods. It includes a fairly comprehensive reference of supported parts of the site, including URLs, page information, and packs.
Find the link to the Nuget package [here](https://www.nuget.org/packages/SabreTools.RedumpLib).
## Releases
For the most recent stable build, download the latest release here: [Releases Page](https://github.com/SabreTools/SabreTools.RedumpLib/releases)
For the latest WIP build here: [Rolling Release](https://github.com/SabreTools/SabreTools.RedumpLib/releases/rolling)

278
RedumpTool/Program.cs Normal file
View File

@@ -0,0 +1,278 @@
using System;
using System.IO;
using SabreTools.RedumpLib;
using SabreTools.RedumpLib.Data;
namespace RedumpTool
{
public class Program
{
static void Main(string[] args)
{
// Show help if nothing is input
if (args == null || args.Length == 0)
{
ShowHelp();
return;
}
// Derive the feature, if possible
Feature feature = DeriveFeature(args[0]);
if (feature == Feature.NONE)
{
ShowHelp();
return;
}
// Create a new Downloader
var downloader = CreateDownloader(feature, args);
if (downloader == null)
{
ShowHelp();
return;
}
// Run the download task
var downloaderTask = downloader.Download();
downloaderTask.Wait();
// Get the downloader task results and print, if necessary
var downloaderResult = downloaderTask.Result;
if (downloaderResult.Count > 0)
{
string processedIds = string.Join(", ", [.. downloaderResult.ConvertAll(i => i.ToString())]);
Console.WriteLine($"Processed IDs: {processedIds}");
}
else if (downloaderResult.Count == 0 && downloader.Feature != Feature.Packs)
{
ShowHelp();
}
}
/// <summary>
/// Derive the feature from the supplied argument
/// </summary>
/// <param name="feature">Possible feature name to derive from</param>
/// <returns>True if the feature was set, false otherwise</returns>
private static Feature DeriveFeature(string feature)
{
return feature.ToLowerInvariant() switch
{
"site" => Feature.Site,
"wip" => Feature.WIP,
"packs" => Feature.Packs,
"user" => Feature.User,
"search" => Feature.Quicksearch,
"query" => Feature.Quicksearch,
_ => Feature.NONE,
};
}
/// <summary>
/// Create a Downloader from a feature and a set of arguments
/// </summary>
/// <param name="feature">Primary feature to use</param>
/// <param name="args">Arguments list to parse</param>
/// <returns>Initialized Downloader on success, null otherwise</returns>
private static Downloader? CreateDownloader(Feature feature, string[] args)
{
// Set temporary internal variables
string? outDir = null;
string? username = null;
string? password = null;
int minimumId = -1;
int maximumId = -1;
string? queryString = null;
bool useSubfolders = false;
bool onlyNew = false;
bool onlyList = false;
bool noSlash = false;
bool force = false;
// Now loop through all of the arguments
try
{
for (int i = 1; i < args.Length; i++)
{
switch (args[i])
{
// Output directory
case "-o":
case "--output":
outDir = args[++i].Trim('"');
break;
// Username
case "-u":
case "--username":
username = args[++i];
break;
// Password
case "-p":
case "--password":
password = args[++i];
break;
// Minimum Redump ID
case "-min":
case "--minimum":
if (!int.TryParse(args[++i], out minimumId))
minimumId = -1;
break;
// Maximum Redump ID
case "-max":
case "--maximum":
if (!int.TryParse(args[++i], out maximumId))
maximumId = -1;
break;
// Quicksearch text
case "-q":
case "--query":
queryString = args[++i];
break;
// Packs subfolders
case "-s":
case "--subfolders":
useSubfolders = true;
break;
// Use last modified
case "-n":
case "--onlynew":
onlyNew = true;
break;
// List instead of download
case "-l":
case "--list":
onlyList = true;
break;
// Don't filter forward slashes from queries
case "-ns":
case "--noslash":
noSlash = true;
break;
// Force continuation
case "-f":
case "--force":
force = true;
break;
// Everything else
default:
Console.WriteLine($"Unrecognized flag: {args[i]}");
break;
}
}
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return null;
}
// Output directory validation
if (!onlyList && string.IsNullOrEmpty(outDir))
{
Console.WriteLine("No output directory set!");
return null;
}
else if (!onlyList && !string.IsNullOrEmpty(outDir))
{
// Create the output directory, if it doesn't exist
try
{
if (!Directory.Exists(outDir))
Directory.CreateDirectory(outDir);
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return null;
}
}
// Range verification
if (feature == Feature.Site && !onlyNew && (minimumId < 0 || maximumId < 0))
{
Console.WriteLine("Please enter a valid range of Redump IDs");
return null;
}
else if (feature == Feature.WIP && !onlyNew && (minimumId < 0 || maximumId < 0))
{
Console.WriteLine("Please enter a valid range of WIP IDs");
return null;
}
// Query verification (and cleanup)
if (feature == Feature.Quicksearch && string.IsNullOrEmpty(queryString))
{
Console.WriteLine("Please enter a query for searching");
return null;
}
// Create and return the downloader
var downloader = new Downloader()
{
Feature = feature,
MinimumId = minimumId,
MaximumId = maximumId,
QueryString = queryString,
OutDir = outDir,
UseSubfolders = useSubfolders,
OnlyNew = onlyNew,
OnlyList = onlyList,
Force = force,
NoSlash = noSlash,
Username = username,
Password = password,
};
return downloader;
}
/// <summary>
/// Show the commandline help for the program
/// </summary>
private static void ShowHelp()
{
Console.WriteLine("RedumpTool - A Redump.org recovery tool");
Console.WriteLine();
Console.WriteLine("Usage: RedumpTool <feature> [options]");
Console.WriteLine();
Console.WriteLine("Common Options");
Console.WriteLine(" -o <folder>, --output <folder> - Set the base output directory");
Console.WriteLine(" -u <username>, --username <username> - Redump username");
Console.WriteLine(" -p <pass>, --password <pass> - Redump password");
Console.WriteLine();
Console.WriteLine("site - Download pages and related files from the main site");
Console.WriteLine(" -min <MinId>, --minimum <MinId> - Lower bound for page numbers (cannot be used with only new)");
Console.WriteLine(" -max <MaxId>, --maximum <MaxId> - Upper bound for page numbers (cannot be used with only new)");
Console.WriteLine(" -n, --onlynew - Use the last modified view (cannot be used with min and max)");
Console.WriteLine(" -f, --force - Force continuing downloads until user cancels (used with only new)");
Console.WriteLine();
Console.WriteLine("wip - Download pages and related files from the WIP list");
Console.WriteLine(" -min <MinId>, --minimum <MinId> - Lower bound for page numbers (cannot be used with only new)");
Console.WriteLine(" -max <MaxId>, --maximum <MaxId> - Upper bound for page numbers (cannot be used with only new)");
Console.WriteLine(" -n, --onlynew - Use the last modified view (cannot be used with min and max)");
Console.WriteLine();
Console.WriteLine("packs - Download available packs");
Console.WriteLine(" -s, --subfolders - Download packs to named subfolders");
Console.WriteLine();
Console.WriteLine("user - Download pages and related files for a particular user");
Console.WriteLine(" -n, --onlynew - Use the last modified view instead of sequential parsing");
Console.WriteLine(" -l, --list - Only list the page IDs for that user");
Console.WriteLine();
Console.WriteLine("query - Download pages and related files from a Redump-compatible query");
Console.WriteLine(" -q, --query - Redump-compatible query to run");
Console.WriteLine(" -l, --list - Only list the page IDs for that query");
Console.WriteLine(" -ns, --noslash - Don't replace forward slashes with '-'");
Console.WriteLine();
}
}
}

View File

@@ -0,0 +1,33 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net20;net35;net40;net452;net462;net472;net48;netcoreapp3.1;net5.0;net6.0;net7.0;net8.0;net9.0</TargetFrameworks>
<OutputType>Exe</OutputType>
<CheckEolTargetFramework>false</CheckEolTargetFramework>
<IncludeSourceRevisionInInformationalVersion>false</IncludeSourceRevisionInInformationalVersion>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.6.4</Version>
</PropertyGroup>
<!-- Support All Frameworks -->
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR $(TargetFramework.StartsWith(`net4`))">
<RuntimeIdentifiers>win-x86;win-x64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(TargetFramework.StartsWith(`netcoreapp`)) OR $(TargetFramework.StartsWith(`net5`))">
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net6`)) OR $(TargetFramework.StartsWith(`net7`)) OR $(TargetFramework.StartsWith(`net8`)) OR $(TargetFramework.StartsWith(`net9`))">
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64;osx-arm64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(RuntimeIdentifier.StartsWith(`osx-arm`))">
<TargetFrameworks>net6.0;net7.0;net8.0;net9.0</TargetFrameworks>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\SabreTools.RedumpLib\SabreTools.RedumpLib.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,191 @@
using System;
using System.IO;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace SabreTools.RedumpLib.Test
{
public class BuilderTests
{
[Theory]
[InlineData("success_complete.json", false)]
[InlineData("success_invalid.json", false)] // Fully invalid returns a default object
[InlineData("success_partial.json", false)]
[InlineData("fail_invalid.json", true)]
public void CreateFromFileTest(string filename, bool expectNull)
{
// Get the full path to the test file
string path = Path.Combine(Environment.CurrentDirectory, "TestData", filename);
// Try to create the submission info from file
var si = Builder.CreateFromFile(path);
// Check for an expected result
Assert.Equal(expectNull, si == null);
}
[Fact]
public void EnsureAllSections_Null_Filled()
{
SubmissionInfo? si = null;
var actual = Builder.EnsureAllSections(si);
Assert.NotNull(actual);
Assert.NotNull(actual.CommonDiscInfo);
Assert.NotNull(actual.CommonDiscInfo.CommentsSpecialFields);
Assert.NotNull(actual.CommonDiscInfo.ContentsSpecialFields);
Assert.NotNull(actual.VersionAndEditions);
Assert.NotNull(actual.EDC);
Assert.NotNull(actual.ParentCloneRelationship);
Assert.NotNull(actual.Extras);
Assert.NotNull(actual.CopyProtection);
Assert.NotNull(actual.DumpersAndStatus);
Assert.NotNull(actual.TracksAndWriteOffsets);
Assert.NotNull(actual.SizeAndChecksums);
Assert.NotNull(actual.DumpingInfo);
Assert.NotNull(actual.Artifacts);
}
[Fact]
public void EnsureAllSections_Empty_Filled()
{
SubmissionInfo? si = new SubmissionInfo
{
CommonDiscInfo = null,
VersionAndEditions = null,
EDC = null,
ParentCloneRelationship = null,
Extras = null,
CopyProtection = null,
DumpersAndStatus = null,
TracksAndWriteOffsets = null,
SizeAndChecksums = null,
DumpingInfo = null,
Artifacts = null,
};
var actual = Builder.EnsureAllSections(si);
Assert.NotNull(actual);
Assert.NotNull(actual.CommonDiscInfo);
Assert.NotNull(actual.CommonDiscInfo.CommentsSpecialFields);
Assert.NotNull(actual.CommonDiscInfo.ContentsSpecialFields);
Assert.NotNull(actual.VersionAndEditions);
Assert.NotNull(actual.EDC);
Assert.NotNull(actual.ParentCloneRelationship);
Assert.NotNull(actual.Extras);
Assert.NotNull(actual.CopyProtection);
Assert.NotNull(actual.DumpersAndStatus);
Assert.NotNull(actual.TracksAndWriteOffsets);
Assert.NotNull(actual.SizeAndChecksums);
Assert.NotNull(actual.DumpingInfo);
Assert.NotNull(actual.Artifacts);
}
[Fact]
public void EnsureAllSections_Filled_Filled()
{
SubmissionInfo? si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection
{
CommentsSpecialFields = [],
ContentsSpecialFields = [],
},
VersionAndEditions = new VersionAndEditionsSection(),
EDC = new EDCSection(),
ParentCloneRelationship = new ParentCloneRelationshipSection(),
Extras = new ExtrasSection(),
CopyProtection = new CopyProtectionSection(),
DumpersAndStatus = new DumpersAndStatusSection(),
TracksAndWriteOffsets = new TracksAndWriteOffsetsSection(),
SizeAndChecksums = new SizeAndChecksumsSection(),
DumpingInfo = new DumpingInfoSection(),
Artifacts = [],
};
var actual = Builder.EnsureAllSections(si);
Assert.NotNull(actual);
Assert.NotNull(actual.CommonDiscInfo);
Assert.NotNull(actual.CommonDiscInfo.CommentsSpecialFields);
Assert.NotNull(actual.CommonDiscInfo.ContentsSpecialFields);
Assert.NotNull(actual.VersionAndEditions);
Assert.NotNull(actual.EDC);
Assert.NotNull(actual.ParentCloneRelationship);
Assert.NotNull(actual.Extras);
Assert.NotNull(actual.CopyProtection);
Assert.NotNull(actual.DumpersAndStatus);
Assert.NotNull(actual.TracksAndWriteOffsets);
Assert.NotNull(actual.SizeAndChecksums);
Assert.NotNull(actual.DumpingInfo);
Assert.NotNull(actual.Artifacts);
}
[Fact]
public void InjectSubmissionInformation_BothNull_Null()
{
SubmissionInfo? si = null;
SubmissionInfo? seed = null;
var actual = Builder.InjectSubmissionInformation(si, seed);
Assert.Null(actual);
}
[Fact]
public void InjectSubmissionInformation_ValidInputNullSeed_Valid()
{
SubmissionInfo? si = new SubmissionInfo();
SubmissionInfo? seed = null;
var actual = Builder.InjectSubmissionInformation(si, seed);
Assert.NotNull(actual);
}
[Fact]
public void InjectSubmissionInformation_BothValid_Valid()
{
SubmissionInfo? si = new SubmissionInfo();
SubmissionInfo? seed = new SubmissionInfo();
var actual = Builder.InjectSubmissionInformation(si, seed);
Assert.NotNull(actual);
}
[Fact]
public void ReplaceHtmlWithSiteCodes_EmptyString_Empty()
{
string original = string.Empty;
string actual = Builder.ReplaceHtmlWithSiteCodes(original);
Assert.Empty(actual);
}
[Fact]
public void ReplaceHtmlWithSiteCodes_NoReplace_Identical()
{
string original = "<p>Nothing here will be replaced</p>";
string actual = Builder.ReplaceHtmlWithSiteCodes(original);
Assert.Equal(original, actual);
}
[Fact]
public void ReplaceHtmlWithSiteCodes_StandardCode_Replaced()
{
string original = "<b>ISBN</b>: 000-0-00-000000-0";
string expected = "[T:ISBN] 000-0-00-000000-0";
string actual = Builder.ReplaceHtmlWithSiteCodes(original);
Assert.Equal(expected, actual);
}
[Fact]
public void ReplaceHtmlWithSiteCodes_OutdatedCode_Replaced()
{
string original = "XMID: AB12345C";
string expected = "<b>XMID</b>: AB12345C";
string actual = Builder.ReplaceHtmlWithSiteCodes(original);
Assert.Equal(expected, actual);
}
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,8 @@
namespace SabreTools.RedumpLib.Test
{
public class DownloaderTests
{
// Tests here will require installing and using the Moq library
// to mock the RedumpClient type.
}
}

View File

@@ -0,0 +1,877 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace SabreTools.RedumpLib.Test
{
public class FormatterTests
{
#region ProcessSpecialFields
// TODO: Write tests for ProcessSpecialFields
#endregion
#region CommonDiscInfoSection
// TODO: Write tests for FormatOutputData(CommonDiscInfoSection)
[Fact]
public void FormatOutputData_CDINullSACNullTAWONull_Minimal()
{
string expected = "Common Disc Info:\n\tRegion: SPACE! (CHANGE THIS)\n\tLanguages: ADD LANGUAGES HERE (ONLY IF YOU TESTED)\n\n\tRingcode Information:\n\n\n";
var builder = new StringBuilder();
CommonDiscInfoSection? section = null;
SizeAndChecksumsSection? sac = null;
TracksAndWriteOffsetsSection? tawo = null;
int? fullyMatchedID = null;
List<int>? partiallyMatchedIDs = null;
Formatter.FormatOutputData(builder,
section,
sac,
tawo,
fullyMatchedID,
partiallyMatchedIDs);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region VersionAndEditionsSection
[Fact]
public void FormatOutputData_VAENull_Minimal()
{
string expected = "Version and Editions:\n";
var builder = new StringBuilder();
VersionAndEditionsSection? section = null;
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_VAE_Formatted()
{
string expected = "Version and Editions:\n\tVersion: XXXXXX\n\tEdition/Release: XXXXXX\n";
var builder = new StringBuilder();
VersionAndEditionsSection? section = new VersionAndEditionsSection
{
Version = "XXXXXX",
OtherEditions = "XXXXXX",
};
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region EDCSection
[Fact]
public void FormatOutputData_EDCNull_Minimal()
{
string expected = "EDC:\n";
var builder = new StringBuilder();
EDCSection? section = null;
RedumpSystem? system = RedumpSystem.SonyPlayStation;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_EDCInvalidSystem_Empty()
{
string expected = string.Empty;
var builder = new StringBuilder();
EDCSection? section = null;
RedumpSystem? system = RedumpSystem.IBMPCcompatible;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_EDC_Formatted()
{
string expected = "EDC:\n\tEDC: Yes\n";
var builder = new StringBuilder();
EDCSection? section = new EDCSection { EDC = YesNo.Yes };
RedumpSystem? system = RedumpSystem.SonyPlayStation;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region ExtrasSection
[Fact]
public void FormatOutputData_ExtrasNull_Empty()
{
string expected = string.Empty;
var builder = new StringBuilder();
ExtrasSection? section = null;
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_ExtrasInvalid_Empty()
{
string expected = string.Empty;
var builder = new StringBuilder();
ExtrasSection? section = new ExtrasSection
{
PVD = null,
PIC = null,
BCA = null,
SecuritySectorRanges = null,
};
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_Extras_Formatted()
{
string expected = "Extras:\n\tPrimary Volume Descriptor (PVD): XXXXXX\n\tDisc Key: XXXXXX\n\tDisc ID: XXXXXX\n\tPermanent Information & Control (PIC): XXXXXX\n\tHeader: XXXXXX\n\tBCA: XXXXXX\n\tSecurity Sector Ranges: XXXXXX\n";
var builder = new StringBuilder();
ExtrasSection? section = new ExtrasSection
{
PVD = "XXXXXX",
DiscKey = "XXXXXX",
DiscID = "XXXXXX",
PIC = "XXXXXX",
Header = "XXXXXX",
BCA = "XXXXXX",
SecuritySectorRanges = "XXXXXX",
};
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region CopyProtectionSection
[Fact]
public void FormatOutputData_COPNull_Empty()
{
string expected = string.Empty;
var builder = new StringBuilder();
CopyProtectionSection? section = null;
RedumpSystem? system = RedumpSystem.IBMPCcompatible;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_COPInvalid_Empty()
{
string expected = string.Empty;
var builder = new StringBuilder();
CopyProtectionSection? section = new CopyProtectionSection
{
Protection = null,
AntiModchip = null,
LibCrypt = null,
LibCryptData = null,
SecuROMData = null,
};
RedumpSystem? system = RedumpSystem.IBMPCcompatible;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_COP_Formatted()
{
string expected = "Copy Protection:\n\tCopy Protection: XXXXXX\n\tSubIntention Data (SecuROM/LibCrypt): XXXXXX\n";
var builder = new StringBuilder();
CopyProtectionSection? section = new CopyProtectionSection
{
AntiModchip = YesNo.Yes,
LibCrypt = YesNo.Yes,
LibCryptData = "XXXXXX",
Protection = "XXXXXX",
SecuROMData = "XXXXXX",
};
RedumpSystem? system = RedumpSystem.IBMPCcompatible;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_COPPSX_Formatted()
{
string expected = "Copy Protection:\n\tAnti-modchip: Yes\n\tLibCrypt: Yes\n\tSubIntention Data (SecuROM/LibCrypt): XXXXXX\n\tCopy Protection: XXXXXX\n\tSubIntention Data (SecuROM/LibCrypt): XXXXXX\n";
var builder = new StringBuilder();
CopyProtectionSection? section = new CopyProtectionSection
{
AntiModchip = YesNo.Yes,
LibCrypt = YesNo.Yes,
LibCryptData = "XXXXXX",
Protection = "XXXXXX",
SecuROMData = "XXXXXX",
};
RedumpSystem? system = RedumpSystem.SonyPlayStation;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region TracksAndWriteOffsetsSection
[Fact]
public void FormatOutputData_TAWOInvalid_Minimal()
{
string expected = "Tracks and Write Offsets:\n\tDAT:\n\n\n\n\n";
var builder = new StringBuilder();
TracksAndWriteOffsetsSection? section = new TracksAndWriteOffsetsSection();
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_TAWO_Formatted()
{
string expected = "Tracks and Write Offsets:\n\tDAT:\n\nXXXXXX\n\n\n\tCuesheet: XXXXXX\n\tWrite Offset: XXXXXX\n";
var builder = new StringBuilder();
TracksAndWriteOffsetsSection? section = new TracksAndWriteOffsetsSection
{
ClrMameProData = "XXXXXX",
Cuesheet = "XXXXXX",
OtherWriteOffsets = "XXXXXX",
};
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region SizeAndChecksumsSection
// TODO: Write tests for FormatOutputData(SizeAndChecksumsSection)
#endregion
#region DumpingInfoSection
[Fact]
public void FormatOutputData_DINull_Minimal()
{
string expected = "Dumping Info:\n";
var builder = new StringBuilder();
DumpingInfoSection? section = null;
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_DI_Formatted()
{
string expected = "Dumping Info:\n\tFrontend Version: XXXXXX\n\tDumping Program: XXXXXX\n\tDate: XXXXXX\n\tParameters: XXXXXX\n\tManufacturer: XXXXXX\n\tModel: XXXXXX\n\tFirmware: XXXXXX\n\tReported Disc Type: XXXXXX\n\tC2 Error Count: XXXXXX\n";
var builder = new StringBuilder();
DumpingInfoSection? section = new DumpingInfoSection
{
FrontendVersion = "XXXXXX",
DumpingProgram = "XXXXXX",
DumpingDate = "XXXXXX",
DumpingParameters = "XXXXXX",
Manufacturer = "XXXXXX",
Model = "XXXXXX",
Firmware = "XXXXXX",
ReportedDiscType = "XXXXXX",
C2ErrorsCount = "XXXXXX",
};
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region FormatSiteTag
[Fact]
public void FormatSiteTag_NoValue_Empty()
{
SiteCode code = SiteCode.AlternativeTitle;
string value = string.Empty;
string actual = Formatter.FormatSiteTag(code, value);
Assert.Empty(actual);
}
[Fact]
public void FormatSiteTag_Standard_Formatted()
{
string expected = "[T:ALT] XXXXXX";
SiteCode code = SiteCode.AlternativeTitle;
string value = "XXXXXX";
string actual = Formatter.FormatSiteTag(code, value);
Assert.Equal(expected, actual);
}
[Fact]
public void FormatSiteTag_BooleanTrue_Formatted()
{
string expected = "[T:VCD]";
SiteCode code = SiteCode.VCD;
string value = "True";
string actual = Formatter.FormatSiteTag(code, value);
Assert.Equal(expected, actual);
}
[Fact]
public void FormatSiteTag_BooleanFalse_Empty()
{
SiteCode code = SiteCode.VCD;
string value = "XXXXXX";
string actual = Formatter.FormatSiteTag(code, value);
Assert.Empty(actual);
}
[Fact]
public void FormatSiteTag_Multiline_Formatted()
{
string expected = "[T:X]\nXXXXXX\n";
SiteCode code = SiteCode.Extras;
string value = "XXXXXX";
string actual = Formatter.FormatSiteTag(code, value);
Assert.Equal(expected, actual);
}
#endregion
#region GetFixedMediaType
[Fact]
public void GetFixedMediaType_NullType_Null()
{
MediaType? mediaType = null;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Null(actual);
}
[Fact]
public void GetFixedMediaType_UnformattedType_Formatted()
{
string? expected = "CD-ROM";
MediaType? mediaType = MediaType.CDROM;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_DVD9_Formatted()
{
string? expected = "DVD-ROM-9";
MediaType? mediaType = MediaType.DVD;
string? picIdentifier = null;
long? size = null;
long? layerbreak = 12345;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_DVD5_Formatted()
{
string? expected = "DVD-ROM-5";
MediaType? mediaType = MediaType.DVD;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD128_Formatted()
{
    // A third layerbreak is expected to map to BD-128.
    string? actual = Formatter.GetFixedMediaType(MediaType.BluRay, null, null, null, null, 12345);
    Assert.Equal("BD-ROM-128", actual);
}
[Fact]
public void GetFixedMediaType_BD100_Formatted()
{
    // A second layerbreak is expected to map to BD-100.
    string? actual = Formatter.GetFixedMediaType(MediaType.BluRay, null, null, null, 12345, null);
    Assert.Equal("BD-ROM-100", actual);
}
[Fact]
public void GetFixedMediaType_BD66PIC_Formatted()
{
    // The Ultra PIC identifier plus one layerbreak is expected to map to BD-66.
    string? picIdentifier = Models.PIC.Constants.DiscTypeIdentifierROMUltra;
    string? actual = Formatter.GetFixedMediaType(MediaType.BluRay, picIdentifier, null, 12345, null, null);
    Assert.Equal("BD-ROM-66", actual);
}
[Fact]
public void GetFixedMediaType_BD66Size_Formatted()
{
    // A dual-layer BD of this reported size is expected to map to BD-66.
    string? actual = Formatter.GetFixedMediaType(MediaType.BluRay, null, 53_687_063_713, 12345, null, null);
    Assert.Equal("BD-ROM-66", actual);
}
[Fact]
public void GetFixedMediaType_BD50_Formatted()
{
    // One layerbreak with no PIC or size hints is expected to map to BD-50.
    string? actual = Formatter.GetFixedMediaType(MediaType.BluRay, null, null, 12345, null, null);
    Assert.Equal("BD-ROM-50", actual);
}
[Fact]
public void GetFixedMediaType_BD33PIC_Formatted()
{
    // The Ultra PIC identifier with no layerbreak is expected to map to BD-33.
    string? picIdentifier = Models.PIC.Constants.DiscTypeIdentifierROMUltra;
    string? actual = Formatter.GetFixedMediaType(MediaType.BluRay, picIdentifier, null, null, null, null);
    Assert.Equal("BD-ROM-33", actual);
}
[Fact]
public void GetFixedMediaType_BD33Size_Formatted()
{
    // A single-layer BD of this reported size is expected to map to BD-33.
    string? actual = Formatter.GetFixedMediaType(MediaType.BluRay, null, 26_843_531_857, null, null, null);
    Assert.Equal("BD-ROM-33", actual);
}
[Fact]
public void GetFixedMediaType_BD25_Formatted()
{
    // No layerbreak, PIC, or size hints: expected to default to BD-25.
    string? actual = Formatter.GetFixedMediaType(MediaType.BluRay, null, null, null, null, null);
    Assert.Equal("BD-ROM-25", actual);
}
[Fact]
public void GetFixedMediaType_HDDVDDL_Formatted()
{
    // A layerbreak is expected to mark an HD-DVD as dual-layer.
    string? actual = Formatter.GetFixedMediaType(MediaType.HDDVD, null, null, 12345, null, null);
    Assert.Equal("HD-DVD-ROM-DL", actual);
}
[Fact]
public void GetFixedMediaType_HDDVDSL_Formatted()
{
    // No layerbreak is expected to mark an HD-DVD as single-layer.
    string? actual = Formatter.GetFixedMediaType(MediaType.HDDVD, null, null, null, null, null);
    Assert.Equal("HD-DVD-ROM-SL", actual);
}
[Fact]
public void GetFixedMediaType_UMDDL_Formatted()
{
    // A layerbreak is expected to mark a UMD as dual-layer.
    string? actual = Formatter.GetFixedMediaType(MediaType.UMD, null, null, 12345, null, null);
    Assert.Equal("UMD-DL", actual);
}
[Fact]
public void GetFixedMediaType_UMDSL_Formatted()
{
    // No layerbreak is expected to mark a UMD as single-layer.
    string? actual = Formatter.GetFixedMediaType(MediaType.UMD, null, null, null, null, null);
    Assert.Equal("UMD-SL", actual);
}
#endregion
#region OrderCommentTags
[Fact]
public void OrderCommentTags_Empty_Empty()
{
    // An empty tag set stays empty.
    var ordered = Formatter.OrderCommentTags(new Dictionary<SiteCode, string>());
    Assert.Empty(ordered);
}
[Fact]
public void OrderCommentTags_NoMatch_Empty()
{
    // A code outside the ordered comment set is dropped entirely.
    var tags = new Dictionary<SiteCode, string>
    {
        [SiteCode.Applications] = "XXXXXX",
    };
    var ordered = Formatter.OrderCommentTags(tags);
    Assert.Empty(ordered);
}
[Fact]
public void OrderCommentTags_All_Ordered()
{
    // Seed every known site code; the surviving keys must follow the
    // canonical comment-code order exactly.
    var tags = new Dictionary<SiteCode, string>();
    foreach (SiteCode code in Enum.GetValues<SiteCode>())
    {
        tags[code] = "XXXXXX";
    }

    var ordered = Formatter.OrderCommentTags(tags);

    Assert.NotEmpty(ordered);
    var orderedCodes = ordered.Select(kvp => kvp.Key);
    Assert.True(Formatter.OrderedCommentCodes.SequenceEqual(orderedCodes));
}
#endregion
#region OrderContentTags
[Fact]
public void OrderContentTags_Empty_Empty()
{
    // An empty tag set stays empty.
    var ordered = Formatter.OrderContentTags(new Dictionary<SiteCode, string>());
    Assert.Empty(ordered);
}
[Fact]
public void OrderContentTags_NoMatch_Empty()
{
    // A code outside the ordered content set is dropped entirely.
    var tags = new Dictionary<SiteCode, string>
    {
        [SiteCode.AlternativeTitle] = "XXXXXX",
    };
    var ordered = Formatter.OrderContentTags(tags);
    Assert.Empty(ordered);
}
[Fact]
public void OrderContentTags_All_Ordered()
{
    // Seed every known site code; the surviving keys must follow the
    // canonical content-code order exactly.
    var tags = new Dictionary<SiteCode, string>();
    foreach (SiteCode code in Enum.GetValues<SiteCode>())
    {
        tags[code] = "XXXXXX";
    }

    var ordered = Formatter.OrderContentTags(tags);

    Assert.NotEmpty(ordered);
    var orderedCodes = ordered.Select(kvp => kvp.Key);
    Assert.True(Formatter.OrderedContentCodes.SequenceEqual(orderedCodes));
}
#endregion
#region RemoveConsecutiveEmptyLines
[Fact]
public void RemoveConsecutiveEmptyLines_Linux_Removed()
{
    // A run of LF-only blank lines collapses to a single empty line.
    const string input = "data\n\n\n\n\n\n\n\n\n\nbase";
    string actual = Formatter.RemoveConsecutiveEmptyLines(input);
    Assert.Equal("data\n\nbase", actual);
}
[Fact]
public void RemoveConsecutiveEmptyLines_Windows_Removed()
{
    // A run of CRLF blank lines collapses to a single empty line.
    const string input = "data\r\n\r\n\r\n\r\n\r\nbase";
    string actual = Formatter.RemoveConsecutiveEmptyLines(input);
    Assert.Equal("data\r\n\r\nbase", actual);
}
#endregion
}
}

View File

@@ -0,0 +1,47 @@
<Project Sdk="Microsoft.NET.Sdk">
<!-- Test project: multi-targets all supported runtimes so tests run on each TFM. -->
<PropertyGroup>
<TargetFrameworks>net6.0;net8.0;net9.0</TargetFrameworks>
<!-- net6.0 is end-of-life; suppress the EOL warning deliberately. -->
<CheckEolTargetFramework>false</CheckEolTargetFramework>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<!-- Library under test. -->
<ItemGroup>
<ProjectReference Include="..\SabreTools.RedumpLib\SabreTools.RedumpLib.csproj" />
</ItemGroup>
<!-- TestData files are copied to the output directory as content, not compiled. -->
<ItemGroup>
<None Remove="TestData\*" />
</ItemGroup>
<ItemGroup>
<Content Include="TestData\*">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</Content>
</ItemGroup>
<!-- Test tooling: xUnit plus the VSTest/coverage infrastructure. -->
<ItemGroup>
<PackageReference Include="Microsoft.CodeCoverage" Version="17.12.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.12.0" />
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.abstractions" Version="2.0.3" />
<PackageReference Include="xunit.analyzers" Version="1.17.0" />
<PackageReference Include="xunit.assert" Version="2.9.2" />
<PackageReference Include="xunit.core" Version="2.9.2" />
<PackageReference Include="xunit.extensibility.core" Version="2.9.2" />
<PackageReference Include="xunit.extensibility.execution" Version="2.9.2" />
<PackageReference Include="xunit.runner.console" Version="2.9.2">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<!-- NOTE(review): runner.visualstudio is pinned at 2.8.2 while the other xunit
     packages are 2.9.2 — presumably for older-TFM support; confirm intent. -->
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
</Project>

View File

@@ -0,0 +1,181 @@
using System;
using System.Collections.Generic;
using Newtonsoft.Json;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace SabreTools.RedumpLib.Test
{
// Serialization smoke tests for SubmissionInfo. These only verify that
// JsonConvert.SerializeObject completes without throwing; they do not
// validate the produced JSON content.
public class SubmissionInfoTests
{
[Fact]
public void EmptySerializationTest()
{
// A default-constructed SubmissionInfo (all sections null) must serialize cleanly.
var submissionInfo = new SubmissionInfo();
string json = JsonConvert.SerializeObject(submissionInfo, Formatting.Indented);
Assert.NotNull(json);
}
[Fact]
public void PartialSerializationTest()
{
// Non-null but empty sections must serialize cleanly.
var submissionInfo = new SubmissionInfo()
{
CommonDiscInfo = new CommonDiscInfoSection(),
VersionAndEditions = new VersionAndEditionsSection(),
EDC = new EDCSection(),
ParentCloneRelationship = new ParentCloneRelationshipSection(),
Extras = new ExtrasSection(),
CopyProtection = new CopyProtectionSection(),
DumpersAndStatus = new DumpersAndStatusSection(),
TracksAndWriteOffsets = new TracksAndWriteOffsetsSection(),
SizeAndChecksums = new SizeAndChecksumsSection(),
};
string json = JsonConvert.SerializeObject(submissionInfo, Formatting.Indented);
Assert.NotNull(json);
}
[Fact]
public void FullSerializationTest()
{
// Every section populated with representative values must serialize cleanly,
// exercising all custom converters and special-field dictionaries.
var submissionInfo = new SubmissionInfo()
{
SchemaVersion = 1,
FullyMatchedID = 3,
PartiallyMatchedIDs = new List<int> { 0, 1, 2, 3 },
Added = DateTime.UtcNow,
LastModified = DateTime.UtcNow,
CommonDiscInfo = new CommonDiscInfoSection()
{
System = RedumpSystem.IBMPCcompatible,
Media = DiscType.CD,
Title = "Game Title",
ForeignTitleNonLatin = "Foreign Game Title",
DiscNumberLetter = "1",
DiscTitle = "Install Disc",
Category = DiscCategory.Games,
Region = Region.World,
Languages = new Language?[] { Language.English, Language.Spanish, Language.French },
LanguageSelection = new LanguageSelection?[] { LanguageSelection.BiosSettings },
Serial = "Disc Serial",
Layer0MasteringRing = "L0 Mastering Ring",
Layer0MasteringSID = "L0 Mastering SID",
Layer0ToolstampMasteringCode = "L0 Toolstamp",
Layer0MouldSID = "L0 Mould SID",
Layer0AdditionalMould = "L0 Additional Mould",
Layer1MasteringRing = "L1 Mastering Ring",
Layer1MasteringSID = "L1 Mastering SID",
Layer1ToolstampMasteringCode = "L1 Toolstamp",
Layer1MouldSID = "L1 Mould SID",
Layer1AdditionalMould = "L1 Additional Mould",
Layer2MasteringRing = "L2 Mastering Ring",
Layer2MasteringSID = "L2 Mastering SID",
Layer2ToolstampMasteringCode = "L2 Toolstamp",
Layer3MasteringRing = "L3 Mastering Ring",
Layer3MasteringSID = "L3 Mastering SID",
Layer3ToolstampMasteringCode = "L3 Toolstamp",
RingWriteOffset = "+12",
Barcode = "UPC Barcode",
EXEDateBuildDate = "19xx-xx-xx",
ErrorsCount = "0",
Comments = "Comment data line 1\r\nComment data line 2",
CommentsSpecialFields = new Dictionary<SiteCode, string>()
{
[SiteCode.ISBN] = "ISBN",
},
Contents = "Special contents 1\r\nSpecial contents 2",
ContentsSpecialFields = new Dictionary<SiteCode, string>()
{
[SiteCode.PlayableDemos] = "Game Demo 1",
},
},
VersionAndEditions = new VersionAndEditionsSection()
{
Version = "Original",
VersionDatfile = "Alt",
CommonEditions = new string[] { "Taikenban" },
OtherEditions = "Rerelease",
},
EDC = new EDCSection()
{
EDC = YesNo.Yes,
},
ParentCloneRelationship = new ParentCloneRelationshipSection()
{
ParentID = "12345",
RegionalParent = false,
},
Extras = new ExtrasSection()
{
PVD = "PVD",
DiscKey = "Disc key",
DiscID = "Disc ID",
PIC = "PIC",
Header = "Header",
BCA = "BCA",
SecuritySectorRanges = "SSv1 Ranges",
},
CopyProtection = new CopyProtectionSection()
{
AntiModchip = YesNo.Yes,
LibCrypt = YesNo.No,
LibCryptData = "LibCrypt data",
Protection = "List of protections",
SecuROMData = "SecuROM data",
},
DumpersAndStatus = new DumpersAndStatusSection()
{
Status = DumpStatus.TwoOrMoreGreen,
Dumpers = new string[] { "Dumper1", "Dumper2" },
OtherDumpers = "Dumper3",
},
TracksAndWriteOffsets = new TracksAndWriteOffsetsSection()
{
ClrMameProData = "Datfile",
Cuesheet = "Cuesheet",
CommonWriteOffsets = new int[] { 0, 12, -12 },
OtherWriteOffsets = "-2",
},
SizeAndChecksums = new SizeAndChecksumsSection()
{
Layerbreak = 0,
Layerbreak2 = 1,
Layerbreak3 = 2,
Size = 12345,
CRC32 = "CRC32",
MD5 = "MD5",
SHA1 = "SHA1",
},
DumpingInfo = new DumpingInfoSection()
{
DumpingProgram = "DiscImageCreator 20500101",
DumpingDate = DateTime.UtcNow.ToString("yyyy-MM-dd HH:mm:ss"),
DumpingParameters = "cd dvd bd sacd fd hdd",
Manufacturer = "ATAPI",
Model = "Optical Drive",
Firmware = "1.23",
ReportedDiscType = "CD-R",
},
Artifacts = new Dictionary<string, string>()
{
["Sample Artifact"] = "Sample Data",
},
};
string json = JsonConvert.SerializeObject(submissionInfo, Formatting.Indented);
Assert.NotNull(json);
}
}
}

View File

@@ -0,0 +1 @@
This isn't even JSON, I lied.

View File

@@ -0,0 +1,96 @@
{
"schema_version": 3,
"common_disc_info":
{
"d_system": "ajcd",
"d_media": "cd",
"d_title": "Test Title",
"d_title_foreign": "Foreign Title",
"d_number": "1",
"d_label": "Install",
"d_category": "Games",
"d_region": "U",
"d_languages":
[
"en",
"fr",
"es"
],
"d_languages_selection": [],
"d_serial": "Serial",
"d_ring_0_ma1": "Ringcode 0 Layer 0",
"d_ring_0_ma1_sid": "SID 0 Layer 0",
"d_ring_0_ts1": "Toolstamp 0 Layer 0",
"d_ring_0_mo1_sid": "Mould SID 0 Layer 0",
"d_ring_0_mo1": "Additional Mould 0 Layer 0",
"d_ring_0_ma2": "Ringcode 0 Layer 1",
"d_ring_0_ma2_sid": "SID 0 Layer 1",
"d_ring_0_ts2": "Toolstamp 0 Layer 1",
"d_ring_0_mo2_sid": "Mould SID 0 Layer 1",
"d_ring_0_mo2": "Additional Mould 0 Layer 1",
"d_ring_0_ma3": "Ringcode 0 Layer 2",
"d_ring_0_ma3_sid": "SID 0 Layer 2",
"d_ring_0_ts3": "Toolstamp 0 Layer 2",
"d_ring_0_ma4": "Ringcode 0 Layer 3",
"d_ring_0_ma4_sid": "SID 0 Layer 2",
"d_ring_0_ts4": "Toolstamp 0 Layer 2",
"d_ring_0_offsets": "-22",
"d_ring_0_0_value": "-21",
"d_barcode": "0 12345 67890 1",
"d_date": "1980-01-01",
"d_errors": "0",
"d_comments": "This is a comment\nwith a newline",
"d_contents": "These are contents, sorry"
},
"versions_and_editions":
{
"d_version": "1.0.0.0",
"d_version_datfile": "1.00",
"d_editions_text": "Demo"
},
"edc":
{
"d_edc": false
},
"parent_clone_relationship":
{
"d_parent_id": "12345",
"d_is_regional_parent": false
},
"extras":
{
"d_pvd": "Pretend\nthis\nis\na\nPVD",
"d_d1_key": "Disc key",
"d_d2_key": "Disc ID",
"d_pic_data": "Pretend\nthis\nis\na\nPIC",
"d_header": "Pretend\nthis\nis\na\nHeader",
"d_bca": "Pretend\nthis\nis\na\nBCA",
"d_ssranges": "Pretend\nthis\nis\na\nsecurity_range"
},
"copy_protection":
{
"d_protection_a": false,
"d_protection_1": false,
"d_libcrypt": "Definitely\nLibCrypt\nData",
"d_protection": "Super easy to find protection",
"d_securom": "Definitely\nSecuROM\nData"
},
"tracks_and_write_offsets":
{
"d_tracks": "Hash data",
"d_cue": "Real cuesheet",
"d_offset_text": "-22"
},
"size_and_checksums":
{
"d_layerbreak": 1,
"d_layerbreak_2": 2,
"d_layerbreak_3": 3,
"d_pic_identifier": "Pretend\nthis\nis\na\nPIC",
"d_size": 123456,
"d_crc32": "cbf43926",
"d_md5": "d41d8cd98f00b204e9800998ecf8427e",
"d_sha1": "da39a3ee5e6b4b0d3255bfef95601890afd80709"
}
}

View File

@@ -0,0 +1,4 @@
{
"invalid_key": "invalid_value",
"invalid_x": 12345
}

View File

@@ -0,0 +1,7 @@
{
"schema_version": 3,
"common_disc_info":
{
"d_title": "Test Title"
}
}

View File

@@ -0,0 +1,304 @@
using SabreTools.RedumpLib.Data;
using Xunit;
namespace SabreTools.RedumpLib.Test
{
// Tests for Validator.NormalizeDiscType: given a SubmissionInfo, the disc
// type should be rewritten to match the layerbreak/PIC/size evidence in
// SizeAndChecksums, or left untouched when that evidence is missing.
public class ValidatorTests
{
// Most tests here will require installing and using the Moq library
// to mock the RedumpClient type.
[Fact]
public void NormalizeDiscType_InvalidMedia_Untouched()
{
// A null media type cannot be normalized and must stay null.
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = null }
};
Validator.NormalizeDiscType(si);
Assert.Null(si.CommonDiscInfo.Media);
}
[Fact]
public void NormalizeDiscType_InvalidSizeChecksums_Untouched()
{
// Without a SizeAndChecksums section there is no evidence to act on.
DiscType expected = DiscType.CD;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = DiscType.CD },
SizeAndChecksums = null,
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Fact]
public void NormalizeDiscType_UnformattedType_Fixed()
{
// CD has no layer variants, so it passes through unchanged.
DiscType expected = DiscType.CD;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = DiscType.CD },
SizeAndChecksums = new SizeAndChecksumsSection(),
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
// A layerbreak forces any DVD variant to DVD9.
[Theory]
[InlineData(DiscType.DVD5)]
[InlineData(DiscType.DVD9)]
public void NormalizeDiscType_DVD9_Fixed(DiscType type)
{
DiscType expected = DiscType.DVD9;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection { Layerbreak = 12345 },
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
// No layerbreak forces any DVD variant to DVD5.
[Theory]
[InlineData(DiscType.DVD5)]
[InlineData(DiscType.DVD9)]
public void NormalizeDiscType_DVD5_Fixed(DiscType type)
{
DiscType expected = DiscType.DVD5;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection(),
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
// A third layerbreak forces any BD variant to BD128.
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD128_Fixed(DiscType type)
{
DiscType expected = DiscType.BD128;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection { Layerbreak3 = 12345 },
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
// A second layerbreak forces any BD variant to BD100.
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD100_Fixed(DiscType type)
{
DiscType expected = DiscType.BD100;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection { Layerbreak2 = 12345 },
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
// One layerbreak plus the Ultra PIC identifier forces BD66.
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD66PIC_Fixed(DiscType type)
{
DiscType expected = DiscType.BD66;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection
{
Layerbreak = 12345,
PICIdentifier = Models.PIC.Constants.DiscTypeIdentifierROMUltra,
},
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
// One layerbreak plus an over-BD50 reported size also forces BD66.
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD66Size_Fixed(DiscType type)
{
DiscType expected = DiscType.BD66;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection
{
Layerbreak = 12345,
Size = 50_050_629_633,
},
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
// One layerbreak with no PIC or size hints forces BD50.
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD50_Fixed(DiscType type)
{
DiscType expected = DiscType.BD50;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection { Layerbreak = 12345 },
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
// The Ultra PIC identifier with no layerbreak forces BD33.
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD33PIC_Fixed(DiscType type)
{
DiscType expected = DiscType.BD33;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection
{
PICIdentifier = Models.PIC.Constants.DiscTypeIdentifierROMUltra,
},
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
// An over-BD25 reported size with no layerbreak also forces BD33.
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD33Size_Fixed(DiscType type)
{
DiscType expected = DiscType.BD33;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection
{
Size = 25_025_314_817,
},
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
// No layerbreaks, PIC, or size hints: all BD variants default to BD25.
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD25_Fixed(DiscType type)
{
DiscType expected = DiscType.BD25;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection(),
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
// A layerbreak forces any UMD variant to UMDDL.
[Theory]
[InlineData(DiscType.UMDSL)]
[InlineData(DiscType.UMDDL)]
public void NormalizeDiscType_UMDDL_Fixed(DiscType type)
{
DiscType expected = DiscType.UMDDL;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection { Layerbreak = 12345 },
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
// No layerbreak forces any UMD variant to UMDSL.
[Theory]
[InlineData(DiscType.UMDSL)]
[InlineData(DiscType.UMDDL)]
public void NormalizeDiscType_UMDSL_Fixed(DiscType type)
{
DiscType expected = DiscType.UMDSL;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection(),
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
}
}

View File

@@ -5,6 +5,10 @@ VisualStudioVersion = 17.0.31903.59
MinimumVisualStudioVersion = 10.0.40219.1
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SabreTools.RedumpLib", "SabreTools.RedumpLib\SabreTools.RedumpLib.csproj", "{235D3A36-CA69-4348-9EC4-649B27ACFBB8}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SabreTools.RedumpLib.Test", "SabreTools.RedumpLib.Test\SabreTools.RedumpLib.Test.csproj", "{63519DEA-0C3D-4F0E-95EB-E9B6E1D55378}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "RedumpTool", "RedumpTool\RedumpTool.csproj", "{556F7AA0-00D7-4D5B-96EB-D436FEFD9810}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -18,5 +22,13 @@ Global
{235D3A36-CA69-4348-9EC4-649B27ACFBB8}.Debug|Any CPU.Build.0 = Debug|Any CPU
{235D3A36-CA69-4348-9EC4-649B27ACFBB8}.Release|Any CPU.ActiveCfg = Release|Any CPU
{235D3A36-CA69-4348-9EC4-649B27ACFBB8}.Release|Any CPU.Build.0 = Release|Any CPU
{63519DEA-0C3D-4F0E-95EB-E9B6E1D55378}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{63519DEA-0C3D-4F0E-95EB-E9B6E1D55378}.Debug|Any CPU.Build.0 = Debug|Any CPU
{63519DEA-0C3D-4F0E-95EB-E9B6E1D55378}.Release|Any CPU.ActiveCfg = Release|Any CPU
{63519DEA-0C3D-4F0E-95EB-E9B6E1D55378}.Release|Any CPU.Build.0 = Release|Any CPU
{556F7AA0-00D7-4D5B-96EB-D436FEFD9810}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{556F7AA0-00D7-4D5B-96EB-D436FEFD9810}.Debug|Any CPU.Build.0 = Debug|Any CPU
{556F7AA0-00D7-4D5B-96EB-D436FEFD9810}.Release|Any CPU.ActiveCfg = Release|Any CPU
{556F7AA0-00D7-4D5B-96EB-D436FEFD9810}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
EndGlobal

View File

@@ -1,5 +1,4 @@
using System;
using System.Linq;
namespace SabreTools.RedumpLib.Attributes
{
@@ -25,24 +24,24 @@ namespace SabreTools.RedumpLib.Attributes
string? valueStr = value?.ToString();
if (string.IsNullOrEmpty(valueStr))
return null;
// Get the member info array
var memberInfos = enumType?.GetMember(valueStr);
if (memberInfos == null)
return null;
// Get the enum value info from the array, if possible
var enumValueMemberInfo = memberInfos.FirstOrDefault(m => m.DeclaringType == enumType);
var enumValueMemberInfo = Array.Find(memberInfos, m => m.DeclaringType == enumType);
if (enumValueMemberInfo == null)
return null;
// Try to get the relevant attribute
var attributes = enumValueMemberInfo.GetCustomAttributes(typeof(HumanReadableAttribute), true);
if (attributes == null)
if (attributes == null || attributes.Length == 0)
return null;
// Return the first attribute, if possible
return attributes.FirstOrDefault() as HumanReadableAttribute;
return attributes[0] as HumanReadableAttribute;
}
}
}

View File

@@ -1,8 +1,9 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
#if NET40_OR_GREATER || NETCOREAPP
using System.Net;
#endif
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
@@ -269,22 +270,25 @@ namespace SabreTools.RedumpLib
if (title != null && firstParenLocation >= 0)
{
info.CommonDiscInfo!.Title = title.Substring(0, firstParenLocation);
var subMatches = Constants.DiscNumberLetterRegex.Matches(title);
foreach (Match subMatch in subMatches.Cast<Match>())
var submatches = Constants.DiscNumberLetterRegex.Matches(title);
foreach (Match? submatch in submatches)
{
var subMatchValue = subMatch.Groups[1].Value;
if (submatch == null)
continue;
var submatchValue = submatch.Groups[1].Value;
// Disc number or letter
if (subMatchValue.StartsWith("Disc"))
info.CommonDiscInfo.DiscNumberLetter = subMatchValue.Remove(0, "Disc ".Length);
if (submatchValue.StartsWith("Disc"))
info.CommonDiscInfo.DiscNumberLetter = submatchValue.Remove(0, "Disc ".Length);
// Issue number
else if (subMatchValue.All(c => char.IsNumber(c)))
info.CommonDiscInfo.Title += $" ({subMatchValue})";
else if (ulong.TryParse(submatchValue, out _))
info.CommonDiscInfo.Title += $" ({submatchValue})";
// Disc title
else
info.CommonDiscInfo.DiscTitle = subMatchValue;
info.CommonDiscInfo.DiscTitle = submatchValue;
}
}
// Otherwise, leave the title as-is
@@ -298,15 +302,13 @@ namespace SabreTools.RedumpLib
match = Constants.ForeignTitleRegex.Match(discData);
if (match.Success)
info.CommonDiscInfo!.ForeignTitleNonLatin = WebUtility.HtmlDecode(match.Groups[1].Value);
else
info.CommonDiscInfo!.ForeignTitleNonLatin = null;
// Category
match = Constants.CategoryRegex.Match(discData);
if (match.Success)
info.CommonDiscInfo.Category = Extensions.ToDiscCategory(match.Groups[1].Value);
info.CommonDiscInfo!.Category = Extensions.ToDiscCategory(match.Groups[1].Value);
else
info.CommonDiscInfo.Category = DiscCategory.Games;
info.CommonDiscInfo!.Category = DiscCategory.Games;
// Region
if (info.CommonDiscInfo.Region == null)
@@ -321,12 +323,17 @@ namespace SabreTools.RedumpLib
if (matches.Count > 0)
{
var tempLanguages = new List<Language?>();
foreach (Match submatch in matches.Cast<Match>())
foreach (Match? submatch in matches)
{
tempLanguages.Add(Extensions.ToLanguage(submatch.Groups[1].Value));
if (submatch == null)
continue;
var language = Extensions.ToLanguage(submatch.Groups[1].Value);
if (language != null)
tempLanguages.Add(language);
}
info.CommonDiscInfo.Languages = tempLanguages.Where(l => l != null).ToArray();
info.CommonDiscInfo.Languages = [.. tempLanguages];
}
// Serial
@@ -366,8 +373,11 @@ namespace SabreTools.RedumpLib
tempDumpers.Add(dumper);
}
foreach (Match submatch in matches.Cast<Match>())
foreach (Match? submatch in matches)
{
if (submatch == null)
continue;
string? dumper = WebUtility.HtmlDecode(submatch.Groups[1].Value);
if (dumper != null)
tempDumpers.Add(dumper);
@@ -458,6 +468,7 @@ namespace SabreTools.RedumpLib
// Audio CD
case SiteCode.RingNonZeroDataStart:
case SiteCode.RingPerfectAudioOffset:
case SiteCode.UniversalHash:
continue;
@@ -477,6 +488,10 @@ namespace SabreTools.RedumpLib
// Nintendo Gamecube
case SiteCode.InternalName:
continue;
// Protection
case SiteCode.Protection:
continue;
}
// If we don't already have this site code, add it to the dictionary
@@ -643,6 +658,7 @@ namespace SabreTools.RedumpLib
info.TracksAndWriteOffsets ??= new TracksAndWriteOffsetsSection();
info.SizeAndChecksums ??= new SizeAndChecksumsSection();
info.DumpingInfo ??= new DumpingInfoSection();
info.Artifacts ??= [];
// Ensure special dictionaries
info.CommonDiscInfo.CommentsSpecialFields ??= [];
@@ -656,11 +672,11 @@ namespace SabreTools.RedumpLib
/// </summary>
/// <param name="info">Existing submission information</param>
/// <param name="seed">User-supplied submission information</param>
public static void InjectSubmissionInformation(SubmissionInfo? info, SubmissionInfo? seed)
public static SubmissionInfo? InjectSubmissionInformation(SubmissionInfo? info, SubmissionInfo? seed)
{
// If we have any invalid info
if (seed == null)
return;
return info;
// Ensure that required sections exist
info = EnsureAllSections(info);
@@ -718,6 +734,8 @@ namespace SabreTools.RedumpLib
// Info that only overwrites if supplied
if (!string.IsNullOrEmpty(seed.CopyProtection.Protection)) info.CopyProtection.Protection = seed.CopyProtection.Protection;
}
return info;
}
#endregion
@@ -729,9 +747,10 @@ namespace SabreTools.RedumpLib
/// </summary>
/// <param name="text">Text block to process</param>
/// <returns>Processed text block, if possible</returns>
private static string ReplaceHtmlWithSiteCodes(this string text)
internal static string ReplaceHtmlWithSiteCodes(this string text)
{
if (string.IsNullOrEmpty(text))
// Empty strings are ignored
if (text.Length == 0)
return text;
foreach (SiteCode? siteCode in Enum.GetValues(typeof(SiteCode)))

View File

@@ -10,11 +10,21 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class DiscCategoryConverter : JsonConverter<DiscCategory?>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override DiscCategory? ReadJson(JsonReader reader, Type objectType, DiscCategory? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue;
// Read the value
string? value = reader.Value as string;
if (value == null)
return null;
// Try to parse the value
return Data.Extensions.ToDiscCategory(value);
}
public override void WriteJson(JsonWriter writer, DiscCategory? value, JsonSerializer serializer)

View File

@@ -10,11 +10,21 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class DiscTypeConverter : JsonConverter<DiscType?>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override DiscType? ReadJson(JsonReader reader, Type objectType, DiscType? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue;
// Read the value
string? value = reader.Value as string;
if (value == null)
return null;
// Try to parse the value
return Data.Extensions.ToDiscType(value);
}
public override void WriteJson(JsonWriter writer, DiscType? value, JsonSerializer serializer)

View File

@@ -1,4 +1,5 @@
using System;
using System.Collections.Generic;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using SabreTools.RedumpLib.Data;
@@ -10,11 +11,31 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class LanguageConverter : JsonConverter<Language?[]>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override Language?[] ReadJson(JsonReader reader, Type objectType, Language?[]? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue ?? [];
// Get the current depth for checking
int currentDepth = reader.Depth;
// Read the array while it exists
List<Language> languages = [];
while (reader.Read() && reader.Depth > currentDepth)
{
string? value = reader.Value as string;
if (value == null)
continue;
Language? lang = Data.Extensions.ToLanguage(value);
if (lang != null)
languages.Add(lang.Value);
}
return [.. languages];
}
public override void WriteJson(JsonWriter writer, Language?[]? value, JsonSerializer serializer)

View File

@@ -1,4 +1,5 @@
using System;
using System.Collections.Generic;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using SabreTools.RedumpLib.Data;
@@ -10,11 +11,31 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class LanguageSelectionConverter : JsonConverter<LanguageSelection?[]>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override LanguageSelection?[] ReadJson(JsonReader reader, Type objectType, LanguageSelection?[]? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue ?? [];
// Get the current depth for checking
int currentDepth = reader.Depth;
// Read the array while it exists
List<LanguageSelection> selections = [];
while (reader.Read() && reader.Depth > currentDepth)
{
string? value = reader.Value as string;
if (value == null)
continue;
LanguageSelection? sel = Data.Extensions.ToLanguageSelection(value);
if (sel != null)
selections.Add(sel.Value);
}
return [.. selections];
}
public override void WriteJson(JsonWriter writer, LanguageSelection?[]? value, JsonSerializer serializer)

View File

@@ -10,11 +10,21 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class RegionConverter : JsonConverter<Region?>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override Region? ReadJson(JsonReader reader, Type objectType, Region? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue;
// Read the value
string? value = reader.Value as string;
if (value == null)
return null;
// Try to parse the value
return Data.Extensions.ToRegion(value);
}
public override void WriteJson(JsonWriter writer, Region? value, JsonSerializer serializer)

View File

@@ -10,11 +10,21 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class SystemConverter : JsonConverter<RedumpSystem?>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override RedumpSystem? ReadJson(JsonReader reader, Type objectType, RedumpSystem? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue;
// Read the value
string? value = reader.Value as string;
if (value == null)
return null;
// Try to parse the value
return Data.Extensions.ToRedumpSystem(value);
}
public override void WriteJson(JsonWriter writer, RedumpSystem? value, JsonSerializer serializer)

View File

@@ -10,11 +10,21 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class YesNoConverter : JsonConverter<YesNo?>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override YesNo? ReadJson(JsonReader reader, Type objectType, YesNo? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue;
// Read the value
if (reader.Value is bool bVal)
return Data.Extensions.ToYesNo(bVal);
else if (reader.Value is string sVal)
return Data.Extensions.ToYesNo(sVal);
return null;
}
public override void WriteJson(JsonWriter writer, YesNo? value, JsonSerializer serializer)

View File

@@ -4,18 +4,17 @@ namespace SabreTools.RedumpLib.Data
{
public static class Constants
{
// TODO: Add RegexOptions.Compiled
#region Regular Expressions
/// <summary>
/// Regex matching the added field on a disc page
/// </summary>
public static Regex AddedRegex = new(@"<tr><th>Added</th><td>(.*?)</td></tr>");
public static Regex AddedRegex = new(@"<tr><th>Added</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the barcode field on a disc page
/// </summary>
public static Regex BarcodeRegex = new(@"<tr><th>Barcode</th></tr><tr><td>(.*?)</td></tr>");
public static Regex BarcodeRegex = new(@"<tr><th>Barcode</th></tr><tr><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the BCA field on a disc page
@@ -25,87 +24,87 @@ namespace SabreTools.RedumpLib.Data
+ "<tr><td>(?<row1number>.*?)</td><td>(?<row1contents>.*?)</td><td>(?<row1ascii>.*?)</td></tr>"
+ "<tr><td>(?<row2number>.*?)</td><td>(?<row2contents>.*?)</td><td>(?<row2ascii>.*?)</td></tr>"
+ "<tr><td>(?<row3number>.*?)</td><td>(?<row3contents>.*?)</td><td>(?<row3ascii>.*?)</td></tr>"
+ "<tr><td>(?<row4number>.*?)</td><td>(?<row4contents>.*?)</td><td>(?<row4ascii>.*?)</td></tr>", RegexOptions.Singleline);
+ "<tr><td>(?<row4number>.*?)</td><td>(?<row4contents>.*?)</td><td>(?<row4ascii>.*?)</td></tr>", RegexOptions.Compiled | RegexOptions.Singleline);
/// <summary>
/// Regex matching the category field on a disc page
/// </summary>
public static Regex CategoryRegex = new(@"<tr><th>Category</th><td>(.*?)</td></tr>");
public static Regex CategoryRegex = new(@"<tr><th>Category</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the comments field on a disc page
/// </summary>
public static Regex CommentsRegex = new(@"<tr><th>Comments</th></tr><tr><td>(.*?)</td></tr>", RegexOptions.Singleline);
public static Regex CommentsRegex = new(@"<tr><th>Comments</th></tr><tr><td>(.*?)</td></tr>", RegexOptions.Compiled | RegexOptions.Singleline);
/// <summary>
/// Regex matching the contents field on a disc page
/// </summary>
public static Regex ContentsRegex = new(@"<tr><th>Contents</th></tr><tr .*?><td>(.*?)</td></tr>", RegexOptions.Singleline);
public static Regex ContentsRegex = new(@"<tr><th>Contents</th></tr><tr .*?><td>(.*?)</td></tr>", RegexOptions.Compiled | RegexOptions.Singleline);
/// <summary>
/// Regex matching individual disc links on a results page
/// </summary>
public static Regex DiscRegex = new(@"<a href=""/disc/(\d+)/"">");
public static Regex DiscRegex = new(@"<a href=""/disc/(\d+)/"">", RegexOptions.Compiled);
/// <summary>
/// Regex matching the disc number or letter field on a disc page
/// </summary>
public static Regex DiscNumberLetterRegex = new(@"\((.*?)\)");
public static Regex DiscNumberLetterRegex = new(@"\((.*?)\)", RegexOptions.Compiled);
/// <summary>
/// Regex matching the dumpers on a disc page
/// </summary>
public static Regex DumpersRegex = new(@"<a href=""/discs/dumper/(.*?)/"">");
public static Regex DumpersRegex = new(@"<a href=""/discs/dumper/(.*?)/"">", RegexOptions.Compiled);
/// <summary>
/// Regex matching the edition field on a disc page
/// </summary>
public static Regex EditionRegex = new(@"<tr><th>Edition</th><td>(.*?)</td></tr>");
public static Regex EditionRegex = new(@"<tr><th>Edition</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the error count field on a disc page
/// </summary>
public static Regex ErrorCountRegex = new(@"<tr><th>Errors count</th><td>(.*?)</td></tr>");
public static Regex ErrorCountRegex = new(@"<tr><th>Errors count</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the foreign title field on a disc page
/// </summary>
public static Regex ForeignTitleRegex = new(@"<h2>(.*?)</h2>");
public static Regex ForeignTitleRegex = new(@"<h2>(.*?)</h2>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the "full match" ID list from a WIP disc page
/// </summary>
public static Regex FullMatchRegex = new(@"<td class=""static"">full match ids: (.*?)</td>");
public static Regex FullMatchRegex = new(@"<td class=""static"">full match ids: (.*?)</td>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the languages field on a disc page
/// </summary>
public static Regex LanguagesRegex = new(@"<img src=""/images/languages/(.*?)\.png"" alt="".*?"" title="".*?"" />\s*");
public static Regex LanguagesRegex = new(@"<img src=""/images/languages/(.*?)\.png"" alt="".*?"" title="".*?"" />\s*", RegexOptions.Compiled);
/// <summary>
/// Regex matching the last modified field on a disc page
/// </summary>
public static Regex LastModifiedRegex = new(@"<tr><th>Last modified</th><td>(.*?)</td></tr>");
public static Regex LastModifiedRegex = new(@"<tr><th>Last modified</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the media field on a disc page
/// </summary>
public static Regex MediaRegex = new(@"<tr><th>Media</th><td>(.*?)</td></tr>");
public static Regex MediaRegex = new(@"<tr><th>Media</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching individual WIP disc links on a results page
/// </summary>
public static Regex NewDiscRegex = new(@"<a (style=.*)?href=""/newdisc/(\d+)/"">");
public static Regex NewDiscRegex = new(@"<a (style=.*)?href=""/newdisc/(\d+)/"">", RegexOptions.Compiled);
/// <summary>
/// Regex matching the "partial match" ID list from a WIP disc page
/// </summary>
public static Regex PartialMatchRegex = new(@"<td class=""static"">partial match ids: (.*?)</td>");
public static Regex PartialMatchRegex = new(@"<td class=""static"">partial match ids: (.*?)</td>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the disc key on a PS3 disc page
/// </summary>
public static Regex PS3DiscKey = new(@"<th>Disc Key</th><th>Disc ID</th><th>Permanent Information & Control \(PIC\)</th></tr><tr><td>(.*?)</td><td>");
public static Regex PS3DiscKey = new(@"<th>Disc Key</th><th>Disc ID</th><th>Permanent Information & Control \(PIC\)</th></tr><tr><td>(.*?)</td><td>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the PVD field on a disc page
@@ -115,62 +114,62 @@ namespace SabreTools.RedumpLib.Data
+ @"<tr><td>Creation</td><td>(?<creationbytes>.*?)</td><td>(?<creationdate>.*?)</td><td>(?<creationtime>.*?)</td><td>(?<creationtimezone>.*?)</td></tr>"
+ @"<tr><td>Modification</td><td>(?<modificationbytes>.*?)</td><td>(?<modificationdate>.*?)</td><td>(?<modificationtime>.*?)</td><td>(?<modificationtimezone>.*?)</td></tr>"
+ @"<tr><td>Expiration</td><td>(?<expirationbytes>.*?)</td><td>(?<expirationdate>.*?)</td><td>(?<expirationtime>.*?)</td><td>(?<expirationtimezone>.*?)</td></tr>"
+ @"<tr><td>Effective</td><td>(?<effectivebytes>.*?)</td><td>(?<effectivedate>.*?)</td><td>(?<effectivetime>.*?)</td><td>(?<effectivetimezone>.*?)</td></tr>", RegexOptions.Singleline);
+ @"<tr><td>Effective</td><td>(?<effectivebytes>.*?)</td><td>(?<effectivedate>.*?)</td><td>(?<effectivetime>.*?)</td><td>(?<effectivetimezone>.*?)</td></tr>", RegexOptions.Compiled | RegexOptions.Singleline);
/// <summary>
/// Regex matching the region field on a disc page
/// </summary>
public static Regex RegionRegex = new(@"<tr><th>Region</th><td><a href=""/discs/region/(.*?)/"">");
public static Regex RegionRegex = new(@"<tr><th>Region</th><td><a href=""/discs/region/(.*?)/"">", RegexOptions.Compiled);
/// <summary>
/// Regex matching a double-layer disc ringcode information
/// </summary>
public static Regex RingCodeDoubleRegex = new(@"", RegexOptions.Singleline); // Varies based on available fields, like Addtional Mould
public static Regex RingCodeDoubleRegex = new(@"", RegexOptions.Compiled | RegexOptions.Singleline); // Varies based on available fields, like Addtional Mould
/// <summary>
/// Regex matching a single-layer disc ringcode information
/// </summary>
public static Regex RingCodeSingleRegex = new(@"", RegexOptions.Singleline); // Varies based on available fields, like Addtional Mould
public static Regex RingCodeSingleRegex = new(@"", RegexOptions.Compiled | RegexOptions.Singleline); // Varies based on available fields, like Addtional Mould
/// <summary>
/// Regex matching the serial field on a disc page
/// </summary>
public static Regex SerialRegex = new(@"<tr><th>Serial</th><td>(.*?)</td></tr>");
public static Regex SerialRegex = new(@"<tr><th>Serial</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the system field on a disc page
/// </summary>
public static Regex SystemRegex = new(@"<tr><th>System</th><td><a href=""/discs/system/(.*?)/"">");
public static Regex SystemRegex = new(@"<tr><th>System</th><td><a href=""/discs/system/(.*?)/"">", RegexOptions.Compiled);
/// <summary>
/// Regex matching the title field on a disc page
/// </summary>
public static Regex TitleRegex = new(@"<h1>(.*?)</h1>");
public static Regex TitleRegex = new(@"<h1>(.*?)</h1>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the current nonce token for login
/// </summary>
public static Regex TokenRegex = new(@"<input type=""hidden"" name=""csrf_token"" value=""(.*?)"" />");
public static Regex TokenRegex = new(@"<input type=""hidden"" name=""csrf_token"" value=""(.*?)"" />", RegexOptions.Compiled);
/// <summary>
/// Regex matching a single track on a disc page
/// </summary>
public static Regex TrackRegex = new(@"<tr><td>(?<number>.*?)</td><td>(?<type>.*?)</td><td>(?<pregap>.*?)</td><td>(?<length>.*?)</td><td>(?<sectors>.*?)</td><td>(?<size>.*?)</td><td>(?<crc32>.*?)</td><td>(?<md5>.*?)</td><td>(?<sha1>.*?)</td></tr>", RegexOptions.Singleline);
public static Regex TrackRegex = new(@"<tr><td>(?<number>.*?)</td><td>(?<type>.*?)</td><td>(?<pregap>.*?)</td><td>(?<length>.*?)</td><td>(?<sectors>.*?)</td><td>(?<size>.*?)</td><td>(?<crc32>.*?)</td><td>(?<md5>.*?)</td><td>(?<sha1>.*?)</td></tr>", RegexOptions.Compiled | RegexOptions.Singleline);
/// <summary>
/// Regex matching the track count on a disc page
/// </summary>
public static Regex TrackCountRegex = new(@"<tr><th>Number of tracks</th><td>(.*?)</td></tr>");
public static Regex TrackCountRegex = new(@"<tr><th>Number of tracks</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the version field on a disc page
/// </summary>
public static Regex VersionRegex = new(@"<tr><th>Version</th><td>(.*?)</td></tr>");
public static Regex VersionRegex = new(@"<tr><th>Version</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the write offset field on a disc page
/// </summary>
public static Regex WriteOffsetRegex = new(@"<tr><th>Write offset</th><td>(.*?)</td></tr>");
public static Regex WriteOffsetRegex = new(@"<tr><th>Write offset</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
#endregion

View File

@@ -281,7 +281,7 @@ namespace SabreTools.RedumpLib.Data
[Language(LongName = "Bini; Edo", ThreeLetterCode = "bin")]
Bini,
[Language(LongName = "Bislama", TwoLetterCode = "bla", ThreeLetterCode = "bis")]
[Language(LongName = "Bislama", TwoLetterCode = "bi", ThreeLetterCode = "bis")]
Bislama,
// Blin; Bilin
@@ -2488,8 +2488,105 @@ namespace SabreTools.RedumpLib.Data
/// </remarks>
public enum Region
{
// TODO: Should "regions" and multi-country sets be phased out?
// TODO: Should "regions" be moved to the end?
#region Aggregates - Redump Only
[HumanReadable(LongName = "Asia", ShortName = "A")]
Asia,
[HumanReadable(LongName = "Asia, Europe", ShortName = "A,E")]
AsiaEurope,
[HumanReadable(LongName = "Asia, USA", ShortName = "A,U")]
AsiaUSA,
[HumanReadable(LongName = "Australia, Germany", ShortName = "Au,G")]
AustraliaGermany,
[HumanReadable(LongName = "Australia, New Zealand", ShortName = "Au,Nz")]
AustraliaNewZealand,
[HumanReadable(LongName = "Austria, Switzerland", ShortName = "At,Ch")]
AustriaSwitzerland,
[HumanReadable(LongName = "Belgium, Netherlands", ShortName = "Be,N")]
BelgiumNetherlands,
[HumanReadable(LongName = "Europe", ShortName = "E")]
Europe,
[HumanReadable(LongName = "Europe, Asia", ShortName = "E,A")]
EuropeAsia,
[HumanReadable(LongName = "Europe, Australia", ShortName = "E,Au")]
EuropeAustralia,
[HumanReadable(LongName = "Europe, Canada", ShortName = "E,Ca")]
EuropeCanada,
[HumanReadable(LongName = "Europe, Germany", ShortName = "E,G")]
EuropeGermany,
[HumanReadable(LongName = "Export", ShortName = "Ex")]
Export,
[HumanReadable(LongName = "France, Spain", ShortName = "F,S")]
FranceSpain,
[HumanReadable(LongName = "Greater China", ShortName = "GC")]
GreaterChina,
[HumanReadable(LongName = "Japan, Asia", ShortName = "J,A")]
JapanAsia,
[HumanReadable(LongName = "Japan, Europe", ShortName = "J,E")]
JapanEurope,
[HumanReadable(LongName = "Japan, Korea", ShortName = "J,K")]
JapanKorea,
[HumanReadable(LongName = "Japan, USA", ShortName = "J,U")]
JapanUSA,
[HumanReadable(LongName = "Latin America", ShortName = "LAm")]
LatinAmerica,
[HumanReadable(LongName = "Scandinavia", ShortName = "Sca")]
Scandinavia,
[HumanReadable(LongName = "Spain, Portugal", ShortName = "S,Pt")]
SpainPortugal,
[HumanReadable(LongName = "UK, Australia", ShortName = "Uk,Au")]
UKAustralia,
[HumanReadable(LongName = "USA, Asia", ShortName = "U,A")]
USAAsia,
[HumanReadable(LongName = "USA, Australia", ShortName = "U,Au")]
USAAustralia,
[HumanReadable(LongName = "USA, Brazil", ShortName = "U,B")]
USABrazil,
[HumanReadable(LongName = "USA, Canada", ShortName = "U,Ca")]
USACanada,
[HumanReadable(LongName = "USA, Europe", ShortName = "U,E")]
USAEurope,
[HumanReadable(LongName = "USA, Germany", ShortName = "U,G")]
USAGermany,
[HumanReadable(LongName = "USA, Japan", ShortName = "U,J")]
USAJapan,
[HumanReadable(LongName = "USA, Korea", ShortName = "U,K")]
USAKorea,
[HumanReadable(LongName = "World", ShortName = "W")]
World,
#endregion
#region A
@@ -2535,30 +2632,12 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Ascension Island", ShortName = "Ac")]
AscensionIsland,
[HumanReadable(LongName = "Asia", ShortName = "A")]
Asia,
[HumanReadable(LongName = "Asia, Europe", ShortName = "A,E")]
AsiaEurope,
[HumanReadable(LongName = "Asia, USA", ShortName = "A,U")]
AsiaUSA,
[HumanReadable(LongName = "Australia", ShortName = "Au")]
Australia,
[HumanReadable(LongName = "Australia, Germany", ShortName = "Au,G")]
AustraliaGermany,
[HumanReadable(LongName = "Australia, New Zealand", ShortName = "Au,Nz")]
AustraliaNewZealand,
[HumanReadable(LongName = "Austria", ShortName = "At")]
Austria,
[HumanReadable(LongName = "Austria, Switzerland", ShortName = "At,Ch")]
AustriaSwitzerland,
[HumanReadable(LongName = "Azerbaijan", ShortName = "Az")]
Azerbaijan,
@@ -2584,9 +2663,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Belgium", ShortName = "Be")]
Belgium,
[HumanReadable(LongName = "Belgium, Netherlands", ShortName = "Be,N")]
BelgiumNetherlands,
[HumanReadable(LongName = "Belize", ShortName = "Bz")]
Belize,
@@ -2767,21 +2843,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Ethiopia", ShortName = "Et")]
Ethiopia,
[HumanReadable(LongName = "Europe", ShortName = "E")]
Europe,
[HumanReadable(LongName = "Europe, Asia", ShortName = "E,A")]
EuropeAsia,
[HumanReadable(LongName = "Europe, Australia", ShortName = "E,Au")]
EuropeAustralia,
[HumanReadable(LongName = "Europe, Canada", ShortName = "E,Ca")]
EuropeCanada,
[HumanReadable(LongName = "Europe, Germany", ShortName = "E,G")]
EuropeGermany,
// Commented out to avoid confusion
//[HumanReadable(LongName = "European Union", ShortName = "Eu")]
//EuropeanUnion,
@@ -2790,9 +2851,6 @@ namespace SabreTools.RedumpLib.Data
//[HumanReadable(LongName = "Eurozone", ShortName = "Ez")]
//Eurozone,
[HumanReadable(LongName = "Export", ShortName = "Ex")]
Export,
#endregion
#region F
@@ -2821,9 +2879,6 @@ namespace SabreTools.RedumpLib.Data
//[HumanReadable(LongName = "France, Metropolitan", ShortName = "Fx")]
//FranceMetropolitan,
[HumanReadable(LongName = "France, Spain", ShortName = "F,S")]
FranceSpain,
[HumanReadable(LongName = "French Guiana", ShortName = "Gf")]
FrenchGuiana,
@@ -2856,9 +2911,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Gibraltar", ShortName = "Gi")]
Gibraltar,
[HumanReadable(LongName = "Greater China", ShortName = "GC")]
GreaterChina,
[HumanReadable(LongName = "Greece", ShortName = "Gr")]
Greece,
@@ -2958,18 +3010,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Japan", ShortName = "J")]
Japan,
[HumanReadable(LongName = "Japan, Asia", ShortName = "J,A")]
JapanAsia,
[HumanReadable(LongName = "Japan, Europe", ShortName = "J,E")]
JapanEurope,
[HumanReadable(LongName = "Japan, Korea", ShortName = "J,K")]
JapanKorea,
[HumanReadable(LongName = "Japan, USA", ShortName = "J,U")]
JapanUSA,
[HumanReadable(LongName = "Jersey", ShortName = "Je")]
Jersey,
@@ -3009,9 +3049,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "(Laos) Lao People's Democratic Republic", ShortName = "La")]
Laos,
[HumanReadable(LongName = "Latin America", ShortName = "LAm")]
LatinAmerica,
[HumanReadable(LongName = "Latvia", ShortName = "Lv")]
Latvia,
@@ -3264,9 +3301,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Saudi Arabia", ShortName = "Sa")]
SaudiArabia,
[HumanReadable(LongName = "Scandinavia", ShortName = "Sca")]
Scandinavia,
[HumanReadable(LongName = "Senegal", ShortName = "Sn")]
Senegal,
@@ -3310,9 +3344,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Spain", ShortName = "S")]
Spain,
[HumanReadable(LongName = "Spain, Portugal", ShortName = "S,Pt")]
SpainPortugal,
[HumanReadable(LongName = "Sri Lanka", ShortName = "Lk")]
SriLanka,
@@ -3397,9 +3428,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "UK", ShortName = "Uk")]
UnitedKingdom,
[HumanReadable(LongName = "UK, Australia", ShortName = "Uk,Au")]
UKAustralia,
[HumanReadable(LongName = "Ukraine", ShortName = "Ue")]
Ukraine,
@@ -3424,30 +3452,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "USA", ShortName = "U")]
UnitedStatesOfAmerica,
[HumanReadable(LongName = "USA, Asia", ShortName = "U,A")]
USAAsia,
[HumanReadable(LongName = "USA, Australia", ShortName = "U,Au")]
USAAustralia,
[HumanReadable(LongName = "USA, Brazil", ShortName = "U,B")]
USABrazil,
[HumanReadable(LongName = "USA, Canada", ShortName = "U,Ca")]
USACanada,
[HumanReadable(LongName = "USA, Europe", ShortName = "U,E")]
USAEurope,
[HumanReadable(LongName = "USA, Germany", ShortName = "U,G")]
USAGermany,
[HumanReadable(LongName = "USA, Japan", ShortName = "U,J")]
USAJapan,
[HumanReadable(LongName = "USA, Korea", ShortName = "U,K")]
USAKorea,
[HumanReadable(LongName = "USSR", ShortName = "Su")]
USSR,
@@ -3483,9 +3487,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Western Sahara", ShortName = "Eh")]
WesternSahara,
[HumanReadable(LongName = "World", ShortName = "W")]
World,
#endregion
#region Y
@@ -3526,7 +3527,7 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:ALTF]", LongName = "<b>Alternative Foreign Title</b>:")]
AlternativeForeignTitle,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Applications</b>:", LongName = "<b>Applications</b>:")]
Applications,
@@ -3536,30 +3537,30 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:BBFC]", LongName = "<b>BBFC Reg. No.</b>:")]
BBFCRegistrationNumber,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Bethesda ID</b>:", LongName = "<b>Bethesda ID</b>:")]
BethesdaID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>CD Projekt ID</b>:", LongName = "<b>CD Projekt ID</b>:")]
CDProjektID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Compatible OS</b>:", LongName = "<b>Compatible OS</b>:")]
CompatibleOS,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Disc Hologram ID</b>:", LongName = "<b>Disc Hologram ID</b>:")]
DiscHologramID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>DMI</b>:", LongName = "<b>DMI</b>:")]
DMIHash,
[HumanReadable(ShortName = "[T:DNAS]", LongName = "<b>DNAS Disc ID</b>:")]
DNASDiscID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Eidos ID</b>:", LongName = "<b>Eidos ID</b>:")]
EidosID,
@@ -3569,7 +3570,7 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:X]", LongName = "<b>Extras</b>:")]
Extras,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Filename</b>:", LongName = "<b>Filename</b>:")]
Filename,
@@ -3579,7 +3580,7 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:GF]", LongName = "<b>Game Footage</b>:")]
GameFootage,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Games</b>:", LongName = "<b>Games</b>:")]
Games,
@@ -3589,7 +3590,7 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:GTID]", LongName = "<b>GT Interactive ID</b>:")]
GTInteractiveID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Internal Name</b>:", LongName = "<b>Internal Name</b>:")]
InternalName,
@@ -3617,11 +3618,11 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:LAID]", LongName = "<b>Lucas Arts ID</b>:")]
LucasArtsID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Microsoft ID</b>:", LongName = "<b>Microsoft ID</b>:")]
MicrosoftID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Multisession</b>:", LongName = "<b>Multisession</b>:")]
Multisession,
@@ -3643,7 +3644,7 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:P]", LongName = "<b>Patches</b>:")]
Patches,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>PFI</b>:", LongName = "<b>PFI</b>:")]
PFIHash,
@@ -3659,10 +3660,18 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:PPN]", LongName = "<b>PPN</b>:")]
PPN,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag for some systems yet
[HumanReadable(ShortName = "<b>Protection</b>:", LongName = "<b>Protection</b>:")]
Protection,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Ring non-zero data start</b>:", LongName = "<b>Ring non-zero data start</b>:")]
RingNonZeroDataStart,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Ring Perfect Audio Offset</b>:", LongName = "<b>Ring Perfect Audio Offset</b>:")]
RingPerfectAudioOffset,
[HumanReadable(ShortName = "[T:RD]", LongName = "<b>Rolling Demos</b>:")]
RollingDemos,
@@ -3678,15 +3687,15 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:S]", LongName = "<b>Series</b>:")]
Series,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Sierra ID</b>:", LongName = "<b>Sierra ID</b>:")]
SierraID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>SS</b>:", LongName = "<b>SS</b>:")]
SSHash,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>SS version</b>:", LongName = "<b>SS version</b>:")]
SSVersion,
@@ -3699,7 +3708,7 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:UID]", LongName = "<b>Ubisoft ID</b>:")]
UbisoftID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Universal Hash (SHA-1)</b>:", LongName = "<b>Universal Hash (SHA-1)</b>:")]
UniversalHash,
@@ -3718,11 +3727,11 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:VCD]", LongName = "<b>V-CD</b>")]
VCD,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>XeMID</b>:", LongName = "<b>XeMID</b>:")]
XeMID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>XMID</b>:", LongName = "<b>XMID</b>:")]
XMID,
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,8 @@
using System;
using System.Collections.Generic;
#if NET40_OR_GREATER || NETCOREAPP
using System.Linq;
#endif
using Newtonsoft.Json;
using SabreTools.RedumpLib.Converters;
@@ -73,6 +75,20 @@ namespace SabreTools.RedumpLib.Data
public object Clone()
{
#if NET20 || NET35
Dictionary<string, string>? artifacts = null;
if (this.Artifacts != null)
{
artifacts = new Dictionary<string, string>();
foreach (var kvp in this.Artifacts)
{
artifacts[kvp.Key] = kvp.Value;
}
}
#else
var artifacts = this.Artifacts?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value);
#endif
return new SubmissionInfo
{
SchemaVersion = this.SchemaVersion,
@@ -90,7 +106,7 @@ namespace SabreTools.RedumpLib.Data
TracksAndWriteOffsets = this.TracksAndWriteOffsets?.Clone() as TracksAndWriteOffsetsSection,
SizeAndChecksums = this.SizeAndChecksums?.Clone() as SizeAndChecksumsSection,
DumpingInfo = this.DumpingInfo?.Clone() as DumpingInfoSection,
Artifacts = this.Artifacts?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
Artifacts = artifacts,
};
}
}
@@ -101,16 +117,16 @@ namespace SabreTools.RedumpLib.Data
public class CommonDiscInfoSection : ICloneable
{
// Name not defined by Redump
[JsonProperty(PropertyName = "d_system", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_system", DefaultValueHandling = DefaultValueHandling.Include)]
[JsonConverter(typeof(SystemConverter))]
public RedumpSystem? System { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_media", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_media", DefaultValueHandling = DefaultValueHandling.Include)]
[JsonConverter(typeof(DiscTypeConverter))]
public DiscType? Media { get; set; }
[JsonProperty(PropertyName = "d_title", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_title", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Title { get; set; }
[JsonProperty(PropertyName = "d_title_foreign", DefaultValueHandling = DefaultValueHandling.Ignore)]
@@ -122,15 +138,15 @@ namespace SabreTools.RedumpLib.Data
[JsonProperty(PropertyName = "d_label", NullValueHandling = NullValueHandling.Ignore)]
public string? DiscTitle { get; set; }
[JsonProperty(PropertyName = "d_category", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_category", DefaultValueHandling = DefaultValueHandling.Include)]
[JsonConverter(typeof(DiscCategoryConverter))]
public DiscCategory? Category { get; set; }
[JsonProperty(PropertyName = "d_region", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_region", DefaultValueHandling = DefaultValueHandling.Include)]
[JsonConverter(typeof(RegionConverter))]
public Region? Region { get; set; }
[JsonProperty(PropertyName = "d_languages", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_languages", DefaultValueHandling = DefaultValueHandling.Include)]
[JsonConverter(typeof(LanguageConverter))]
public Language?[]? Languages { get; set; }
@@ -147,7 +163,7 @@ namespace SabreTools.RedumpLib.Data
[JsonProperty(PropertyName = "d_ring_0_id", NullValueHandling = NullValueHandling.Ignore)]
public string? RingId { get; private set; }
[JsonProperty(PropertyName = "d_ring_0_ma1", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_ring_0_ma1", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Layer0MasteringRing { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma1_sid", NullValueHandling = NullValueHandling.Ignore)]
@@ -162,7 +178,7 @@ namespace SabreTools.RedumpLib.Data
[JsonProperty(PropertyName = "d_ring_0_mo1", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer0AdditionalMould { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma2", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_ring_0_ma2", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Layer1MasteringRing { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma2_sid", NullValueHandling = NullValueHandling.Ignore)]
@@ -177,7 +193,7 @@ namespace SabreTools.RedumpLib.Data
[JsonProperty(PropertyName = "d_ring_0_mo2", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer1AdditionalMould { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma3", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_ring_0_ma3", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Layer2MasteringRing { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma3_sid", NullValueHandling = NullValueHandling.Ignore)]
@@ -186,7 +202,7 @@ namespace SabreTools.RedumpLib.Data
[JsonProperty(PropertyName = "d_ring_0_ts3", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer2ToolstampMasteringCode { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma4", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_ring_0_ma4", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Layer3MasteringRing { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma4_sid", NullValueHandling = NullValueHandling.Ignore)]
@@ -233,6 +249,31 @@ namespace SabreTools.RedumpLib.Data
public object Clone()
{
#if NET20 || NET35
Dictionary<SiteCode, string>? commentsSpecialFields = null;
if (this.CommentsSpecialFields != null)
{
commentsSpecialFields = new Dictionary<SiteCode, string>();
foreach (var kvp in this.CommentsSpecialFields)
{
commentsSpecialFields[kvp.Key] = kvp.Value;
}
}
Dictionary<SiteCode, string>? contentsSpecialFields = null;
if (this.ContentsSpecialFields != null)
{
contentsSpecialFields = new Dictionary<SiteCode, string>();
foreach (var kvp in this.ContentsSpecialFields)
{
contentsSpecialFields[kvp.Key] = kvp.Value;
}
}
#else
var commentsSpecialFields = this.CommentsSpecialFields?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value);
var contentsSpecialFields = this.ContentsSpecialFields?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value);
#endif
return new CommonDiscInfoSection
{
System = this.System,
@@ -271,9 +312,9 @@ namespace SabreTools.RedumpLib.Data
EXEDateBuildDate = this.EXEDateBuildDate,
ErrorsCount = this.ErrorsCount,
Comments = this.Comments,
CommentsSpecialFields = this.CommentsSpecialFields?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
CommentsSpecialFields = commentsSpecialFields,
Contents = this.Contents,
ContentsSpecialFields = this.ContentsSpecialFields?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
ContentsSpecialFields = contentsSpecialFields,
};
}
}
@@ -414,13 +455,27 @@ namespace SabreTools.RedumpLib.Data
public object Clone()
{
#if NET20 || NET35
Dictionary<string, List<string>?>? fullProtections = null;
if (this.FullProtections != null)
{
fullProtections = new Dictionary<string, List<string>?>();
foreach (var kvp in this.FullProtections)
{
fullProtections[kvp.Key] = kvp.Value;
}
}
#else
var fullProtections = this.FullProtections?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value);
#endif
return new CopyProtectionSection
{
AntiModchip = this.AntiModchip,
LibCrypt = this.LibCrypt,
LibCryptData = this.LibCryptData,
Protection = this.Protection,
FullProtections = this.FullProtections?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
FullProtections = fullProtections,
SecuROMData = this.SecuROMData,
};
}
@@ -531,35 +586,35 @@ namespace SabreTools.RedumpLib.Data
public class DumpingInfoSection : ICloneable
{
// Name not defined by Redump -- Only used with MPF
[JsonProperty(PropertyName = "d_frontend_version", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_frontend_version", DefaultValueHandling = DefaultValueHandling.Include)]
public string? FrontendVersion { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_dumping_program", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_dumping_program", DefaultValueHandling = DefaultValueHandling.Include)]
public string? DumpingProgram { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_dumping_date", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_dumping_date", DefaultValueHandling = DefaultValueHandling.Include)]
public string? DumpingDate { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_dumping_params", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_dumping_params", DefaultValueHandling = DefaultValueHandling.Include)]
public string? DumpingParameters { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_drive_manufacturer", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_drive_manufacturer", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Manufacturer { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_drive_model", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_drive_model", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Model { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_drive_firmware", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_drive_firmware", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Firmware { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_reported_disc_type", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_reported_disc_type", DefaultValueHandling = DefaultValueHandling.Include)]
public string? ReportedDiscType { get; set; }
// Name not defined by Redump -- Only used with Redumper

View File

@@ -0,0 +1,187 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
using SabreTools.RedumpLib.Web;
namespace SabreTools.RedumpLib
{
/// <summary>
/// Contains logic for dealing with downloads
/// </summary>
public class Downloader
{
#region Properties
/// <summary>
/// Which Redump feature is being used
/// </summary>
public Feature Feature { get; set; }
/// <summary>
/// Minimum ID for downloading page information (Feature.Site, Feature.WIP only)
/// </summary>
public int MinimumId { get; set; }
/// <summary>
/// Maximum ID for downloading page information (Feature.Site, Feature.WIP only)
/// </summary>
public int MaximumId { get; set; }
/// <summary>
/// Quicksearch text for downloading
/// </summary>
public string? QueryString { get; set; }
/// <summary>
/// Directory to save all outputted files to
/// </summary>
public string? OutDir { get; set; }
/// <summary>
/// Use named subfolders for discrete download sets (Feature.Packs only)
/// </summary>
public bool UseSubfolders { get; set; }
/// <summary>
/// Use the last modified page to try to grab all new discs (Feature.Site, Feature.WIP only)
/// </summary>
public bool OnlyNew { get; set; }
/// <summary>
/// Only list the page IDs but don't download
/// </summary>
public bool OnlyList { get; set; }
/// <summary>
/// Don't replace forward slashes with `-` in queries
/// </summary>
public bool NoSlash { get; set; }
/// <summary>
/// Force continuing downloads until user cancels or pages run out
/// </summary>
public bool Force { get; set; }
/// <summary>
/// Redump username
/// </summary>
public string? Username { get; set; }
/// <summary>
/// Redump password
/// </summary>
public string? Password { get; set; }
#endregion
#region Private Vars
/// <summary>
/// Current HTTP rc to use
/// </summary>
private readonly RedumpClient _client;
#endregion
/// <summary>
/// Constructor
/// </summary>
public Downloader()
{
_client = new RedumpClient();
}
/// <summary>
/// Constructor
/// </summary>
/// <param name="client">Preconfigured client</param>
public Downloader(RedumpClient client)
{
_client = client;
}
/// <summary>
/// Run the downloads that should go
/// </summary>
/// <returns>List of IDs that were processed on success, empty on error</returns>
/// <remarks>Packs will never return anything other than empty</remarks>
public async Task<List<int>> Download()
{
// Login to Redump, if possible
if (!_client.LoggedIn)
await _client.Login(Username ?? string.Empty, Password ?? string.Empty);
// Create output list
List<int> processedIds = [];
switch (Feature)
{
case Feature.Packs:
await Packs.DownloadPacks(_client, OutDir, UseSubfolders);
break;
case Feature.Quicksearch:
processedIds = await ProcessQuicksearch();
break;
case Feature.Site:
processedIds = await ProcessSite();
break;
case Feature.User:
processedIds = await ProcessUser();
break;
case Feature.WIP:
processedIds = await ProcessWIP();
break;
default:
return [];
}
return processedIds;
}
/// <summary>
/// Process the Quicksearch feature
/// </summary>
private async Task<List<int>> ProcessQuicksearch()
{
if (OnlyList)
return await Search.ListSearchResults(_client, QueryString, NoSlash);
else
return await Search.DownloadSearchResults(_client, QueryString, OutDir, NoSlash);
}
/// <summary>
/// Process the Site feature
/// </summary>
private async Task<List<int>> ProcessSite()
{
if (OnlyNew)
return await Discs.DownloadLastModified(_client, OutDir, Force);
else
return await Discs.DownloadSiteRange(_client, OutDir, MinimumId, MaximumId);
}
/// <summary>
/// Process the User feature
/// </summary>
private async Task<List<int>> ProcessUser()
{
if (OnlyList)
return await User.ListUser(_client, Username);
else if (OnlyNew)
return await User.DownloadUserLastModified(_client, Username, OutDir);
else
return await User.DownloadUser(_client, Username, OutDir);
}
/// <summary>
/// Process the WIP feature
/// </summary>
private async Task<List<int>> ProcessWIP()
{
if (OnlyNew)
return await WIP.DownloadLastSubmitted(_client, OutDir);
else
return await WIP.DownloadWIPRange(_client, OutDir, MinimumId, MaximumId);
}
}
}

View File

@@ -0,0 +1,9 @@
#if NET20
namespace System.Runtime.CompilerServices
{
[AttributeUsage(AttributeTargets.Assembly | AttributeTargets.Class | AttributeTargets.Method)]
internal sealed class ExtensionAttribute : Attribute {}
}
#endif

File diff suppressed because it is too large Load Diff

View File

@@ -3,7 +3,6 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Text;
namespace SabreTools.RedumpLib
@@ -40,8 +39,13 @@ namespace SabreTools.RedumpLib
new StringBuilder(256) :
new StringBuilder(value.Length);
sb.Append(valueSpan.Take(index).ToArray());
HtmlDecode(valueSpan.Skip(index).ToArray(), ref sb);
char[] take = new char[index];
Array.Copy(valueSpan, take, index);
sb.Append(take);
char[] skip = new char[valueSpan.Length - index];
Array.Copy(valueSpan, index, skip, 0, skip.Length);
HtmlDecode(skip, ref sb);
return sb.ToString();
}
@@ -57,7 +61,8 @@ namespace SabreTools.RedumpLib
// We found a '&'. Now look for the next ';' or '&'. The idea is that
// if we find another '&' before finding a ';', then this is not an entity,
// and the next '&' might start a real entity (VSWhidbey 275184)
char[] inputSlice = input.Skip(i + 1).ToArray();
char[] inputSlice = new char[input.Length - (i + 1)];
Array.Copy(input, i + 1, inputSlice, 0, inputSlice.Length);
int semicolonPos = Array.IndexOf(inputSlice, ';');
int ampersandPos = Array.IndexOf(inputSlice, '&');
@@ -81,9 +86,13 @@ namespace SabreTools.RedumpLib
// &#xE5; --> same char in hex
// See http://www.w3.org/TR/REC-html40/charset.html#entities
int offset = inputSlice[1] == 'x' || inputSlice[1] == 'X' ? 2 : 1;
char[] inputSliceNoPrefix = new char[entityLength - offset];
Array.Copy(inputSlice, offset, inputSliceNoPrefix, 0, inputSliceNoPrefix.Length);
bool parsedSuccessfully = inputSlice[1] == 'x' || inputSlice[1] == 'X'
? uint.TryParse(new string(inputSlice.Skip(2).Take(entityLength - 2).ToArray()), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out uint parsedValue)
: uint.TryParse(new string(inputSlice.Skip(1).Take(entityLength - 1).ToArray()), NumberStyles.Integer, CultureInfo.InvariantCulture, out parsedValue);
? uint.TryParse(new string(inputSliceNoPrefix), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out uint parsedValue)
: uint.TryParse(new string(inputSliceNoPrefix), NumberStyles.Integer, CultureInfo.InvariantCulture, out parsedValue);
if (parsedSuccessfully)
{
@@ -112,7 +121,8 @@ namespace SabreTools.RedumpLib
}
else
{
char[] entity = inputSlice.Take(entityLength).ToArray();
char[] entity = new char[entityLength];
Array.Copy(inputSlice, entity, entityLength);
i = entityEndPosition; // already looked at everything until semicolon
char entityChar = HtmlEntities.Lookup(entity);
@@ -414,7 +424,10 @@ namespace SabreTools.RedumpLib
ulong key = BitConverter.ToUInt64(tableData, 0);
char value = (char)BitConverter.ToUInt16(tableData, sizeof(ulong));
dictionary[key] = value;
tableData = tableData.Skip((sizeof(ulong) + sizeof(char))).ToArray();
byte[] tempTableData = new byte[tableData.Length - (sizeof(ulong) + sizeof(char))];
Array.Copy(tableData, (sizeof(ulong) + sizeof(char)), tempTableData, 0, tempTableData.Length);
tableData = tempTableData;
}
return dictionary;
}

View File

@@ -1,46 +1,46 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<!-- Assembly Properties -->
<TargetFrameworks>net20;net35;net40;net452;net462;net472;net48;netcoreapp3.1;net5.0;net6.0;net7.0;net8.0</TargetFrameworks>
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64;osx-arm64</RuntimeIdentifiers>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.3.9</Version>
<PropertyGroup>
<!-- Assembly Properties -->
<TargetFrameworks>net20;net35;net40;net452;net462;net472;net48;netcoreapp3.1;net5.0;net6.0;net7.0;net8.0;net9.0</TargetFrameworks>
<IncludeSymbols>true</IncludeSymbols>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<SymbolPackageFormat>snupkg</SymbolPackageFormat>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.6.4</Version>
<!-- Package Properties -->
<Authors>Matt Nadareski</Authors>
<Description>Code to interact with redump.org</Description>
<Copyright>Copyright (c) Matt Nadareski 2020-2024</Copyright>
<PackageProjectUrl>https://github.com/SabreTools/</PackageProjectUrl>
<PackageReadmeFile>README.md</PackageReadmeFile>
<RepositoryUrl>https://github.com/SabreTools/SabreTools.RedumpLib</RepositoryUrl>
<RepositoryType>git</RepositoryType>
<PackageTags>web client redump</PackageTags>
<PackageLicenseExpression>MIT</PackageLicenseExpression>
</PropertyGroup>
<!-- Package Properties -->
<Authors>Matt Nadareski</Authors>
<Description>Code to interact with redump.org</Description>
<Copyright>Copyright (c) Matt Nadareski 2020-2025</Copyright>
<PackageProjectUrl>https://github.com/SabreTools/</PackageProjectUrl>
<PackageReadmeFile>README.md</PackageReadmeFile>
<RepositoryUrl>https://github.com/SabreTools/SabreTools.RedumpLib</RepositoryUrl>
<RepositoryType>git</RepositoryType>
<PackageTags>web client redump</PackageTags>
<PackageLicenseExpression>MIT</PackageLicenseExpression>
</PropertyGroup>
<ItemGroup>
<None Include="../README.md" Pack="true" PackagePath="" />
</ItemGroup>
<ItemGroup>
<InternalsVisibleTo Include="SabreTools.RedumpLib.Test" />
</ItemGroup>
<!-- Support for old .NET versions -->
<ItemGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`))">
<PackageReference Include="MinValueTupleBridge" Version="0.2.1" />
</ItemGroup>
<ItemGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR $(TargetFramework.StartsWith(`net40`))">
<PackageReference Include="MinAsyncBridge" Version="0.12.4" />
<PackageReference Include="MinThreadingBridge" Version="0.11.4" />
<PackageReference Include="Net30.LinqBridge" Version="1.3.0" />
</ItemGroup>
<ItemGroup Condition="$(TargetFramework.StartsWith(`net4`))">
<PackageReference Include="System.ValueTuple" Version="4.5.0" />
</ItemGroup>
<ItemGroup>
<None Include="../README.md" Pack="true" PackagePath="" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.Models" Version="1.4.8" />
</ItemGroup>
<!-- Support for old .NET versions -->
<ItemGroup Condition="$(TargetFramework.StartsWith(`net2`))">
<PackageReference Include="Net35.Actions" Version="1.1.0" />
</ItemGroup>
<ItemGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR $(TargetFramework.StartsWith(`net40`))">
<PackageReference Include="MinAsyncBridge" Version="0.12.4" />
</ItemGroup>
</Project>
<ItemGroup>
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.Models" Version="1.5.8" />
</ItemGroup>
</Project>

View File

@@ -1,6 +1,5 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
using SabreTools.RedumpLib.Web;
@@ -22,6 +21,14 @@ namespace SabreTools.RedumpLib
switch (info.CommonDiscInfo.Media)
{
case DiscType.DVD5:
case DiscType.DVD9:
if (info.SizeAndChecksums.Layerbreak != default)
info.CommonDiscInfo.Media = DiscType.DVD9;
else
info.CommonDiscInfo.Media = DiscType.DVD5;
break;
case DiscType.BD25:
case DiscType.BD33:
case DiscType.BD50:
@@ -32,13 +39,13 @@ namespace SabreTools.RedumpLib
info.CommonDiscInfo.Media = DiscType.BD128;
else if (info.SizeAndChecksums.Layerbreak2 != default)
info.CommonDiscInfo.Media = DiscType.BD100;
else if (info.SizeAndChecksums.Layerbreak != default && info.SizeAndChecksums.PICIdentifier == SabreTools.Models.PIC.Constants.DiscTypeIdentifierROMUltra)
else if (info.SizeAndChecksums.Layerbreak != default && info.SizeAndChecksums.PICIdentifier == Models.PIC.Constants.DiscTypeIdentifierROMUltra)
info.CommonDiscInfo.Media = DiscType.BD66;
else if (info.SizeAndChecksums.Layerbreak != default && info.SizeAndChecksums.Size > 50_050_629_632)
info.CommonDiscInfo.Media = DiscType.BD66;
else if (info.SizeAndChecksums.Layerbreak != default)
info.CommonDiscInfo.Media = DiscType.BD50;
else if (info.SizeAndChecksums.PICIdentifier == SabreTools.Models.PIC.Constants.DiscTypeIdentifierROMUltra)
else if (info.SizeAndChecksums.PICIdentifier == Models.PIC.Constants.DiscTypeIdentifierROMUltra)
info.CommonDiscInfo.Media = DiscType.BD33;
else if (info.SizeAndChecksums.Size > 25_025_314_816)
info.CommonDiscInfo.Media = DiscType.BD33;
@@ -46,14 +53,6 @@ namespace SabreTools.RedumpLib
info.CommonDiscInfo.Media = DiscType.BD25;
break;
case DiscType.DVD5:
case DiscType.DVD9:
if (info.SizeAndChecksums.Layerbreak != default)
info.CommonDiscInfo.Media = DiscType.DVD9;
else
info.CommonDiscInfo.Media = DiscType.DVD5;
break;
case DiscType.HDDVDSL:
case DiscType.HDDVDDL:
if (info.SizeAndChecksums.Layerbreak != default)
@@ -130,27 +129,27 @@ namespace SabreTools.RedumpLib
/// <param name="rc">RedumpClient for making the connection</param>
/// <param name="info">Existing SubmissionInfo object to fill</param>
/// <param name="sha1">SHA-1 hash to check against</param>
/// <returns>True if the track was found, false otherwise; List of found values, if possible</returns>
public async static Task<(bool, List<int>?, string?)> ValidateSingleTrack(RedumpClient rc, SubmissionInfo info, string? sha1)
/// <returns>List of found values, if possible</returns>
public async static Task<List<int>?> ValidateSingleTrack(RedumpClient rc, SubmissionInfo info, string? sha1)
{
// Get all matching IDs for the track
var newIds = await ListSearchResults(rc, sha1);
// If we got null back, there was an error
if (newIds == null)
return (false, null, "There was an unknown error retrieving information from Redump");
return null;
// If no IDs match, just return
if (!newIds.Any())
return (false, null, $"There were no matching IDs for track with SHA-1 of '{sha1}'");
if (newIds.Count == 0)
return null;
// Join the list of found IDs to the existing list, if possible
if (info.PartiallyMatchedIDs != null && info.PartiallyMatchedIDs.Any())
if (info.PartiallyMatchedIDs != null && info.PartiallyMatchedIDs.Count > 0)
info.PartiallyMatchedIDs.AddRange(newIds);
else
info.PartiallyMatchedIDs = newIds;
return (true, newIds, $"There were matching ID(s) found for track with SHA-1 of '{sha1}'");
return newIds;
}
/// <summary>
@@ -159,17 +158,17 @@ namespace SabreTools.RedumpLib
/// <param name="rc">RedumpClient for making the connection</param>
/// <param name="info">Existing SubmissionInfo object to fill</param>
/// <param name="resultProgress">Optional result progress callback</param>
/// <returns>True if the track was found, false otherwise; List of found values, if possible</returns>
public async static Task<(bool, List<int>?, string?)> ValidateUniversalHash(RedumpClient rc, SubmissionInfo info)
/// <returns>List of found values, if possible</returns>
public async static Task<List<int>?> ValidateUniversalHash(RedumpClient rc, SubmissionInfo info)
{
// If we don't have special fields
if (info.CommonDiscInfo?.CommentsSpecialFields == null)
return (false, null, "Universal hash was missing");
return null;
// If we don't have a universal hash
string? universalHash = info.CommonDiscInfo.CommentsSpecialFields[SiteCode.UniversalHash];
if (string.IsNullOrEmpty(universalHash))
return (false, null, "Universal hash was missing");
return null;
// Format the universal hash for finding within the comments
string universalHashQuery = $"{universalHash.Substring(0, universalHash.Length - 1)}/comments/only";
@@ -179,19 +178,19 @@ namespace SabreTools.RedumpLib
// If we got null back, there was an error
if (newIds == null)
return (false, null, "There was an unknown error retrieving information from Redump");
return null;
// If no IDs match, just return
if (!newIds.Any())
return (false, null, $"There were no matching IDs for universal hash of '{universalHash}'");
if (newIds.Count == 0)
return null;
// Join the list of found IDs to the existing list, if possible
if (info.PartiallyMatchedIDs != null && info.PartiallyMatchedIDs.Any())
if (info.PartiallyMatchedIDs != null && info.PartiallyMatchedIDs.Count > 0)
info.PartiallyMatchedIDs.AddRange(newIds);
else
info.PartiallyMatchedIDs = newIds;
return (true, newIds, $"There were matching ID(s) found for universal hash of '{universalHash}'");
return newIds;
}
/// <summary>

View File

@@ -0,0 +1,21 @@
using System;
using System.Threading;
namespace SabreTools.RedumpLib.Web
{
/// <summary>
/// Helper class for delaying
/// </summary>
internal static class DelayHelper
{
/// <summary>
/// Delay a random amount of time up to 5 seconds
/// </summary>
public static void DelayRandom()
{
var r = new Random();
int delay = r.Next(0, 50);
Thread.Sleep(delay * 100);
}
}
}

View File

@@ -0,0 +1,65 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
namespace SabreTools.RedumpLib.Web
{
/// <summary>
/// Contains logic for dealing with disc pages
/// </summary>
public static class Discs
{
/// <summary>
/// Download the last modified disc pages, until first failure
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="force">Force continuation of download</param>
/// <returns>All disc IDs in last modified range, empty on error</returns>
public static async Task<List<int>> DownloadLastModified(RedumpClient rc, string? outDir, bool force)
{
List<int> ids = [];
// Keep getting last modified pages until there are none left
int pageNumber = 1;
while (true)
{
var pageIds = await rc.CheckSingleSitePage(string.Format(Constants.LastModifiedUrl, pageNumber++), outDir, !force);
ids.AddRange(pageIds);
if (pageIds.Count == 0)
break;
}
return ids;
}
/// <summary>
/// Download the specified range of site disc pages
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="minId">Starting ID for the range</param>
/// <param name="maxId">Ending ID for the range (inclusive)</param>
/// <returns>All disc IDs in last modified range, empty on error</returns>
public static async Task<List<int>> DownloadSiteRange(RedumpClient rc, string? outDir, int minId = 0, int maxId = 0)
{
List<int> ids = [];
if (!rc.LoggedIn)
{
Console.WriteLine("Site download functionality is only available to Redump members");
return ids;
}
for (int id = minId; id <= maxId; id++)
{
ids.Add(id);
if (await rc.DownloadSingleSiteID(id, outDir, true))
DelayHelper.DelayRandom(); // Intentional sleep here so we don't flood the server
}
return ids;
}
}
}

View File

@@ -0,0 +1,71 @@
using System;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
namespace SabreTools.RedumpLib.Web
{
/// <summary>
/// Contains logic for dealing with packs
/// </summary>
internal static class Packs
{
/// <summary>
/// Download premade packs
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="useSubfolders">True to use named subfolders to store downloads, false to store directly in the output directory</param>
public static async Task<bool> DownloadPacks(RedumpClient rc, string? outDir, bool useSubfolders)
{
var systems = (RedumpSystem[])Enum.GetValues(typeof(RedumpSystem));
await rc.DownloadPacks(Constants.PackCuesUrl, Array.FindAll(systems, s => s.HasCues()), "CUEs", outDir, useSubfolders ? "cue" : null);
await rc.DownloadPacks(Constants.PackDatfileUrl, Array.FindAll(systems, s => s.HasDat()), "DATs", outDir, useSubfolders ? "dat" : null);
await rc.DownloadPacks(Constants.PackDkeysUrl, Array.FindAll(systems, s => s.HasDkeys()), "Decrypted KEYS", outDir, useSubfolders ? "dkey" : null);
await rc.DownloadPacks(Constants.PackGdiUrl, Array.FindAll(systems, s => s.HasGdi()), "GDIs", outDir, useSubfolders ? "gdi" : null);
await rc.DownloadPacks(Constants.PackKeysUrl, Array.FindAll(systems, s => s.HasKeys()), "KEYS", outDir, useSubfolders ? "keys" : null);
await rc.DownloadPacks(Constants.PackLsdUrl, Array.FindAll(systems, s => s.HasLsd()), "LSD", outDir, useSubfolders ? "lsd" : null);
await rc.DownloadPacks(Constants.PackSbiUrl, Array.FindAll(systems, s => s.HasSbi()), "SBIs", outDir, useSubfolders ? "sbi" : null);
return true;
}
/// <summary>
/// Download premade packs for an individual system
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="system">RedumpSystem to get all possible packs for</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="useSubfolders">True to use named subfolders to store downloads, false to store directly in the output directory</param>
public static async Task<bool> DownloadPacksForSystem(RedumpClient rc, RedumpSystem? system, string? outDir, bool useSubfolders)
{
if (system == null)
return false;
var systemAsArray = new RedumpSystem[] { system.Value };
if (system.HasCues())
await rc.DownloadPacks(Constants.PackCuesUrl, systemAsArray, "CUEs", outDir, useSubfolders ? "cue" : null);
if (system.HasDat())
await rc.DownloadPacks(Constants.PackDatfileUrl, systemAsArray, "DATs", outDir, useSubfolders ? "dat" : null);
if (system.HasDkeys())
await rc.DownloadPacks(Constants.PackDkeysUrl, systemAsArray, "Decrypted KEYS", outDir, useSubfolders ? "dkey" : null);
if (system.HasGdi())
await rc.DownloadPacks(Constants.PackGdiUrl, systemAsArray, "GDIs", outDir, useSubfolders ? "gdi" : null);
if (system.HasKeys())
await rc.DownloadPacks(Constants.PackKeysUrl, systemAsArray, "KEYS", outDir, useSubfolders ? "keys" : null);
if (system.HasLsd())
await rc.DownloadPacks(Constants.PackLsdUrl, systemAsArray, "LSD", outDir, useSubfolders ? "lsd" : null);
if (system.HasSbi())
await rc.DownloadPacks(Constants.PackSbiUrl, systemAsArray, "SBIs", outDir, useSubfolders ? "sbi" : null);
return true;
}
}
}

View File

@@ -1,7 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
#if NETCOREAPP
using System.Net.Http;
@@ -74,22 +73,22 @@ namespace SabreTools.RedumpLib.Web
/// <summary>
/// Validate supplied credentials
/// </summary>
public async static Task<(bool?, string?)> ValidateCredentials(string username, string password)
public async static Task<bool?> ValidateCredentials(string username, string password)
{
// If options are invalid or we're missing something key, just return
if (string.IsNullOrEmpty(username) || string.IsNullOrEmpty(password))
return (false, null);
return false;
// Try logging in with the supplied credentials otherwise
var redumpClient = new RedumpClient();
bool? loggedIn = await redumpClient.Login(username, password);
if (loggedIn == true)
return (true, "Redump username and password accepted!");
return true;
else if (loggedIn == false)
return (false, "Redump username and password denied!");
return false;
else
return (null, "An error occurred validating your credentials!");
return null;
}
/// <summary>
@@ -129,7 +128,7 @@ namespace SabreTools.RedumpLib.Web
try
{
// Get the current token from the login page
var loginPage = await DownloadStringWithRetries(Constants.LoginUrl);
var loginPage = await DownloadString(Constants.LoginUrl);
string token = Constants.TokenRegex.Match(loginPage ?? string.Empty).Groups[1].Value;
#if NETFRAMEWORK
@@ -187,6 +186,108 @@ namespace SabreTools.RedumpLib.Web
#endregion
#region Generic Helpers
/// <summary>
/// Download from a URI to a byte array
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <returns>Byte array from the URI, null on error</returns>
public async Task<byte[]?> DownloadData(string uri)
{
// Only retry a positive number of times
if (RetryCount <= 0)
return null;
for (int i = 0; i < RetryCount; i++)
{
try
{
#if NET40
return await Task.Factory.StartNew(() => _internalClient.DownloadData(uri));
#elif NETFRAMEWORK
return await Task.Run(() => _internalClient.DownloadData(uri));
#else
return await _internalClient.GetByteArrayAsync(uri);
#endif
}
catch { }
// Sleep for 100ms if the last attempt failed
Thread.Sleep(100);
}
return null;
}
/// <summary>
/// Download from a URI to a local file
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <param name="fileName">Filename to write to</param>
/// <returns>The remote filename from the URI, null on error</returns>
public async Task<string?> DownloadFile(string uri, string fileName)
{
#if NET40
await Task.Factory.StartNew(() => { _internalClient.DownloadFile(uri, fileName); return true; });
return _internalClient.GetLastFilename();
#elif NETFRAMEWORK
await Task.Run(() => _internalClient.DownloadFile(uri, fileName));
return _internalClient.GetLastFilename();
#else
// Make the call to get the file
var response = await _internalClient.GetAsync(uri);
if (response?.Content?.Headers == null || !response.IsSuccessStatusCode)
{
Console.WriteLine($"Could not download {uri}");
return null;
}
// Copy the data to a local temp file
using (var responseStream = await response.Content.ReadAsStreamAsync())
using (var tempFileStream = File.OpenWrite(fileName))
{
responseStream.CopyTo(tempFileStream);
}
return response.Content.Headers.ContentDisposition?.FileName?.Replace("\"", "");
#endif
}
/// <summary>
/// Download from a URI to a string
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <returns>String from the URI, null on error</returns>
public async Task<string?> DownloadString(string uri)
{
// Only retry a positive number of times
if (RetryCount <= 0)
return null;
for (int i = 0; i < RetryCount; i++)
{
try
{
#if NET40
return await Task.Factory.StartNew(() => _internalClient.DownloadString(uri));
#elif NETFRAMEWORK
return await Task.Run(() => _internalClient.DownloadString(uri));
#else
return await _internalClient.GetStringAsync(uri);
#endif
}
catch { }
// Sleep for 100ms if the last attempt failed
Thread.Sleep(100);
}
return null;
}
#endregion
#region Single Page Helpers
/// <summary>
@@ -199,7 +300,7 @@ namespace SabreTools.RedumpLib.Web
List<int> ids = [];
// Try to retrieve the data
string? dumpsPage = await DownloadStringWithRetries(url);
string? dumpsPage = await DownloadString(url);
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
@@ -217,7 +318,7 @@ namespace SabreTools.RedumpLib.Web
// Otherwise, traverse each dump on the page
var matches = Constants.DiscRegex.Matches(dumpsPage);
foreach (Match? match in matches.Cast<Match?>())
foreach (Match? match in matches)
{
if (match == null)
continue;
@@ -243,15 +344,17 @@ namespace SabreTools.RedumpLib.Web
/// <param name="url">Base URL to download using</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="failOnSingle">True to return on first error, false otherwise</param>
/// <returns>True if the page could be downloaded, false otherwise</returns>
public async Task<bool> CheckSingleSitePage(string url, string? outDir, bool failOnSingle)
/// <returns>List of IDs that were found on success, empty on error</returns>
public async Task<List<int>> CheckSingleSitePage(string url, string? outDir, bool failOnSingle)
{
List<int> ids = [];
// Try to retrieve the data
string? dumpsPage = await DownloadStringWithRetries(url);
string? dumpsPage = await DownloadString(url);
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
return false;
return ids;
// If we have a single disc page already
if (dumpsPage.Contains("<b>Download:</b>"))
@@ -259,17 +362,18 @@ namespace SabreTools.RedumpLib.Web
var value = Regex.Match(dumpsPage, @"/disc/(\d+)/sfv/").Groups[1].Value;
if (int.TryParse(value, out int id))
{
ids.Add(id);
bool downloaded = await DownloadSingleSiteID(id, outDir, false);
if (!downloaded && failOnSingle)
return false;
return ids;
}
return false;
return ids;
}
// Otherwise, traverse each dump on the page
var matches = Constants.DiscRegex.Matches(dumpsPage);
foreach (Match? match in matches.Cast<Match?>())
foreach (Match? match in matches)
{
if (match == null)
continue;
@@ -278,9 +382,10 @@ namespace SabreTools.RedumpLib.Web
{
if (int.TryParse(match.Groups[1].Value, out int value))
{
ids.Add(value);
bool downloaded = await DownloadSingleSiteID(value, outDir, false);
if (!downloaded && failOnSingle)
return false;
return ids;
}
}
catch (Exception ex)
@@ -290,7 +395,7 @@ namespace SabreTools.RedumpLib.Web
}
}
return true;
return ids;
}
/// <summary>
@@ -303,7 +408,7 @@ namespace SabreTools.RedumpLib.Web
List<int> ids = [];
// Try to retrieve the data
string? dumpsPage = await DownloadStringWithRetries(url);
string? dumpsPage = await DownloadString(url);
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
@@ -311,7 +416,7 @@ namespace SabreTools.RedumpLib.Web
// Otherwise, traverse each dump on the page
var matches = Constants.NewDiscRegex.Matches(dumpsPage);
foreach (Match? match in matches.Cast<Match?>())
foreach (Match? match in matches)
{
if (match == null)
continue;
@@ -337,19 +442,21 @@ namespace SabreTools.RedumpLib.Web
/// <param name="wc">RedumpWebClient to access the packs</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="failOnSingle">True to return on first error, false otherwise</param>
/// <returns>True if the page could be downloaded, false otherwise</returns>
public async Task<bool> CheckSingleWIPPage(string url, string? outDir, bool failOnSingle)
/// <returns>List of IDs that were found on success, empty on error</returns>
public async Task<List<int>> CheckSingleWIPPage(string url, string? outDir, bool failOnSingle)
{
List<int> ids = [];
// Try to retrieve the data
string? dumpsPage = await DownloadStringWithRetries(url);
string? dumpsPage = await DownloadString(url);
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
return false;
return ids;
// Otherwise, traverse each dump on the page
var matches = Constants.NewDiscRegex.Matches(dumpsPage);
foreach (Match? match in matches.Cast<Match?>())
foreach (Match? match in matches)
{
if (match == null)
continue;
@@ -358,9 +465,10 @@ namespace SabreTools.RedumpLib.Web
{
if (int.TryParse(match.Groups[2].Value, out int value))
{
ids.Add(value);
bool downloaded = await DownloadSingleWIPID(value, outDir, false);
if (!downloaded && failOnSingle)
return false;
return ids;
}
}
catch (Exception ex)
@@ -370,7 +478,7 @@ namespace SabreTools.RedumpLib.Web
}
}
return true;
return ids;
}
#endregion
@@ -445,7 +553,7 @@ namespace SabreTools.RedumpLib.Web
{
// Try to retrieve the data
string discPageUri = string.Format(Constants.DiscPageUrl, +id);
string? discPage = await DownloadStringWithRetries(discPageUri);
string? discPage = await DownloadString(discPageUri);
if (discPage == null || discPage.Contains($"Disc with ID \"{id}\" doesn't exist"))
{
@@ -483,7 +591,7 @@ namespace SabreTools.RedumpLib.Web
{
// Try to retrieve the data
string discPageUri = string.Format(Constants.DiscPageUrl, +id);
string? discPage = await DownloadStringWithRetries(discPageUri);
string? discPage = await DownloadString(discPageUri);
if (discPage == null || discPage.Contains($"Disc with ID \"{id}\" doesn't exist"))
{
@@ -607,7 +715,7 @@ namespace SabreTools.RedumpLib.Web
{
// Try to retrieve the data
string discPageUri = string.Format(Constants.WipDiscPageUrl, +id);
string? discPage = await DownloadStringWithRetries(discPageUri);
string? discPage = await DownloadString(discPageUri);
if (discPage == null || discPage.Contains($"WIP disc with ID \"{id}\" doesn't exist"))
{
@@ -645,7 +753,7 @@ namespace SabreTools.RedumpLib.Web
{
// Try to retrieve the data
string discPageUri = string.Format(Constants.WipDiscPageUrl, +id);
string? discPage = await DownloadStringWithRetries(discPageUri);
string? discPage = await DownloadString(discPageUri);
if (discPage == null || discPage.Contains($"WIP disc with ID \"{id}\" doesn't exist"))
{
@@ -719,7 +827,7 @@ namespace SabreTools.RedumpLib.Web
/// <param name="url">Base URL to download using</param>
/// <param name="system">Systems to download packs for</param>
/// <param name="title">Name of the pack that is downloading</param>
public async Task<Dictionary<RedumpSystem, byte[]>> DownloadPacks(string url, RedumpSystem?[] systems, string title)
public async Task<Dictionary<RedumpSystem, byte[]>> DownloadPacks(string url, RedumpSystem[] systems, string title)
{
var packsDictionary = new Dictionary<RedumpSystem, byte[]>();
@@ -727,7 +835,7 @@ namespace SabreTools.RedumpLib.Web
foreach (var system in systems)
{
// If the system is invalid, we can't do anything
if (system == null || !system.IsAvailable())
if (!system.IsAvailable())
continue;
// If we didn't have credentials
@@ -742,7 +850,7 @@ namespace SabreTools.RedumpLib.Web
Console.Write($"\r{longName}{new string(' ', Console.BufferWidth - longName!.Length - 1)}");
byte[]? pack = await DownloadSinglePack(url, system);
if (pack != null)
packsDictionary.Add(system.Value, pack);
packsDictionary.Add(system, pack);
}
Console.Write($"\rComplete!{new string(' ', Console.BufferWidth - 10)}");
@@ -759,13 +867,13 @@ namespace SabreTools.RedumpLib.Web
/// <param name="title">Name of the pack that is downloading</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="subfolder">Named subfolder for the pack, used optionally</param>
public async Task<bool> DownloadPacks(string url, RedumpSystem?[] systems, string title, string? outDir, string? subfolder)
public async Task<bool> DownloadPacks(string url, RedumpSystem[] systems, string title, string? outDir, string? subfolder)
{
Console.WriteLine($"Downloading {title}");
foreach (var system in systems)
{
// If the system is invalid, we can't do anything
if (system == null || !system.IsAvailable())
if (!system.IsAvailable())
continue;
// If we didn't have credentials
@@ -786,72 +894,6 @@ namespace SabreTools.RedumpLib.Web
return true;
}
/// <summary>
/// Download from a URI to a local file
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <param name="fileName">Filename to write to</param>
/// <returns>The remote filename from the URI, null on error</returns>
private async Task<string?> DownloadFile(string uri, string fileName)
{
#if NET40
await Task.Factory.StartNew(() => { _internalClient.DownloadFile(uri, fileName); return true; });
return _internalClient.GetLastFilename();
#elif NETFRAMEWORK
await Task.Run(() => _internalClient.DownloadFile(uri, fileName));
return _internalClient.GetLastFilename();
#else
// Make the call to get the file
var response = await _internalClient.GetAsync(uri);
if (response?.Content?.Headers == null || !response.IsSuccessStatusCode)
{
Console.WriteLine($"Could not download {uri}");
return null;
}
// Copy the data to a local temp file
using (var responseStream = await response.Content.ReadAsStreamAsync())
using (var tempFileStream = File.OpenWrite(fileName))
{
responseStream.CopyTo(tempFileStream);
}
return response.Content.Headers.ContentDisposition?.FileName?.Replace("\"", "");
#endif
}
/// <summary>
/// Download from a URI to a string
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <returns>String from the URI, null on error</returns>
private async Task<string?> DownloadStringWithRetries(string uri)
{
// Only retry a positive number of times
if (RetryCount <= 0)
return null;
for (int i = 0; i < RetryCount; i++)
{
try
{
#if NET40
return await Task.Factory.StartNew(() => _internalClient.DownloadString(uri));
#elif NETFRAMEWORK
return await Task.Run(() => _internalClient.DownloadString(uri));
#else
return await _internalClient.GetStringAsync(uri);
#endif
}
catch { }
// Sleep for 100ms if the last attempt failed
Thread.Sleep(100);
}
return null;
}
/// <summary>
/// Move a tempfile to a new name unless it already exists, in which case, delete the tempfile
/// </summary>

View File

@@ -0,0 +1,102 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
namespace SabreTools.RedumpLib.Web
{
/// <summary>
/// Contains logic for dealing with searches
/// </summary>
internal static class Search
{
    /// <summary>
    /// List the disc IDs associated with a given quicksearch query
    /// </summary>
    /// <param name="rc">RedumpClient for connectivity</param>
    /// <param name="query">Query string to attempt to search for</param>
    /// <param name="noSlash">Don't replace slashes with `-` in queries</param>
    /// <returns>All disc IDs for the given query, empty on error</returns>
    public static async Task<List<int>> ListSearchResults(RedumpClient rc, string? query, bool noSlash)
    {
        // If the query is invalid
        if (string.IsNullOrEmpty(query))
            return [];

        List<int> ids = [];

        // Normalize the query the same way for every search entry point
        string formattedQuery = FormatQuery(query!, noSlash);

        // Keep getting quicksearch pages until there are none left
        try
        {
            int pageNumber = 1;
            while (true)
            {
                List<int> pageIds = await rc.CheckSingleSitePage(string.Format(Constants.QuickSearchUrl, formattedQuery, pageNumber++));
                ids.AddRange(pageIds);

                // NOTE(review): stops when a page yields 0 or 1 IDs, while the download
                // variant below stops only on 0 — confirm the `<= 1` cutoff is intentional
                if (pageIds.Count <= 1)
                    break;
            }
        }
        catch (Exception ex)
        {
            // Message previously said "trying to log in" — copy-paste from the login path
            Console.WriteLine($"An exception occurred while trying to search: {ex}");
            return [];
        }

        return ids;
    }

    /// <summary>
    /// Download the disc pages associated with a given quicksearch query
    /// </summary>
    /// <param name="rc">RedumpClient for connectivity</param>
    /// <param name="query">Query string to attempt to search for</param>
    /// <param name="outDir">Output directory to save data to</param>
    /// <param name="noSlash">Don't replace slashes with `-` in queries</param>
    /// <returns>All disc IDs for the given query, empty on error</returns>
    public static async Task<List<int>> DownloadSearchResults(RedumpClient rc, string? query, string? outDir, bool noSlash)
    {
        List<int> ids = [];

        // If the query is invalid
        if (string.IsNullOrEmpty(query))
            return ids;

        // Normalize the query the same way for every search entry point
        string formattedQuery = FormatQuery(query!, noSlash);

        // Keep getting quicksearch pages until there are none left
        int pageNumber = 1;
        while (true)
        {
            var pageIds = await rc.CheckSingleSitePage(string.Format(Constants.QuickSearchUrl, formattedQuery, pageNumber++), outDir, false);
            ids.AddRange(pageIds);
            if (pageIds.Count == 0)
                break;
        }

        return ids;
    }

    /// <summary>
    /// Normalize a quicksearch query for use in a Redump URL
    /// </summary>
    /// <param name="query">Raw query string, assumed non-empty</param>
    /// <param name="noSlash">Don't replace slashes with `-` in queries</param>
    /// <returns>Lowercased, dash-separated query string</returns>
    private static string FormatQuery(string query, bool noSlash)
    {
        // Strip quotes
        query = query.Trim('"', '\'');

        // Special characters become dashes
        query = query.Replace(' ', '-');
        query = query.Replace('\\', '-');
        if (!noSlash)
            query = query.Replace('/', '-');

        // Lowercase is defined per language
        return query.ToLowerInvariant();
    }
}
}

View File

@@ -0,0 +1,110 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
namespace SabreTools.RedumpLib.Web
{
/// <summary>
/// Contains logic for dealing with users
/// </summary>
public static class User
{
    /// <summary>
    /// Download the disc pages associated with the given user
    /// </summary>
    /// <param name="rc">RedumpClient for connectivity</param>
    /// <param name="username">Username to check discs for</param>
    /// <param name="outDir">Output directory to save data to</param>
    /// <returns>All disc IDs for the given user, empty on error</returns>
    public static async Task<List<int>> DownloadUser(RedumpClient rc, string? username, string? outDir)
    {
        List<int> ids = [];

        // Requires an authenticated client and a target user
        if (!rc.LoggedIn || string.IsNullOrEmpty(username))
        {
            Console.WriteLine("User download functionality is only available to Redump members");
            return ids;
        }

        // Keep getting user pages until there are none left
        int pageNumber = 1;
        while (true)
        {
            var pageIds = await rc.CheckSingleSitePage(string.Format(Constants.UserDumpsUrl, username, pageNumber++), outDir, false);
            ids.AddRange(pageIds);
            if (pageIds.Count == 0)
                break;
        }

        return ids;
    }

    /// <summary>
    /// Download the last modified disc pages associated with the given user, until first failure
    /// </summary>
    /// <param name="rc">RedumpClient for connectivity</param>
    /// <param name="username">Username to check discs for</param>
    /// <param name="outDir">Output directory to save data to</param>
    /// <returns>All disc IDs for the given user, empty on error</returns>
    public static async Task<List<int>> DownloadUserLastModified(RedumpClient rc, string? username, string? outDir)
    {
        List<int> ids = [];

        // Requires an authenticated client and a target user
        if (!rc.LoggedIn || string.IsNullOrEmpty(username))
        {
            Console.WriteLine("User download functionality is only available to Redump members");
            return ids;
        }

        // Keep getting last modified user pages until there are none left;
        // failOnSingle is true here so the walk stops at the first stale page
        int pageNumber = 1;
        while (true)
        {
            var pageIds = await rc.CheckSingleSitePage(string.Format(Constants.UserDumpsLastModifiedUrl, username, pageNumber++), outDir, true);
            ids.AddRange(pageIds);
            if (pageIds.Count == 0)
                break;
        }

        return ids;
    }

    /// <summary>
    /// List the disc IDs associated with the given user
    /// </summary>
    /// <param name="rc">RedumpClient for connectivity</param>
    /// <param name="username">Username to check discs for</param>
    /// <returns>All disc IDs for the given user, empty on error</returns>
    public static async Task<List<int>> ListUser(RedumpClient rc, string? username)
    {
        List<int> ids = [];

        // Requires an authenticated client and a target user
        if (!rc.LoggedIn || string.IsNullOrEmpty(username))
        {
            Console.WriteLine("User download functionality is only available to Redump members");
            return ids;
        }

        // Keep getting user pages until there are none left
        try
        {
            int pageNumber = 1;
            while (true)
            {
                var pageIds = await rc.CheckSingleSitePage(string.Format(Constants.UserDumpsUrl, username, pageNumber++));
                ids.AddRange(pageIds);

                // NOTE(review): stops when a page yields 0 or 1 IDs, while the download
                // variants above stop only on 0 — confirm the `<= 1` cutoff is intentional
                if (pageIds.Count <= 1)
                    break;
            }
        }
        catch (Exception ex)
        {
            // Message previously said "trying to log in" — copy-paste from the login path
            Console.WriteLine($"An exception occurred while listing user dumps: {ex}");
            return [];
        }

        return ids;
    }
}
}

View File

@@ -0,0 +1,52 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
namespace SabreTools.RedumpLib.Web
{
/// <summary>
/// Contains logic for dealing with WIP queue
/// </summary>
public static class WIP
{
    /// <summary>
    /// Download the last submitted WIP disc pages
    /// </summary>
    /// <param name="rc">RedumpClient for connectivity</param>
    /// <param name="outDir">Output directory to save data to</param>
    /// <returns>All disc IDs in last submitted range, empty on error</returns>
    public static async Task<List<int>> DownloadLastSubmitted(RedumpClient rc, string? outDir)
        => await rc.CheckSingleWIPPage(Constants.WipDumpsUrl, outDir, false);

    /// <summary>
    /// Download the specified range of WIP disc pages
    /// </summary>
    /// <param name="rc">RedumpClient for connectivity</param>
    /// <param name="outDir">Output directory to save data to</param>
    /// <param name="minId">Starting ID for the range</param>
    /// <param name="maxId">Ending ID for the range (inclusive)</param>
    /// <returns>All disc IDs in last submitted range, empty on error</returns>
    public static async Task<List<int>> DownloadWIPRange(RedumpClient rc, string? outDir, int minId = 0, int maxId = 0)
    {
        var found = new List<int>();

        // Staff-only endpoint; bail out with an empty result otherwise
        if (!rc.LoggedIn || !rc.IsStaff)
        {
            Console.WriteLine("WIP download functionality is only available to Redump moderators");
            return found;
        }

        // Walk the inclusive ID range, recording every ID that was attempted
        int current = minId;
        while (current <= maxId)
        {
            found.Add(current);
            bool downloaded = await rc.DownloadSingleWIPID(current, outDir, true);
            if (downloaded)
                DelayHelper.DelayRandom(); // Intentional sleep here so we don't flood the server
            current++;
        }

        return found;
    }
}
}

151
publish-nix.sh Executable file
View File

@@ -0,0 +1,151 @@
#!/usr/bin/env bash
# This shell script assumes the following:
# - .NET 9.0 (or newer) SDK is installed and in PATH
# - git is installed and in PATH (used for the commit hash)
# - zip is installed and in PATH (used for archiving)
#
# If any of these are not satisfied, the operation may fail
# in an unpredictable way and result in an incomplete output.

# Optional parameters
USE_ALL=false
INCLUDE_DEBUG=false
NO_BUILD=false
NO_ARCHIVE=false
while getopts "udba" OPTION; do
  case $OPTION in
    u)
      USE_ALL=true
      ;;
    d)
      INCLUDE_DEBUG=true
      ;;
    b)
      NO_BUILD=true
      ;;
    a)
      NO_ARCHIVE=true
      ;;
    *)
      echo "Invalid option provided" >&2
      exit 1
      ;;
  esac
done

# Set the current directory as a variable
BUILD_FOLDER=$PWD

# Set the current commit hash
COMMIT=$(git log --pretty=%H -1)

# Returns 0 when $1 is an exact element of the remaining arguments.
# Replaces the previous `echo ${arr[@]} | fgrep -w`, which could match
# across word boundaries (e.g. "net9" inside "net9.0").
contains() {
  local needle=$1 item
  shift
  for item in "$@"; do
    [[ "$item" == "$needle" ]] && return 0
  done
  return 1
}

# Output the selected options
echo "Selected Options:"
echo "  Use all frameworks (-u)               $USE_ALL"
echo "  Include debug builds (-d)             $INCLUDE_DEBUG"
echo "  No build (-b)                         $NO_BUILD"
echo "  No archive (-a)                       $NO_ARCHIVE"
echo " "

# Create the build matrix arrays
FRAMEWORKS=("net9.0")
RUNTIMES=("win-x86" "win-x64" "win-arm64" "linux-x64" "linux-arm64" "osx-x64" "osx-arm64")

# Use expanded lists, if requested
if [ "$USE_ALL" = true ]; then
  FRAMEWORKS=("net20" "net35" "net40" "net452" "net462" "net472" "net48" "netcoreapp3.1" "net5.0" "net6.0" "net7.0" "net8.0" "net9.0")
fi

# Create the filter arrays
SINGLE_FILE_CAPABLE=("net5.0" "net6.0" "net7.0" "net8.0" "net9.0")
VALID_APPLE_FRAMEWORKS=("net6.0" "net7.0" "net8.0" "net9.0")
VALID_CROSS_PLATFORM_FRAMEWORKS=("netcoreapp3.1" "net5.0" "net6.0" "net7.0" "net8.0" "net9.0")
VALID_CROSS_PLATFORM_RUNTIMES=("win-arm64" "linux-x64" "linux-arm64" "osx-x64" "osx-arm64")

# Only build if requested
if [ "$NO_BUILD" = false ]; then
  # Restore Nuget packages for all builds
  echo "Restoring Nuget packages"
  dotnet restore

  # Create Nuget Package
  dotnet pack SabreTools.RedumpLib/SabreTools.RedumpLib.csproj --output "$BUILD_FOLDER"

  # Build Tool
  for FRAMEWORK in "${FRAMEWORKS[@]}"; do
    for RUNTIME in "${RUNTIMES[@]}"; do
      # Output the current build
      echo "===== Build Tool - $FRAMEWORK, $RUNTIME ====="

      # If we have an invalid combination of framework and runtime
      if ! contains "$FRAMEWORK" "${VALID_CROSS_PLATFORM_FRAMEWORKS[@]}" \
          && contains "$RUNTIME" "${VALID_CROSS_PLATFORM_RUNTIMES[@]}"; then
        echo "Skipped due to invalid combination"
        continue
      fi

      # If we have Apple silicon but an unsupported framework
      if ! contains "$FRAMEWORK" "${VALID_APPLE_FRAMEWORKS[@]}" && [ "$RUNTIME" = "osx-arm64" ]; then
        echo "Skipped due to no Apple Silicon support"
        continue
      fi

      # Only .NET 5 and above can publish to a single file
      if contains "$FRAMEWORK" "${SINGLE_FILE_CAPABLE[@]}"; then
        # Only include Debug if set
        if [ "$INCLUDE_DEBUG" = true ]; then
          dotnet publish RedumpTool/RedumpTool.csproj -f "$FRAMEWORK" -r "$RUNTIME" -c Debug --self-contained true --version-suffix "$COMMIT" -p:PublishSingleFile=true
        fi
        dotnet publish RedumpTool/RedumpTool.csproj -f "$FRAMEWORK" -r "$RUNTIME" -c Release --self-contained true --version-suffix "$COMMIT" -p:PublishSingleFile=true -p:DebugType=None -p:DebugSymbols=false
      else
        # Only include Debug if set
        if [ "$INCLUDE_DEBUG" = true ]; then
          dotnet publish RedumpTool/RedumpTool.csproj -f "$FRAMEWORK" -r "$RUNTIME" -c Debug --self-contained true --version-suffix "$COMMIT"
        fi
        dotnet publish RedumpTool/RedumpTool.csproj -f "$FRAMEWORK" -r "$RUNTIME" -c Release --self-contained true --version-suffix "$COMMIT" -p:DebugType=None -p:DebugSymbols=false
      fi
    done
  done
fi

# Only create archives if requested
if [ "$NO_ARCHIVE" = false ]; then
  # Create Tool archives
  for FRAMEWORK in "${FRAMEWORKS[@]}"; do
    for RUNTIME in "${RUNTIMES[@]}"; do
      # Output the current build
      echo "===== Archive Tool - $FRAMEWORK, $RUNTIME ====="

      # If we have an invalid combination of framework and runtime
      if ! contains "$FRAMEWORK" "${VALID_CROSS_PLATFORM_FRAMEWORKS[@]}" \
          && contains "$RUNTIME" "${VALID_CROSS_PLATFORM_RUNTIMES[@]}"; then
        echo "Skipped due to invalid combination"
        continue
      fi

      # If we have Apple silicon but an unsupported framework
      if ! contains "$FRAMEWORK" "${VALID_APPLE_FRAMEWORKS[@]}" && [ "$RUNTIME" = "osx-arm64" ]; then
        echo "Skipped due to no Apple Silicon support"
        continue
      fi

      # cd must be checked: on failure, zip would otherwise archive the wrong directory
      # Only include Debug if set
      if [ "$INCLUDE_DEBUG" = true ]; then
        cd "$BUILD_FOLDER/RedumpTool/bin/Debug/${FRAMEWORK}/${RUNTIME}/publish/" || exit 1
        zip -r "$BUILD_FOLDER/RedumpTool_${FRAMEWORK}_${RUNTIME}_debug.zip" .
      fi
      cd "$BUILD_FOLDER/RedumpTool/bin/Release/${FRAMEWORK}/${RUNTIME}/publish/" || exit 1
      zip -r "$BUILD_FOLDER/RedumpTool_${FRAMEWORK}_${RUNTIME}_release.zip" .
    done
  done

  # Reset the directory
  cd "$BUILD_FOLDER" || exit 1
fi

135
publish-win.ps1 Normal file
View File

@@ -0,0 +1,135 @@
# This PowerShell script assumes the following:
# - .NET 9.0 (or newer) SDK is installed and in PATH
# - git is installed and in PATH (used below for the commit hash)
# - 7z is installed and in PATH (used below for archiving)
#
# If any of these are not satisfied, the operation may fail
# in an unpredictable way and result in an incomplete output.
# Optional parameters
param(
[Parameter(Mandatory = $false)]
[Alias("UseAll")]
[switch]$USE_ALL,
[Parameter(Mandatory = $false)]
[Alias("IncludeDebug")]
[switch]$INCLUDE_DEBUG,
[Parameter(Mandatory = $false)]
[Alias("NoBuild")]
[switch]$NO_BUILD,
[Parameter(Mandatory = $false)]
[Alias("NoArchive")]
[switch]$NO_ARCHIVE
)
# Set the current directory as a variable
$BUILD_FOLDER = $PSScriptRoot
# Set the current commit hash (used as the version suffix for publishes)
$COMMIT = git log --pretty=format:"%H" -1
# Output the selected options
Write-Host "Selected Options:"
Write-Host " Use all frameworks (-UseAll) $USE_ALL"
Write-Host " Include debug builds (-IncludeDebug) $INCLUDE_DEBUG"
Write-Host " No build (-NoBuild) $NO_BUILD"
Write-Host " No archive (-NoArchive) $NO_ARCHIVE"
Write-Host " "
# Create the build matrix arrays
$FRAMEWORKS = @('net9.0')
$RUNTIMES = @('win-x86', 'win-x64', 'win-arm64', 'linux-x64', 'linux-arm64', 'osx-x64', 'osx-arm64')
# Use expanded lists, if requested
if ($USE_ALL.IsPresent)
{
$FRAMEWORKS = @('net20', 'net35', 'net40', 'net452', 'net462', 'net472', 'net48', 'netcoreapp3.1', 'net5.0', 'net6.0', 'net7.0', 'net8.0', 'net9.0')
}
# Create the filter arrays
# SINGLE_FILE_CAPABLE: frameworks that support -p:PublishSingleFile=true
$SINGLE_FILE_CAPABLE = @('net5.0', 'net6.0', 'net7.0', 'net8.0', 'net9.0')
# VALID_APPLE_FRAMEWORKS: frameworks that can target osx-arm64
$VALID_APPLE_FRAMEWORKS = @('net6.0', 'net7.0', 'net8.0', 'net9.0')
# Cross-platform frameworks/runtimes: older frameworks only publish for win-x86/win-x64
$VALID_CROSS_PLATFORM_FRAMEWORKS = @('netcoreapp3.1', 'net5.0', 'net6.0', 'net7.0', 'net8.0', 'net9.0')
$VALID_CROSS_PLATFORM_RUNTIMES = @('win-arm64', 'linux-x64', 'linux-arm64', 'osx-x64', 'osx-arm64')
# Only build if requested
if (!$NO_BUILD.IsPresent)
{
# Restore Nuget packages for all builds
Write-Host "Restoring Nuget packages"
dotnet restore
# Create Nuget Package
dotnet pack SabreTools.RedumpLib\SabreTools.RedumpLib.csproj --output $BUILD_FOLDER
# Build Tool
foreach ($FRAMEWORK in $FRAMEWORKS) {
foreach ($RUNTIME in $RUNTIMES) {
# Output the current build
Write-Host "===== Build Tool - $FRAMEWORK, $RUNTIME ====="
# If we have an invalid combination of framework and runtime
if ($VALID_CROSS_PLATFORM_FRAMEWORKS -notcontains $FRAMEWORK -and $VALID_CROSS_PLATFORM_RUNTIMES -contains $RUNTIME) {
Write-Host "Skipped due to invalid combination"
continue
}
# If we have Apple silicon but an unsupported framework
if ($VALID_APPLE_FRAMEWORKS -notcontains $FRAMEWORK -and $RUNTIME -eq 'osx-arm64') {
Write-Host "Skipped due to no Apple Silicon support"
continue
}
# Only .NET 5 and above can publish to a single file
if ($SINGLE_FILE_CAPABLE -contains $FRAMEWORK) {
# Only include Debug if set
if ($INCLUDE_DEBUG.IsPresent) {
dotnet publish RedumpTool\RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true
}
# Release build strips debug symbols entirely
dotnet publish RedumpTool\RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true -p:DebugType=None -p:DebugSymbols=false
}
else {
# Only include Debug if set
if ($INCLUDE_DEBUG.IsPresent) {
dotnet publish RedumpTool\RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT
}
# Release build strips debug symbols entirely
dotnet publish RedumpTool\RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:DebugType=None -p:DebugSymbols=false
}
}
}
}
# Only create archives if requested
if (!$NO_ARCHIVE.IsPresent) {
# Create Tool archives
foreach ($FRAMEWORK in $FRAMEWORKS) {
foreach ($RUNTIME in $RUNTIMES) {
# Output the current build
Write-Host "===== Archive Tool - $FRAMEWORK, $RUNTIME ====="
# If we have an invalid combination of framework and runtime
# (same filters as the build loop so archive paths always exist)
if ($VALID_CROSS_PLATFORM_FRAMEWORKS -notcontains $FRAMEWORK -and $VALID_CROSS_PLATFORM_RUNTIMES -contains $RUNTIME) {
Write-Host "Skipped due to invalid combination"
continue
}
# If we have Apple silicon but an unsupported framework
if ($VALID_APPLE_FRAMEWORKS -notcontains $FRAMEWORK -and $RUNTIME -eq 'osx-arm64') {
Write-Host "Skipped due to no Apple Silicon support"
continue
}
# NOTE(review): Set-Location is unchecked; if a publish folder is missing,
# 7z archives whatever directory we are currently in — consider -ErrorAction Stop
# Only include Debug if set
if ($INCLUDE_DEBUG.IsPresent) {
Set-Location -Path $BUILD_FOLDER\RedumpTool\bin\Debug\${FRAMEWORK}\${RUNTIME}\publish\
7z a -tzip $BUILD_FOLDER\RedumpTool_${FRAMEWORK}_${RUNTIME}_debug.zip *
}
Set-Location -Path $BUILD_FOLDER\RedumpTool\bin\Release\${FRAMEWORK}\${RUNTIME}\publish\
7z a -tzip $BUILD_FOLDER\RedumpTool_${FRAMEWORK}_${RUNTIME}_release.zip *
}
}
# Reset the directory
Set-Location -Path $PSScriptRoot
}