Compare commits


57 Commits (1.4.1 ... 1.6.6)

Author | SHA1 | Message | Date
Matt Nadareski | f29a50c3b0 | Bump version | 2025-05-01 10:54:09 -04:00
Matt Nadareski | edf6e12371 | Update Nuget packages | 2025-05-01 10:53:21 -04:00
Matt Nadareski | 146b2780c5 | Fix skipped multiline site codes | 2025-05-01 10:51:26 -04:00
Matt Nadareski | 60ce7cbfa0 | Bump version | 2025-04-30 20:51:59 -04:00
Deterous | 39fe46a162 | Remove obsolete PhilipsCDiDigitalVideo system (#10) | 2025-04-17 08:04:02 -04:00
    * Remove obsolete PhilipsCDiDigitalVideo system
    * Delete obsolete system
Matt Nadareski | a8674a21e4 | Add 30 second timeout to web operations | 2025-04-13 21:20:00 -04:00
Matt Nadareski | 65f2d53a3f | Fix how conditions are used for references | 2025-02-25 21:16:16 -05:00
Matt Nadareski | 0bc869543a | Bump version | 2024-12-31 21:10:46 -05:00
Matt Nadareski | aa7d513d2c | Add Ring Perfect Audio Offset pseudo-tag | 2024-12-31 21:09:11 -05:00
Matt Nadareski | 3d35129529 | Update copyright | 2024-12-30 21:27:26 -05:00
Matt Nadareski | ec563938ba | Remove unnecessary action step | 2024-12-30 21:26:16 -05:00
Matt Nadareski | f0f3a1a194 | Bump version | 2024-12-28 13:52:24 -05:00
Deterous | 55f5262198 | Add new Protection pseudo site code (#9) | 2024-12-27 12:33:35 -05:00
    * Add Protection pseudo site tag
    * Use new sitecode in redumplib
Matt Nadareski | 1d247b1f6f | Use string comparison on tab replacement when possible | 2024-12-25 22:13:26 -05:00
Matt Nadareski | 32c57736ae | Duplicate write offset field for convenience (fixes #8) | 2024-12-25 22:03:55 -05:00
Deterous | 8ab312ba8b | Convert <Tab> (#7) | 2024-12-24 21:07:51 -05:00
Matt Nadareski | 3ea01ca933 | Ensure .NET versions are installed for testing | 2024-12-19 10:52:22 -05:00
Matt Nadareski | 27d99f7429 | Bump version | 2024-12-16 14:35:02 -05:00
Matt Nadareski | 8b147f2041 | Change empty language list message | 2024-12-16 14:33:22 -05:00
Matt Nadareski | 9c7a143d52 | Add to publish scripts, not rolling build | 2024-12-16 14:26:55 -05:00
Matt Nadareski | 30bbef7bba | Add RedumpTool as a non-building component | 2024-12-16 14:23:43 -05:00
Matt Nadareski | 17da564b00 | Fix old .NET support | 2024-12-16 14:22:07 -05:00
Matt Nadareski | 073176cccb | Update Models to 1.5.8 | 2024-12-16 14:21:53 -05:00
Matt Nadareski | 0434e63e34 | Allow symbols to be packed | 2024-12-16 14:21:34 -05:00
Matt Nadareski | 2b75eb44cd | Use publish script and update README | 2024-12-06 11:34:34 -05:00
Matt Nadareski | 10eecc866e | Bump version | 2024-12-05 22:11:48 -05:00
Matt Nadareski | 84fa2f93ea | Fix consecutive empty line logic | 2024-12-05 21:15:26 -05:00
Matt Nadareski | 5a92c0fc98 | Bump version | 2024-12-01 22:58:47 -05:00
Matt Nadareski | 4ffc1b3160 | Fix multi-newline formatting, add tests | 2024-12-01 22:44:12 -05:00
Matt Nadareski | ffa8f2b16e | Update ToDiscType and add tests | 2024-12-01 22:31:19 -05:00
Matt Nadareski | 70e3e074cc | Update some extensions, update tests | 2024-12-01 22:14:49 -05:00
Matt Nadareski | 4858b4e459 | None of these are TODOs on my part | 2024-12-01 21:32:45 -05:00
Matt Nadareski | 9495cd32c7 | Handle some TODO items | 2024-12-01 21:31:58 -05:00
Matt Nadareski | 071571870e | Add ToYesNo tests | 2024-12-01 21:20:47 -05:00
Matt Nadareski | f03cd40181 | Use automatic system name mapping | 2024-12-01 21:14:08 -05:00
Matt Nadareski | ea51726645 | Fill out more tests | 2024-12-01 21:09:15 -05:00
Matt Nadareski | f0633d5aa7 | Framework only matters for executable | 2024-11-30 21:39:44 -05:00
Matt Nadareski | 4c076aec0c | Update packages | 2024-11-30 21:38:41 -05:00
Matt Nadareski | 2ec9d6a4a0 | Use more targeted library for old .NET | 2024-11-18 19:53:44 -05:00
Matt Nadareski | 415b488005 | Bump version | 2024-11-14 22:23:20 -05:00
Matt Nadareski | 5d300c9975 | Make download helpers public for ease | 2024-11-14 22:22:52 -05:00
Matt Nadareski | 304236774f | Bump version | 2024-11-13 01:54:40 -05:00
Matt Nadareski | 9924289c48 | Fix casting issues | 2024-11-13 01:54:24 -05:00
Matt Nadareski | 240eb74ead | Bump version | 2024-11-13 01:30:42 -05:00
Matt Nadareski | a64b109d2c | Remove unncessary Linq usage | 2024-11-13 01:29:36 -05:00
Matt Nadareski | 3e0f9b5410 | Add .NET 9 to target frameworks | 2024-11-13 00:59:20 -05:00
Matt Nadareski | 668be418ac | Bump version | 2024-10-18 12:30:30 -04:00
Matt Nadareski | 7d184a634e | Always return ID list, if possible | 2024-10-18 12:26:36 -04:00
Matt Nadareski | 67aed0899d | Don't null foreign title if missing | 2024-10-18 11:58:29 -04:00
Matt Nadareski | 9fbaf1a187 | Bump version | 2024-10-04 01:42:58 -04:00
Matt Nadareski | fe8686a2bb | Allow forward slashes in queries sometimes | 2024-10-04 01:41:05 -04:00
Matt Nadareski | 652270c8c7 | Add publish scripts | 2024-10-01 13:56:40 -04:00
Matt Nadareski | 905d8a94fb | Bump version | 2024-10-01 13:55:33 -04:00
Matt Nadareski | 3ee8416695 | Remove unnecessary tuples | 2024-10-01 13:53:03 -04:00
Matt Nadareski | 49fa06da55 | Remove threading bridge package (unused) | 2024-10-01 04:27:31 -04:00
Matt Nadareski | 70e29afd89 | Remove Linq requirement from old .NET | 2024-10-01 04:25:35 -04:00
Matt Nadareski | 2a402a53db | Remove ValueTuple packages (usused) | 2024-10-01 03:21:07 -04:00

36 changed files with 5943 additions and 2923 deletions

View File

@@ -1,4 +1,4 @@
name: Nuget Pack
name: Build and Test
on:
push:
@@ -12,29 +12,26 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 8.0.x
- name: Restore dependencies
run: dotnet restore
- name: Pack
run: dotnet pack
dotnet-version: |
6.0.x
8.0.x
9.0.x
- name: Upload build
uses: actions/upload-artifact@v4
with:
name: 'Nuget Package'
path: 'SabreTools.RedumpLib/bin/Release/*.nupkg'
- name: Run tests
run: dotnet test
- name: Run publish script
run: ./publish-nix.sh -d
- name: Upload to rolling
uses: ncipollo/release-action@v1.14.0
with:
allowUpdates: True
artifacts: 'SabreTools.RedumpLib/bin/Release/*.nupkg'
artifacts: "*.nupkg,*.snupkg"
body: 'Last built commit: ${{ github.sha }}'
name: 'Rolling Release'
prerelease: True

View File

@@ -11,7 +11,13 @@ jobs:
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 8.0.x
dotnet-version: |
6.0.x
8.0.x
9.0.x
- name: Build
run: dotnet build
run: dotnet build
- name: Run tests
run: dotnet test

View File

@@ -1,5 +1,13 @@
# SabreTools.RedumpLib
[![Build and Test](https://github.com/SabreTools/SabreTools.RedumpLib/actions/workflows/build_and_test.yml/badge.svg)](https://github.com/SabreTools/SabreTools.RedumpLib/actions/workflows/build_and_test.yml)
This library provides interaction logic for [Redump](http://redump.org/). Because the site has no formal API, the library interacts with it through normal HTTP requests. It includes a fairly comprehensive reference of supported parts of the site, including URLs, page information, and packs.
The NuGet package is available [here](https://www.nuget.org/packages/SabreTools.RedumpLib).
## Releases
For the most recent stable build, download the latest release here: [Releases Page](https://github.com/SabreTools/SabreTools.RedumpLib/releases)
For the latest WIP build, download the rolling release here: [Rolling Release](https://github.com/SabreTools/SabreTools.RedumpLib/releases/rolling)
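
As a rough illustration of consuming the library from .NET, a minimal sketch follows. It is based on the Downloader usage shown in RedumpTool/Program.cs later in this compare; the property and method names are taken from that file, and the query value is purely illustrative:

    using System;
    using SabreTools.RedumpLib;
    using SabreTools.RedumpLib.Data;

    // Minimal sketch, assuming the Downloader surface shown in RedumpTool/Program.cs:
    // list the Redump IDs matching a quicksearch query instead of downloading pages.
    var downloader = new Downloader
    {
        Feature = Feature.Quicksearch,
        QueryString = "example query", // illustrative only
        OnlyList = true,               // list IDs; no output directory required
    };

    var ids = await downloader.Download();
    Console.WriteLine($"Matched IDs: {string.Join(", ", ids)}");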

RedumpTool/Program.cs (new file, 278 lines)
View File

@@ -0,0 +1,278 @@
using System;
using System.IO;
using SabreTools.RedumpLib;
using SabreTools.RedumpLib.Data;
namespace RedumpTool
{
public class Program
{
static void Main(string[] args)
{
// Show help if nothing is input
if (args == null || args.Length == 0)
{
ShowHelp();
return;
}
// Derive the feature, if possible
Feature feature = DeriveFeature(args[0]);
if (feature == Feature.NONE)
{
ShowHelp();
return;
}
// Create a new Downloader
var downloader = CreateDownloader(feature, args);
if (downloader == null)
{
ShowHelp();
return;
}
// Run the download task
var downloaderTask = downloader.Download();
downloaderTask.Wait();
// Get the downloader task results and print, if necessary
var downloaderResult = downloaderTask.Result;
if (downloaderResult.Count > 0)
{
string processedIds = string.Join(", ", [.. downloaderResult.ConvertAll(i => i.ToString())]);
Console.WriteLine($"Processed IDs: {processedIds}");
}
else if (downloaderResult.Count == 0 && downloader.Feature != Feature.Packs)
{
ShowHelp();
}
}
/// <summary>
/// Derive the feature from the supplied argument
/// </summary>
/// <param name="feature">Possible feature name to derive from</param>
/// <returns>Derived Feature value on success, Feature.NONE otherwise</returns>
private static Feature DeriveFeature(string feature)
{
return feature.ToLowerInvariant() switch
{
"site" => Feature.Site,
"wip" => Feature.WIP,
"packs" => Feature.Packs,
"user" => Feature.User,
"search" => Feature.Quicksearch,
"query" => Feature.Quicksearch,
_ => Feature.NONE,
};
}
/// <summary>
/// Create a Downloader from a feature and a set of arguments
/// </summary>
/// <param name="feature">Primary feature to use</param>
/// <param name="args">Arguments list to parse</param>
/// <returns>Initialized Downloader on success, null otherwise</returns>
private static Downloader? CreateDownloader(Feature feature, string[] args)
{
// Set temporary internal variables
string? outDir = null;
string? username = null;
string? password = null;
int minimumId = -1;
int maximumId = -1;
string? queryString = null;
bool useSubfolders = false;
bool onlyNew = false;
bool onlyList = false;
bool noSlash = false;
bool force = false;
// Now loop through all of the arguments
try
{
for (int i = 1; i < args.Length; i++)
{
switch (args[i])
{
// Output directory
case "-o":
case "--output":
outDir = args[++i].Trim('"');
break;
// Username
case "-u":
case "--username":
username = args[++i];
break;
// Password
case "-p":
case "--password":
password = args[++i];
break;
// Minimum Redump ID
case "-min":
case "--minimum":
if (!int.TryParse(args[++i], out minimumId))
minimumId = -1;
break;
// Maximum Redump ID
case "-max":
case "--maximum":
if (!int.TryParse(args[++i], out maximumId))
maximumId = -1;
break;
// Quicksearch text
case "-q":
case "--query":
queryString = args[++i];
break;
// Packs subfolders
case "-s":
case "--subfolders":
useSubfolders = true;
break;
// Use last modified
case "-n":
case "--onlynew":
onlyNew = true;
break;
// List instead of download
case "-l":
case "--list":
onlyList = true;
break;
// Don't filter forward slashes from queries
case "-ns":
case "--noslash":
noSlash = true;
break;
// Force continuation
case "-f":
case "--force":
force = true;
break;
// Everything else
default:
Console.WriteLine($"Unrecognized flag: {args[i]}");
break;
}
}
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return null;
}
// Output directory validation
if (!onlyList && string.IsNullOrEmpty(outDir))
{
Console.WriteLine("No output directory set!");
return null;
}
else if (!onlyList && !string.IsNullOrEmpty(outDir))
{
// Create the output directory, if it doesn't exist
try
{
if (!Directory.Exists(outDir))
Directory.CreateDirectory(outDir);
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return null;
}
}
// Range verification
if (feature == Feature.Site && !onlyNew && (minimumId < 0 || maximumId < 0))
{
Console.WriteLine("Please enter a valid range of Redump IDs");
return null;
}
else if (feature == Feature.WIP && !onlyNew && (minimumId < 0 || maximumId < 0))
{
Console.WriteLine("Please enter a valid range of WIP IDs");
return null;
}
// Query verification (and cleanup)
if (feature == Feature.Quicksearch && string.IsNullOrEmpty(queryString))
{
Console.WriteLine("Please enter a query for searching");
return null;
}
// Create and return the downloader
var downloader = new Downloader()
{
Feature = feature,
MinimumId = minimumId,
MaximumId = maximumId,
QueryString = queryString,
OutDir = outDir,
UseSubfolders = useSubfolders,
OnlyNew = onlyNew,
OnlyList = onlyList,
Force = force,
NoSlash = noSlash,
Username = username,
Password = password,
};
return downloader;
}
/// <summary>
/// Show the commandline help for the program
/// </summary>
private static void ShowHelp()
{
Console.WriteLine("RedumpTool - A Redump.org recovery tool");
Console.WriteLine();
Console.WriteLine("Usage: RedumpTool <feature> [options]");
Console.WriteLine();
Console.WriteLine("Common Options");
Console.WriteLine(" -o <folder>, --output <folder> - Set the base output directory");
Console.WriteLine(" -u <username>, --username <username> - Redump username");
Console.WriteLine(" -p <pass>, --password <pass> - Redump password");
Console.WriteLine();
Console.WriteLine("site - Download pages and related files from the main site");
Console.WriteLine(" -min <MinId>, --minimum <MinId> - Lower bound for page numbers (cannot be used with only new)");
Console.WriteLine(" -max <MaxId>, --maximum <MaxId> - Upper bound for page numbers (cannot be used with only new)");
Console.WriteLine(" -n, --onlynew - Use the last modified view (cannot be used with min and max)");
Console.WriteLine(" -f, --force - Force continuing downloads until user cancels (used with only new)");
Console.WriteLine();
Console.WriteLine("wip - Download pages and related files from the WIP list");
Console.WriteLine(" -min <MinId>, --minimum <MinId> - Lower bound for page numbers (cannot be used with only new)");
Console.WriteLine(" -max <MaxId>, --maximum <MaxId> - Upper bound for page numbers (cannot be used with only new)");
Console.WriteLine(" -n, --onlynew - Use the last modified view (cannot be used with min and max)");
Console.WriteLine();
Console.WriteLine("packs - Download available packs");
Console.WriteLine(" -s, --subfolders - Download packs to named subfolders");
Console.WriteLine();
Console.WriteLine("user - Download pages and related files for a particular user");
Console.WriteLine(" -n, --onlynew - Use the last modified view instead of sequential parsing");
Console.WriteLine(" -l, --list - Only list the page IDs for that user");
Console.WriteLine();
Console.WriteLine("query - Download pages and related files from a Redump-compatible query");
Console.WriteLine(" -q, --query - Redump-compatible query to run");
Console.WriteLine(" -l, --list - Only list the page IDs for that query");
Console.WriteLine(" -ns, --noslash - Don't replace forward slashes with '-'");
Console.WriteLine();
}
}
}
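
For reference, a typical invocation of the tool above might look like the following; the feature name and flags come from the ShowHelp output, while the specific values are illustrative only:

    RedumpTool site -min 1 -max 100 -o ./redump-pages -u someuser -p somepass

Per the validation in CreateDownloader, the site and wip features need either a -min/-max ID range or -n/--onlynew, and -o/--output is required whenever -l/--list is not set.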

View File

@@ -0,0 +1,33 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net20;net35;net40;net452;net462;net472;net48;netcoreapp3.1;net5.0;net6.0;net7.0;net8.0;net9.0</TargetFrameworks>
<OutputType>Exe</OutputType>
<CheckEolTargetFramework>false</CheckEolTargetFramework>
<IncludeSourceRevisionInInformationalVersion>false</IncludeSourceRevisionInInformationalVersion>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.6.6</Version>
</PropertyGroup>
<!-- Support All Frameworks -->
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR $(TargetFramework.StartsWith(`net4`))">
<RuntimeIdentifiers>win-x86;win-x64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(TargetFramework.StartsWith(`netcoreapp`)) OR $(TargetFramework.StartsWith(`net5`))">
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net6`)) OR $(TargetFramework.StartsWith(`net7`)) OR $(TargetFramework.StartsWith(`net8`)) OR $(TargetFramework.StartsWith(`net9`))">
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64;osx-arm64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(RuntimeIdentifier.StartsWith(`osx-arm`))">
<TargetFrameworks>net6.0;net7.0;net8.0;net9.0</TargetFrameworks>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\SabreTools.RedumpLib\SabreTools.RedumpLib.csproj" />
</ItemGroup>
</Project>

View File

@@ -2,6 +2,7 @@
using System;
using System.IO;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace SabreTools.RedumpLib.Test
@@ -10,7 +11,7 @@ namespace SabreTools.RedumpLib.Test
{
[Theory]
[InlineData("success_complete.json", false)]
[InlineData("success_invalid.json", false)] // Fully in valid returns a default object
[InlineData("success_invalid.json", false)] // Fully invalid returns a default object
[InlineData("success_partial.json", false)]
[InlineData("fail_invalid.json", true)]
public void CreateFromFileTest(string filename, bool expectNull)
@@ -24,5 +25,167 @@ namespace SabreTools.RedumpLib.Test
// Check for an expected result
Assert.Equal(expectNull, si == null);
}
[Fact]
public void EnsureAllSections_Null_Filled()
{
SubmissionInfo? si = null;
var actual = Builder.EnsureAllSections(si);
Assert.NotNull(actual);
Assert.NotNull(actual.CommonDiscInfo);
Assert.NotNull(actual.CommonDiscInfo.CommentsSpecialFields);
Assert.NotNull(actual.CommonDiscInfo.ContentsSpecialFields);
Assert.NotNull(actual.VersionAndEditions);
Assert.NotNull(actual.EDC);
Assert.NotNull(actual.ParentCloneRelationship);
Assert.NotNull(actual.Extras);
Assert.NotNull(actual.CopyProtection);
Assert.NotNull(actual.DumpersAndStatus);
Assert.NotNull(actual.TracksAndWriteOffsets);
Assert.NotNull(actual.SizeAndChecksums);
Assert.NotNull(actual.DumpingInfo);
Assert.NotNull(actual.Artifacts);
}
[Fact]
public void EnsureAllSections_Empty_Filled()
{
SubmissionInfo? si = new SubmissionInfo
{
CommonDiscInfo = null,
VersionAndEditions = null,
EDC = null,
ParentCloneRelationship = null,
Extras = null,
CopyProtection = null,
DumpersAndStatus = null,
TracksAndWriteOffsets = null,
SizeAndChecksums = null,
DumpingInfo = null,
Artifacts = null,
};
var actual = Builder.EnsureAllSections(si);
Assert.NotNull(actual);
Assert.NotNull(actual.CommonDiscInfo);
Assert.NotNull(actual.CommonDiscInfo.CommentsSpecialFields);
Assert.NotNull(actual.CommonDiscInfo.ContentsSpecialFields);
Assert.NotNull(actual.VersionAndEditions);
Assert.NotNull(actual.EDC);
Assert.NotNull(actual.ParentCloneRelationship);
Assert.NotNull(actual.Extras);
Assert.NotNull(actual.CopyProtection);
Assert.NotNull(actual.DumpersAndStatus);
Assert.NotNull(actual.TracksAndWriteOffsets);
Assert.NotNull(actual.SizeAndChecksums);
Assert.NotNull(actual.DumpingInfo);
Assert.NotNull(actual.Artifacts);
}
[Fact]
public void EnsureAllSections_Filled_Filled()
{
SubmissionInfo? si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection
{
CommentsSpecialFields = [],
ContentsSpecialFields = [],
},
VersionAndEditions = new VersionAndEditionsSection(),
EDC = new EDCSection(),
ParentCloneRelationship = new ParentCloneRelationshipSection(),
Extras = new ExtrasSection(),
CopyProtection = new CopyProtectionSection(),
DumpersAndStatus = new DumpersAndStatusSection(),
TracksAndWriteOffsets = new TracksAndWriteOffsetsSection(),
SizeAndChecksums = new SizeAndChecksumsSection(),
DumpingInfo = new DumpingInfoSection(),
Artifacts = [],
};
var actual = Builder.EnsureAllSections(si);
Assert.NotNull(actual);
Assert.NotNull(actual.CommonDiscInfo);
Assert.NotNull(actual.CommonDiscInfo.CommentsSpecialFields);
Assert.NotNull(actual.CommonDiscInfo.ContentsSpecialFields);
Assert.NotNull(actual.VersionAndEditions);
Assert.NotNull(actual.EDC);
Assert.NotNull(actual.ParentCloneRelationship);
Assert.NotNull(actual.Extras);
Assert.NotNull(actual.CopyProtection);
Assert.NotNull(actual.DumpersAndStatus);
Assert.NotNull(actual.TracksAndWriteOffsets);
Assert.NotNull(actual.SizeAndChecksums);
Assert.NotNull(actual.DumpingInfo);
Assert.NotNull(actual.Artifacts);
}
[Fact]
public void InjectSubmissionInformation_BothNull_Null()
{
SubmissionInfo? si = null;
SubmissionInfo? seed = null;
var actual = Builder.InjectSubmissionInformation(si, seed);
Assert.Null(actual);
}
[Fact]
public void InjectSubmissionInformation_ValidInputNullSeed_Valid()
{
SubmissionInfo? si = new SubmissionInfo();
SubmissionInfo? seed = null;
var actual = Builder.InjectSubmissionInformation(si, seed);
Assert.NotNull(actual);
}
[Fact]
public void InjectSubmissionInformation_BothValid_Valid()
{
SubmissionInfo? si = new SubmissionInfo();
SubmissionInfo? seed = new SubmissionInfo();
var actual = Builder.InjectSubmissionInformation(si, seed);
Assert.NotNull(actual);
}
[Fact]
public void ReplaceHtmlWithSiteCodes_EmptyString_Empty()
{
string original = string.Empty;
string actual = Builder.ReplaceHtmlWithSiteCodes(original);
Assert.Empty(actual);
}
[Fact]
public void ReplaceHtmlWithSiteCodes_NoReplace_Identical()
{
string original = "<p>Nothing here will be replaced</p>";
string actual = Builder.ReplaceHtmlWithSiteCodes(original);
Assert.Equal(original, actual);
}
[Fact]
public void ReplaceHtmlWithSiteCodes_StandardCode_Replaced()
{
string original = "<b>ISBN</b>: 000-0-00-000000-0";
string expected = "[T:ISBN] 000-0-00-000000-0";
string actual = Builder.ReplaceHtmlWithSiteCodes(original);
Assert.Equal(expected, actual);
}
[Fact]
public void ReplaceHtmlWithSiteCodes_OutdatedCode_Replaced()
{
string original = "XMID: AB12345C";
string expected = "<b>XMID</b>: AB12345C";
string actual = Builder.ReplaceHtmlWithSiteCodes(original);
Assert.Equal(expected, actual);
}
}
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,8 @@
namespace SabreTools.RedumpLib.Test
{
public class DownloaderTests
{
// Tests here will require installing and using the Moq library
// to mock the RedumpClient type.
}
}
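
The stub above notes that Downloader tests would need Moq to mock RedumpClient. A hedged sketch of that pattern is shown below; the IPageSource interface is illustrative only and stands in for whatever surface RedumpClient eventually exposes, since the real API is not part of this diff:

    using System.Threading.Tasks;
    using Moq;
    using Xunit;

    namespace SabreTools.RedumpLib.Test
    {
        // Illustrative stand-in for the RedumpClient surface a Downloader test would mock.
        public interface IPageSource
        {
            Task<string?> GetPage(int id);
        }

        public class DownloaderMockingSketch
        {
            [Fact]
            public async Task MockedPageSource_ReturnsCannedHtml()
            {
                // Arrange: return canned HTML for any requested ID
                var source = new Mock<IPageSource>();
                source.Setup(s => s.GetPage(It.IsAny<int>()))
                      .ReturnsAsync("<html>stub page</html>");

                // Act
                string? page = await source.Object.GetPage(42);

                // Assert
                Assert.Equal("<html>stub page</html>", page);
            }
        }
    }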

View File

@@ -1,201 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace SabreTools.RedumpLib.Test
{
public class EnumExtensionsTests
{
/// <summary>
/// MediaType values that support drive speeds
/// </summary>
private static readonly MediaType?[] _supportDriveSpeeds =
[
MediaType.CDROM,
MediaType.DVD,
MediaType.GDROM,
MediaType.HDDVD,
MediaType.BluRay,
MediaType.NintendoGameCubeGameDisc,
MediaType.NintendoWiiOpticalDisc,
];
/// <summary>
/// RedumpSystem values that are considered Audio
/// </summary>
private static readonly RedumpSystem?[] _audioSystems =
[
RedumpSystem.AtariJaguarCDInteractiveMultimediaSystem,
RedumpSystem.AudioCD,
RedumpSystem.DVDAudio,
RedumpSystem.HasbroiONEducationalGamingSystem,
RedumpSystem.HasbroVideoNow,
RedumpSystem.HasbroVideoNowColor,
RedumpSystem.HasbroVideoNowJr,
RedumpSystem.HasbroVideoNowXP,
RedumpSystem.PlayStationGameSharkUpdates,
RedumpSystem.PhilipsCDi,
RedumpSystem.SuperAudioCD,
];
/// <summary>
/// RedumpSystem values that are considered markers
/// </summary>
private static readonly RedumpSystem?[] _markerSystems =
[
RedumpSystem.MarkerArcadeEnd,
RedumpSystem.MarkerComputerEnd,
RedumpSystem.MarkerDiscBasedConsoleEnd,
RedumpSystem.MarkerOtherEnd,
];
/// <summary>
/// RedumpSystem values that are have reversed ringcodes
/// </summary>
private static readonly RedumpSystem?[] _reverseRingcodeSystems =
[
RedumpSystem.SonyPlayStation2,
RedumpSystem.SonyPlayStation3,
RedumpSystem.SonyPlayStation4,
RedumpSystem.SonyPlayStation5,
RedumpSystem.SonyPlayStationPortable,
];
/// <summary>
/// RedumpSystem values that are considered XGD
/// </summary>
private static readonly RedumpSystem?[] _xgdSystems =
[
RedumpSystem.MicrosoftXbox,
RedumpSystem.MicrosoftXbox360,
RedumpSystem.MicrosoftXboxOne,
RedumpSystem.MicrosoftXboxSeriesXS,
];
/// <summary>
/// Check that all systems with reversed ringcodes are marked properly
/// </summary>
/// <param name="redumpSystem">RedumpSystem value to check</param>
/// <param name="expected">The expected value to come from the check</param>
[Theory]
[MemberData(nameof(GenerateReversedRingcodeSystemsTestData))]
public void HasReversedRingcodesTest(RedumpSystem? redumpSystem, bool expected)
{
bool actual = redumpSystem.HasReversedRingcodes();
Assert.Equal(expected, actual);
}
/// <summary>
/// Check that all audio systems are marked properly
/// </summary>
/// <param name="redumpSystem">RedumpSystem value to check</param>
/// <param name="expected">The expected value to come from the check</param>
[Theory]
[MemberData(nameof(GenerateAudioSystemsTestData))]
public void IsAudioTest(RedumpSystem? redumpSystem, bool expected)
{
bool actual = redumpSystem.IsAudio();
Assert.Equal(expected, actual);
}
/// <summary>
/// Check that all marker systems are marked properly
/// </summary>
/// <param name="redumpSystem">RedumpSystem value to check</param>
/// <param name="expected">The expected value to come from the check</param>
[Theory]
[MemberData(nameof(GenerateMarkerSystemsTestData))]
public void IsMarkerTest(RedumpSystem? redumpSystem, bool expected)
{
bool actual = redumpSystem.IsMarker();
Assert.Equal(expected, actual);
}
/// <summary>
/// Check that all XGD systems are marked properly
/// </summary>
/// <param name="redumpSystem">RedumpSystem value to check</param>
/// <param name="expected">The expected value to come from the check</param>
[Theory]
[MemberData(nameof(GenerateXGDSystemsTestData))]
public void IsXGDTest(RedumpSystem? redumpSystem, bool expected)
{
bool actual = redumpSystem.IsXGD();
Assert.Equal(expected, actual);
}
/// <summary>
/// Generate a test set of RedumpSystem values that are considered Audio
/// </summary>
/// <returns>MemberData-compatible list of RedumpSystem values</returns>
public static List<object?[]> GenerateAudioSystemsTestData()
{
var testData = new List<object?[]>() { new object?[] { null, false } };
foreach (RedumpSystem redumpSystem in Enum.GetValues(typeof(RedumpSystem)))
{
if (_audioSystems.Contains(redumpSystem))
testData.Add([redumpSystem, true]);
else
testData.Add([redumpSystem, false]);
}
return testData;
}
/// <summary>
/// Generate a test set of RedumpSystem values that are considered markers
/// </summary>
/// <returns>MemberData-compatible list of RedumpSystem values</returns>
public static List<object?[]> GenerateMarkerSystemsTestData()
{
var testData = new List<object?[]>() { new object?[] { null, false } };
foreach (RedumpSystem redumpSystem in Enum.GetValues(typeof(RedumpSystem)))
{
if (_markerSystems.Contains(redumpSystem))
testData.Add([redumpSystem, true]);
else
testData.Add([redumpSystem, false]);
}
return testData;
}
/// <summary>
/// Generate a test set of RedumpSystem values that are considered markers
/// </summary>
/// <returns>MemberData-compatible list of RedumpSystem values</returns>
public static List<object?[]> GenerateReversedRingcodeSystemsTestData()
{
var testData = new List<object?[]>() { new object?[] { null, false } };
foreach (RedumpSystem redumpSystem in Enum.GetValues(typeof(RedumpSystem)))
{
if (_reverseRingcodeSystems.Contains(redumpSystem))
testData.Add([redumpSystem, true]);
else
testData.Add([redumpSystem, false]);
}
return testData;
}
/// <summary>
/// Generate a test set of RedumpSystem values that are considered XGD
/// </summary>
/// <returns>MemberData-compatible list of RedumpSystem values</returns>
public static List<object?[]> GenerateXGDSystemsTestData()
{
var testData = new List<object?[]>() { new object?[] { null, false } };
foreach (RedumpSystem redumpSystem in Enum.GetValues(typeof(RedumpSystem)))
{
if (_xgdSystems.Contains(redumpSystem))
testData.Add([redumpSystem, true]);
else
testData.Add([redumpSystem, false]);
}
return testData;
}
}
}

View File

@@ -1,717 +0,0 @@
using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace SabreTools.RedumpLib.Test
{
// TODO: Add tests for string-to-enum conversion
public class ExtensionsTests
{
#region Cross-Enumeration
/// <summary>
/// DiscType values that map to MediaType
/// </summary>
private static readonly DiscType?[] _mappableDiscTypes = new DiscType?[]
{
DiscType.BD25,
DiscType.BD33,
DiscType.BD50,
DiscType.BD66,
DiscType.BD100,
DiscType.BD128,
DiscType.CD,
DiscType.DVD5,
DiscType.DVD9,
DiscType.GDROM,
DiscType.HDDVDSL,
DiscType.HDDVDDL,
DiscType.NintendoGameCubeGameDisc,
DiscType.NintendoWiiOpticalDiscSL,
DiscType.NintendoWiiOpticalDiscDL,
DiscType.NintendoWiiUOpticalDiscSL,
DiscType.UMDSL,
DiscType.UMDDL,
};
/// <summary>
/// MediaType values that map to DiscType
/// </summary>
private static readonly MediaType?[] _mappableMediaTypes = new MediaType?[]
{
MediaType.BluRay,
MediaType.CDROM,
MediaType.DVD,
MediaType.GDROM,
MediaType.HDDVD,
MediaType.NintendoGameCubeGameDisc,
MediaType.NintendoWiiOpticalDisc,
MediaType.NintendoWiiUOpticalDisc,
MediaType.UMD,
};
/// <summary>
/// Check that every supported system has some set of MediaTypes supported
/// </summary>
/// <param name="redumpSystem">RedumpSystem value to check</param>
[Theory]
[MemberData(nameof(GenerateRedumpSystemMappingTestData))]
public void MediaTypesTest(RedumpSystem? redumpSystem)
{
var actual = redumpSystem.MediaTypes();
Assert.NotEmpty(actual);
}
/// <summary>
/// Check that both mappable and unmappable media types output correctly
/// </summary>
/// <param name="mediaType">MediaType value to check</param>
/// <param name="expectNull">True to expect a null mapping, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateMediaTypeMappingTestData))]
public void ToDiscTypeTest(MediaType? mediaType, bool expectNull)
{
DiscType? actual = mediaType.ToDiscType();
Assert.Equal(expectNull, actual == null);
}
/// <summary>
/// Check that DiscType values all map to something appropriate
/// </summary>
/// <param name="discType">DiscType value to check</param>
/// <param name="expectNull">True to expect a null mapping, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateDiscTypeMappingTestData))]
public void ToMediaTypeTest(DiscType? discType, bool expectNull)
{
MediaType? actual = discType.ToMediaType();
Assert.Equal(expectNull, actual == null);
}
/// <summary>
/// Generate a test set of DiscType values
/// </summary>
/// <returns>MemberData-compatible list of DiscType values</returns>
public static List<object?[]> GenerateDiscTypeMappingTestData()
{
var testData = new List<object?[]>() { new object?[] { null, true } };
foreach (DiscType? discType in Enum.GetValues(typeof(DiscType)))
{
if (_mappableDiscTypes.Contains(discType))
testData.Add(new object?[] { discType, false });
else
testData.Add(new object?[] { discType, true });
}
return testData;
}
/// <summary>
/// Generate a test set of RedumpSystem values
/// </summary>
/// <returns>MemberData-compatible list of RedumpSystem values</returns>
public static List<object?[]> GenerateRedumpSystemMappingTestData()
{
var testData = new List<object?[]>() { new object?[] { null } };
foreach (RedumpSystem? redumpSystem in Enum.GetValues(typeof(RedumpSystem)))
{
testData.Add(new object?[] { redumpSystem });
}
return testData;
}
/// <summary>
/// Generate a test set of mappable media types
/// </summary>
/// <returns>MemberData-compatible list of MediaTypes</returns>
public static List<object?[]> GenerateMediaTypeMappingTestData()
{
var testData = new List<object?[]>() { new object?[] { null, true } };
foreach (MediaType? mediaType in Enum.GetValues(typeof(MediaType)))
{
if (_mappableMediaTypes.Contains(mediaType))
testData.Add(new object?[] { mediaType, false });
else
testData.Add(new object?[] { mediaType, true });
}
return testData;
}
#endregion
#region Disc Category
/// <summary>
/// Check that every DiscCategory has a long name provided
/// </summary>
/// <param name="discCategory">DiscCategory value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateDiscCategoryTestData))]
public void DiscCategoryLongNameTest(DiscCategory? discCategory, bool expectNull)
{
var actual = discCategory.LongName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
/// <summary>
/// Generate a test set of DiscCategory values
/// </summary>
/// <returns>MemberData-compatible list of DiscCategory values</returns>
public static List<object?[]> GenerateDiscCategoryTestData()
{
var testData = new List<object?[]>() { new object?[] { null, true } };
foreach (DiscCategory? discCategory in Enum.GetValues(typeof(DiscCategory)))
{
testData.Add(new object?[] { discCategory, false });
}
return testData;
}
#endregion
#region Disc Type
/// <summary>
/// Check that every DiscType has a long name provided
/// </summary>
/// <param name="discType">DiscType value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateDiscTypeTestData))]
public void DiscTypeLongNameTest(DiscType? discType, bool expectNull)
{
var actual = discType.LongName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
/// <summary>
/// Generate a test set of DiscType values
/// </summary>
/// <returns>MemberData-compatible list of DiscType values</returns>
public static List<object?[]> GenerateDiscTypeTestData()
{
var testData = new List<object?[]>() { new object?[] { null, true } };
foreach (DiscType? discType in Enum.GetValues(typeof(DiscType)))
{
if (discType == DiscType.NONE)
testData.Add(new object?[] { discType, true });
else
testData.Add(new object?[] { discType, false });
}
return testData;
}
#endregion
#region Language
/// <summary>
/// Check that every Language has a long name provided
/// </summary>
/// <param name="language">Language value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateLanguageTestData))]
public void LanguageLongNameTest(Language? language, bool expectNull)
{
var actual = language.LongName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
/// <summary>
/// Check that every Language has a short name provided
/// </summary>
/// <param name="language">Language value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateLanguageTestData))]
public void LanguageShortNameTest(Language? language, bool expectNull)
{
var actual = language.ShortName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
/// <summary>
/// Ensure that every Language that has an ISO 639-1 code is unique
/// </summary>
[Fact]
public void LanguageNoDuplicateTwoLetterCodeTest()
{
var fullLanguages = Enum.GetValues(typeof(Language)).Cast<Language?>().ToList();
var filteredLanguages = new Dictionary<string, Language?>();
int totalCount = 0;
foreach (Language? language in fullLanguages)
{
var code = language.TwoLetterCode();
if (string.IsNullOrEmpty(code))
continue;
// Throw if the code already exists
if (filteredLanguages.ContainsKey(code))
throw new DuplicateNameException($"Code {code} already in dictionary");
filteredLanguages[code] = language;
totalCount++;
}
Assert.Equal(totalCount, filteredLanguages.Count);
}
/// <summary>
/// Ensure that every Language that has a standard/bibliographic ISO 639-2 code is unique
/// </summary>
[Fact]
public void LanguageNoDuplicateThreeLetterCodeTest()
{
var fullLanguages = Enum.GetValues(typeof(Language)).Cast<Language?>().ToList();
var filteredLanguages = new Dictionary<string, Language?>();
int totalCount = 0;
foreach (Language? language in fullLanguages)
{
var code = language.ThreeLetterCode();
if (string.IsNullOrEmpty(code))
continue;
// Throw if the code already exists
if (filteredLanguages.ContainsKey(code))
throw new DuplicateNameException($"Code {code} already in dictionary");
filteredLanguages[code] = language;
totalCount++;
}
Assert.Equal(totalCount, filteredLanguages.Count);
}
/// <summary>
/// Ensure that every Language that has a terminology ISO 639-2 code is unique
/// </summary>
[Fact]
public void LanguageNoDuplicateThreeLetterCodeAltTest()
{
var fullLanguages = Enum.GetValues(typeof(Language)).Cast<Language?>().ToList();
var filteredLanguages = new Dictionary<string, Language?>();
int totalCount = 0;
foreach (Language? language in fullLanguages)
{
var code = language.ThreeLetterCodeAlt();
if (string.IsNullOrEmpty(code))
continue;
// Throw if the code already exists
if (filteredLanguages.ContainsKey(code))
throw new DuplicateNameException($"Code {code} already in dictionary");
filteredLanguages[code] = language;
totalCount++;
}
Assert.Equal(totalCount, filteredLanguages.Count);
}
/// <summary>
/// Generate a test set of Language values
/// </summary>
/// <returns>MemberData-compatible list of Language values</returns>
public static List<object?[]> GenerateLanguageTestData()
{
var testData = new List<object?[]>() { new object?[] { null, true } };
foreach (Language? language in Enum.GetValues(typeof(Language)))
{
testData.Add(new object?[] { language, false });
}
return testData;
}
#endregion
#region Language Selection
/// <summary>
/// Check that every LanguageSelection has a long name provided
/// </summary>
/// <param name="languageSelection">LanguageSelection value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateLanguageSelectionTestData))]
public void LanguageSelectionLongNameTest(LanguageSelection? languageSelection, bool expectNull)
{
var actual = languageSelection.LongName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
/// <summary>
/// Generate a test set of LanguageSelection values
/// </summary>
/// <returns>MemberData-compatible list of LanguageSelection values</returns>
public static List<object?[]> GenerateLanguageSelectionTestData()
{
var testData = new List<object?[]>() { new object?[] { null, true } };
foreach (LanguageSelection? languageSelection in Enum.GetValues(typeof(LanguageSelection)))
{
testData.Add(new object?[] { languageSelection, false });
}
return testData;
}
#endregion
#region Media Type
/// <summary>
/// Check that every MediaType has a long name provided
/// </summary>
/// <param name="mediaType">MediaType value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateMediaTypeTestData))]
public void MediaTypeLongNameTest(MediaType? mediaType, bool expectNull)
{
var actual = mediaType.LongName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
/// <summary>
/// Check that every MediaType has a short name provided
/// </summary>
/// <param name="mediaType">MediaType value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateMediaTypeTestData))]
public void MediaTypeShortNameTest(MediaType? mediaType, bool expectNull)
{
var actual = mediaType.ShortName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
/// <summary>
/// Generate a test set of MediaType values
/// </summary>
/// <returns>MemberData-compatible list of MediaType values</returns>
public static List<object?[]> GenerateMediaTypeTestData()
{
var testData = new List<object?[]>() { new object?[] { null, true } };
foreach (MediaType? mediaType in Enum.GetValues(typeof(MediaType)))
{
testData.Add(new object?[] { mediaType, false });
}
return testData;
}
#endregion
#region Region
/// <summary>
/// Check that every Region has a long name provided
/// </summary>
/// <param name="region">Region value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateRegionTestData))]
public void RegionLongNameTest(Region? region, bool expectNull)
{
var actual = region.LongName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
/// <summary>
/// Check that every Region has a short name provided
/// </summary>
/// <param name="region">Region value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateRegionTestData))]
public void RegionShortNameTest(Region? region, bool expectNull)
{
var actual = region.ShortName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
/// <summary>
/// Ensure that every Language that has an ISO 639-1 code is unique
/// </summary>
[Fact]
public void RegionNoDuplicateShortNameTest()
{
var fullRegions = Enum.GetValues(typeof(Region)).Cast<Region?>().ToList();
var filteredRegions = new Dictionary<string, Region?>();
int totalCount = 0;
foreach (Region? region in fullRegions)
{
var code = region.ShortName();
if (string.IsNullOrEmpty(code))
continue;
// Throw if the code already exists
if (filteredRegions.ContainsKey(code))
throw new DuplicateNameException($"Code {code} already in dictionary");
filteredRegions[code] = region;
totalCount++;
}
Assert.Equal(totalCount, filteredRegions.Count);
}
/// <summary>
/// Generate a test set of Region values
/// </summary>
/// <returns>MemberData-compatible list of Region values</returns>
public static List<object?[]> GenerateRegionTestData()
{
var testData = new List<object?[]>() { new object?[] { null, true } };
foreach (Region? region in Enum.GetValues(typeof(Region)))
{
testData.Add(new object?[] { region, false });
}
return testData;
}
#endregion
#region Site Code
/// <summary>
/// Check that every SiteCode has a long name provided
/// </summary>
/// <param name="siteCode">SiteCode value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateSiteCodeTestData))]
public void SiteCodeLongNameTest(SiteCode? siteCode, bool expectNull)
{
var actual = siteCode.LongName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
/// <summary>
/// Check that every SiteCode has a short name provided
/// </summary>
/// <param name="siteCode">SiteCode value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateSiteCodeTestData))]
public void SiteCodeShortNameTest(SiteCode? siteCode, bool expectNull)
{
var actual = siteCode.ShortName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
/// <summary>
/// Generate a test set of SiteCode values
/// </summary>
/// <returns>MemberData-compatible list of SiteCode values</returns>
public static List<object?[]> GenerateSiteCodeTestData()
{
var testData = new List<object?[]>() { new object?[] { null, true } };
foreach (SiteCode? siteCode in Enum.GetValues(typeof(SiteCode)))
{
testData.Add(new object?[] { siteCode, false });
}
return testData;
}
#endregion
#region System
/// <summary>
/// Check that every RedumpSystem has a long name provided
/// </summary>
/// <param name="redumpSystem">RedumpSystem value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateRedumpSystemTestData))]
public void RedumpSystemLongNameTest(RedumpSystem? redumpSystem, bool expectNull)
{
var actual = redumpSystem.LongName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
// TODO: Re-enable the following test once non-Redump systems are accounted for
/// <summary>
/// Check that every RedumpSystem has a short name provided
/// </summary>
/// <param name="redumpSystem">RedumpSystem value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
//[Theory]
//[MemberData(nameof(GenerateRedumpSystemTestData))]
//public void RedumpSystemShortNameTest(RedumpSystem? redumpSystem, bool expectNull)
//{
// string actual = redumpSystem.ShortName();
// if (expectNull)
// Assert.Null(actual);
// else
// Assert.NotNull(actual);
//}
// TODO: Test the other attributes as well
// Most are bool checks so they're not as interesting to have unit tests around
// SystemCategory always returns something as well, so is it worth testing?
/// <summary>
/// Generate a test set of RedumpSystem values
/// </summary>
/// <returns>MemberData-compatible list of RedumpSystem values</returns>
public static List<object?[]> GenerateRedumpSystemTestData()
{
var testData = new List<object?[]>() { new object?[] { null, true } };
foreach (RedumpSystem? redumpSystem in Enum.GetValues(typeof(RedumpSystem)))
{
// We want to skip all markers for this
if (redumpSystem.IsMarker())
continue;
testData.Add(new object?[] { redumpSystem, false });
}
return testData;
}
#endregion
#region System Category
/// <summary>
/// Check that every SystemCategory has a long name provided
/// </summary>
/// <param name="systemCategory">SystemCategory value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateSystemCategoryTestData))]
public void SystemCategoryLongNameTest(SystemCategory? systemCategory, bool expectNull)
{
var actual = systemCategory.LongName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
/// <summary>
/// Generate a test set of SystemCategory values
/// </summary>
/// <returns>MemberData-compatible list of SystemCategory values</returns>
public static List<object?[]> GenerateSystemCategoryTestData()
{
var testData = new List<object?[]>() { new object?[] { null, true } };
foreach (SystemCategory? systemCategory in Enum.GetValues(typeof(SystemCategory)))
{
if (systemCategory == SystemCategory.NONE)
testData.Add(new object?[] { systemCategory, true });
else
testData.Add(new object?[] { systemCategory, false });
}
return testData;
}
#endregion
#region Yes/No
/// <summary>
/// Check that every YesNo has a long name provided
/// </summary>
/// <param name="yesNo">YesNo value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateYesNoTestData))]
public void YesNoLongNameTest(YesNo? yesNo, bool expectNull)
{
string actual = yesNo.LongName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
/// <summary>
/// Generate a test set of YesNo values
/// </summary>
/// <returns>MemberData-compatible list of YesNo values</returns>
public static List<object?[]> GenerateYesNoTestData()
{
var testData = new List<object?[]>() { new object?[] { null, false } };
foreach (YesNo? yesNo in Enum.GetValues(typeof(YesNo)))
{
testData.Add(new object?[] { yesNo, false });
}
return testData;
}
#endregion
}
}

View File

@@ -0,0 +1,877 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace SabreTools.RedumpLib.Test
{
public class FormatterTests
{
#region ProcessSpecialFields
// TODO: Write tests for ProcessSpecialFields
#endregion
#region CommonDiscInfoSection
// TODO: Write tests for FormatOutputData(CommonDiscInfoSection)
[Fact]
public void FormatOutputData_CDINullSACNullTAWONull_Minimal()
{
string expected = "Common Disc Info:\n\tRegion: SPACE! (CHANGE THIS)\n\tLanguages: ADD LANGUAGES HERE (ONLY IF YOU TESTED)\n\n\tRingcode Information:\n\n\n";
var builder = new StringBuilder();
CommonDiscInfoSection? section = null;
SizeAndChecksumsSection? sac = null;
TracksAndWriteOffsetsSection? tawo = null;
int? fullyMatchedID = null;
List<int>? partiallyMatchedIDs = null;
Formatter.FormatOutputData(builder,
section,
sac,
tawo,
fullyMatchedID,
partiallyMatchedIDs);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region VersionAndEditionsSection
[Fact]
public void FormatOutputData_VAENull_Minimal()
{
string expected = "Version and Editions:\n";
var builder = new StringBuilder();
VersionAndEditionsSection? section = null;
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_VAE_Formatted()
{
string expected = "Version and Editions:\n\tVersion: XXXXXX\n\tEdition/Release: XXXXXX\n";
var builder = new StringBuilder();
VersionAndEditionsSection? section = new VersionAndEditionsSection
{
Version = "XXXXXX",
OtherEditions = "XXXXXX",
};
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region EDCSection
[Fact]
public void FormatOutputData_EDCNull_Minimal()
{
string expected = "EDC:\n";
var builder = new StringBuilder();
EDCSection? section = null;
RedumpSystem? system = RedumpSystem.SonyPlayStation;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_EDCInvalidSystem_Empty()
{
string expected = string.Empty;
var builder = new StringBuilder();
EDCSection? section = null;
RedumpSystem? system = RedumpSystem.IBMPCcompatible;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_EDC_Formatted()
{
string expected = "EDC:\n\tEDC: Yes\n";
var builder = new StringBuilder();
EDCSection? section = new EDCSection { EDC = YesNo.Yes };
RedumpSystem? system = RedumpSystem.SonyPlayStation;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region ExtrasSection
[Fact]
public void FormatOutputData_ExtrasNull_Empty()
{
string expected = string.Empty;
var builder = new StringBuilder();
ExtrasSection? section = null;
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_ExtrasInvalid_Empty()
{
string expected = string.Empty;
var builder = new StringBuilder();
ExtrasSection? section = new ExtrasSection
{
PVD = null,
PIC = null,
BCA = null,
SecuritySectorRanges = null,
};
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_Extras_Formatted()
{
string expected = "Extras:\n\tPrimary Volume Descriptor (PVD): XXXXXX\n\tDisc Key: XXXXXX\n\tDisc ID: XXXXXX\n\tPermanent Information & Control (PIC): XXXXXX\n\tHeader: XXXXXX\n\tBCA: XXXXXX\n\tSecurity Sector Ranges: XXXXXX\n";
var builder = new StringBuilder();
ExtrasSection? section = new ExtrasSection
{
PVD = "XXXXXX",
DiscKey = "XXXXXX",
DiscID = "XXXXXX",
PIC = "XXXXXX",
Header = "XXXXXX",
BCA = "XXXXXX",
SecuritySectorRanges = "XXXXXX",
};
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region CopyProtectionSection
[Fact]
public void FormatOutputData_COPNull_Empty()
{
string expected = string.Empty;
var builder = new StringBuilder();
CopyProtectionSection? section = null;
RedumpSystem? system = RedumpSystem.IBMPCcompatible;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_COPInvalid_Empty()
{
string expected = string.Empty;
var builder = new StringBuilder();
CopyProtectionSection? section = new CopyProtectionSection
{
Protection = null,
AntiModchip = null,
LibCrypt = null,
LibCryptData = null,
SecuROMData = null,
};
RedumpSystem? system = RedumpSystem.IBMPCcompatible;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_COP_Formatted()
{
string expected = "Copy Protection:\n\tCopy Protection: XXXXXX\n\tSubIntention Data (SecuROM/LibCrypt): XXXXXX\n";
var builder = new StringBuilder();
CopyProtectionSection? section = new CopyProtectionSection
{
AntiModchip = YesNo.Yes,
LibCrypt = YesNo.Yes,
LibCryptData = "XXXXXX",
Protection = "XXXXXX",
SecuROMData = "XXXXXX",
};
RedumpSystem? system = RedumpSystem.IBMPCcompatible;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_COPPSX_Formatted()
{
string expected = "Copy Protection:\n\tAnti-modchip: Yes\n\tLibCrypt: Yes\n\tSubIntention Data (SecuROM/LibCrypt): XXXXXX\n\tCopy Protection: XXXXXX\n\tSubIntention Data (SecuROM/LibCrypt): XXXXXX\n";
var builder = new StringBuilder();
CopyProtectionSection? section = new CopyProtectionSection
{
AntiModchip = YesNo.Yes,
LibCrypt = YesNo.Yes,
LibCryptData = "XXXXXX",
Protection = "XXXXXX",
SecuROMData = "XXXXXX",
};
RedumpSystem? system = RedumpSystem.SonyPlayStation;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region TracksAndWriteOffsetsSection
[Fact]
public void FormatOutputData_TAWOInvalid_Minimal()
{
string expected = "Tracks and Write Offsets:\n\tDAT:\n\n\n\n\n";
var builder = new StringBuilder();
TracksAndWriteOffsetsSection? section = new TracksAndWriteOffsetsSection();
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_TAWO_Formatted()
{
string expected = "Tracks and Write Offsets:\n\tDAT:\n\nXXXXXX\n\n\n\tCuesheet: XXXXXX\n\tWrite Offset: XXXXXX\n";
var builder = new StringBuilder();
TracksAndWriteOffsetsSection? section = new TracksAndWriteOffsetsSection
{
ClrMameProData = "XXXXXX",
Cuesheet = "XXXXXX",
OtherWriteOffsets = "XXXXXX",
};
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region SizeAndChecksumsSection
// TODO: Write tests for FormatOutputData(SizeAndChecksumsSection)
#endregion
#region DumpingInfoSection
[Fact]
public void FormatOutputData_DINull_Minimal()
{
string expected = "Dumping Info:\n";
var builder = new StringBuilder();
DumpingInfoSection? section = null;
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_DI_Formatted()
{
string expected = "Dumping Info:\n\tFrontend Version: XXXXXX\n\tDumping Program: XXXXXX\n\tDate: XXXXXX\n\tParameters: XXXXXX\n\tManufacturer: XXXXXX\n\tModel: XXXXXX\n\tFirmware: XXXXXX\n\tReported Disc Type: XXXXXX\n\tC2 Error Count: XXXXXX\n";
var builder = new StringBuilder();
DumpingInfoSection? section = new DumpingInfoSection
{
FrontendVersion = "XXXXXX",
DumpingProgram = "XXXXXX",
DumpingDate = "XXXXXX",
DumpingParameters = "XXXXXX",
Manufacturer = "XXXXXX",
Model = "XXXXXX",
Firmware = "XXXXXX",
ReportedDiscType = "XXXXXX",
C2ErrorsCount = "XXXXXX",
};
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region FormatSiteTag
[Fact]
public void FormatSiteTag_NoValue_Empty()
{
SiteCode code = SiteCode.AlternativeTitle;
string value = string.Empty;
string actual = Formatter.FormatSiteTag(code, value);
Assert.Empty(actual);
}
[Fact]
public void FormatSiteTag_Standard_Formatted()
{
string expected = "[T:ALT] XXXXXX";
SiteCode code = SiteCode.AlternativeTitle;
string value = "XXXXXX";
string actual = Formatter.FormatSiteTag(code, value);
Assert.Equal(expected, actual);
}
[Fact]
public void FormatSiteTag_BooleanTrue_Formatted()
{
string expected = "[T:VCD]";
SiteCode code = SiteCode.VCD;
string value = "True";
string actual = Formatter.FormatSiteTag(code, value);
Assert.Equal(expected, actual);
}
[Fact]
public void FormatSiteTag_BooleanFalse_Empty()
{
SiteCode code = SiteCode.VCD;
string value = "XXXXXX";
string actual = Formatter.FormatSiteTag(code, value);
Assert.Empty(actual);
}
[Fact]
public void FormatSiteTag_Multiline_Formatted()
{
string expected = "[T:X]\nXXXXXX\n";
SiteCode code = SiteCode.Extras;
string value = "XXXXXX";
string actual = Formatter.FormatSiteTag(code, value);
Assert.Equal(expected, actual);
}
#endregion
#region GetFixedMediaType
[Fact]
public void GetFixedMediaType_NullType_Null()
{
MediaType? mediaType = null;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Null(actual);
}
[Fact]
public void GetFixedMediaType_UnformattedType_Formatted()
{
string? expected = "CD-ROM";
MediaType? mediaType = MediaType.CDROM;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_DVD9_Formatted()
{
string? expected = "DVD-ROM-9";
MediaType? mediaType = MediaType.DVD;
string? picIdentifier = null;
long? size = null;
long? layerbreak = 12345;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_DVD5_Formatted()
{
string? expected = "DVD-ROM-5";
MediaType? mediaType = MediaType.DVD;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD128_Formatted()
{
string? expected = "BD-ROM-128";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = 12345;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD100_Formatted()
{
string? expected = "BD-ROM-100";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = 12345;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD66PIC_Formatted()
{
string? expected = "BD-ROM-66";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = Models.PIC.Constants.DiscTypeIdentifierROMUltra;
long? size = null;
long? layerbreak = 12345;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD66Size_Formatted()
{
string? expected = "BD-ROM-66";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = null;
long? size = 53_687_063_713;
long? layerbreak = 12345;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD50_Formatted()
{
string? expected = "BD-ROM-50";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = null;
long? size = null;
long? layerbreak = 12345;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD33PIC_Formatted()
{
string? expected = "BD-ROM-33";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = Models.PIC.Constants.DiscTypeIdentifierROMUltra;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD33Size_Formatted()
{
string? expected = "BD-ROM-33";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = null;
long? size = 26_843_531_857;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD25_Formatted()
{
string? expected = "BD-ROM-25";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_HDDVDDL_Formatted()
{
string? expected = "HD-DVD-ROM-DL";
MediaType? mediaType = MediaType.HDDVD;
string? picIdentifier = null;
long? size = null;
long? layerbreak = 12345;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_HDDVDSL_Formatted()
{
string? expected = "HD-DVD-ROM-SL";
MediaType? mediaType = MediaType.HDDVD;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_UMDDL_Formatted()
{
string? expected = "UMD-DL";
MediaType? mediaType = MediaType.UMD;
string? picIdentifier = null;
long? size = null;
long? layerbreak = 12345;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_UMDSL_Formatted()
{
string? expected = "UMD-SL";
MediaType? mediaType = MediaType.UMD;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
#endregion
#region OrderCommentTags
[Fact]
public void OrderCommentTags_Empty_Empty()
{
Dictionary<SiteCode, string> tags = [];
var actual = Formatter.OrderCommentTags(tags);
Assert.Empty(actual);
}
[Fact]
public void OrderCommentTags_NoMatch_Empty()
{
var tags = new Dictionary<SiteCode, string>
{
{ SiteCode.Applications, "XXXXXX" },
};
var actual = Formatter.OrderCommentTags(tags);
Assert.Empty(actual);
}
[Fact]
public void OrderCommentTags_All_Ordered()
{
Dictionary<SiteCode, string> tags = [];
foreach (SiteCode code in Enum.GetValues<SiteCode>())
{
tags[code] = "XXXXXX";
}
var actual = Formatter.OrderCommentTags(tags);
Assert.NotEmpty(actual);
var actualCodes = actual.Select(kvp => kvp.Key);
Assert.True(Formatter.OrderedCommentCodes.SequenceEqual(actualCodes));
}
#endregion
#region OrderContentTags
[Fact]
public void OrderContentTags_Empty_Empty()
{
Dictionary<SiteCode, string> tags = [];
var actual = Formatter.OrderContentTags(tags);
Assert.Empty(actual);
}
[Fact]
public void OrderContentTags_NoMatch_Empty()
{
var tags = new Dictionary<SiteCode, string>
{
{ SiteCode.AlternativeTitle, "XXXXXX" },
};
var actual = Formatter.OrderContentTags(tags);
Assert.Empty(actual);
}
[Fact]
public void OrderContentTags_All_Ordered()
{
Dictionary<SiteCode, string> tags = [];
foreach (SiteCode code in Enum.GetValues<SiteCode>())
{
tags[code] = "XXXXXX";
}
var actual = Formatter.OrderContentTags(tags);
Assert.NotEmpty(actual);
var actualCodes = actual.Select(kvp => kvp.Key);
Assert.True(Formatter.OrderedContentCodes.SequenceEqual(actualCodes));
}
#endregion
#region RemoveConsecutiveEmptyLines
[Fact]
public void RemoveConsecutiveEmptyLines_Linux_Removed()
{
string expected = "data\n\nbase";
string newlines = "data\n\n\n\n\n\n\n\n\n\nbase";
string actual = Formatter.RemoveConsecutiveEmptyLines(newlines);
Assert.Equal(expected, actual);
}
[Fact]
public void RemoveConsecutiveEmptyLines_Windows_Removed()
{
string expected = "data\r\n\r\nbase";
string newlines = "data\r\n\r\n\r\n\r\n\r\nbase";
string actual = Formatter.RemoveConsecutiveEmptyLines(newlines);
Assert.Equal(expected, actual);
}
#endregion
}
}
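The GetFixedMediaType cases above pin down how the PIC identifier, the layer breaks, and the reported size select a Blu-ray label. A rough sketch of the check order those tests imply — not Formatter's actual implementation, and with placeholder cut-off constants — follows:

// Illustrative only: mirrors the ordering implied by the tests above.
// The cut-off constants are placeholders; the real values live in Formatter.
internal static class BluRayTypeSketch
{
    private const long Bd50Cutoff = 50_050_629_632;
    private const long Bd25Cutoff = 25_025_314_816;

    public static string Fix(string? picId, long? size, long? lb1, long? lb2, long? lb3)
    {
        bool ultraPic = picId == Models.PIC.Constants.DiscTypeIdentifierROMUltra;

        if (lb3 != null) return "BD-ROM-128";   // third layer break present
        if (lb2 != null) return "BD-ROM-100";   // second layer break present
        if (lb1 != null) return ultraPic || size > Bd50Cutoff ? "BD-ROM-66" : "BD-ROM-50";
        return ultraPic || size > Bd25Cutoff ? "BD-ROM-33" : "BD-ROM-25";
    }
}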


@@ -1,47 +1,47 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net6.0;net8.0</TargetFrameworks>
<CheckEolTargetFramework>false</CheckEolTargetFramework>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<TargetFrameworks>net6.0;net8.0;net9.0</TargetFrameworks>
<CheckEolTargetFramework>false</CheckEolTargetFramework>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\SabreTools.RedumpLib\SabreTools.RedumpLib.csproj" />
<ProjectReference Include="..\SabreTools.RedumpLib\SabreTools.RedumpLib.csproj" />
</ItemGroup>
<ItemGroup>
<None Remove="TestData\*" />
<None Remove="TestData\*" />
</ItemGroup>
<ItemGroup>
<Content Include="TestData\*">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</Content>
<Content Include="TestData\*">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</Content>
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.CodeCoverage" Version="17.10.0-release-24177-07" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.10.0-release-24177-07" />
<PackageReference Include="xunit" Version="2.8.0" />
<PackageReference Include="xunit.abstractions" Version="2.0.3" />
<PackageReference Include="xunit.analyzers" Version="1.13.0" />
<PackageReference Include="xunit.assert" Version="2.8.0" />
<PackageReference Include="xunit.core" Version="2.8.0" />
<PackageReference Include="xunit.extensibility.core" Version="2.8.0" />
<PackageReference Include="xunit.extensibility.execution" Version="2.8.0" />
<PackageReference Include="xunit.runner.console" Version="2.8.0">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.0">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.CodeCoverage" Version="17.13.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.13.0" />
<PackageReference Include="xunit" Version="2.9.3" />
<PackageReference Include="xunit.abstractions" Version="2.0.3" />
<PackageReference Include="xunit.analyzers" Version="1.21.0" />
<PackageReference Include="xunit.assert" Version="2.9.3" />
<PackageReference Include="xunit.core" Version="2.9.3" />
<PackageReference Include="xunit.extensibility.core" Version="2.9.3" />
<PackageReference Include="xunit.extensibility.execution" Version="2.9.3" />
<PackageReference Include="xunit.runner.console" Version="2.9.3">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="xunit.runner.visualstudio" Version="3.0.2">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
</Project>
</Project>


@@ -161,6 +161,7 @@ namespace SabreTools.RedumpLib.Test
{
DumpingProgram = "DiscImageCreator 20500101",
DumpingDate = DateTime.UtcNow.ToString("yyyy-MM-dd HH:mm:ss"),
DumpingParameters = "cd dvd bd sacd fd hdd",
Manufacturer = "ATAPI",
Model = "Optical Drive",
Firmware = "1.23",


@@ -0,0 +1,304 @@
using SabreTools.RedumpLib.Data;
using Xunit;
namespace SabreTools.RedumpLib.Test
{
public class ValidatorTests
{
// Most tests here will require installing and using the Moq library
// to mock the RedumpClient type.
[Fact]
public void NormalizeDiscType_InvalidMedia_Untouched()
{
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = null }
};
Validator.NormalizeDiscType(si);
Assert.Null(si.CommonDiscInfo.Media);
}
[Fact]
public void NormalizeDiscType_InvalidSizeChecksums_Untouched()
{
DiscType expected = DiscType.CD;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = DiscType.CD },
SizeAndChecksums = null,
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Fact]
public void NormalizeDiscType_UnformattedType_Fixed()
{
DiscType expected = DiscType.CD;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = DiscType.CD },
SizeAndChecksums = new SizeAndChecksumsSection(),
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.DVD5)]
[InlineData(DiscType.DVD9)]
public void NormalizeDiscType_DVD9_Fixed(DiscType type)
{
DiscType expected = DiscType.DVD9;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection { Layerbreak = 12345 },
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.DVD5)]
[InlineData(DiscType.DVD9)]
public void NormalizeDiscType_DVD5_Fixed(DiscType type)
{
DiscType expected = DiscType.DVD5;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection(),
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD128_Fixed(DiscType type)
{
DiscType expected = DiscType.BD128;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection { Layerbreak3 = 12345 },
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD100_Fixed(DiscType type)
{
DiscType expected = DiscType.BD100;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection { Layerbreak2 = 12345 },
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD66PIC_Fixed(DiscType type)
{
DiscType expected = DiscType.BD66;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection
{
Layerbreak = 12345,
PICIdentifier = Models.PIC.Constants.DiscTypeIdentifierROMUltra,
},
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD66Size_Fixed(DiscType type)
{
DiscType expected = DiscType.BD66;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection
{
Layerbreak = 12345,
Size = 50_050_629_633,
},
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD50_Fixed(DiscType type)
{
DiscType expected = DiscType.BD50;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection { Layerbreak = 12345 },
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD33PIC_Fixed(DiscType type)
{
DiscType expected = DiscType.BD33;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection
{
PICIdentifier = Models.PIC.Constants.DiscTypeIdentifierROMUltra,
},
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD33Size_Fixed(DiscType type)
{
DiscType expected = DiscType.BD33;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection
{
Size = 25_025_314_817,
},
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD25_Fixed(DiscType type)
{
DiscType expected = DiscType.BD25;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection(),
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.UMDSL)]
[InlineData(DiscType.UMDDL)]
public void NormalizeDiscType_UMDDL_Fixed(DiscType type)
{
DiscType expected = DiscType.UMDDL;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection { Layerbreak = 12345 },
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.UMDSL)]
[InlineData(DiscType.UMDDL)]
public void NormalizeDiscType_UMDSL_Fixed(DiscType type)
{
DiscType expected = DiscType.UMDSL;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection(),
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
}
}
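The comment at the top of this file notes that most remaining Validator tests would need the Moq library to stand in for RedumpClient. A hedged sketch of that setup, assuming RedumpClient's LoggedIn property can be overridden (virtual, or exposed through an interface) so Moq can proxy it; the Validator method name below is purely hypothetical:

using System.Threading.Tasks;
using Moq;
using SabreTools.RedumpLib;
using SabreTools.RedumpLib.Data;
using SabreTools.RedumpLib.Web;
using Xunit;

public class ValidatorClientTests
{
    [Fact]
    public async Task HypotheticalSiteValidation_NotLoggedIn_ReturnsFalse()
    {
        // Assumes RedumpClient.LoggedIn is overridable so Moq can stub it.
        var client = new Mock<RedumpClient>();
        client.SetupGet(c => c.LoggedIn).Returns(false);

        // "ValidateAgainstSite" is a stand-in name, not a real Validator method.
        bool actual = await Validator.ValidateAgainstSite(client.Object, new SubmissionInfo());

        Assert.False(actual);
    }
}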


@@ -7,6 +7,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SabreTools.RedumpLib", "Sab
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SabreTools.RedumpLib.Test", "SabreTools.RedumpLib.Test\SabreTools.RedumpLib.Test.csproj", "{63519DEA-0C3D-4F0E-95EB-E9B6E1D55378}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "RedumpTool", "RedumpTool\RedumpTool.csproj", "{556F7AA0-00D7-4D5B-96EB-D436FEFD9810}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -24,5 +26,9 @@ Global
{63519DEA-0C3D-4F0E-95EB-E9B6E1D55378}.Debug|Any CPU.Build.0 = Debug|Any CPU
{63519DEA-0C3D-4F0E-95EB-E9B6E1D55378}.Release|Any CPU.ActiveCfg = Release|Any CPU
{63519DEA-0C3D-4F0E-95EB-E9B6E1D55378}.Release|Any CPU.Build.0 = Release|Any CPU
{556F7AA0-00D7-4D5B-96EB-D436FEFD9810}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{556F7AA0-00D7-4D5B-96EB-D436FEFD9810}.Debug|Any CPU.Build.0 = Debug|Any CPU
{556F7AA0-00D7-4D5B-96EB-D436FEFD9810}.Release|Any CPU.ActiveCfg = Release|Any CPU
{556F7AA0-00D7-4D5B-96EB-D436FEFD9810}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
EndGlobal


@@ -1,5 +1,4 @@
using System;
using System.Linq;
namespace SabreTools.RedumpLib.Attributes
{
@@ -25,24 +24,24 @@ namespace SabreTools.RedumpLib.Attributes
string? valueStr = value?.ToString();
if (string.IsNullOrEmpty(valueStr))
return null;
// Get the member info array
var memberInfos = enumType?.GetMember(valueStr);
if (memberInfos == null)
return null;
// Get the enum value info from the array, if possible
var enumValueMemberInfo = memberInfos.FirstOrDefault(m => m.DeclaringType == enumType);
var enumValueMemberInfo = Array.Find(memberInfos, m => m.DeclaringType == enumType);
if (enumValueMemberInfo == null)
return null;
// Try to get the relevant attribute
var attributes = enumValueMemberInfo.GetCustomAttributes(typeof(HumanReadableAttribute), true);
if (attributes == null)
if (attributes == null || attributes.Length == 0)
return null;
// Return the first attribute, if possible
return attributes.FirstOrDefault() as HumanReadableAttribute;
return attributes[0] as HumanReadableAttribute;
}
}
}


@@ -1,8 +1,9 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
#if NET40_OR_GREATER || NETCOREAPP
using System.Net;
#endif
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
@@ -269,22 +270,25 @@ namespace SabreTools.RedumpLib
if (title != null && firstParenLocation >= 0)
{
info.CommonDiscInfo!.Title = title.Substring(0, firstParenLocation);
var subMatches = Constants.DiscNumberLetterRegex.Matches(title);
foreach (Match subMatch in subMatches.Cast<Match>())
var submatches = Constants.DiscNumberLetterRegex.Matches(title);
foreach (Match? submatch in submatches)
{
var subMatchValue = subMatch.Groups[1].Value;
if (submatch == null)
continue;
var submatchValue = submatch.Groups[1].Value;
// Disc number or letter
if (subMatchValue.StartsWith("Disc"))
info.CommonDiscInfo.DiscNumberLetter = subMatchValue.Remove(0, "Disc ".Length);
if (submatchValue.StartsWith("Disc"))
info.CommonDiscInfo.DiscNumberLetter = submatchValue.Remove(0, "Disc ".Length);
// Issue number
else if (subMatchValue.All(c => char.IsNumber(c)))
info.CommonDiscInfo.Title += $" ({subMatchValue})";
else if (ulong.TryParse(submatchValue, out _))
info.CommonDiscInfo.Title += $" ({submatchValue})";
// Disc title
else
info.CommonDiscInfo.DiscTitle = subMatchValue;
info.CommonDiscInfo.DiscTitle = submatchValue;
}
}
// Otherwise, leave the title as-is
@@ -298,15 +302,13 @@ namespace SabreTools.RedumpLib
match = Constants.ForeignTitleRegex.Match(discData);
if (match.Success)
info.CommonDiscInfo!.ForeignTitleNonLatin = WebUtility.HtmlDecode(match.Groups[1].Value);
else
info.CommonDiscInfo!.ForeignTitleNonLatin = null;
// Category
match = Constants.CategoryRegex.Match(discData);
if (match.Success)
info.CommonDiscInfo.Category = Extensions.ToDiscCategory(match.Groups[1].Value);
info.CommonDiscInfo!.Category = Extensions.ToDiscCategory(match.Groups[1].Value);
else
info.CommonDiscInfo.Category = DiscCategory.Games;
info.CommonDiscInfo!.Category = DiscCategory.Games;
// Region
if (info.CommonDiscInfo.Region == null)
@@ -321,12 +323,17 @@ namespace SabreTools.RedumpLib
if (matches.Count > 0)
{
var tempLanguages = new List<Language?>();
foreach (Match submatch in matches.Cast<Match>())
foreach (Match? submatch in matches)
{
tempLanguages.Add(Extensions.ToLanguage(submatch.Groups[1].Value));
if (submatch == null)
continue;
var language = Extensions.ToLanguage(submatch.Groups[1].Value);
if (language != null)
tempLanguages.Add(language);
}
info.CommonDiscInfo.Languages = tempLanguages.Where(l => l != null).ToArray();
info.CommonDiscInfo.Languages = [.. tempLanguages];
}
// Serial
@@ -366,8 +373,11 @@ namespace SabreTools.RedumpLib
tempDumpers.Add(dumper);
}
foreach (Match submatch in matches.Cast<Match>())
foreach (Match? submatch in matches)
{
if (submatch == null)
continue;
string? dumper = WebUtility.HtmlDecode(submatch.Groups[1].Value);
if (dumper != null)
tempDumpers.Add(dumper);
@@ -448,36 +458,8 @@ namespace SabreTools.RedumpLib
addToLast = siteCode.IsMultiLine();
// Skip certain site codes because of data issues
switch (siteCode)
{
// Multiple
case SiteCode.InternalSerialName:
case SiteCode.Multisession:
case SiteCode.VolumeLabel:
continue;
// Audio CD
case SiteCode.RingNonZeroDataStart:
case SiteCode.UniversalHash:
continue;
// Microsoft Xbox and Xbox 360
case SiteCode.DMIHash:
case SiteCode.PFIHash:
case SiteCode.SSHash:
case SiteCode.SSVersion:
case SiteCode.XMID:
case SiteCode.XeMID:
continue;
// Microsoft Xbox One and Series X/S
case SiteCode.Filename:
continue;
// Nintendo Gamecube
case SiteCode.InternalName:
continue;
}
if (ShouldSkipSiteCode(siteCode))
continue;
// If we don't already have this site code, add it to the dictionary
if (!info.CommonDiscInfo.CommentsSpecialFields!.ContainsKey(siteCode.Value))
@@ -493,14 +475,14 @@ namespace SabreTools.RedumpLib
// If we didn't find a known tag, just add the line, just in case
if (!foundTag)
{
if (addToLast && lastSiteCode != null)
if (addToLast && lastSiteCode != null && !ShouldSkipSiteCode(lastSiteCode))
{
if (!string.IsNullOrEmpty(info.CommonDiscInfo.CommentsSpecialFields![lastSiteCode.Value]))
info.CommonDiscInfo.CommentsSpecialFields[lastSiteCode.Value] += "\n";
info.CommonDiscInfo.CommentsSpecialFields[lastSiteCode.Value] += commentLine;
}
else
else if (!addToLast || lastSiteCode == null)
{
newComments += $"{commentLine}\n";
}
@@ -581,14 +563,14 @@ namespace SabreTools.RedumpLib
// If we didn't find a known tag, just add the line, just in case
if (!foundTag)
{
if (addToLast && lastSiteCode != null)
if (addToLast && lastSiteCode != null && !ShouldSkipSiteCode(lastSiteCode))
{
if (!string.IsNullOrEmpty(info.CommonDiscInfo.ContentsSpecialFields![lastSiteCode.Value]))
info.CommonDiscInfo.ContentsSpecialFields[lastSiteCode.Value] += "\n";
info.CommonDiscInfo.ContentsSpecialFields[lastSiteCode.Value] += contentLine;
}
else
else if (!addToLast || lastSiteCode == null)
{
newContents += $"{contentLine}\n";
}
@@ -643,6 +625,7 @@ namespace SabreTools.RedumpLib
info.TracksAndWriteOffsets ??= new TracksAndWriteOffsetsSection();
info.SizeAndChecksums ??= new SizeAndChecksumsSection();
info.DumpingInfo ??= new DumpingInfoSection();
info.Artifacts ??= [];
// Ensure special dictionaries
info.CommonDiscInfo.CommentsSpecialFields ??= [];
@@ -656,11 +639,11 @@ namespace SabreTools.RedumpLib
/// </summary>
/// <param name="info">Existing submission information</param>
/// <param name="seed">User-supplied submission information</param>
public static void InjectSubmissionInformation(SubmissionInfo? info, SubmissionInfo? seed)
public static SubmissionInfo? InjectSubmissionInformation(SubmissionInfo? info, SubmissionInfo? seed)
{
// If we have any invalid info
if (seed == null)
return;
return info;
// Ensure that required sections exist
info = EnsureAllSections(info);
@@ -718,6 +701,46 @@ namespace SabreTools.RedumpLib
// Info that only overwrites if supplied
if (!string.IsNullOrEmpty(seed.CopyProtection.Protection)) info.CopyProtection.Protection = seed.CopyProtection.Protection;
}
return info;
}
/// <summary>
/// Determine if a site code should be skipped on pulling
/// </summary>
private static bool ShouldSkipSiteCode(SiteCode? siteCode)
{
return siteCode switch
{
// Multiple
SiteCode.InternalSerialName
or SiteCode.Multisession
or SiteCode.VolumeLabel => true,
// Audio CD
SiteCode.RingNonZeroDataStart
or SiteCode.RingPerfectAudioOffset
or SiteCode.UniversalHash => true,
// Microsoft Xbox and Xbox 360
SiteCode.DMIHash
or SiteCode.PFIHash
or SiteCode.SSHash
or SiteCode.SSVersion
or SiteCode.XMID
or SiteCode.XeMID => true,
// Microsoft Xbox One and Series X/S
SiteCode.Filename => true,
// Nintendo Gamecube
SiteCode.InternalName => true,
// Protection
SiteCode.Protection => true,
_ => false,
};
}
#endregion
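Since InjectSubmissionInformation now returns the submission object rather than relying on in-place mutation (EnsureAllSections can hand back a freshly created instance when info starts out null), callers should capture the result. A hedged usage sketch; the hosting class is assumed to be named Builder, and the two helpers are hypothetical:

// "Builder" is the assumed name of the static class hosting these methods;
// ParseExistingInfo and LoadUserSeed are hypothetical sources of data.
SubmissionInfo? info = ParseExistingInfo();
SubmissionInfo? seed = LoadUserSeed();
info = Builder.InjectSubmissionInformation(info, seed);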
@@ -729,9 +752,10 @@ namespace SabreTools.RedumpLib
/// </summary>
/// <param name="text">Text block to process</param>
/// <returns>Processed text block, if possible</returns>
private static string ReplaceHtmlWithSiteCodes(this string text)
internal static string ReplaceHtmlWithSiteCodes(this string text)
{
if (string.IsNullOrEmpty(text))
// Empty strings are ignored
if (text.Length == 0)
return text;
foreach (SiteCode? siteCode in Enum.GetValues(typeof(SiteCode)))


@@ -4,18 +4,17 @@ namespace SabreTools.RedumpLib.Data
{
public static class Constants
{
// TODO: Add RegexOptions.Compiled
#region Regular Expressions
/// <summary>
/// Regex matching the added field on a disc page
/// </summary>
public static Regex AddedRegex = new(@"<tr><th>Added</th><td>(.*?)</td></tr>");
public static Regex AddedRegex = new(@"<tr><th>Added</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the barcode field on a disc page
/// </summary>
public static Regex BarcodeRegex = new(@"<tr><th>Barcode</th></tr><tr><td>(.*?)</td></tr>");
public static Regex BarcodeRegex = new(@"<tr><th>Barcode</th></tr><tr><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the BCA field on a disc page
@@ -25,87 +24,87 @@ namespace SabreTools.RedumpLib.Data
+ "<tr><td>(?<row1number>.*?)</td><td>(?<row1contents>.*?)</td><td>(?<row1ascii>.*?)</td></tr>"
+ "<tr><td>(?<row2number>.*?)</td><td>(?<row2contents>.*?)</td><td>(?<row2ascii>.*?)</td></tr>"
+ "<tr><td>(?<row3number>.*?)</td><td>(?<row3contents>.*?)</td><td>(?<row3ascii>.*?)</td></tr>"
+ "<tr><td>(?<row4number>.*?)</td><td>(?<row4contents>.*?)</td><td>(?<row4ascii>.*?)</td></tr>", RegexOptions.Singleline);
+ "<tr><td>(?<row4number>.*?)</td><td>(?<row4contents>.*?)</td><td>(?<row4ascii>.*?)</td></tr>", RegexOptions.Compiled | RegexOptions.Singleline);
/// <summary>
/// Regex matching the category field on a disc page
/// </summary>
public static Regex CategoryRegex = new(@"<tr><th>Category</th><td>(.*?)</td></tr>");
public static Regex CategoryRegex = new(@"<tr><th>Category</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the comments field on a disc page
/// </summary>
public static Regex CommentsRegex = new(@"<tr><th>Comments</th></tr><tr><td>(.*?)</td></tr>", RegexOptions.Singleline);
public static Regex CommentsRegex = new(@"<tr><th>Comments</th></tr><tr><td>(.*?)</td></tr>", RegexOptions.Compiled | RegexOptions.Singleline);
/// <summary>
/// Regex matching the contents field on a disc page
/// </summary>
public static Regex ContentsRegex = new(@"<tr><th>Contents</th></tr><tr .*?><td>(.*?)</td></tr>", RegexOptions.Singleline);
public static Regex ContentsRegex = new(@"<tr><th>Contents</th></tr><tr .*?><td>(.*?)</td></tr>", RegexOptions.Compiled | RegexOptions.Singleline);
/// <summary>
/// Regex matching individual disc links on a results page
/// </summary>
public static Regex DiscRegex = new(@"<a href=""/disc/(\d+)/"">");
public static Regex DiscRegex = new(@"<a href=""/disc/(\d+)/"">", RegexOptions.Compiled);
/// <summary>
/// Regex matching the disc number or letter field on a disc page
/// </summary>
public static Regex DiscNumberLetterRegex = new(@"\((.*?)\)");
public static Regex DiscNumberLetterRegex = new(@"\((.*?)\)", RegexOptions.Compiled);
/// <summary>
/// Regex matching the dumpers on a disc page
/// </summary>
public static Regex DumpersRegex = new(@"<a href=""/discs/dumper/(.*?)/"">");
public static Regex DumpersRegex = new(@"<a href=""/discs/dumper/(.*?)/"">", RegexOptions.Compiled);
/// <summary>
/// Regex matching the edition field on a disc page
/// </summary>
public static Regex EditionRegex = new(@"<tr><th>Edition</th><td>(.*?)</td></tr>");
public static Regex EditionRegex = new(@"<tr><th>Edition</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the error count field on a disc page
/// </summary>
public static Regex ErrorCountRegex = new(@"<tr><th>Errors count</th><td>(.*?)</td></tr>");
public static Regex ErrorCountRegex = new(@"<tr><th>Errors count</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the foreign title field on a disc page
/// </summary>
public static Regex ForeignTitleRegex = new(@"<h2>(.*?)</h2>");
public static Regex ForeignTitleRegex = new(@"<h2>(.*?)</h2>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the "full match" ID list from a WIP disc page
/// </summary>
public static Regex FullMatchRegex = new(@"<td class=""static"">full match ids: (.*?)</td>");
public static Regex FullMatchRegex = new(@"<td class=""static"">full match ids: (.*?)</td>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the languages field on a disc page
/// </summary>
public static Regex LanguagesRegex = new(@"<img src=""/images/languages/(.*?)\.png"" alt="".*?"" title="".*?"" />\s*");
public static Regex LanguagesRegex = new(@"<img src=""/images/languages/(.*?)\.png"" alt="".*?"" title="".*?"" />\s*", RegexOptions.Compiled);
/// <summary>
/// Regex matching the last modified field on a disc page
/// </summary>
public static Regex LastModifiedRegex = new(@"<tr><th>Last modified</th><td>(.*?)</td></tr>");
public static Regex LastModifiedRegex = new(@"<tr><th>Last modified</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the media field on a disc page
/// </summary>
public static Regex MediaRegex = new(@"<tr><th>Media</th><td>(.*?)</td></tr>");
public static Regex MediaRegex = new(@"<tr><th>Media</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching individual WIP disc links on a results page
/// </summary>
public static Regex NewDiscRegex = new(@"<a (style=.*)?href=""/newdisc/(\d+)/"">");
public static Regex NewDiscRegex = new(@"<a (style=.*)?href=""/newdisc/(\d+)/"">", RegexOptions.Compiled);
/// <summary>
/// Regex matching the "partial match" ID list from a WIP disc page
/// </summary>
public static Regex PartialMatchRegex = new(@"<td class=""static"">partial match ids: (.*?)</td>");
public static Regex PartialMatchRegex = new(@"<td class=""static"">partial match ids: (.*?)</td>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the disc key on a PS3 disc page
/// </summary>
public static Regex PS3DiscKey = new(@"<th>Disc Key</th><th>Disc ID</th><th>Permanent Information & Control \(PIC\)</th></tr><tr><td>(.*?)</td><td>");
public static Regex PS3DiscKey = new(@"<th>Disc Key</th><th>Disc ID</th><th>Permanent Information & Control \(PIC\)</th></tr><tr><td>(.*?)</td><td>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the PVD field on a disc page
@@ -115,62 +114,62 @@ namespace SabreTools.RedumpLib.Data
+ @"<tr><td>Creation</td><td>(?<creationbytes>.*?)</td><td>(?<creationdate>.*?)</td><td>(?<creationtime>.*?)</td><td>(?<creationtimezone>.*?)</td></tr>"
+ @"<tr><td>Modification</td><td>(?<modificationbytes>.*?)</td><td>(?<modificationdate>.*?)</td><td>(?<modificationtime>.*?)</td><td>(?<modificationtimezone>.*?)</td></tr>"
+ @"<tr><td>Expiration</td><td>(?<expirationbytes>.*?)</td><td>(?<expirationdate>.*?)</td><td>(?<expirationtime>.*?)</td><td>(?<expirationtimezone>.*?)</td></tr>"
+ @"<tr><td>Effective</td><td>(?<effectivebytes>.*?)</td><td>(?<effectivedate>.*?)</td><td>(?<effectivetime>.*?)</td><td>(?<effectivetimezone>.*?)</td></tr>", RegexOptions.Singleline);
+ @"<tr><td>Effective</td><td>(?<effectivebytes>.*?)</td><td>(?<effectivedate>.*?)</td><td>(?<effectivetime>.*?)</td><td>(?<effectivetimezone>.*?)</td></tr>", RegexOptions.Compiled | RegexOptions.Singleline);
/// <summary>
/// Regex matching the region field on a disc page
/// </summary>
public static Regex RegionRegex = new(@"<tr><th>Region</th><td><a href=""/discs/region/(.*?)/"">");
public static Regex RegionRegex = new(@"<tr><th>Region</th><td><a href=""/discs/region/(.*?)/"">", RegexOptions.Compiled);
/// <summary>
/// Regex matching double-layer disc ringcode information
/// </summary>
public static Regex RingCodeDoubleRegex = new(@"", RegexOptions.Singleline); // Varies based on available fields, like Additional Mould
public static Regex RingCodeDoubleRegex = new(@"", RegexOptions.Compiled | RegexOptions.Singleline); // Varies based on available fields, like Additional Mould
/// <summary>
/// Regex matching single-layer disc ringcode information
/// </summary>
public static Regex RingCodeSingleRegex = new(@"", RegexOptions.Singleline); // Varies based on available fields, like Additional Mould
public static Regex RingCodeSingleRegex = new(@"", RegexOptions.Compiled | RegexOptions.Singleline); // Varies based on available fields, like Additional Mould
/// <summary>
/// Regex matching the serial field on a disc page
/// </summary>
public static Regex SerialRegex = new(@"<tr><th>Serial</th><td>(.*?)</td></tr>");
public static Regex SerialRegex = new(@"<tr><th>Serial</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the system field on a disc page
/// </summary>
public static Regex SystemRegex = new(@"<tr><th>System</th><td><a href=""/discs/system/(.*?)/"">");
public static Regex SystemRegex = new(@"<tr><th>System</th><td><a href=""/discs/system/(.*?)/"">", RegexOptions.Compiled);
/// <summary>
/// Regex matching the title field on a disc page
/// </summary>
public static Regex TitleRegex = new(@"<h1>(.*?)</h1>");
public static Regex TitleRegex = new(@"<h1>(.*?)</h1>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the current nonce token for login
/// </summary>
public static Regex TokenRegex = new(@"<input type=""hidden"" name=""csrf_token"" value=""(.*?)"" />");
public static Regex TokenRegex = new(@"<input type=""hidden"" name=""csrf_token"" value=""(.*?)"" />", RegexOptions.Compiled);
/// <summary>
/// Regex matching a single track on a disc page
/// </summary>
public static Regex TrackRegex = new(@"<tr><td>(?<number>.*?)</td><td>(?<type>.*?)</td><td>(?<pregap>.*?)</td><td>(?<length>.*?)</td><td>(?<sectors>.*?)</td><td>(?<size>.*?)</td><td>(?<crc32>.*?)</td><td>(?<md5>.*?)</td><td>(?<sha1>.*?)</td></tr>", RegexOptions.Singleline);
public static Regex TrackRegex = new(@"<tr><td>(?<number>.*?)</td><td>(?<type>.*?)</td><td>(?<pregap>.*?)</td><td>(?<length>.*?)</td><td>(?<sectors>.*?)</td><td>(?<size>.*?)</td><td>(?<crc32>.*?)</td><td>(?<md5>.*?)</td><td>(?<sha1>.*?)</td></tr>", RegexOptions.Compiled | RegexOptions.Singleline);
/// <summary>
/// Regex matching the track count on a disc page
/// </summary>
public static Regex TrackCountRegex = new(@"<tr><th>Number of tracks</th><td>(.*?)</td></tr>");
public static Regex TrackCountRegex = new(@"<tr><th>Number of tracks</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the version field on a disc page
/// </summary>
public static Regex VersionRegex = new(@"<tr><th>Version</th><td>(.*?)</td></tr>");
public static Regex VersionRegex = new(@"<tr><th>Version</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the write offset field on a disc page
/// </summary>
public static Regex WriteOffsetRegex = new(@"<tr><th>Write offset</th><td>(.*?)</td></tr>");
public static Regex WriteOffsetRegex = new(@"<tr><th>Write offset</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
#endregion
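Adding RegexOptions.Compiled across these patterns trades a one-time compilation cost (the pattern is emitted as IL on first use) for faster repeated matching, which suits static fields that are run against many disc pages. A minimal illustration using the Serial pattern from above; the wrapper class is invented for the example:

using System.Text.RegularExpressions;

internal static class SerialScraper
{
    // Compiled once per process; subsequent Match calls skip the interpreter.
    private static readonly Regex SerialRegex =
        new(@"<tr><th>Serial</th><td>(.*?)</td></tr>", RegexOptions.Compiled);

    public static string? GetSerial(string discPage)
    {
        Match m = SerialRegex.Match(discPage);
        return m.Success ? m.Groups[1].Value : null;
    }
}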


@@ -281,7 +281,7 @@ namespace SabreTools.RedumpLib.Data
[Language(LongName = "Bini; Edo", ThreeLetterCode = "bin")]
Bini,
[Language(LongName = "Bislama", TwoLetterCode = "bla", ThreeLetterCode = "bis")]
[Language(LongName = "Bislama", TwoLetterCode = "bi", ThreeLetterCode = "bis")]
Bislama,
// Blin; Bilin
@@ -2008,9 +2008,6 @@ namespace SabreTools.RedumpLib.Data
[System(Category = SystemCategory.DiscBasedConsole, LongName = "Philips CD-i", ShortName = "cdi", HasCues = true, HasDat = true)]
PhilipsCDi,
[System(Category = SystemCategory.DiscBasedConsole, LongName = "Philips CD-i Digital Video", ShortName = "cdi-video", IsBanned = true)]
PhilipsCDiDigitalVideo,
[System(Category = SystemCategory.DiscBasedConsole, Available = false, LongName = "Pioneer LaserActive")]
PioneerLaserActive,
@@ -2488,8 +2485,105 @@ namespace SabreTools.RedumpLib.Data
/// </remarks>
public enum Region
{
// TODO: Should "regions" and multi-country sets be phased out?
// TODO: Should "regions" be moved to the end?
#region Aggregates - Redump Only
[HumanReadable(LongName = "Asia", ShortName = "A")]
Asia,
[HumanReadable(LongName = "Asia, Europe", ShortName = "A,E")]
AsiaEurope,
[HumanReadable(LongName = "Asia, USA", ShortName = "A,U")]
AsiaUSA,
[HumanReadable(LongName = "Australia, Germany", ShortName = "Au,G")]
AustraliaGermany,
[HumanReadable(LongName = "Australia, New Zealand", ShortName = "Au,Nz")]
AustraliaNewZealand,
[HumanReadable(LongName = "Austria, Switzerland", ShortName = "At,Ch")]
AustriaSwitzerland,
[HumanReadable(LongName = "Belgium, Netherlands", ShortName = "Be,N")]
BelgiumNetherlands,
[HumanReadable(LongName = "Europe", ShortName = "E")]
Europe,
[HumanReadable(LongName = "Europe, Asia", ShortName = "E,A")]
EuropeAsia,
[HumanReadable(LongName = "Europe, Australia", ShortName = "E,Au")]
EuropeAustralia,
[HumanReadable(LongName = "Europe, Canada", ShortName = "E,Ca")]
EuropeCanada,
[HumanReadable(LongName = "Europe, Germany", ShortName = "E,G")]
EuropeGermany,
[HumanReadable(LongName = "Export", ShortName = "Ex")]
Export,
[HumanReadable(LongName = "France, Spain", ShortName = "F,S")]
FranceSpain,
[HumanReadable(LongName = "Greater China", ShortName = "GC")]
GreaterChina,
[HumanReadable(LongName = "Japan, Asia", ShortName = "J,A")]
JapanAsia,
[HumanReadable(LongName = "Japan, Europe", ShortName = "J,E")]
JapanEurope,
[HumanReadable(LongName = "Japan, Korea", ShortName = "J,K")]
JapanKorea,
[HumanReadable(LongName = "Japan, USA", ShortName = "J,U")]
JapanUSA,
[HumanReadable(LongName = "Latin America", ShortName = "LAm")]
LatinAmerica,
[HumanReadable(LongName = "Scandinavia", ShortName = "Sca")]
Scandinavia,
[HumanReadable(LongName = "Spain, Portugal", ShortName = "S,Pt")]
SpainPortugal,
[HumanReadable(LongName = "UK, Australia", ShortName = "Uk,Au")]
UKAustralia,
[HumanReadable(LongName = "USA, Asia", ShortName = "U,A")]
USAAsia,
[HumanReadable(LongName = "USA, Australia", ShortName = "U,Au")]
USAAustralia,
[HumanReadable(LongName = "USA, Brazil", ShortName = "U,B")]
USABrazil,
[HumanReadable(LongName = "USA, Canada", ShortName = "U,Ca")]
USACanada,
[HumanReadable(LongName = "USA, Europe", ShortName = "U,E")]
USAEurope,
[HumanReadable(LongName = "USA, Germany", ShortName = "U,G")]
USAGermany,
[HumanReadable(LongName = "USA, Japan", ShortName = "U,J")]
USAJapan,
[HumanReadable(LongName = "USA, Korea", ShortName = "U,K")]
USAKorea,
[HumanReadable(LongName = "World", ShortName = "W")]
World,
#endregion
#region A
@@ -2535,30 +2629,12 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Ascension Island", ShortName = "Ac")]
AscensionIsland,
[HumanReadable(LongName = "Asia", ShortName = "A")]
Asia,
[HumanReadable(LongName = "Asia, Europe", ShortName = "A,E")]
AsiaEurope,
[HumanReadable(LongName = "Asia, USA", ShortName = "A,U")]
AsiaUSA,
[HumanReadable(LongName = "Australia", ShortName = "Au")]
Australia,
[HumanReadable(LongName = "Australia, Germany", ShortName = "Au,G")]
AustraliaGermany,
[HumanReadable(LongName = "Australia, New Zealand", ShortName = "Au,Nz")]
AustraliaNewZealand,
[HumanReadable(LongName = "Austria", ShortName = "At")]
Austria,
[HumanReadable(LongName = "Austria, Switzerland", ShortName = "At,Ch")]
AustriaSwitzerland,
[HumanReadable(LongName = "Azerbaijan", ShortName = "Az")]
Azerbaijan,
@@ -2584,9 +2660,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Belgium", ShortName = "Be")]
Belgium,
[HumanReadable(LongName = "Belgium, Netherlands", ShortName = "Be,N")]
BelgiumNetherlands,
[HumanReadable(LongName = "Belize", ShortName = "Bz")]
Belize,
@@ -2767,21 +2840,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Ethiopia", ShortName = "Et")]
Ethiopia,
[HumanReadable(LongName = "Europe", ShortName = "E")]
Europe,
[HumanReadable(LongName = "Europe, Asia", ShortName = "E,A")]
EuropeAsia,
[HumanReadable(LongName = "Europe, Australia", ShortName = "E,Au")]
EuropeAustralia,
[HumanReadable(LongName = "Europe, Canada", ShortName = "E,Ca")]
EuropeCanada,
[HumanReadable(LongName = "Europe, Germany", ShortName = "E,G")]
EuropeGermany,
// Commented out to avoid confusion
//[HumanReadable(LongName = "European Union", ShortName = "Eu")]
//EuropeanUnion,
@@ -2790,9 +2848,6 @@ namespace SabreTools.RedumpLib.Data
//[HumanReadable(LongName = "Eurozone", ShortName = "Ez")]
//Eurozone,
[HumanReadable(LongName = "Export", ShortName = "Ex")]
Export,
#endregion
#region F
@@ -2821,9 +2876,6 @@ namespace SabreTools.RedumpLib.Data
//[HumanReadable(LongName = "France, Metropolitan", ShortName = "Fx")]
//FranceMetropolitan,
[HumanReadable(LongName = "France, Spain", ShortName = "F,S")]
FranceSpain,
[HumanReadable(LongName = "French Guiana", ShortName = "Gf")]
FrenchGuiana,
@@ -2856,9 +2908,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Gibraltar", ShortName = "Gi")]
Gibraltar,
[HumanReadable(LongName = "Greater China", ShortName = "GC")]
GreaterChina,
[HumanReadable(LongName = "Greece", ShortName = "Gr")]
Greece,
@@ -2958,18 +3007,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Japan", ShortName = "J")]
Japan,
[HumanReadable(LongName = "Japan, Asia", ShortName = "J,A")]
JapanAsia,
[HumanReadable(LongName = "Japan, Europe", ShortName = "J,E")]
JapanEurope,
[HumanReadable(LongName = "Japan, Korea", ShortName = "J,K")]
JapanKorea,
[HumanReadable(LongName = "Japan, USA", ShortName = "J,U")]
JapanUSA,
[HumanReadable(LongName = "Jersey", ShortName = "Je")]
Jersey,
@@ -3009,9 +3046,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "(Laos) Lao People's Democratic Republic", ShortName = "La")]
Laos,
[HumanReadable(LongName = "Latin America", ShortName = "LAm")]
LatinAmerica,
[HumanReadable(LongName = "Latvia", ShortName = "Lv")]
Latvia,
@@ -3264,9 +3298,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Saudi Arabia", ShortName = "Sa")]
SaudiArabia,
[HumanReadable(LongName = "Scandinavia", ShortName = "Sca")]
Scandinavia,
[HumanReadable(LongName = "Senegal", ShortName = "Sn")]
Senegal,
@@ -3310,9 +3341,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Spain", ShortName = "S")]
Spain,
[HumanReadable(LongName = "Spain, Portugal", ShortName = "S,Pt")]
SpainPortugal,
[HumanReadable(LongName = "Sri Lanka", ShortName = "Lk")]
SriLanka,
@@ -3397,9 +3425,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "UK", ShortName = "Uk")]
UnitedKingdom,
[HumanReadable(LongName = "UK, Australia", ShortName = "Uk,Au")]
UKAustralia,
[HumanReadable(LongName = "Ukraine", ShortName = "Ue")]
Ukraine,
@@ -3424,30 +3449,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "USA", ShortName = "U")]
UnitedStatesOfAmerica,
[HumanReadable(LongName = "USA, Asia", ShortName = "U,A")]
USAAsia,
[HumanReadable(LongName = "USA, Australia", ShortName = "U,Au")]
USAAustralia,
[HumanReadable(LongName = "USA, Brazil", ShortName = "U,B")]
USABrazil,
[HumanReadable(LongName = "USA, Canada", ShortName = "U,Ca")]
USACanada,
[HumanReadable(LongName = "USA, Europe", ShortName = "U,E")]
USAEurope,
[HumanReadable(LongName = "USA, Germany", ShortName = "U,G")]
USAGermany,
[HumanReadable(LongName = "USA, Japan", ShortName = "U,J")]
USAJapan,
[HumanReadable(LongName = "USA, Korea", ShortName = "U,K")]
USAKorea,
[HumanReadable(LongName = "USSR", ShortName = "Su")]
USSR,
@@ -3483,9 +3484,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Western Sahara", ShortName = "Eh")]
WesternSahara,
[HumanReadable(LongName = "World", ShortName = "W")]
World,
#endregion
#region Y
@@ -3526,7 +3524,7 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:ALTF]", LongName = "<b>Alternative Foreign Title</b>:")]
AlternativeForeignTitle,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Applications</b>:", LongName = "<b>Applications</b>:")]
Applications,
@@ -3536,30 +3534,30 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:BBFC]", LongName = "<b>BBFC Reg. No.</b>:")]
BBFCRegistrationNumber,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Bethesda ID</b>:", LongName = "<b>Bethesda ID</b>:")]
BethesdaID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>CD Projekt ID</b>:", LongName = "<b>CD Projekt ID</b>:")]
CDProjektID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Compatible OS</b>:", LongName = "<b>Compatible OS</b>:")]
CompatibleOS,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Disc Hologram ID</b>:", LongName = "<b>Disc Hologram ID</b>:")]
DiscHologramID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>DMI</b>:", LongName = "<b>DMI</b>:")]
DMIHash,
[HumanReadable(ShortName = "[T:DNAS]", LongName = "<b>DNAS Disc ID</b>:")]
DNASDiscID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Eidos ID</b>:", LongName = "<b>Eidos ID</b>:")]
EidosID,
@@ -3569,7 +3567,7 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:X]", LongName = "<b>Extras</b>:")]
Extras,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Filename</b>:", LongName = "<b>Filename</b>:")]
Filename,
@@ -3579,7 +3577,7 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:GF]", LongName = "<b>Game Footage</b>:")]
GameFootage,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Games</b>:", LongName = "<b>Games</b>:")]
Games,
@@ -3589,7 +3587,7 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:GTID]", LongName = "<b>GT Interactive ID</b>:")]
GTInteractiveID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Internal Name</b>:", LongName = "<b>Internal Name</b>:")]
InternalName,
@@ -3617,11 +3615,11 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:LAID]", LongName = "<b>Lucas Arts ID</b>:")]
LucasArtsID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Microsoft ID</b>:", LongName = "<b>Microsoft ID</b>:")]
MicrosoftID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Multisession</b>:", LongName = "<b>Multisession</b>:")]
Multisession,
@@ -3643,7 +3641,7 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:P]", LongName = "<b>Patches</b>:")]
Patches,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>PFI</b>:", LongName = "<b>PFI</b>:")]
PFIHash,
@@ -3659,10 +3657,18 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:PPN]", LongName = "<b>PPN</b>:")]
PPN,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag for some systems yet
[HumanReadable(ShortName = "<b>Protection</b>:", LongName = "<b>Protection</b>:")]
Protection,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Ring non-zero data start</b>:", LongName = "<b>Ring non-zero data start</b>:")]
RingNonZeroDataStart,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Ring Perfect Audio Offset</b>:", LongName = "<b>Ring Perfect Audio Offset</b>:")]
RingPerfectAudioOffset,
[HumanReadable(ShortName = "[T:RD]", LongName = "<b>Rolling Demos</b>:")]
RollingDemos,
@@ -3678,15 +3684,15 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:S]", LongName = "<b>Series</b>:")]
Series,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Sierra ID</b>:", LongName = "<b>Sierra ID</b>:")]
SierraID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>SS</b>:", LongName = "<b>SS</b>:")]
SSHash,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>SS version</b>:", LongName = "<b>SS version</b>:")]
SSVersion,
@@ -3699,7 +3705,7 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:UID]", LongName = "<b>Ubisoft ID</b>:")]
UbisoftID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Universal Hash (SHA-1)</b>:", LongName = "<b>Universal Hash (SHA-1)</b>:")]
UniversalHash,
@@ -3718,11 +3724,11 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:VCD]", LongName = "<b>V-CD</b>")]
VCD,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>XeMID</b>:", LongName = "<b>XeMID</b>:")]
XeMID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>XMID</b>:", LongName = "<b>XMID</b>:")]
XMID,
}

File diff suppressed because it is too large


@@ -1,6 +1,8 @@
using System;
using System.Collections.Generic;
#if NET40_OR_GREATER || NETCOREAPP
using System.Linq;
#endif
using Newtonsoft.Json;
using SabreTools.RedumpLib.Converters;
@@ -73,6 +75,20 @@ namespace SabreTools.RedumpLib.Data
public object Clone()
{
#if NET20 || NET35
Dictionary<string, string>? artifacts = null;
if (this.Artifacts != null)
{
artifacts = new Dictionary<string, string>();
foreach (var kvp in this.Artifacts)
{
artifacts[kvp.Key] = kvp.Value;
}
}
#else
var artifacts = this.Artifacts?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value);
#endif
return new SubmissionInfo
{
SchemaVersion = this.SchemaVersion,
@@ -90,7 +106,7 @@ namespace SabreTools.RedumpLib.Data
TracksAndWriteOffsets = this.TracksAndWriteOffsets?.Clone() as TracksAndWriteOffsetsSection,
SizeAndChecksums = this.SizeAndChecksums?.Clone() as SizeAndChecksumsSection,
DumpingInfo = this.DumpingInfo?.Clone() as DumpingInfoSection,
Artifacts = this.Artifacts?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
Artifacts = artifacts,
};
}
}
@@ -233,6 +249,31 @@ namespace SabreTools.RedumpLib.Data
public object Clone()
{
#if NET20 || NET35
Dictionary<SiteCode, string>? commentsSpecialFields = null;
if (this.CommentsSpecialFields != null)
{
commentsSpecialFields = new Dictionary<SiteCode, string>();
foreach (var kvp in this.CommentsSpecialFields)
{
commentsSpecialFields[kvp.Key] = kvp.Value;
}
}
Dictionary<SiteCode, string>? contentsSpecialFields = null;
if (this.ContentsSpecialFields != null)
{
contentsSpecialFields = new Dictionary<SiteCode, string>();
foreach (var kvp in this.ContentsSpecialFields)
{
contentsSpecialFields[kvp.Key] = kvp.Value;
}
}
#else
var commentsSpecialFields = this.CommentsSpecialFields?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value);
var contentsSpecialFields = this.ContentsSpecialFields?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value);
#endif
return new CommonDiscInfoSection
{
System = this.System,
@@ -271,9 +312,9 @@ namespace SabreTools.RedumpLib.Data
EXEDateBuildDate = this.EXEDateBuildDate,
ErrorsCount = this.ErrorsCount,
Comments = this.Comments,
CommentsSpecialFields = this.CommentsSpecialFields?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
CommentsSpecialFields = commentsSpecialFields,
Contents = this.Contents,
ContentsSpecialFields = this.ContentsSpecialFields?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
ContentsSpecialFields = contentsSpecialFields,
};
}
}
@@ -414,13 +455,27 @@ namespace SabreTools.RedumpLib.Data
public object Clone()
{
#if NET20 || NET35
Dictionary<string, List<string>?>? fullProtections = null;
if (this.FullProtections != null)
{
fullProtections = new Dictionary<string, List<string>?>();
foreach (var kvp in this.FullProtections)
{
fullProtections[kvp.Key] = kvp.Value;
}
}
#else
var fullProtections = this.FullProtections?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value);
#endif
return new CopyProtectionSection
{
AntiModchip = this.AntiModchip,
LibCrypt = this.LibCrypt,
LibCryptData = this.LibCryptData,
Protection = this.Protection,
FullProtections = this.FullProtections?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
FullProtections = fullProtections,
SecuROMData = this.SecuROMData,
};
}
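The same manual copy loop is repeated for each dictionary in this file's Clone methods so the code stays LINQ-free on net20/net35. If desired, a small generic helper (hypothetical, not part of the library) could fold the three occurrences together:

// Hypothetical helper; mirrors the manual loops above and avoids
// System.Linq so it still compiles when targeting net20/net35.
private static Dictionary<TKey, TValue>? CopyDictionary<TKey, TValue>(Dictionary<TKey, TValue>? source)
    where TKey : notnull
{
    if (source == null)
        return null;

    var copy = new Dictionary<TKey, TValue>(source.Count);
    foreach (var kvp in source)
    {
        copy[kvp.Key] = kvp.Value;
    }
    return copy;
}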


@@ -1,4 +1,5 @@
using System.Threading.Tasks;
using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
using SabreTools.RedumpLib.Web;
@@ -51,6 +52,11 @@ namespace SabreTools.RedumpLib
/// </summary>
public bool OnlyList { get; set; }
/// <summary>
/// Don't replace forward slashes with `-` in queries
/// </summary>
public bool NoSlash { get; set; }
/// <summary>
/// Force continuing downloads until user cancels or pages run out
/// </summary>
@@ -97,49 +103,85 @@ namespace SabreTools.RedumpLib
/// <summary>
/// Run the downloads that should go
/// </summary>
/// <returns>True if there was a valid download type, false otherwise</returns>
public async Task<bool> Download()
/// <returns>List of IDs that were processed on success, empty on error</returns>
/// <remarks>The Packs feature always returns an empty list</remarks>
public async Task<List<int>> Download()
{
// Login to Redump, if possible
if (!_client.LoggedIn)
await _client.Login(Username ?? string.Empty, Password ?? string.Empty);
// Create output list
List<int> processedIds = [];
switch (Feature)
{
case Feature.Site:
if (OnlyNew)
await Discs.DownloadLastModified(_client, OutDir, Force);
else
await Discs.DownloadSiteRange(_client, OutDir, MinimumId, MaximumId);
break;
case Feature.WIP:
if (OnlyNew)
await WIP.DownloadLastSubmitted(_client, OutDir);
else
await WIP.DownloadWIPRange(_client, OutDir, MinimumId, MaximumId);
break;
case Feature.Packs:
await Packs.DownloadPacks(_client, OutDir, UseSubfolders);
break;
case Feature.User:
if (OnlyList)
await User.ListUser(_client, Username);
else if (OnlyNew)
await User.DownloadUserLastModified(_client, Username, OutDir);
else
await User.DownloadUser(_client, Username, OutDir);
break;
case Feature.Quicksearch:
if (OnlyList)
await Search.ListSearchResults(_client, QueryString);
else
await Search.DownloadSearchResults(_client, QueryString, OutDir);
processedIds = await ProcessQuicksearch();
break;
case Feature.Site:
processedIds = await ProcessSite();
break;
case Feature.User:
processedIds = await ProcessUser();
break;
case Feature.WIP:
processedIds = await ProcessWIP();
break;
default:
return false;
return [];
}
return true;
return processedIds;
}
/// <summary>
/// Process the Quicksearch feature
/// </summary>
private async Task<List<int>> ProcessQuicksearch()
{
if (OnlyList)
return await Search.ListSearchResults(_client, QueryString, NoSlash);
else
return await Search.DownloadSearchResults(_client, QueryString, OutDir, NoSlash);
}
/// <summary>
/// Process the Site feature
/// </summary>
private async Task<List<int>> ProcessSite()
{
if (OnlyNew)
return await Discs.DownloadLastModified(_client, OutDir, Force);
else
return await Discs.DownloadSiteRange(_client, OutDir, MinimumId, MaximumId);
}
/// <summary>
/// Process the User feature
/// </summary>
private async Task<List<int>> ProcessUser()
{
if (OnlyList)
return await User.ListUser(_client, Username);
else if (OnlyNew)
return await User.DownloadUserLastModified(_client, Username, OutDir);
else
return await User.DownloadUser(_client, Username, OutDir);
}
/// <summary>
/// Process the WIP feature
/// </summary>
private async Task<List<int>> ProcessWIP()
{
if (OnlyNew)
return await WIP.DownloadLastSubmitted(_client, OutDir);
else
return await WIP.DownloadWIPRange(_client, OutDir, MinimumId, MaximumId);
}
}
}
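A rough sketch of how a caller might consume the new Download contract. The containing class name ("Downloader") is an assumption since its declaration is outside this diff, and the remaining options (Feature, QueryString, OutDir) are assumed to be settable properties like OnlyList and NoSlash; all values shown are placeholders.
// Sketch only: class name and option values are assumptions.
var downloader = new Downloader
{
    Feature = Feature.Quicksearch,
    QueryString = "example query",
    OutDir = "dumps",
    NoSlash = false,
};

List<int> processed = await downloader.Download();
if (processed.Count == 0)
    Console.WriteLine("Nothing was processed (invalid feature, error, or no results).");
else
    Console.WriteLine($"Processed {processed.Count} disc ID(s): {string.Join(", ", processed)}");
Note that, per the remarks above, the Packs feature always yields an empty list even on success.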

View File

@@ -0,0 +1,9 @@
#if NET20
namespace System.Runtime.CompilerServices
{
[AttributeUsage(AttributeTargets.Assembly | AttributeTargets.Class | AttributeTargets.Method)]
internal sealed class ExtensionAttribute : Attribute {}
}
#endif
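A minimal sketch of why this shim matters: with the attribute defined, extension-method syntax compiles even when targeting net20, which otherwise lacks System.Runtime.CompilerServices.ExtensionAttribute. The helper below is purely illustrative and not part of the library.
// Illustrative only: any static class with a `this` parameter now compiles on net20
// because the compiler can find ExtensionAttribute in the shim above.
internal static class StringExtensions
{
    public static bool IsNullOrWhiteSpaceCompat(this string? value)
    {
        if (value == null)
            return true;

        foreach (char c in value)
        {
            if (!char.IsWhiteSpace(c))
                return false;
        }

        return true;
    }
}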

File diff suppressed because it is too large


View File

@@ -3,7 +3,6 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Text;
namespace SabreTools.RedumpLib
@@ -40,8 +39,13 @@ namespace SabreTools.RedumpLib
new StringBuilder(256) :
new StringBuilder(value.Length);
sb.Append(valueSpan.Take(index).ToArray());
HtmlDecode(valueSpan.Skip(index).ToArray(), ref sb);
char[] take = new char[index];
Array.Copy(valueSpan, take, index);
sb.Append(take);
char[] skip = new char[valueSpan.Length - index];
Array.Copy(valueSpan, index, skip, 0, skip.Length);
HtmlDecode(skip, ref sb);
return sb.ToString();
}
@@ -57,7 +61,8 @@ namespace SabreTools.RedumpLib
// We found a '&'. Now look for the next ';' or '&'. The idea is that
// if we find another '&' before finding a ';', then this is not an entity,
// and the next '&' might start a real entity (VSWhidbey 275184)
char[] inputSlice = input.Skip(i + 1).ToArray();
char[] inputSlice = new char[input.Length - (i + 1)];
Array.Copy(input, i + 1, inputSlice, 0, inputSlice.Length);
int semicolonPos = Array.IndexOf(inputSlice, ';');
int ampersandPos = Array.IndexOf(inputSlice, '&');
@@ -81,9 +86,13 @@ namespace SabreTools.RedumpLib
// &#xE5; --> same char in hex
// See http://www.w3.org/TR/REC-html40/charset.html#entities
int offset = inputSlice[1] == 'x' || inputSlice[1] == 'X' ? 2 : 1;
char[] inputSliceNoPrefix = new char[entityLength - offset];
Array.Copy(inputSlice, offset, inputSliceNoPrefix, 0, inputSliceNoPrefix.Length);
bool parsedSuccessfully = inputSlice[1] == 'x' || inputSlice[1] == 'X'
? uint.TryParse(new string(inputSlice.Skip(2).Take(entityLength - 2).ToArray()), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out uint parsedValue)
: uint.TryParse(new string(inputSlice.Skip(1).Take(entityLength - 1).ToArray()), NumberStyles.Integer, CultureInfo.InvariantCulture, out parsedValue);
? uint.TryParse(new string(inputSliceNoPrefix), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out uint parsedValue)
: uint.TryParse(new string(inputSliceNoPrefix), NumberStyles.Integer, CultureInfo.InvariantCulture, out parsedValue);
if (parsedSuccessfully)
{
@@ -112,7 +121,8 @@ namespace SabreTools.RedumpLib
}
else
{
char[] entity = inputSlice.Take(entityLength).ToArray();
char[] entity = new char[entityLength];
Array.Copy(inputSlice, entity, entityLength);
i = entityEndPosition; // already looked at everything until semicolon
char entityChar = HtmlEntities.Lookup(entity);
@@ -414,7 +424,10 @@ namespace SabreTools.RedumpLib
ulong key = BitConverter.ToUInt64(tableData, 0);
char value = (char)BitConverter.ToUInt16(tableData, sizeof(ulong));
dictionary[key] = value;
tableData = tableData.Skip((sizeof(ulong) + sizeof(char))).ToArray();
byte[] tempTableData = new byte[tableData.Length - (sizeof(ulong) + sizeof(char))];
Array.Copy(tableData, (sizeof(ulong) + sizeof(char)), tempTableData, 0, tempTableData.Length);
tableData = tempTableData;
}
return dictionary;
}
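The LINQ Skip/Take calls were replaced with explicit Array.Copy slices so the decoder no longer depends on System.Linq. A small helper expressing the same pattern might look like the sketch below; it is not part of the library and is shown only to illustrate the replacement.
// Illustrative helper, not part of the library: copies source[start .. start+length)
// into a fresh array, mirroring what the inline Array.Copy calls above do.
private static T[] Slice<T>(T[] source, int start, int length)
{
    T[] result = new T[length];
    Array.Copy(source, start, result, 0, length);
    return result;
}

// Equivalent to the old `input.Skip(i + 1).ToArray()`:
// char[] inputSlice = Slice(input, i + 1, input.Length - (i + 1));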

View File

@@ -1,46 +1,40 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<!-- Assembly Properties -->
<TargetFrameworks>net20;net35;net40;net452;net462;net472;net48;netcoreapp3.1;net5.0;net6.0;net7.0;net8.0</TargetFrameworks>
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64;osx-arm64</RuntimeIdentifiers>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.4.1</Version>
<PropertyGroup>
<!-- Assembly Properties -->
<TargetFrameworks>net20;net35;net40;net452;net462;net472;net48;netcoreapp3.1;net5.0;net6.0;net7.0;net8.0;net9.0</TargetFrameworks>
<IncludeSymbols>true</IncludeSymbols>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<SymbolPackageFormat>snupkg</SymbolPackageFormat>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.6.6</Version>
<!-- Package Properties -->
<Authors>Matt Nadareski</Authors>
<Description>Code to interact with redump.org</Description>
<Copyright>Copyright (c) Matt Nadareski 2020-2024</Copyright>
<PackageProjectUrl>https://github.com/SabreTools/</PackageProjectUrl>
<PackageReadmeFile>README.md</PackageReadmeFile>
<RepositoryUrl>https://github.com/SabreTools/SabreTools.RedumpLib</RepositoryUrl>
<RepositoryType>git</RepositoryType>
<PackageTags>web client redump</PackageTags>
<PackageLicenseExpression>MIT</PackageLicenseExpression>
</PropertyGroup>
<!-- Package Properties -->
<Authors>Matt Nadareski</Authors>
<Description>Code to interact with redump.org</Description>
<Copyright>Copyright (c) Matt Nadareski 2020-2025</Copyright>
<PackageProjectUrl>https://github.com/SabreTools/</PackageProjectUrl>
<PackageReadmeFile>README.md</PackageReadmeFile>
<RepositoryUrl>https://github.com/SabreTools/SabreTools.RedumpLib</RepositoryUrl>
<RepositoryType>git</RepositoryType>
<PackageTags>web client redump</PackageTags>
<PackageLicenseExpression>MIT</PackageLicenseExpression>
</PropertyGroup>
<ItemGroup>
<None Include="../README.md" Pack="true" PackagePath="" />
</ItemGroup>
<ItemGroup>
<InternalsVisibleTo Include="SabreTools.RedumpLib.Test" />
</ItemGroup>
<!-- Support for old .NET versions -->
<ItemGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`))">
<PackageReference Include="MinValueTupleBridge" Version="0.2.1" />
</ItemGroup>
<ItemGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR $(TargetFramework.StartsWith(`net40`))">
<PackageReference Include="MinAsyncBridge" Version="0.12.4" />
<PackageReference Include="MinThreadingBridge" Version="0.11.4" />
<PackageReference Include="Net30.LinqBridge" Version="1.3.0" />
</ItemGroup>
<ItemGroup Condition="$(TargetFramework.StartsWith(`net4`))">
<PackageReference Include="System.ValueTuple" Version="4.5.0" />
</ItemGroup>
<ItemGroup>
<None Include="../README.md" Pack="true" PackagePath="" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.Models" Version="1.4.8" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="MinAsyncBridge" Version="0.12.4" Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR $(TargetFramework.StartsWith(`net40`))" />
<PackageReference Include="Net35.Actions" Version="1.4.0" Condition="$(TargetFramework.StartsWith(`net2`))" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.Models" Version="1.5.8" />
</ItemGroup>
</Project>
</Project>

View File

@@ -1,6 +1,5 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
using SabreTools.RedumpLib.Web;
@@ -22,6 +21,14 @@ namespace SabreTools.RedumpLib
switch (info.CommonDiscInfo.Media)
{
case DiscType.DVD5:
case DiscType.DVD9:
if (info.SizeAndChecksums.Layerbreak != default)
info.CommonDiscInfo.Media = DiscType.DVD9;
else
info.CommonDiscInfo.Media = DiscType.DVD5;
break;
case DiscType.BD25:
case DiscType.BD33:
case DiscType.BD50:
@@ -32,13 +39,13 @@ namespace SabreTools.RedumpLib
info.CommonDiscInfo.Media = DiscType.BD128;
else if (info.SizeAndChecksums.Layerbreak2 != default)
info.CommonDiscInfo.Media = DiscType.BD100;
else if (info.SizeAndChecksums.Layerbreak != default && info.SizeAndChecksums.PICIdentifier == SabreTools.Models.PIC.Constants.DiscTypeIdentifierROMUltra)
else if (info.SizeAndChecksums.Layerbreak != default && info.SizeAndChecksums.PICIdentifier == Models.PIC.Constants.DiscTypeIdentifierROMUltra)
info.CommonDiscInfo.Media = DiscType.BD66;
else if (info.SizeAndChecksums.Layerbreak != default && info.SizeAndChecksums.Size > 50_050_629_632)
info.CommonDiscInfo.Media = DiscType.BD66;
else if (info.SizeAndChecksums.Layerbreak != default)
info.CommonDiscInfo.Media = DiscType.BD50;
else if (info.SizeAndChecksums.PICIdentifier == SabreTools.Models.PIC.Constants.DiscTypeIdentifierROMUltra)
else if (info.SizeAndChecksums.PICIdentifier == Models.PIC.Constants.DiscTypeIdentifierROMUltra)
info.CommonDiscInfo.Media = DiscType.BD33;
else if (info.SizeAndChecksums.Size > 25_025_314_816)
info.CommonDiscInfo.Media = DiscType.BD33;
@@ -46,14 +53,6 @@ namespace SabreTools.RedumpLib
info.CommonDiscInfo.Media = DiscType.BD25;
break;
case DiscType.DVD5:
case DiscType.DVD9:
if (info.SizeAndChecksums.Layerbreak != default)
info.CommonDiscInfo.Media = DiscType.DVD9;
else
info.CommonDiscInfo.Media = DiscType.DVD5;
break;
case DiscType.HDDVDSL:
case DiscType.HDDVDDL:
if (info.SizeAndChecksums.Layerbreak != default)
@@ -130,27 +129,27 @@ namespace SabreTools.RedumpLib
/// <param name="rc">RedumpClient for making the connection</param>
/// <param name="info">Existing SubmissionInfo object to fill</param>
/// <param name="sha1">SHA-1 hash to check against</param>
/// <returns>True if the track was found, false otherwise; List of found values, if possible</returns>
public async static Task<(bool, List<int>?, string?)> ValidateSingleTrack(RedumpClient rc, SubmissionInfo info, string? sha1)
/// <returns>List of found values, if possible</returns>
public async static Task<List<int>?> ValidateSingleTrack(RedumpClient rc, SubmissionInfo info, string? sha1)
{
// Get all matching IDs for the track
var newIds = await ListSearchResults(rc, sha1);
// If we got null back, there was an error
if (newIds == null)
return (false, null, "There was an unknown error retrieving information from Redump");
return null;
// If no IDs match, just return
if (!newIds.Any())
return (false, null, $"There were no matching IDs for track with SHA-1 of '{sha1}'");
if (newIds.Count == 0)
return null;
// Join the list of found IDs to the existing list, if possible
if (info.PartiallyMatchedIDs != null && info.PartiallyMatchedIDs.Any())
if (info.PartiallyMatchedIDs != null && info.PartiallyMatchedIDs.Count > 0)
info.PartiallyMatchedIDs.AddRange(newIds);
else
info.PartiallyMatchedIDs = newIds;
return (true, newIds, $"There were matching ID(s) found for track with SHA-1 of '{sha1}'");
return newIds;
}
/// <summary>
@@ -159,17 +158,17 @@ namespace SabreTools.RedumpLib
/// <param name="rc">RedumpClient for making the connection</param>
/// <param name="info">Existing SubmissionInfo object to fill</param>
/// <param name="resultProgress">Optional result progress callback</param>
/// <returns>True if the track was found, false otherwise; List of found values, if possible</returns>
public async static Task<(bool, List<int>?, string?)> ValidateUniversalHash(RedumpClient rc, SubmissionInfo info)
/// <returns>List of found values, if possible</returns>
public async static Task<List<int>?> ValidateUniversalHash(RedumpClient rc, SubmissionInfo info)
{
// If we don't have special fields
if (info.CommonDiscInfo?.CommentsSpecialFields == null)
return (false, null, "Universal hash was missing");
return null;
// If we don't have a universal hash
string? universalHash = info.CommonDiscInfo.CommentsSpecialFields[SiteCode.UniversalHash];
if (string.IsNullOrEmpty(universalHash))
return (false, null, "Universal hash was missing");
return null;
// Format the universal hash for finding within the comments
string universalHashQuery = $"{universalHash.Substring(0, universalHash.Length - 1)}/comments/only";
@@ -179,19 +178,19 @@ namespace SabreTools.RedumpLib
// If we got null back, there was an error
if (newIds == null)
return (false, null, "There was an unknown error retrieving information from Redump");
return null;
// If no IDs match, just return
if (!newIds.Any())
return (false, null, $"There were no matching IDs for universal hash of '{universalHash}'");
if (newIds.Count == 0)
return null;
// Join the list of found IDs to the existing list, if possible
if (info.PartiallyMatchedIDs != null && info.PartiallyMatchedIDs.Any())
if (info.PartiallyMatchedIDs != null && info.PartiallyMatchedIDs.Count > 0)
info.PartiallyMatchedIDs.AddRange(newIds);
else
info.PartiallyMatchedIDs = newIds;
return (true, newIds, $"There were matching ID(s) found for universal hash of '{universalHash}'");
return newIds;
}
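With the tuple return gone, both validation helpers signal every failure mode the same way (null), so callers that previously surfaced the descriptive message must now build their own. A hedged consumption sketch, where the containing static class is called Validation purely as an assumption and `client`, `info`, and `sha1` are assumed to already exist:
// Sketch only: class name and variables are assumptions.
List<int>? matchedIds = await Validation.ValidateSingleTrack(client, info, sha1);
if (matchedIds == null)
{
    // Null now covers both "request failed" and "no matching IDs";
    // the old explanatory message strings are no longer returned.
    Console.WriteLine($"No matching IDs found for SHA-1 '{sha1}'");
}
else
{
    Console.WriteLine($"Found {matchedIds.Count} matching ID(s) for SHA-1 '{sha1}'");
}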
/// <summary>

View File

@@ -32,7 +32,10 @@ namespace SabreTools.RedumpLib.Web
{
WebRequest request = base.GetWebRequest(address);
if (request is HttpWebRequest webRequest)
{
webRequest.Timeout = 30 * 1000; // 30 seconds
webRequest.CookieContainer = _container;
}
return request;
}

View File

@@ -1,4 +1,5 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
@@ -15,17 +16,22 @@ namespace SabreTools.RedumpLib.Web
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="force">Force continuation of download</param>
public static async Task<bool> DownloadLastModified(RedumpClient rc, string? outDir, bool force)
/// <returns>All disc IDs in last modified range, empty on error</returns>
public static async Task<List<int>> DownloadLastModified(RedumpClient rc, string? outDir, bool force)
{
List<int> ids = [];
// Keep getting last modified pages until there are none left
int pageNumber = 1;
while (true)
{
if (!await rc.CheckSingleSitePage(string.Format(Constants.LastModifiedUrl, pageNumber++), outDir, !force))
var pageIds = await rc.CheckSingleSitePage(string.Format(Constants.LastModifiedUrl, pageNumber++), outDir, !force);
ids.AddRange(pageIds);
if (pageIds.Count == 0)
break;
}
return true;
return ids;
}
/// <summary>
@@ -35,21 +41,25 @@ namespace SabreTools.RedumpLib.Web
/// <param name="outDir">Output directory to save data to</param>
/// <param name="minId">Starting ID for the range</param>
/// <param name="maxId">Ending ID for the range (inclusive)</param>
public static async Task<bool> DownloadSiteRange(RedumpClient rc, string? outDir, int minId = 0, int maxId = 0)
/// <returns>All disc IDs in the requested site range, empty on error</returns>
public static async Task<List<int>> DownloadSiteRange(RedumpClient rc, string? outDir, int minId = 0, int maxId = 0)
{
List<int> ids = [];
if (!rc.LoggedIn)
{
Console.WriteLine("Site download functionality is only available to Redump members");
return false;
return ids;
}
for (int id = minId; id <= maxId; id++)
{
ids.Add(id);
if (await rc.DownloadSingleSiteID(id, outDir, true))
DelayHelper.DelayRandom(); // Intentional sleep here so we don't flood the server
}
return true;
return ids;
}
}
}
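One behavioral note worth calling out: DownloadSiteRange adds every ID in the requested range to the returned list before each download is attempted, so the result describes the range that was walked rather than the discs that actually downloaded. A short sketch under that reading:
// Sketch only: `client` is an already-constructed, logged-in RedumpClient.
List<int> walked = await Discs.DownloadSiteRange(client, "dumps", minId: 1000, maxId: 1010);

// `walked` contains 1000..1010 whenever the client is logged in,
// even if some individual pages failed to download; it is empty when not logged in.
Console.WriteLine($"Walked {walked.Count} site ID(s)");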

View File

@@ -1,5 +1,4 @@
using System;
using System.Linq;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
@@ -18,19 +17,15 @@ namespace SabreTools.RedumpLib.Web
/// <param name="useSubfolders">True to use named subfolders to store downloads, false to store directly in the output directory</param>
public static async Task<bool> DownloadPacks(RedumpClient rc, string? outDir, bool useSubfolders)
{
#if NETFRAMEWORK || NETCOREAPP3_1
var systems = Enum.GetValues(typeof(RedumpSystem)).OfType<RedumpSystem>().Select(s => new Nullable<RedumpSystem>(s));
#else
var systems = Enum.GetValues<RedumpSystem>().Select(s => new RedumpSystem?(s));
#endif
var systems = (RedumpSystem[])Enum.GetValues(typeof(RedumpSystem));
await rc.DownloadPacks(Constants.PackCuesUrl, systems.Where(s => s.HasCues()).ToArray(), "CUEs", outDir, useSubfolders ? "cue" : null);
await rc.DownloadPacks(Constants.PackDatfileUrl, systems.Where(s => s.HasDat()).ToArray(), "DATs", outDir, useSubfolders ? "dat" : null);
await rc.DownloadPacks(Constants.PackDkeysUrl, systems.Where(s => s.HasDkeys()).ToArray(), "Decrypted KEYS", outDir, useSubfolders ? "dkey" : null);
await rc.DownloadPacks(Constants.PackGdiUrl, systems.Where(s => s.HasGdi()).ToArray(), "GDIs", outDir, useSubfolders ? "gdi" : null);
await rc.DownloadPacks(Constants.PackKeysUrl, systems.Where(s => s.HasKeys()).ToArray(), "KEYS", outDir, useSubfolders ? "keys" : null);
await rc.DownloadPacks(Constants.PackLsdUrl, systems.Where(s => s.HasLsd()).ToArray(), "LSD", outDir, useSubfolders ? "lsd" : null);
await rc.DownloadPacks(Constants.PackSbiUrl, systems.Where(s => s.HasSbi()).ToArray(), "SBIs", outDir, useSubfolders ? "sbi" : null);
await rc.DownloadPacks(Constants.PackCuesUrl, Array.FindAll(systems, s => s.HasCues()), "CUEs", outDir, useSubfolders ? "cue" : null);
await rc.DownloadPacks(Constants.PackDatfileUrl, Array.FindAll(systems, s => s.HasDat()), "DATs", outDir, useSubfolders ? "dat" : null);
await rc.DownloadPacks(Constants.PackDkeysUrl, Array.FindAll(systems, s => s.HasDkeys()), "Decrypted KEYS", outDir, useSubfolders ? "dkey" : null);
await rc.DownloadPacks(Constants.PackGdiUrl, Array.FindAll(systems, s => s.HasGdi()), "GDIs", outDir, useSubfolders ? "gdi" : null);
await rc.DownloadPacks(Constants.PackKeysUrl, Array.FindAll(systems, s => s.HasKeys()), "KEYS", outDir, useSubfolders ? "keys" : null);
await rc.DownloadPacks(Constants.PackLsdUrl, Array.FindAll(systems, s => s.HasLsd()), "LSD", outDir, useSubfolders ? "lsd" : null);
await rc.DownloadPacks(Constants.PackSbiUrl, Array.FindAll(systems, s => s.HasSbi()), "SBIs", outDir, useSubfolders ? "sbi" : null);
return true;
}
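Dropping LINQ here means the per-category filtering is now done with Array.FindAll over the plain enum array. The same pattern, isolated (the HasCues extension comes from the library; everything else is illustrative):
// Illustrative only: mirrors the Array.FindAll filtering used above.
RedumpSystem[] systems = (RedumpSystem[])Enum.GetValues(typeof(RedumpSystem));
RedumpSystem[] withCues = Array.FindAll(systems, s => s.HasCues());

Console.WriteLine($"{withCues.Length} of {systems.Length} systems have cuesheet packs");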
@@ -44,7 +39,10 @@ namespace SabreTools.RedumpLib.Web
/// <param name="useSubfolders">True to use named subfolders to store downloads, false to store directly in the output directory</param>
public static async Task<bool> DownloadPacksForSystem(RedumpClient rc, RedumpSystem? system, string? outDir, bool useSubfolders)
{
var systemAsArray = new RedumpSystem?[] { system };
if (system == null)
return false;
var systemAsArray = new RedumpSystem[] { system.Value };
if (system.HasCues())
await rc.DownloadPacks(Constants.PackCuesUrl, systemAsArray, "CUEs", outDir, useSubfolders ? "cue" : null);

View File

@@ -1,7 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
#if NETCOREAPP
using System.Net.Http;
@@ -53,7 +52,7 @@ namespace SabreTools.RedumpLib.Web
#if NETFRAMEWORK
_internalClient = new CookieWebClient();
#else
_internalClient = new HttpClient(new HttpClientHandler { UseCookies = true });
_internalClient = new HttpClient(new HttpClientHandler { UseCookies = true }) { Timeout = TimeSpan.FromSeconds(30) };
#endif
}
@@ -74,22 +73,22 @@ namespace SabreTools.RedumpLib.Web
/// <summary>
/// Validate supplied credentials
/// </summary>
public async static Task<(bool?, string?)> ValidateCredentials(string username, string password)
public async static Task<bool?> ValidateCredentials(string username, string password)
{
// If options are invalid or we're missing something key, just return
if (string.IsNullOrEmpty(username) || string.IsNullOrEmpty(password))
return (false, null);
return false;
// Try logging in with the supplied credentials otherwise
var redumpClient = new RedumpClient();
bool? loggedIn = await redumpClient.Login(username, password);
if (loggedIn == true)
return (true, "Redump username and password accepted!");
return true;
else if (loggedIn == false)
return (false, "Redump username and password denied!");
return false;
else
return (null, "An error occurred validating your credentials!");
return null;
}
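Since the status message was dropped from ValidateCredentials, the three outcomes are now distinguished purely by the nullable bool; a consuming sketch follows, where the printed strings are the caller's own rather than library output.
// Sketch only: messages are supplied by the caller now.
bool? valid = await RedumpClient.ValidateCredentials(username, password);
string message = valid switch
{
    true => "Redump username and password accepted!",
    false => "Redump username and password denied!",
    null => "An error occurred validating your credentials!",
};
Console.WriteLine(message);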
/// <summary>
@@ -129,7 +128,7 @@ namespace SabreTools.RedumpLib.Web
try
{
// Get the current token from the login page
var loginPage = await DownloadStringWithRetries(Constants.LoginUrl);
var loginPage = await DownloadString(Constants.LoginUrl);
string token = Constants.TokenRegex.Match(loginPage ?? string.Empty).Groups[1].Value;
#if NETFRAMEWORK
@@ -187,6 +186,108 @@ namespace SabreTools.RedumpLib.Web
#endregion
#region Generic Helpers
/// <summary>
/// Download from a URI to a byte array
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <returns>Byte array from the URI, null on error</returns>
public async Task<byte[]?> DownloadData(string uri)
{
// Only retry a positive number of times
if (RetryCount <= 0)
return null;
for (int i = 0; i < RetryCount; i++)
{
try
{
#if NET40
return await Task.Factory.StartNew(() => _internalClient.DownloadData(uri));
#elif NETFRAMEWORK
return await Task.Run(() => _internalClient.DownloadData(uri));
#else
return await _internalClient.GetByteArrayAsync(uri);
#endif
}
catch { }
// Sleep for 100ms if the last attempt failed
Thread.Sleep(100);
}
return null;
}
/// <summary>
/// Download from a URI to a local file
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <param name="fileName">Filename to write to</param>
/// <returns>The remote filename from the URI, null on error</returns>
public async Task<string?> DownloadFile(string uri, string fileName)
{
#if NET40
await Task.Factory.StartNew(() => { _internalClient.DownloadFile(uri, fileName); return true; });
return _internalClient.GetLastFilename();
#elif NETFRAMEWORK
await Task.Run(() => _internalClient.DownloadFile(uri, fileName));
return _internalClient.GetLastFilename();
#else
// Make the call to get the file
var response = await _internalClient.GetAsync(uri);
if (response?.Content?.Headers == null || !response.IsSuccessStatusCode)
{
Console.WriteLine($"Could not download {uri}");
return null;
}
// Copy the data to a local temp file
using (var responseStream = await response.Content.ReadAsStreamAsync())
using (var tempFileStream = File.OpenWrite(fileName))
{
responseStream.CopyTo(tempFileStream);
}
return response.Content.Headers.ContentDisposition?.FileName?.Replace("\"", "");
#endif
}
/// <summary>
/// Download from a URI to a string
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <returns>String from the URI, null on error</returns>
public async Task<string?> DownloadString(string uri)
{
// Only retry a positive number of times
if (RetryCount <= 0)
return null;
for (int i = 0; i < RetryCount; i++)
{
try
{
#if NET40
return await Task.Factory.StartNew(() => _internalClient.DownloadString(uri));
#elif NETFRAMEWORK
return await Task.Run(() => _internalClient.DownloadString(uri));
#else
return await _internalClient.GetStringAsync(uri);
#endif
}
catch { }
// Sleep for 100ms if the last attempt failed
Thread.Sleep(100);
}
return null;
}
#endregion
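With these helpers now public, external callers can reuse the retry-wrapped string/byte downloads and the file download directly. A minimal sketch, assuming RetryCount defaults to a positive value; the URLs and output filename are placeholders, not known library constants.
// Sketch only: URLs and filename are illustrative.
var client = new RedumpClient();

string? page = await client.DownloadString("https://example.org/page");
byte[]? data = await client.DownloadData("https://example.org/file.bin");
string? remoteName = await client.DownloadFile("https://example.org/file.zip", "local.zip");

if (page == null || data == null)
    Console.WriteLine("A download failed after all retry attempts.");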
#region Single Page Helpers
/// <summary>
@@ -199,7 +300,7 @@ namespace SabreTools.RedumpLib.Web
List<int> ids = [];
// Try to retrieve the data
string? dumpsPage = await DownloadStringWithRetries(url);
string? dumpsPage = await DownloadString(url);
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
@@ -217,7 +318,7 @@ namespace SabreTools.RedumpLib.Web
// Otherwise, traverse each dump on the page
var matches = Constants.DiscRegex.Matches(dumpsPage);
foreach (Match? match in matches.Cast<Match?>())
foreach (Match? match in matches)
{
if (match == null)
continue;
@@ -243,15 +344,17 @@ namespace SabreTools.RedumpLib.Web
/// <param name="url">Base URL to download using</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="failOnSingle">True to return on first error, false otherwise</param>
/// <returns>True if the page could be downloaded, false otherwise</returns>
public async Task<bool> CheckSingleSitePage(string url, string? outDir, bool failOnSingle)
/// <returns>List of IDs that were found on success, empty on error</returns>
public async Task<List<int>> CheckSingleSitePage(string url, string? outDir, bool failOnSingle)
{
List<int> ids = [];
// Try to retrieve the data
string? dumpsPage = await DownloadStringWithRetries(url);
string? dumpsPage = await DownloadString(url);
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
return false;
return ids;
// If we have a single disc page already
if (dumpsPage.Contains("<b>Download:</b>"))
@@ -259,17 +362,18 @@ namespace SabreTools.RedumpLib.Web
var value = Regex.Match(dumpsPage, @"/disc/(\d+)/sfv/").Groups[1].Value;
if (int.TryParse(value, out int id))
{
ids.Add(id);
bool downloaded = await DownloadSingleSiteID(id, outDir, false);
if (!downloaded && failOnSingle)
return false;
return ids;
}
return false;
return ids;
}
// Otherwise, traverse each dump on the page
var matches = Constants.DiscRegex.Matches(dumpsPage);
foreach (Match? match in matches.Cast<Match?>())
foreach (Match? match in matches)
{
if (match == null)
continue;
@@ -278,9 +382,10 @@ namespace SabreTools.RedumpLib.Web
{
if (int.TryParse(match.Groups[1].Value, out int value))
{
ids.Add(value);
bool downloaded = await DownloadSingleSiteID(value, outDir, false);
if (!downloaded && failOnSingle)
return false;
return ids;
}
}
catch (Exception ex)
@@ -290,7 +395,7 @@ namespace SabreTools.RedumpLib.Web
}
}
return true;
return ids;
}
/// <summary>
@@ -303,7 +408,7 @@ namespace SabreTools.RedumpLib.Web
List<int> ids = [];
// Try to retrieve the data
string? dumpsPage = await DownloadStringWithRetries(url);
string? dumpsPage = await DownloadString(url);
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
@@ -311,7 +416,7 @@ namespace SabreTools.RedumpLib.Web
// Otherwise, traverse each dump on the page
var matches = Constants.NewDiscRegex.Matches(dumpsPage);
foreach (Match? match in matches.Cast<Match?>())
foreach (Match? match in matches)
{
if (match == null)
continue;
@@ -337,19 +442,21 @@ namespace SabreTools.RedumpLib.Web
/// <param name="wc">RedumpWebClient to access the packs</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="failOnSingle">True to return on first error, false otherwise</param>
/// <returns>True if the page could be downloaded, false otherwise</returns>
public async Task<bool> CheckSingleWIPPage(string url, string? outDir, bool failOnSingle)
/// <returns>List of IDs that were found on success, empty on error</returns>
public async Task<List<int>> CheckSingleWIPPage(string url, string? outDir, bool failOnSingle)
{
List<int> ids = [];
// Try to retrieve the data
string? dumpsPage = await DownloadStringWithRetries(url);
string? dumpsPage = await DownloadString(url);
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
return false;
return ids;
// Otherwise, traverse each dump on the page
var matches = Constants.NewDiscRegex.Matches(dumpsPage);
foreach (Match? match in matches.Cast<Match?>())
foreach (Match? match in matches)
{
if (match == null)
continue;
@@ -358,9 +465,10 @@ namespace SabreTools.RedumpLib.Web
{
if (int.TryParse(match.Groups[2].Value, out int value))
{
ids.Add(value);
bool downloaded = await DownloadSingleWIPID(value, outDir, false);
if (!downloaded && failOnSingle)
return false;
return ids;
}
}
catch (Exception ex)
@@ -370,7 +478,7 @@ namespace SabreTools.RedumpLib.Web
}
}
return true;
return ids;
}
#endregion
@@ -445,7 +553,7 @@ namespace SabreTools.RedumpLib.Web
{
// Try to retrieve the data
string discPageUri = string.Format(Constants.DiscPageUrl, +id);
string? discPage = await DownloadStringWithRetries(discPageUri);
string? discPage = await DownloadString(discPageUri);
if (discPage == null || discPage.Contains($"Disc with ID \"{id}\" doesn't exist"))
{
@@ -483,7 +591,7 @@ namespace SabreTools.RedumpLib.Web
{
// Try to retrieve the data
string discPageUri = string.Format(Constants.DiscPageUrl, +id);
string? discPage = await DownloadStringWithRetries(discPageUri);
string? discPage = await DownloadString(discPageUri);
if (discPage == null || discPage.Contains($"Disc with ID \"{id}\" doesn't exist"))
{
@@ -607,7 +715,7 @@ namespace SabreTools.RedumpLib.Web
{
// Try to retrieve the data
string discPageUri = string.Format(Constants.WipDiscPageUrl, +id);
string? discPage = await DownloadStringWithRetries(discPageUri);
string? discPage = await DownloadString(discPageUri);
if (discPage == null || discPage.Contains($"WIP disc with ID \"{id}\" doesn't exist"))
{
@@ -645,7 +753,7 @@ namespace SabreTools.RedumpLib.Web
{
// Try to retrieve the data
string discPageUri = string.Format(Constants.WipDiscPageUrl, +id);
string? discPage = await DownloadStringWithRetries(discPageUri);
string? discPage = await DownloadString(discPageUri);
if (discPage == null || discPage.Contains($"WIP disc with ID \"{id}\" doesn't exist"))
{
@@ -719,7 +827,7 @@ namespace SabreTools.RedumpLib.Web
/// <param name="url">Base URL to download using</param>
/// <param name="system">Systems to download packs for</param>
/// <param name="title">Name of the pack that is downloading</param>
public async Task<Dictionary<RedumpSystem, byte[]>> DownloadPacks(string url, RedumpSystem?[] systems, string title)
public async Task<Dictionary<RedumpSystem, byte[]>> DownloadPacks(string url, RedumpSystem[] systems, string title)
{
var packsDictionary = new Dictionary<RedumpSystem, byte[]>();
@@ -727,7 +835,7 @@ namespace SabreTools.RedumpLib.Web
foreach (var system in systems)
{
// If the system is invalid, we can't do anything
if (system == null || !system.IsAvailable())
if (!system.IsAvailable())
continue;
// If we didn't have credentials
@@ -742,7 +850,7 @@ namespace SabreTools.RedumpLib.Web
Console.Write($"\r{longName}{new string(' ', Console.BufferWidth - longName!.Length - 1)}");
byte[]? pack = await DownloadSinglePack(url, system);
if (pack != null)
packsDictionary.Add(system.Value, pack);
packsDictionary.Add(system, pack);
}
Console.Write($"\rComplete!{new string(' ', Console.BufferWidth - 10)}");
@@ -759,13 +867,13 @@ namespace SabreTools.RedumpLib.Web
/// <param name="title">Name of the pack that is downloading</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="subfolder">Named subfolder for the pack, used optionally</param>
public async Task<bool> DownloadPacks(string url, RedumpSystem?[] systems, string title, string? outDir, string? subfolder)
public async Task<bool> DownloadPacks(string url, RedumpSystem[] systems, string title, string? outDir, string? subfolder)
{
Console.WriteLine($"Downloading {title}");
foreach (var system in systems)
{
// If the system is invalid, we can't do anything
if (system == null || !system.IsAvailable())
if (!system.IsAvailable())
continue;
// If we didn't have credentials
@@ -786,72 +894,6 @@ namespace SabreTools.RedumpLib.Web
return true;
}
/// <summary>
/// Download from a URI to a local file
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <param name="fileName">Filename to write to</param>
/// <returns>The remote filename from the URI, null on error</returns>
private async Task<string?> DownloadFile(string uri, string fileName)
{
#if NET40
await Task.Factory.StartNew(() => { _internalClient.DownloadFile(uri, fileName); return true; });
return _internalClient.GetLastFilename();
#elif NETFRAMEWORK
await Task.Run(() => _internalClient.DownloadFile(uri, fileName));
return _internalClient.GetLastFilename();
#else
// Make the call to get the file
var response = await _internalClient.GetAsync(uri);
if (response?.Content?.Headers == null || !response.IsSuccessStatusCode)
{
Console.WriteLine($"Could not download {uri}");
return null;
}
// Copy the data to a local temp file
using (var responseStream = await response.Content.ReadAsStreamAsync())
using (var tempFileStream = File.OpenWrite(fileName))
{
responseStream.CopyTo(tempFileStream);
}
return response.Content.Headers.ContentDisposition?.FileName?.Replace("\"", "");
#endif
}
/// <summary>
/// Download from a URI to a string
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <returns>String from the URI, null on error</returns>
private async Task<string?> DownloadStringWithRetries(string uri)
{
// Only retry a positive number of times
if (RetryCount <= 0)
return null;
for (int i = 0; i < RetryCount; i++)
{
try
{
#if NET40
return await Task.Factory.StartNew(() => _internalClient.DownloadString(uri));
#elif NETFRAMEWORK
return await Task.Run(() => _internalClient.DownloadString(uri));
#else
return await _internalClient.GetStringAsync(uri);
#endif
}
catch { }
// Sleep for 100ms if the last attempt failed
Thread.Sleep(100);
}
return null;
}
/// <summary>
/// Move a tempfile to a new name unless it already exists, in which case, delete the tempfile
/// </summary>

View File

@@ -15,12 +15,13 @@ namespace SabreTools.RedumpLib.Web
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="query">Query string to attempt to search for</param>
/// <returns>All disc IDs for the given query, null on error</returns>
public static async Task<List<int>?> ListSearchResults(RedumpClient rc, string? query)
/// <param name="noSlash">Don't replace slashes with `-` in queries</param>
/// <returns>All disc IDs for the given query, empty on error</returns>
public static async Task<List<int>> ListSearchResults(RedumpClient rc, string? query, bool noSlash)
{
// If the query is invalid
if (string.IsNullOrEmpty(query))
return null;
return [];
List<int> ids = [];
@@ -29,8 +30,9 @@ namespace SabreTools.RedumpLib.Web
// Special characters become dashes
query = query.Replace(' ', '-');
query = query.Replace('/', '-');
query = query.Replace('\\', '/');
query = query.Replace('\\', '-');
if (!noSlash)
query = query.Replace('/', '-');
// Lowercase is defined per language
query = query.ToLowerInvariant();
@@ -50,7 +52,7 @@ namespace SabreTools.RedumpLib.Web
catch (Exception ex)
{
Console.WriteLine($"An exception occurred while trying to log in: {ex}");
return null;
return [];
}
return ids;
@@ -62,19 +64,24 @@ namespace SabreTools.RedumpLib.Web
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="query">Query string to attempt to search for</param>
/// <param name="outDir">Output directory to save data to</param>
public static async Task<bool> DownloadSearchResults(RedumpClient rc, string? query, string? outDir)
/// <param name="noSlash">Don't replace slashes with `-` in queries</param>
/// <returns>All disc IDs for the given query, empty on error</returns>
public static async Task<List<int>> DownloadSearchResults(RedumpClient rc, string? query, string? outDir, bool noSlash)
{
List<int> ids = [];
// If the query is invalid
if (string.IsNullOrEmpty(query))
return false;
return ids;
// Strip quotes
query = query!.Trim('"', '\'');
// Special characters become dashes
query = query.Replace(' ', '-');
query = query.Replace('/', '-');
query = query.Replace('\\', '/');
query = query.Replace('\\', '-');
if (!noSlash)
query = query.Replace('/', '-');
// Lowercase is defined per language
query = query.ToLowerInvariant();
@@ -83,11 +90,13 @@ namespace SabreTools.RedumpLib.Web
int pageNumber = 1;
while (true)
{
if (!await rc.CheckSingleSitePage(string.Format(Constants.QuickSearchUrl, query, pageNumber++), outDir, false))
var pageIds = await rc.CheckSingleSitePage(string.Format(Constants.QuickSearchUrl, query, pageNumber++), outDir, false);
ids.AddRange(pageIds);
if (pageIds.Count == 0)
break;
}
return true;
return ids;
}
}
}
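The query normalization now treats backslashes and forward slashes independently, with forward slashes left intact when noSlash is set. A small standalone sketch of the same transformation, mirroring the lines above:
// Illustrative only: mirrors the normalization performed above.
static string NormalizeQuery(string query, bool noSlash)
{
    query = query.Trim('"', '\'');
    query = query.Replace(' ', '-');
    query = query.Replace('\\', '-');
    if (!noSlash)
        query = query.Replace('/', '-');
    return query.ToLowerInvariant();
}

// NormalizeQuery("Final Fantasy VII", noSlash: false) -> "final-fantasy-vii"
// NormalizeQuery("A/B Test", noSlash: true)           -> "a/b-test"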

View File

@@ -16,23 +16,28 @@ namespace SabreTools.RedumpLib.Web
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="username">Username to check discs for</param>
/// <param name="outDir">Output directory to save data to</param>
public static async Task<bool> DownloadUser(RedumpClient rc, string? username, string? outDir)
/// <returns>All disc IDs for the given user, empty on error</returns>
public static async Task<List<int>> DownloadUser(RedumpClient rc, string? username, string? outDir)
{
List<int> ids = [];
if (!rc.LoggedIn || string.IsNullOrEmpty(username))
{
Console.WriteLine("User download functionality is only available to Redump members");
return false;
return ids;
}
// Keep getting user pages until there are none left
int pageNumber = 1;
while (true)
{
if (!await rc.CheckSingleSitePage(string.Format(Constants.UserDumpsUrl, username, pageNumber++), outDir, false))
var pageIds = await rc.CheckSingleSitePage(string.Format(Constants.UserDumpsUrl, username, pageNumber++), outDir, false);
ids.AddRange(pageIds);
if (pageIds.Count == 0)
break;
}
return true;
return ids;
}
/// <summary>
@@ -41,23 +46,28 @@ namespace SabreTools.RedumpLib.Web
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="username">Username to check discs for</param>
/// <param name="outDir">Output directory to save data to</param>
public static async Task<bool> DownloadUserLastModified(RedumpClient rc, string? username, string? outDir)
/// <returns>All disc IDs for the given user, empty on error</returns>
public static async Task<List<int>> DownloadUserLastModified(RedumpClient rc, string? username, string? outDir)
{
List<int> ids = [];
if (!rc.LoggedIn || string.IsNullOrEmpty(username))
{
Console.WriteLine("User download functionality is only available to Redump members");
return false;
return ids;
}
// Keep getting last modified user pages until there are none left
int pageNumber = 1;
while (true)
{
if (!await rc.CheckSingleSitePage(string.Format(Constants.UserDumpsLastModifiedUrl, username, pageNumber++), outDir, true))
var pageIds = await rc.CheckSingleSitePage(string.Format(Constants.UserDumpsLastModifiedUrl, username, pageNumber++), outDir, true);
ids.AddRange(pageIds);
if (pageIds.Count == 0)
break;
}
return true;
return ids;
}
/// <summary>
@@ -65,8 +75,8 @@ namespace SabreTools.RedumpLib.Web
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="username">Username to check discs for</param>
/// <returns>All disc IDs for the given user, null on error</returns>
public static async Task<List<int>?> ListUser(RedumpClient rc, string? username)
/// <returns>All disc IDs for the given user, empty on error</returns>
public static async Task<List<int>> ListUser(RedumpClient rc, string? username)
{
List<int> ids = [];
@@ -82,7 +92,7 @@ namespace SabreTools.RedumpLib.Web
int pageNumber = 1;
while (true)
{
List<int> pageIds = await rc.CheckSingleSitePage(string.Format(Constants.UserDumpsUrl, username, pageNumber++));
var pageIds = await rc.CheckSingleSitePage(string.Format(Constants.UserDumpsUrl, username, pageNumber++));
ids.AddRange(pageIds);
if (pageIds.Count <= 1)
break;
@@ -91,7 +101,7 @@ namespace SabreTools.RedumpLib.Web
catch (Exception ex)
{
Console.WriteLine($"An exception occurred while trying to log in: {ex}");
return null;
return [];
}
return ids;

View File

@@ -1,4 +1,5 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
@@ -14,7 +15,8 @@ namespace SabreTools.RedumpLib.Web
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="outDir">Output directory to save data to</param>
public static async Task<bool> DownloadLastSubmitted(RedumpClient rc, string? outDir)
/// <returns>All disc IDs in last submitted range, empty on error</returns>
public static async Task<List<int>> DownloadLastSubmitted(RedumpClient rc, string? outDir)
{
return await rc.CheckSingleWIPPage(Constants.WipDumpsUrl, outDir, false);
}
@@ -26,21 +28,25 @@ namespace SabreTools.RedumpLib.Web
/// <param name="outDir">Output directory to save data to</param>
/// <param name="minId">Starting ID for the range</param>
/// <param name="maxId">Ending ID for the range (inclusive)</param>
public static async Task<bool> DownloadWIPRange(RedumpClient rc, string? outDir, int minId = 0, int maxId = 0)
/// <returns>All disc IDs in the requested WIP range, empty on error</returns>
public static async Task<List<int>> DownloadWIPRange(RedumpClient rc, string? outDir, int minId = 0, int maxId = 0)
{
List<int> ids = [];
if (!rc.LoggedIn || !rc.IsStaff)
{
Console.WriteLine("WIP download functionality is only available to Redump moderators");
return false;
return ids;
}
for (int id = minId; id <= maxId; id++)
{
ids.Add(id);
if (await rc.DownloadSingleWIPID(id, outDir, true))
DelayHelper.DelayRandom(); // Intentional sleep here so we don't flood the server
}
return true;
return ids;
}
}
}

151
publish-nix.sh Executable file
View File

@@ -0,0 +1,151 @@
#! /bin/bash
# This shell script assumes the following:
# - .NET 9.0 (or newer) SDK is installed and in PATH
#
# If any of these are not satisfied, the operation may fail
# in an unpredictable way and result in an incomplete output.
# Optional parameters
USE_ALL=false
INCLUDE_DEBUG=false
NO_BUILD=false
NO_ARCHIVE=false
while getopts "udba" OPTION; do
case $OPTION in
u)
USE_ALL=true
;;
d)
INCLUDE_DEBUG=true
;;
b)
NO_BUILD=true
;;
a)
NO_ARCHIVE=true
;;
*)
echo "Invalid option provided"
exit 1
;;
esac
done
# Set the current directory as a variable
BUILD_FOLDER=$PWD
# Set the current commit hash
COMMIT=`git log --pretty=%H -1`
# Output the selected options
echo "Selected Options:"
echo " Use all frameworks (-u) $USE_ALL"
echo " Include debug builds (-d) $INCLUDE_DEBUG"
echo " No build (-b) $NO_BUILD"
echo " No archive (-a) $NO_ARCHIVE"
echo " "
# Create the build matrix arrays
FRAMEWORKS=("net9.0")
RUNTIMES=("win-x86" "win-x64" "win-arm64" "linux-x64" "linux-arm64" "osx-x64" "osx-arm64")
# Use expanded lists, if requested
if [ $USE_ALL = true ]
then
FRAMEWORKS=("net20" "net35" "net40" "net452" "net462" "net472" "net48" "netcoreapp3.1" "net5.0" "net6.0" "net7.0" "net8.0" "net9.0")
fi
# Create the filter arrays
SINGLE_FILE_CAPABLE=("net5.0" "net6.0" "net7.0" "net8.0" "net9.0")
VALID_APPLE_FRAMEWORKS=("net6.0" "net7.0" "net8.0" "net9.0")
VALID_CROSS_PLATFORM_FRAMEWORKS=("netcoreapp3.1" "net5.0" "net6.0" "net7.0" "net8.0" "net9.0")
VALID_CROSS_PLATFORM_RUNTIMES=("win-arm64" "linux-x64" "linux-arm64" "osx-x64" "osx-arm64")
# Only build if requested
if [ $NO_BUILD = false ]
then
# Restore Nuget packages for all builds
echo "Restoring Nuget packages"
dotnet restore
# Create Nuget Package
dotnet pack SabreTools.RedumpLib/SabreTools.RedumpLib.csproj --output $BUILD_FOLDER
# Build Tool
for FRAMEWORK in "${FRAMEWORKS[@]}"; do
for RUNTIME in "${RUNTIMES[@]}"; do
# Output the current build
echo "===== Build Tool - $FRAMEWORK, $RUNTIME ====="
# If we have an invalid combination of framework and runtime
if [[ ! $(echo ${VALID_CROSS_PLATFORM_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ]]; then
if [[ $(echo ${VALID_CROSS_PLATFORM_RUNTIMES[@]} | fgrep -w $RUNTIME) ]]; then
echo "Skipped due to invalid combination"
continue
fi
fi
# If we have Apple silicon but an unsupported framework
if [[ ! $(echo ${VALID_APPLE_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ]]; then
if [ $RUNTIME = "osx-arm64" ]; then
echo "Skipped due to no Apple Silicon support"
continue
fi
fi
# Only .NET 5 and above can publish to a single file
if [[ $(echo ${SINGLE_FILE_CAPABLE[@]} | fgrep -w $FRAMEWORK) ]]; then
# Only include Debug if set
if [ $INCLUDE_DEBUG = true ]; then
dotnet publish RedumpTool/RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true
fi
dotnet publish RedumpTool/RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true -p:DebugType=None -p:DebugSymbols=false
else
# Only include Debug if set
if [ $INCLUDE_DEBUG = true ]; then
dotnet publish RedumpTool/RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT
fi
dotnet publish RedumpTool/RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:DebugType=None -p:DebugSymbols=false
fi
done
done
fi
# Only create archives if requested
if [ $NO_ARCHIVE = false ]; then
# Create Tool archives
for FRAMEWORK in "${FRAMEWORKS[@]}"; do
for RUNTIME in "${RUNTIMES[@]}"; do
# Output the current build
echo "===== Archive Tool - $FRAMEWORK, $RUNTIME ====="
# If we have an invalid combination of framework and runtime
if [[ ! $(echo ${VALID_CROSS_PLATFORM_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ]]; then
if [[ $(echo ${VALID_CROSS_PLATFORM_RUNTIMES[@]} | fgrep -w $RUNTIME) ]]; then
echo "Skipped due to invalid combination"
continue
fi
fi
# If we have Apple silicon but an unsupported framework
if [[ ! $(echo ${VALID_APPLE_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ]]; then
if [ $RUNTIME = "osx-arm64" ]; then
echo "Skipped due to no Apple Silicon support"
continue
fi
fi
# Only include Debug if set
if [ $INCLUDE_DEBUG = true ]; then
cd $BUILD_FOLDER/RedumpTool/bin/Debug/${FRAMEWORK}/${RUNTIME}/publish/
zip -r $BUILD_FOLDER/RedumpTool_${FRAMEWORK}_${RUNTIME}_debug.zip .
fi
cd $BUILD_FOLDER/RedumpTool/bin/Release/${FRAMEWORK}/${RUNTIME}/publish/
zip -r $BUILD_FOLDER/RedumpTool_${FRAMEWORK}_${RUNTIME}_release.zip .
done
done
# Reset the directory
cd $BUILD_FOLDER
fi

135
publish-win.ps1 Normal file
View File

@@ -0,0 +1,135 @@
# This PowerShell script assumes the following:
# - .NET 9.0 (or newer) SDK is installed and in PATH
#
# If any of these are not satisfied, the operation may fail
# in an unpredictable way and result in an incomplete output.
# Optional parameters
param(
[Parameter(Mandatory = $false)]
[Alias("UseAll")]
[switch]$USE_ALL,
[Parameter(Mandatory = $false)]
[Alias("IncludeDebug")]
[switch]$INCLUDE_DEBUG,
[Parameter(Mandatory = $false)]
[Alias("NoBuild")]
[switch]$NO_BUILD,
[Parameter(Mandatory = $false)]
[Alias("NoArchive")]
[switch]$NO_ARCHIVE
)
# Set the current directory as a variable
$BUILD_FOLDER = $PSScriptRoot
# Set the current commit hash
$COMMIT = git log --pretty=format:"%H" -1
# Output the selected options
Write-Host "Selected Options:"
Write-Host " Use all frameworks (-UseAll) $USE_ALL"
Write-Host " Include debug builds (-IncludeDebug) $INCLUDE_DEBUG"
Write-Host " No build (-NoBuild) $NO_BUILD"
Write-Host " No archive (-NoArchive) $NO_ARCHIVE"
Write-Host " "
# Create the build matrix arrays
$FRAMEWORKS = @('net9.0')
$RUNTIMES = @('win-x86', 'win-x64', 'win-arm64', 'linux-x64', 'linux-arm64', 'osx-x64', 'osx-arm64')
# Use expanded lists, if requested
if ($USE_ALL.IsPresent)
{
$FRAMEWORKS = @('net20', 'net35', 'net40', 'net452', 'net462', 'net472', 'net48', 'netcoreapp3.1', 'net5.0', 'net6.0', 'net7.0', 'net8.0', 'net9.0')
}
# Create the filter arrays
$SINGLE_FILE_CAPABLE = @('net5.0', 'net6.0', 'net7.0', 'net8.0', 'net9.0')
$VALID_APPLE_FRAMEWORKS = @('net6.0', 'net7.0', 'net8.0', 'net9.0')
$VALID_CROSS_PLATFORM_FRAMEWORKS = @('netcoreapp3.1', 'net5.0', 'net6.0', 'net7.0', 'net8.0', 'net9.0')
$VALID_CROSS_PLATFORM_RUNTIMES = @('win-arm64', 'linux-x64', 'linux-arm64', 'osx-x64', 'osx-arm64')
# Only build if requested
if (!$NO_BUILD.IsPresent)
{
# Restore Nuget packages for all builds
Write-Host "Restoring Nuget packages"
dotnet restore
# Create Nuget Package
dotnet pack SabreTools.RedumpLib\SabreTools.RedumpLib.csproj --output $BUILD_FOLDER
# Build Tool
foreach ($FRAMEWORK in $FRAMEWORKS) {
foreach ($RUNTIME in $RUNTIMES) {
# Output the current build
Write-Host "===== Build Tool - $FRAMEWORK, $RUNTIME ====="
# If we have an invalid combination of framework and runtime
if ($VALID_CROSS_PLATFORM_FRAMEWORKS -notcontains $FRAMEWORK -and $VALID_CROSS_PLATFORM_RUNTIMES -contains $RUNTIME) {
Write-Host "Skipped due to invalid combination"
continue
}
# If we have Apple silicon but an unsupported framework
if ($VALID_APPLE_FRAMEWORKS -notcontains $FRAMEWORK -and $RUNTIME -eq 'osx-arm64') {
Write-Host "Skipped due to no Apple Silicon support"
continue
}
# Only .NET 5 and above can publish to a single file
if ($SINGLE_FILE_CAPABLE -contains $FRAMEWORK) {
# Only include Debug if set
if ($INCLUDE_DEBUG.IsPresent) {
dotnet publish RedumpTool\RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true
}
dotnet publish RedumpTool\RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true -p:DebugType=None -p:DebugSymbols=false
}
else {
# Only include Debug if set
if ($INCLUDE_DEBUG.IsPresent) {
dotnet publish RedumpTool\RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT
}
dotnet publish RedumpTool\RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:DebugType=None -p:DebugSymbols=false
}
}
}
}
# Only create archives if requested
if (!$NO_ARCHIVE.IsPresent) {
# Create Tool archives
foreach ($FRAMEWORK in $FRAMEWORKS) {
foreach ($RUNTIME in $RUNTIMES) {
# Output the current build
Write-Host "===== Archive Tool - $FRAMEWORK, $RUNTIME ====="
# If we have an invalid combination of framework and runtime
if ($VALID_CROSS_PLATFORM_FRAMEWORKS -notcontains $FRAMEWORK -and $VALID_CROSS_PLATFORM_RUNTIMES -contains $RUNTIME) {
Write-Host "Skipped due to invalid combination"
continue
}
# If we have Apple silicon but an unsupported framework
if ($VALID_APPLE_FRAMEWORKS -notcontains $FRAMEWORK -and $RUNTIME -eq 'osx-arm64') {
Write-Host "Skipped due to no Apple Silicon support"
continue
}
# Only include Debug if set
if ($INCLUDE_DEBUG.IsPresent) {
Set-Location -Path $BUILD_FOLDER\RedumpTool\bin\Debug\${FRAMEWORK}\${RUNTIME}\publish\
7z a -tzip $BUILD_FOLDER\RedumpTool_${FRAMEWORK}_${RUNTIME}_debug.zip *
}
Set-Location -Path $BUILD_FOLDER\RedumpTool\bin\Release\${FRAMEWORK}\${RUNTIME}\publish\
7z a -tzip $BUILD_FOLDER\RedumpTool_${FRAMEWORK}_${RUNTIME}_release.zip *
}
}
# Reset the directory
Set-Location -Path $PSScriptRoot
}