Compare commits

...

98 Commits
1.3.7 ... 1.7.2

Author SHA1 Message Date
Matt Nadareski
1f4cca7fa0 Bump version 2025-09-05 10:14:28 -04:00
Matt Nadareski
5161913d96 Update Nuget packages 2025-09-05 09:45:00 -04:00
Deterous
c10b21a485 Add Polymega system (#11)
* Add Polymega system

* Polymega detected by windows
2025-08-31 01:22:51 -04:00
Matt Nadareski
99adad582c Forgot this test update 2025-08-23 09:52:31 -04:00
Matt Nadareski
6956617386 Bump version 2025-08-23 09:48:43 -04:00
Matt Nadareski
b5522c5bb0 Add High Sierra volume descriptor pseudo-tag 2025-08-23 09:48:07 -04:00
Matt Nadareski
889cedb87a Bump version 2025-07-24 09:01:21 -04:00
Matt Nadareski
eba722b23a Be consistent about end-of-file newlines 2025-07-24 08:59:57 -04:00
Matt Nadareski
5d77af7e97 Add .NET Standard 2.0 and 2.1 2025-07-24 08:55:29 -04:00
Matt Nadareski
d9cb9394e0 Update nuget packages 2025-07-24 08:48:37 -04:00
Matt Nadareski
dfb5ac762a Bump version 2025-07-21 12:08:21 -04:00
Matt Nadareski
2459165990 Add Interplay ID pseudo-tag 2025-07-20 20:40:59 -04:00
Matt Nadareski
e16ffcb78a Bump version 2025-07-11 12:26:54 -04:00
Matt Nadareski
da27675588 Add PC/Mac Hybrid pseudo-tag 2025-07-11 12:16:40 -04:00
Matt Nadareski
0021c7653c Bump version 2025-05-23 12:39:49 -04:00
Matt Nadareski
091a32b223 Add new non-tag site codes 2025-05-23 12:35:16 -04:00
Matt Nadareski
f29a50c3b0 Bump version 2025-05-01 10:54:09 -04:00
Matt Nadareski
edf6e12371 Update Nuget packages 2025-05-01 10:53:21 -04:00
Matt Nadareski
146b2780c5 Fix skipped multiline site codes 2025-05-01 10:51:26 -04:00
Matt Nadareski
60ce7cbfa0 Bump version 2025-04-30 20:51:59 -04:00
Deterous
39fe46a162 Remove obsolete PhilipsCDiDigitalVideo system (#10)
* Remove obsolete PhilipsCDiDigitalVideo system

* Delete obsolete system
2025-04-17 08:04:02 -04:00
Matt Nadareski
a8674a21e4 Add 30 second timeout to web operations 2025-04-13 21:20:00 -04:00
Matt Nadareski
65f2d53a3f Fix how conditions are used for references 2025-02-25 21:16:16 -05:00
Matt Nadareski
0bc869543a Bump version 2024-12-31 21:10:46 -05:00
Matt Nadareski
aa7d513d2c Add Ring Perfect Audio Offset pseudo-tag 2024-12-31 21:09:11 -05:00
Matt Nadareski
3d35129529 Update copyright 2024-12-30 21:27:26 -05:00
Matt Nadareski
ec563938ba Remove unnecessary action step 2024-12-30 21:26:16 -05:00
Matt Nadareski
f0f3a1a194 Bump version 2024-12-28 13:52:24 -05:00
Deterous
55f5262198 Add new Protection pseudo site code (#9)
* Add Protection pseudo site tag

* Use new sitecode in redumplib
2024-12-27 12:33:35 -05:00
Matt Nadareski
1d247b1f6f Use string comparison on tab replacement when possible 2024-12-25 22:13:26 -05:00
Matt Nadareski
32c57736ae Duplicate write offset field for convenience (fixes #8) 2024-12-25 22:03:55 -05:00
Deterous
8ab312ba8b Convert <Tab> (#7) 2024-12-24 21:07:51 -05:00
Matt Nadareski
3ea01ca933 Ensure .NET versions are installed for testing 2024-12-19 10:52:22 -05:00
Matt Nadareski
27d99f7429 Bump version 2024-12-16 14:35:02 -05:00
Matt Nadareski
8b147f2041 Change empty language list message 2024-12-16 14:33:22 -05:00
Matt Nadareski
9c7a143d52 Add to publish scripts, not rolling build 2024-12-16 14:26:55 -05:00
Matt Nadareski
30bbef7bba Add RedumpTool as a non-building component 2024-12-16 14:23:43 -05:00
Matt Nadareski
17da564b00 Fix old .NET support 2024-12-16 14:22:07 -05:00
Matt Nadareski
073176cccb Update Models to 1.5.8 2024-12-16 14:21:53 -05:00
Matt Nadareski
0434e63e34 Allow symbols to be packed 2024-12-16 14:21:34 -05:00
Matt Nadareski
2b75eb44cd Use publish script and update README 2024-12-06 11:34:34 -05:00
Matt Nadareski
10eecc866e Bump version 2024-12-05 22:11:48 -05:00
Matt Nadareski
84fa2f93ea Fix consecutive empty line logic 2024-12-05 21:15:26 -05:00
Matt Nadareski
5a92c0fc98 Bump version 2024-12-01 22:58:47 -05:00
Matt Nadareski
4ffc1b3160 Fix multi-newline formatting, add tests 2024-12-01 22:44:12 -05:00
Matt Nadareski
ffa8f2b16e Update ToDiscType and add tests 2024-12-01 22:31:19 -05:00
Matt Nadareski
70e3e074cc Update some extensions, update tests 2024-12-01 22:14:49 -05:00
Matt Nadareski
4858b4e459 None of these are TODOs on my part 2024-12-01 21:32:45 -05:00
Matt Nadareski
9495cd32c7 Handle some TODO items 2024-12-01 21:31:58 -05:00
Matt Nadareski
071571870e Add ToYesNo tests 2024-12-01 21:20:47 -05:00
Matt Nadareski
f03cd40181 Use automatic system name mapping 2024-12-01 21:14:08 -05:00
Matt Nadareski
ea51726645 Fill out more tests 2024-12-01 21:09:15 -05:00
Matt Nadareski
f0633d5aa7 Framework only matters for executable 2024-11-30 21:39:44 -05:00
Matt Nadareski
4c076aec0c Update packages 2024-11-30 21:38:41 -05:00
Matt Nadareski
2ec9d6a4a0 Use more targeted library for old .NET 2024-11-18 19:53:44 -05:00
Matt Nadareski
415b488005 Bump version 2024-11-14 22:23:20 -05:00
Matt Nadareski
5d300c9975 Make download helpers public for ease 2024-11-14 22:22:52 -05:00
Matt Nadareski
304236774f Bump version 2024-11-13 01:54:40 -05:00
Matt Nadareski
9924289c48 Fix casting issues 2024-11-13 01:54:24 -05:00
Matt Nadareski
240eb74ead Bump version 2024-11-13 01:30:42 -05:00
Matt Nadareski
a64b109d2c Remove unnecessary Linq usage 2024-11-13 01:29:36 -05:00
Matt Nadareski
3e0f9b5410 Add .NET 9 to target frameworks 2024-11-13 00:59:20 -05:00
Matt Nadareski
668be418ac Bump version 2024-10-18 12:30:30 -04:00
Matt Nadareski
7d184a634e Always return ID list, if possible 2024-10-18 12:26:36 -04:00
Matt Nadareski
67aed0899d Don't null foreign title if missing 2024-10-18 11:58:29 -04:00
Matt Nadareski
9fbaf1a187 Bump version 2024-10-04 01:42:58 -04:00
Matt Nadareski
fe8686a2bb Allow forward slashes in queries sometimes 2024-10-04 01:41:05 -04:00
Matt Nadareski
652270c8c7 Add publish scripts 2024-10-01 13:56:40 -04:00
Matt Nadareski
905d8a94fb Bump version 2024-10-01 13:55:33 -04:00
Matt Nadareski
3ee8416695 Remove unnecessary tuples 2024-10-01 13:53:03 -04:00
Matt Nadareski
49fa06da55 Remove threading bridge package (unused) 2024-10-01 04:27:31 -04:00
Matt Nadareski
70e29afd89 Remove Linq requirement from old .NET 2024-10-01 04:25:35 -04:00
Matt Nadareski
2a402a53db Remove ValueTuple packages (unused) 2024-10-01 03:21:07 -04:00
Matt Nadareski
66bb3b75b2 Bump version 2024-07-24 11:05:25 -04:00
Matt Nadareski
9d7d46673a Fix deserializing submission from file 2024-07-23 22:18:06 -04:00
Matt Nadareski
16d196c902 Bump version 2024-07-16 14:13:28 -04:00
Matt Nadareski
c93da92f19 Add new helper class for site interaction 2024-07-16 14:12:28 -04:00
Matt Nadareski
a219d0c5de Add some client helper classes 2024-07-16 14:07:18 -04:00
Matt Nadareski
02e6f0e85f Port tests from MPF 2024-07-16 13:08:56 -04:00
Deterous
9dfec64e4e Bump version (#6) 2024-07-09 11:09:28 -04:00
Deterous
4c12693a33 Update Sharp X68000 shortname to x68k (#5) 2024-07-08 22:35:28 -04:00
Deterous
acea06c05f Update Triforce shortname to trf (#4) 2024-07-08 21:43:40 -04:00
Matt Nadareski
067c5cfbbc Move project to subfolder 2024-06-27 16:49:18 -04:00
Matt Nadareski
ff04b8ec6f Fix helper classes 2024-06-27 09:43:01 -04:00
Matt Nadareski
e09c895cf1 Add retry count constructor to client 2024-06-27 09:34:58 -04:00
Matt Nadareski
1b68253089 Remove now-redundant classes 2024-06-27 01:41:17 -04:00
Matt Nadareski
5b9a2d6b74 CookieWebClient should be internal 2024-06-27 01:33:15 -04:00
Matt Nadareski
d5c7ef74d4 Reduce code complexity in new client (nw) 2024-06-27 01:30:30 -04:00
Matt Nadareski
f60cd2985d Add unified Client (nw) 2024-06-27 01:12:57 -04:00
Matt Nadareski
7fcb6aa949 Create separate cookie web client 2024-06-27 00:34:15 -04:00
Matt Nadareski
f22b1b036b Minor fixes to organization, add sleep 2024-06-27 00:18:33 -04:00
Matt Nadareski
ec045448c5 Make retry count into a field, sync clients 2024-06-27 00:05:53 -04:00
Matt Nadareski
93873ea204 Add dumping parameters field 2024-06-26 10:02:28 -04:00
Matt Nadareski
341edc56bd Bump version 2024-05-15 17:05:15 -04:00
Matt Nadareski
da4bdac6e2 Update Models 2024-05-15 17:04:55 -04:00
Matt Nadareski
7fe595ee0a Fix ordering of site codes 2024-05-15 16:44:07 -04:00
Matt Nadareski
8a9f62f5a4 Add new tags to formatting list 2024-05-15 16:40:01 -04:00
Matt Nadareski
dbb7cf7ef9 Add missing comment fields to listing 2024-05-15 16:38:17 -04:00
53 changed files with 7336 additions and 3071 deletions

View File

@@ -1,4 +1,4 @@
name: Nuget Pack
name: Build and Test
on:
push:
@@ -12,29 +12,26 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 8.0.x
- name: Restore dependencies
run: dotnet restore
- name: Pack
run: dotnet pack
dotnet-version: |
6.0.x
8.0.x
9.0.x
- name: Upload build
uses: actions/upload-artifact@v4
with:
name: 'Nuget Package'
path: 'bin/Release/*.nupkg'
- name: Run tests
run: dotnet test
- name: Run publish script
run: ./publish-nix.sh -d
- name: Upload to rolling
uses: ncipollo/release-action@v1.14.0
with:
allowUpdates: True
artifacts: 'bin/Release/*.nupkg'
artifacts: "*.nupkg,*.snupkg"
body: 'Last built commit: ${{ github.sha }}'
name: 'Rolling Release'
prerelease: True

View File

@@ -11,7 +11,13 @@ jobs:
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 8.0.x
dotnet-version: |
6.0.x
8.0.x
9.0.x
- name: Build
run: dotnet build
run: dotnet build
- name: Run tests
run: dotnet test

View File

@@ -1,687 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.RegularExpressions;
using SabreTools.RedumpLib.Data;
namespace SabreTools.RedumpLib
{
public static class Formatter
{
/// <summary>
/// Format the output data in a human readable way, separating each printed line into a new item in the list
/// </summary>
/// <param name="info">Information object that should contain normalized values</param>
/// <param name="enableRedumpCompatibility">True to enable Redump compatiblity, false otherwise</param>
/// <returns>List of strings representing each line of an output file, null on error</returns>
public static (List<string>?, string?) FormatOutputData(SubmissionInfo? info, bool enableRedumpCompatibility)
{
// Check to see if the inputs are valid
if (info == null)
return (null, "Submission information was missing");
try
{
// Sony-printed discs have layers in the opposite order
var system = info.CommonDiscInfo?.System;
bool reverseOrder = system.HasReversedRingcodes();
// Preamble for submission
#pragma warning disable IDE0028
var output = new List<string>
{
"Users who wish to submit this information to Redump must ensure that all of the fields below are accurate for the exact media they have.",
"Please double-check to ensure that there are no fields that need verification, such as the version or copy protection.",
"If there are no fields in need of verification or all fields are accurate, this preamble can be removed before submission.",
"",
};
// Common Disc Info section
output.Add("Common Disc Info:");
AddIfExists(output, Template.TitleField, info.CommonDiscInfo?.Title, 1);
AddIfExists(output, Template.ForeignTitleField, info.CommonDiscInfo?.ForeignTitleNonLatin, 1);
AddIfExists(output, Template.DiscNumberField, info.CommonDiscInfo?.DiscNumberLetter, 1);
AddIfExists(output, Template.DiscTitleField, info.CommonDiscInfo?.DiscTitle, 1);
AddIfExists(output, Template.SystemField, info.CommonDiscInfo?.System.LongName(), 1);
AddIfExists(output, Template.MediaTypeField, GetFixedMediaType(
info.CommonDiscInfo?.Media.ToMediaType(),
info.SizeAndChecksums?.PICIdentifier,
info.SizeAndChecksums?.Size,
info.SizeAndChecksums?.Layerbreak,
info.SizeAndChecksums?.Layerbreak2,
info.SizeAndChecksums?.Layerbreak3),
1);
AddIfExists(output, Template.CategoryField, info.CommonDiscInfo?.Category.LongName(), 1);
AddIfExists(output, Template.FullyMatchingIDField, info.FullyMatchedID?.ToString(), 1);
AddIfExists(output, Template.PartiallyMatchingIDsField, info.PartiallyMatchedIDs, 1);
AddIfExists(output, Template.RegionField, info.CommonDiscInfo?.Region.LongName() ?? "SPACE! (CHANGE THIS)", 1);
AddIfExists(output, Template.LanguagesField, (info.CommonDiscInfo?.Languages ?? [null]).Select(l => l.LongName() ?? "SILENCE! (CHANGE THIS)").ToArray(), 1);
AddIfExists(output, Template.PlaystationLanguageSelectionViaField, (info.CommonDiscInfo?.LanguageSelection ?? []).Select(l => l.LongName()).ToArray(), 1);
AddIfExists(output, Template.DiscSerialField, info.CommonDiscInfo?.Serial, 1);
// All ringcode information goes in an indented area
output.Add(""); output.Add("\tRingcode Information:"); output.Add("");
// If we have a quad-layer disc
if (info.SizeAndChecksums?.Layerbreak3 != default && info.SizeAndChecksums?.Layerbreak3 != default(long))
{
AddIfExists(output, (reverseOrder ? "Layer 0 (Outer) " : "Layer 0 (Inner) ") + Template.MasteringRingField, info.CommonDiscInfo?.Layer0MasteringRing, 0);
AddIfExists(output, (reverseOrder ? "Layer 0 (Outer) " : "Layer 0 (Inner) ") + Template.MasteringSIDField, info.CommonDiscInfo?.Layer0MasteringSID, 0);
AddIfExists(output, (reverseOrder ? "Layer 0 (Outer) " : "Layer 0 (Inner) ") + Template.ToolstampField, info.CommonDiscInfo?.Layer0ToolstampMasteringCode, 0);
AddIfExists(output, "Data Side " + Template.MouldSIDField, info.CommonDiscInfo?.Layer0MouldSID, 0);
AddIfExists(output, "Data Side " + Template.AdditionalMouldField, info.CommonDiscInfo?.Layer0AdditionalMould, 0);
AddIfExists(output, "Layer 1 " + Template.MasteringRingField, info.CommonDiscInfo?.Layer1MasteringRing, 0);
AddIfExists(output, "Layer 1 " + Template.MasteringSIDField, info.CommonDiscInfo?.Layer1MasteringSID, 0);
AddIfExists(output, "Layer 1 " + Template.ToolstampField, info.CommonDiscInfo?.Layer1ToolstampMasteringCode, 0);
AddIfExists(output, "Label Side " + Template.MouldSIDField, info.CommonDiscInfo?.Layer1MouldSID, 0);
AddIfExists(output, "Label Side " + Template.AdditionalMouldField, info.CommonDiscInfo?.Layer1AdditionalMould, 0);
AddIfExists(output, "Layer 2 " + Template.MasteringRingField, info.CommonDiscInfo?.Layer2MasteringRing, 0);
AddIfExists(output, "Layer 2 " + Template.MasteringSIDField, info.CommonDiscInfo?.Layer2MasteringSID, 0);
AddIfExists(output, "Layer 2 " + Template.ToolstampField, info.CommonDiscInfo?.Layer2ToolstampMasteringCode, 0);
AddIfExists(output, (reverseOrder ? "Layer 3 (Inner) " : "Layer 3 (Outer) ") + Template.MasteringRingField, info.CommonDiscInfo?.Layer3MasteringRing, 0);
AddIfExists(output, (reverseOrder ? "Layer 3 (Inner) " : "Layer 3 (Outer) ") + Template.MasteringSIDField, info.CommonDiscInfo?.Layer3MasteringSID, 0);
AddIfExists(output, (reverseOrder ? "Layer 3 (Inner) " : "Layer 3 (Outer) ") + Template.ToolstampField, info.CommonDiscInfo?.Layer3ToolstampMasteringCode, 0);
}
// If we have a triple-layer disc
else if (info.SizeAndChecksums?.Layerbreak2 != default && info.SizeAndChecksums?.Layerbreak2 != default(long))
{
AddIfExists(output, (reverseOrder ? "Layer 0 (Outer) " : "Layer 0 (Inner) ") + Template.MasteringRingField, info.CommonDiscInfo?.Layer0MasteringRing, 0);
AddIfExists(output, (reverseOrder ? "Layer 0 (Outer) " : "Layer 0 (Inner) ") + Template.MasteringSIDField, info.CommonDiscInfo?.Layer0MasteringSID, 0);
AddIfExists(output, (reverseOrder ? "Layer 0 (Outer) " : "Layer 0 (Inner) ") + Template.ToolstampField, info.CommonDiscInfo?.Layer0ToolstampMasteringCode, 0);
AddIfExists(output, "Data Side " + Template.MouldSIDField, info.CommonDiscInfo?.Layer0MouldSID, 0);
AddIfExists(output, "Data Side " + Template.AdditionalMouldField, info.CommonDiscInfo?.Layer0AdditionalMould, 0);
AddIfExists(output, "Layer 1 " + Template.MasteringRingField, info.CommonDiscInfo?.Layer1MasteringRing, 0);
AddIfExists(output, "Layer 1 " + Template.MasteringSIDField, info.CommonDiscInfo?.Layer1MasteringSID, 0);
AddIfExists(output, "Layer 1 " + Template.ToolstampField, info.CommonDiscInfo?.Layer1ToolstampMasteringCode, 0);
AddIfExists(output, "Label Side " + Template.MouldSIDField, info.CommonDiscInfo?.Layer1MouldSID, 0);
AddIfExists(output, "Label Side " + Template.AdditionalMouldField, info.CommonDiscInfo?.Layer1AdditionalMould, 0);
AddIfExists(output, (reverseOrder ? "Layer 2 (Inner) " : "Layer 2 (Outer) ") + Template.MasteringRingField, info.CommonDiscInfo?.Layer2MasteringRing, 0);
AddIfExists(output, (reverseOrder ? "Layer 2 (Inner) " : "Layer 2 (Outer) ") + Template.MasteringSIDField, info.CommonDiscInfo?.Layer2MasteringSID, 0);
AddIfExists(output, (reverseOrder ? "Layer 2 (Inner) " : "Layer 2 (Outer) ") + Template.ToolstampField, info.CommonDiscInfo?.Layer2ToolstampMasteringCode, 0);
}
// If we have a dual-layer disc
else if (info.SizeAndChecksums?.Layerbreak != default && info.SizeAndChecksums?.Layerbreak != default(long))
{
AddIfExists(output, (reverseOrder ? "Layer 0 (Outer) " : "Layer 0 (Inner) ") + Template.MasteringRingField, info.CommonDiscInfo?.Layer0MasteringRing, 0);
AddIfExists(output, (reverseOrder ? "Layer 0 (Outer) " : "Layer 0 (Inner) ") + Template.MasteringSIDField, info.CommonDiscInfo?.Layer0MasteringSID, 0);
AddIfExists(output, (reverseOrder ? "Layer 0 (Outer) " : "Layer 0 (Inner) ") + Template.ToolstampField, info.CommonDiscInfo?.Layer0ToolstampMasteringCode, 0);
AddIfExists(output, "Data Side " + Template.MouldSIDField, info.CommonDiscInfo?.Layer0MouldSID, 0);
AddIfExists(output, "Data Side " + Template.AdditionalMouldField, info.CommonDiscInfo?.Layer0AdditionalMould, 0);
AddIfExists(output, (reverseOrder ? "Layer 1 (Inner) " : "Layer 1 (Outer) ") + Template.MasteringRingField, info.CommonDiscInfo?.Layer1MasteringRing, 0);
AddIfExists(output, (reverseOrder ? "Layer 1 (Inner) " : "Layer 1 (Outer) ") + Template.MasteringSIDField, info.CommonDiscInfo?.Layer1MasteringSID, 0);
AddIfExists(output, (reverseOrder ? "Layer 1 (Inner) " : "Layer 1 (Outer) ") + Template.ToolstampField, info.CommonDiscInfo?.Layer1ToolstampMasteringCode, 0);
AddIfExists(output, "Label Side " + Template.MouldSIDField, info.CommonDiscInfo?.Layer1MouldSID, 0);
AddIfExists(output, "Label Side " + Template.AdditionalMouldField, info.CommonDiscInfo?.Layer1AdditionalMould, 0);
}
// If we have a single-layer disc
else
{
AddIfExists(output, "Data Side " + Template.MasteringRingField, info.CommonDiscInfo?.Layer0MasteringRing, 0);
AddIfExists(output, "Data Side " + Template.MasteringSIDField, info.CommonDiscInfo?.Layer0MasteringSID, 0);
AddIfExists(output, "Data Side " + Template.ToolstampField, info.CommonDiscInfo?.Layer0ToolstampMasteringCode, 0);
AddIfExists(output, "Data Side " + Template.MouldSIDField, info.CommonDiscInfo?.Layer0MouldSID, 0);
AddIfExists(output, "Data Side " + Template.AdditionalMouldField, info.CommonDiscInfo?.Layer0AdditionalMould, 0);
AddIfExists(output, "Label Side " + Template.MasteringRingField, info.CommonDiscInfo?.Layer1MasteringRing, 0);
AddIfExists(output, "Label Side " + Template.MasteringSIDField, info.CommonDiscInfo?.Layer1MasteringSID, 0);
AddIfExists(output, "Label Side " + Template.ToolstampField, info.CommonDiscInfo?.Layer1ToolstampMasteringCode, 0);
AddIfExists(output, "Label Side " + Template.MouldSIDField, info.CommonDiscInfo?.Layer1MouldSID, 0);
AddIfExists(output, "Label Side " + Template.AdditionalMouldField, info.CommonDiscInfo?.Layer1AdditionalMould, 0);
}
output.Add("");
AddIfExists(output, Template.BarcodeField, info.CommonDiscInfo?.Barcode, 1);
AddIfExists(output, Template.EXEDateBuildDate, info.CommonDiscInfo?.EXEDateBuildDate, 1);
AddIfExists(output, Template.ErrorCountField, info.CommonDiscInfo?.ErrorsCount, 1);
AddIfExists(output, Template.CommentsField, info.CommonDiscInfo?.Comments?.Trim(), 1);
AddIfExists(output, Template.ContentsField, info.CommonDiscInfo?.Contents?.Trim(), 1);
// Version and Editions section
output.Add(""); output.Add("Version and Editions:");
AddIfExists(output, Template.VersionField, info.VersionAndEditions?.Version, 1);
AddIfExists(output, Template.EditionField, info.VersionAndEditions?.OtherEditions, 1);
// EDC section
if (info.CommonDiscInfo?.System == RedumpSystem.SonyPlayStation)
{
output.Add(""); output.Add("EDC:");
AddIfExists(output, Template.PlayStationEDCField, info.EDC?.EDC.LongName(), 1);
}
// Parent/Clone Relationship section
// output.Add(""); output.Add("Parent/Clone Relationship:");
// AddIfExists(output, Template.ParentIDField, info.ParentID);
// AddIfExists(output, Template.RegionalParentField, info.RegionalParent.ToString());
// Extras section
if (info.Extras?.PVD != null || info.Extras?.PIC != null || info.Extras?.BCA != null || info.Extras?.SecuritySectorRanges != null)
{
output.Add(""); output.Add("Extras:");
AddIfExists(output, Template.PVDField, info.Extras.PVD?.Trim(), 1);
AddIfExists(output, Template.PlayStation3WiiDiscKeyField, info.Extras.DiscKey, 1);
AddIfExists(output, Template.PlayStation3DiscIDField, info.Extras.DiscID, 1);
AddIfExists(output, Template.PICField, info.Extras.PIC, 1);
AddIfExists(output, Template.HeaderField, info.Extras.Header, 1);
AddIfExists(output, Template.GameCubeWiiBCAField, info.Extras.BCA, 1);
AddIfExists(output, Template.XBOXSSRanges, info.Extras.SecuritySectorRanges, 1);
}
// Copy Protection section
if (!string.IsNullOrEmpty(info.CopyProtection?.Protection)
|| (info.CopyProtection?.AntiModchip != null && info.CopyProtection.AntiModchip != YesNo.NULL)
|| (info.CopyProtection?.LibCrypt != null && info.CopyProtection.LibCrypt != YesNo.NULL)
|| !string.IsNullOrEmpty(info.CopyProtection?.LibCryptData)
|| !string.IsNullOrEmpty(info.CopyProtection?.SecuROMData))
{
output.Add(""); output.Add("Copy Protection:");
if (info.CommonDiscInfo?.System == RedumpSystem.SonyPlayStation)
{
AddIfExists(output, Template.PlayStationAntiModchipField, info.CopyProtection!.AntiModchip.LongName(), 1);
AddIfExists(output, Template.PlayStationLibCryptField, info.CopyProtection.LibCrypt.LongName(), 1);
AddIfExists(output, Template.SubIntentionField, info.CopyProtection.LibCryptData, 1);
}
AddIfExists(output, Template.CopyProtectionField, info.CopyProtection!.Protection, 1);
AddIfExists(output, Template.SubIntentionField, info.CopyProtection.SecuROMData, 1);
}
// Dumpers and Status section
// output.Add(""); output.Add("Dumpers and Status");
// AddIfExists(output, Template.StatusField, info.Status.Name());
// AddIfExists(output, Template.OtherDumpersField, info.OtherDumpers);
// Tracks and Write Offsets section
if (!string.IsNullOrEmpty(info.TracksAndWriteOffsets?.ClrMameProData))
{
output.Add(""); output.Add("Tracks and Write Offsets:");
AddIfExists(output, Template.DATField, info.TracksAndWriteOffsets!.ClrMameProData + "\n", 1);
AddIfExists(output, Template.CuesheetField, info.TracksAndWriteOffsets.Cuesheet, 1);
var offset = info.TracksAndWriteOffsets.OtherWriteOffsets;
if (Int32.TryParse(offset, out int i))
offset = i.ToString("+#;-#;0");
AddIfExists(output, Template.WriteOffsetField, offset, 1);
}
// Size & Checksum section
else
{
output.Add(""); output.Add("Size & Checksum:");
// Gross hack because of automatic layerbreaks in Redump
if (!enableRedumpCompatibility
|| (info.CommonDiscInfo?.Media.ToMediaType() != MediaType.BluRay
&& info.CommonDiscInfo?.System.IsXGD() == false))
{
AddIfExists(output, Template.LayerbreakField, info.SizeAndChecksums?.Layerbreak, 1);
}
AddIfExists(output, Template.SizeField, info.SizeAndChecksums?.Size.ToString(), 1);
AddIfExists(output, Template.CRC32Field, info.SizeAndChecksums?.CRC32, 1);
AddIfExists(output, Template.MD5Field, info.SizeAndChecksums?.MD5, 1);
AddIfExists(output, Template.SHA1Field, info.SizeAndChecksums?.SHA1, 1);
}
// Dumping Info section
output.Add(""); output.Add("Dumping Info:");
AddIfExists(output, Template.FrontendVersionField, info.DumpingInfo?.FrontendVersion, 1);
AddIfExists(output, Template.DumpingProgramField, info.DumpingInfo?.DumpingProgram, 1);
AddIfExists(output, Template.DumpingDateField, info.DumpingInfo?.DumpingDate, 1);
AddIfExists(output, Template.DumpingDriveManufacturer, info.DumpingInfo?.Manufacturer, 1);
AddIfExists(output, Template.DumpingDriveModel, info.DumpingInfo?.Model, 1);
AddIfExists(output, Template.DumpingDriveFirmware, info.DumpingInfo?.Firmware, 1);
AddIfExists(output, Template.ReportedDiscType, info.DumpingInfo?.ReportedDiscType, 1);
AddIfExists(output, Template.C2ErrorCountField, info.DumpingInfo?.C2ErrorsCount, 1);
// Make sure there aren't any instances of two blank lines in a row
string? last = null;
for (int i = 0; i < output.Count;)
{
if (output[i] == last && string.IsNullOrEmpty(last))
{
output.RemoveAt(i);
}
else
{
last = output[i];
i++;
}
}
return (output, "Formatting complete!");
}
catch (Exception ex)
{
return (null, $"Error formatting submission info: {ex}");
}
}
/// <summary>
/// Process any fields that have to be combined
/// </summary>
/// <param name="info">Information object to normalize</param>
public static void ProcessSpecialFields(SubmissionInfo? info)
{
// If there is no submission info
if (info == null)
return;
// Process the comments field
if (info.CommonDiscInfo?.CommentsSpecialFields != null && info.CommonDiscInfo.CommentsSpecialFields?.Any() == true)
{
// If the field is missing, add an empty one to fill in
if (info.CommonDiscInfo.Comments == null)
info.CommonDiscInfo.Comments = string.Empty;
// Add all special fields before any comments
info.CommonDiscInfo.Comments = string.Join(
"\n", OrderCommentTags(info.CommonDiscInfo.CommentsSpecialFields)
.Where(kvp => !string.IsNullOrEmpty(kvp.Value))
.Select(FormatSiteTag)
.Where(s => !string.IsNullOrEmpty(s))
.ToArray()
) + "\n" + info.CommonDiscInfo.Comments;
// Normalize newlines
info.CommonDiscInfo.Comments = info.CommonDiscInfo.Comments.Replace("\r\n", "\n");
// Trim the comments field
info.CommonDiscInfo.Comments = info.CommonDiscInfo.Comments.Trim();
// Wipe out the special fields dictionary
info.CommonDiscInfo.CommentsSpecialFields = null;
}
// Process the contents field
if (info.CommonDiscInfo?.ContentsSpecialFields != null && info.CommonDiscInfo.ContentsSpecialFields?.Any() == true)
{
// If the field is missing, add an empty one to fill in
if (info.CommonDiscInfo.Contents == null)
info.CommonDiscInfo.Contents = string.Empty;
// Add all special fields before any contents
info.CommonDiscInfo.Contents = string.Join(
"\n", OrderContentTags(info.CommonDiscInfo.ContentsSpecialFields)
.Where(kvp => !string.IsNullOrEmpty(kvp.Value))
.Select(FormatSiteTag)
.Where(s => !string.IsNullOrEmpty(s))
.ToArray()
) + "\n" + info.CommonDiscInfo.Contents;
// Normalize newlines
info.CommonDiscInfo.Contents = info.CommonDiscInfo.Contents.Replace("\r\n", "\n");
// Trim the contents field
info.CommonDiscInfo.Contents = info.CommonDiscInfo.Contents.Trim();
// Wipe out the special fields dictionary
info.CommonDiscInfo.ContentsSpecialFields = null;
}
}
#region Helpers
/// <summary>
/// Add the properly formatted key and value, if possible
/// </summary>
/// <param name="output">Output list</param>
/// <param name="key">Name of the output key to write</param>
/// <param name="value">Name of the output value to write</param>
/// <param name="indent">Number of tabs to indent the line</param>
private static void AddIfExists(List<string> output, string key, string? value, int indent)
{
// If there's no valid value to write
if (value == null)
return;
string prefix = string.Empty;
for (int i = 0; i < indent; i++)
prefix += "\t";
// Skip fields that need to keep internal whitespace intact
if (key != "Primary Volume Descriptor (PVD)"
&& key != "Header"
&& key != "Cuesheet")
{
// Convert to tabs
value = value.Replace("<tab>", "\t");
value = value.Replace("<TAB>", "\t");
value = value.Replace(" ", "\t");
// Sanitize whitespace around tabs
value = Regex.Replace(value, @"\s*\t\s*", "\t", RegexOptions.Compiled);
}
// If the value contains a newline
value = value.Replace("\r\n", "\n");
if (value.Contains('\n'))
{
output.Add(prefix + key + ":"); output.Add("");
string[] values = value.Split('\n');
foreach (string val in values)
output.Add(val);
output.Add("");
}
// For all regular values
else
{
output.Add(prefix + key + ": " + value);
}
}
/// <summary>
/// Add the properly formatted key and value, if possible
/// </summary>
/// <param name="output">Output list</param>
/// <param name="key">Name of the output key to write</param>
/// <param name="value">Name of the output value to write</param>
/// <param name="indent">Number of tabs to indent the line</param>
private static void AddIfExists(List<string> output, string key, string?[]? value, int indent)
{
// If there's no valid value to write
if (value == null || value.Length == 0)
return;
AddIfExists(output, key, string.Join(", ", value), indent);
}
/// <summary>
/// Add the properly formatted key and value, if possible
/// </summary>
/// <param name="output">Output list</param>
/// <param name="key">Name of the output key to write</param>
/// <param name="value">Name of the output value to write</param>
/// <param name="indent">Number of tabs to indent the line</param>
private static void AddIfExists(List<string> output, string key, long? value, int indent)
{
// If there's no valid value to write
if (value == null || value == default(long))
return;
string prefix = string.Empty;
for (int i = 0; i < indent; i++)
prefix += "\t";
output.Add(prefix + key + ": " + value);
}
/// <summary>
/// Add the properly formatted key and value, if possible
/// </summary>
/// <param name="output">Output list</param>
/// <param name="key">Name of the output key to write</param>
/// <param name="value">Name of the output value to write</param>
/// <param name="indent">Number of tabs to indent the line</param>
private static void AddIfExists(List<string> output, string key, List<int>? value, int indent)
{
// If there's no valid value to write
if (value == null || value.Count == 0)
return;
AddIfExists(output, key, string.Join(", ", value.Select(o => o.ToString()).ToArray()), indent);
}
/// <summary>
/// Format a single site tag to string
/// </summary>
/// <param name="kvp">KeyValuePair representing the site tag and value</param>
/// <returns>String-formatted tag and value</returns>
private static string FormatSiteTag(KeyValuePair<SiteCode?, string> kvp)
{
bool isMultiLine = kvp.Key.IsMultiLine();
string line = $"{kvp.Key.ShortName()}{(isMultiLine ? "\n" : " ")}";
// Special case for boolean fields
if (IsBoolean(kvp.Key))
{
if (kvp.Value != true.ToString())
return string.Empty;
return line.Trim();
}
return $"{line}{kvp.Value}{(isMultiLine ? "\n" : string.Empty)}";
}
/// <summary>
/// Get the adjusted name of the media based on layers, if applicable
/// </summary>
/// <param name="mediaType">MediaType to get the proper name for</param>
/// <param name="picIdentifier">PIC identifier string (BD only)</param>
/// <param name="size">Size of the current media</param>
/// <param name="layerbreak">First layerbreak value, as applicable</param>
/// <param name="layerbreak2">Second layerbreak value, as applicable</param>
/// <param name="layerbreak3">Third layerbreak value, as applicable</param>
/// <returns>String representation of the media, including layer specification</returns>
/// TODO: Figure out why we have this and NormalizeDiscType as well
private static string? GetFixedMediaType(MediaType? mediaType, string? picIdentifier, long? size, long? layerbreak, long? layerbreak2, long? layerbreak3)
{
switch (mediaType)
{
case MediaType.DVD:
if (layerbreak != default && layerbreak != default(long))
return $"{mediaType.LongName()}-9";
else
return $"{mediaType.LongName()}-5";
case MediaType.BluRay:
if (layerbreak3 != default && layerbreak3 != default(long))
return $"{mediaType.LongName()}-128";
else if (layerbreak2 != default && layerbreak2 != default(long))
return $"{mediaType.LongName()}-100";
else if (layerbreak != default && layerbreak != default(long) && picIdentifier == SabreTools.Models.PIC.Constants.DiscTypeIdentifierROMUltra)
return $"{mediaType.LongName()}-66";
else if (layerbreak != default && layerbreak != default(long) && size > 53_687_063_712)
return $"{mediaType.LongName()}-66";
else if (layerbreak != default && layerbreak != default(long))
return $"{mediaType.LongName()}-50";
else if (picIdentifier == SabreTools.Models.PIC.Constants.DiscTypeIdentifierROMUltra)
return $"{mediaType.LongName()}-33";
else if (size > 26_843_531_856)
return $"{mediaType.LongName()}-33";
else
return $"{mediaType.LongName()}-25";
case MediaType.UMD:
if (layerbreak != default && layerbreak != default(long))
return $"{mediaType.LongName()}-DL";
else
return $"{mediaType.LongName()}-SL";
default:
return mediaType.LongName();
}
}
/// <summary>
/// Check if a site code is boolean or not
/// </summary>
/// <param name="siteCode">SiteCode to check</param>
/// <returns>True if the code field is a flag with no value, false otherwise</returns>
/// <remarks>TODO: This should move to Extensions at some point</remarks>
private static bool IsBoolean(SiteCode? siteCode)
{
return siteCode switch
{
SiteCode.PostgapType => true,
SiteCode.VCD => true,
_ => false,
};
}
/// <summary>
/// Order comment code tags according to Redump requirements
/// </summary>
/// <returns>Ordered list of KeyValuePairs representing the tags and values</returns>
private static List<KeyValuePair<SiteCode?, string>> OrderCommentTags(Dictionary<SiteCode, string> tags)
{
var sorted = new List<KeyValuePair<SiteCode?, string>>();
// If the input is invalid, just return an empty set
if (tags == null || tags.Count == 0)
return sorted;
// Identifying Info
if (tags.ContainsKey(SiteCode.AlternativeTitle))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.AlternativeTitle, tags[SiteCode.AlternativeTitle]));
if (tags.ContainsKey(SiteCode.AlternativeForeignTitle))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.AlternativeForeignTitle, tags[SiteCode.AlternativeForeignTitle]));
if (tags.ContainsKey(SiteCode.InternalName))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.InternalName, tags[SiteCode.InternalName]));
if (tags.ContainsKey(SiteCode.InternalSerialName))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.InternalSerialName, tags[SiteCode.InternalSerialName]));
if (tags.ContainsKey(SiteCode.VolumeLabel))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.VolumeLabel, tags[SiteCode.VolumeLabel]));
if (tags.ContainsKey(SiteCode.Multisession))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.Multisession, tags[SiteCode.Multisession]));
if (tags.ContainsKey(SiteCode.UniversalHash))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.UniversalHash, tags[SiteCode.UniversalHash]));
if (tags.ContainsKey(SiteCode.RingNonZeroDataStart))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.RingNonZeroDataStart, tags[SiteCode.RingNonZeroDataStart]));
if (tags.ContainsKey(SiteCode.XMID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.XMID, tags[SiteCode.XMID]));
if (tags.ContainsKey(SiteCode.XeMID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.XeMID, tags[SiteCode.XeMID]));
if (tags.ContainsKey(SiteCode.DMIHash))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.DMIHash, tags[SiteCode.DMIHash]));
if (tags.ContainsKey(SiteCode.PFIHash))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.PFIHash, tags[SiteCode.PFIHash]));
if (tags.ContainsKey(SiteCode.SSHash))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.SSHash, tags[SiteCode.SSHash]));
if (tags.ContainsKey(SiteCode.SSVersion))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.SSVersion, tags[SiteCode.SSVersion]));
if (tags.ContainsKey(SiteCode.Filename))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.Filename, tags[SiteCode.Filename]));
if (tags.ContainsKey(SiteCode.BBFCRegistrationNumber))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.BBFCRegistrationNumber, tags[SiteCode.BBFCRegistrationNumber]));
if (tags.ContainsKey(SiteCode.CDProjektID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.CDProjektID, tags[SiteCode.CDProjektID]));
if (tags.ContainsKey(SiteCode.DiscHologramID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.DiscHologramID, tags[SiteCode.DiscHologramID]));
if (tags.ContainsKey(SiteCode.DNASDiscID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.DNASDiscID, tags[SiteCode.DNASDiscID]));
if (tags.ContainsKey(SiteCode.ISBN))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.ISBN, tags[SiteCode.ISBN]));
if (tags.ContainsKey(SiteCode.ISSN))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.ISSN, tags[SiteCode.ISSN]));
if (tags.ContainsKey(SiteCode.PPN))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.PPN, tags[SiteCode.PPN]));
if (tags.ContainsKey(SiteCode.VFCCode))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.VFCCode, tags[SiteCode.VFCCode]));
if (tags.ContainsKey(SiteCode.Genre))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.Genre, tags[SiteCode.Genre]));
if (tags.ContainsKey(SiteCode.Series))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.Series, tags[SiteCode.Series]));
if (tags.ContainsKey(SiteCode.PostgapType))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.PostgapType, tags[SiteCode.PostgapType]));
if (tags.ContainsKey(SiteCode.VCD))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.VCD, tags[SiteCode.VCD]));
// Publisher / Company IDs
if (tags.ContainsKey(SiteCode.AcclaimID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.AcclaimID, tags[SiteCode.AcclaimID]));
if (tags.ContainsKey(SiteCode.ActivisionID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.ActivisionID, tags[SiteCode.ActivisionID]));
if (tags.ContainsKey(SiteCode.BandaiID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.BandaiID, tags[SiteCode.BandaiID]));
if (tags.ContainsKey(SiteCode.ElectronicArtsID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.ElectronicArtsID, tags[SiteCode.ElectronicArtsID]));
if (tags.ContainsKey(SiteCode.FoxInteractiveID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.FoxInteractiveID, tags[SiteCode.FoxInteractiveID]));
if (tags.ContainsKey(SiteCode.GTInteractiveID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.GTInteractiveID, tags[SiteCode.GTInteractiveID]));
if (tags.ContainsKey(SiteCode.JASRACID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.JASRACID, tags[SiteCode.JASRACID]));
if (tags.ContainsKey(SiteCode.KingRecordsID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.KingRecordsID, tags[SiteCode.KingRecordsID]));
if (tags.ContainsKey(SiteCode.KoeiID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.KoeiID, tags[SiteCode.KoeiID]));
if (tags.ContainsKey(SiteCode.KonamiID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.KonamiID, tags[SiteCode.KonamiID]));
if (tags.ContainsKey(SiteCode.LucasArtsID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.LucasArtsID, tags[SiteCode.LucasArtsID]));
if (tags.ContainsKey(SiteCode.MicrosoftID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.MicrosoftID, tags[SiteCode.MicrosoftID]));
if (tags.ContainsKey(SiteCode.NaganoID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.NaganoID, tags[SiteCode.NaganoID]));
if (tags.ContainsKey(SiteCode.NamcoID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.NamcoID, tags[SiteCode.NamcoID]));
if (tags.ContainsKey(SiteCode.NipponIchiSoftwareID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.NipponIchiSoftwareID, tags[SiteCode.NipponIchiSoftwareID]));
if (tags.ContainsKey(SiteCode.OriginID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.OriginID, tags[SiteCode.OriginID]));
if (tags.ContainsKey(SiteCode.PonyCanyonID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.PonyCanyonID, tags[SiteCode.PonyCanyonID]));
if (tags.ContainsKey(SiteCode.SegaID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.SegaID, tags[SiteCode.SegaID]));
if (tags.ContainsKey(SiteCode.SelenID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.SelenID, tags[SiteCode.SelenID]));
if (tags.ContainsKey(SiteCode.SierraID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.SierraID, tags[SiteCode.SierraID]));
if (tags.ContainsKey(SiteCode.TaitoID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.TaitoID, tags[SiteCode.TaitoID]));
if (tags.ContainsKey(SiteCode.UbisoftID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.UbisoftID, tags[SiteCode.UbisoftID]));
if (tags.ContainsKey(SiteCode.ValveID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.ValveID, tags[SiteCode.ValveID]));
return sorted;
}
/// <summary>
/// Order content code tags according to Redump requirements
/// </summary>
/// <returns>Ordered list of KeyValuePairs representing the tags and values</returns>
private static List<KeyValuePair<SiteCode?, string>> OrderContentTags(Dictionary<SiteCode, string> tags)
{
var sorted = new List<KeyValuePair<SiteCode?, string>>();
// If the input is invalid, just return an empty set
if (tags == null || tags.Count == 0)
return sorted;
// Games
if (tags.ContainsKey(SiteCode.Games))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.Games, tags[SiteCode.Games]));
if (tags.ContainsKey(SiteCode.NetYarozeGames))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.NetYarozeGames, tags[SiteCode.NetYarozeGames]));
// Demos
if (tags.ContainsKey(SiteCode.PlayableDemos))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.PlayableDemos, tags[SiteCode.PlayableDemos]));
if (tags.ContainsKey(SiteCode.RollingDemos))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.RollingDemos, tags[SiteCode.RollingDemos]));
if (tags.ContainsKey(SiteCode.TechDemos))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.TechDemos, tags[SiteCode.TechDemos]));
// Video
if (tags.ContainsKey(SiteCode.GameFootage))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.GameFootage, tags[SiteCode.GameFootage]));
if (tags.ContainsKey(SiteCode.Videos))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.Videos, tags[SiteCode.Videos]));
// Miscellaneous
if (tags.ContainsKey(SiteCode.Patches))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.Patches, tags[SiteCode.Patches]));
if (tags.ContainsKey(SiteCode.Savegames))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.Savegames, tags[SiteCode.Savegames]));
if (tags.ContainsKey(SiteCode.Extras))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.Extras, tags[SiteCode.Extras]));
return sorted;
}
#endregion
}
}

View File

@@ -1,5 +1,13 @@
# SabreTools.RedumpLib
[![Build and Test](https://github.com/SabreTools/SabreTools.RedumpLib/actions/workflows/build_and_test.yml/badge.svg)](https://github.com/SabreTools/SabreTools.RedumpLib/actions/workflows/build_and_test.yml)
This library comprises interaction logic for [Redump](http://redump.org/). Because there is no formal API for the site, this library interacts with the site through normal HTTP methods. It includes a fairly comprehensive reference of supported parts of the site, including URLs, page information, and packs.
Find the link to the Nuget package [here](https://www.nuget.org/packages/SabreTools.RedumpLib).
## Releases
For the most recent stable build, download the latest release here: [Releases Page](https://github.com/SabreTools/SabreTools.RedumpLib/releases)
For the latest WIP build, see the [Rolling Release](https://github.com/SabreTools/SabreTools.RedumpLib/releases/rolling)
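
For orientation, here is a minimal usage sketch assembled only from types that appear elsewhere in this compare (`Builder`, `Downloader`, `Feature`); the file path and credentials are placeholders, and the exact API surface may differ between 1.3.7 and 1.7.2.

```csharp
using System;
using SabreTools.RedumpLib;
using SabreTools.RedumpLib.Data;

class Example
{
    static void Main()
    {
        // Load a previously saved submission from disk (path is illustrative)
        var info = Builder.CreateFromFile("submission.json");
        Console.WriteLine(info?.CommonDiscInfo?.Title ?? "<no title>");

        // Download a range of site pages, mirroring what RedumpTool does below
        var downloader = new Downloader
        {
            Feature = Feature.Site,
            MinimumId = 1,
            MaximumId = 10,
            OutDir = "output",
            Username = "user",   // placeholder Redump credentials
            Password = "pass",
        };
        var ids = downloader.Download().Result;
        Console.WriteLine($"Processed {ids.Count} IDs");
    }
}
```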

RedumpTool/Program.cs Normal file
View File

@@ -0,0 +1,278 @@
using System;
using System.IO;
using SabreTools.RedumpLib;
using SabreTools.RedumpLib.Data;
namespace RedumpTool
{
public class Program
{
static void Main(string[] args)
{
// Show help if nothing is input
if (args == null || args.Length == 0)
{
ShowHelp();
return;
}
// Derive the feature, if possible
Feature feature = DeriveFeature(args[0]);
if (feature == Feature.NONE)
{
ShowHelp();
return;
}
// Create a new Downloader
var downloader = CreateDownloader(feature, args);
if (downloader == null)
{
ShowHelp();
return;
}
// Run the download task
var downloaderTask = downloader.Download();
downloaderTask.Wait();
// Get the downloader task results and print, if necessary
var downloaderResult = downloaderTask.Result;
if (downloaderResult.Count > 0)
{
string processedIds = string.Join(", ", [.. downloaderResult.ConvertAll(i => i.ToString())]);
Console.WriteLine($"Processed IDs: {processedIds}");
}
else if (downloaderResult.Count == 0 && downloader.Feature != Feature.Packs)
{
ShowHelp();
}
}
/// <summary>
/// Derive the feature from the supplied argument
/// </summary>
/// <param name="feature">Possible feature name to derive from</param>
/// <returns>Derived Feature on success, Feature.NONE otherwise</returns>
private static Feature DeriveFeature(string feature)
{
return feature.ToLowerInvariant() switch
{
"site" => Feature.Site,
"wip" => Feature.WIP,
"packs" => Feature.Packs,
"user" => Feature.User,
"search" => Feature.Quicksearch,
"query" => Feature.Quicksearch,
_ => Feature.NONE,
};
}
/// <summary>
/// Create a Downloader from a feature and a set of arguments
/// </summary>
/// <param name="feature">Primary feature to use</param>
/// <param name="args">Arguments list to parse</param>
/// <returns>Initialized Downloader on success, null otherwise</returns>
private static Downloader? CreateDownloader(Feature feature, string[] args)
{
// Set temporary internal variables
string? outDir = null;
string? username = null;
string? password = null;
int minimumId = -1;
int maximumId = -1;
string? queryString = null;
bool useSubfolders = false;
bool onlyNew = false;
bool onlyList = false;
bool noSlash = false;
bool force = false;
// Now loop through all of the arguments
try
{
for (int i = 1; i < args.Length; i++)
{
switch (args[i])
{
// Output directory
case "-o":
case "--output":
outDir = args[++i].Trim('"');
break;
// Username
case "-u":
case "--username":
username = args[++i];
break;
// Password
case "-p":
case "--password":
password = args[++i];
break;
// Minimum Redump ID
case "-min":
case "--minimum":
if (!int.TryParse(args[++i], out minimumId))
minimumId = -1;
break;
// Maximum Redump ID
case "-max":
case "--maximum":
if (!int.TryParse(args[++i], out maximumId))
maximumId = -1;
break;
// Quicksearch text
case "-q":
case "--query":
queryString = args[++i];
break;
// Packs subfolders
case "-s":
case "--subfolders":
useSubfolders = true;
break;
// Use last modified
case "-n":
case "--onlynew":
onlyNew = true;
break;
// List instead of download
case "-l":
case "--list":
onlyList = true;
break;
// Don't filter forward slashes from queries
case "-ns":
case "--noslash":
noSlash = true;
break;
// Force continuation
case "-f":
case "--force":
force = true;
break;
// Everything else
default:
Console.WriteLine($"Unrecognized flag: {args[i]}");
break;
}
}
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return null;
}
// Output directory validation
if (!onlyList && string.IsNullOrEmpty(outDir))
{
Console.WriteLine("No output directory set!");
return null;
}
else if (!onlyList && !string.IsNullOrEmpty(outDir))
{
// Create the output directory, if it doesn't exist
try
{
if (!Directory.Exists(outDir))
Directory.CreateDirectory(outDir);
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return null;
}
}
// Range verification
if (feature == Feature.Site && !onlyNew && (minimumId < 0 || maximumId < 0))
{
Console.WriteLine("Please enter a valid range of Redump IDs");
return null;
}
else if (feature == Feature.WIP && !onlyNew && (minimumId < 0 || maximumId < 0))
{
Console.WriteLine("Please enter a valid range of WIP IDs");
return null;
}
// Query verification (and cleanup)
if (feature == Feature.Quicksearch && string.IsNullOrEmpty(queryString))
{
Console.WriteLine("Please enter a query for searching");
return null;
}
// Create and return the downloader
var downloader = new Downloader()
{
Feature = feature,
MinimumId = minimumId,
MaximumId = maximumId,
QueryString = queryString,
OutDir = outDir,
UseSubfolders = useSubfolders,
OnlyNew = onlyNew,
OnlyList = onlyList,
Force = force,
NoSlash = noSlash,
Username = username,
Password = password,
};
return downloader;
}
/// <summary>
/// Show the commandline help for the program
/// </summary>
private static void ShowHelp()
{
Console.WriteLine("RedumpTool - A Redump.org recovery tool");
Console.WriteLine();
Console.WriteLine("Usage: RedumpTool <feature> [options]");
Console.WriteLine();
Console.WriteLine("Common Options");
Console.WriteLine(" -o <folder>, --output <folder> - Set the base output directory");
Console.WriteLine(" -u <username>, --username <username> - Redump username");
Console.WriteLine(" -p <pass>, --password <pass> - Redump password");
Console.WriteLine();
Console.WriteLine("site - Download pages and related files from the main site");
Console.WriteLine(" -min <MinId>, --minimum <MinId> - Lower bound for page numbers (cannot be used with only new)");
Console.WriteLine(" -max <MaxId>, --maximum <MaxId> - Upper bound for page numbers (cannot be used with only new)");
Console.WriteLine(" -n, --onlynew - Use the last modified view (cannot be used with min and max)");
Console.WriteLine(" -f, --force - Force continuing downloads until user cancels (used with only new)");
Console.WriteLine();
Console.WriteLine("wip - Download pages and related files from the WIP list");
Console.WriteLine(" -min <MinId>, --minimum <MinId> - Lower bound for page numbers (cannot be used with only new)");
Console.WriteLine(" -max <MaxId>, --maximum <MaxId> - Upper bound for page numbers (cannot be used with only new)");
Console.WriteLine(" -n, --onlynew - Use the last modified view (cannot be used with min and max)");
Console.WriteLine();
Console.WriteLine("packs - Download available packs");
Console.WriteLine(" -s, --subfolders - Download packs to named subfolders");
Console.WriteLine();
Console.WriteLine("user - Download pages and related files for a particular user");
Console.WriteLine(" -n, --onlynew - Use the last modified view instead of sequential parsing");
Console.WriteLine(" -l, --list - Only list the page IDs for that user");
Console.WriteLine();
Console.WriteLine("query - Download pages and related files from a Redump-compatible query");
Console.WriteLine(" -q, --query - Redump-compatible query to run");
Console.WriteLine(" -l, --list - Only list the page IDs for that query");
Console.WriteLine(" -ns, --noslash - Don't replace forward slashes with '-'");
Console.WriteLine();
}
}
}
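
Going by the argument parsing and help text above, invocations of the tool would look roughly like the following; the IDs, paths, and credentials are placeholders.

```
RedumpTool site --output ./redump-pages --username someuser --password somepass --minimum 1 --maximum 100
RedumpTool packs --output ./packs --username someuser --password somepass --subfolders
RedumpTool query --query "dreamcast" --list
```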

View File

@@ -0,0 +1,33 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net20;net35;net40;net452;net462;net472;net48;netcoreapp3.1;net5.0;net6.0;net7.0;net8.0;net9.0</TargetFrameworks>
<OutputType>Exe</OutputType>
<CheckEolTargetFramework>false</CheckEolTargetFramework>
<IncludeSourceRevisionInInformationalVersion>false</IncludeSourceRevisionInInformationalVersion>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.7.2</Version>
</PropertyGroup>
<!-- Support All Frameworks -->
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR $(TargetFramework.StartsWith(`net4`))">
<RuntimeIdentifiers>win-x86;win-x64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(TargetFramework.StartsWith(`netcoreapp`)) OR $(TargetFramework.StartsWith(`net5`))">
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net6`)) OR $(TargetFramework.StartsWith(`net7`)) OR $(TargetFramework.StartsWith(`net8`)) OR $(TargetFramework.StartsWith(`net9`))">
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64;osx-arm64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(RuntimeIdentifier.StartsWith(`osx-arm`))">
<TargetFrameworks>net6.0;net7.0;net8.0;net9.0</TargetFrameworks>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\SabreTools.RedumpLib\SabreTools.RedumpLib.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,191 @@
using System;
using System.IO;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace SabreTools.RedumpLib.Test
{
public class BuilderTests
{
[Theory]
[InlineData("success_complete.json", false)]
[InlineData("success_invalid.json", false)] // Fully invalid returns a default object
[InlineData("success_partial.json", false)]
[InlineData("fail_invalid.json", true)]
public void CreateFromFileTest(string filename, bool expectNull)
{
// Get the full path to the test file
string path = Path.Combine(Environment.CurrentDirectory, "TestData", filename);
// Try to create the submission info from file
var si = Builder.CreateFromFile(path);
// Check for an expected result
Assert.Equal(expectNull, si == null);
}
[Fact]
public void EnsureAllSections_Null_Filled()
{
SubmissionInfo? si = null;
var actual = Builder.EnsureAllSections(si);
Assert.NotNull(actual);
Assert.NotNull(actual.CommonDiscInfo);
Assert.NotNull(actual.CommonDiscInfo.CommentsSpecialFields);
Assert.NotNull(actual.CommonDiscInfo.ContentsSpecialFields);
Assert.NotNull(actual.VersionAndEditions);
Assert.NotNull(actual.EDC);
Assert.NotNull(actual.ParentCloneRelationship);
Assert.NotNull(actual.Extras);
Assert.NotNull(actual.CopyProtection);
Assert.NotNull(actual.DumpersAndStatus);
Assert.NotNull(actual.TracksAndWriteOffsets);
Assert.NotNull(actual.SizeAndChecksums);
Assert.NotNull(actual.DumpingInfo);
Assert.NotNull(actual.Artifacts);
}
[Fact]
public void EnsureAllSections_Empty_Filled()
{
SubmissionInfo? si = new SubmissionInfo
{
CommonDiscInfo = null,
VersionAndEditions = null,
EDC = null,
ParentCloneRelationship = null,
Extras = null,
CopyProtection = null,
DumpersAndStatus = null,
TracksAndWriteOffsets = null,
SizeAndChecksums = null,
DumpingInfo = null,
Artifacts = null,
};
var actual = Builder.EnsureAllSections(si);
Assert.NotNull(actual);
Assert.NotNull(actual.CommonDiscInfo);
Assert.NotNull(actual.CommonDiscInfo.CommentsSpecialFields);
Assert.NotNull(actual.CommonDiscInfo.ContentsSpecialFields);
Assert.NotNull(actual.VersionAndEditions);
Assert.NotNull(actual.EDC);
Assert.NotNull(actual.ParentCloneRelationship);
Assert.NotNull(actual.Extras);
Assert.NotNull(actual.CopyProtection);
Assert.NotNull(actual.DumpersAndStatus);
Assert.NotNull(actual.TracksAndWriteOffsets);
Assert.NotNull(actual.SizeAndChecksums);
Assert.NotNull(actual.DumpingInfo);
Assert.NotNull(actual.Artifacts);
}
[Fact]
public void EnsureAllSections_Filled_Filled()
{
SubmissionInfo? si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection
{
CommentsSpecialFields = [],
ContentsSpecialFields = [],
},
VersionAndEditions = new VersionAndEditionsSection(),
EDC = new EDCSection(),
ParentCloneRelationship = new ParentCloneRelationshipSection(),
Extras = new ExtrasSection(),
CopyProtection = new CopyProtectionSection(),
DumpersAndStatus = new DumpersAndStatusSection(),
TracksAndWriteOffsets = new TracksAndWriteOffsetsSection(),
SizeAndChecksums = new SizeAndChecksumsSection(),
DumpingInfo = new DumpingInfoSection(),
Artifacts = [],
};
var actual = Builder.EnsureAllSections(si);
Assert.NotNull(actual);
Assert.NotNull(actual.CommonDiscInfo);
Assert.NotNull(actual.CommonDiscInfo.CommentsSpecialFields);
Assert.NotNull(actual.CommonDiscInfo.ContentsSpecialFields);
Assert.NotNull(actual.VersionAndEditions);
Assert.NotNull(actual.EDC);
Assert.NotNull(actual.ParentCloneRelationship);
Assert.NotNull(actual.Extras);
Assert.NotNull(actual.CopyProtection);
Assert.NotNull(actual.DumpersAndStatus);
Assert.NotNull(actual.TracksAndWriteOffsets);
Assert.NotNull(actual.SizeAndChecksums);
Assert.NotNull(actual.DumpingInfo);
Assert.NotNull(actual.Artifacts);
}
[Fact]
public void InjectSubmissionInformation_BothNull_Null()
{
SubmissionInfo? si = null;
SubmissionInfo? seed = null;
var actual = Builder.InjectSubmissionInformation(si, seed);
Assert.Null(actual);
}
[Fact]
public void InjectSubmissionInformation_ValidInputNullSeed_Valid()
{
SubmissionInfo? si = new SubmissionInfo();
SubmissionInfo? seed = null;
var actual = Builder.InjectSubmissionInformation(si, seed);
Assert.NotNull(actual);
}
[Fact]
public void InjectSubmissionInformation_BothValid_Valid()
{
SubmissionInfo? si = new SubmissionInfo();
SubmissionInfo? seed = new SubmissionInfo();
var actual = Builder.InjectSubmissionInformation(si, seed);
Assert.NotNull(actual);
}
[Fact]
public void ReplaceHtmlWithSiteCodes_EmptyString_Empty()
{
string original = string.Empty;
string actual = Builder.ReplaceHtmlWithSiteCodes(original);
Assert.Empty(actual);
}
[Fact]
public void ReplaceHtmlWithSiteCodes_NoReplace_Identical()
{
string original = "<p>Nothing here will be replaced</p>";
string actual = Builder.ReplaceHtmlWithSiteCodes(original);
Assert.Equal(original, actual);
}
[Fact]
public void ReplaceHtmlWithSiteCodes_StandardCode_Replaced()
{
string original = "<b>ISBN</b>: 000-0-00-000000-0";
string expected = "[T:ISBN] 000-0-00-000000-0";
string actual = Builder.ReplaceHtmlWithSiteCodes(original);
Assert.Equal(expected, actual);
}
[Fact]
public void ReplaceHtmlWithSiteCodes_OutdatedCode_Replaced()
{
string original = "XMID: AB12345C";
string expected = "<b>XMID</b>: AB12345C";
string actual = Builder.ReplaceHtmlWithSiteCodes(original);
Assert.Equal(expected, actual);
}
}
}

File diff suppressed because it is too large


@@ -0,0 +1,8 @@
namespace SabreTools.RedumpLib.Test
{
public class DownloaderTests
{
// Tests here will require installing and using the Moq library
// to mock the RedumpClient type.
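// As a rough illustration of the pattern (an assumption-laden sketch: it
// presumes Moq is referenced, the usual usings for Moq, Xunit, and
// SabreTools.RedumpLib.Data are present, and DownloadSingleSiteID is
// overridable), a mocked client can be handed to any method that accepts a
// RedumpClient, e.g. Builder.FillFromId; the Downloader entry points would
// be exercised the same way:
//
// [Fact]
// public async Task FillFromId_EmptyResponse_ReturnsFalse()
// {
//     var mock = new Mock<RedumpClient>();
//     mock.Setup(c => c.DownloadSingleSiteID(It.IsAny<int>()))
//         .ReturnsAsync(string.Empty);
//
//     var info = new SubmissionInfo();
//     bool actual = await Builder.FillFromId(mock.Object, info, 1, includeAllData: false);
//     Assert.False(actual);
// }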
}
}


@@ -0,0 +1,877 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace SabreTools.RedumpLib.Test
{
public class FormatterTests
{
#region ProcessSpecialFields
// TODO: Write tests for ProcessSpecialFields
#endregion
#region CommonDiscInfoSection
// TODO: Write tests for FormatOutputData(CommonDiscInfoSection)
[Fact]
public void FormatOutputData_CDINullSACNullTAWONull_Minimal()
{
string expected = "Common Disc Info:\n\tRegion: SPACE! (CHANGE THIS)\n\tLanguages: ADD LANGUAGES HERE (ONLY IF YOU TESTED)\n\n\tRingcode Information:\n\n\n";
var builder = new StringBuilder();
CommonDiscInfoSection? section = null;
SizeAndChecksumsSection? sac = null;
TracksAndWriteOffsetsSection? tawo = null;
int? fullyMatchedID = null;
List<int>? partiallyMatchedIDs = null;
Formatter.FormatOutputData(builder,
section,
sac,
tawo,
fullyMatchedID,
partiallyMatchedIDs);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region VersionAndEditionsSection
[Fact]
public void FormatOutputData_VAENull_Minimal()
{
string expected = "Version and Editions:\n";
var builder = new StringBuilder();
VersionAndEditionsSection? section = null;
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_VAE_Formatted()
{
string expected = "Version and Editions:\n\tVersion: XXXXXX\n\tEdition/Release: XXXXXX\n";
var builder = new StringBuilder();
VersionAndEditionsSection? section = new VersionAndEditionsSection
{
Version = "XXXXXX",
OtherEditions = "XXXXXX",
};
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region EDCSection
[Fact]
public void FormatOutputData_EDCNull_Minimal()
{
string expected = "EDC:\n";
var builder = new StringBuilder();
EDCSection? section = null;
RedumpSystem? system = RedumpSystem.SonyPlayStation;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_EDCInvalidSystem_Empty()
{
string expected = string.Empty;
var builder = new StringBuilder();
EDCSection? section = null;
RedumpSystem? system = RedumpSystem.IBMPCcompatible;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_EDC_Formatted()
{
string expected = "EDC:\n\tEDC: Yes\n";
var builder = new StringBuilder();
EDCSection? section = new EDCSection { EDC = YesNo.Yes };
RedumpSystem? system = RedumpSystem.SonyPlayStation;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region ExtrasSection
[Fact]
public void FormatOutputData_ExtrasNull_Empty()
{
string expected = string.Empty;
var builder = new StringBuilder();
ExtrasSection? section = null;
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_ExtrasInvalid_Empty()
{
string expected = string.Empty;
var builder = new StringBuilder();
ExtrasSection? section = new ExtrasSection
{
PVD = null,
PIC = null,
BCA = null,
SecuritySectorRanges = null,
};
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_Extras_Formatted()
{
string expected = "Extras:\n\tPrimary Volume Descriptor (PVD): XXXXXX\n\tDisc Key: XXXXXX\n\tDisc ID: XXXXXX\n\tPermanent Information & Control (PIC): XXXXXX\n\tHeader: XXXXXX\n\tBCA: XXXXXX\n\tSecurity Sector Ranges: XXXXXX\n";
var builder = new StringBuilder();
ExtrasSection? section = new ExtrasSection
{
PVD = "XXXXXX",
DiscKey = "XXXXXX",
DiscID = "XXXXXX",
PIC = "XXXXXX",
Header = "XXXXXX",
BCA = "XXXXXX",
SecuritySectorRanges = "XXXXXX",
};
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region CopyProtectionSection
[Fact]
public void FormatOutputData_COPNull_Empty()
{
string expected = string.Empty;
var builder = new StringBuilder();
CopyProtectionSection? section = null;
RedumpSystem? system = RedumpSystem.IBMPCcompatible;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_COPInvalid_Empty()
{
string expected = string.Empty;
var builder = new StringBuilder();
CopyProtectionSection? section = new CopyProtectionSection
{
Protection = null,
AntiModchip = null,
LibCrypt = null,
LibCryptData = null,
SecuROMData = null,
};
RedumpSystem? system = RedumpSystem.IBMPCcompatible;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_COP_Formatted()
{
string expected = "Copy Protection:\n\tCopy Protection: XXXXXX\n\tSubIntention Data (SecuROM/LibCrypt): XXXXXX\n";
var builder = new StringBuilder();
CopyProtectionSection? section = new CopyProtectionSection
{
AntiModchip = YesNo.Yes,
LibCrypt = YesNo.Yes,
LibCryptData = "XXXXXX",
Protection = "XXXXXX",
SecuROMData = "XXXXXX",
};
RedumpSystem? system = RedumpSystem.IBMPCcompatible;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_COPPSX_Formatted()
{
string expected = "Copy Protection:\n\tAnti-modchip: Yes\n\tLibCrypt: Yes\n\tSubIntention Data (SecuROM/LibCrypt): XXXXXX\n\tCopy Protection: XXXXXX\n\tSubIntention Data (SecuROM/LibCrypt): XXXXXX\n";
var builder = new StringBuilder();
CopyProtectionSection? section = new CopyProtectionSection
{
AntiModchip = YesNo.Yes,
LibCrypt = YesNo.Yes,
LibCryptData = "XXXXXX",
Protection = "XXXXXX",
SecuROMData = "XXXXXX",
};
RedumpSystem? system = RedumpSystem.SonyPlayStation;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region TracksAndWriteOffsetsSection
[Fact]
public void FormatOutputData_TAWOInvalid_Minimal()
{
string expected = "Tracks and Write Offsets:\n\tDAT:\n\n\n\n\n";
var builder = new StringBuilder();
TracksAndWriteOffsetsSection? section = new TracksAndWriteOffsetsSection();
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_TAWO_Formatted()
{
string expected = "Tracks and Write Offsets:\n\tDAT:\n\nXXXXXX\n\n\n\tCuesheet: XXXXXX\n\tWrite Offset: XXXXXX\n";
var builder = new StringBuilder();
TracksAndWriteOffsetsSection? section = new TracksAndWriteOffsetsSection
{
ClrMameProData = "XXXXXX",
Cuesheet = "XXXXXX",
OtherWriteOffsets = "XXXXXX",
};
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region SizeAndChecksumsSection
// TODO: Write tests for FormatOutputData(SizeAndChecksumsSection)
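// A hypothetical starting point (the overload shape and the expected output
// below are assumptions and should be verified against Formatter before
// enabling):
//
// [Fact]
// public void FormatOutputData_SACNull_Empty()
// {
//     var builder = new StringBuilder();
//     SizeAndChecksumsSection? section = null;
//     Formatter.FormatOutputData(builder, section);
//     Assert.Empty(builder.ToString());
// }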
#endregion
#region DumpingInfoSection
[Fact]
public void FormatOutputData_DINull_Minimal()
{
string expected = "Dumping Info:\n";
var builder = new StringBuilder();
DumpingInfoSection? section = null;
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_DI_Formatted()
{
string expected = "Dumping Info:\n\tFrontend Version: XXXXXX\n\tDumping Program: XXXXXX\n\tDate: XXXXXX\n\tParameters: XXXXXX\n\tManufacturer: XXXXXX\n\tModel: XXXXXX\n\tFirmware: XXXXXX\n\tReported Disc Type: XXXXXX\n\tC2 Error Count: XXXXXX\n";
var builder = new StringBuilder();
DumpingInfoSection? section = new DumpingInfoSection
{
FrontendVersion = "XXXXXX",
DumpingProgram = "XXXXXX",
DumpingDate = "XXXXXX",
DumpingParameters = "XXXXXX",
Manufacturer = "XXXXXX",
Model = "XXXXXX",
Firmware = "XXXXXX",
ReportedDiscType = "XXXXXX",
C2ErrorsCount = "XXXXXX",
};
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region FormatSiteTag
[Fact]
public void FormatSiteTag_NoValue_Empty()
{
SiteCode code = SiteCode.AlternativeTitle;
string value = string.Empty;
string actual = Formatter.FormatSiteTag(code, value);
Assert.Empty(actual);
}
[Fact]
public void FormatSiteTag_Standard_Formatted()
{
string expected = "[T:ALT] XXXXXX";
SiteCode code = SiteCode.AlternativeTitle;
string value = "XXXXXX";
string actual = Formatter.FormatSiteTag(code, value);
Assert.Equal(expected, actual);
}
[Fact]
public void FormatSiteTag_BooleanTrue_Formatted()
{
string expected = "[T:VCD]";
SiteCode code = SiteCode.VCD;
string value = "True";
string actual = Formatter.FormatSiteTag(code, value);
Assert.Equal(expected, actual);
}
[Fact]
public void FormatSiteTag_BooleanFalse_Empty()
{
SiteCode code = SiteCode.VCD;
string value = "XXXXXX";
string actual = Formatter.FormatSiteTag(code, value);
Assert.Empty(actual);
}
[Fact]
public void FormatSiteTag_Multiline_Formatted()
{
string expected = "[T:X]\nXXXXXX\n";
SiteCode code = SiteCode.Extras;
string value = "XXXXXX";
string actual = Formatter.FormatSiteTag(code, value);
Assert.Equal(expected, actual);
}
#endregion
#region GetFixedMediaType
[Fact]
public void GetFixedMediaType_NullType_Null()
{
MediaType? mediaType = null;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Null(actual);
}
[Fact]
public void GetFixedMediaType_UnformattedType_Formatted()
{
string? expected = "CD-ROM";
MediaType? mediaType = MediaType.CDROM;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_DVD9_Formatted()
{
string? expected = "DVD-ROM-9";
MediaType? mediaType = MediaType.DVD;
string? picIdentifier = null;
long? size = null;
long? layerbreak = 12345;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_DVD5_Formatted()
{
string? expected = "DVD-ROM-5";
MediaType? mediaType = MediaType.DVD;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD128_Formatted()
{
string? expected = "BD-ROM-128";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = 12345;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD100_Formatted()
{
string? expected = "BD-ROM-100";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = 12345;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD66PIC_Formatted()
{
string? expected = "BD-ROM-66";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = Models.PIC.Constants.DiscTypeIdentifierROMUltra;
long? size = null;
long? layerbreak = 12345;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD66Size_Formatted()
{
string? expected = "BD-ROM-66";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = null;
long? size = 53_687_063_713;
long? layerbreak = 12345;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD50_Formatted()
{
string? expected = "BD-ROM-50";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = null;
long? size = null;
long? layerbreak = 12345;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD33PIC_Formatted()
{
string? expected = "BD-ROM-33";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = Models.PIC.Constants.DiscTypeIdentifierROMUltra;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD33Size_Formatted()
{
string? expected = "BD-ROM-33";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = null;
long? size = 26_843_531_857;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD25_Formatted()
{
string? expected = "BD-ROM-25";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_HDDVDDL_Formatted()
{
string? expected = "HD-DVD-ROM-DL";
MediaType? mediaType = MediaType.HDDVD;
string? picIdentifier = null;
long? size = null;
long? layerbreak = 12345;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_HDDVDSL_Formatted()
{
string? expected = "HD-DVD-ROM-SL";
MediaType? mediaType = MediaType.HDDVD;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_UMDDL_Formatted()
{
string? expected = "UMD-DL";
MediaType? mediaType = MediaType.UMD;
string? picIdentifier = null;
long? size = null;
long? layerbreak = 12345;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_UMDSL_Formatted()
{
string? expected = "UMD-SL";
MediaType? mediaType = MediaType.UMD;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
#endregion
#region OrderCommentTags
[Fact]
public void OrderCommentTags_Empty_Empty()
{
Dictionary<SiteCode, string> tags = [];
var actual = Formatter.OrderCommentTags(tags);
Assert.Empty(actual);
}
[Fact]
public void OrderCommentTags_NoMatch_Empty()
{
var tags = new Dictionary<SiteCode, string>
{
{ SiteCode.Applications, "XXXXXX" },
};
var actual = Formatter.OrderCommentTags(tags);
Assert.Empty(actual);
}
[Fact]
public void OrderCommentTags_All_Ordered()
{
Dictionary<SiteCode, string> tags = [];
foreach (SiteCode code in Enum.GetValues<SiteCode>())
{
tags[code] = "XXXXXX";
}
var actual = Formatter.OrderCommentTags(tags);
Assert.NotEmpty(actual);
var actualCodes = actual.Select(kvp => kvp.Key);
Assert.True(Formatter.OrderedCommentCodes.SequenceEqual(actualCodes));
}
#endregion
#region OrderContentTags
[Fact]
public void OrderContentTags_Empty_Empty()
{
Dictionary<SiteCode, string> tags = [];
var actual = Formatter.OrderContentTags(tags);
Assert.Empty(actual);
}
[Fact]
public void OrderContentTags_NoMatch_Empty()
{
var tags = new Dictionary<SiteCode, string>
{
{ SiteCode.AlternativeTitle, "XXXXXX" },
};
var actual = Formatter.OrderContentTags(tags);
Assert.Empty(actual);
}
[Fact]
public void OrderContentTags_All_Ordered()
{
Dictionary<SiteCode, string> tags = [];
foreach (SiteCode code in Enum.GetValues<SiteCode>())
{
tags[code] = "XXXXXX";
}
var actual = Formatter.OrderContentTags(tags);
Assert.NotEmpty(actual);
var actualCodes = actual.Select(kvp => kvp.Key);
Assert.True(Formatter.OrderedContentCodes.SequenceEqual(actualCodes));
}
#endregion
#region RemoveConsecutiveEmptyLines
[Fact]
public void RemoveConsecutiveEmptyLines_Linux_Removed()
{
string expected = "data\n\nbase";
string newlines = "data\n\n\n\n\n\n\n\n\n\nbase";
string actual = Formatter.RemoveConsecutiveEmptyLines(newlines);
Assert.Equal(expected, actual);
}
[Fact]
public void RemoveConsecutiveEmptyLines_Windows_Removed()
{
string expected = "data\r\n\r\nbase";
string newlines = "data\r\n\r\n\r\n\r\n\r\nbase";
string actual = Formatter.RemoveConsecutiveEmptyLines(newlines);
Assert.Equal(expected, actual);
}
#endregion
}
}


@@ -0,0 +1,47 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net8.0;net9.0</TargetFrameworks>
<CheckEolTargetFramework>false</CheckEolTargetFramework>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\SabreTools.RedumpLib\SabreTools.RedumpLib.csproj" />
</ItemGroup>
<ItemGroup>
<None Remove="TestData\*" />
</ItemGroup>
<ItemGroup>
<Content Include="TestData\*">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</Content>
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.CodeCoverage" Version="17.14.1" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" />
<PackageReference Include="xunit" Version="2.9.3" />
<PackageReference Include="xunit.abstractions" Version="2.0.3" />
<PackageReference Include="xunit.analyzers" Version="1.24.0" />
<PackageReference Include="xunit.assert" Version="2.9.3" />
<PackageReference Include="xunit.core" Version="2.9.3" />
<PackageReference Include="xunit.extensibility.core" Version="2.9.3" />
<PackageReference Include="xunit.extensibility.execution" Version="2.9.3" />
<PackageReference Include="xunit.runner.console" Version="2.9.3">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="xunit.runner.visualstudio" Version="3.1.4">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
</Project>


@@ -0,0 +1,181 @@
using System;
using System.Collections.Generic;
using Newtonsoft.Json;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace SabreTools.RedumpLib.Test
{
public class SubmissionInfoTests
{
[Fact]
public void EmptySerializationTest()
{
var submissionInfo = new SubmissionInfo();
string json = JsonConvert.SerializeObject(submissionInfo, Formatting.Indented);
Assert.NotNull(json);
}
[Fact]
public void PartialSerializationTest()
{
var submissionInfo = new SubmissionInfo()
{
CommonDiscInfo = new CommonDiscInfoSection(),
VersionAndEditions = new VersionAndEditionsSection(),
EDC = new EDCSection(),
ParentCloneRelationship = new ParentCloneRelationshipSection(),
Extras = new ExtrasSection(),
CopyProtection = new CopyProtectionSection(),
DumpersAndStatus = new DumpersAndStatusSection(),
TracksAndWriteOffsets = new TracksAndWriteOffsetsSection(),
SizeAndChecksums = new SizeAndChecksumsSection(),
};
string json = JsonConvert.SerializeObject(submissionInfo, Formatting.Indented);
Assert.NotNull(json);
}
[Fact]
public void FullSerializationTest()
{
var submissionInfo = new SubmissionInfo()
{
SchemaVersion = 1,
FullyMatchedID = 3,
PartiallyMatchedIDs = new List<int> { 0, 1, 2, 3 },
Added = DateTime.UtcNow,
LastModified = DateTime.UtcNow,
CommonDiscInfo = new CommonDiscInfoSection()
{
System = RedumpSystem.IBMPCcompatible,
Media = DiscType.CD,
Title = "Game Title",
ForeignTitleNonLatin = "Foreign Game Title",
DiscNumberLetter = "1",
DiscTitle = "Install Disc",
Category = DiscCategory.Games,
Region = Region.World,
Languages = new Language?[] { Language.English, Language.Spanish, Language.French },
LanguageSelection = new LanguageSelection?[] { LanguageSelection.BiosSettings },
Serial = "Disc Serial",
Layer0MasteringRing = "L0 Mastering Ring",
Layer0MasteringSID = "L0 Mastering SID",
Layer0ToolstampMasteringCode = "L0 Toolstamp",
Layer0MouldSID = "L0 Mould SID",
Layer0AdditionalMould = "L0 Additional Mould",
Layer1MasteringRing = "L1 Mastering Ring",
Layer1MasteringSID = "L1 Mastering SID",
Layer1ToolstampMasteringCode = "L1 Toolstamp",
Layer1MouldSID = "L1 Mould SID",
Layer1AdditionalMould = "L1 Additional Mould",
Layer2MasteringRing = "L2 Mastering Ring",
Layer2MasteringSID = "L2 Mastering SID",
Layer2ToolstampMasteringCode = "L2 Toolstamp",
Layer3MasteringRing = "L3 Mastering Ring",
Layer3MasteringSID = "L3 Mastering SID",
Layer3ToolstampMasteringCode = "L3 Toolstamp",
RingWriteOffset = "+12",
Barcode = "UPC Barcode",
EXEDateBuildDate = "19xx-xx-xx",
ErrorsCount = "0",
Comments = "Comment data line 1\r\nComment data line 2",
CommentsSpecialFields = new Dictionary<SiteCode, string>()
{
[SiteCode.ISBN] = "ISBN",
},
Contents = "Special contents 1\r\nSpecial contents 2",
ContentsSpecialFields = new Dictionary<SiteCode, string>()
{
[SiteCode.PlayableDemos] = "Game Demo 1",
},
},
VersionAndEditions = new VersionAndEditionsSection()
{
Version = "Original",
VersionDatfile = "Alt",
CommonEditions = new string[] { "Taikenban" },
OtherEditions = "Rerelease",
},
EDC = new EDCSection()
{
EDC = YesNo.Yes,
},
ParentCloneRelationship = new ParentCloneRelationshipSection()
{
ParentID = "12345",
RegionalParent = false,
},
Extras = new ExtrasSection()
{
PVD = "PVD",
DiscKey = "Disc key",
DiscID = "Disc ID",
PIC = "PIC",
Header = "Header",
BCA = "BCA",
SecuritySectorRanges = "SSv1 Ranges",
},
CopyProtection = new CopyProtectionSection()
{
AntiModchip = YesNo.Yes,
LibCrypt = YesNo.No,
LibCryptData = "LibCrypt data",
Protection = "List of protections",
SecuROMData = "SecuROM data",
},
DumpersAndStatus = new DumpersAndStatusSection()
{
Status = DumpStatus.TwoOrMoreGreen,
Dumpers = new string[] { "Dumper1", "Dumper2" },
OtherDumpers = "Dumper3",
},
TracksAndWriteOffsets = new TracksAndWriteOffsetsSection()
{
ClrMameProData = "Datfile",
Cuesheet = "Cuesheet",
CommonWriteOffsets = new int[] { 0, 12, -12 },
OtherWriteOffsets = "-2",
},
SizeAndChecksums = new SizeAndChecksumsSection()
{
Layerbreak = 0,
Layerbreak2 = 1,
Layerbreak3 = 2,
Size = 12345,
CRC32 = "CRC32",
MD5 = "MD5",
SHA1 = "SHA1",
},
DumpingInfo = new DumpingInfoSection()
{
DumpingProgram = "DiscImageCreator 20500101",
DumpingDate = DateTime.UtcNow.ToString("yyyy-MM-dd HH:mm:ss"),
DumpingParameters = "cd dvd bd sacd fd hdd",
Manufacturer = "ATAPI",
Model = "Optical Drive",
Firmware = "1.23",
ReportedDiscType = "CD-R",
},
Artifacts = new Dictionary<string, string>()
{
["Sample Artifact"] = "Sample Data",
},
};
string json = JsonConvert.SerializeObject(submissionInfo, Formatting.Indented);
Assert.NotNull(json);
}
}
}


@@ -0,0 +1 @@
This isn't even JSON, I lied.


@@ -0,0 +1,96 @@
{
"schema_version": 3,
"common_disc_info":
{
"d_system": "ajcd",
"d_media": "cd",
"d_title": "Test Title",
"d_title_foreign": "Foreign Title",
"d_number": "1",
"d_label": "Install",
"d_category": "Games",
"d_region": "U",
"d_languages":
[
"en",
"fr",
"es"
],
"d_languages_selection": [],
"d_serial": "Serial",
"d_ring_0_ma1": "Ringcode 0 Layer 0",
"d_ring_0_ma1_sid": "SID 0 Layer 0",
"d_ring_0_ts1": "Toolstamp 0 Layer 0",
"d_ring_0_mo1_sid": "Mould SID 0 Layer 0",
"d_ring_0_mo1": "Additional Mould 0 Layer 0",
"d_ring_0_ma2": "Ringcode 0 Layer 1",
"d_ring_0_ma2_sid": "SID 0 Layer 1",
"d_ring_0_ts2": "Toolstamp 0 Layer 1",
"d_ring_0_mo2_sid": "Mould SID 0 Layer 1",
"d_ring_0_mo2": "Additional Mould 0 Layer 1",
"d_ring_0_ma3": "Ringcode 0 Layer 2",
"d_ring_0_ma3_sid": "SID 0 Layer 2",
"d_ring_0_ts3": "Toolstamp 0 Layer 2",
"d_ring_0_ma4": "Ringcode 0 Layer 3",
"d_ring_0_ma4_sid": "SID 0 Layer 2",
"d_ring_0_ts4": "Toolstamp 0 Layer 2",
"d_ring_0_offsets": "-22",
"d_ring_0_0_value": "-21",
"d_barcode": "0 12345 67890 1",
"d_date": "1980-01-01",
"d_errors": "0",
"d_comments": "This is a comment\nwith a newline",
"d_contents": "These are contents, sorry"
},
"versions_and_editions":
{
"d_version": "1.0.0.0",
"d_version_datfile": "1.00",
"d_editions_text": "Demo"
},
"edc":
{
"d_edc": false
},
"parent_clone_relationship":
{
"d_parent_id": "12345",
"d_is_regional_parent": false
},
"extras":
{
"d_pvd": "Pretend\nthis\nis\na\nPVD",
"d_d1_key": "Disc key",
"d_d2_key": "Disc ID",
"d_pic_data": "Pretend\nthis\nis\na\nPIC",
"d_header": "Pretend\nthis\nis\na\nHeader",
"d_bca": "Pretend\nthis\nis\na\nBCA",
"d_ssranges": "Pretend\nthis\nis\na\nsecurity_range"
},
"copy_protection":
{
"d_protection_a": false,
"d_protection_1": false,
"d_libcrypt": "Definitely\nLibCrypt\nData",
"d_protection": "Super easy to find protection",
"d_securom": "Definitely\nSecuROM\nData"
},
"tracks_and_write_offsets":
{
"d_tracks": "Hash data",
"d_cue": "Real cuesheet",
"d_offset_text": "-22"
},
"size_and_checksums":
{
"d_layerbreak": 1,
"d_layerbreak_2": 2,
"d_layerbreak_3": 3,
"d_pic_identifier": "Pretend\nthis\nis\na\nPIC",
"d_size": 123456,
"d_crc32": "cbf43926",
"d_md5": "d41d8cd98f00b204e9800998ecf8427e",
"d_sha1": "da39a3ee5e6b4b0d3255bfef95601890afd80709"
}
}


@@ -0,0 +1,4 @@
{
"invalid_key": "invalid_value",
"invalid_x": 12345
}


@@ -0,0 +1,7 @@
{
"schema_version": 3,
"common_disc_info":
{
"d_title": "Test Title"
}
}


@@ -0,0 +1,304 @@
using SabreTools.RedumpLib.Data;
using Xunit;
namespace SabreTools.RedumpLib.Test
{
public class ValidatorTests
{
// Most tests here will require installing and using the Moq library
// to mock the RedumpClient type.
[Fact]
public void NormalizeDiscType_InvalidMedia_Untouched()
{
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = null }
};
Validator.NormalizeDiscType(si);
Assert.Null(si.CommonDiscInfo.Media);
}
[Fact]
public void NormalizeDiscType_InvalidSizeChecksums_Untouched()
{
DiscType expected = DiscType.CD;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = DiscType.CD },
SizeAndChecksums = null,
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Fact]
public void NormalizeDiscType_UnformattedType_Fixed()
{
DiscType expected = DiscType.CD;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = DiscType.CD },
SizeAndChecksums = new SizeAndChecksumsSection(),
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.DVD5)]
[InlineData(DiscType.DVD9)]
public void NormalizeDiscType_DVD9_Fixed(DiscType type)
{
DiscType expected = DiscType.DVD9;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection { Layerbreak = 12345 },
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.DVD5)]
[InlineData(DiscType.DVD9)]
public void NormalizeDiscType_DVD5_Fixed(DiscType type)
{
DiscType expected = DiscType.DVD5;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection(),
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD128_Fixed(DiscType type)
{
DiscType expected = DiscType.BD128;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection { Layerbreak3 = 12345 },
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD100_Fixed(DiscType type)
{
DiscType expected = DiscType.BD100;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection { Layerbreak2 = 12345 },
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD66PIC_Fixed(DiscType type)
{
DiscType expected = DiscType.BD66;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection
{
Layerbreak = 12345,
PICIdentifier = Models.PIC.Constants.DiscTypeIdentifierROMUltra,
},
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD66Size_Fixed(DiscType type)
{
DiscType expected = DiscType.BD66;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection
{
Layerbreak = 12345,
Size = 50_050_629_633,
},
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD50_Fixed(DiscType type)
{
DiscType expected = DiscType.BD50;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection { Layerbreak = 12345 },
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD33PIC_Fixed(DiscType type)
{
DiscType expected = DiscType.BD33;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection
{
PICIdentifier = Models.PIC.Constants.DiscTypeIdentifierROMUltra,
},
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD33Size_Fixed(DiscType type)
{
DiscType expected = DiscType.BD33;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection
{
Size = 25_025_314_817,
},
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD25_Fixed(DiscType type)
{
DiscType expected = DiscType.BD25;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection(),
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.UMDSL)]
[InlineData(DiscType.UMDDL)]
public void NormalizeDiscType_UMDDL_Fixed(DiscType type)
{
DiscType expected = DiscType.UMDDL;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection { Layerbreak = 12345 },
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.UMDSL)]
[InlineData(DiscType.UMDDL)]
public void NormalizeDiscType_UMDSL_Fixed(DiscType type)
{
DiscType expected = DiscType.UMDSL;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection(),
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
}
}


@@ -1,46 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<!-- Assembly Properties -->
<TargetFrameworks>net20;net35;net40;net452;net462;net472;net48;netcoreapp3.1;net5.0;net6.0;net7.0;net8.0</TargetFrameworks>
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64;osx-arm64</RuntimeIdentifiers>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.3.7</Version>
<!-- Package Properties -->
<Authors>Matt Nadareski</Authors>
<Description>Code to interact with redump.org</Description>
<Copyright>Copyright (c) Matt Nadareski 2020-2024</Copyright>
<PackageProjectUrl>https://github.com/SabreTools/</PackageProjectUrl>
<PackageReadmeFile>README.md</PackageReadmeFile>
<RepositoryUrl>https://github.com/SabreTools/SabreTools.RedumpLib</RepositoryUrl>
<RepositoryType>git</RepositoryType>
<PackageTags>web client redump</PackageTags>
<PackageLicenseExpression>MIT</PackageLicenseExpression>
</PropertyGroup>
<ItemGroup>
<None Include="README.md" Pack="true" PackagePath="" />
</ItemGroup>
<!-- Support for old .NET versions -->
<ItemGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`))">
<PackageReference Include="MinValueTupleBridge" Version="0.2.1" />
</ItemGroup>
<ItemGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR $(TargetFramework.StartsWith(`net40`))">
<PackageReference Include="MinAsyncBridge" Version="0.12.4" />
<PackageReference Include="MinThreadingBridge" Version="0.11.4" />
<PackageReference Include="Net30.LinqBridge" Version="1.3.0" />
</ItemGroup>
<ItemGroup Condition="$(TargetFramework.StartsWith(`net4`))">
<PackageReference Include="System.ValueTuple" Version="4.5.0" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.Models" Version="1.3.0" />
</ItemGroup>
</Project>


@@ -3,7 +3,11 @@ Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 17
VisualStudioVersion = 17.0.31903.59
MinimumVisualStudioVersion = 10.0.40219.1
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SabreTools.RedumpLib", "SabreTools.RedumpLib.csproj", "{235D3A36-CA69-4348-9EC4-649B27ACFBB8}"
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SabreTools.RedumpLib", "SabreTools.RedumpLib\SabreTools.RedumpLib.csproj", "{235D3A36-CA69-4348-9EC4-649B27ACFBB8}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SabreTools.RedumpLib.Test", "SabreTools.RedumpLib.Test\SabreTools.RedumpLib.Test.csproj", "{63519DEA-0C3D-4F0E-95EB-E9B6E1D55378}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "RedumpTool", "RedumpTool\RedumpTool.csproj", "{556F7AA0-00D7-4D5B-96EB-D436FEFD9810}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
@@ -18,5 +22,13 @@ Global
{235D3A36-CA69-4348-9EC4-649B27ACFBB8}.Debug|Any CPU.Build.0 = Debug|Any CPU
{235D3A36-CA69-4348-9EC4-649B27ACFBB8}.Release|Any CPU.ActiveCfg = Release|Any CPU
{235D3A36-CA69-4348-9EC4-649B27ACFBB8}.Release|Any CPU.Build.0 = Release|Any CPU
{63519DEA-0C3D-4F0E-95EB-E9B6E1D55378}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{63519DEA-0C3D-4F0E-95EB-E9B6E1D55378}.Debug|Any CPU.Build.0 = Debug|Any CPU
{63519DEA-0C3D-4F0E-95EB-E9B6E1D55378}.Release|Any CPU.ActiveCfg = Release|Any CPU
{63519DEA-0C3D-4F0E-95EB-E9B6E1D55378}.Release|Any CPU.Build.0 = Release|Any CPU
{556F7AA0-00D7-4D5B-96EB-D436FEFD9810}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{556F7AA0-00D7-4D5B-96EB-D436FEFD9810}.Debug|Any CPU.Build.0 = Debug|Any CPU
{556F7AA0-00D7-4D5B-96EB-D436FEFD9810}.Release|Any CPU.ActiveCfg = Release|Any CPU
{556F7AA0-00D7-4D5B-96EB-D436FEFD9810}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
EndGlobal


@@ -1,5 +1,4 @@
using System;
using System.Linq;
namespace SabreTools.RedumpLib.Attributes
{
@@ -25,24 +24,24 @@ namespace SabreTools.RedumpLib.Attributes
string? valueStr = value?.ToString();
if (string.IsNullOrEmpty(valueStr))
return null;
// Get the member info array
var memberInfos = enumType?.GetMember(valueStr);
if (memberInfos == null)
return null;
// Get the enum value info from the array, if possible
var enumValueMemberInfo = memberInfos.FirstOrDefault(m => m.DeclaringType == enumType);
var enumValueMemberInfo = Array.Find(memberInfos, m => m.DeclaringType == enumType);
if (enumValueMemberInfo == null)
return null;
// Try to get the relevant attribute
var attributes = enumValueMemberInfo.GetCustomAttributes(typeof(HumanReadableAttribute), true);
if (attributes == null)
if (attributes == null || attributes.Length == 0)
return null;
// Return the first attribute, if possible
return attributes.FirstOrDefault() as HumanReadableAttribute;
return attributes[0] as HumanReadableAttribute;
}
}
}
}


@@ -22,4 +22,4 @@ namespace SabreTools.RedumpLib.Attributes
/// </summary>
public string? ShortName { get; set; }
}
}
}


@@ -23,4 +23,4 @@ namespace SabreTools.RedumpLib.Attributes
/// </summary>
public string? ThreeLetterCodeAlt { get; set; }
}
}
}


@@ -52,4 +52,4 @@ namespace SabreTools.RedumpLib.Attributes
/// </summary>
public bool HasSbi { get; set; } = false;
}
}
}


@@ -1,8 +1,9 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
#if NET40_OR_GREATER || NETCOREAPP || NETSTANDARD2_0_OR_GREATER
using System.Net;
#endif
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
@@ -246,25 +247,15 @@ namespace SabreTools.RedumpLib
/// <summary>
/// Fill out an existing SubmissionInfo object based on a disc page
/// </summary>
/// <param name="wc">RedumpWebClient for making the connection</param>
/// <param name="rc">RedumpClient for making the connection</param>
/// <param name="info">Existing SubmissionInfo object to fill</param>
/// <param name="id">Redump disc ID to retrieve</param>
/// <param name="includeAllData">True to include all pullable information, false to do bare minimum</param>
#if NETFRAMEWORK
public async static Task<bool> FillFromId(RedumpWebClient wc, SubmissionInfo info, int id, bool includeAllData)
#else
public async static Task<bool> FillFromId(RedumpHttpClient wc, SubmissionInfo info, int id, bool includeAllData)
#endif
public async static Task<bool> FillFromId(RedumpClient rc, SubmissionInfo info, int id, bool includeAllData)
{
// Ensure that required sections exist
info = EnsureAllSections(info);
#if NET40
var discData = await Task.Factory.StartNew(() => wc.DownloadSingleSiteID(id));
#elif NETFRAMEWORK
var discData = await Task.Run(() => wc.DownloadSingleSiteID(id));
#else
var discData = await wc.DownloadSingleSiteID(id);
#endif
var discData = await rc.DownloadSingleSiteID(id);
if (string.IsNullOrEmpty(discData))
return false;
@@ -279,22 +270,25 @@ namespace SabreTools.RedumpLib
if (title != null && firstParenLocation >= 0)
{
info.CommonDiscInfo!.Title = title.Substring(0, firstParenLocation);
var subMatches = Constants.DiscNumberLetterRegex.Matches(title);
foreach (Match subMatch in subMatches.Cast<Match>())
var submatches = Constants.DiscNumberLetterRegex.Matches(title);
foreach (Match? submatch in submatches)
{
var subMatchValue = subMatch.Groups[1].Value;
if (submatch == null)
continue;
var submatchValue = submatch.Groups[1].Value;
// Disc number or letter
if (subMatchValue.StartsWith("Disc"))
info.CommonDiscInfo.DiscNumberLetter = subMatchValue.Remove(0, "Disc ".Length);
if (submatchValue.StartsWith("Disc"))
info.CommonDiscInfo.DiscNumberLetter = submatchValue.Remove(0, "Disc ".Length);
// Issue number
else if (subMatchValue.All(c => char.IsNumber(c)))
info.CommonDiscInfo.Title += $" ({subMatchValue})";
else if (ulong.TryParse(submatchValue, out _))
info.CommonDiscInfo.Title += $" ({submatchValue})";
// Disc title
else
info.CommonDiscInfo.DiscTitle = subMatchValue;
info.CommonDiscInfo.DiscTitle = submatchValue;
}
}
// Otherwise, leave the title as-is
@@ -308,15 +302,13 @@ namespace SabreTools.RedumpLib
match = Constants.ForeignTitleRegex.Match(discData);
if (match.Success)
info.CommonDiscInfo!.ForeignTitleNonLatin = WebUtility.HtmlDecode(match.Groups[1].Value);
else
info.CommonDiscInfo!.ForeignTitleNonLatin = null;
// Category
match = Constants.CategoryRegex.Match(discData);
if (match.Success)
info.CommonDiscInfo.Category = Extensions.ToDiscCategory(match.Groups[1].Value);
info.CommonDiscInfo!.Category = Extensions.ToDiscCategory(match.Groups[1].Value);
else
info.CommonDiscInfo.Category = DiscCategory.Games;
info.CommonDiscInfo!.Category = DiscCategory.Games;
// Region
if (info.CommonDiscInfo.Region == null)
@@ -331,12 +323,17 @@ namespace SabreTools.RedumpLib
if (matches.Count > 0)
{
var tempLanguages = new List<Language?>();
foreach (Match submatch in matches.Cast<Match>())
foreach (Match? submatch in matches)
{
tempLanguages.Add(Extensions.ToLanguage(submatch.Groups[1].Value));
if (submatch == null)
continue;
var language = Extensions.ToLanguage(submatch.Groups[1].Value);
if (language != null)
tempLanguages.Add(language);
}
info.CommonDiscInfo.Languages = tempLanguages.Where(l => l != null).ToArray();
info.CommonDiscInfo.Languages = [.. tempLanguages];
}
// Serial
@@ -376,8 +373,11 @@ namespace SabreTools.RedumpLib
tempDumpers.Add(dumper);
}
foreach (Match submatch in matches.Cast<Match>())
foreach (Match? submatch in matches)
{
if (submatch == null)
continue;
string? dumper = WebUtility.HtmlDecode(submatch.Groups[1].Value);
if (dumper != null)
tempDumpers.Add(dumper);
@@ -458,36 +458,8 @@ namespace SabreTools.RedumpLib
addToLast = siteCode.IsMultiLine();
// Skip certain site codes because of data issues
switch (siteCode)
{
// Multiple
case SiteCode.InternalSerialName:
case SiteCode.Multisession:
case SiteCode.VolumeLabel:
continue;
// Audio CD
case SiteCode.RingNonZeroDataStart:
case SiteCode.UniversalHash:
continue;
// Microsoft Xbox and Xbox 360
case SiteCode.DMIHash:
case SiteCode.PFIHash:
case SiteCode.SSHash:
case SiteCode.SSVersion:
case SiteCode.XMID:
case SiteCode.XeMID:
continue;
// Microsoft Xbox One and Series X/S
case SiteCode.Filename:
continue;
// Nintendo Gamecube
case SiteCode.InternalName:
continue;
}
if (ShouldSkipSiteCode(siteCode))
continue;
// If we don't already have this site code, add it to the dictionary
if (!info.CommonDiscInfo.CommentsSpecialFields!.ContainsKey(siteCode.Value))
@@ -503,14 +475,14 @@ namespace SabreTools.RedumpLib
// If we didn't find a known tag, just add the line, just in case
if (!foundTag)
{
if (addToLast && lastSiteCode != null)
if (addToLast && lastSiteCode != null && !ShouldSkipSiteCode(lastSiteCode))
{
if (!string.IsNullOrEmpty(info.CommonDiscInfo.CommentsSpecialFields![lastSiteCode.Value]))
info.CommonDiscInfo.CommentsSpecialFields[lastSiteCode.Value] += "\n";
info.CommonDiscInfo.CommentsSpecialFields[lastSiteCode.Value] += commentLine;
}
else
else if (!addToLast || lastSiteCode == null)
{
newComments += $"{commentLine}\n";
}
@@ -591,14 +563,14 @@ namespace SabreTools.RedumpLib
// If we didn't find a known tag, just add the line, just in case
if (!foundTag)
{
if (addToLast && lastSiteCode != null)
if (addToLast && lastSiteCode != null && !ShouldSkipSiteCode(lastSiteCode))
{
if (!string.IsNullOrEmpty(info.CommonDiscInfo.ContentsSpecialFields![lastSiteCode.Value]))
info.CommonDiscInfo.ContentsSpecialFields[lastSiteCode.Value] += "\n";
info.CommonDiscInfo.ContentsSpecialFields[lastSiteCode.Value] += contentLine;
}
else
else if (!addToLast || lastSiteCode == null)
{
newContents += $"{contentLine}\n";
}
@@ -653,6 +625,7 @@ namespace SabreTools.RedumpLib
info.TracksAndWriteOffsets ??= new TracksAndWriteOffsetsSection();
info.SizeAndChecksums ??= new SizeAndChecksumsSection();
info.DumpingInfo ??= new DumpingInfoSection();
info.Artifacts ??= [];
// Ensure special dictionaries
info.CommonDiscInfo.CommentsSpecialFields ??= [];
@@ -666,11 +639,11 @@ namespace SabreTools.RedumpLib
/// </summary>
/// <param name="info">Existing submission information</param>
/// <param name="seed">User-supplied submission information</param>
public static void InjectSubmissionInformation(SubmissionInfo? info, SubmissionInfo? seed)
public static SubmissionInfo? InjectSubmissionInformation(SubmissionInfo? info, SubmissionInfo? seed)
{
// If we have any invalid info
if (seed == null)
return;
return info;
// Ensure that required sections exist
info = EnsureAllSections(info);
@@ -728,6 +701,47 @@ namespace SabreTools.RedumpLib
// Info that only overwrites if supplied
if (!string.IsNullOrEmpty(seed.CopyProtection.Protection)) info.CopyProtection.Protection = seed.CopyProtection.Protection;
}
return info;
}
/// <summary>
/// Determine if a site code should be skipped when pulling data
/// </summary>
private static bool ShouldSkipSiteCode(SiteCode? siteCode)
{
return siteCode switch
{
// Multiple
SiteCode.HighSierraVolumeDescriptor
or SiteCode.InternalSerialName
or SiteCode.Multisession
or SiteCode.VolumeLabel => true,
// Audio CD
SiteCode.RingNonZeroDataStart
or SiteCode.RingPerfectAudioOffset
or SiteCode.UniversalHash => true,
// Microsoft Xbox and Xbox 360
SiteCode.DMIHash
or SiteCode.PFIHash
or SiteCode.SSHash
or SiteCode.SSVersion
or SiteCode.XMID
or SiteCode.XeMID => true,
// Microsoft Xbox One and Series X/S
SiteCode.Filename => true,
// Nintendo Gamecube
SiteCode.InternalName => true,
// Protection
SiteCode.Protection => true,
_ => false,
};
}
#endregion
@@ -739,9 +753,10 @@ namespace SabreTools.RedumpLib
/// </summary>
/// <param name="text">Text block to process</param>
/// <returns>Processed text block, if possible</returns>
private static string ReplaceHtmlWithSiteCodes(this string text)
internal static string ReplaceHtmlWithSiteCodes(this string text)
{
if (string.IsNullOrEmpty(text))
// Empty strings are ignored
if (text.Length == 0)
return text;
foreach (SiteCode? siteCode in Enum.GetValues(typeof(SiteCode)))


@@ -10,11 +10,21 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class DiscCategoryConverter : JsonConverter<DiscCategory?>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override DiscCategory? ReadJson(JsonReader reader, Type objectType, DiscCategory? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue;
// Read the value
string? value = reader.Value as string;
if (value == null)
return null;
// Try to parse the value
return Data.Extensions.ToDiscCategory(value);
}
public override void WriteJson(JsonWriter writer, DiscCategory? value, JsonSerializer serializer)
@@ -23,4 +33,4 @@ namespace SabreTools.RedumpLib.Converters
t.WriteTo(writer);
}
}
}
}


@@ -10,11 +10,21 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class DiscTypeConverter : JsonConverter<DiscType?>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override DiscType? ReadJson(JsonReader reader, Type objectType, DiscType? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue;
// Read the value
string? value = reader.Value as string;
if (value == null)
return null;
// Try to parse the value
return Data.Extensions.ToDiscType(value);
}
public override void WriteJson(JsonWriter writer, DiscType? value, JsonSerializer serializer)
@@ -23,4 +33,4 @@ namespace SabreTools.RedumpLib.Converters
t.WriteTo(writer);
}
}
}
}


@@ -1,4 +1,5 @@
using System;
using System.Collections.Generic;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using SabreTools.RedumpLib.Data;
@@ -10,11 +11,31 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class LanguageConverter : JsonConverter<Language?[]>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override Language?[] ReadJson(JsonReader reader, Type objectType, Language?[]? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue ?? [];
// Get the current depth for checking
int currentDepth = reader.Depth;
// Read the array while it exists
List<Language> languages = [];
while (reader.Read() && reader.Depth > currentDepth)
{
string? value = reader.Value as string;
if (value == null)
continue;
Language? lang = Data.Extensions.ToLanguage(value);
if (lang != null)
languages.Add(lang.Value);
}
return [.. languages];
}
public override void WriteJson(JsonWriter writer, Language?[]? value, JsonSerializer serializer)
@@ -32,4 +53,4 @@ namespace SabreTools.RedumpLib.Converters
array.WriteTo(writer);
}
}
}
}


@@ -1,4 +1,5 @@
using System;
using System.Collections.Generic;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using SabreTools.RedumpLib.Data;
@@ -10,11 +11,31 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class LanguageSelectionConverter : JsonConverter<LanguageSelection?[]>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override LanguageSelection?[] ReadJson(JsonReader reader, Type objectType, LanguageSelection?[]? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue ?? [];
// Get the current depth for checking
int currentDepth = reader.Depth;
// Read the array while it exists
List<LanguageSelection> selections = [];
while (reader.Read() && reader.Depth > currentDepth)
{
string? value = reader.Value as string;
if (value == null)
continue;
LanguageSelection? sel = Data.Extensions.ToLanguageSelection(value);
if (sel != null)
selections.Add(sel.Value);
}
return [.. selections];
}
public override void WriteJson(JsonWriter writer, LanguageSelection?[]? value, JsonSerializer serializer)
@@ -32,4 +53,4 @@ namespace SabreTools.RedumpLib.Converters
array.WriteTo(writer);
}
}
}
}


@@ -10,11 +10,21 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class RegionConverter : JsonConverter<Region?>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override Region? ReadJson(JsonReader reader, Type objectType, Region? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue;
// Read the value
string? value = reader.Value as string;
if (value == null)
return null;
// Try to parse the value
return Data.Extensions.ToRegion(value);
}
public override void WriteJson(JsonWriter writer, Region? value, JsonSerializer serializer)
@@ -23,4 +33,4 @@ namespace SabreTools.RedumpLib.Converters
t.WriteTo(writer);
}
}
}
}


@@ -10,11 +10,21 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class SystemConverter : JsonConverter<RedumpSystem?>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override RedumpSystem? ReadJson(JsonReader reader, Type objectType, RedumpSystem? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue;
// Read the value
string? value = reader.Value as string;
if (value == null)
return null;
// Try to parse the value
return Data.Extensions.ToRedumpSystem(value);
}
public override void WriteJson(JsonWriter writer, RedumpSystem? value, JsonSerializer serializer)
@@ -23,4 +33,4 @@ namespace SabreTools.RedumpLib.Converters
t.WriteTo(writer);
}
}
}
}


@@ -10,11 +10,21 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class YesNoConverter : JsonConverter<YesNo?>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override YesNo? ReadJson(JsonReader reader, Type objectType, YesNo? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue;
// Read the value
if (reader.Value is bool bVal)
return Data.Extensions.ToYesNo(bVal);
else if (reader.Value is string sVal)
return Data.Extensions.ToYesNo(sVal);
return null;
}
public override void WriteJson(JsonWriter writer, YesNo? value, JsonSerializer serializer)
@@ -23,4 +33,4 @@ namespace SabreTools.RedumpLib.Converters
t.WriteTo(writer);
}
}
}
}


@@ -4,18 +4,17 @@ namespace SabreTools.RedumpLib.Data
{
public static class Constants
{
// TODO: Add RegexOptions.Compiled
#region Regular Expressions
/// <summary>
/// Regex matching the added field on a disc page
/// </summary>
public static Regex AddedRegex = new(@"<tr><th>Added</th><td>(.*?)</td></tr>");
public static Regex AddedRegex = new(@"<tr><th>Added</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the barcode field on a disc page
/// </summary>
public static Regex BarcodeRegex = new(@"<tr><th>Barcode</th></tr><tr><td>(.*?)</td></tr>");
public static Regex BarcodeRegex = new(@"<tr><th>Barcode</th></tr><tr><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the BCA field on a disc page
@@ -25,87 +24,87 @@ namespace SabreTools.RedumpLib.Data
+ "<tr><td>(?<row1number>.*?)</td><td>(?<row1contents>.*?)</td><td>(?<row1ascii>.*?)</td></tr>"
+ "<tr><td>(?<row2number>.*?)</td><td>(?<row2contents>.*?)</td><td>(?<row2ascii>.*?)</td></tr>"
+ "<tr><td>(?<row3number>.*?)</td><td>(?<row3contents>.*?)</td><td>(?<row3ascii>.*?)</td></tr>"
+ "<tr><td>(?<row4number>.*?)</td><td>(?<row4contents>.*?)</td><td>(?<row4ascii>.*?)</td></tr>", RegexOptions.Singleline);
+ "<tr><td>(?<row4number>.*?)</td><td>(?<row4contents>.*?)</td><td>(?<row4ascii>.*?)</td></tr>", RegexOptions.Compiled | RegexOptions.Singleline);
/// <summary>
/// Regex matching the category field on a disc page
/// </summary>
public static Regex CategoryRegex = new(@"<tr><th>Category</th><td>(.*?)</td></tr>");
public static Regex CategoryRegex = new(@"<tr><th>Category</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the comments field on a disc page
/// </summary>
public static Regex CommentsRegex = new(@"<tr><th>Comments</th></tr><tr><td>(.*?)</td></tr>", RegexOptions.Singleline);
public static Regex CommentsRegex = new(@"<tr><th>Comments</th></tr><tr><td>(.*?)</td></tr>", RegexOptions.Compiled | RegexOptions.Singleline);
/// <summary>
/// Regex matching the contents field on a disc page
/// </summary>
public static Regex ContentsRegex = new(@"<tr><th>Contents</th></tr><tr .*?><td>(.*?)</td></tr>", RegexOptions.Singleline);
public static Regex ContentsRegex = new(@"<tr><th>Contents</th></tr><tr .*?><td>(.*?)</td></tr>", RegexOptions.Compiled | RegexOptions.Singleline);
/// <summary>
/// Regex matching individual disc links on a results page
/// </summary>
public static Regex DiscRegex = new(@"<a href=""/disc/(\d+)/"">");
public static Regex DiscRegex = new(@"<a href=""/disc/(\d+)/"">", RegexOptions.Compiled);
/// <summary>
/// Regex matching the disc number or letter field on a disc page
/// </summary>
public static Regex DiscNumberLetterRegex = new(@"\((.*?)\)");
public static Regex DiscNumberLetterRegex = new(@"\((.*?)\)", RegexOptions.Compiled);
/// <summary>
/// Regex matching the dumpers on a disc page
/// </summary>
public static Regex DumpersRegex = new(@"<a href=""/discs/dumper/(.*?)/"">");
public static Regex DumpersRegex = new(@"<a href=""/discs/dumper/(.*?)/"">", RegexOptions.Compiled);
/// <summary>
/// Regex matching the edition field on a disc page
/// </summary>
public static Regex EditionRegex = new(@"<tr><th>Edition</th><td>(.*?)</td></tr>");
public static Regex EditionRegex = new(@"<tr><th>Edition</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the error count field on a disc page
/// </summary>
public static Regex ErrorCountRegex = new(@"<tr><th>Errors count</th><td>(.*?)</td></tr>");
public static Regex ErrorCountRegex = new(@"<tr><th>Errors count</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the foreign title field on a disc page
/// </summary>
public static Regex ForeignTitleRegex = new(@"<h2>(.*?)</h2>");
public static Regex ForeignTitleRegex = new(@"<h2>(.*?)</h2>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the "full match" ID list from a WIP disc page
/// </summary>
public static Regex FullMatchRegex = new(@"<td class=""static"">full match ids: (.*?)</td>");
public static Regex FullMatchRegex = new(@"<td class=""static"">full match ids: (.*?)</td>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the languages field on a disc page
/// </summary>
public static Regex LanguagesRegex = new(@"<img src=""/images/languages/(.*?)\.png"" alt="".*?"" title="".*?"" />\s*");
public static Regex LanguagesRegex = new(@"<img src=""/images/languages/(.*?)\.png"" alt="".*?"" title="".*?"" />\s*", RegexOptions.Compiled);
/// <summary>
/// Regex matching the last modified field on a disc page
/// </summary>
public static Regex LastModifiedRegex = new(@"<tr><th>Last modified</th><td>(.*?)</td></tr>");
public static Regex LastModifiedRegex = new(@"<tr><th>Last modified</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the media field on a disc page
/// </summary>
public static Regex MediaRegex = new(@"<tr><th>Media</th><td>(.*?)</td></tr>");
public static Regex MediaRegex = new(@"<tr><th>Media</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching individual WIP disc links on a results page
/// </summary>
public static Regex NewDiscRegex = new(@"<a (style=.*)?href=""/newdisc/(\d+)/"">");
public static Regex NewDiscRegex = new(@"<a (style=.*)?href=""/newdisc/(\d+)/"">", RegexOptions.Compiled);
/// <summary>
/// Regex matching the "partial match" ID list from a WIP disc page
/// </summary>
public static Regex PartialMatchRegex = new(@"<td class=""static"">partial match ids: (.*?)</td>");
public static Regex PartialMatchRegex = new(@"<td class=""static"">partial match ids: (.*?)</td>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the disc key on a PS3 disc page
/// </summary>
public static Regex PS3DiscKey = new(@"<th>Disc Key</th><th>Disc ID</th><th>Permanent Information & Control \(PIC\)</th></tr><tr><td>(.*?)</td><td>");
public static Regex PS3DiscKey = new(@"<th>Disc Key</th><th>Disc ID</th><th>Permanent Information & Control \(PIC\)</th></tr><tr><td>(.*?)</td><td>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the PVD field on a disc page
@@ -115,62 +114,62 @@ namespace SabreTools.RedumpLib.Data
+ @"<tr><td>Creation</td><td>(?<creationbytes>.*?)</td><td>(?<creationdate>.*?)</td><td>(?<creationtime>.*?)</td><td>(?<creationtimezone>.*?)</td></tr>"
+ @"<tr><td>Modification</td><td>(?<modificationbytes>.*?)</td><td>(?<modificationdate>.*?)</td><td>(?<modificationtime>.*?)</td><td>(?<modificationtimezone>.*?)</td></tr>"
+ @"<tr><td>Expiration</td><td>(?<expirationbytes>.*?)</td><td>(?<expirationdate>.*?)</td><td>(?<expirationtime>.*?)</td><td>(?<expirationtimezone>.*?)</td></tr>"
+ @"<tr><td>Effective</td><td>(?<effectivebytes>.*?)</td><td>(?<effectivedate>.*?)</td><td>(?<effectivetime>.*?)</td><td>(?<effectivetimezone>.*?)</td></tr>", RegexOptions.Singleline);
+ @"<tr><td>Effective</td><td>(?<effectivebytes>.*?)</td><td>(?<effectivedate>.*?)</td><td>(?<effectivetime>.*?)</td><td>(?<effectivetimezone>.*?)</td></tr>", RegexOptions.Compiled | RegexOptions.Singleline);
/// <summary>
/// Regex matching the region field on a disc page
/// </summary>
public static Regex RegionRegex = new(@"<tr><th>Region</th><td><a href=""/discs/region/(.*?)/"">");
public static Regex RegionRegex = new(@"<tr><th>Region</th><td><a href=""/discs/region/(.*?)/"">", RegexOptions.Compiled);
/// <summary>
/// Regex matching a double-layer disc ringcode information
/// </summary>
public static Regex RingCodeDoubleRegex = new(@"", RegexOptions.Singleline); // Varies based on available fields, like Additional Mould
public static Regex RingCodeDoubleRegex = new(@"", RegexOptions.Compiled | RegexOptions.Singleline); // Varies based on available fields, like Additional Mould
/// <summary>
/// Regex matching a single-layer disc ringcode information
/// </summary>
public static Regex RingCodeSingleRegex = new(@"", RegexOptions.Singleline); // Varies based on available fields, like Additional Mould
public static Regex RingCodeSingleRegex = new(@"", RegexOptions.Compiled | RegexOptions.Singleline); // Varies based on available fields, like Additional Mould
/// <summary>
/// Regex matching the serial field on a disc page
/// </summary>
public static Regex SerialRegex = new(@"<tr><th>Serial</th><td>(.*?)</td></tr>");
public static Regex SerialRegex = new(@"<tr><th>Serial</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the system field on a disc page
/// </summary>
public static Regex SystemRegex = new(@"<tr><th>System</th><td><a href=""/discs/system/(.*?)/"">");
public static Regex SystemRegex = new(@"<tr><th>System</th><td><a href=""/discs/system/(.*?)/"">", RegexOptions.Compiled);
/// <summary>
/// Regex matching the title field on a disc page
/// </summary>
public static Regex TitleRegex = new(@"<h1>(.*?)</h1>");
public static Regex TitleRegex = new(@"<h1>(.*?)</h1>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the current nonce token for login
/// </summary>
public static Regex TokenRegex = new(@"<input type=""hidden"" name=""csrf_token"" value=""(.*?)"" />");
public static Regex TokenRegex = new(@"<input type=""hidden"" name=""csrf_token"" value=""(.*?)"" />", RegexOptions.Compiled);
/// <summary>
/// Regex matching a single track on a disc page
/// </summary>
public static Regex TrackRegex = new(@"<tr><td>(?<number>.*?)</td><td>(?<type>.*?)</td><td>(?<pregap>.*?)</td><td>(?<length>.*?)</td><td>(?<sectors>.*?)</td><td>(?<size>.*?)</td><td>(?<crc32>.*?)</td><td>(?<md5>.*?)</td><td>(?<sha1>.*?)</td></tr>", RegexOptions.Singleline);
public static Regex TrackRegex = new(@"<tr><td>(?<number>.*?)</td><td>(?<type>.*?)</td><td>(?<pregap>.*?)</td><td>(?<length>.*?)</td><td>(?<sectors>.*?)</td><td>(?<size>.*?)</td><td>(?<crc32>.*?)</td><td>(?<md5>.*?)</td><td>(?<sha1>.*?)</td></tr>", RegexOptions.Compiled | RegexOptions.Singleline);
/// <summary>
/// Regex matching the track count on a disc page
/// </summary>
public static Regex TrackCountRegex = new(@"<tr><th>Number of tracks</th><td>(.*?)</td></tr>");
public static Regex TrackCountRegex = new(@"<tr><th>Number of tracks</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the version field on a disc page
/// </summary>
public static Regex VersionRegex = new(@"<tr><th>Version</th><td>(.*?)</td></tr>");
public static Regex VersionRegex = new(@"<tr><th>Version</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the write offset field on a disc page
/// </summary>
public static Regex WriteOffsetRegex = new(@"<tr><th>Write offset</th><td>(.*?)</td></tr>");
public static Regex WriteOffsetRegex = new(@"<tr><th>Write offset</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
#endregion
@@ -307,4 +306,4 @@ namespace SabreTools.RedumpLib.Data
#endregion
}
}
}
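Since every pattern above now carries RegexOptions.Compiled, the one-time cost of compiling to IL is paid at first use in exchange for faster repeated matching against downloaded pages. A minimal sketch of how one of these field regexes is typically applied follows; the wrapper method is illustrative, not taken from the library.

using System.Text.RegularExpressions;
using SabreTools.RedumpLib.Data;

public static class FieldRegexSketch
{
    // Pulls the serial value out of a disc page, or returns null if the row is absent
    public static string? GetSerial(string discPageHtml)
    {
        Match match = Constants.SerialRegex.Match(discPageHtml);
        return match.Success ? match.Groups[1].Value : null;
    }
}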


@@ -281,7 +281,7 @@ namespace SabreTools.RedumpLib.Data
[Language(LongName = "Bini; Edo", ThreeLetterCode = "bin")]
Bini,
[Language(LongName = "Bislama", TwoLetterCode = "bla", ThreeLetterCode = "bis")]
[Language(LongName = "Bislama", TwoLetterCode = "bi", ThreeLetterCode = "bis")]
Bislama,
// Blin; Bilin
@@ -2008,8 +2008,8 @@ namespace SabreTools.RedumpLib.Data
[System(Category = SystemCategory.DiscBasedConsole, LongName = "Philips CD-i", ShortName = "cdi", HasCues = true, HasDat = true)]
PhilipsCDi,
[System(Category = SystemCategory.DiscBasedConsole, LongName = "Philips CD-i Digital Video", ShortName = "cdi-video", IsBanned = true)]
PhilipsCDiDigitalVideo,
[System(Category = SystemCategory.DiscBasedConsole, Available = false, LongName = "Playmaji Polymega")]
PlaymajiPolymega,
[System(Category = SystemCategory.DiscBasedConsole, Available = false, LongName = "Pioneer LaserActive")]
PioneerLaserActive,
@@ -2234,7 +2234,7 @@ namespace SabreTools.RedumpLib.Data
[System(Category = SystemCategory.Computer, LongName = "NEC PC-98 series", ShortName = "pc-98", HasCues = true, HasDat = true)]
NECPC98series,
[System(Category = SystemCategory.Computer, LongName = "Sharp X68000", ShortName = "x86kcd", HasCues = true, HasDat = true)]
[System(Category = SystemCategory.Computer, LongName = "Sharp X68000", ShortName = "x68k", HasCues = true, HasDat = true)]
SharpX68000,
// End of computer section delimiter
@@ -2331,7 +2331,7 @@ namespace SabreTools.RedumpLib.Data
[System(Category = SystemCategory.Arcade, Available = false, LongName = "Merit Industries MegaTouch XL")]
MeritIndustriesMegaTouchXL,
[System(Category = SystemCategory.Arcade, LongName = "Namco · Sega · Nintendo Triforce", ShortName = "triforce", HasCues = true, HasDat = true, HasGdi = true)]
[System(Category = SystemCategory.Arcade, LongName = "Namco · Sega · Nintendo Triforce", ShortName = "trf", HasCues = true, HasDat = true, HasGdi = true)]
NamcoSegaNintendoTriforce,
[System(Category = SystemCategory.Arcade, LongName = "Namco System 12", ShortName = "ns12")]
@@ -2488,8 +2488,105 @@ namespace SabreTools.RedumpLib.Data
/// </remarks>
public enum Region
{
// TODO: Should "regions" and multi-country sets be phased out?
// TODO: Should "regions" be moved to the end?
#region Aggregates - Redump Only
[HumanReadable(LongName = "Asia", ShortName = "A")]
Asia,
[HumanReadable(LongName = "Asia, Europe", ShortName = "A,E")]
AsiaEurope,
[HumanReadable(LongName = "Asia, USA", ShortName = "A,U")]
AsiaUSA,
[HumanReadable(LongName = "Australia, Germany", ShortName = "Au,G")]
AustraliaGermany,
[HumanReadable(LongName = "Australia, New Zealand", ShortName = "Au,Nz")]
AustraliaNewZealand,
[HumanReadable(LongName = "Austria, Switzerland", ShortName = "At,Ch")]
AustriaSwitzerland,
[HumanReadable(LongName = "Belgium, Netherlands", ShortName = "Be,N")]
BelgiumNetherlands,
[HumanReadable(LongName = "Europe", ShortName = "E")]
Europe,
[HumanReadable(LongName = "Europe, Asia", ShortName = "E,A")]
EuropeAsia,
[HumanReadable(LongName = "Europe, Australia", ShortName = "E,Au")]
EuropeAustralia,
[HumanReadable(LongName = "Europe, Canada", ShortName = "E,Ca")]
EuropeCanada,
[HumanReadable(LongName = "Europe, Germany", ShortName = "E,G")]
EuropeGermany,
[HumanReadable(LongName = "Export", ShortName = "Ex")]
Export,
[HumanReadable(LongName = "France, Spain", ShortName = "F,S")]
FranceSpain,
[HumanReadable(LongName = "Greater China", ShortName = "GC")]
GreaterChina,
[HumanReadable(LongName = "Japan, Asia", ShortName = "J,A")]
JapanAsia,
[HumanReadable(LongName = "Japan, Europe", ShortName = "J,E")]
JapanEurope,
[HumanReadable(LongName = "Japan, Korea", ShortName = "J,K")]
JapanKorea,
[HumanReadable(LongName = "Japan, USA", ShortName = "J,U")]
JapanUSA,
[HumanReadable(LongName = "Latin America", ShortName = "LAm")]
LatinAmerica,
[HumanReadable(LongName = "Scandinavia", ShortName = "Sca")]
Scandinavia,
[HumanReadable(LongName = "Spain, Portugal", ShortName = "S,Pt")]
SpainPortugal,
[HumanReadable(LongName = "UK, Australia", ShortName = "Uk,Au")]
UKAustralia,
[HumanReadable(LongName = "USA, Asia", ShortName = "U,A")]
USAAsia,
[HumanReadable(LongName = "USA, Australia", ShortName = "U,Au")]
USAAustralia,
[HumanReadable(LongName = "USA, Brazil", ShortName = "U,B")]
USABrazil,
[HumanReadable(LongName = "USA, Canada", ShortName = "U,Ca")]
USACanada,
[HumanReadable(LongName = "USA, Europe", ShortName = "U,E")]
USAEurope,
[HumanReadable(LongName = "USA, Germany", ShortName = "U,G")]
USAGermany,
[HumanReadable(LongName = "USA, Japan", ShortName = "U,J")]
USAJapan,
[HumanReadable(LongName = "USA, Korea", ShortName = "U,K")]
USAKorea,
[HumanReadable(LongName = "World", ShortName = "W")]
World,
#endregion
#region A
@@ -2535,30 +2632,12 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Ascension Island", ShortName = "Ac")]
AscensionIsland,
[HumanReadable(LongName = "Asia", ShortName = "A")]
Asia,
[HumanReadable(LongName = "Asia, Europe", ShortName = "A,E")]
AsiaEurope,
[HumanReadable(LongName = "Asia, USA", ShortName = "A,U")]
AsiaUSA,
[HumanReadable(LongName = "Australia", ShortName = "Au")]
Australia,
[HumanReadable(LongName = "Australia, Germany", ShortName = "Au,G")]
AustraliaGermany,
[HumanReadable(LongName = "Australia, New Zealand", ShortName = "Au,Nz")]
AustraliaNewZealand,
[HumanReadable(LongName = "Austria", ShortName = "At")]
Austria,
[HumanReadable(LongName = "Austria, Switzerland", ShortName = "At,Ch")]
AustriaSwitzerland,
[HumanReadable(LongName = "Azerbaijan", ShortName = "Az")]
Azerbaijan,
@@ -2584,9 +2663,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Belgium", ShortName = "Be")]
Belgium,
[HumanReadable(LongName = "Belgium, Netherlands", ShortName = "Be,N")]
BelgiumNetherlands,
[HumanReadable(LongName = "Belize", ShortName = "Bz")]
Belize,
@@ -2767,21 +2843,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Ethiopia", ShortName = "Et")]
Ethiopia,
[HumanReadable(LongName = "Europe", ShortName = "E")]
Europe,
[HumanReadable(LongName = "Europe, Asia", ShortName = "E,A")]
EuropeAsia,
[HumanReadable(LongName = "Europe, Australia", ShortName = "E,Au")]
EuropeAustralia,
[HumanReadable(LongName = "Europe, Canada", ShortName = "E,Ca")]
EuropeCanada,
[HumanReadable(LongName = "Europe, Germany", ShortName = "E,G")]
EuropeGermany,
// Commented out to avoid confusion
//[HumanReadable(LongName = "European Union", ShortName = "Eu")]
//EuropeanUnion,
@@ -2790,9 +2851,6 @@ namespace SabreTools.RedumpLib.Data
//[HumanReadable(LongName = "Eurozone", ShortName = "Ez")]
//Eurozone,
[HumanReadable(LongName = "Export", ShortName = "Ex")]
Export,
#endregion
#region F
@@ -2821,9 +2879,6 @@ namespace SabreTools.RedumpLib.Data
//[HumanReadable(LongName = "France, Metropolitan", ShortName = "Fx")]
//FranceMetropolitan,
[HumanReadable(LongName = "France, Spain", ShortName = "F,S")]
FranceSpain,
[HumanReadable(LongName = "French Guiana", ShortName = "Gf")]
FrenchGuiana,
@@ -2856,9 +2911,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Gibraltar", ShortName = "Gi")]
Gibraltar,
[HumanReadable(LongName = "Greater China", ShortName = "GC")]
GreaterChina,
[HumanReadable(LongName = "Greece", ShortName = "Gr")]
Greece,
@@ -2958,18 +3010,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Japan", ShortName = "J")]
Japan,
[HumanReadable(LongName = "Japan, Asia", ShortName = "J,A")]
JapanAsia,
[HumanReadable(LongName = "Japan, Europe", ShortName = "J,E")]
JapanEurope,
[HumanReadable(LongName = "Japan, Korea", ShortName = "J,K")]
JapanKorea,
[HumanReadable(LongName = "Japan, USA", ShortName = "J,U")]
JapanUSA,
[HumanReadable(LongName = "Jersey", ShortName = "Je")]
Jersey,
@@ -3009,9 +3049,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "(Laos) Lao People's Democratic Republic", ShortName = "La")]
Laos,
[HumanReadable(LongName = "Latin America", ShortName = "LAm")]
LatinAmerica,
[HumanReadable(LongName = "Latvia", ShortName = "Lv")]
Latvia,
@@ -3264,9 +3301,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Saudi Arabia", ShortName = "Sa")]
SaudiArabia,
[HumanReadable(LongName = "Scandinavia", ShortName = "Sca")]
Scandinavia,
[HumanReadable(LongName = "Senegal", ShortName = "Sn")]
Senegal,
@@ -3310,9 +3344,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Spain", ShortName = "S")]
Spain,
[HumanReadable(LongName = "Spain, Portugal", ShortName = "S,Pt")]
SpainPortugal,
[HumanReadable(LongName = "Sri Lanka", ShortName = "Lk")]
SriLanka,
@@ -3397,9 +3428,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "UK", ShortName = "Uk")]
UnitedKingdom,
[HumanReadable(LongName = "UK, Australia", ShortName = "Uk,Au")]
UKAustralia,
[HumanReadable(LongName = "Ukraine", ShortName = "Ue")]
Ukraine,
@@ -3424,30 +3452,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "USA", ShortName = "U")]
UnitedStatesOfAmerica,
[HumanReadable(LongName = "USA, Asia", ShortName = "U,A")]
USAAsia,
[HumanReadable(LongName = "USA, Australia", ShortName = "U,Au")]
USAAustralia,
[HumanReadable(LongName = "USA, Brazil", ShortName = "U,B")]
USABrazil,
[HumanReadable(LongName = "USA, Canada", ShortName = "U,Ca")]
USACanada,
[HumanReadable(LongName = "USA, Europe", ShortName = "U,E")]
USAEurope,
[HumanReadable(LongName = "USA, Germany", ShortName = "U,G")]
USAGermany,
[HumanReadable(LongName = "USA, Japan", ShortName = "U,J")]
USAJapan,
[HumanReadable(LongName = "USA, Korea", ShortName = "U,K")]
USAKorea,
[HumanReadable(LongName = "USSR", ShortName = "Su")]
USSR,
@@ -3483,9 +3487,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Western Sahara", ShortName = "Eh")]
WesternSahara,
[HumanReadable(LongName = "World", ShortName = "W")]
World,
#endregion
#region Y
@@ -3526,7 +3527,7 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:ALTF]", LongName = "<b>Alternative Foreign Title</b>:")]
AlternativeForeignTitle,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Applications</b>:", LongName = "<b>Applications</b>:")]
Applications,
@@ -3536,30 +3537,38 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:BBFC]", LongName = "<b>BBFC Reg. No.</b>:")]
BBFCRegistrationNumber,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Bethesda ID</b>:", LongName = "<b>Bethesda ID</b>:")]
BethesdaID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>CD Projekt ID</b>:", LongName = "<b>CD Projekt ID</b>:")]
CDProjektID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Compatible OS</b>:", LongName = "<b>Compatible OS</b>:")]
CompatibleOS,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Disc Hologram ID</b>:", LongName = "<b>Disc Hologram ID</b>:")]
DiscHologramID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Disc Title (non-Latin)</b>:", LongName = "<b>Disc Title (non-Latin)</b>:")]
DiscTitleNonLatin,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>DMI</b>:", LongName = "<b>DMI</b>:")]
DMIHash,
[HumanReadable(ShortName = "[T:DNAS]", LongName = "<b>DNAS Disc ID</b>:")]
DNASDiscID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Edition (non-Latin)</b>:", LongName = "<b>Edition (non-Latin)</b>:")]
EditionNonLatin,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Eidos ID</b>:", LongName = "<b>Eidos ID</b>:")]
EidosID,
@@ -3569,7 +3578,7 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:X]", LongName = "<b>Extras</b>:")]
Extras,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Filename</b>:", LongName = "<b>Filename</b>:")]
Filename,
@@ -3579,7 +3588,7 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:GF]", LongName = "<b>Game Footage</b>:")]
GameFootage,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Games</b>:", LongName = "<b>Games</b>:")]
Games,
@@ -3589,13 +3598,21 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:GTID]", LongName = "<b>GT Interactive ID</b>:")]
GTInteractiveID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>High Sierra Volume Descriptor</b>:", LongName = "<b>High Sierra Volume Descriptor</b>:")]
HighSierraVolumeDescriptor,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Internal Name</b>:", LongName = "<b>Internal Name</b>:")]
InternalName,
[HumanReadable(ShortName = "[T:ISN]", LongName = "<b>Internal Serial</b>:")]
InternalSerialName,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Interplay ID</b>:", LongName = "<b>Interplay ID</b>:")]
InterplayID,
[HumanReadable(ShortName = "[T:ISBN]", LongName = "<b>ISBN</b>:")]
ISBN,
@@ -3614,14 +3631,18 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:KID]", LongName = "<b>Konami ID</b>:")]
KonamiID,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Logs Link</b>:", LongName = "<b>Logs Link</b>:")]
LogsLink,
[HumanReadable(ShortName = "[T:LAID]", LongName = "<b>Lucas Arts ID</b>:")]
LucasArtsID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Microsoft ID</b>:", LongName = "<b>Microsoft ID</b>:")]
MicrosoftID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Multisession</b>:", LongName = "<b>Multisession</b>:")]
Multisession,
@@ -3643,7 +3664,12 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:P]", LongName = "<b>Patches</b>:")]
Patches,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
/// <remarks>No text value after</remarks>
[HumanReadable(ShortName = "PC/Mac Hybrid", LongName = "PC/Mac Hybrid")]
PCMacHybrid,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>PFI</b>:", LongName = "<b>PFI</b>:")]
PFIHash,
@@ -3653,16 +3679,25 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:PCID]", LongName = "<b>Pony Canyon ID</b>:")]
PonyCanyonID,
/// <remarks>No text value after</remarks>
[HumanReadable(ShortName = "[T:PT2]", LongName = "<b>Postgap type</b>: Form 2")]
PostgapType,
[HumanReadable(ShortName = "[T:PPN]", LongName = "<b>PPN</b>:")]
PPN,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag for some systems yet
[HumanReadable(ShortName = "<b>Protection</b>:", LongName = "<b>Protection</b>:")]
Protection,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Ring non-zero data start</b>:", LongName = "<b>Ring non-zero data start</b>:")]
RingNonZeroDataStart,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Ring Perfect Audio Offset</b>:", LongName = "<b>Ring Perfect Audio Offset</b>:")]
RingPerfectAudioOffset,
[HumanReadable(ShortName = "[T:RD]", LongName = "<b>Rolling Demos</b>:")]
RollingDemos,
@@ -3678,15 +3713,15 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:S]", LongName = "<b>Series</b>:")]
Series,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Sierra ID</b>:", LongName = "<b>Sierra ID</b>:")]
SierraID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>SS</b>:", LongName = "<b>SS</b>:")]
SSHash,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>SS version</b>:", LongName = "<b>SS version</b>:")]
SSVersion,
@@ -3699,7 +3734,7 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:UID]", LongName = "<b>Ubisoft ID</b>:")]
UbisoftID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Universal Hash (SHA-1)</b>:", LongName = "<b>Universal Hash (SHA-1)</b>:")]
UniversalHash,
@@ -3715,14 +3750,15 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:VOL]", LongName = "<b>Volume Label</b>:")]
VolumeLabel,
/// <remarks>No text value after</remarks>
[HumanReadable(ShortName = "[T:VCD]", LongName = "<b>V-CD</b>")]
VCD,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>XeMID</b>:", LongName = "<b>XeMID</b>:")]
XeMID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>XMID</b>:", LongName = "<b>XMID</b>:")]
XMID,
}
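Pseudo-tags such as Interplay ID and PC/Mac Hybrid have no [T:...] site code, so they travel in the special-fields dictionaries and are written out with their HumanReadable short names. A minimal sketch of that flow, assuming SubmissionInfo and CommonDiscInfoSection are constructed as shown later in this comparison; the values are illustrative.

using System.Collections.Generic;
using SabreTools.RedumpLib;
using SabreTools.RedumpLib.Data;

public static class PseudoTagSketch
{
    public static void Main()
    {
        var info = new SubmissionInfo
        {
            CommonDiscInfo = new CommonDiscInfoSection
            {
                CommentsSpecialFields = new Dictionary<SiteCode, string>
                {
                    [SiteCode.InterplayID] = "12345",       // illustrative value
                    [SiteCode.PCMacHybrid] = string.Empty,  // "No text value after" pseudo-tag
                },
            },
        };

        // Folds the dictionary entries into CommonDiscInfo.Comments in the ordered-tag format
        Formatter.ProcessSpecialFields(info);
    }
}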

File diff suppressed because it is too large


@@ -1,6 +1,5 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Newtonsoft.Json;
using SabreTools.RedumpLib.Converters;
@@ -73,6 +72,16 @@ namespace SabreTools.RedumpLib.Data
public object Clone()
{
Dictionary<string, string>? artifacts = null;
if (this.Artifacts != null)
{
artifacts = new Dictionary<string, string>();
foreach (var kvp in this.Artifacts)
{
artifacts[kvp.Key] = kvp.Value;
}
}
return new SubmissionInfo
{
SchemaVersion = this.SchemaVersion,
@@ -90,7 +99,7 @@ namespace SabreTools.RedumpLib.Data
TracksAndWriteOffsets = this.TracksAndWriteOffsets?.Clone() as TracksAndWriteOffsetsSection,
SizeAndChecksums = this.SizeAndChecksums?.Clone() as SizeAndChecksumsSection,
DumpingInfo = this.DumpingInfo?.Clone() as DumpingInfoSection,
Artifacts = this.Artifacts?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
Artifacts = artifacts,
};
}
}
@@ -101,16 +110,16 @@ namespace SabreTools.RedumpLib.Data
public class CommonDiscInfoSection : ICloneable
{
// Name not defined by Redump
[JsonProperty(PropertyName = "d_system", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_system", DefaultValueHandling = DefaultValueHandling.Include)]
[JsonConverter(typeof(SystemConverter))]
public RedumpSystem? System { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_media", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_media", DefaultValueHandling = DefaultValueHandling.Include)]
[JsonConverter(typeof(DiscTypeConverter))]
public DiscType? Media { get; set; }
[JsonProperty(PropertyName = "d_title", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_title", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Title { get; set; }
[JsonProperty(PropertyName = "d_title_foreign", DefaultValueHandling = DefaultValueHandling.Ignore)]
@@ -122,15 +131,15 @@ namespace SabreTools.RedumpLib.Data
[JsonProperty(PropertyName = "d_label", NullValueHandling = NullValueHandling.Ignore)]
public string? DiscTitle { get; set; }
[JsonProperty(PropertyName = "d_category", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_category", DefaultValueHandling = DefaultValueHandling.Include)]
[JsonConverter(typeof(DiscCategoryConverter))]
public DiscCategory? Category { get; set; }
[JsonProperty(PropertyName = "d_region", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_region", DefaultValueHandling = DefaultValueHandling.Include)]
[JsonConverter(typeof(RegionConverter))]
public Region? Region { get; set; }
[JsonProperty(PropertyName = "d_languages", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_languages", DefaultValueHandling = DefaultValueHandling.Include)]
[JsonConverter(typeof(LanguageConverter))]
public Language?[]? Languages { get; set; }
@@ -147,7 +156,7 @@ namespace SabreTools.RedumpLib.Data
[JsonProperty(PropertyName = "d_ring_0_id", NullValueHandling = NullValueHandling.Ignore)]
public string? RingId { get; private set; }
[JsonProperty(PropertyName = "d_ring_0_ma1", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_ring_0_ma1", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Layer0MasteringRing { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma1_sid", NullValueHandling = NullValueHandling.Ignore)]
@@ -162,7 +171,7 @@ namespace SabreTools.RedumpLib.Data
[JsonProperty(PropertyName = "d_ring_0_mo1", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer0AdditionalMould { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma2", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_ring_0_ma2", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Layer1MasteringRing { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma2_sid", NullValueHandling = NullValueHandling.Ignore)]
@@ -177,7 +186,7 @@ namespace SabreTools.RedumpLib.Data
[JsonProperty(PropertyName = "d_ring_0_mo2", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer1AdditionalMould { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma3", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_ring_0_ma3", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Layer2MasteringRing { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma3_sid", NullValueHandling = NullValueHandling.Ignore)]
@@ -186,7 +195,7 @@ namespace SabreTools.RedumpLib.Data
[JsonProperty(PropertyName = "d_ring_0_ts3", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer2ToolstampMasteringCode { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma4", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_ring_0_ma4", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Layer3MasteringRing { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma4_sid", NullValueHandling = NullValueHandling.Ignore)]
@@ -233,6 +242,26 @@ namespace SabreTools.RedumpLib.Data
public object Clone()
{
Dictionary<SiteCode, string>? commentsSpecialFields = null;
if (this.CommentsSpecialFields != null)
{
commentsSpecialFields = new Dictionary<SiteCode, string>();
foreach (var kvp in this.CommentsSpecialFields)
{
commentsSpecialFields[kvp.Key] = kvp.Value;
}
}
Dictionary<SiteCode, string>? contentsSpecialFields = null;
if (this.ContentsSpecialFields != null)
{
contentsSpecialFields = new Dictionary<SiteCode, string>();
foreach (var kvp in this.ContentsSpecialFields)
{
contentsSpecialFields[kvp.Key] = kvp.Value;
}
}
return new CommonDiscInfoSection
{
System = this.System,
@@ -271,9 +300,9 @@ namespace SabreTools.RedumpLib.Data
EXEDateBuildDate = this.EXEDateBuildDate,
ErrorsCount = this.ErrorsCount,
Comments = this.Comments,
CommentsSpecialFields = this.CommentsSpecialFields?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
CommentsSpecialFields = commentsSpecialFields,
Contents = this.Contents,
ContentsSpecialFields = this.ContentsSpecialFields?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
ContentsSpecialFields = contentsSpecialFields,
};
}
}
@@ -414,13 +443,23 @@ namespace SabreTools.RedumpLib.Data
public object Clone()
{
Dictionary<string, List<string>?>? fullProtections = null;
if (this.FullProtections != null)
{
fullProtections = new Dictionary<string, List<string>?>();
foreach (var kvp in this.FullProtections)
{
fullProtections[kvp.Key] = kvp.Value;
}
}
return new CopyProtectionSection
{
AntiModchip = this.AntiModchip,
LibCrypt = this.LibCrypt,
LibCryptData = this.LibCryptData,
Protection = this.Protection,
FullProtections = this.FullProtections?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
FullProtections = fullProtections,
SecuROMData = this.SecuROMData,
};
}
@@ -531,31 +570,35 @@ namespace SabreTools.RedumpLib.Data
public class DumpingInfoSection : ICloneable
{
// Name not defined by Redump -- Only used with MPF
[JsonProperty(PropertyName = "d_frontend_version", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_frontend_version", DefaultValueHandling = DefaultValueHandling.Include)]
public string? FrontendVersion { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_dumping_program", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_dumping_program", DefaultValueHandling = DefaultValueHandling.Include)]
public string? DumpingProgram { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_dumping_date", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_dumping_date", DefaultValueHandling = DefaultValueHandling.Include)]
public string? DumpingDate { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_drive_manufacturer", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_dumping_params", DefaultValueHandling = DefaultValueHandling.Include)]
public string? DumpingParameters { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_drive_manufacturer", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Manufacturer { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_drive_model", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_drive_model", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Model { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_drive_firmware", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_drive_firmware", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Firmware { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_reported_disc_type", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_reported_disc_type", DefaultValueHandling = DefaultValueHandling.Include)]
public string? ReportedDiscType { get; set; }
// Name not defined by Redump -- Only used with Redumper
@@ -569,6 +612,7 @@ namespace SabreTools.RedumpLib.Data
FrontendVersion = this.FrontendVersion,
DumpingProgram = this.DumpingProgram,
DumpingDate = this.DumpingDate,
DumpingParameters = this.DumpingParameters,
Manufacturer = this.Manufacturer,
Model = this.Model,
Firmware = this.Firmware,

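The switch from Required.AllowNull to DefaultValueHandling.Include above relaxes deserialization (a missing key is no longer a validation error) while still writing those members out even when they are null. A minimal sketch of the effect, assuming default serializer settings:

using Newtonsoft.Json;
using SabreTools.RedumpLib.Data;

public static class AttributeChangeSketch
{
    public static void Main()
    {
        // "d_title" and the other Include-marked keys are still emitted as null
        var section = new CommonDiscInfoSection { Title = null };
        string json = JsonConvert.SerializeObject(section, Formatting.Indented);

        // An input document that omits those keys now deserializes without throwing,
        // where Required.AllowNull previously demanded that each key be present
        var roundTrip = JsonConvert.DeserializeObject<CommonDiscInfoSection>("{}");
    }
}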

@@ -38,6 +38,7 @@
public const string FrontendVersionField = "Frontend Version";
public const string DumpingProgramField = "Dumping Program";
public const string DumpingDateField = "Date";
public const string DumpingParametersField = "Parameters";
public const string DumpingDriveManufacturer = "Manufacturer";
public const string DumpingDriveModel = "Model";
public const string DumpingDriveFirmware = "Firmware";


@@ -0,0 +1,187 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
using SabreTools.RedumpLib.Web;
namespace SabreTools.RedumpLib
{
/// <summary>
/// Contains logic for dealing with downloads
/// </summary>
public class Downloader
{
#region Properties
/// <summary>
/// Which Redump feature is being used
/// </summary>
public Feature Feature { get; set; }
/// <summary>
/// Minimum ID for downloading page information (Feature.Site, Feature.WIP only)
/// </summary>
public int MinimumId { get; set; }
/// <summary>
/// Maximum ID for downloading page information (Feature.Site, Feature.WIP only)
/// </summary>
public int MaximumId { get; set; }
/// <summary>
/// Quicksearch text for downloading
/// </summary>
public string? QueryString { get; set; }
/// <summary>
/// Directory to save all outputted files to
/// </summary>
public string? OutDir { get; set; }
/// <summary>
/// Use named subfolders for discrete download sets (Feature.Packs only)
/// </summary>
public bool UseSubfolders { get; set; }
/// <summary>
/// Use the last modified page to try to grab all new discs (Feature.Site, Feature.WIP only)
/// </summary>
public bool OnlyNew { get; set; }
/// <summary>
/// Only list the page IDs but don't download
/// </summary>
public bool OnlyList { get; set; }
/// <summary>
/// Don't replace forward slashes with `-` in queries
/// </summary>
public bool NoSlash { get; set; }
/// <summary>
/// Force continuing downloads until user cancels or pages run out
/// </summary>
public bool Force { get; set; }
/// <summary>
/// Redump username
/// </summary>
public string? Username { get; set; }
/// <summary>
/// Redump password
/// </summary>
public string? Password { get; set; }
#endregion
#region Private Vars
/// <summary>
/// Current HTTP client to use
/// </summary>
private readonly RedumpClient _client;
#endregion
/// <summary>
/// Constructor
/// </summary>
public Downloader()
{
_client = new RedumpClient();
}
/// <summary>
/// Constructor
/// </summary>
/// <param name="client">Preconfigured client</param>
public Downloader(RedumpClient client)
{
_client = client;
}
/// <summary>
/// Run the downloads that have been configured
/// </summary>
/// <returns>List of IDs that were processed on success, empty on error</returns>
/// <remarks>Packs will never return anything other than empty</remarks>
public async Task<List<int>> Download()
{
// Login to Redump, if possible
if (!_client.LoggedIn)
await _client.Login(Username ?? string.Empty, Password ?? string.Empty);
// Create output list
List<int> processedIds = [];
switch (Feature)
{
case Feature.Packs:
await Packs.DownloadPacks(_client, OutDir, UseSubfolders);
break;
case Feature.Quicksearch:
processedIds = await ProcessQuicksearch();
break;
case Feature.Site:
processedIds = await ProcessSite();
break;
case Feature.User:
processedIds = await ProcessUser();
break;
case Feature.WIP:
processedIds = await ProcessWIP();
break;
default:
return [];
}
return processedIds;
}
/// <summary>
/// Process the Quicksearch feature
/// </summary>
private async Task<List<int>> ProcessQuicksearch()
{
if (OnlyList)
return await Search.ListSearchResults(_client, QueryString, NoSlash);
else
return await Search.DownloadSearchResults(_client, QueryString, OutDir, NoSlash);
}
/// <summary>
/// Process the Site feature
/// </summary>
private async Task<List<int>> ProcessSite()
{
if (OnlyNew)
return await Discs.DownloadLastModified(_client, OutDir, Force);
else
return await Discs.DownloadSiteRange(_client, OutDir, MinimumId, MaximumId);
}
/// <summary>
/// Process the User feature
/// </summary>
private async Task<List<int>> ProcessUser()
{
if (OnlyList)
return await User.ListUser(_client, Username);
else if (OnlyNew)
return await User.DownloadUserLastModified(_client, Username, OutDir);
else
return await User.DownloadUser(_client, Username, OutDir);
}
/// <summary>
/// Process the WIP feature
/// </summary>
private async Task<List<int>> ProcessWIP()
{
if (OnlyNew)
return await WIP.DownloadLastSubmitted(_client, OutDir);
else
return await WIP.DownloadWIPRange(_client, OutDir, MinimumId, MaximumId);
}
}
}
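A minimal usage sketch for the new Downloader (not part of the file above); the Feature value, ID range, credentials, and output directory are illustrative assumptions.

using System.Threading.Tasks;
using SabreTools.RedumpLib;
using SabreTools.RedumpLib.Data;
using SabreTools.RedumpLib.Web;

public static class DownloaderSketch
{
    public static async Task RunAsync()
    {
        var downloader = new Downloader
        {
            Feature = Feature.Site,
            MinimumId = 1,
            MaximumId = 100,
            OutDir = "redump-pages",
            Username = "user",       // placeholder credentials
            Password = "password",
        };

        // Logs in if needed, then returns the IDs that were processed (empty on error)
        var processedIds = await downloader.Download();
    }
}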


@@ -0,0 +1,9 @@
#if NET20
namespace System.Runtime.CompilerServices
{
[AttributeUsage(AttributeTargets.Assembly | AttributeTargets.Class | AttributeTargets.Method)]
internal sealed class ExtensionAttribute : Attribute {}
}
#endif
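The shim above exists only so that extension-method syntax compiles when targeting .NET Framework 2.0, where System.Core's ExtensionAttribute is unavailable. A small sketch of what it enables; the helper below is illustrative, not from the library.

namespace SabreTools.RedumpLib
{
    internal static class StringCheckSketch
    {
        // Extension syntax like value.IsEmptyOrWhiteSpace() compiles on every target,
        // including net20, because ExtensionAttribute is defined above for that framework
        public static bool IsEmptyOrWhiteSpace(this string? value)
        {
            if (value == null)
                return true;

            foreach (char c in value)
            {
                if (!char.IsWhiteSpace(c))
                    return false;
            }

            return true;
        }
    }
}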


@@ -0,0 +1,791 @@
using System;
using System.Collections.Generic;
using System.Text;
using System.Text.RegularExpressions;
using SabreTools.RedumpLib.Data;
namespace SabreTools.RedumpLib
{
public static class Formatter
{
/// <summary>
/// Ordered set of comment codes for output
/// </summary>
internal static readonly SiteCode[] OrderedCommentCodes =
[
// Submission Info
SiteCode.LogsLink,
// Identifying Info
SiteCode.AlternativeTitle,
SiteCode.AlternativeForeignTitle,
SiteCode.DiscTitleNonLatin,
SiteCode.EditionNonLatin,
SiteCode.InternalName,
SiteCode.InternalSerialName,
SiteCode.VolumeLabel,
SiteCode.HighSierraVolumeDescriptor,
SiteCode.Multisession,
SiteCode.UniversalHash,
SiteCode.RingNonZeroDataStart,
SiteCode.RingPerfectAudioOffset,
SiteCode.XMID,
SiteCode.XeMID,
SiteCode.DMIHash,
SiteCode.PFIHash,
SiteCode.SSHash,
SiteCode.SSVersion,
SiteCode.Filename,
SiteCode.Protection,
SiteCode.BBFCRegistrationNumber,
SiteCode.DiscHologramID,
SiteCode.DNASDiscID,
SiteCode.ISBN,
SiteCode.ISSN,
SiteCode.PPN,
SiteCode.VFCCode,
SiteCode.CompatibleOS,
SiteCode.Genre,
SiteCode.Series,
SiteCode.PostgapType,
SiteCode.VCD,
// Publisher / Company IDs
SiteCode.AcclaimID,
SiteCode.ActivisionID,
SiteCode.BandaiID,
SiteCode.BethesdaID,
SiteCode.CDProjektID,
SiteCode.EidosID,
SiteCode.ElectronicArtsID,
SiteCode.FoxInteractiveID,
SiteCode.GTInteractiveID,
SiteCode.InterplayID,
SiteCode.JASRACID,
SiteCode.KingRecordsID,
SiteCode.KoeiID,
SiteCode.KonamiID,
SiteCode.LucasArtsID,
SiteCode.MicrosoftID,
SiteCode.NaganoID,
SiteCode.NamcoID,
SiteCode.NipponIchiSoftwareID,
SiteCode.OriginID,
SiteCode.PonyCanyonID,
SiteCode.SegaID,
SiteCode.SelenID,
SiteCode.SierraID,
SiteCode.TaitoID,
SiteCode.UbisoftID,
SiteCode.ValveID,
// Standardized Comments
SiteCode.PCMacHybrid,
];
/// <summary>
/// Ordered set of content codes for output
/// </summary>
internal static readonly SiteCode[] OrderedContentCodes =
[
// Applications
SiteCode.Applications,
// Games
SiteCode.Games,
SiteCode.NetYarozeGames,
// Demos
SiteCode.PlayableDemos,
SiteCode.RollingDemos,
SiteCode.TechDemos,
// Video
SiteCode.GameFootage,
SiteCode.Videos,
// Miscellaneous
SiteCode.Patches,
SiteCode.Savegames,
SiteCode.Extras,
];
/// <summary>
/// Format the output data in a human readable way
/// </summary>
/// <param name="info">Information object that should contain normalized values</param>
/// <param name="enableRedumpCompatibility">True to enable Redump compatibility, false otherwise</param>
/// <returns>String representing each line of an output file, null on error</returns>
public static string? FormatOutputData(SubmissionInfo? info, bool enableRedumpCompatibility, out string? status)
{
// Check to see if the inputs are valid
if (info == null)
{
status = "Submission information was missing";
return null;
}
try
{
// Create the string builder for output
var output = new StringBuilder();
// Preamble for submission
output.AppendLine("Users who wish to submit this information to Redump must ensure that all of the fields below are accurate for the exact media they have.");
output.AppendLine("Please double-check to ensure that there are no fields that need verification, such as the version or copy protection.");
output.AppendLine("If there are no fields in need of verification or all fields are accurate, this preamble can be removed before submission.");
output.AppendLine();
// Common Disc Info section
FormatOutputData(output,
info.CommonDiscInfo,
info.SizeAndChecksums,
info.TracksAndWriteOffsets,
info.FullyMatchedID,
info.PartiallyMatchedIDs);
output.AppendLine();
// Version and Editions section
FormatOutputData(output, info.VersionAndEditions);
output.AppendLine();
// EDC section
FormatOutputData(output, info.EDC, info.CommonDiscInfo?.System);
output.AppendLine();
// Extras section
FormatOutputData(output, info.Extras);
output.AppendLine();
// Copy Protection section
FormatOutputData(output, info.CopyProtection, info.CommonDiscInfo?.System);
output.AppendLine();
// Tracks and Write Offsets section
if (!string.IsNullOrEmpty(info.TracksAndWriteOffsets?.ClrMameProData))
{
FormatOutputData(output, info.TracksAndWriteOffsets!);
output.AppendLine();
}
// Size & Checksum section
else
{
FormatOutputData(output,
info.SizeAndChecksums,
info.CommonDiscInfo?.Media.ToMediaType(),
info.CommonDiscInfo?.System,
enableRedumpCompatibility);
output.AppendLine();
}
// Dumping Info section
FormatOutputData(output, info.DumpingInfo);
status = "Formatting complete!";
// Make sure there aren't any instances of two blank lines in a row
return RemoveConsecutiveEmptyLines(output.ToString());
}
catch (Exception ex)
{
status = $"Error formatting submission info: {ex}";
return null;
}
}
/// <summary>
/// Process any fields that have to be combined
/// </summary>
/// <param name="info">Information object to normalize</param>
public static void ProcessSpecialFields(SubmissionInfo info)
{
// If there is no submission info
if (info?.CommonDiscInfo == null)
return;
// Process the comments field
if (info.CommonDiscInfo.CommentsSpecialFields != null && info.CommonDiscInfo.CommentsSpecialFields.Count > 0)
{
// If the field is missing, add an empty one to fill in
info.CommonDiscInfo.Comments ??= string.Empty;
// Add all special fields before any comments
var orderedTags = OrderCommentTags(info.CommonDiscInfo.CommentsSpecialFields);
var formattedTags = Array.ConvertAll(orderedTags, kvp => FormatSiteTag(kvp.Key, kvp.Value));
info.CommonDiscInfo.Comments = string.Join("\n", formattedTags) + "\n" + info.CommonDiscInfo.Comments;
// Normalize the assembled string
info.CommonDiscInfo.Comments = info.CommonDiscInfo.Comments.Replace("\r\n", "\n");
info.CommonDiscInfo.Comments = info.CommonDiscInfo.Comments.Replace("\n\n", "\n");
info.CommonDiscInfo.Comments = info.CommonDiscInfo.Comments.Trim();
// Wipe out the special fields dictionary
info.CommonDiscInfo.CommentsSpecialFields = null;
}
// Process the contents field
if (info.CommonDiscInfo.ContentsSpecialFields != null && info.CommonDiscInfo.ContentsSpecialFields.Count > 0)
{
// If the field is missing, add an empty one to fill in
info.CommonDiscInfo.Contents ??= string.Empty;
// Add all special fields before any contents
var orderedTags = OrderContentTags(info.CommonDiscInfo.ContentsSpecialFields);
var formattedTags = Array.ConvertAll(orderedTags, kvp => FormatSiteTag(kvp.Key, kvp.Value));
info.CommonDiscInfo.Contents = string.Join("\n", formattedTags) + "\n" + info.CommonDiscInfo.Contents;
// Normalize the assembled string
info.CommonDiscInfo.Contents = info.CommonDiscInfo.Contents.Replace("\r\n", "\n");
info.CommonDiscInfo.Contents = info.CommonDiscInfo.Contents.Replace("\n\n", "\n");
info.CommonDiscInfo.Contents = info.CommonDiscInfo.Contents.Trim();
// Wipe out the special fields dictionary
info.CommonDiscInfo.ContentsSpecialFields = null;
}
}
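// Usage sketch (illustrative, not part of this file): callers typically run
// ProcessSpecialFields(info) first so the pseudo-tags are folded into the comments and
// contents, then call FormatOutputData(info, enableRedumpCompatibility, out string? status)
// and treat a null return as an error described by status.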
/// <summary>
/// Format a CommonDiscInfoSection
/// </summary>
internal static void FormatOutputData(StringBuilder output,
CommonDiscInfoSection? section,
SizeAndChecksumsSection? sac,
TracksAndWriteOffsetsSection? tawo,
int? fullyMatchedID,
List<int>? partiallyMatchedIDs)
{
// Sony-printed discs have layers in the opposite order
var system = section?.System;
bool reverseOrder = system.HasReversedRingcodes();
output.AppendLine("Common Disc Info:");
AddIfExists(output, Template.TitleField, section?.Title, 1);
AddIfExists(output, Template.ForeignTitleField, section?.ForeignTitleNonLatin, 1);
AddIfExists(output, Template.DiscNumberField, section?.DiscNumberLetter, 1);
AddIfExists(output, Template.DiscTitleField, section?.DiscTitle, 1);
AddIfExists(output, Template.SystemField, section?.System.LongName(), 1);
AddIfExists(output, Template.MediaTypeField, GetFixedMediaType(
section?.Media.ToMediaType(),
sac?.PICIdentifier,
sac?.Size,
sac?.Layerbreak,
sac?.Layerbreak2,
sac?.Layerbreak3),
1);
AddIfExists(output, Template.CategoryField, section?.Category.LongName(), 1);
AddIfExists(output, Template.FullyMatchingIDField, fullyMatchedID?.ToString(), 1);
AddIfExists(output, Template.PartiallyMatchingIDsField, partiallyMatchedIDs, 1);
AddIfExists(output, Template.RegionField, section?.Region.LongName() ?? "SPACE! (CHANGE THIS)", 1);
AddIfExists(output, Template.LanguagesField,
Array.ConvertAll(section?.Languages ?? [null], l => l.LongName() ?? "ADD LANGUAGES HERE (ONLY IF YOU TESTED)"), 1);
AddIfExists(output, Template.PlaystationLanguageSelectionViaField,
Array.ConvertAll(section?.LanguageSelection ?? [], l => l.LongName()), 1);
AddIfExists(output, Template.DiscSerialField, section?.Serial, 1);
output.AppendLine();
// All ringcode information goes in an indented area
output.AppendLine("\tRingcode Information:");
output.AppendLine();
// If we have a quad-layer disc
if (sac?.Layerbreak3 != default && sac?.Layerbreak3 != default(long))
{
AddIfExists(output, (reverseOrder ? "Layer 0 (Outer) " : "Layer 0 (Inner) ") + Template.MasteringRingField, section?.Layer0MasteringRing, 0);
AddIfExists(output, (reverseOrder ? "Layer 0 (Outer) " : "Layer 0 (Inner) ") + Template.MasteringSIDField, section?.Layer0MasteringSID, 0);
AddIfExists(output, (reverseOrder ? "Layer 0 (Outer) " : "Layer 0 (Inner) ") + Template.ToolstampField, section?.Layer0ToolstampMasteringCode, 0);
AddIfExists(output, "Data Side " + Template.MouldSIDField, section?.Layer0MouldSID, 0);
AddIfExists(output, "Data Side " + Template.AdditionalMouldField, section?.Layer0AdditionalMould, 0);
AddIfExists(output, "Layer 1 " + Template.MasteringRingField, section?.Layer1MasteringRing, 0);
AddIfExists(output, "Layer 1 " + Template.MasteringSIDField, section?.Layer1MasteringSID, 0);
AddIfExists(output, "Layer 1 " + Template.ToolstampField, section?.Layer1ToolstampMasteringCode, 0);
AddIfExists(output, "Label Side " + Template.MouldSIDField, section?.Layer1MouldSID, 0);
AddIfExists(output, "Label Side " + Template.AdditionalMouldField, section?.Layer1AdditionalMould, 0);
AddIfExists(output, "Layer 2 " + Template.MasteringRingField, section?.Layer2MasteringRing, 0);
AddIfExists(output, "Layer 2 " + Template.MasteringSIDField, section?.Layer2MasteringSID, 0);
AddIfExists(output, "Layer 2 " + Template.ToolstampField, section?.Layer2ToolstampMasteringCode, 0);
AddIfExists(output, (reverseOrder ? "Layer 3 (Inner) " : "Layer 3 (Outer) ") + Template.MasteringRingField, section?.Layer3MasteringRing, 0);
AddIfExists(output, (reverseOrder ? "Layer 3 (Inner) " : "Layer 3 (Outer) ") + Template.MasteringSIDField, section?.Layer3MasteringSID, 0);
AddIfExists(output, (reverseOrder ? "Layer 3 (Inner) " : "Layer 3 (Outer) ") + Template.ToolstampField, section?.Layer3ToolstampMasteringCode, 0);
}
// If we have a triple-layer disc
else if (sac?.Layerbreak2 != default && sac?.Layerbreak2 != default(long))
{
AddIfExists(output, (reverseOrder ? "Layer 0 (Outer) " : "Layer 0 (Inner) ") + Template.MasteringRingField, section?.Layer0MasteringRing, 0);
AddIfExists(output, (reverseOrder ? "Layer 0 (Outer) " : "Layer 0 (Inner) ") + Template.MasteringSIDField, section?.Layer0MasteringSID, 0);
AddIfExists(output, (reverseOrder ? "Layer 0 (Outer) " : "Layer 0 (Inner) ") + Template.ToolstampField, section?.Layer0ToolstampMasteringCode, 0);
AddIfExists(output, "Data Side " + Template.MouldSIDField, section?.Layer0MouldSID, 0);
AddIfExists(output, "Data Side " + Template.AdditionalMouldField, section?.Layer0AdditionalMould, 0);
AddIfExists(output, "Layer 1 " + Template.MasteringRingField, section?.Layer1MasteringRing, 0);
AddIfExists(output, "Layer 1 " + Template.MasteringSIDField, section?.Layer1MasteringSID, 0);
AddIfExists(output, "Layer 1 " + Template.ToolstampField, section?.Layer1ToolstampMasteringCode, 0);
AddIfExists(output, "Label Side " + Template.MouldSIDField, section?.Layer1MouldSID, 0);
AddIfExists(output, "Label Side " + Template.AdditionalMouldField, section?.Layer1AdditionalMould, 0);
AddIfExists(output, (reverseOrder ? "Layer 2 (Inner) " : "Layer 2 (Outer) ") + Template.MasteringRingField, section?.Layer2MasteringRing, 0);
AddIfExists(output, (reverseOrder ? "Layer 2 (Inner) " : "Layer 2 (Outer) ") + Template.MasteringSIDField, section?.Layer2MasteringSID, 0);
AddIfExists(output, (reverseOrder ? "Layer 2 (Inner) " : "Layer 2 (Outer) ") + Template.ToolstampField, section?.Layer2ToolstampMasteringCode, 0);
}
// If we have a dual-layer disc
else if (sac?.Layerbreak != default && sac?.Layerbreak != default(long))
{
AddIfExists(output, (reverseOrder ? "Layer 0 (Outer) " : "Layer 0 (Inner) ") + Template.MasteringRingField, section?.Layer0MasteringRing, 0);
AddIfExists(output, (reverseOrder ? "Layer 0 (Outer) " : "Layer 0 (Inner) ") + Template.MasteringSIDField, section?.Layer0MasteringSID, 0);
AddIfExists(output, (reverseOrder ? "Layer 0 (Outer) " : "Layer 0 (Inner) ") + Template.ToolstampField, section?.Layer0ToolstampMasteringCode, 0);
AddIfExists(output, "Data Side " + Template.MouldSIDField, section?.Layer0MouldSID, 0);
AddIfExists(output, "Data Side " + Template.AdditionalMouldField, section?.Layer0AdditionalMould, 0);
AddIfExists(output, (reverseOrder ? "Layer 1 (Inner) " : "Layer 1 (Outer) ") + Template.MasteringRingField, section?.Layer1MasteringRing, 0);
AddIfExists(output, (reverseOrder ? "Layer 1 (Inner) " : "Layer 1 (Outer) ") + Template.MasteringSIDField, section?.Layer1MasteringSID, 0);
AddIfExists(output, (reverseOrder ? "Layer 1 (Inner) " : "Layer 1 (Outer) ") + Template.ToolstampField, section?.Layer1ToolstampMasteringCode, 0);
AddIfExists(output, "Label Side " + Template.MouldSIDField, section?.Layer1MouldSID, 0);
AddIfExists(output, "Label Side " + Template.AdditionalMouldField, section?.Layer1AdditionalMould, 0);
}
// If we have a single-layer disc
else
{
AddIfExists(output, "Data Side " + Template.MasteringRingField, section?.Layer0MasteringRing, 0);
AddIfExists(output, "Data Side " + Template.MasteringSIDField, section?.Layer0MasteringSID, 0);
AddIfExists(output, "Data Side " + Template.ToolstampField, section?.Layer0ToolstampMasteringCode, 0);
AddIfExists(output, "Data Side " + Template.MouldSIDField, section?.Layer0MouldSID, 0);
AddIfExists(output, "Data Side " + Template.AdditionalMouldField, section?.Layer0AdditionalMould, 0);
AddIfExists(output, "Label Side " + Template.MasteringRingField, section?.Layer1MasteringRing, 0);
AddIfExists(output, "Label Side " + Template.MasteringSIDField, section?.Layer1MasteringSID, 0);
AddIfExists(output, "Label Side " + Template.ToolstampField, section?.Layer1ToolstampMasteringCode, 0);
AddIfExists(output, "Label Side " + Template.MouldSIDField, section?.Layer1MouldSID, 0);
AddIfExists(output, "Label Side " + Template.AdditionalMouldField, section?.Layer1AdditionalMould, 0);
}
var offset = tawo?.OtherWriteOffsets;
if (int.TryParse(offset, out int i))
offset = i.ToString("+#;-#;0");
AddIfExists(output, Template.WriteOffsetField, offset, 0);
output.AppendLine();
AddIfExists(output, Template.BarcodeField, section?.Barcode, 1);
AddIfExists(output, Template.EXEDateBuildDate, section?.EXEDateBuildDate, 1);
AddIfExists(output, Template.ErrorCountField, section?.ErrorsCount, 1);
AddIfExists(output, Template.CommentsField, section?.Comments?.Trim(), 1);
AddIfExists(output, Template.ContentsField, section?.Contents?.Trim(), 1);
}
/// <summary>
/// Format a VersionAndEditionsSection
/// </summary>
internal static void FormatOutputData(StringBuilder output, VersionAndEditionsSection? section)
{
output.AppendLine("Version and Editions:");
AddIfExists(output, Template.VersionField, section?.Version, 1);
AddIfExists(output, Template.EditionField, section?.OtherEditions, 1);
}
/// <summary>
/// Format a EDCSection
/// </summary>
internal static void FormatOutputData(StringBuilder output, EDCSection? section, RedumpSystem? system)
{
// Check the section can be added
if (system != RedumpSystem.SonyPlayStation)
return;
output.AppendLine("EDC:");
AddIfExists(output, Template.PlayStationEDCField, section?.EDC.LongName(), 1);
}
/// <summary>
/// Format an ExtrasSection
/// </summary>
internal static void FormatOutputData(StringBuilder output, ExtrasSection? section)
{
// Optional sections have to exist to format
if (section == null)
return;
// Check the section can be added
if (section.PVD == null
&& section.PIC == null
&& section.BCA == null
&& section.SecuritySectorRanges == null)
{
return;
}
output.AppendLine("Extras:");
AddIfExists(output, Template.PVDField, section.PVD?.Trim(), 1);
AddIfExists(output, Template.PlayStation3WiiDiscKeyField, section.DiscKey, 1);
AddIfExists(output, Template.PlayStation3DiscIDField, section.DiscID, 1);
AddIfExists(output, Template.PICField, section.PIC, 1);
AddIfExists(output, Template.HeaderField, section.Header, 1);
AddIfExists(output, Template.GameCubeWiiBCAField, section.BCA, 1);
AddIfExists(output, Template.XBOXSSRanges, section.SecuritySectorRanges, 1);
}
/// <summary>
/// Format a CopyProtectionSection
/// </summary>
internal static void FormatOutputData(StringBuilder output,
CopyProtectionSection? section,
RedumpSystem? system)
{
// Optional sections have to exist to format
if (section == null)
return;
// Check the section can be added
if (string.IsNullOrEmpty(section.Protection)
&& (section.AntiModchip == null || section.AntiModchip == YesNo.NULL)
&& (section.LibCrypt == null || section.LibCrypt == YesNo.NULL)
&& string.IsNullOrEmpty(section.LibCryptData)
&& string.IsNullOrEmpty(section.SecuROMData))
{
return;
}
output.AppendLine("Copy Protection:");
if (system == RedumpSystem.SonyPlayStation)
{
AddIfExists(output, Template.PlayStationAntiModchipField, section.AntiModchip.LongName(), 1);
AddIfExists(output, Template.PlayStationLibCryptField, section.LibCrypt.LongName(), 1);
AddIfExists(output, Template.SubIntentionField, section.LibCryptData, 1);
}
AddIfExists(output, Template.CopyProtectionField, section.Protection, 1);
AddIfExists(output, Template.SubIntentionField, section.SecuROMData, 1);
}
/// <summary>
/// Format a TracksAndWriteOffsetsSection
/// </summary>
internal static void FormatOutputData(StringBuilder output, TracksAndWriteOffsetsSection section)
{
output.AppendLine("Tracks and Write Offsets:");
AddIfExists(output, Template.DATField, section.ClrMameProData + "\n", 1);
AddIfExists(output, Template.CuesheetField, section.Cuesheet, 1);
var offset = section.OtherWriteOffsets;
if (int.TryParse(offset, out int i))
offset = i.ToString("+#;-#;0");
AddIfExists(output, Template.WriteOffsetField, offset, 1);
}
/// <summary>
/// Format a SizeAndChecksumsSection
/// </summary>
internal static void FormatOutputData(StringBuilder output,
SizeAndChecksumsSection? section,
MediaType? mediaType,
RedumpSystem? system,
bool enableRedumpCompatibility)
{
output.AppendLine("Size & Checksum:");
// Gross hack because of automatic layerbreaks in Redump
if (!enableRedumpCompatibility
|| (mediaType != MediaType.BluRay && system.IsXGD() == false))
{
AddIfExists(output, Template.LayerbreakField, section?.Layerbreak, 1);
}
AddIfExists(output, Template.SizeField, section?.Size.ToString(), 1);
AddIfExists(output, Template.CRC32Field, section?.CRC32, 1);
AddIfExists(output, Template.MD5Field, section?.MD5, 1);
AddIfExists(output, Template.SHA1Field, section?.SHA1, 1);
}
/// <summary>
/// Format a DumpingInfoSection
/// </summary>
internal static void FormatOutputData(StringBuilder output, DumpingInfoSection? section)
{
output.AppendLine("Dumping Info:");
AddIfExists(output, Template.FrontendVersionField, section?.FrontendVersion, 1);
AddIfExists(output, Template.DumpingProgramField, section?.DumpingProgram, 1);
AddIfExists(output, Template.DumpingDateField, section?.DumpingDate, 1);
AddIfExists(output, Template.DumpingParametersField, section?.DumpingParameters, 1);
AddIfExists(output, Template.DumpingDriveManufacturer, section?.Manufacturer, 1);
AddIfExists(output, Template.DumpingDriveModel, section?.Model, 1);
AddIfExists(output, Template.DumpingDriveFirmware, section?.Firmware, 1);
AddIfExists(output, Template.ReportedDiscType, section?.ReportedDiscType, 1);
AddIfExists(output, Template.C2ErrorCountField, section?.C2ErrorsCount, 1);
}
#region Helpers
/// <summary>
/// Add the properly formatted key and value, if possible
/// </summary>
/// <param name="output">String builder representing the output</param>
/// <param name="key">Name of the output key to write</param>
/// <param name="value">Name of the output value to write</param>
/// <param name="indent">Number of tabs to indent the line</param>
private static void AddIfExists(StringBuilder output, string key, string? value, int indent)
{
// If there's no valid value to write
if (value == null)
return;
string prefix = string.Empty;
for (int i = 0; i < indent; i++)
prefix += "\t";
// Skip fields that need to keep internal whitespace intact
if (key != "Primary Volume Descriptor (PVD)"
&& key != "Header"
&& key != "Cuesheet")
{
// Convert to tabs
#if NETCOREAPP
value = value.Replace("<tab>", "\t", StringComparison.OrdinalIgnoreCase);
#else
value = value.Replace("<tab>", "\t");
value = value.Replace("<TAB>", "\t");
value = value.Replace("<Tab>", "\t");
#endif
value = value.Replace("    ", "\t");
// Sanitize whitespace around tabs
value = Regex.Replace(value, @"\s*\t\s*", "\t", RegexOptions.Compiled);
}
// If the value contains a newline
value = value.Replace("\r\n", "\n");
if (value.Contains("\n"))
{
output.AppendLine(prefix + key + ":");
output.AppendLine();
string[] values = value.Split('\n');
foreach (string val in values)
output.AppendLine(val);
output.AppendLine();
}
// For all regular values
else
{
output.AppendLine(prefix + key + ": " + value);
}
}
/// <summary>
/// Add the properly formatted key and value, if possible
/// </summary>
/// <param name="output">String builder representing the output</param>
/// <param name="key">Name of the output key to write</param>
/// <param name="value">Name of the output value to write</param>
/// <param name="indent">Number of tabs to indent the line</param>
private static void AddIfExists(StringBuilder output, string key, string?[]? value, int indent)
{
// If there's no valid value to write
if (value == null || value.Length == 0)
return;
AddIfExists(output, key, string.Join(", ", value), indent);
}
/// <summary>
/// Add the properly formatted key and value, if possible
/// </summary>
/// <param name="output">String builder representing the output</param>
/// <param name="key">Name of the output key to write</param>
/// <param name="value">Name of the output value to write</param>
/// <param name="indent">Number of tabs to indent the line</param>
private static void AddIfExists(StringBuilder output, string key, long? value, int indent)
{
// If there's no valid value to write
if (value == null || value == default(long))
return;
string prefix = string.Empty;
for (int i = 0; i < indent; i++)
prefix += "\t";
output.AppendLine(prefix + key + ": " + value);
}
/// <summary>
/// Add the properly formatted key and value, if possible
/// </summary>
/// <param name="output">String builder representing the output</param>
/// <param name="key">Name of the output key to write</param>
/// <param name="value">Name of the output value to write</param>
/// <param name="indent">Number of tabs to indent the line</param>
private static void AddIfExists(StringBuilder output, string key, List<int>? value, int indent)
{
// If there's no valid value to write
if (value == null || value.Count == 0)
return;
AddIfExists(output, key, string.Join(", ", [.. value.ConvertAll(o => o.ToString())]), indent);
}
/// <summary>
/// Format a single site tag to string
/// </summary>
/// <param name="code">Site tag to format</param>
/// <param name="value">String value to use</param>
/// <returns>String-formatted tag and value</returns>
internal static string FormatSiteTag(SiteCode code, string value)
{
// Do not format empty tags
if (value.Length == 0)
return string.Empty;
bool isMultiLine = code.IsMultiLine();
string line = $"{code.ShortName()}{(isMultiLine ? "\n" : " ")}";
// Special case for boolean fields
if (code.IsBoolean())
{
if (value != true.ToString())
return string.Empty;
return line.Trim();
}
return $"{line}{value}{(isMultiLine ? "\n" : string.Empty)}";
}
/// <summary>
/// Get the adjusted name of the media based on layers, if applicable
/// </summary>
/// <param name="mediaType">MediaType to get the proper name for</param>
/// <param name="picIdentifier">PIC identifier string (BD only)</param>
/// <param name="size">Size of the current media</param>
/// <param name="layerbreak">First layerbreak value, as applicable</param>
/// <param name="layerbreak2">Second layerbreak value, as applicable</param>
/// <param name="layerbreak3">Third layerbreak value, as applicable</param>
/// <returns>String representation of the media, including layer specification</returns>
internal static string? GetFixedMediaType(MediaType? mediaType, string? picIdentifier, long? size, long? layerbreak, long? layerbreak2, long? layerbreak3)
{
switch (mediaType)
{
case MediaType.DVD:
if (layerbreak != default && layerbreak != default(long))
return $"{mediaType.LongName()}-9";
else
return $"{mediaType.LongName()}-5";
case MediaType.BluRay:
if (layerbreak3 != default && layerbreak3 != default(long))
return $"{mediaType.LongName()}-128";
else if (layerbreak2 != default && layerbreak2 != default(long))
return $"{mediaType.LongName()}-100";
else if (layerbreak != default && layerbreak != default(long) && picIdentifier == Models.PIC.Constants.DiscTypeIdentifierROMUltra)
return $"{mediaType.LongName()}-66";
else if (layerbreak != default && layerbreak != default(long) && size > 53_687_063_712)
return $"{mediaType.LongName()}-66";
else if (layerbreak != default && layerbreak != default(long))
return $"{mediaType.LongName()}-50";
else if (picIdentifier == Models.PIC.Constants.DiscTypeIdentifierROMUltra)
return $"{mediaType.LongName()}-33";
else if (size > 26_843_531_856)
return $"{mediaType.LongName()}-33";
else
return $"{mediaType.LongName()}-25";
case MediaType.HDDVD:
if (layerbreak != default && layerbreak != default(long))
return $"{mediaType.LongName()}-DL";
else
return $"{mediaType.LongName()}-SL";
case MediaType.UMD:
if (layerbreak != default && layerbreak != default(long))
return $"{mediaType.LongName()}-DL";
else
return $"{mediaType.LongName()}-SL";
default:
return mediaType.LongName();
}
}
/// <summary>
/// Order comment code tags according to Redump requirements
/// </summary>
/// <returns>Ordered list of KeyValuePairs representing the tags and values</returns>
internal static KeyValuePair<SiteCode, string>[] OrderCommentTags(Dictionary<SiteCode, string> tags)
{
// If the input is invalid, just return an empty set
if (tags == null || tags.Count == 0)
return [];
// Loop through the ordered set of codes and add if needed
var sorted = new List<KeyValuePair<SiteCode, string>>();
foreach (var code in OrderedCommentCodes)
{
// Only add if it exists
if (!tags.ContainsKey(code))
continue;
// Get the tag value
string value = tags[code];
if (value.Length == 0)
continue;
// Add to the set
sorted.Add(new KeyValuePair<SiteCode, string>(code, value));
}
return [.. sorted];
}
/// <summary>
/// Order content code tags according to Redump requirements
/// </summary>
/// <returns>Ordered list of KeyValuePairs representing the tags and values</returns>
internal static KeyValuePair<SiteCode, string>[] OrderContentTags(Dictionary<SiteCode, string> tags)
{
// If the input is invalid, just return an empty set
if (tags == null || tags.Count == 0)
return [];
// Loop through the ordered set of codes and add if needed
var sorted = new List<KeyValuePair<SiteCode, string>>();
foreach (var code in OrderedContentCodes)
{
// Only add if it exists
if (!tags.ContainsKey(code))
continue;
// Get the tag value
string value = tags[code];
if (value.Length == 0)
continue;
// Add to the set
sorted.Add(new KeyValuePair<SiteCode, string>(code, value));
}
return [.. sorted];
}
/// <summary>
/// Make sure there aren't any instances of two blank lines in a row
/// </summary>
internal static string RemoveConsecutiveEmptyLines(string str)
{
str = Regex.Replace(str, @"(\r\n){2,}", "\r\n\r\n");
return Regex.Replace(str, @"(\n){2,}", "\n\n");
}
#endregion
}
}
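Since the tab handling inside AddIfExists above is the least obvious part of the formatter, here is a minimal standalone sketch (illustrative only; the sample value and its pseudo-markup are made up) that reproduces the non-NETCOREAPP replacement path and the whitespace sanitizing, assuming the four-spaces-to-tab step shown above:

using System;
using System.Text.RegularExpressions;

class TabNormalizationSketch
{
    static void Main()
    {
        // Hypothetical submission value containing the "<tab>" pseudo-markup
        string value = "Internal Serial: <tab> SLUS-12345  <TAB> v1.1";

        // Non-NETCOREAPP path: each casing variant of "<tab>" becomes a real tab
        value = value.Replace("<tab>", "\t");
        value = value.Replace("<TAB>", "\t");
        value = value.Replace("<Tab>", "\t");

        // Four literal spaces are also treated as a tab
        value = value.Replace("    ", "\t");

        // Any whitespace hugging a tab collapses into the tab itself
        value = Regex.Replace(value, @"\s*\t\s*", "\t");

        // Show tabs explicitly; prints "Internal Serial:[TAB]SLUS-12345[TAB]v1.1"
        Console.WriteLine(value.Replace("\t", "[TAB]"));
    }
}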

View File

@@ -3,7 +3,6 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Text;
namespace SabreTools.RedumpLib
@@ -40,8 +39,13 @@ namespace SabreTools.RedumpLib
new StringBuilder(256) :
new StringBuilder(value.Length);
sb.Append(valueSpan.Take(index).ToArray());
HtmlDecode(valueSpan.Skip(index).ToArray(), ref sb);
char[] take = new char[index];
Array.Copy(valueSpan, take, index);
sb.Append(take);
char[] skip = new char[valueSpan.Length - index];
Array.Copy(valueSpan, index, skip, 0, skip.Length);
HtmlDecode(skip, ref sb);
return sb.ToString();
}
@@ -57,7 +61,8 @@ namespace SabreTools.RedumpLib
// We found a '&'. Now look for the next ';' or '&'. The idea is that
// if we find another '&' before finding a ';', then this is not an entity,
// and the next '&' might start a real entity (VSWhidbey 275184)
char[] inputSlice = input.Skip(i + 1).ToArray();
char[] inputSlice = new char[input.Length - (i + 1)];
Array.Copy(input, i + 1, inputSlice, 0, inputSlice.Length);
int semicolonPos = Array.IndexOf(inputSlice, ';');
int ampersandPos = Array.IndexOf(inputSlice, '&');
@@ -81,9 +86,13 @@ namespace SabreTools.RedumpLib
// &#xE5; --> same char in hex
// See http://www.w3.org/TR/REC-html40/charset.html#entities
int offset = inputSlice[1] == 'x' || inputSlice[1] == 'X' ? 2 : 1;
char[] inputSliceNoPrefix = new char[entityLength - offset];
Array.Copy(inputSlice, offset, inputSliceNoPrefix, 0, inputSliceNoPrefix.Length);
bool parsedSuccessfully = inputSlice[1] == 'x' || inputSlice[1] == 'X'
? uint.TryParse(new string(inputSlice.Skip(2).Take(entityLength - 2).ToArray()), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out uint parsedValue)
: uint.TryParse(new string(inputSlice.Skip(1).Take(entityLength - 1).ToArray()), NumberStyles.Integer, CultureInfo.InvariantCulture, out parsedValue);
? uint.TryParse(new string(inputSliceNoPrefix), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out uint parsedValue)
: uint.TryParse(new string(inputSliceNoPrefix), NumberStyles.Integer, CultureInfo.InvariantCulture, out parsedValue);
if (parsedSuccessfully)
{
@@ -112,7 +121,8 @@ namespace SabreTools.RedumpLib
}
else
{
char[] entity = inputSlice.Take(entityLength).ToArray();
char[] entity = new char[entityLength];
Array.Copy(inputSlice, entity, entityLength);
i = entityEndPosition; // already looked at everything until semicolon
char entityChar = HtmlEntities.Lookup(entity);
@@ -414,7 +424,10 @@ namespace SabreTools.RedumpLib
ulong key = BitConverter.ToUInt64(tableData, 0);
char value = (char)BitConverter.ToUInt16(tableData, sizeof(ulong));
dictionary[key] = value;
tableData = tableData.Skip((sizeof(ulong) + sizeof(char))).ToArray();
byte[] tempTableData = new byte[tableData.Length - (sizeof(ulong) + sizeof(char))];
Array.Copy(tableData, (sizeof(ulong) + sizeof(char)), tempTableData, 0, tempTableData.Length);
tableData = tempTableData;
}
return dictionary;
}
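The hunks above swap LINQ Skip/Take slicing for manual Array.Copy, apparently so the file no longer needs the System.Linq using on the older target frameworks. A small self-contained sketch of the equivalence (helper names are my own, not the library's):

using System;

class ArraySliceSketch
{
    // Equivalent of source.Skip(offset).ToArray() without LINQ
    static char[] SkipCopy(char[] source, int offset)
    {
        char[] result = new char[source.Length - offset];
        Array.Copy(source, offset, result, 0, result.Length);
        return result;
    }

    // Equivalent of source.Take(count).ToArray() without LINQ
    static char[] TakeCopy(char[] source, int count)
    {
        char[] result = new char[count];
        Array.Copy(source, result, count);
        return result;
    }

    static void Main()
    {
        char[] input = "&#xE5;".ToCharArray();
        Console.WriteLine(new string(SkipCopy(input, 2))); // xE5;
        Console.WriteLine(new string(TakeCopy(input, 2))); // &#
    }
}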

View File

@@ -0,0 +1,40 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<!-- Assembly Properties -->
<TargetFrameworks>net20;net35;net40;net452;net462;net472;net48;netcoreapp3.1;net5.0;net6.0;net7.0;net8.0;net9.0;netstandard2.0;netstandard2.1</TargetFrameworks>
<IncludeSymbols>true</IncludeSymbols>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<SymbolPackageFormat>snupkg</SymbolPackageFormat>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.7.2</Version>
<!-- Package Properties -->
<Authors>Matt Nadareski</Authors>
<Description>Code to interact with redump.org</Description>
<Copyright>Copyright (c) Matt Nadareski 2020-2025</Copyright>
<PackageProjectUrl>https://github.com/SabreTools/</PackageProjectUrl>
<PackageReadmeFile>README.md</PackageReadmeFile>
<RepositoryUrl>https://github.com/SabreTools/SabreTools.RedumpLib</RepositoryUrl>
<RepositoryType>git</RepositoryType>
<PackageTags>web client redump</PackageTags>
<PackageLicenseExpression>MIT</PackageLicenseExpression>
</PropertyGroup>
<ItemGroup>
<InternalsVisibleTo Include="SabreTools.RedumpLib.Test" />
</ItemGroup>
<ItemGroup>
<None Include="../README.md" Pack="true" PackagePath="" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="MinAsyncBridge" Version="0.12.4" Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR $(TargetFramework.StartsWith(`net40`))" />
<PackageReference Include="Net35.Actions" Version="1.4.0" Condition="$(TargetFramework.StartsWith(`net2`))" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.Models" Version="1.7.1" />
</ItemGroup>
</Project>

View File

@@ -1,6 +1,5 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
using SabreTools.RedumpLib.Web;
@@ -22,6 +21,14 @@ namespace SabreTools.RedumpLib
switch (info.CommonDiscInfo.Media)
{
case DiscType.DVD5:
case DiscType.DVD9:
if (info.SizeAndChecksums.Layerbreak != default)
info.CommonDiscInfo.Media = DiscType.DVD9;
else
info.CommonDiscInfo.Media = DiscType.DVD5;
break;
case DiscType.BD25:
case DiscType.BD33:
case DiscType.BD50:
@@ -32,13 +39,13 @@ namespace SabreTools.RedumpLib
info.CommonDiscInfo.Media = DiscType.BD128;
else if (info.SizeAndChecksums.Layerbreak2 != default)
info.CommonDiscInfo.Media = DiscType.BD100;
else if (info.SizeAndChecksums.Layerbreak != default && info.SizeAndChecksums.PICIdentifier == SabreTools.Models.PIC.Constants.DiscTypeIdentifierROMUltra)
else if (info.SizeAndChecksums.Layerbreak != default && info.SizeAndChecksums.PICIdentifier == Models.PIC.Constants.DiscTypeIdentifierROMUltra)
info.CommonDiscInfo.Media = DiscType.BD66;
else if (info.SizeAndChecksums.Layerbreak != default && info.SizeAndChecksums.Size > 50_050_629_632)
info.CommonDiscInfo.Media = DiscType.BD66;
else if (info.SizeAndChecksums.Layerbreak != default)
info.CommonDiscInfo.Media = DiscType.BD50;
else if (info.SizeAndChecksums.PICIdentifier == SabreTools.Models.PIC.Constants.DiscTypeIdentifierROMUltra)
else if (info.SizeAndChecksums.PICIdentifier == Models.PIC.Constants.DiscTypeIdentifierROMUltra)
info.CommonDiscInfo.Media = DiscType.BD33;
else if (info.SizeAndChecksums.Size > 25_025_314_816)
info.CommonDiscInfo.Media = DiscType.BD33;
@@ -46,14 +53,6 @@ namespace SabreTools.RedumpLib
info.CommonDiscInfo.Media = DiscType.BD25;
break;
case DiscType.DVD5:
case DiscType.DVD9:
if (info.SizeAndChecksums.Layerbreak != default)
info.CommonDiscInfo.Media = DiscType.DVD9;
else
info.CommonDiscInfo.Media = DiscType.DVD5;
break;
case DiscType.HDDVDSL:
case DiscType.HDDVDDL:
if (info.SizeAndChecksums.Layerbreak != default)
@@ -79,15 +78,11 @@ namespace SabreTools.RedumpLib
/// <summary>
/// List the disc IDs associated with a given quicksearch query
/// </summary>
/// <param name="wc">RedumpWebClient for making the connection</param>
/// <param name="rc">RedumpClient for making the connection</param>
/// <param name="query">Query string to attempt to search for</param>
/// <param name="filterForwardSlashes">True to filter forward slashes, false otherwise</param>
/// <returns>All disc IDs for the given query, null on error</returns>
#if NETFRAMEWORK
public async static Task<List<int>?> ListSearchResults(RedumpWebClient wc, string? query, bool filterForwardSlashes = true)
#else
public async static Task<List<int>?> ListSearchResults(RedumpHttpClient wc, string? query, bool filterForwardSlashes = true)
#endif
public async static Task<List<int>?> ListSearchResults(RedumpClient rc, string? query, bool filterForwardSlashes = true)
{
// If there is an invalid query
if (string.IsNullOrEmpty(query))
@@ -113,13 +108,7 @@ namespace SabreTools.RedumpLib
int pageNumber = 1;
while (true)
{
#if NET40
List<int> pageIds = await Task.Factory.StartNew(() => wc.CheckSingleSitePage(string.Format(Constants.QuickSearchUrl, query, pageNumber++)));
#elif NETFRAMEWORK
List<int> pageIds = await Task.Run(() => wc.CheckSingleSitePage(string.Format(Constants.QuickSearchUrl, query, pageNumber++)));
#else
List<int> pageIds = await wc.CheckSingleSitePage(string.Format(Constants.QuickSearchUrl, query, pageNumber++));
#endif
List<int> pageIds = await rc.CheckSingleSitePage(string.Format(Constants.QuickSearchUrl, query, pageNumber++));
ids.AddRange(pageIds);
if (pageIds.Count <= 1)
break;
@@ -137,102 +126,84 @@ namespace SabreTools.RedumpLib
/// <summary>
/// Validate a single track against Redump, if possible
/// </summary>
/// <param name="wc">RedumpWebClient for making the connection</param>
/// <param name="rc">RedumpClient for making the connection</param>
/// <param name="info">Existing SubmissionInfo object to fill</param>
/// <param name="sha1">SHA-1 hash to check against</param>
/// <returns>True if the track was found, false otherwise; List of found values, if possible</returns>
#if NETFRAMEWORK
public async static Task<(bool, List<int>?, string?)> ValidateSingleTrack(RedumpWebClient wc, SubmissionInfo info, string? sha1)
#else
public async static Task<(bool, List<int>?, string?)> ValidateSingleTrack(RedumpHttpClient wc, SubmissionInfo info, string? sha1)
#endif
/// <returns>List of found values, if possible</returns>
public async static Task<List<int>?> ValidateSingleTrack(RedumpClient rc, SubmissionInfo info, string? sha1)
{
// Get all matching IDs for the track
var newIds = await ListSearchResults(wc, sha1);
var newIds = await ListSearchResults(rc, sha1);
// If we got null back, there was an error
if (newIds == null)
return (false, null, "There was an unknown error retrieving information from Redump");
return null;
// If no IDs match, just return
if (!newIds.Any())
return (false, null, $"There were no matching IDs for track with SHA-1 of '{sha1}'");
if (newIds.Count == 0)
return null;
// Join the list of found IDs to the existing list, if possible
if (info.PartiallyMatchedIDs != null && info.PartiallyMatchedIDs.Any())
if (info.PartiallyMatchedIDs != null && info.PartiallyMatchedIDs.Count > 0)
info.PartiallyMatchedIDs.AddRange(newIds);
else
info.PartiallyMatchedIDs = newIds;
return (true, newIds, $"There were matching ID(s) found for track with SHA-1 of '{sha1}'");
return newIds;
}
/// <summary>
/// Validate a universal hash against Redump, if possible
/// </summary>
/// <param name="wc">RedumpWebClient for making the connection</param>
/// <param name="rc">RedumpClient for making the connection</param>
/// <param name="info">Existing SubmissionInfo object to fill</param>
/// <param name="resultProgress">Optional result progress callback</param>
/// <returns>True if the track was found, false otherwise; List of found values, if possible</returns>
#if NETFRAMEWORK
public async static Task<(bool, List<int>?, string?)> ValidateUniversalHash(RedumpWebClient wc, SubmissionInfo info)
#else
public async static Task<(bool, List<int>?, string?)> ValidateUniversalHash(RedumpHttpClient wc, SubmissionInfo info)
#endif
/// <returns>List of found values, if possible</returns>
public async static Task<List<int>?> ValidateUniversalHash(RedumpClient rc, SubmissionInfo info)
{
// If we don't have special fields
if (info.CommonDiscInfo?.CommentsSpecialFields == null)
return (false, null, "Universal hash was missing");
return null;
// If we don't have a universal hash
string? universalHash = info.CommonDiscInfo.CommentsSpecialFields[SiteCode.UniversalHash];
if (string.IsNullOrEmpty(universalHash))
return (false, null, "Universal hash was missing");
return null;
// Format the universal hash for finding within the comments
string universalHashQuery = $"{universalHash.Substring(0, universalHash.Length - 1)}/comments/only";
// Get all matching IDs for the hash
var newIds = await ListSearchResults(wc, universalHashQuery, filterForwardSlashes: false);
var newIds = await ListSearchResults(rc, universalHashQuery, filterForwardSlashes: false);
// If we got null back, there was an error
if (newIds == null)
return (false, null, "There was an unknown error retrieving information from Redump");
return null;
// If no IDs match, just return
if (!newIds.Any())
return (false, null, $"There were no matching IDs for universal hash of '{universalHash}'");
if (newIds.Count == 0)
return null;
// Join the list of found IDs to the existing list, if possible
if (info.PartiallyMatchedIDs != null && info.PartiallyMatchedIDs.Any())
if (info.PartiallyMatchedIDs != null && info.PartiallyMatchedIDs.Count > 0)
info.PartiallyMatchedIDs.AddRange(newIds);
else
info.PartiallyMatchedIDs = newIds;
return (true, newIds, $"There were matching ID(s) found for universal hash of '{universalHash}'");
return newIds;
}
/// <summary>
/// Validate that the current track count and remote track count match
/// </summary>
/// <param name="wc">RedumpWebClient for making the connection</param>
/// <param name="rc">RedumpClient for making the connection</param>
/// <param name="id">Redump disc ID to retrieve</param>
/// <param name="localCount">Local count of tracks for the current disc</param>
/// <returns>True if the track count matches, false otherwise</returns>
#if NETFRAMEWORK
public async static Task<bool> ValidateTrackCount(RedumpWebClient wc, int id, int localCount)
#else
public async static Task<bool> ValidateTrackCount(RedumpHttpClient wc, int id, int localCount)
#endif
public async static Task<bool> ValidateTrackCount(RedumpClient rc, int id, int localCount)
{
// If we can't pull the remote data, we can't match
#if NET40
string? discData = await Task.Factory.StartNew(() => wc.DownloadSingleSiteID(id));
#elif NETFRAMEWORK
string? discData = await Task.Run(() => wc.DownloadSingleSiteID(id));
#else
string? discData = await wc.DownloadSingleSiteID(id);
#endif
string? discData = await rc.DownloadSingleSiteID(id);
if (string.IsNullOrEmpty(discData))
return false;
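The hunks above move the validators from RedumpWebClient/RedumpHttpClient to the unified RedumpClient and change ValidateSingleTrack and ValidateUniversalHash to return only the list of matched IDs (null on error or no match) instead of a (bool, List<int>?, string?) tuple. A hedged caller sketch, assuming the containing class is named Validator and a populated SubmissionInfo is on hand:

using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.RedumpLib;
using SabreTools.RedumpLib.Data;
using SabreTools.RedumpLib.Web;

class TrackValidationSketch
{
    static async Task CheckTrack(SubmissionInfo info, string sha1)
    {
        var rc = new RedumpClient();

        // The descriptive message strings from the old tuple are gone;
        // null now covers both "error" and "no matching IDs"
        List<int>? matches = await Validator.ValidateSingleTrack(rc, info, sha1);
        if (matches == null)
            Console.WriteLine($"No matching IDs (or an error) for SHA-1 '{sha1}'");
        else
            Console.WriteLine($"{matches.Count} matching ID(s) for SHA-1 '{sha1}'");
    }
}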

View File

@@ -0,0 +1,43 @@
using System;
using System.Net;
#pragma warning disable SYSLIB0014 // 'WebClient.WebClient()' is obsolete
namespace SabreTools.RedumpLib.Web
{
internal class CookieWebClient : WebClient
{
// https://stackoverflow.com/questions/1777221/using-cookiecontainer-with-webclient-class
private readonly CookieContainer _container = new();
/// <summary>
/// Get the last downloaded filename, if possible
/// </summary>
public string? GetLastFilename()
{
// If the response headers are null or empty
if (ResponseHeaders == null || ResponseHeaders.Count == 0)
return null;
// If we don't have the response header we care about
string? headerValue = ResponseHeaders.Get("Content-Disposition");
if (string.IsNullOrEmpty(headerValue))
return null;
// Extract the filename from the value
return headerValue.Substring(headerValue.IndexOf("filename=") + 9).Replace("\"", "");
}
/// <inheritdoc/>
protected override WebRequest GetWebRequest(Uri address)
{
WebRequest request = base.GetWebRequest(address);
if (request is HttpWebRequest webRequest)
{
webRequest.Timeout = 30 * 1000; // 30 seconds
webRequest.CookieContainer = _container;
}
return request;
}
}
}

View File

@@ -0,0 +1,21 @@
using System;
using System.Threading;
namespace SabreTools.RedumpLib.Web
{
/// <summary>
/// Helper class for delaying
/// </summary>
internal static class DelayHelper
{
/// <summary>
/// Delay a random amount of time up to 5 seconds
/// </summary>
public static void DelayRandom()
{
var r = new Random();
int delay = r.Next(0, 50);
Thread.Sleep(delay * 100);
}
}
}
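For reference, the random delay above works out to 0 to 4,900 ms in 100 ms steps (the upper bound of Random.Next is exclusive), so it always stays under the 5 seconds named in the summary:

using System;

class DelaySketch
{
    static void Main()
    {
        var r = new Random();
        int delay = r.Next(0, 50);          // 0..49 inclusive, 50 is excluded
        Console.WriteLine(delay * 100);     // sleep duration in milliseconds: 0..4,900
    }
}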

View File

@@ -0,0 +1,65 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
namespace SabreTools.RedumpLib.Web
{
/// <summary>
/// Contains logic for dealing with disc pages
/// </summary>
public static class Discs
{
/// <summary>
/// Download the last modified disc pages, until first failure
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="force">Force continuation of download</param>
/// <returns>All disc IDs in last modified range, empty on error</returns>
public static async Task<List<int>> DownloadLastModified(RedumpClient rc, string? outDir, bool force)
{
List<int> ids = [];
// Keep getting last modified pages until there are none left
int pageNumber = 1;
while (true)
{
var pageIds = await rc.CheckSingleSitePage(string.Format(Constants.LastModifiedUrl, pageNumber++), outDir, !force);
ids.AddRange(pageIds);
if (pageIds.Count == 0)
break;
}
return ids;
}
/// <summary>
/// Download the specified range of site disc pages
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="minId">Starting ID for the range</param>
/// <param name="maxId">Ending ID for the range (inclusive)</param>
/// <returns>All disc IDs in last modified range, empty on error</returns>
public static async Task<List<int>> DownloadSiteRange(RedumpClient rc, string? outDir, int minId = 0, int maxId = 0)
{
List<int> ids = [];
if (!rc.LoggedIn)
{
Console.WriteLine("Site download functionality is only available to Redump members");
return ids;
}
for (int id = minId; id <= maxId; id++)
{
ids.Add(id);
if (await rc.DownloadSingleSiteID(id, outDir, true))
DelayHelper.DelayRandom(); // Intentional sleep here so we don't flood the server
}
return ids;
}
}
}
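A hedged usage sketch for the two helpers above; the output directory and ID range are placeholders, and DownloadSiteRange only does real work once the client is logged in:

using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Web;

class DiscDownloadSketch
{
    static async Task Run(RedumpClient rc)
    {
        // Walk the last-modified pages until one comes back empty
        List<int> recent = await Discs.DownloadLastModified(rc, "redump-pages", force: false);

        // Download a fixed, inclusive range of disc IDs
        List<int> range = await Discs.DownloadSiteRange(rc, "redump-pages", minId: 1, maxId: 50);

        Console.WriteLine($"{recent.Count} recent + {range.Count} ranged pages requested");
    }
}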

View File

@@ -0,0 +1,71 @@
using System;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
namespace SabreTools.RedumpLib.Web
{
/// <summary>
/// Contains logic for dealing with packs
/// </summary>
internal static class Packs
{
/// <summary>
/// Download premade packs
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="useSubfolders">True to use named subfolders to store downloads, false to store directly in the output directory</param>
public static async Task<bool> DownloadPacks(RedumpClient rc, string? outDir, bool useSubfolders)
{
var systems = (RedumpSystem[])Enum.GetValues(typeof(RedumpSystem));
await rc.DownloadPacks(Constants.PackCuesUrl, Array.FindAll(systems, s => s.HasCues()), "CUEs", outDir, useSubfolders ? "cue" : null);
await rc.DownloadPacks(Constants.PackDatfileUrl, Array.FindAll(systems, s => s.HasDat()), "DATs", outDir, useSubfolders ? "dat" : null);
await rc.DownloadPacks(Constants.PackDkeysUrl, Array.FindAll(systems, s => s.HasDkeys()), "Decrypted KEYS", outDir, useSubfolders ? "dkey" : null);
await rc.DownloadPacks(Constants.PackGdiUrl, Array.FindAll(systems, s => s.HasGdi()), "GDIs", outDir, useSubfolders ? "gdi" : null);
await rc.DownloadPacks(Constants.PackKeysUrl, Array.FindAll(systems, s => s.HasKeys()), "KEYS", outDir, useSubfolders ? "keys" : null);
await rc.DownloadPacks(Constants.PackLsdUrl, Array.FindAll(systems, s => s.HasLsd()), "LSD", outDir, useSubfolders ? "lsd" : null);
await rc.DownloadPacks(Constants.PackSbiUrl, Array.FindAll(systems, s => s.HasSbi()), "SBIs", outDir, useSubfolders ? "sbi" : null);
return true;
}
/// <summary>
/// Download premade packs for an individual system
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="system">RedumpSystem to get all possible packs for</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="useSubfolders">True to use named subfolders to store downloads, false to store directly in the output directory</param>
public static async Task<bool> DownloadPacksForSystem(RedumpClient rc, RedumpSystem? system, string? outDir, bool useSubfolders)
{
if (system == null)
return false;
var systemAsArray = new RedumpSystem[] { system.Value };
if (system.HasCues())
await rc.DownloadPacks(Constants.PackCuesUrl, systemAsArray, "CUEs", outDir, useSubfolders ? "cue" : null);
if (system.HasDat())
await rc.DownloadPacks(Constants.PackDatfileUrl, systemAsArray, "DATs", outDir, useSubfolders ? "dat" : null);
if (system.HasDkeys())
await rc.DownloadPacks(Constants.PackDkeysUrl, systemAsArray, "Decrypted KEYS", outDir, useSubfolders ? "dkey" : null);
if (system.HasGdi())
await rc.DownloadPacks(Constants.PackGdiUrl, systemAsArray, "GDIs", outDir, useSubfolders ? "gdi" : null);
if (system.HasKeys())
await rc.DownloadPacks(Constants.PackKeysUrl, systemAsArray, "KEYS", outDir, useSubfolders ? "keys" : null);
if (system.HasLsd())
await rc.DownloadPacks(Constants.PackLsdUrl, systemAsArray, "LSD", outDir, useSubfolders ? "lsd" : null);
if (system.HasSbi())
await rc.DownloadPacks(Constants.PackSbiUrl, systemAsArray, "SBIs", outDir, useSubfolders ? "sbi" : null);
return true;
}
}
}
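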

View File

@@ -1,20 +1,20 @@
#if !NETFRAMEWORK
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
#if NETCOREAPP
using System.Net.Http;
using System.Net.Http.Headers;
#endif
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
namespace SabreTools.RedumpLib.Web
{
public class RedumpHttpClient : HttpClient
public class RedumpClient
{
#region Properties
@@ -28,14 +28,44 @@ namespace SabreTools.RedumpLib.Web
/// </summary>
public bool IsStaff { get; private set; } = false;
/// <summary>
/// Maximum retry count for any operation
/// </summary>
public int RetryCount { get; private set; } = 3;
/// <summary>
/// Internal client for interaction
/// </summary>
#if NETFRAMEWORK || NETSTANDARD2_0_OR_GREATER
private CookieWebClient _internalClient;
#else
private HttpClient _internalClient;
#endif
#endregion
/// <summary>
/// Constructor
/// </summary>
public RedumpHttpClient()
: base(new HttpClientHandler { UseCookies = true })
public RedumpClient()
{
#if NETFRAMEWORK || NETSTANDARD2_0_OR_GREATER
_internalClient = new CookieWebClient();
#else
_internalClient = new HttpClient(new HttpClientHandler { UseCookies = true }) { Timeout = TimeSpan.FromSeconds(30) };
#endif
}
/// <summary>
/// Constructor
/// </summary>
public RedumpClient(int retryCount) : this()
{
// Ensure there are a positive number of retries
if (retryCount <= 0)
retryCount = 3;
RetryCount = retryCount;
}
#region Credentials
@@ -43,22 +73,22 @@ namespace SabreTools.RedumpLib.Web
/// <summary>
/// Validate supplied credentials
/// </summary>
public async static Task<(bool?, string?)> ValidateCredentials(string username, string password)
public async static Task<bool?> ValidateCredentials(string username, string password)
{
// If options are invalid or we're missing something key, just return
if (string.IsNullOrEmpty(username) || string.IsNullOrEmpty(password))
return (false, null);
return false;
// Try logging in with the supplied credentials otherwise
using RedumpHttpClient httpClient = new();
var redumpClient = new RedumpClient();
bool? loggedIn = await httpClient.Login(username, password);
bool? loggedIn = await redumpClient.Login(username, password);
if (loggedIn == true)
return (true, "Redump username and password accepted!");
return true;
else if (loggedIn == false)
return (false, "Redump username and password denied!");
return false;
else
return (null, "An error occurred validating your credentials!");
return null;
}
/// <summary>
@@ -86,7 +116,11 @@ namespace SabreTools.RedumpLib.Web
}
// HTTP encode the password
#if NET20 || NET35 || NET40
password = Uri.EscapeUriString(password);
#else
password = WebUtility.UrlEncode(password);
#endif
// Attempt to login up to 3 times
for (int i = 0; i < 3; i++)
@@ -94,25 +128,36 @@ namespace SabreTools.RedumpLib.Web
try
{
// Get the current token from the login page
var loginPage = await GetStringAsync(Constants.LoginUrl);
string token = Constants.TokenRegex.Match(loginPage).Groups[1].Value;
var loginPage = await DownloadString(Constants.LoginUrl);
string token = Constants.TokenRegex.Match(loginPage ?? string.Empty).Groups[1].Value;
#if NETFRAMEWORK || NETSTANDARD2_0_OR_GREATER
// Construct the login request
_internalClient.Headers[HttpRequestHeader.ContentType] = "application/x-www-form-urlencoded";
_internalClient.Encoding = Encoding.UTF8;
// Send the login request and get the result
string? responseContent = _internalClient.UploadString(Constants.LoginUrl, $"form_sent=1&redirect_url=&csrf_token={token}&req_username={username}&req_password={password}&save_pass=0");
#else
// Construct the login request
var postContent = new StringContent($"form_sent=1&redirect_url=&csrf_token={token}&req_username={username}&req_password={password}&save_pass=0", Encoding.UTF8);
postContent.Headers.ContentType = MediaTypeHeaderValue.Parse("application/x-www-form-urlencoded");
// Send the login request and get the result
var response = await PostAsync(Constants.LoginUrl, postContent);
var response = await _internalClient.PostAsync(Constants.LoginUrl, postContent);
string? responseContent = null;
if (response?.Content != null)
responseContent = await response.Content.ReadAsStringAsync();
#endif
// An empty response indicates an error
if (string.IsNullOrEmpty(responseContent))
{
Console.WriteLine($"An error occurred while trying to log in on attempt {i}: No response");
continue;
}
// Explicit confirmation that the credentials were wrong
if (responseContent.Contains("Incorrect username and/or password."))
{
Console.WriteLine("Invalid credentials entered, continuing without logging in...");
@@ -141,6 +186,108 @@ namespace SabreTools.RedumpLib.Web
#endregion
#region Generic Helpers
/// <summary>
/// Download from a URI to a byte array
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <returns>Byte array from the URI, null on error</returns>
public async Task<byte[]?> DownloadData(string uri)
{
// Only retry a positive number of times
if (RetryCount <= 0)
return null;
for (int i = 0; i < RetryCount; i++)
{
try
{
#if NET40
return await Task.Factory.StartNew(() => _internalClient.DownloadData(uri));
#elif NETFRAMEWORK || NETSTANDARD2_0_OR_GREATER
return await Task.Run(() => _internalClient.DownloadData(uri));
#else
return await _internalClient.GetByteArrayAsync(uri);
#endif
}
catch { }
// Sleep for 100ms if the last attempt failed
Thread.Sleep(100);
}
return null;
}
/// <summary>
/// Download from a URI to a local file
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <param name="fileName">Filename to write to</param>
/// <returns>The remote filename from the URI, null on error</returns>
public async Task<string?> DownloadFile(string uri, string fileName)
{
#if NET40
await Task.Factory.StartNew(() => { _internalClient.DownloadFile(uri, fileName); return true; });
return _internalClient.GetLastFilename();
#elif NETFRAMEWORK || NETSTANDARD2_0_OR_GREATER
await Task.Run(() => _internalClient.DownloadFile(uri, fileName));
return _internalClient.GetLastFilename();
#else
// Make the call to get the file
var response = await _internalClient.GetAsync(uri);
if (response?.Content?.Headers == null || !response.IsSuccessStatusCode)
{
Console.WriteLine($"Could not download {uri}");
return null;
}
// Copy the data to a local temp file
using (var responseStream = await response.Content.ReadAsStreamAsync())
using (var tempFileStream = File.OpenWrite(fileName))
{
responseStream.CopyTo(tempFileStream);
}
return response.Content.Headers.ContentDisposition?.FileName?.Replace("\"", "");
#endif
}
/// <summary>
/// Download from a URI to a string
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <returns>String from the URI, null on error</returns>
public async Task<string?> DownloadString(string uri)
{
// Only retry a positive number of times
if (RetryCount <= 0)
return null;
for (int i = 0; i < RetryCount; i++)
{
try
{
#if NET40
return await Task.Factory.StartNew(() => _internalClient.DownloadString(uri));
#elif NETFRAMEWORK || NETSTANDARD2_0_OR_GREATER
return await Task.Run(() => _internalClient.DownloadString(uri));
#else
return await _internalClient.GetStringAsync(uri);
#endif
}
catch { }
// Sleep for 100ms if the last attempt failed
Thread.Sleep(100);
}
return null;
}
#endregion
#region Single Page Helpers
/// <summary>
@@ -152,8 +299,8 @@ namespace SabreTools.RedumpLib.Web
{
List<int> ids = [];
// Try up to 3 times to retrieve the data
string? dumpsPage = await DownloadString(url, retries: 3);
// Try to retrieve the data
string? dumpsPage = await DownloadString(url);
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
@@ -171,7 +318,7 @@ namespace SabreTools.RedumpLib.Web
// Otherwise, traverse each dump on the page
var matches = Constants.DiscRegex.Matches(dumpsPage);
foreach (Match? match in matches.Cast<Match?>())
foreach (Match? match in matches)
{
if (match == null)
continue;
@@ -197,15 +344,17 @@ namespace SabreTools.RedumpLib.Web
/// <param name="url">Base URL to download using</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="failOnSingle">True to return on first error, false otherwise</param>
/// <returns>True if the page could be downloaded, false otherwise</returns>
public async Task<bool> CheckSingleSitePage(string url, string? outDir, bool failOnSingle)
/// <returns>List of IDs that were found on success, empty on error</returns>
public async Task<List<int>> CheckSingleSitePage(string url, string? outDir, bool failOnSingle)
{
// Try up to 3 times to retrieve the data
string? dumpsPage = await DownloadString(url, retries: 3);
List<int> ids = [];
// Try to retrieve the data
string? dumpsPage = await DownloadString(url);
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
return false;
return ids;
// If we have a single disc page already
if (dumpsPage.Contains("<b>Download:</b>"))
@@ -213,17 +362,18 @@ namespace SabreTools.RedumpLib.Web
var value = Regex.Match(dumpsPage, @"/disc/(\d+)/sfv/").Groups[1].Value;
if (int.TryParse(value, out int id))
{
ids.Add(id);
bool downloaded = await DownloadSingleSiteID(id, outDir, false);
if (!downloaded && failOnSingle)
return false;
return ids;
}
return false;
return ids;
}
// Otherwise, traverse each dump on the page
var matches = Constants.DiscRegex.Matches(dumpsPage);
foreach (Match? match in matches.Cast<Match?>())
foreach (Match? match in matches)
{
if (match == null)
continue;
@@ -232,9 +382,10 @@ namespace SabreTools.RedumpLib.Web
{
if (int.TryParse(match.Groups[1].Value, out int value))
{
ids.Add(value);
bool downloaded = await DownloadSingleSiteID(value, outDir, false);
if (!downloaded && failOnSingle)
return false;
return ids;
}
}
catch (Exception ex)
@@ -244,7 +395,7 @@ namespace SabreTools.RedumpLib.Web
}
}
return true;
return ids;
}
/// <summary>
@@ -256,8 +407,8 @@ namespace SabreTools.RedumpLib.Web
{
List<int> ids = [];
// Try up to 3 times to retrieve the data
string? dumpsPage = await DownloadString(url, retries: 3);
// Try to retrieve the data
string? dumpsPage = await DownloadString(url);
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
@@ -265,7 +416,7 @@ namespace SabreTools.RedumpLib.Web
// Otherwise, traverse each dump on the page
var matches = Constants.NewDiscRegex.Matches(dumpsPage);
foreach (Match? match in matches.Cast<Match?>())
foreach (Match? match in matches)
{
if (match == null)
continue;
@@ -291,19 +442,21 @@ namespace SabreTools.RedumpLib.Web
/// <param name="wc">RedumpWebClient to access the packs</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="failOnSingle">True to return on first error, false otherwise</param>
/// <returns>True if the page could be downloaded, false otherwise</returns>
public async Task<bool> CheckSingleWIPPage(string url, string? outDir, bool failOnSingle)
/// <returns>List of IDs that were found on success, empty on error</returns>
public async Task<List<int>> CheckSingleWIPPage(string url, string? outDir, bool failOnSingle)
{
// Try up to 3 times to retrieve the data
string? dumpsPage = await DownloadString(url, retries: 3);
List<int> ids = [];
// Try to retrieve the data
string? dumpsPage = await DownloadString(url);
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
return false;
return ids;
// Otherwise, traverse each dump on the page
var matches = Constants.NewDiscRegex.Matches(dumpsPage);
foreach (Match? match in matches.Cast<Match?>())
foreach (Match? match in matches)
{
if (match == null)
continue;
@@ -312,9 +465,10 @@ namespace SabreTools.RedumpLib.Web
{
if (int.TryParse(match.Groups[2].Value, out int value))
{
ids.Add(value);
bool downloaded = await DownloadSingleWIPID(value, outDir, false);
if (!downloaded && failOnSingle)
return false;
return ids;
}
}
catch (Exception ex)
@@ -324,7 +478,7 @@ namespace SabreTools.RedumpLib.Web
}
}
return true;
return ids;
}
#endregion
@@ -341,7 +495,13 @@ namespace SabreTools.RedumpLib.Web
{
try
{
return await GetByteArrayAsync(string.Format(url, system.ShortName()));
#if NET40
return await Task.Factory.StartNew(() => _internalClient.DownloadData(string.Format(url, system.ShortName())));
#elif NETFRAMEWORK || NETSTANDARD2_0_OR_GREATER
return await Task.Run(() => _internalClient.DownloadData(string.Format(url, system.ShortName())));
#else
return await _internalClient.GetByteArrayAsync(string.Format(url, system.ShortName()));
#endif
}
catch (Exception ex)
{
@@ -391,9 +551,9 @@ namespace SabreTools.RedumpLib.Web
Console.WriteLine($"Processing ID: {paddedId}");
try
{
// Try up to 3 times to retrieve the data
// Try to retrieve the data
string discPageUri = string.Format(Constants.DiscPageUrl, +id);
string? discPage = await DownloadString(discPageUri, retries: 3);
string? discPage = await DownloadString(discPageUri);
if (discPage == null || discPage.Contains($"Disc with ID \"{id}\" doesn't exist"))
{
@@ -429,9 +589,9 @@ namespace SabreTools.RedumpLib.Web
Console.WriteLine($"Processing ID: {paddedId}");
try
{
// Try up to 3 times to retrieve the data
// Try to retrieve the data
string discPageUri = string.Format(Constants.DiscPageUrl, +id);
string? discPage = await DownloadString(discPageUri, retries: 3);
string? discPage = await DownloadString(discPageUri);
if (discPage == null || discPage.Contains($"Disc with ID \"{id}\" doesn't exist"))
{
@@ -553,9 +713,9 @@ namespace SabreTools.RedumpLib.Web
Console.WriteLine($"Processing ID: {paddedId}");
try
{
// Try up to 3 times to retrieve the data
// Try to retrieve the data
string discPageUri = string.Format(Constants.WipDiscPageUrl, +id);
string? discPage = await DownloadString(discPageUri, retries: 3);
string? discPage = await DownloadString(discPageUri);
if (discPage == null || discPage.Contains($"WIP disc with ID \"{id}\" doesn't exist"))
{
@@ -591,9 +751,9 @@ namespace SabreTools.RedumpLib.Web
Console.WriteLine($"Processing ID: {paddedId}");
try
{
// Try up to 3 times to retrieve the data
// Try to retrieve the data
string discPageUri = string.Format(Constants.WipDiscPageUrl, +id);
string? discPage = await DownloadString(discPageUri, retries: 3);
string? discPage = await DownloadString(discPageUri);
if (discPage == null || discPage.Contains($"WIP disc with ID \"{id}\" doesn't exist"))
{
@@ -667,7 +827,7 @@ namespace SabreTools.RedumpLib.Web
/// <param name="url">Base URL to download using</param>
/// <param name="system">Systems to download packs for</param>
/// <param name="title">Name of the pack that is downloading</param>
public async Task<Dictionary<RedumpSystem, byte[]>> DownloadPacks(string url, RedumpSystem?[] systems, string title)
public async Task<Dictionary<RedumpSystem, byte[]>> DownloadPacks(string url, RedumpSystem[] systems, string title)
{
var packsDictionary = new Dictionary<RedumpSystem, byte[]>();
@@ -675,7 +835,7 @@ namespace SabreTools.RedumpLib.Web
foreach (var system in systems)
{
// If the system is invalid, we can't do anything
if (system == null || !system.IsAvailable())
if (!system.IsAvailable())
continue;
// If we didn't have credentials
@@ -687,10 +847,10 @@ namespace SabreTools.RedumpLib.Web
if (string.IsNullOrEmpty(longName))
continue;
Console.Write($"\r{longName}{new string(' ', Console.BufferWidth - longName.Length - 1)}");
Console.Write($"\r{longName}{new string(' ', Console.BufferWidth - longName!.Length - 1)}");
byte[]? pack = await DownloadSinglePack(url, system);
if (pack != null)
packsDictionary.Add(system.Value, pack);
packsDictionary.Add(system, pack);
}
Console.Write($"\rComplete!{new string(' ', Console.BufferWidth - 10)}");
@@ -707,13 +867,13 @@ namespace SabreTools.RedumpLib.Web
/// <param name="title">Name of the pack that is downloading</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="subfolder">Named subfolder for the pack, used optionally</param>
public async Task<bool> DownloadPacks(string url, RedumpSystem?[] systems, string title, string? outDir, string? subfolder)
public async Task<bool> DownloadPacks(string url, RedumpSystem[] systems, string title, string? outDir, string? subfolder)
{
Console.WriteLine($"Downloading {title}");
foreach (var system in systems)
{
// If the system is invalid, we can't do anything
if (system == null || !system.IsAvailable())
if (!system.IsAvailable())
continue;
// If we didn't have credentials
@@ -725,7 +885,7 @@ namespace SabreTools.RedumpLib.Web
if (string.IsNullOrEmpty(longName))
continue;
Console.Write($"\r{longName}{new string(' ', Console.BufferWidth - longName.Length - 1)}");
Console.Write($"\r{longName}{new string(' ', Console.BufferWidth - longName!.Length - 1)}");
await DownloadSinglePack(url, system, outDir, subfolder);
}
@@ -734,56 +894,6 @@ namespace SabreTools.RedumpLib.Web
return true;
}
/// <summary>
/// Download from a URI to a local file
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <param name="fileName">Filename to write to</param>
/// <returns>The remote filename from the URI, null on error</returns>
private async Task<string?> DownloadFile(string uri, string fileName)
{
// Make the call to get the file
var response = await GetAsync(uri);
if (response?.Content?.Headers == null || !response.IsSuccessStatusCode)
{
Console.WriteLine($"Could not download {uri}");
return null;
}
// Copy the data to a local temp file
using (var responseStream = await response.Content.ReadAsStreamAsync())
using (var tempFileStream = File.OpenWrite(fileName))
{
responseStream.CopyTo(tempFileStream);
}
return response.Content.Headers.ContentDisposition?.FileName?.Replace("\"", "");
}
/// <summary>
/// Download from a URI to a string
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <param name="retries">Number of times to retry on error</param>
/// <returns>String from the URI, null on error</returns>
private async Task<string?> DownloadString(string uri, int retries = 3)
{
// Only retry a positive number of times
if (retries <= 0)
return null;
for (int i = 0; i < retries; i++)
{
try
{
return await GetStringAsync(uri);
}
catch { }
}
return null;
}
/// <summary>
/// Move a tempfile to a new name unless the target already exists, in which case delete the tempfile
/// </summary>
@@ -819,5 +929,3 @@ namespace SabreTools.RedumpLib.Web
#endregion
}
}
#endif
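Putting the rewritten client together, here is a hedged end-to-end sketch (credentials and the page URL are placeholders) of constructing a RedumpClient with a custom retry count, logging in, and fetching a page through the retrying DownloadString helper:

using System;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Web;

class ClientSketch
{
    static async Task Run()
    {
        // Five retries per request instead of the default three
        var rc = new RedumpClient(retryCount: 5);

        // Login is optional; anonymous clients can still read the public pages
        bool? loggedIn = await rc.Login("username", "password");
        Console.WriteLine($"Logged in: {loggedIn}");

        // DownloadString retries up to RetryCount times, sleeping 100 ms between failures
        string? page = await rc.DownloadString("http://redump.org/discs/");
        Console.WriteLine(page == null ? "Download failed" : $"Downloaded {page.Length} characters");
    }
}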

View File

@@ -0,0 +1,102 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
namespace SabreTools.RedumpLib.Web
{
/// <summary>
/// Contains logic for dealing with searches
/// </summary>
internal static class Search
{
/// <summary>
/// List the disc IDs associated with a given quicksearch query
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="query">Query string to attempt to search for</param>
/// <param name="noSlash">Don't replace slashes with `-` in queries</param>
/// <returns>All disc IDs for the given query, empty on error</returns>
public static async Task<List<int>> ListSearchResults(RedumpClient rc, string? query, bool noSlash)
{
// If the query is invalid
if (string.IsNullOrEmpty(query))
return [];
List<int> ids = [];
// Strip quotes
query = query!.Trim('"', '\'');
// Special characters become dashes
query = query.Replace(' ', '-');
query = query.Replace('\\', '-');
if (!noSlash)
query = query.Replace('/', '-');
// Lowercasing rules vary by language, so use the invariant culture
query = query.ToLowerInvariant();
// Keep getting quicksearch pages until there are none left
try
{
int pageNumber = 1;
while (true)
{
List<int> pageIds = await rc.CheckSingleSitePage(string.Format(Constants.QuickSearchUrl, query, pageNumber++));
ids.AddRange(pageIds);
if (pageIds.Count <= 1)
break;
}
}
catch (Exception ex)
{
Console.WriteLine($"An exception occurred while retrieving search results: {ex}");
return [];
}
return ids;
}
/// <summary>
/// Download the disc pages associated with a given quicksearch query
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="query">Query string to attempt to search for</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="noSlash">Don't replace slashes with `-` in queries</param>
/// <returns>All disc IDs for the given query, empty on error</returns>
public static async Task<List<int>> DownloadSearchResults(RedumpClient rc, string? query, string? outDir, bool noSlash)
{
List<int> ids = [];
// If the query is invalid
if (string.IsNullOrEmpty(query))
return ids;
// Strip quotes
query = query!.Trim('"', '\'');
// Special characters become dashes
query = query.Replace(' ', '-');
query = query.Replace('\\', '-');
if (!noSlash)
query = query.Replace('/', '-');
// Lowercasing rules vary by language, so use the invariant culture
query = query.ToLowerInvariant();
// Keep getting quicksearch pages until there are none left
int pageNumber = 1;
while (true)
{
var pageIds = await rc.CheckSingleSitePage(string.Format(Constants.QuickSearchUrl, query, pageNumber++), outDir, false);
ids.AddRange(pageIds);
if (pageIds.Count == 0)
break;
}
return ids;
}
}
}
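Because the query normalization above (quote stripping, dash substitution, invariant lowercasing) determines the final quicksearch URL, here is a standalone sketch of just that step with a made-up query:

using System;

class QueryNormalizationSketch
{
    static string Normalize(string query, bool noSlash)
    {
        // Strip surrounding quotes
        query = query.Trim('"', '\'');

        // Spaces, backslashes, and (unless noSlash is set) forward slashes become dashes
        query = query.Replace(' ', '-');
        query = query.Replace('\\', '-');
        if (!noSlash)
            query = query.Replace('/', '-');

        // Invariant lowercase so the URL does not depend on the current culture
        return query.ToLowerInvariant();
    }

    static void Main()
    {
        // Prints: final-fantasy-vii
        Console.WriteLine(Normalize("\"Final Fantasy VII\"", noSlash: false));
    }
}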

View File

@@ -0,0 +1,110 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
namespace SabreTools.RedumpLib.Web
{
/// <summary>
/// Contains logic for dealing with users
/// </summary>
public static class User
{
/// <summary>
/// Download the disc pages associated with the given user
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="username">Username to check discs for</param>
/// <param name="outDir">Output directory to save data to</param>
/// <returns>All disc IDs for the given user, empty on error</returns>
public static async Task<List<int>> DownloadUser(RedumpClient rc, string? username, string? outDir)
{
List<int> ids = [];
if (!rc.LoggedIn || string.IsNullOrEmpty(username))
{
Console.WriteLine("User download functionality is only available to Redump members");
return ids;
}
// Keep getting user pages until there are none left
int pageNumber = 1;
while (true)
{
var pageIds = await rc.CheckSingleSitePage(string.Format(Constants.UserDumpsUrl, username, pageNumber++), outDir, false);
ids.AddRange(pageIds);
if (pageIds.Count == 0)
break;
}
return ids;
}
/// <summary>
/// Download the last modified disc pages associated with the given user, until first failure
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="username">Username to check discs for</param>
/// <param name="outDir">Output directory to save data to</param>
/// <returns>All disc IDs for the given user, empty on error</returns>
public static async Task<List<int>> DownloadUserLastModified(RedumpClient rc, string? username, string? outDir)
{
List<int> ids = [];
if (!rc.LoggedIn || string.IsNullOrEmpty(username))
{
Console.WriteLine("User download functionality is only available to Redump members");
return ids;
}
// Keep getting last modified user pages until there are none left
int pageNumber = 1;
while (true)
{
var pageIds = await rc.CheckSingleSitePage(string.Format(Constants.UserDumpsLastModifiedUrl, username, pageNumber++), outDir, true);
ids.AddRange(pageIds);
if (pageIds.Count == 0)
break;
}
return ids;
}
/// <summary>
/// List the disc IDs associated with the given user
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="username">Username to check discs for</param>
/// <returns>All disc IDs for the given user, empty on error</returns>
public static async Task<List<int>> ListUser(RedumpClient rc, string? username)
{
List<int> ids = [];
if (!rc.LoggedIn || string.IsNullOrEmpty(username))
{
Console.WriteLine("User download functionality is only available to Redump members");
return ids;
}
// Keep getting user pages until there are none left
try
{
int pageNumber = 1;
while (true)
{
var pageIds = await rc.CheckSingleSitePage(string.Format(Constants.UserDumpsUrl, username, pageNumber++));
ids.AddRange(pageIds);
if (pageIds.Count <= 1)
break;
}
}
catch (Exception ex)
{
Console.WriteLine($"An exception occurred while listing the user's dumps: {ex}");
return [];
}
return ids;
}
}
}
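A hedged usage sketch for the user helpers above; the username, credentials, and output directory are placeholders, and all three helpers require a logged-in client:

using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Web;

class UserDumpsSketch
{
    static async Task Run()
    {
        var rc = new RedumpClient();
        await rc.Login("someuser", "somepassword");   // placeholder credentials

        // List the disc IDs without saving any pages
        List<int> ids = await User.ListUser(rc, "someuser");
        Console.WriteLine($"Found {ids.Count} dumps");

        // Or download each disc page into a local directory
        await User.DownloadUser(rc, "someuser", "redump-pages");
    }
}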

View File

@@ -0,0 +1,52 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
namespace SabreTools.RedumpLib.Web
{
/// <summary>
/// Contains logic for dealing with WIP queue
/// </summary>
public static class WIP
{
/// <summary>
/// Download the last submitted WIP disc pages
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="outDir">Output directory to save data to</param>
/// <returns>All disc IDs in last submitted range, empty on error</returns>
public static async Task<List<int>> DownloadLastSubmitted(RedumpClient rc, string? outDir)
{
return await rc.CheckSingleWIPPage(Constants.WipDumpsUrl, outDir, false);
}
/// <summary>
/// Download the specified range of WIP disc pages
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="minId">Starting ID for the range</param>
/// <param name="maxId">Ending ID for the range (inclusive)</param>
/// <returns>All disc IDs in the specified range, empty on error</returns>
public static async Task<List<int>> DownloadWIPRange(RedumpClient rc, string? outDir, int minId = 0, int maxId = 0)
{
List<int> ids = [];
if (!rc.LoggedIn || !rc.IsStaff)
{
Console.WriteLine("WIP download functionality is only available to Redump moderators");
return ids;
}
for (int id = minId; id <= maxId; id++)
{
ids.Add(id);
if (await rc.DownloadSingleWIPID(id, outDir, true))
DelayHelper.DelayRandom(); // Intentional sleep here so we don't flood the server
}
return ids;
}
}
}
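A minimal sketch of a ranged WIP download, assuming an authenticated staff RedumpClient named `client` in an async context; the output directory and ID range are illustrative only:
// Hypothetical caller code, not part of this file
List<int> wipIds = await WIP.DownloadWIPRange(client, "wip-dumps", minId: 1000, maxId: 1010);
Console.WriteLine($"Processed {wipIds.Count} WIP IDs");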


@@ -1,831 +0,0 @@
#if NETFRAMEWORK
using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Text;
using System.Text.RegularExpressions;
using SabreTools.RedumpLib.Data;
namespace SabreTools.RedumpLib.Web
{
// https://stackoverflow.com/questions/1777221/using-cookiecontainer-with-webclient-class
public class RedumpWebClient : WebClient
{
private readonly CookieContainer m_container = new();
/// <summary>
/// Determines if user is logged into Redump
/// </summary>
public bool LoggedIn { get; private set; } = false;
/// <summary>
/// Determines if the user is a staff member
/// </summary>
public bool IsStaff { get; private set; } = false;
/// <summary>
/// Get the last downloaded filename, if possible
/// </summary>
/// <returns>Filename parsed from the Content-Disposition header, null if unavailable</returns>
public string? GetLastFilename()
{
// If the response headers are null or empty
if (ResponseHeaders == null || ResponseHeaders.Count == 0)
return null;
// If we don't have the response header we care about
string headerValue = ResponseHeaders.Get("Content-Disposition");
if (string.IsNullOrEmpty(headerValue))
return null;
// Extract the filename from the value
return headerValue.Substring(headerValue.IndexOf("filename=") + 9).Replace("\"", "");
}
/// <inheritdoc/>
protected override WebRequest GetWebRequest(Uri address)
{
WebRequest request = base.GetWebRequest(address);
if (request is HttpWebRequest webRequest)
webRequest.CookieContainer = m_container;
return request;
}
/// <summary>
/// Validate supplied credentials
/// </summary>
public static (bool?, string?) ValidateCredentials(string username, string password)
{
// If options are invalid or we're missing something key, just return
if (string.IsNullOrEmpty(username) || string.IsNullOrEmpty(password))
return (false, null);
// Try logging in with the supplied credentials otherwise
using RedumpWebClient wc = new();
bool? loggedIn = wc.Login(username, password);
if (loggedIn == true)
return (true, "Redump username and password accepted!");
else if (loggedIn == false)
return (false, "Redump username and password denied!");
else
return (null, "An error occurred validating your credentials!");
}
/// <summary>
/// Login to Redump, if possible
/// </summary>
/// <param name="username">Redump username</param>
/// <param name="password">Redump password</param>
/// <returns>True if the user could be logged in, false otherwise, null on error</returns>
public bool? Login(string username, string password)
{
// Credentials verification
if (!string.IsNullOrEmpty(username) && !string.IsNullOrEmpty(password))
{
Console.WriteLine("Credentials entered, will attempt Redump login...");
}
else if (!string.IsNullOrEmpty(username) && string.IsNullOrEmpty(password))
{
Console.WriteLine("Only a username was specified, will not attempt Redump login...");
return false;
}
else if (string.IsNullOrEmpty(username))
{
Console.WriteLine("No credentials entered, will not attempt Redump login...");
return false;
}
// HTTP encode the password
#if NET20 || NET35 || NET40
password = Uri.EscapeDataString(password);
#else
password = WebUtility.UrlEncode(password);
#endif
// Attempt to login up to 3 times
for (int i = 0; i < 3; i++)
{
try
{
// Get the current token from the login page
var loginPage = DownloadString(Constants.LoginUrl);
string token = Constants.TokenRegex.Match(loginPage).Groups[1].Value;
// Construct the login request
Headers[HttpRequestHeader.ContentType] = "application/x-www-form-urlencoded";
Encoding = Encoding.UTF8;
var response = UploadString(Constants.LoginUrl, $"form_sent=1&redirect_url=&csrf_token={token}&req_username={username}&req_password={password}&save_pass=0");
if (response.Contains("Incorrect username and/or password."))
{
Console.WriteLine("Invalid credentials entered, continuing without logging in...");
return false;
}
// The user was able to be logged in
Console.WriteLine("Credentials accepted! Logged into Redump...");
LoggedIn = true;
// If the user is a moderator or staff, set accordingly
if (response.Contains("http://forum.redump.org/forum/9/staff/"))
IsStaff = true;
return true;
}
catch (Exception ex)
{
Console.WriteLine($"An exception occurred while trying to log in on attempt {i}: {ex}");
}
}
Console.WriteLine("Could not login to Redump in 3 attempts, continuing without logging in...");
return false;
}
#region Single Page Helpers
/// <summary>
/// Process a Redump site page as a list of possible IDs or disc page
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <returns>List of IDs from the page, empty on error</returns>
public List<int> CheckSingleSitePage(string url)
{
List<int> ids = [];
string dumpsPage = string.Empty;
// Try up to 3 times to retrieve the data
for (int i = 0; i < 3; i++)
{
try
{
dumpsPage = DownloadString(url);
break;
}
catch { }
}
// If we have no dumps left
if (dumpsPage.Contains("No discs found."))
return ids;
// If we have a single disc page already
if (dumpsPage.Contains("<b>Download:</b>"))
{
var value = Regex.Match(dumpsPage, @"/disc/(\d+)/sfv/").Groups[1].Value;
if (int.TryParse(value, out int id))
ids.Add(id);
return ids;
}
// Otherwise, traverse each dump on the page
var matches = Constants.DiscRegex.Matches(dumpsPage);
foreach (Match match in matches)
{
try
{
if (int.TryParse(match.Groups[1].Value, out int value))
ids.Add(value);
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
continue;
}
}
return ids;
}
/// <summary>
/// Process a Redump site page as a list of possible IDs or disc page
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="failOnSingle">True to return on first error, false otherwise</param>
/// <returns>True if the page could be downloaded, false otherwise</returns>
public bool CheckSingleSitePage(string url, string? outDir, bool failOnSingle)
{
string dumpsPage = string.Empty;
// Try up to 3 times to retrieve the data
for (int i = 0; i < 3; i++)
{
try
{
dumpsPage = DownloadString(url);
break;
}
catch { }
}
// If we have no dumps left
if (dumpsPage.Contains("No discs found."))
return false;
// If we have a single disc page already
if (dumpsPage.Contains("<b>Download:</b>"))
{
var value = Regex.Match(dumpsPage, @"/disc/(\d+)/sfv/").Groups[1].Value;
if (int.TryParse(value, out int id))
{
bool downloaded = DownloadSingleSiteID(id, outDir, false);
if (!downloaded && failOnSingle)
return false;
}
return false;
}
// Otherwise, traverse each dump on the page
var matches = Constants.DiscRegex.Matches(dumpsPage);
foreach (Match match in matches)
{
try
{
if (int.TryParse(match.Groups[1].Value, out int value))
{
bool downloaded = DownloadSingleSiteID(value, outDir, false);
if (!downloaded && failOnSingle)
return false;
}
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
continue;
}
}
return true;
}
/// <summary>
/// Process a Redump WIP page as a list of possible IDs or disc page
/// </summary>
/// <param name="wc">RedumpWebClient to access the packs</param>
/// <returns>List of IDs from the page, empty on error</returns>
public List<int> CheckSingleWIPPage(string url)
{
List<int> ids = [];
string dumpsPage = string.Empty;
// Try up to 3 times to retrieve the data
for (int i = 0; i < 3; i++)
{
try
{
dumpsPage = DownloadString(url);
break;
}
catch { }
}
// If we have no dumps left
if (dumpsPage.Contains("No discs found."))
return ids;
// Otherwise, traverse each dump on the page
var matches = Constants.NewDiscRegex.Matches(dumpsPage);
foreach (Match match in matches)
{
try
{
if (int.TryParse(match.Groups[2].Value, out int value))
ids.Add(value);
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
continue;
}
}
return ids;
}
/// <summary>
/// Process a Redump WIP page as a list of possible IDs or disc page
/// </summary>
/// <param name="wc">RedumpWebClient to access the packs</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="failOnSingle">True to return on first error, false otherwise</param>
/// <returns>True if the page could be downloaded, false otherwise</returns>
public bool CheckSingleWIPPage(string url, string? outDir, bool failOnSingle)
{
string dumpsPage = string.Empty;
// Try up to 3 times to retrieve the data
for (int i = 0; i < 3; i++)
{
try
{
dumpsPage = DownloadString(url);
break;
}
catch { }
}
// If we have no dumps left
if (dumpsPage.Contains("No discs found."))
return false;
// Otherwise, traverse each dump on the page
var matches = Constants.NewDiscRegex.Matches(dumpsPage);
foreach (Match match in matches)
{
try
{
if (int.TryParse(match.Groups[2].Value, out int value))
{
bool downloaded = DownloadSingleWIPID(value, outDir, false);
if (!downloaded && failOnSingle)
return false;
}
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
continue;
}
}
return true;
}
#endregion
#region Download Helpers
/// <summary>
/// Download a single pack
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="system">System to download packs for</param>
/// <returns>Byte array containing the downloaded pack, null on error</returns>
public byte[]? DownloadSinglePack(string url, RedumpSystem? system)
{
try
{
return DownloadData(string.Format(url, system.ShortName()));
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return null;
}
}
/// <summary>
/// Download a single pack
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="system">System to download packs for</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="subfolder">Named subfolder for the pack, used optionally</param>
public void DownloadSinglePack(string url, RedumpSystem? system, string? outDir, string? subfolder)
{
try
{
// If no output directory is defined, use the current directory instead
if (string.IsNullOrEmpty(outDir))
outDir = Environment.CurrentDirectory;
string tempfile = Path.Combine(outDir, "tmp" + Guid.NewGuid().ToString());
DownloadFile(string.Format(url, system.ShortName()), tempfile);
MoveOrDelete(tempfile, GetLastFilename(), outDir!, subfolder);
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
}
}
/// <summary>
/// Download data for an individual site ID, if possible
/// </summary>
/// <param name="id">Redump disc ID to retrieve</param>
/// <returns>String containing the page contents if successful, null on error</returns>
public string? DownloadSingleSiteID(int id)
{
string paddedId = id.ToString().PadLeft(6, '0');
Console.WriteLine($"Processing ID: {paddedId}");
try
{
string discPage = string.Empty;
// Try up to 3 times to retrieve the data
for (int i = 0; i < 3; i++)
{
try
{
discPage = DownloadString(string.Format(Constants.DiscPageUrl, +id));
break;
}
catch { }
}
if (discPage.Contains($"Disc with ID \"{id}\" doesn't exist"))
{
Console.WriteLine($"ID {paddedId} could not be found!");
return null;
}
Console.WriteLine($"ID {paddedId} has been successfully downloaded");
return discPage;
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return null;
}
}
/// <summary>
/// Download data for an individual site ID, if possible
/// </summary>
/// <param name="id">Redump disc ID to retrieve</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="rename">True to rename deleted entries, false otherwise</param>
/// <returns>True if all data was downloaded, false otherwise</returns>
public bool DownloadSingleSiteID(int id, string? outDir, bool rename)
{
// If no output directory is defined, use the current directory instead
if (string.IsNullOrEmpty(outDir))
outDir = Environment.CurrentDirectory;
string paddedId = id.ToString().PadLeft(6, '0');
string paddedIdDir = Path.Combine(outDir, paddedId);
Console.WriteLine($"Processing ID: {paddedId}");
try
{
string discPage = string.Empty;
// Try up to 3 times to retrieve the data
for (int i = 0; i < 3; i++)
{
try
{
discPage = DownloadString(string.Format(Constants.DiscPageUrl, +id));
break;
}
catch { }
}
if (discPage.Contains($"Disc with ID \"{id}\" doesn't exist"))
{
try
{
if (rename)
{
if (Directory.Exists(paddedIdDir) && rename)
Directory.Move(paddedIdDir, paddedIdDir + "-deleted");
else
Directory.CreateDirectory(paddedIdDir + "-deleted");
}
}
catch { }
Console.WriteLine($"ID {paddedId} could not be found!");
return false;
}
// Check if the page has been updated since the last time it was downloaded, if possible
if (File.Exists(Path.Combine(paddedIdDir, "disc.html")))
{
// Read in the cached file
var oldDiscPage = File.ReadAllText(Path.Combine(paddedIdDir, "disc.html"));
// Check for the last modified date in both pages
var oldResult = Constants.LastModifiedRegex.Match(oldDiscPage);
var newResult = Constants.LastModifiedRegex.Match(discPage);
// If both pages contain the same modified date, skip it
if (oldResult.Success && newResult.Success && oldResult.Groups[1].Value == newResult.Groups[1].Value)
{
Console.WriteLine($"ID {paddedId} has not been changed since last download");
return false;
}
// If neither page contains a modified date, skip it
else if (!oldResult.Success && !newResult.Success)
{
Console.WriteLine($"ID {paddedId} has not been changed since last download");
return false;
}
}
// Create ID subdirectory
Directory.CreateDirectory(paddedIdDir);
// View Edit History
if (discPage.Contains($"<a href=\"/disc/{id}/changes/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.ChangesExt, Path.Combine(paddedIdDir, "changes.html"));
// CUE
if (discPage.Contains($"<a href=\"/disc/{id}/cue/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.CueExt, Path.Combine(paddedIdDir, paddedId + ".cue"));
// Edit disc
if (discPage.Contains($"<a href=\"/disc/{id}/edit/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.EditExt, Path.Combine(paddedIdDir, "edit.html"));
// GDI
if (discPage.Contains($"<a href=\"/disc/{id}/gdi/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.GdiExt, Path.Combine(paddedIdDir, paddedId + ".gdi"));
// KEYS
if (discPage.Contains($"<a href=\"/disc/{id}/key/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.KeyExt, Path.Combine(paddedIdDir, paddedId + ".key"));
// LSD
if (discPage.Contains($"<a href=\"/disc/{id}/lsd/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.LsdExt, Path.Combine(paddedIdDir, paddedId + ".lsd"));
// MD5
if (discPage.Contains($"<a href=\"/disc/{id}/md5/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.Md5Ext, Path.Combine(paddedIdDir, paddedId + ".md5"));
// Review WIP entry
if (Constants.NewDiscRegex.IsMatch(discPage))
{
var match = Constants.NewDiscRegex.Match(discPage);
DownloadFile(string.Format(Constants.WipDiscPageUrl, match.Groups[2].Value), Path.Combine(paddedIdDir, "newdisc.html"));
}
// SBI
if (discPage.Contains($"<a href=\"/disc/{id}/sbi/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.SbiExt, Path.Combine(paddedIdDir, paddedId + ".sbi"));
// SFV
if (discPage.Contains($"<a href=\"/disc/{id}/sfv/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.SfvExt, Path.Combine(paddedIdDir, paddedId + ".sfv"));
// SHA1
if (discPage.Contains($"<a href=\"/disc/{id}/sha1/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.Sha1Ext, Path.Combine(paddedIdDir, paddedId + ".sha1"));
// HTML (Last in case of errors)
using (var discStreamWriter = File.CreateText(Path.Combine(paddedIdDir, "disc.html")))
{
discStreamWriter.Write(discPage);
}
Console.WriteLine($"ID {paddedId} has been successfully downloaded");
return true;
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return false;
}
}
/// <summary>
/// Download data for an individual WIP ID, if possible
/// </summary>
/// <param name="id">Redump WIP disc ID to retrieve</param>
/// <returns>String containing the page contents if successful, null on error</returns>
public string? DownloadSingleWIPID(int id)
{
string paddedId = id.ToString().PadLeft(6, '0');
Console.WriteLine($"Processing ID: {paddedId}");
try
{
string discPage = string.Empty;
// Try up to 3 times to retrieve the data
for (int i = 0; i < 3; i++)
{
try
{
discPage = DownloadString(string.Format(Constants.WipDiscPageUrl, +id));
break;
}
catch { }
}
if (discPage.Contains($"WIP disc with ID \"{id}\" doesn't exist"))
{
Console.WriteLine($"ID {paddedId} could not be found!");
return null;
}
Console.WriteLine($"ID {paddedId} has been successfully downloaded");
return discPage;
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return null;
}
}
/// <summary>
/// Download data for an individual WIP ID, if possible
/// </summary>
/// <param name="id">Redump WIP disc ID to retrieve</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="rename">True to rename deleted entries, false otherwise</param>
/// <returns>True if all data was downloaded, false otherwise</returns>
public bool DownloadSingleWIPID(int id, string? outDir, bool rename)
{
// If no output directory is defined, use the current directory instead
if (string.IsNullOrEmpty(outDir))
outDir = Environment.CurrentDirectory;
string paddedId = id.ToString().PadLeft(6, '0');
string paddedIdDir = Path.Combine(outDir, paddedId);
Console.WriteLine($"Processing ID: {paddedId}");
try
{
string discPage = string.Empty;
// Try up to 3 times to retrieve the data
for (int i = 0; i < 3; i++)
{
try
{
discPage = DownloadString(string.Format(Constants.WipDiscPageUrl, +id));
break;
}
catch { }
}
if (discPage.Contains($"WIP disc with ID \"{id}\" doesn't exist"))
{
try
{
if (rename)
{
if (Directory.Exists(paddedIdDir) && rename)
Directory.Move(paddedIdDir, paddedIdDir + "-deleted");
else
Directory.CreateDirectory(paddedIdDir + "-deleted");
}
}
catch { }
Console.WriteLine($"ID {paddedId} could not be found!");
return false;
}
// Check if the page has been updated since the last time it was downloaded, if possible
if (File.Exists(Path.Combine(paddedIdDir, "disc.html")))
{
// Read in the cached file
var oldDiscPage = File.ReadAllText(Path.Combine(paddedIdDir, "disc.html"));
// Check for the full match ID in both pages
var oldResult = Constants.FullMatchRegex.Match(oldDiscPage);
var newResult = Constants.FullMatchRegex.Match(discPage);
// If both pages contain the same ID, skip it
if (oldResult.Success && newResult.Success && oldResult.Groups[1].Value == newResult.Groups[1].Value)
{
Console.WriteLine($"ID {paddedId} has not been changed since last download");
return false;
}
// If neither page contains an ID, skip it
else if (!oldResult.Success && !newResult.Success)
{
Console.WriteLine($"ID {paddedId} has not been changed since last download");
return false;
}
}
// Create ID subdirectory
Directory.CreateDirectory(paddedIdDir);
// HTML
using (var discStreamWriter = File.CreateText(Path.Combine(paddedIdDir, "disc.html")))
{
discStreamWriter.Write(discPage);
}
Console.WriteLine($"ID {paddedId} has been successfully downloaded");
return true;
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return false;
}
}
#endregion
#region Helpers
/// <summary>
/// Download a set of packs
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="system">Systems to download packs for</param>
/// <param name="title">Name of the pack that is downloading</param>
public Dictionary<RedumpSystem, byte[]> DownloadPacks(string url, RedumpSystem?[] systems, string title)
{
var packsDictionary = new Dictionary<RedumpSystem, byte[]>();
Console.WriteLine($"Downloading {title}");
foreach (var system in systems)
{
// If the system is invalid, we can't do anything
if (system == null || !system.IsAvailable())
continue;
// If we didn't have credentials
if (!LoggedIn && system.IsBanned())
continue;
// If the system is unknown, we can't do anything
string? longName = system.LongName();
if (string.IsNullOrEmpty(longName))
continue;
Console.Write($"\r{longName}{new string(' ', Console.BufferWidth - longName!.Length - 1)}");
byte[]? pack = DownloadSinglePack(url, system);
if (pack != null)
packsDictionary.Add(system.Value, pack);
}
Console.Write($"\rComplete!{new string(' ', Console.BufferWidth - 10)}");
Console.WriteLine();
return packsDictionary;
}
/// <summary>
/// Download a set of packs
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="system">Systems to download packs for</param>
/// <param name="title">Name of the pack that is downloading</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="subfolder">Named subfolder for the pack, used optionally</param>
public void DownloadPacks(string url, RedumpSystem?[] systems, string title, string? outDir, string? subfolder)
{
Console.WriteLine($"Downloading {title}");
foreach (var system in systems)
{
// If the system is invalid, we can't do anything
if (system == null || !system.IsAvailable())
continue;
// If we didn't have credentials
if (!LoggedIn && system.IsBanned())
continue;
// If the system is unknown, we can't do anything
string? longName = system.LongName();
if (string.IsNullOrEmpty(longName))
continue;
Console.Write($"\r{longName}{new string(' ', Console.BufferWidth - longName!.Length - 1)}");
DownloadSinglePack(url, system, outDir, subfolder);
}
Console.Write($"\rComplete!{new string(' ', Console.BufferWidth - 10)}");
Console.WriteLine();
}
/// <summary>
/// Move a tempfile to a new name unless it already exists, in which case, delete the tempfile
/// </summary>
/// <param name="tempfile">Path to existing temporary file</param>
/// <param name="newfile">Path to new output file</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="subfolder">Optional subfolder to append to the path</param>
private static void MoveOrDelete(string tempfile, string? newfile, string outDir, string? subfolder)
{
if (!string.IsNullOrEmpty(newfile))
{
if (!string.IsNullOrEmpty(subfolder))
{
if (!Directory.Exists(Path.Combine(outDir, subfolder)))
Directory.CreateDirectory(Path.Combine(outDir, subfolder));
newfile = Path.Combine(subfolder, newfile);
}
if (File.Exists(Path.Combine(outDir, newfile)))
File.Delete(tempfile);
else
File.Move(tempfile, Path.Combine(outDir, newfile));
}
else
{
File.Delete(tempfile);
}
}
#endregion
}
}
#endif

publish-nix.sh Executable file (151 lines)

@@ -0,0 +1,151 @@
#! /bin/bash
# This shell script assumes the following:
# - .NET 9.0 (or newer) SDK is installed and in PATH
#
# If any of these are not satisfied, the operation may fail
# in an unpredictable way and result in an incomplete output.
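# Example usage (flags correspond to the getopts options below):
#   ./publish-nix.sh -u -d    # build all frameworks, include debug builds
#   ./publish-nix.sh -b       # skip building, only create archives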
# Optional parameters
USE_ALL=false
INCLUDE_DEBUG=false
NO_BUILD=false
NO_ARCHIVE=false
while getopts "udba" OPTION; do
case $OPTION in
u)
USE_ALL=true
;;
d)
INCLUDE_DEBUG=true
;;
b)
NO_BUILD=true
;;
a)
NO_ARCHIVE=true
;;
*)
echo "Invalid option provided"
exit 1
;;
esac
done
# Set the current directory as a variable
BUILD_FOLDER=$PWD
# Set the current commit hash
COMMIT=`git log --pretty=%H -1`
# Output the selected options
echo "Selected Options:"
echo " Use all frameworks (-u) $USE_ALL"
echo " Include debug builds (-d) $INCLUDE_DEBUG"
echo " No build (-b) $NO_BUILD"
echo " No archive (-a) $NO_ARCHIVE"
echo " "
# Create the build matrix arrays
FRAMEWORKS=("net9.0")
RUNTIMES=("win-x86" "win-x64" "win-arm64" "linux-x64" "linux-arm64" "osx-x64" "osx-arm64")
# Use expanded lists, if requested
if [ $USE_ALL = true ]
then
FRAMEWORKS=("net20" "net35" "net40" "net452" "net462" "net472" "net48" "netcoreapp3.1" "net5.0" "net6.0" "net7.0" "net8.0" "net9.0")
fi
# Create the filter arrays
SINGLE_FILE_CAPABLE=("net5.0" "net6.0" "net7.0" "net8.0" "net9.0")
VALID_APPLE_FRAMEWORKS=("net6.0" "net7.0" "net8.0" "net9.0")
VALID_CROSS_PLATFORM_FRAMEWORKS=("netcoreapp3.1" "net5.0" "net6.0" "net7.0" "net8.0" "net9.0")
VALID_CROSS_PLATFORM_RUNTIMES=("win-arm64" "linux-x64" "linux-arm64" "osx-x64" "osx-arm64")
# Only build if requested
if [ $NO_BUILD = false ]
then
# Restore Nuget packages for all builds
echo "Restoring Nuget packages"
dotnet restore
# Create Nuget Package
dotnet pack SabreTools.RedumpLib/SabreTools.RedumpLib.csproj --output $BUILD_FOLDER
# Build Tool
for FRAMEWORK in "${FRAMEWORKS[@]}"; do
for RUNTIME in "${RUNTIMES[@]}"; do
# Output the current build
echo "===== Build Tool - $FRAMEWORK, $RUNTIME ====="
# If we have an invalid combination of framework and runtime
if [[ ! $(echo ${VALID_CROSS_PLATFORM_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ]]; then
if [[ $(echo ${VALID_CROSS_PLATFORM_RUNTIMES[@]} | fgrep -w $RUNTIME) ]]; then
echo "Skipped due to invalid combination"
continue
fi
fi
# If we have Apple silicon but an unsupported framework
if [[ ! $(echo ${VALID_APPLE_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ]]; then
if [ $RUNTIME = "osx-arm64" ]; then
echo "Skipped due to no Apple Silicon support"
continue
fi
fi
# Only .NET 5 and above can publish to a single file
if [[ $(echo ${SINGLE_FILE_CAPABLE[@]} | fgrep -w $FRAMEWORK) ]]; then
# Only include Debug if set
if [ $INCLUDE_DEBUG = true ]; then
dotnet publish RedumpTool/RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true
fi
dotnet publish RedumpTool/RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true -p:DebugType=None -p:DebugSymbols=false
else
# Only include Debug if set
if [ $INCLUDE_DEBUG = true ]; then
dotnet publish RedumpTool/RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT
fi
dotnet publish RedumpTool/RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:DebugType=None -p:DebugSymbols=false
fi
done
done
fi
# Only create archives if requested
if [ $NO_ARCHIVE = false ]; then
# Create Tool archives
for FRAMEWORK in "${FRAMEWORKS[@]}"; do
for RUNTIME in "${RUNTIMES[@]}"; do
# Output the current build
echo "===== Archive Tool - $FRAMEWORK, $RUNTIME ====="
# If we have an invalid combination of framework and runtime
if [[ ! $(echo ${VALID_CROSS_PLATFORM_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ]]; then
if [[ $(echo ${VALID_CROSS_PLATFORM_RUNTIMES[@]} | fgrep -w $RUNTIME) ]]; then
echo "Skipped due to invalid combination"
continue
fi
fi
# If we have Apple silicon but an unsupported framework
if [[ ! $(echo ${VALID_APPLE_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ]]; then
if [ $RUNTIME = "osx-arm64" ]; then
echo "Skipped due to no Apple Silicon support"
continue
fi
fi
# Only include Debug if set
if [ $INCLUDE_DEBUG = true ]; then
cd $BUILD_FOLDER/RedumpTool/bin/Debug/${FRAMEWORK}/${RUNTIME}/publish/
zip -r $BUILD_FOLDER/RedumpTool_${FRAMEWORK}_${RUNTIME}_debug.zip .
fi
cd $BUILD_FOLDER/RedumpTool/bin/Release/${FRAMEWORK}/${RUNTIME}/publish/
zip -r $BUILD_FOLDER/RedumpTool_${FRAMEWORK}_${RUNTIME}_release.zip .
done
done
# Reset the directory
cd $BUILD_FOLDER
fi

publish-win.ps1 Normal file (135 lines)

@@ -0,0 +1,135 @@
# This PowerShell script assumes the following:
# - .NET 9.0 (or newer) SDK is installed and in PATH
#
# If any of these are not satisfied, the operation may fail
# in an unpredictable way and result in an incomplete output.
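# Example usage (switch names correspond to the param() aliases below):
#   .\publish-win.ps1 -UseAll -IncludeDebug    # build all frameworks, include debug builds
#   .\publish-win.ps1 -NoBuild                 # skip building, only create archives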
# Optional parameters
param(
[Parameter(Mandatory = $false)]
[Alias("UseAll")]
[switch]$USE_ALL,
[Parameter(Mandatory = $false)]
[Alias("IncludeDebug")]
[switch]$INCLUDE_DEBUG,
[Parameter(Mandatory = $false)]
[Alias("NoBuild")]
[switch]$NO_BUILD,
[Parameter(Mandatory = $false)]
[Alias("NoArchive")]
[switch]$NO_ARCHIVE
)
# Set the current directory as a variable
$BUILD_FOLDER = $PSScriptRoot
# Set the current commit hash
$COMMIT = git log --pretty=format:"%H" -1
# Output the selected options
Write-Host "Selected Options:"
Write-Host " Use all frameworks (-UseAll) $USE_ALL"
Write-Host " Include debug builds (-IncludeDebug) $INCLUDE_DEBUG"
Write-Host " No build (-NoBuild) $NO_BUILD"
Write-Host " No archive (-NoArchive) $NO_ARCHIVE"
Write-Host " "
# Create the build matrix arrays
$FRAMEWORKS = @('net9.0')
$RUNTIMES = @('win-x86', 'win-x64', 'win-arm64', 'linux-x64', 'linux-arm64', 'osx-x64', 'osx-arm64')
# Use expanded lists, if requested
if ($USE_ALL.IsPresent)
{
$FRAMEWORKS = @('net20', 'net35', 'net40', 'net452', 'net462', 'net472', 'net48', 'netcoreapp3.1', 'net5.0', 'net6.0', 'net7.0', 'net8.0', 'net9.0')
}
# Create the filter arrays
$SINGLE_FILE_CAPABLE = @('net5.0', 'net6.0', 'net7.0', 'net8.0', 'net9.0')
$VALID_APPLE_FRAMEWORKS = @('net6.0', 'net7.0', 'net8.0', 'net9.0')
$VALID_CROSS_PLATFORM_FRAMEWORKS = @('netcoreapp3.1', 'net5.0', 'net6.0', 'net7.0', 'net8.0', 'net9.0')
$VALID_CROSS_PLATFORM_RUNTIMES = @('win-arm64', 'linux-x64', 'linux-arm64', 'osx-x64', 'osx-arm64')
# Only build if requested
if (!$NO_BUILD.IsPresent)
{
# Restore Nuget packages for all builds
Write-Host "Restoring Nuget packages"
dotnet restore
# Create Nuget Package
dotnet pack SabreTools.RedumpLib\SabreTools.RedumpLib.csproj --output $BUILD_FOLDER
# Build Tool
foreach ($FRAMEWORK in $FRAMEWORKS) {
foreach ($RUNTIME in $RUNTIMES) {
# Output the current build
Write-Host "===== Build Tool - $FRAMEWORK, $RUNTIME ====="
# If we have an invalid combination of framework and runtime
if ($VALID_CROSS_PLATFORM_FRAMEWORKS -notcontains $FRAMEWORK -and $VALID_CROSS_PLATFORM_RUNTIMES -contains $RUNTIME) {
Write-Host "Skipped due to invalid combination"
continue
}
# If we have Apple silicon but an unsupported framework
if ($VALID_APPLE_FRAMEWORKS -notcontains $FRAMEWORK -and $RUNTIME -eq 'osx-arm64') {
Write-Host "Skipped due to no Apple Silicon support"
continue
}
# Only .NET 5 and above can publish to a single file
if ($SINGLE_FILE_CAPABLE -contains $FRAMEWORK) {
# Only include Debug if set
if ($INCLUDE_DEBUG.IsPresent) {
dotnet publish RedumpTool\RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true
}
dotnet publish RedumpTool\RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true -p:DebugType=None -p:DebugSymbols=false
}
else {
# Only include Debug if set
if ($INCLUDE_DEBUG.IsPresent) {
dotnet publish RedumpTool\RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT
}
dotnet publish RedumpTool\RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:DebugType=None -p:DebugSymbols=false
}
}
}
}
# Only create archives if requested
if (!$NO_ARCHIVE.IsPresent) {
# Create Tool archives
foreach ($FRAMEWORK in $FRAMEWORKS) {
foreach ($RUNTIME in $RUNTIMES) {
# Output the current build
Write-Host "===== Archive Tool - $FRAMEWORK, $RUNTIME ====="
# If we have an invalid combination of framework and runtime
if ($VALID_CROSS_PLATFORM_FRAMEWORKS -notcontains $FRAMEWORK -and $VALID_CROSS_PLATFORM_RUNTIMES -contains $RUNTIME) {
Write-Host "Skipped due to invalid combination"
continue
}
# If we have Apple silicon but an unsupported framework
if ($VALID_APPLE_FRAMEWORKS -notcontains $FRAMEWORK -and $RUNTIME -eq 'osx-arm64') {
Write-Host "Skipped due to no Apple Silicon support"
continue
}
# Only include Debug if set
if ($INCLUDE_DEBUG.IsPresent) {
Set-Location -Path $BUILD_FOLDER\RedumpTool\bin\Debug\${FRAMEWORK}\${RUNTIME}\publish\
7z a -tzip $BUILD_FOLDER\RedumpTool_${FRAMEWORK}_${RUNTIME}_debug.zip *
}
Set-Location -Path $BUILD_FOLDER\RedumpTool\bin\Release\${FRAMEWORK}\${RUNTIME}\publish\
7z a -tzip $BUILD_FOLDER\RedumpTool_${FRAMEWORK}_${RUNTIME}_release.zip *
}
}
# Reset the directory
Set-Location -Path $PSScriptRoot
}