Compare commits


116 Commits
1.4.0...main

Author SHA1 Message Date
Matt Nadareski
30789237f1 Formatting cleanup 2026-02-02 09:14:27 -05:00
Deterous
0b309e1cb8 Return empty list for no ID matches (#14)
* Return empty list for no ID matches

* fix

* Deal with null
2026-02-02 09:02:39 -05:00
Matt Nadareski
e832723b40 Add Psion as non-redump system 2026-01-29 12:27:26 -05:00
Matt Nadareski
d5e013344e Bump version 2026-01-27 13:45:52 -05:00
Matt Nadareski
dfeb8cc269 Use TheoryData type in tests, cleanup 2026-01-26 23:48:23 -05:00
Matt Nadareski
a5190b0a9b Use new null checking syntax 2026-01-25 17:01:14 -05:00
Matt Nadareski
2077eb647a Framework gate pragma disable 2026-01-25 16:41:08 -05:00
Matt Nadareski
4c2303ff25 Add editorconfig, fix issues 2026-01-25 16:32:02 -05:00
Matt Nadareski
43ca04363c Fix whitespace 2026-01-24 09:47:43 -05:00
Matt Nadareski
41883cf4a3 Fix ordering to be alphabetical 2026-01-24 09:23:54 -05:00
HeroponRikiBestest
07b529e9e5 Add Steam AppID and DepotID tags (#13)
* Initial commit

* Fix format

* Fix naming

* Minor formatting fixes
2026-01-24 09:22:39 -05:00
Matt Nadareski
53fd163f82 Add 2K Games ID 2026-01-22 13:37:12 -05:00
Matt Nadareski
a2b9fd1095 Fix extension test 2026-01-19 00:00:57 -05:00
Matt Nadareski
063e9739f0 Add even more non-redump systems 2026-01-18 22:09:05 -05:00
Matt Nadareski
5193300c67 Add Sega Nu 1.1 and 2 as systems 2026-01-17 21:32:43 -05:00
Matt Nadareski
65e34e55cd Add raw cuesheet bytes 2025-12-10 08:39:21 -05:00
Matt Nadareski
f66c81bbf0 Format GHA definitions 2025-11-17 08:37:00 -05:00
Matt Nadareski
6b015f91d6 Fix one property group 2025-11-13 20:38:51 -05:00
Matt Nadareski
67ee824e1b Bump version 2025-11-13 08:15:21 -05:00
Matt Nadareski
d256187a0a Add support for .NET 10 2025-11-13 08:09:47 -05:00
Matt Nadareski
6589fec1bc Update rolling tag 2025-10-26 20:28:50 -04:00
Matt Nadareski
c6832a5c2d Bump version 2025-10-22 08:58:57 -04:00
Matt Nadareski
77d922a9bc Minor collection cleanup in tests 2025-10-22 08:50:18 -04:00
Matt Nadareski
7b590e9bba Separate out sections to new files for maintainability 2025-10-22 08:41:57 -04:00
Matt Nadareski
0968bfe786 Ensure adding IDs only after 2025-10-21 18:41:53 -04:00
Matt Nadareski
5acb6ee128 Minor fixes 2025-10-21 18:36:15 -04:00
Matt Nadareski
5dc9ef27ef Update gitignore 2025-10-21 18:32:30 -04:00
Matt Nadareski
d5ccc0c65c Simplify downloader creation 2025-10-21 18:29:04 -04:00
Matt Nadareski
0ef9c6f87a Add console writes on failure cases 2025-10-21 18:11:20 -04:00
Matt Nadareski
638630ccab Disallow some fields from being null 2025-10-21 17:44:15 -04:00
Matt Nadareski
6c3afd25d4 Formatting pass 2025-10-21 17:33:12 -04:00
Matt Nadareski
02572c75fc Make timeout configurable by implementer 2025-10-21 17:30:31 -04:00
Matt Nadareski
cb5b42166d Reduce tab-to-space size to 2 2025-10-16 08:02:09 -04:00
Matt Nadareski
97ea30228e Bump version 2025-10-12 09:00:39 -04:00
Deterous
675af1a619 Add XboxOne/SX TitleID (#12) 2025-10-12 08:56:09 -04:00
Matt Nadareski
28c3f54cc5 Update test packages 2025-10-07 16:21:47 -04:00
Matt Nadareski
51486950fa Bump version 2025-09-24 13:35:03 -04:00
Matt Nadareski
1b1ba94247 Remove reliance on Models library 2025-09-24 13:33:42 -04:00
Matt Nadareski
c4345d0c34 Bump version 2025-09-22 18:00:58 -04:00
Matt Nadareski
b88b877672 Update Nuget packages 2025-09-22 17:58:12 -04:00
Matt Nadareski
f0c5375ac5 There 2025-09-10 21:53:06 -04:00
Matt Nadareski
1f4cca7fa0 Bump version 2025-09-05 10:14:28 -04:00
Matt Nadareski
5161913d96 Update Nuget packages 2025-09-05 09:45:00 -04:00
Deterous
c10b21a485 Add Polymega system (#11)
* Add Polymega system

* Polymega detected by windows
2025-08-31 01:22:51 -04:00
Matt Nadareski
99adad582c Forgot this test update 2025-08-23 09:52:31 -04:00
Matt Nadareski
6956617386 Bump version 2025-08-23 09:48:43 -04:00
Matt Nadareski
b5522c5bb0 Add High Siera volume descriptor pseudo-tag 2025-08-23 09:48:07 -04:00
Matt Nadareski
889cedb87a Bump version 2025-07-24 09:01:21 -04:00
Matt Nadareski
eba722b23a Be consistent about end-of-file newlines 2025-07-24 08:59:57 -04:00
Matt Nadareski
5d77af7e97 Add .NET Standard 2.0 and 2.1 2025-07-24 08:55:29 -04:00
Matt Nadareski
d9cb9394e0 Update nuget packages 2025-07-24 08:48:37 -04:00
Matt Nadareski
dfb5ac762a Bump version 2025-07-21 12:08:21 -04:00
Matt Nadareski
2459165990 Add Interplay ID pseudo-tag 2025-07-20 20:40:59 -04:00
Matt Nadareski
e16ffcb78a Bump version 2025-07-11 12:26:54 -04:00
Matt Nadareski
da27675588 Add PC/Mac Hybrid pseudo-tag 2025-07-11 12:16:40 -04:00
Matt Nadareski
0021c7653c Bump version 2025-05-23 12:39:49 -04:00
Matt Nadareski
091a32b223 Add new non-tag site codes 2025-05-23 12:35:16 -04:00
Matt Nadareski
f29a50c3b0 Bump version 2025-05-01 10:54:09 -04:00
Matt Nadareski
edf6e12371 Update Nuget packages 2025-05-01 10:53:21 -04:00
Matt Nadareski
146b2780c5 Fix skipped multiline site codes 2025-05-01 10:51:26 -04:00
Matt Nadareski
60ce7cbfa0 Bump version 2025-04-30 20:51:59 -04:00
Deterous
39fe46a162 Remove obsolete PhilipsCDiDigitalVideo system (#10)
* Remove obsolete PhilipsCDiDigitalVideo system

* Delete obsolete system
2025-04-17 08:04:02 -04:00
Matt Nadareski
a8674a21e4 Add 30 second timeout to web operations 2025-04-13 21:20:00 -04:00
Matt Nadareski
65f2d53a3f Fix how conditions are used for references 2025-02-25 21:16:16 -05:00
Matt Nadareski
0bc869543a Bump version 2024-12-31 21:10:46 -05:00
Matt Nadareski
aa7d513d2c Add Ring Perfect Audio Offset pseudo-tag 2024-12-31 21:09:11 -05:00
Matt Nadareski
3d35129529 Update copyright 2024-12-30 21:27:26 -05:00
Matt Nadareski
ec563938ba Remove unnecessary action step 2024-12-30 21:26:16 -05:00
Matt Nadareski
f0f3a1a194 Bump version 2024-12-28 13:52:24 -05:00
Deterous
55f5262198 Add new Protection pseudo site code (#9)
* Add Protection pseudo site tag

* Use new sitecode in redumplib
2024-12-27 12:33:35 -05:00
Matt Nadareski
1d247b1f6f Use string comparison on tab replacement when possible 2024-12-25 22:13:26 -05:00
Matt Nadareski
32c57736ae Duplicate write offset field for convenience (fixes #8) 2024-12-25 22:03:55 -05:00
Deterous
8ab312ba8b Convert <Tab> (#7) 2024-12-24 21:07:51 -05:00
Matt Nadareski
3ea01ca933 Ensure .NET versions are installed for testing 2024-12-19 10:52:22 -05:00
Matt Nadareski
27d99f7429 Bump version 2024-12-16 14:35:02 -05:00
Matt Nadareski
8b147f2041 Change empty language list message 2024-12-16 14:33:22 -05:00
Matt Nadareski
9c7a143d52 Add to publish scripts, not rolling build 2024-12-16 14:26:55 -05:00
Matt Nadareski
30bbef7bba Add RedumpTool as a non-building component 2024-12-16 14:23:43 -05:00
Matt Nadareski
17da564b00 Fix old .NET support 2024-12-16 14:22:07 -05:00
Matt Nadareski
073176cccb Update Models to 1.5.8 2024-12-16 14:21:53 -05:00
Matt Nadareski
0434e63e34 Allow symbols to be packed 2024-12-16 14:21:34 -05:00
Matt Nadareski
2b75eb44cd Use publish script and update README 2024-12-06 11:34:34 -05:00
Matt Nadareski
10eecc866e Bump version 2024-12-05 22:11:48 -05:00
Matt Nadareski
84fa2f93ea Fix consecutive empty line logic 2024-12-05 21:15:26 -05:00
Matt Nadareski
5a92c0fc98 Bump version 2024-12-01 22:58:47 -05:00
Matt Nadareski
4ffc1b3160 Fix multi-newline formatting, add tests 2024-12-01 22:44:12 -05:00
Matt Nadareski
ffa8f2b16e Update ToDiscType and add tests 2024-12-01 22:31:19 -05:00
Matt Nadareski
70e3e074cc Update some extensions, update tests 2024-12-01 22:14:49 -05:00
Matt Nadareski
4858b4e459 None of these are TODOs on my part 2024-12-01 21:32:45 -05:00
Matt Nadareski
9495cd32c7 Handle some TODO items 2024-12-01 21:31:58 -05:00
Matt Nadareski
071571870e Add ToYesNo tests 2024-12-01 21:20:47 -05:00
Matt Nadareski
f03cd40181 Use automatic system name mapping 2024-12-01 21:14:08 -05:00
Matt Nadareski
ea51726645 Fill out more tests 2024-12-01 21:09:15 -05:00
Matt Nadareski
f0633d5aa7 Framework only matters for executable 2024-11-30 21:39:44 -05:00
Matt Nadareski
4c076aec0c Update packages 2024-11-30 21:38:41 -05:00
Matt Nadareski
2ec9d6a4a0 Use more targeted library for old .NET 2024-11-18 19:53:44 -05:00
Matt Nadareski
415b488005 Bump version 2024-11-14 22:23:20 -05:00
Matt Nadareski
5d300c9975 Make download helpers public for ease 2024-11-14 22:22:52 -05:00
Matt Nadareski
304236774f Bump version 2024-11-13 01:54:40 -05:00
Matt Nadareski
9924289c48 Fix casting issues 2024-11-13 01:54:24 -05:00
Matt Nadareski
240eb74ead Bump version 2024-11-13 01:30:42 -05:00
Matt Nadareski
a64b109d2c Remove unncessary Linq usage 2024-11-13 01:29:36 -05:00
Matt Nadareski
3e0f9b5410 Add .NET 9 to target frameworks 2024-11-13 00:59:20 -05:00
Matt Nadareski
668be418ac Bump version 2024-10-18 12:30:30 -04:00
Matt Nadareski
7d184a634e Always return ID list, if possible 2024-10-18 12:26:36 -04:00
Matt Nadareski
67aed0899d Don't null foreign title if missing 2024-10-18 11:58:29 -04:00
Matt Nadareski
9fbaf1a187 Bump version 2024-10-04 01:42:58 -04:00
Matt Nadareski
fe8686a2bb Allow forward slashes in queries sometimes 2024-10-04 01:41:05 -04:00
Matt Nadareski
652270c8c7 Add publish scripts 2024-10-01 13:56:40 -04:00
Matt Nadareski
905d8a94fb Bump version 2024-10-01 13:55:33 -04:00
Matt Nadareski
3ee8416695 Remove unnecessary tuples 2024-10-01 13:53:03 -04:00
Matt Nadareski
49fa06da55 Remove threading bridge package (unused) 2024-10-01 04:27:31 -04:00
Matt Nadareski
70e29afd89 Remove Linq requirement from old .NET 2024-10-01 04:25:35 -04:00
Matt Nadareski
2a402a53db Remove ValueTuple packages (usused) 2024-10-01 03:21:07 -04:00
Matt Nadareski
66bb3b75b2 Bump version 2024-07-24 11:05:25 -04:00
Matt Nadareski
9d7d46673a Fix deserializing submission from file 2024-07-23 22:18:06 -04:00
67 changed files with 7744 additions and 3846 deletions

167
.editorconfig Normal file

@@ -0,0 +1,167 @@
# top-most EditorConfig file
root = true
# C# files
[*.cs]
# Indentation and spacing
charset = utf-8
indent_size = 4
indent_style = space
tab_width = 4
trim_trailing_whitespace = true
# New line preferences
end_of_line = lf
insert_final_newline = true
max_line_length = unset
# using directive preferences
csharp_using_directive_placement = outside_namespace
dotnet_diagnostic.IDE0005.severity = error
# Code-block preferences
csharp_style_namespace_declarations = block_scoped
csharp_style_prefer_method_group_conversion = true
csharp_style_prefer_top_level_statements = false
# Expression-level preferences
csharp_prefer_simple_default_expression = true
csharp_style_inlined_variable_declaration = true
csharp_style_unused_value_assignment_preference = discard_variable
csharp_style_unused_value_expression_statement_preference = discard_variable
dotnet_diagnostic.IDE0001.severity = warning
dotnet_diagnostic.IDE0002.severity = warning
dotnet_diagnostic.IDE0004.severity = warning
dotnet_diagnostic.IDE0010.severity = error
dotnet_diagnostic.IDE0051.severity = warning
dotnet_diagnostic.IDE0052.severity = warning
dotnet_diagnostic.IDE0072.severity = warning
dotnet_diagnostic.IDE0080.severity = warning
dotnet_diagnostic.IDE0100.severity = error
dotnet_diagnostic.IDE0110.severity = error
dotnet_diagnostic.IDE0120.severity = warning
dotnet_diagnostic.IDE0121.severity = warning
dotnet_diagnostic.IDE0240.severity = error
dotnet_diagnostic.IDE0241.severity = error
dotnet_style_coalesce_expression = true
dotnet_style_namespace_match_folder = false
dotnet_style_null_propagation = true
dotnet_style_prefer_auto_properties = true
dotnet_style_prefer_collection_expression = when_types_loosely_match
dotnet_style_prefer_is_null_check_over_reference_equality_method = true
dotnet_style_prefer_compound_assignment = true
csharp_style_prefer_simple_property_accessors = true
dotnet_style_prefer_simplified_interpolation = true
dotnet_style_prefer_simplified_boolean_expressions = true
csharp_style_prefer_unbound_generic_type_in_nameof = true
# Field preferences
dotnet_diagnostic.IDE0044.severity = warning
dotnet_style_readonly_field = true
# Language keyword vs. framework types preferences
dotnet_diagnostic.IDE0049.severity = error
dotnet_style_predefined_type_for_locals_parameters_members = true
dotnet_style_predefined_type_for_member_access = true
# Modifier preferences
csharp_prefer_static_local_function = true
csharp_style_prefer_readonly_struct = true
dotnet_diagnostic.IDE0036.severity = warning
dotnet_diagnostic.IDE0040.severity = error
dotnet_diagnostic.IDE0380.severity = error
dotnet_style_require_accessibility_modifiers = always
# New-line preferences
dotnet_diagnostic.IDE2000.severity = warning
dotnet_diagnostic.IDE2002.severity = warning
dotnet_diagnostic.IDE2003.severity = warning
dotnet_diagnostic.IDE2004.severity = warning
dotnet_diagnostic.IDE2005.severity = warning
dotnet_diagnostic.IDE2006.severity = warning
csharp_style_allow_blank_line_after_colon_in_constructor_initializer_experimental = false
csharp_style_allow_blank_line_after_token_in_arrow_expression_clause_experimental = false
csharp_style_allow_blank_line_after_token_in_conditional_expression_experimental = false
csharp_style_allow_blank_lines_between_consecutive_braces_experimental = false
dotnet_style_allow_multiple_blank_lines_experimental = false
dotnet_style_allow_statement_immediately_after_block_experimental = false
# Null-checking preferences
csharp_style_conditional_delegate_call = true
# Parameter preferences
dotnet_code_quality_unused_parameters = all
dotnet_diagnostic.IDE0280.severity = error
# Parentheses preferences
dotnet_diagnostic.IDE0047.severity = warning
dotnet_diagnostic.IDE0048.severity = warning
dotnet_style_parentheses_in_arithmetic_binary_operators = always_for_clarity
dotnet_style_parentheses_in_other_binary_operators = always_for_clarity
dotnet_style_parentheses_in_other_operators = always_for_clarity
dotnet_style_parentheses_in_relational_binary_operators = always_for_clarity
# Pattern-matching preferences
dotnet_diagnostic.IDE0019.severity = warning
dotnet_diagnostic.IDE0020.severity = warning
dotnet_diagnostic.IDE0038.severity = warning
dotnet_diagnostic.IDE0066.severity = none
dotnet_diagnostic.IDE0083.severity = warning
dotnet_diagnostic.IDE0260.severity = warning
csharp_style_pattern_matching_over_as_with_null_check = true
csharp_style_pattern_matching_over_is_with_cast_check = true
csharp_style_prefer_not_pattern = true
csharp_style_prefer_pattern_matching = true
# this. and Me. preferences
dotnet_style_qualification_for_event = false
dotnet_style_qualification_for_field = false
dotnet_style_qualification_for_method = false
dotnet_style_qualification_for_property = false
# var preferences
csharp_style_var_for_built_in_types = false
csharp_style_var_when_type_is_apparent = true
# .NET formatting options
dotnet_separate_import_directive_groups = false
dotnet_sort_system_directives_first = true
# C# formatting options
csharp_indent_block_contents = true
csharp_indent_braces = false
csharp_indent_case_contents = true
csharp_indent_case_contents_when_block = false
csharp_indent_labels = one_less_than_current
csharp_indent_switch_labels = true
csharp_new_line_before_catch = true
csharp_new_line_before_else = true
csharp_new_line_before_finally = true
csharp_new_line_before_members_in_anonymous_types = true
csharp_new_line_before_members_in_object_initializers = true
csharp_new_line_before_open_brace = all
csharp_new_line_between_query_expression_clauses = true
csharp_preserve_single_line_blocks = true
csharp_preserve_single_line_statements = true
csharp_space_after_cast = false
csharp_space_after_colon_in_inheritance_clause = true
csharp_space_after_comma = true
csharp_space_after_dot = false
csharp_space_after_keywords_in_control_flow_statements = true
csharp_space_after_semicolon_in_for_statement = true
csharp_space_around_binary_operators = before_and_after
csharp_space_around_declaration_statements = false
csharp_space_before_colon_in_inheritance_clause = true
csharp_space_before_comma = false
csharp_space_before_dot = false
csharp_space_before_open_square_brackets = false
csharp_space_before_semicolon_in_for_statement = false
csharp_space_between_empty_square_brackets = false
csharp_space_between_method_call_empty_parameter_list_parentheses = false
csharp_space_between_method_call_name_and_opening_parenthesis = false
csharp_space_between_method_call_parameter_list_parentheses = false
csharp_space_between_method_declaration_empty_parameter_list_parentheses = false
csharp_space_between_method_declaration_parameter_list_parentheses = false
csharp_space_between_parentheses = false
csharp_space_between_square_brackets = false

48
.github/workflows/build_and_test.yml vendored Normal file

@@ -0,0 +1,48 @@
name: Build and Test
on:
push:
branches: ["main"]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
with:
fetch-depth: 0
- name: Setup .NET
uses: actions/setup-dotnet@v5
with:
dotnet-version: |
8.0.x
9.0.x
10.0.x
- name: Run tests
run: dotnet test
- name: Run publish script
run: ./publish-nix.sh -d
- name: Update rolling tag
run: |
git config user.name "github-actions[bot]"
git config user.email "github-actions[bot]@users.noreply.github.com"
git tag -f rolling
git push origin :refs/tags/rolling || true
git push origin rolling --force
- name: Upload to rolling
uses: ncipollo/release-action@v1.20.0
with:
allowUpdates: True
artifacts: "*.nupkg,*.snupkg"
body: "Last built commit: ${{ github.sha }}"
name: "Rolling Release"
prerelease: True
replacesArtifacts: True
tag: "rolling"
updateOnlyUnreleased: True


@@ -1,43 +0,0 @@
name: Nuget Pack
on:
push:
branches: [ "main" ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 8.0.x
- name: Restore dependencies
run: dotnet restore
- name: Pack
run: dotnet pack
- name: Upload build
uses: actions/upload-artifact@v4
with:
name: 'Nuget Package'
path: 'SabreTools.RedumpLib/bin/Release/*.nupkg'
- name: Upload to rolling
uses: ncipollo/release-action@v1.14.0
with:
allowUpdates: True
artifacts: 'SabreTools.RedumpLib/bin/Release/*.nupkg'
body: 'Last built commit: ${{ github.sha }}'
name: 'Rolling Release'
prerelease: True
replacesArtifacts: True
tag: "rolling"
updateOnlyUnreleased: True


@@ -3,15 +3,21 @@ name: Build PR
on: [pull_request]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 8.0.x
- name: Build
run: dotnet build
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- name: Setup .NET
uses: actions/setup-dotnet@v5
with:
dotnet-version: |
8.0.x
9.0.x
10.0.x
- name: Build
run: dotnet build
- name: Run tests
run: dotnet test

296
.gitignore vendored

@@ -1,15 +1,7 @@
*.swp
*.*~
project.lock.json
.DS_Store
*.pyc
nupkg/
# Visual Studio Code
.vscode
# Rider
.idea
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
# User-specific files
*.suo
@@ -17,6 +9,9 @@ nupkg/
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Build results
[Dd]ebug/
[Dd]ebugPublic/
@@ -24,15 +19,280 @@ nupkg/
[Rr]eleases/
x64/
x86/
build/
bld/
[Bb]in/
[Oo]bj/
[Oo]ut/
msbuild.log
msbuild.err
msbuild.wrn
[Ll]og/
# Visual Studio 2015
# Visual Studio 2015 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUNIT
*.VisualState.xml
TestResult.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# Benchmark Results
BenchmarkDotNet.Artifacts/
# .NET Core
project.lock.json
project.fragment.lock.json
artifacts/
**/Properties/launchSettings.json
*_i.c
*_p.c
*_i.h
*.ilk
*.meta
*.obj
*.pch
*.pdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# JustCode is a .NET coding add-in
.JustCode
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# AxoCover is a Code Coverage Tool
.axoCover/*
!.axoCover/settings.json
# Visual Studio code coverage results
*.coverage
*.coveragexml
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/
# NuGet Packages
*.nupkg
# The packages folder can be ignored because of Package Restore
**/packages/*
# except build/, which is used as an MSBuild target.
!**/packages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/packages/repositories.config
# NuGet v3's project.json files produces more ignorable files
*.nuget.props
*.nuget.targets
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
# SQL Server files
*.mdf
*.ldf
*.ndf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/
# Typescript v1 declaration files
typings/
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# JetBrains Rider
.idea/
*.sln.iml
# CodeRush
.cr/
# Python Tools for Visual Studio (PTVS)
__pycache__/
*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
*.tss
# Telerik's JustMock configuration file
*.jmconfig
# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs

22
.vscode/launch.json vendored Normal file

@@ -0,0 +1,22 @@
{
// Use IntelliSense to find out which attributes exist for C# debugging
// Use hover for the description of the existing attributes
// For further information visit https://github.com/OmniSharp/omnisharp-vscode/blob/master/debugger-launchjson.md
"version": "0.2.0",
"configurations": [
{
"name": ".NET Core Launch",
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build",
// If you have changed target frameworks, make sure to update the program path.
"program": "${workspaceFolder}/RedumpTool/bin/Debug/net10.0/RedumpTool.dll",
"args": [],
"cwd": "${workspaceFolder}/RedumpTool",
// For more information about the 'console' field, see https://aka.ms/VSCode-CS-LaunchJson-Console
"console": "internalConsole",
"stopAtEntry": false,
"justMyCode": false
}
]
}

24
.vscode/tasks.json vendored Normal file

@@ -0,0 +1,24 @@
{
// See https://go.microsoft.com/fwlink/?LinkId=733558
// for the documentation about the tasks.json format
"version": "2.0.0",
"tasks": [
{
"label": "build",
"command": "dotnet",
"type": "shell",
"args": [
"build",
// Ask dotnet build to generate full paths for file names.
"/property:GenerateFullPaths=true",
// Do not generate summary otherwise it leads to duplicate errors in Problems panel
"/consoleloggerparameters:NoSummary"
],
"group": "build",
"presentation": {
"reveal": "silent"
},
"problemMatcher": "$msCompile"
}
]
}

7
LICENSE Normal file

@@ -0,0 +1,7 @@
Copyright (c) 2018-2025 Matt Nadareski
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.


@@ -1,5 +1,13 @@
# SabreTools.RedumpLib
[![Build and Test](https://github.com/SabreTools/SabreTools.RedumpLib/actions/workflows/build_and_test.yml/badge.svg)](https://github.com/SabreTools/SabreTools.RedumpLib/actions/workflows/build_and_test.yml)
This library comprises interaction logic for [Redump](http://redump.org/). Because there is no formal API for the site, this library interacts with the site through normal HTTP methods. It includes a fairly comprehensive reference of supported parts of the site, including URLs, page information, and packs.
Find the link to the Nuget package [here](https://www.nuget.org/packages/SabreTools.RedumpLib).
## Releases
For the most recent stable build, download the latest release here: [Releases Page](https://github.com/SabreTools/SabreTools.RedumpLib/releases)
For the latest WIP build, see the [Rolling Release](https://github.com/SabreTools/SabreTools.RedumpLib/releases/rolling)
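
As a point of reference for the README text above, the library's main entry point is the Downloader type exercised by the new RedumpTool/Program.cs further down in this diff. The following is a minimal, hypothetical sketch (not part of this change) of driving it directly from the library, assuming only the members visible in that file: Feature, OutDir, Username, Password, MinimumId, MaximumId, and a Download() task that yields the list of processed IDs.

using System;
using SabreTools.RedumpLib;
using SabreTools.RedumpLib.Data;

// Illustrative placeholder values only; mirrors what RedumpTool does for the "site" feature
var downloader = new Downloader()
{
    Feature = Feature.Site,   // download pages and related files from the main site
    OutDir = "redump-out",    // placeholder output directory
    Username = "someuser",    // placeholder Redump credentials
    Password = "somepass",
    MinimumId = 100,          // placeholder page ID range
    MaximumId = 110,
};

// Download() returns a task; RedumpTool blocks on it, so this sketch does the same
var downloadTask = downloader.Download();
downloadTask.Wait();
Console.WriteLine($"Processed {downloadTask.Result.Count} IDs");

Note that the validation RedumpTool performs in CreateDownloader (output directory, ID range, and query checks) is skipped here.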

259
RedumpTool/Program.cs Normal file

@@ -0,0 +1,259 @@
using System;
using System.IO;
using SabreTools.RedumpLib;
using SabreTools.RedumpLib.Data;
namespace RedumpTool
{
public class Program
{
public static void Main(string[] args)
{
// Show help if nothing is input
if (args is null || args.Length == 0)
{
Console.WriteLine("At least one argument is required");
ShowHelp();
return;
}
// Derive the feature, if possible
Feature feature = DeriveFeature(args[0]);
if (feature == Feature.NONE)
{
Console.WriteLine("The feature could not be derived");
ShowHelp();
return;
}
// Create a new Downloader
var downloader = CreateDownloader(feature, args);
if (downloader is null)
{
Console.WriteLine("A downloader could not be created from the inputs");
ShowHelp();
return;
}
// Run the download task
var downloaderTask = downloader.Download();
downloaderTask.Wait();
// Get the downloader task results and print, if necessary
var downloaderResult = downloaderTask.Result;
if (downloaderResult.Count > 0)
{
string processedIds = string.Join(", ", [.. downloaderResult.ConvertAll(i => i.ToString())]);
Console.WriteLine($"Processed IDs: {processedIds}");
}
else if (downloaderResult.Count == 0 && downloader.Feature != Feature.Packs)
{
Console.WriteLine("No results were found");
ShowHelp();
}
}
/// <summary>
/// Derive the feature from the supplied argument
/// </summary>
/// <param name="feature">Possible feature name to derive from</param>
/// <returns>Derived Feature value, or Feature.NONE if the name is unrecognized</returns>
private static Feature DeriveFeature(string feature)
{
return feature.ToLowerInvariant() switch
{
"site" => Feature.Site,
"wip" => Feature.WIP,
"packs" => Feature.Packs,
"user" => Feature.User,
"search" => Feature.Quicksearch,
"query" => Feature.Quicksearch,
_ => Feature.NONE,
};
}
/// <summary>
/// Create a Downloader from a feature and a set of arguments
/// </summary>
/// <param name="feature">Primary feature to use</param>
/// <param name="args">Arguments list to parse</param>
/// <returns>Initialized Downloader on success, null otherwise</returns>
private static Downloader? CreateDownloader(Feature feature, string[] args)
{
// Loop through all of the arguments
var downloader = new Downloader() { Feature = feature };
try
{
for (int i = 1; i < args.Length; i++)
{
switch (args[i])
{
// Output directory
case "-o":
case "--output":
downloader.OutDir = args[++i].Trim('"');
break;
// Username
case "-u":
case "--username":
downloader.Username = args[++i];
break;
// Password
case "-p":
case "--password":
downloader.Password = args[++i];
break;
// Minimum Redump ID
case "-min":
case "--minimum":
if (!int.TryParse(args[++i], out int minimumId))
minimumId = -1;
downloader.MinimumId = minimumId;
break;
// Maximum Redump ID
case "-max":
case "--maximum":
if (!int.TryParse(args[++i], out int maximumId))
maximumId = -1;
downloader.MaximumId = maximumId;
break;
// Quicksearch text
case "-q":
case "--query":
downloader.QueryString = args[++i];
break;
// Packs subfolders
case "-s":
case "--subfolders":
downloader.UseSubfolders = true;
break;
// Use last modified
case "-n":
case "--onlynew":
downloader.OnlyNew = true;
break;
// List instead of download
case "-l":
case "--list":
downloader.OnlyList = true;
break;
// Don't filter forward slashes from queries
case "-ns":
case "--noslash":
downloader.NoSlash = true;
break;
// Force continuation
case "-f":
case "--force":
downloader.Force = true;
break;
// Everything else
default:
Console.WriteLine($"Unrecognized flag: {args[i]}");
break;
}
}
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return null;
}
// Output directory validation
if (!downloader.OnlyList && string.IsNullOrEmpty(downloader.OutDir))
{
Console.WriteLine("No output directory set!");
return null;
}
else if (!downloader.OnlyList && !string.IsNullOrEmpty(downloader.OutDir))
{
// Create the output directory, if it doesn't exist
try
{
if (!Directory.Exists(downloader.OutDir))
Directory.CreateDirectory(downloader.OutDir!);
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return null;
}
}
// Range verification
if (feature == Feature.Site && !downloader.OnlyNew && (downloader.MinimumId < 0 || downloader.MaximumId < 0))
{
Console.WriteLine("Please enter a valid range of Redump IDs");
return null;
}
else if (feature == Feature.WIP && !downloader.OnlyNew && (downloader.MinimumId < 0 || downloader.MaximumId < 0))
{
Console.WriteLine("Please enter a valid range of WIP IDs");
return null;
}
// Query verification (and cleanup)
if (feature == Feature.Quicksearch && string.IsNullOrEmpty(downloader.QueryString))
{
Console.WriteLine("Please enter a query for searching");
return null;
}
// Return the downloader
return downloader;
}
/// <summary>
/// Show the commandline help for the program
/// </summary>
private static void ShowHelp()
{
Console.WriteLine("RedumpTool - A Redump.org recovery tool");
Console.WriteLine();
Console.WriteLine("Usage: RedumpTool <feature> [options]");
Console.WriteLine();
Console.WriteLine("Common Options");
Console.WriteLine(" -o <folder>, --output <folder> - Set the base output directory");
Console.WriteLine(" -u <username>, --username <username> - Redump username");
Console.WriteLine(" -p <pass>, --password <pass> - Redump password");
Console.WriteLine();
Console.WriteLine("site - Download pages and related files from the main site");
Console.WriteLine(" -min <MinId>, --minimum <MinId> - Lower bound for page numbers (cannot be used with only new)");
Console.WriteLine(" -max <MaxId>, --maximum <MaxId> - Upper bound for page numbers (cannot be used with only new)");
Console.WriteLine(" -n, --onlynew - Use the last modified view (cannot be used with min and max)");
Console.WriteLine(" -f, --force - Force continuing downloads until user cancels (used with only new)");
Console.WriteLine();
Console.WriteLine("wip - Download pages and related files from the WIP list");
Console.WriteLine(" -min <MinId>, --minimum <MinId> - Lower bound for page numbers (cannot be used with only new)");
Console.WriteLine(" -max <MaxId>, --maximum <MaxId> - Upper bound for page numbers (cannot be used with only new)");
Console.WriteLine(" -n, --onlynew - Use the last modified view (cannot be used with min and max)");
Console.WriteLine();
Console.WriteLine("packs - Download available packs");
Console.WriteLine(" -s, --subfolders - Download packs to named subfolders");
Console.WriteLine();
Console.WriteLine("user - Download pages and related files for a particular user");
Console.WriteLine(" -n, --onlynew - Use the last modified view instead of sequential parsing");
Console.WriteLine(" -l, --list - Only list the page IDs for that user");
Console.WriteLine();
Console.WriteLine("query - Download pages and related files from a Redump-compatible query");
Console.WriteLine(" -q, --query - Redump-compatible query to run");
Console.WriteLine(" -l, --list - Only list the page IDs for that query");
Console.WriteLine(" -ns, --noslash - Don't replace forward slashes with '-'");
Console.WriteLine();
}
}
}
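
Purely as an illustration of the help text above (all paths, credentials, and IDs are placeholders), two typical invocations of the new tool would be:

# Download site pages 100 through 110 into ./redump-out
RedumpTool site -u someuser -p somepass -o redump-out -min 100 -max 110

# Only list the page IDs matched by a Redump-compatible query, without downloading
RedumpTool query -q "some title" -l

Both forms pass the validation in CreateDownloader: the site feature supplies an output directory and a non-negative ID range, and the query feature combines -q with -l, so no output directory is required.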


@@ -0,0 +1,33 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net20;net35;net40;net452;net462;net472;net48;netcoreapp3.1;net5.0;net6.0;net7.0;net8.0;net9.0;net10.0</TargetFrameworks>
<OutputType>Exe</OutputType>
<CheckEolTargetFramework>false</CheckEolTargetFramework>
<IncludeSourceRevisionInInformationalVersion>false</IncludeSourceRevisionInInformationalVersion>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.9.1</Version>
</PropertyGroup>
<!-- Support All Frameworks -->
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR $(TargetFramework.StartsWith(`net4`))">
<RuntimeIdentifiers>win-x86;win-x64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(TargetFramework.StartsWith(`netcoreapp`)) OR $(TargetFramework.StartsWith(`net5`))">
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net6`)) OR $(TargetFramework.StartsWith(`net7`)) OR $(TargetFramework.StartsWith(`net8`)) OR $(TargetFramework.StartsWith(`net9`)) OR $(TargetFramework.StartsWith(`net10`))">
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64;osx-arm64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(RuntimeIdentifier.StartsWith(`osx-arm`))">
<TargetFrameworks>net6.0;net7.0;net8.0;net9.0;net10.0</TargetFrameworks>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\SabreTools.RedumpLib\SabreTools.RedumpLib.csproj" />
</ItemGroup>
</Project>


@@ -0,0 +1,93 @@
using System;
using System.IO;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace SabreTools.RedumpLib.Test
{
public class BuilderTests
{
[Theory]
[InlineData("success_complete.json", false)]
[InlineData("success_invalid.json", false)] // Fully invalid returns a default object
[InlineData("success_partial.json", false)]
[InlineData("fail_invalid.json", true)]
public void CreateFromFileTest(string filename, bool expectNull)
{
// Get the full path to the test file
string path = Path.Combine(Environment.CurrentDirectory, "TestData", filename);
// Try to create the submission info from file
var si = Builder.CreateFromFile(path);
// Check for an expected result
Assert.Equal(expectNull, si is null);
}
[Fact]
public void InjectSubmissionInformation_BothNull_Null()
{
SubmissionInfo? si = null;
SubmissionInfo? seed = null;
var actual = Builder.InjectSubmissionInformation(si, seed);
Assert.Null(actual);
}
[Fact]
public void InjectSubmissionInformation_ValidInputNullSeed_Valid()
{
SubmissionInfo? si = new();
SubmissionInfo? seed = null;
var actual = Builder.InjectSubmissionInformation(si, seed);
Assert.NotNull(actual);
}
[Fact]
public void InjectSubmissionInformation_BothValid_Valid()
{
SubmissionInfo? si = new();
SubmissionInfo? seed = new();
var actual = Builder.InjectSubmissionInformation(si, seed);
Assert.NotNull(actual);
}
[Fact]
public void ReplaceHtmlWithSiteCodes_EmptyString_Empty()
{
string original = string.Empty;
string actual = Builder.ReplaceHtmlWithSiteCodes(original);
Assert.Empty(actual);
}
[Fact]
public void ReplaceHtmlWithSiteCodes_NoReplace_Identical()
{
string original = "<p>Nothing here will be replaced</p>";
string actual = Builder.ReplaceHtmlWithSiteCodes(original);
Assert.Equal(original, actual);
}
[Fact]
public void ReplaceHtmlWithSiteCodes_StandardCode_Replaced()
{
string original = "<b>ISBN</b>: 000-0-00-000000-0";
string expected = "[T:ISBN] 000-0-00-000000-0";
string actual = Builder.ReplaceHtmlWithSiteCodes(original);
Assert.Equal(expected, actual);
}
[Fact]
public void ReplaceHtmlWithSiteCodes_OutdatedCode_Replaced()
{
string original = "XMID: AB12345C";
string expected = "<b>XMID</b>: AB12345C";
string actual = Builder.ReplaceHtmlWithSiteCodes(original);
Assert.Equal(expected, actual);
}
}
}

File diff suppressed because it is too large


@@ -0,0 +1,8 @@
namespace SabreTools.RedumpLib.Test
{
public class DownloaderTests
{
// Tests here will require installing and using the Moq library
// to mock the RedumpClient type.
}
}


@@ -1,201 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace SabreTools.RedumpLib.Test
{
public class EnumExtensionsTests
{
/// <summary>
/// MediaType values that support drive speeds
/// </summary>
private static readonly MediaType?[] _supportDriveSpeeds =
[
MediaType.CDROM,
MediaType.DVD,
MediaType.GDROM,
MediaType.HDDVD,
MediaType.BluRay,
MediaType.NintendoGameCubeGameDisc,
MediaType.NintendoWiiOpticalDisc,
];
/// <summary>
/// RedumpSystem values that are considered Audio
/// </summary>
private static readonly RedumpSystem?[] _audioSystems =
[
RedumpSystem.AtariJaguarCDInteractiveMultimediaSystem,
RedumpSystem.AudioCD,
RedumpSystem.DVDAudio,
RedumpSystem.HasbroiONEducationalGamingSystem,
RedumpSystem.HasbroVideoNow,
RedumpSystem.HasbroVideoNowColor,
RedumpSystem.HasbroVideoNowJr,
RedumpSystem.HasbroVideoNowXP,
RedumpSystem.PlayStationGameSharkUpdates,
RedumpSystem.PhilipsCDi,
RedumpSystem.SuperAudioCD,
];
/// <summary>
/// RedumpSystem values that are considered markers
/// </summary>
private static readonly RedumpSystem?[] _markerSystems =
[
RedumpSystem.MarkerArcadeEnd,
RedumpSystem.MarkerComputerEnd,
RedumpSystem.MarkerDiscBasedConsoleEnd,
RedumpSystem.MarkerOtherEnd,
];
/// <summary>
/// RedumpSystem values that are have reversed ringcodes
/// </summary>
private static readonly RedumpSystem?[] _reverseRingcodeSystems =
[
RedumpSystem.SonyPlayStation2,
RedumpSystem.SonyPlayStation3,
RedumpSystem.SonyPlayStation4,
RedumpSystem.SonyPlayStation5,
RedumpSystem.SonyPlayStationPortable,
];
/// <summary>
/// RedumpSystem values that are considered XGD
/// </summary>
private static readonly RedumpSystem?[] _xgdSystems =
[
RedumpSystem.MicrosoftXbox,
RedumpSystem.MicrosoftXbox360,
RedumpSystem.MicrosoftXboxOne,
RedumpSystem.MicrosoftXboxSeriesXS,
];
/// <summary>
/// Check that all systems with reversed ringcodes are marked properly
/// </summary>
/// <param name="redumpSystem">RedumpSystem value to check</param>
/// <param name="expected">The expected value to come from the check</param>
[Theory]
[MemberData(nameof(GenerateReversedRingcodeSystemsTestData))]
public void HasReversedRingcodesTest(RedumpSystem? redumpSystem, bool expected)
{
bool actual = redumpSystem.HasReversedRingcodes();
Assert.Equal(expected, actual);
}
/// <summary>
/// Check that all audio systems are marked properly
/// </summary>
/// <param name="redumpSystem">RedumpSystem value to check</param>
/// <param name="expected">The expected value to come from the check</param>
[Theory]
[MemberData(nameof(GenerateAudioSystemsTestData))]
public void IsAudioTest(RedumpSystem? redumpSystem, bool expected)
{
bool actual = redumpSystem.IsAudio();
Assert.Equal(expected, actual);
}
/// <summary>
/// Check that all marker systems are marked properly
/// </summary>
/// <param name="redumpSystem">RedumpSystem value to check</param>
/// <param name="expected">The expected value to come from the check</param>
[Theory]
[MemberData(nameof(GenerateMarkerSystemsTestData))]
public void IsMarkerTest(RedumpSystem? redumpSystem, bool expected)
{
bool actual = redumpSystem.IsMarker();
Assert.Equal(expected, actual);
}
/// <summary>
/// Check that all XGD systems are marked properly
/// </summary>
/// <param name="redumpSystem">RedumpSystem value to check</param>
/// <param name="expected">The expected value to come from the check</param>
[Theory]
[MemberData(nameof(GenerateXGDSystemsTestData))]
public void IsXGDTest(RedumpSystem? redumpSystem, bool expected)
{
bool actual = redumpSystem.IsXGD();
Assert.Equal(expected, actual);
}
/// <summary>
/// Generate a test set of RedumpSystem values that are considered Audio
/// </summary>
/// <returns>MemberData-compatible list of RedumpSystem values</returns>
public static List<object?[]> GenerateAudioSystemsTestData()
{
var testData = new List<object?[]>() { new object?[] { null, false } };
foreach (RedumpSystem redumpSystem in Enum.GetValues(typeof(RedumpSystem)))
{
if (_audioSystems.Contains(redumpSystem))
testData.Add([redumpSystem, true]);
else
testData.Add([redumpSystem, false]);
}
return testData;
}
/// <summary>
/// Generate a test set of RedumpSystem values that are considered markers
/// </summary>
/// <returns>MemberData-compatible list of RedumpSystem values</returns>
public static List<object?[]> GenerateMarkerSystemsTestData()
{
var testData = new List<object?[]>() { new object?[] { null, false } };
foreach (RedumpSystem redumpSystem in Enum.GetValues(typeof(RedumpSystem)))
{
if (_markerSystems.Contains(redumpSystem))
testData.Add([redumpSystem, true]);
else
testData.Add([redumpSystem, false]);
}
return testData;
}
/// <summary>
/// Generate a test set of RedumpSystem values that are considered markers
/// </summary>
/// <returns>MemberData-compatible list of RedumpSystem values</returns>
public static List<object?[]> GenerateReversedRingcodeSystemsTestData()
{
var testData = new List<object?[]>() { new object?[] { null, false } };
foreach (RedumpSystem redumpSystem in Enum.GetValues(typeof(RedumpSystem)))
{
if (_reverseRingcodeSystems.Contains(redumpSystem))
testData.Add([redumpSystem, true]);
else
testData.Add([redumpSystem, false]);
}
return testData;
}
/// <summary>
/// Generate a test set of RedumpSystem values that are considered XGD
/// </summary>
/// <returns>MemberData-compatible list of RedumpSystem values</returns>
public static List<object?[]> GenerateXGDSystemsTestData()
{
var testData = new List<object?[]>() { new object?[] { null, false } };
foreach (RedumpSystem redumpSystem in Enum.GetValues(typeof(RedumpSystem)))
{
if (_xgdSystems.Contains(redumpSystem))
testData.Add([redumpSystem, true]);
else
testData.Add([redumpSystem, false]);
}
return testData;
}
}
}


@@ -1,717 +0,0 @@
using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace SabreTools.RedumpLib.Test
{
// TODO: Add tests for string-to-enum conversion
public class ExtensionsTests
{
#region Cross-Enumeration
/// <summary>
/// DiscType values that map to MediaType
/// </summary>
private static readonly DiscType?[] _mappableDiscTypes = new DiscType?[]
{
DiscType.BD25,
DiscType.BD33,
DiscType.BD50,
DiscType.BD66,
DiscType.BD100,
DiscType.BD128,
DiscType.CD,
DiscType.DVD5,
DiscType.DVD9,
DiscType.GDROM,
DiscType.HDDVDSL,
DiscType.HDDVDDL,
DiscType.NintendoGameCubeGameDisc,
DiscType.NintendoWiiOpticalDiscSL,
DiscType.NintendoWiiOpticalDiscDL,
DiscType.NintendoWiiUOpticalDiscSL,
DiscType.UMDSL,
DiscType.UMDDL,
};
/// <summary>
/// MediaType values that map to DiscType
/// </summary>
private static readonly MediaType?[] _mappableMediaTypes = new MediaType?[]
{
MediaType.BluRay,
MediaType.CDROM,
MediaType.DVD,
MediaType.GDROM,
MediaType.HDDVD,
MediaType.NintendoGameCubeGameDisc,
MediaType.NintendoWiiOpticalDisc,
MediaType.NintendoWiiUOpticalDisc,
MediaType.UMD,
};
/// <summary>
/// Check that every supported system has some set of MediaTypes supported
/// </summary>
/// <param name="redumpSystem">RedumpSystem value to check</param>
[Theory]
[MemberData(nameof(GenerateRedumpSystemMappingTestData))]
public void MediaTypesTest(RedumpSystem? redumpSystem)
{
var actual = redumpSystem.MediaTypes();
Assert.NotEmpty(actual);
}
/// <summary>
/// Check that both mappable and unmappable media types output correctly
/// </summary>
/// <param name="mediaType">MediaType value to check</param>
/// <param name="expectNull">True to expect a null mapping, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateMediaTypeMappingTestData))]
public void ToDiscTypeTest(MediaType? mediaType, bool expectNull)
{
DiscType? actual = mediaType.ToDiscType();
Assert.Equal(expectNull, actual == null);
}
/// <summary>
/// Check that DiscType values all map to something appropriate
/// </summary>
/// <param name="discType">DiscType value to check</param>
/// <param name="expectNull">True to expect a null mapping, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateDiscTypeMappingTestData))]
public void ToMediaTypeTest(DiscType? discType, bool expectNull)
{
MediaType? actual = discType.ToMediaType();
Assert.Equal(expectNull, actual == null);
}
/// <summary>
/// Generate a test set of DiscType values
/// </summary>
/// <returns>MemberData-compatible list of DiscType values</returns>
public static List<object?[]> GenerateDiscTypeMappingTestData()
{
var testData = new List<object?[]>() { new object?[] { null, true } };
foreach (DiscType? discType in Enum.GetValues(typeof(DiscType)))
{
if (_mappableDiscTypes.Contains(discType))
testData.Add(new object?[] { discType, false });
else
testData.Add(new object?[] { discType, true });
}
return testData;
}
/// <summary>
/// Generate a test set of RedumpSystem values
/// </summary>
/// <returns>MemberData-compatible list of RedumpSystem values</returns>
public static List<object?[]> GenerateRedumpSystemMappingTestData()
{
var testData = new List<object?[]>() { new object?[] { null } };
foreach (RedumpSystem? redumpSystem in Enum.GetValues(typeof(RedumpSystem)))
{
testData.Add(new object?[] { redumpSystem });
}
return testData;
}
/// <summary>
/// Generate a test set of mappable media types
/// </summary>
/// <returns>MemberData-compatible list of MediaTypes</returns>
public static List<object?[]> GenerateMediaTypeMappingTestData()
{
var testData = new List<object?[]>() { new object?[] { null, true } };
foreach (MediaType? mediaType in Enum.GetValues(typeof(MediaType)))
{
if (_mappableMediaTypes.Contains(mediaType))
testData.Add(new object?[] { mediaType, false });
else
testData.Add(new object?[] { mediaType, true });
}
return testData;
}
#endregion
#region Disc Category
/// <summary>
/// Check that every DiscCategory has a long name provided
/// </summary>
/// <param name="discCategory">DiscCategory value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateDiscCategoryTestData))]
public void DiscCategoryLongNameTest(DiscCategory? discCategory, bool expectNull)
{
var actual = discCategory.LongName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
/// <summary>
/// Generate a test set of DiscCategory values
/// </summary>
/// <returns>MemberData-compatible list of DiscCategory values</returns>
public static List<object?[]> GenerateDiscCategoryTestData()
{
var testData = new List<object?[]>() { new object?[] { null, true } };
foreach (DiscCategory? discCategory in Enum.GetValues(typeof(DiscCategory)))
{
testData.Add(new object?[] { discCategory, false });
}
return testData;
}
#endregion
#region Disc Type
/// <summary>
/// Check that every DiscType has a long name provided
/// </summary>
/// <param name="discType">DiscType value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateDiscTypeTestData))]
public void DiscTypeLongNameTest(DiscType? discType, bool expectNull)
{
var actual = discType.LongName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
/// <summary>
/// Generate a test set of DiscType values
/// </summary>
/// <returns>MemberData-compatible list of DiscType values</returns>
public static List<object?[]> GenerateDiscTypeTestData()
{
var testData = new List<object?[]>() { new object?[] { null, true } };
foreach (DiscType? discType in Enum.GetValues(typeof(DiscType)))
{
if (discType == DiscType.NONE)
testData.Add(new object?[] { discType, true });
else
testData.Add(new object?[] { discType, false });
}
return testData;
}
#endregion
#region Language
/// <summary>
/// Check that every Language has a long name provided
/// </summary>
/// <param name="language">Language value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateLanguageTestData))]
public void LanguageLongNameTest(Language? language, bool expectNull)
{
var actual = language.LongName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
/// <summary>
/// Check that every Language has a short name provided
/// </summary>
/// <param name="language">Language value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateLanguageTestData))]
public void LanguageShortNameTest(Language? language, bool expectNull)
{
var actual = language.ShortName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
/// <summary>
/// Ensure that every Language that has an ISO 639-1 code is unique
/// </summary>
[Fact]
public void LanguageNoDuplicateTwoLetterCodeTest()
{
var fullLanguages = Enum.GetValues(typeof(Language)).Cast<Language?>().ToList();
var filteredLanguages = new Dictionary<string, Language?>();
int totalCount = 0;
foreach (Language? language in fullLanguages)
{
var code = language.TwoLetterCode();
if (string.IsNullOrEmpty(code))
continue;
// Throw if the code already exists
if (filteredLanguages.ContainsKey(code))
throw new DuplicateNameException($"Code {code} already in dictionary");
filteredLanguages[code] = language;
totalCount++;
}
Assert.Equal(totalCount, filteredLanguages.Count);
}
/// <summary>
/// Ensure that every Language that has a standard/bibliographic ISO 639-2 code is unique
/// </summary>
[Fact]
public void LanguageNoDuplicateThreeLetterCodeTest()
{
var fullLanguages = Enum.GetValues(typeof(Language)).Cast<Language?>().ToList();
var filteredLanguages = new Dictionary<string, Language?>();
int totalCount = 0;
foreach (Language? language in fullLanguages)
{
var code = language.ThreeLetterCode();
if (string.IsNullOrEmpty(code))
continue;
// Throw if the code already exists
if (filteredLanguages.ContainsKey(code))
throw new DuplicateNameException($"Code {code} already in dictionary");
filteredLanguages[code] = language;
totalCount++;
}
Assert.Equal(totalCount, filteredLanguages.Count);
}
/// <summary>
/// Ensure that every Language that has a terminology ISO 639-2 code is unique
/// </summary>
[Fact]
public void LanguageNoDuplicateThreeLetterCodeAltTest()
{
var fullLanguages = Enum.GetValues(typeof(Language)).Cast<Language?>().ToList();
var filteredLanguages = new Dictionary<string, Language?>();
int totalCount = 0;
foreach (Language? language in fullLanguages)
{
var code = language.ThreeLetterCodeAlt();
if (string.IsNullOrEmpty(code))
continue;
// Throw if the code already exists
if (filteredLanguages.ContainsKey(code))
throw new DuplicateNameException($"Code {code} already in dictionary");
filteredLanguages[code] = language;
totalCount++;
}
Assert.Equal(totalCount, filteredLanguages.Count);
}
/// <summary>
/// Generate a test set of Language values
/// </summary>
/// <returns>MemberData-compatible list of Language values</returns>
public static List<object?[]> GenerateLanguageTestData()
{
var testData = new List<object?[]>() { new object?[] { null, true } };
foreach (Language? language in Enum.GetValues(typeof(Language)))
{
testData.Add(new object?[] { language, false });
}
return testData;
}
#endregion
#region Language Selection
/// <summary>
/// Check that every LanguageSelection has a long name provided
/// </summary>
/// <param name="languageSelection">LanguageSelection value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateLanguageSelectionTestData))]
public void LanguageSelectionLongNameTest(LanguageSelection? languageSelection, bool expectNull)
{
var actual = languageSelection.LongName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
/// <summary>
/// Generate a test set of LanguageSelection values
/// </summary>
/// <returns>MemberData-compatible list of LanguageSelection values</returns>
public static List<object?[]> GenerateLanguageSelectionTestData()
{
var testData = new List<object?[]>() { new object?[] { null, true } };
foreach (LanguageSelection? languageSelection in Enum.GetValues(typeof(LanguageSelection)))
{
testData.Add(new object?[] { languageSelection, false });
}
return testData;
}
#endregion
#region Media Type
/// <summary>
/// Check that every MediaType has a long name provided
/// </summary>
/// <param name="mediaType">MediaType value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateMediaTypeTestData))]
public void MediaTypeLongNameTest(MediaType? mediaType, bool expectNull)
{
var actual = mediaType.LongName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
/// <summary>
/// Check that every MediaType has a short name provided
/// </summary>
/// <param name="mediaType">MediaType value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateMediaTypeTestData))]
public void MediaTypeShortNameTest(MediaType? mediaType, bool expectNull)
{
var actual = mediaType.ShortName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
/// <summary>
/// Generate a test set of MediaType values
/// </summary>
/// <returns>MemberData-compatible list of MediaType values</returns>
public static List<object?[]> GenerateMediaTypeTestData()
{
var testData = new List<object?[]>() { new object?[] { null, true } };
foreach (MediaType? mediaType in Enum.GetValues(typeof(MediaType)))
{
testData.Add(new object?[] { mediaType, false });
}
return testData;
}
#endregion
#region Region
/// <summary>
/// Check that every Region has a long name provided
/// </summary>
/// <param name="region">Region value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateRegionTestData))]
public void RegionLongNameTest(Region? region, bool expectNull)
{
var actual = region.LongName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
/// <summary>
/// Check that every Region has a short name provided
/// </summary>
/// <param name="region">Region value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateRegionTestData))]
public void RegionShortNameTest(Region? region, bool expectNull)
{
var actual = region.ShortName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
/// <summary>
/// Ensure that every Region short name (code) is unique
/// </summary>
[Fact]
public void RegionNoDuplicateShortNameTest()
{
var fullRegions = Enum.GetValues(typeof(Region)).Cast<Region?>().ToList();
var filteredRegions = new Dictionary<string, Region?>();
int totalCount = 0;
foreach (Region? region in fullRegions)
{
var code = region.ShortName();
if (string.IsNullOrEmpty(code))
continue;
// Throw if the code already exists
if (filteredRegions.ContainsKey(code))
throw new DuplicateNameException($"Code {code} already in dictionary");
filteredRegions[code] = region;
totalCount++;
}
Assert.Equal(totalCount, filteredRegions.Count);
}
/// <summary>
/// Generate a test set of Region values
/// </summary>
/// <returns>MemberData-compatible list of Region values</returns>
public static List<object?[]> GenerateRegionTestData()
{
var testData = new List<object?[]>() { new object?[] { null, true } };
foreach (Region? region in Enum.GetValues(typeof(Region)))
{
testData.Add(new object?[] { region, false });
}
return testData;
}
#endregion
#region Site Code
/// <summary>
/// Check that every SiteCode has a long name provided
/// </summary>
/// <param name="siteCode">SiteCode value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateSiteCodeTestData))]
public void SiteCodeLongNameTest(SiteCode? siteCode, bool expectNull)
{
var actual = siteCode.LongName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
/// <summary>
/// Check that every SiteCode has a short name provided
/// </summary>
/// <param name="siteCode">SiteCode value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateSiteCodeTestData))]
public void SiteCodeShortNameTest(SiteCode? siteCode, bool expectNull)
{
var actual = siteCode.ShortName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
/// <summary>
/// Generate a test set of SiteCode values
/// </summary>
/// <returns>MemberData-compatible list of SiteCode values</returns>
public static List<object?[]> GenerateSiteCodeTestData()
{
var testData = new List<object?[]>() { new object?[] { null, true } };
foreach (SiteCode? siteCode in Enum.GetValues(typeof(SiteCode)))
{
testData.Add(new object?[] { siteCode, false });
}
return testData;
}
#endregion
#region System
/// <summary>
/// Check that every RedumpSystem has a long name provided
/// </summary>
/// <param name="redumpSystem">RedumpSystem value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateRedumpSystemTestData))]
public void RedumpSystemLongNameTest(RedumpSystem? redumpSystem, bool expectNull)
{
var actual = redumpSystem.LongName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
// TODO: Re-enable the following test once non-Redump systems are accounted for
/// <summary>
/// Check that every RedumpSystem has a short name provided
/// </summary>
/// <param name="redumpSystem">RedumpSystem value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
//[Theory]
//[MemberData(nameof(GenerateRedumpSystemTestData))]
//public void RedumpSystemShortNameTest(RedumpSystem? redumpSystem, bool expectNull)
//{
// string actual = redumpSystem.ShortName();
// if (expectNull)
// Assert.Null(actual);
// else
// Assert.NotNull(actual);
//}
// TODO: Test the other attributes as well
// Most are simple bool checks, so they are less interesting to unit test
// SystemCategory always returns a value as well, so is it worth testing?
/// <summary>
/// Generate a test set of RedumpSystem values
/// </summary>
/// <returns>MemberData-compatible list of RedumpSystem values</returns>
public static List<object?[]> GenerateRedumpSystemTestData()
{
var testData = new List<object?[]>() { new object?[] { null, true } };
foreach (RedumpSystem? redumpSystem in Enum.GetValues(typeof(RedumpSystem)))
{
// We want to skip all markers for this
if (redumpSystem.IsMarker())
continue;
testData.Add(new object?[] { redumpSystem, false });
}
return testData;
}
#endregion
#region System Category
/// <summary>
/// Check that every SystemCategory has a long name provided
/// </summary>
/// <param name="systemCategory">SystemCategory value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateSystemCategoryTestData))]
public void SystemCategoryLongNameTest(SystemCategory? systemCategory, bool expectNull)
{
var actual = systemCategory.LongName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
/// <summary>
/// Generate a test set of SystemCategory values
/// </summary>
/// <returns>MemberData-compatible list of SystemCategory values</returns>
public static List<object?[]> GenerateSystemCategoryTestData()
{
var testData = new List<object?[]>() { new object?[] { null, true } };
foreach (SystemCategory? systemCategory in Enum.GetValues(typeof(SystemCategory)))
{
if (systemCategory == SystemCategory.NONE)
testData.Add(new object?[] { systemCategory, true });
else
testData.Add(new object?[] { systemCategory, false });
}
return testData;
}
#endregion
#region Yes/No
/// <summary>
/// Check that every YesNo has a long name provided
/// </summary>
/// <param name="yesNo">YesNo value to check</param>
/// <param name="expectNull">True to expect a null value, false otherwise</param>
[Theory]
[MemberData(nameof(GenerateYesNoTestData))]
public void YesNoLongNameTest(YesNo? yesNo, bool expectNull)
{
var actual = yesNo.LongName();
if (expectNull)
Assert.Null(actual);
else
Assert.NotNull(actual);
}
/// <summary>
/// Generate a test set of YesNo values
/// </summary>
/// <returns>MemberData-compatible list of YesNo values</returns>
public static List<object?[]> GenerateYesNoTestData()
{
var testData = new List<object?[]>() { new object?[] { null, false } };
foreach (YesNo? yesNo in Enum.GetValues(typeof(YesNo)))
{
testData.Add(new object?[] { yesNo, false });
}
return testData;
}
#endregion
}
}

View File

@@ -0,0 +1,879 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using SabreTools.RedumpLib.Data;
using SabreTools.RedumpLib.Data.Sections;
using Xunit;
namespace SabreTools.RedumpLib.Test
{
public class FormatterTests
{
#region ProcessSpecialFields
// TODO: Write tests for ProcessSpecialFields
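// A minimal, commented-out sketch of a possible first test, assuming ProcessSpecialFields
// takes a SubmissionInfo and folds the special-field dictionaries back into the
// Comments/Contents text; the exact signature and output format are assumptions and
// should be verified against Formatter before enabling.
//[Fact]
//public void ProcessSpecialFields_CommentTag_AppendedToComments()
//{
//    var si = new SubmissionInfo
//    {
//        CommonDiscInfo = new CommonDiscInfoSection
//        {
//            Comments = "Existing comment",
//            CommentsSpecialFields = new Dictionary<SiteCode, string>
//            {
//                [SiteCode.ISBN] = "978-0-00-000000-0",
//            },
//        },
//    };
//    Formatter.ProcessSpecialFields(si);
//    Assert.Contains("978-0-00-000000-0", si.CommonDiscInfo!.Comments);
//}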
#endregion
#region CommonDiscInfoSection
// TODO: Write tests for FormatOutputData(CommonDiscInfoSection)
[Fact]
public void FormatOutputData_CDINullSACNullTAWONull_Minimal()
{
string expected = "Common Disc Info:\n\tRegion: SPACE! (CHANGE THIS)\n\tLanguages: ADD LANGUAGES HERE (ONLY IF YOU TESTED)\n\n\tRingcode Information:\n\n\n";
var builder = new StringBuilder();
CommonDiscInfoSection? section = null;
SizeAndChecksumsSection? sac = null;
TracksAndWriteOffsetsSection? tawo = null;
int? fullyMatchedID = null;
List<int>? partiallyMatchedIDs = null;
Formatter.FormatOutputData(builder,
section,
sac,
tawo,
fullyMatchedID,
partiallyMatchedIDs);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
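// A populated-section sketch added alongside the null case above; it only assumes that the
// formatted output echoes the Title value somewhere, not any exact line layout.
[Fact]
public void FormatOutputData_CDIWithTitle_ContainsTitle()
{
var builder = new StringBuilder();
CommonDiscInfoSection? section = new()
{
Title = "Game Title",
Region = Region.World,
};
SizeAndChecksumsSection? sac = null;
TracksAndWriteOffsetsSection? tawo = null;
int? fullyMatchedID = null;
List<int>? partiallyMatchedIDs = null;
Formatter.FormatOutputData(builder,
section,
sac,
tawo,
fullyMatchedID,
partiallyMatchedIDs);
string actual = builder.ToString();
Assert.Contains("Game Title", actual);
}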
#endregion
#region VersionAndEditionsSection
[Fact]
public void FormatOutputData_VAENull_Minimal()
{
string expected = "Version and Editions:\n";
var builder = new StringBuilder();
VersionAndEditionsSection? section = null;
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_VAE_Formatted()
{
string expected = "Version and Editions:\n\tVersion: XXXXXX\n\tEdition/Release: XXXXXX\n";
var builder = new StringBuilder();
VersionAndEditionsSection? section = new()
{
Version = "XXXXXX",
OtherEditions = "XXXXXX",
};
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region EDCSection
[Fact]
public void FormatOutputData_EDCNull_Minimal()
{
string expected = "EDC:\n";
var builder = new StringBuilder();
EDCSection? section = null;
RedumpSystem? system = RedumpSystem.SonyPlayStation;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_EDCInvalidSystem_Empty()
{
string expected = string.Empty;
var builder = new StringBuilder();
EDCSection? section = null;
RedumpSystem? system = RedumpSystem.IBMPCcompatible;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_EDC_Formatted()
{
string expected = "EDC:\n\tEDC: Yes\n";
var builder = new StringBuilder();
EDCSection? section = new() { EDC = YesNo.Yes };
RedumpSystem? system = RedumpSystem.SonyPlayStation;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region ExtrasSection
[Fact]
public void FormatOutputData_ExtrasNull_Empty()
{
string expected = string.Empty;
var builder = new StringBuilder();
ExtrasSection? section = null;
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_ExtrasInvalid_Empty()
{
string expected = string.Empty;
var builder = new StringBuilder();
ExtrasSection? section = new()
{
PVD = null,
PIC = null,
BCA = null,
SecuritySectorRanges = null,
};
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_Extras_Formatted()
{
string expected = "Extras:\n\tPrimary Volume Descriptor (PVD): XXXXXX\n\tDisc Key: XXXXXX\n\tDisc ID: XXXXXX\n\tPermanent Information & Control (PIC): XXXXXX\n\tHeader: XXXXXX\n\tBCA: XXXXXX\n\tSecurity Sector Ranges: XXXXXX\n";
var builder = new StringBuilder();
ExtrasSection? section = new()
{
PVD = "XXXXXX",
DiscKey = "XXXXXX",
DiscID = "XXXXXX",
PIC = "XXXXXX",
Header = "XXXXXX",
BCA = "XXXXXX",
SecuritySectorRanges = "XXXXXX",
};
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region CopyProtectionSection
[Fact]
public void FormatOutputData_COPNull_Empty()
{
string expected = string.Empty;
var builder = new StringBuilder();
CopyProtectionSection? section = null;
RedumpSystem? system = RedumpSystem.IBMPCcompatible;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_COPInvalid_Empty()
{
string expected = string.Empty;
var builder = new StringBuilder();
CopyProtectionSection? section = new()
{
Protection = null,
AntiModchip = null,
LibCrypt = null,
LibCryptData = null,
SecuROMData = null,
};
RedumpSystem? system = RedumpSystem.IBMPCcompatible;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_COP_Formatted()
{
string expected = "Copy Protection:\n\tCopy Protection: XXXXXX\n\tSubIntention Data (SecuROM/LibCrypt): XXXXXX\n";
var builder = new StringBuilder();
CopyProtectionSection? section = new()
{
AntiModchip = YesNo.Yes,
LibCrypt = YesNo.Yes,
LibCryptData = "XXXXXX",
Protection = "XXXXXX",
SecuROMData = "XXXXXX",
};
RedumpSystem? system = RedumpSystem.IBMPCcompatible;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_COPPSX_Formatted()
{
string expected = "Copy Protection:\n\tAnti-modchip: Yes\n\tLibCrypt: Yes\n\tSubIntention Data (SecuROM/LibCrypt): XXXXXX\n\tCopy Protection: XXXXXX\n\tSubIntention Data (SecuROM/LibCrypt): XXXXXX\n";
var builder = new StringBuilder();
CopyProtectionSection? section = new()
{
AntiModchip = YesNo.Yes,
LibCrypt = YesNo.Yes,
LibCryptData = "XXXXXX",
Protection = "XXXXXX",
SecuROMData = "XXXXXX",
};
RedumpSystem? system = RedumpSystem.SonyPlayStation;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region TracksAndWriteOffsetsSection
[Fact]
public void FormatOutputData_TAWOInvalid_Minimal()
{
string expected = "Tracks and Write Offsets:\n\tDAT:\n\n\n\n\n";
var builder = new StringBuilder();
TracksAndWriteOffsetsSection? section = new();
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_TAWO_Formatted()
{
string expected = "Tracks and Write Offsets:\n\tDAT:\n\nXXXXXX\n\n\n\tCuesheet: XXXXXX\n\tWrite Offset: XXXXXX\n";
var builder = new StringBuilder();
TracksAndWriteOffsetsSection? section = new()
{
ClrMameProData = "XXXXXX",
Cuesheet = "XXXXXX",
CuesheetRaw = [0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39],
OtherWriteOffsets = "XXXXXX",
};
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region SizeAndChecksumsSection
// TODO: Write tests for FormatOutputData(SizeAndChecksumsSection)
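// A commented-out sketch for when these tests get written. It assumes an overload shaped
// like FormatOutputData(StringBuilder, SizeAndChecksumsSection?), mirroring the other
// per-section overloads; the exact signature and any header text are assumptions.
//[Fact]
//public void FormatOutputData_SAC_ContainsValues()
//{
//    var builder = new StringBuilder();
//    SizeAndChecksumsSection? section = new()
//    {
//        Size = 12345,
//        CRC32 = "CRC32",
//        MD5 = "MD5",
//        SHA1 = "SHA1",
//    };
//    Formatter.FormatOutputData(builder, section);
//    string actual = builder.ToString();
//    Assert.Contains("12345", actual);
//    Assert.Contains("CRC32", actual);
//}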
#endregion
#region DumpingInfoSection
[Fact]
public void FormatOutputData_DINull_Minimal()
{
string expected = "Dumping Info:\n";
var builder = new StringBuilder();
DumpingInfoSection? section = null;
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_DI_Formatted()
{
string expected = "Dumping Info:\n\tFrontend Version: XXXXXX\n\tDumping Program: XXXXXX\n\tDate: XXXXXX\n\tParameters: XXXXXX\n\tManufacturer: XXXXXX\n\tModel: XXXXXX\n\tFirmware: XXXXXX\n\tReported Disc Type: XXXXXX\n\tC2 Error Count: XXXXXX\n";
var builder = new StringBuilder();
DumpingInfoSection? section = new()
{
FrontendVersion = "XXXXXX",
DumpingProgram = "XXXXXX",
DumpingDate = "XXXXXX",
DumpingParameters = "XXXXXX",
Manufacturer = "XXXXXX",
Model = "XXXXXX",
Firmware = "XXXXXX",
ReportedDiscType = "XXXXXX",
C2ErrorsCount = "XXXXXX",
};
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region FormatSiteTag
[Fact]
public void FormatSiteTag_NoValue_Empty()
{
SiteCode code = SiteCode.AlternativeTitle;
string value = string.Empty;
string actual = Formatter.FormatSiteTag(code, value);
Assert.Empty(actual);
}
[Fact]
public void FormatSiteTag_Standard_Formatted()
{
string expected = "[T:ALT] XXXXXX";
SiteCode code = SiteCode.AlternativeTitle;
string value = "XXXXXX";
string actual = Formatter.FormatSiteTag(code, value);
Assert.Equal(expected, actual);
}
[Fact]
public void FormatSiteTag_BooleanTrue_Formatted()
{
string expected = "[T:VCD]";
SiteCode code = SiteCode.VCD;
string value = "True";
string actual = Formatter.FormatSiteTag(code, value);
Assert.Equal(expected, actual);
}
[Fact]
public void FormatSiteTag_BooleanFalse_Empty()
{
SiteCode code = SiteCode.VCD;
string value = "XXXXXX";
string actual = Formatter.FormatSiteTag(code, value);
Assert.Empty(actual);
}
[Fact]
public void FormatSiteTag_Multiline_Formatted()
{
string expected = "[T:X]\nXXXXXX\n";
SiteCode code = SiteCode.Extras;
string value = "XXXXXX";
string actual = Formatter.FormatSiteTag(code, value);
Assert.Equal(expected, actual);
}
#endregion
#region GetFixedMediaType
[Fact]
public void GetFixedMediaType_NullType_Null()
{
MediaType? mediaType = null;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Null(actual);
}
[Fact]
public void GetFixedMediaType_UnformattedType_Formatted()
{
string? expected = "CD-ROM";
MediaType? mediaType = MediaType.CDROM;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_DVD9_Formatted()
{
string? expected = "DVD-ROM-9";
MediaType? mediaType = MediaType.DVD;
string? picIdentifier = null;
long? size = null;
long? layerbreak = 12345;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_DVD5_Formatted()
{
string? expected = "DVD-ROM-5";
MediaType? mediaType = MediaType.DVD;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD128_Formatted()
{
string? expected = "BD-ROM-128";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = 12345;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD100_Formatted()
{
string? expected = "BD-ROM-100";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = 12345;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD66PIC_Formatted()
{
string? expected = "BD-ROM-66";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = "BDU";
long? size = null;
long? layerbreak = 12345;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD66Size_Formatted()
{
string? expected = "BD-ROM-66";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = null;
long? size = 53_687_063_713;
long? layerbreak = 12345;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD50_Formatted()
{
string? expected = "BD-ROM-50";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = null;
long? size = null;
long? layerbreak = 12345;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD33PIC_Formatted()
{
string? expected = "BD-ROM-33";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = "BDU";
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD33Size_Formatted()
{
string? expected = "BD-ROM-33";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = null;
long? size = 26_843_531_857;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD25_Formatted()
{
string? expected = "BD-ROM-25";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_HDDVDDL_Formatted()
{
string? expected = "HD-DVD-ROM-DL";
MediaType? mediaType = MediaType.HDDVD;
string? picIdentifier = null;
long? size = null;
long? layerbreak = 12345;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_HDDVDSL_Formatted()
{
string? expected = "HD-DVD-ROM-SL";
MediaType? mediaType = MediaType.HDDVD;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_UMDDL_Formatted()
{
string? expected = "UMD-DL";
MediaType? mediaType = MediaType.UMD;
string? picIdentifier = null;
long? size = null;
long? layerbreak = 12345;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_UMDSL_Formatted()
{
string? expected = "UMD-SL";
MediaType? mediaType = MediaType.UMD;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
#endregion
#region OrderCommentTags
[Fact]
public void OrderCommentTags_Empty_Empty()
{
Dictionary<SiteCode, string> tags = [];
var actual = Formatter.OrderCommentTags(tags);
Assert.Empty(actual);
}
[Fact]
public void OrderCommentTags_NoMatch_Empty()
{
var tags = new Dictionary<SiteCode, string>
{
{ SiteCode.Applications, "XXXXXX" },
};
var actual = Formatter.OrderCommentTags(tags);
Assert.Empty(actual);
}
[Fact]
public void OrderCommentTags_All_Ordered()
{
Dictionary<SiteCode, string> tags = [];
foreach (SiteCode code in Enum.GetValues<SiteCode>())
{
tags[code] = "XXXXXX";
}
var actual = Formatter.OrderCommentTags(tags);
Assert.NotEmpty(actual);
var actualCodes = actual.Select(kvp => kvp.Key);
Assert.True(Formatter.OrderedCommentCodes.SequenceEqual(actualCodes));
}
#endregion
#region OrderContentTags
[Fact]
public void OrderContentTags_Empty_Empty()
{
Dictionary<SiteCode, string> tags = [];
var actual = Formatter.OrderContentTags(tags);
Assert.Empty(actual);
}
[Fact]
public void OrderContentTags_NoMatch_Empty()
{
var tags = new Dictionary<SiteCode, string>
{
{ SiteCode.AlternativeTitle, "XXXXXX" },
};
var actual = Formatter.OrderContentTags(tags);
Assert.Empty(actual);
}
[Fact]
public void OrderContentTags_All_Ordered()
{
Dictionary<SiteCode, string> tags = [];
foreach (SiteCode code in Enum.GetValues<SiteCode>())
{
tags[code] = "XXXXXX";
}
var actual = Formatter.OrderContentTags(tags);
Assert.NotEmpty(actual);
var actualCodes = actual.Select(kvp => kvp.Key);
Assert.True(Formatter.OrderedContentCodes.SequenceEqual(actualCodes));
}
#endregion
#region RemoveConsecutiveEmptyLines
[Fact]
public void RemoveConsecutiveEmptyLines_Linux_Removed()
{
string expected = "data\n\nbase";
string newlines = "data\n\n\n\n\n\n\n\n\n\nbase";
string actual = Formatter.RemoveConsecutiveEmptyLines(newlines);
Assert.Equal(expected, actual);
}
[Fact]
public void RemoveConsecutiveEmptyLines_Windows_Removed()
{
string expected = "data\r\n\r\nbase";
string newlines = "data\r\n\r\n\r\n\r\n\r\nbase";
string actual = Formatter.RemoveConsecutiveEmptyLines(newlines);
Assert.Equal(expected, actual);
}
#endregion
}
}

View File

@@ -1,37 +1,47 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net6.0;net8.0</TargetFrameworks>
<CheckEolTargetFramework>false</CheckEolTargetFramework>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<TargetFrameworks>net8.0;net9.0;net10.0</TargetFrameworks>
<CheckEolTargetFramework>false</CheckEolTargetFramework>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\SabreTools.RedumpLib\SabreTools.RedumpLib.csproj" />
<ProjectReference Include="..\SabreTools.RedumpLib\SabreTools.RedumpLib.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.CodeCoverage" Version="17.10.0-release-24177-07" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.10.0-release-24177-07" />
<PackageReference Include="xunit" Version="2.8.0" />
<PackageReference Include="xunit.abstractions" Version="2.0.3" />
<PackageReference Include="xunit.analyzers" Version="1.13.0" />
<PackageReference Include="xunit.assert" Version="2.8.0" />
<PackageReference Include="xunit.core" Version="2.8.0" />
<PackageReference Include="xunit.extensibility.core" Version="2.8.0" />
<PackageReference Include="xunit.extensibility.execution" Version="2.8.0" />
<PackageReference Include="xunit.runner.console" Version="2.8.0">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.0">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<None Remove="TestData\*" />
</ItemGroup>
</Project>
<ItemGroup>
<Content Include="TestData\*">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</Content>
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.CodeCoverage" Version="18.0.1" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="18.0.1" />
<PackageReference Include="xunit" Version="2.9.3" />
<PackageReference Include="xunit.abstractions" Version="2.0.3" />
<PackageReference Include="xunit.analyzers" Version="1.25.0" />
<PackageReference Include="xunit.assert" Version="2.9.3" />
<PackageReference Include="xunit.core" Version="2.9.3" />
<PackageReference Include="xunit.extensibility.core" Version="2.9.3" />
<PackageReference Include="xunit.extensibility.execution" Version="2.9.3" />
<PackageReference Include="xunit.runner.console" Version="2.9.3">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="xunit.runner.visualstudio" Version="3.1.5">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
</Project>

View File

@@ -1,180 +1,183 @@
using System;
using System.Collections.Generic;
using Newtonsoft.Json;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace SabreTools.RedumpLib.Test
{
public class SubmissionInfoTests
{
[Fact]
public void EmptySerializationTest()
{
var submissionInfo = new SubmissionInfo();
string json = JsonConvert.SerializeObject(submissionInfo, Formatting.Indented);
Assert.NotNull(json);
}
[Fact]
public void PartialSerializationTest()
{
var submissionInfo = new SubmissionInfo()
{
CommonDiscInfo = new CommonDiscInfoSection(),
VersionAndEditions = new VersionAndEditionsSection(),
EDC = new EDCSection(),
ParentCloneRelationship = new ParentCloneRelationshipSection(),
Extras = new ExtrasSection(),
CopyProtection = new CopyProtectionSection(),
DumpersAndStatus = new DumpersAndStatusSection(),
TracksAndWriteOffsets = new TracksAndWriteOffsetsSection(),
SizeAndChecksums = new SizeAndChecksumsSection(),
};
string json = JsonConvert.SerializeObject(submissionInfo, Formatting.Indented);
Assert.NotNull(json);
}
[Fact]
public void FullSerializationTest()
{
var submissionInfo = new SubmissionInfo()
{
SchemaVersion = 1,
FullyMatchedID = 3,
PartiallyMatchedIDs = new List<int> { 0, 1, 2, 3 },
Added = DateTime.UtcNow,
LastModified = DateTime.UtcNow,
CommonDiscInfo = new CommonDiscInfoSection()
{
System = RedumpSystem.IBMPCcompatible,
Media = DiscType.CD,
Title = "Game Title",
ForeignTitleNonLatin = "Foreign Game Title",
DiscNumberLetter = "1",
DiscTitle = "Install Disc",
Category = DiscCategory.Games,
Region = Region.World,
Languages = new Language?[] { Language.English, Language.Spanish, Language.French },
LanguageSelection = new LanguageSelection?[] { LanguageSelection.BiosSettings },
Serial = "Disc Serial",
Layer0MasteringRing = "L0 Mastering Ring",
Layer0MasteringSID = "L0 Mastering SID",
Layer0ToolstampMasteringCode = "L0 Toolstamp",
Layer0MouldSID = "L0 Mould SID",
Layer0AdditionalMould = "L0 Additional Mould",
Layer1MasteringRing = "L1 Mastering Ring",
Layer1MasteringSID = "L1 Mastering SID",
Layer1ToolstampMasteringCode = "L1 Toolstamp",
Layer1MouldSID = "L1 Mould SID",
Layer1AdditionalMould = "L1 Additional Mould",
Layer2MasteringRing = "L2 Mastering Ring",
Layer2MasteringSID = "L2 Mastering SID",
Layer2ToolstampMasteringCode = "L2 Toolstamp",
Layer3MasteringRing = "L3 Mastering Ring",
Layer3MasteringSID = "L3 Mastering SID",
Layer3ToolstampMasteringCode = "L3 Toolstamp",
RingWriteOffset = "+12",
Barcode = "UPC Barcode",
EXEDateBuildDate = "19xx-xx-xx",
ErrorsCount = "0",
Comments = "Comment data line 1\r\nComment data line 2",
CommentsSpecialFields = new Dictionary<SiteCode, string>()
{
[SiteCode.ISBN] = "ISBN",
},
Contents = "Special contents 1\r\nSpecial contents 2",
ContentsSpecialFields = new Dictionary<SiteCode, string>()
{
[SiteCode.PlayableDemos] = "Game Demo 1",
},
},
VersionAndEditions = new VersionAndEditionsSection()
{
Version = "Original",
VersionDatfile = "Alt",
CommonEditions = new string[] { "Taikenban" },
OtherEditions = "Rerelease",
},
EDC = new EDCSection()
{
EDC = YesNo.Yes,
},
ParentCloneRelationship = new ParentCloneRelationshipSection()
{
ParentID = "12345",
RegionalParent = false,
},
Extras = new ExtrasSection()
{
PVD = "PVD",
DiscKey = "Disc key",
DiscID = "Disc ID",
PIC = "PIC",
Header = "Header",
BCA = "BCA",
SecuritySectorRanges = "SSv1 Ranges",
},
CopyProtection = new CopyProtectionSection()
{
AntiModchip = YesNo.Yes,
LibCrypt = YesNo.No,
LibCryptData = "LibCrypt data",
Protection = "List of protections",
SecuROMData = "SecuROM data",
},
DumpersAndStatus = new DumpersAndStatusSection()
{
Status = DumpStatus.TwoOrMoreGreen,
Dumpers = new string[] { "Dumper1", "Dumper2" },
OtherDumpers = "Dumper3",
},
TracksAndWriteOffsets = new TracksAndWriteOffsetsSection()
{
ClrMameProData = "Datfile",
Cuesheet = "Cuesheet",
CommonWriteOffsets = new int[] { 0, 12, -12 },
OtherWriteOffsets = "-2",
},
SizeAndChecksums = new SizeAndChecksumsSection()
{
Layerbreak = 0,
Layerbreak2 = 1,
Layerbreak3 = 2,
Size = 12345,
CRC32 = "CRC32",
MD5 = "MD5",
SHA1 = "SHA1",
},
DumpingInfo = new DumpingInfoSection()
{
DumpingProgram = "DiscImageCreator 20500101",
DumpingDate = DateTime.UtcNow.ToString("yyyy-MM-dd HH:mm:ss"),
Manufacturer = "ATAPI",
Model = "Optical Drive",
Firmware = "1.23",
ReportedDiscType = "CD-R",
},
Artifacts = new Dictionary<string, string>()
{
["Sample Artifact"] = "Sample Data",
},
};
string json = JsonConvert.SerializeObject(submissionInfo, Formatting.Indented);
Assert.NotNull(json);
}
}
}
using System;
using System.Collections.Generic;
using Newtonsoft.Json;
using SabreTools.RedumpLib.Data;
using SabreTools.RedumpLib.Data.Sections;
using Xunit;
namespace SabreTools.RedumpLib.Test
{
public class SubmissionInfoTests
{
[Fact]
public void EmptySerializationTest()
{
var submissionInfo = new SubmissionInfo();
string json = JsonConvert.SerializeObject(submissionInfo, Formatting.Indented);
Assert.NotNull(json);
}
[Fact]
public void PartialSerializationTest()
{
var submissionInfo = new SubmissionInfo()
{
CommonDiscInfo = new CommonDiscInfoSection(),
VersionAndEditions = new VersionAndEditionsSection(),
EDC = new EDCSection(),
ParentCloneRelationship = new ParentCloneRelationshipSection(),
Extras = new ExtrasSection(),
CopyProtection = new CopyProtectionSection(),
DumpersAndStatus = new DumpersAndStatusSection(),
TracksAndWriteOffsets = new TracksAndWriteOffsetsSection(),
SizeAndChecksums = new SizeAndChecksumsSection(),
};
string json = JsonConvert.SerializeObject(submissionInfo, Formatting.Indented);
Assert.NotNull(json);
}
[Fact]
public void FullSerializationTest()
{
var submissionInfo = new SubmissionInfo()
{
SchemaVersion = 1,
FullyMatchedID = 3,
PartiallyMatchedIDs = [0, 1, 2, 3],
Added = DateTime.UtcNow,
LastModified = DateTime.UtcNow,
CommonDiscInfo = new CommonDiscInfoSection()
{
System = RedumpSystem.IBMPCcompatible,
Media = DiscType.CD,
Title = "Game Title",
ForeignTitleNonLatin = "Foreign Game Title",
DiscNumberLetter = "1",
DiscTitle = "Install Disc",
Category = DiscCategory.Games,
Region = Region.World,
Languages = [Language.English, Language.Spanish, Language.French],
LanguageSelection = [LanguageSelection.BiosSettings],
Serial = "Disc Serial",
Layer0MasteringRing = "L0 Mastering Ring",
Layer0MasteringSID = "L0 Mastering SID",
Layer0ToolstampMasteringCode = "L0 Toolstamp",
Layer0MouldSID = "L0 Mould SID",
Layer0AdditionalMould = "L0 Additional Mould",
Layer1MasteringRing = "L1 Mastering Ring",
Layer1MasteringSID = "L1 Mastering SID",
Layer1ToolstampMasteringCode = "L1 Toolstamp",
Layer1MouldSID = "L1 Mould SID",
Layer1AdditionalMould = "L1 Additional Mould",
Layer2MasteringRing = "L2 Mastering Ring",
Layer2MasteringSID = "L2 Mastering SID",
Layer2ToolstampMasteringCode = "L2 Toolstamp",
Layer3MasteringRing = "L3 Mastering Ring",
Layer3MasteringSID = "L3 Mastering SID",
Layer3ToolstampMasteringCode = "L3 Toolstamp",
RingWriteOffset = "+12",
Barcode = "UPC Barcode",
EXEDateBuildDate = "19xx-xx-xx",
ErrorsCount = "0",
Comments = "Comment data line 1\r\nComment data line 2",
CommentsSpecialFields = new Dictionary<SiteCode, string>()
{
[SiteCode.ISBN] = "ISBN",
},
Contents = "Special contents 1\r\nSpecial contents 2",
ContentsSpecialFields = new Dictionary<SiteCode, string>()
{
[SiteCode.PlayableDemos] = "Game Demo 1",
},
},
VersionAndEditions = new VersionAndEditionsSection()
{
Version = "Original",
VersionDatfile = "Alt",
CommonEditions = ["Taikenban"],
OtherEditions = "Rerelease",
},
EDC = new EDCSection()
{
EDC = YesNo.Yes,
},
ParentCloneRelationship = new ParentCloneRelationshipSection()
{
ParentID = "12345",
RegionalParent = false,
},
Extras = new ExtrasSection()
{
PVD = "PVD",
DiscKey = "Disc key",
DiscID = "Disc ID",
PIC = "PIC",
Header = "Header",
BCA = "BCA",
SecuritySectorRanges = "SSv1 Ranges",
},
CopyProtection = new CopyProtectionSection()
{
AntiModchip = YesNo.Yes,
LibCrypt = YesNo.No,
LibCryptData = "LibCrypt data",
Protection = "List of protections",
SecuROMData = "SecuROM data",
},
DumpersAndStatus = new DumpersAndStatusSection()
{
Status = DumpStatus.TwoOrMoreGreen,
Dumpers = ["Dumper1", "Dumper2"],
OtherDumpers = "Dumper3",
},
TracksAndWriteOffsets = new TracksAndWriteOffsetsSection()
{
ClrMameProData = "Datfile",
Cuesheet = "Cuesheet",
CuesheetRaw = [0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39],
CommonWriteOffsets = [0, 12, -12],
OtherWriteOffsets = "-2",
},
SizeAndChecksums = new SizeAndChecksumsSection()
{
Layerbreak = 0,
Layerbreak2 = 1,
Layerbreak3 = 2,
Size = 12345,
CRC32 = "CRC32",
MD5 = "MD5",
SHA1 = "SHA1",
},
DumpingInfo = new DumpingInfoSection()
{
DumpingProgram = "DiscImageCreator 20500101",
DumpingDate = DateTime.UtcNow.ToString("yyyy-MM-dd HH:mm:ss"),
DumpingParameters = "cd dvd bd sacd fd hdd",
Manufacturer = "ATAPI",
Model = "Optical Drive",
Firmware = "1.23",
ReportedDiscType = "CD-R",
},
Artifacts = new Dictionary<string, string>()
{
["Sample Artifact"] = "Sample Data",
},
};
string json = JsonConvert.SerializeObject(submissionInfo, Formatting.Indented);
Assert.NotNull(json);
}
}
}

View File

@@ -0,0 +1 @@
This isn't even JSON, I lied.

View File

@@ -0,0 +1,96 @@
{
"schema_version": 3,
"common_disc_info":
{
"d_system": "ajcd",
"d_media": "cd",
"d_title": "Test Title",
"d_title_foreign": "Foreign Title",
"d_number": "1",
"d_label": "Install",
"d_category": "Games",
"d_region": "U",
"d_languages":
[
"en",
"fr",
"es"
],
"d_languages_selection": [],
"d_serial": "Serial",
"d_ring_0_ma1": "Ringcode 0 Layer 0",
"d_ring_0_ma1_sid": "SID 0 Layer 0",
"d_ring_0_ts1": "Toolstamp 0 Layer 0",
"d_ring_0_mo1_sid": "Mould SID 0 Layer 0",
"d_ring_0_mo1": "Additional Mould 0 Layer 0",
"d_ring_0_ma2": "Ringcode 0 Layer 1",
"d_ring_0_ma2_sid": "SID 0 Layer 1",
"d_ring_0_ts2": "Toolstamp 0 Layer 1",
"d_ring_0_mo2_sid": "Mould SID 0 Layer 1",
"d_ring_0_mo2": "Additional Mould 0 Layer 1",
"d_ring_0_ma3": "Ringcode 0 Layer 2",
"d_ring_0_ma3_sid": "SID 0 Layer 2",
"d_ring_0_ts3": "Toolstamp 0 Layer 2",
"d_ring_0_ma4": "Ringcode 0 Layer 3",
"d_ring_0_ma4_sid": "SID 0 Layer 2",
"d_ring_0_ts4": "Toolstamp 0 Layer 2",
"d_ring_0_offsets": "-22",
"d_ring_0_0_value": "-21",
"d_barcode": "0 12345 67890 1",
"d_date": "1980-01-01",
"d_errors": "0",
"d_comments": "This is a comment\nwith a newline",
"d_contents": "These are contents, sorry"
},
"versions_and_editions":
{
"d_version": "1.0.0.0",
"d_version_datfile": "1.00",
"d_editions_text": "Demo"
},
"edc":
{
"d_edc": false
},
"parent_clone_relationship":
{
"d_parent_id": "12345",
"d_is_regional_parent": false
},
"extras":
{
"d_pvd": "Pretend\nthis\nis\na\nPVD",
"d_d1_key": "Disc key",
"d_d2_key": "Disc ID",
"d_pic_data": "Pretend\nthis\nis\na\nPIC",
"d_header": "Pretend\nthis\nis\na\nHeader",
"d_bca": "Pretend\nthis\nis\na\nBCA",
"d_ssranges": "Pretend\nthis\nis\na\nsecurity_range"
},
"copy_protection":
{
"d_protection_a": false,
"d_protection_1": false,
"d_libcrypt": "Definitely\nLibCrypt\nData",
"d_protection": "Super easy to find protection",
"d_securom": "Definitely\nSecuROM\nData"
},
"tracks_and_write_offsets":
{
"d_tracks": "Hash data",
"d_cue": "Real cuesheet",
"d_offset_text": "-22"
},
"size_and_checksums":
{
"d_layerbreak": 1,
"d_layerbreak_2": 2,
"d_layerbreak_3": 3,
"d_pic_identifier": "Pretend\nthis\nis\na\nPIC",
"d_size": 123456,
"d_crc32": "cbf43926",
"d_md5": "d41d8cd98f00b204e9800998ecf8427e",
"d_sha1": "da39a3ee5e6b4b0d3255bfef95601890afd80709"
}
}

View File

@@ -0,0 +1,4 @@
{
"invalid_key": "invalid_value",
"invalid_x": 12345
}

View File

@@ -0,0 +1,7 @@
{
"schema_version": 3,
"common_disc_info":
{
"d_title": "Test Title"
}
}

View File

@@ -0,0 +1,305 @@
using SabreTools.RedumpLib.Data;
using SabreTools.RedumpLib.Data.Sections;
using Xunit;
namespace SabreTools.RedumpLib.Test
{
public class ValidatorTests
{
// Most tests here will require installing and using the Moq library
// to mock the RedumpClient type.
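// A hedged, commented-out sketch of the Moq-based shape those tests could take. It assumes
// RedumpClient.DownloadSingleSiteID is overridable (or hidden behind an interface), names a
// placeholder Validator method, and would need "using Moq;" plus "using System.Threading.Tasks;".
//[Fact]
//public async Task Validator_EmptyDiscData_ReturnsFalse()
//{
//    var mockClient = new Mock<RedumpClient>();
//    mockClient
//        .Setup(c => c.DownloadSingleSiteID(It.IsAny<int>()))
//        .ReturnsAsync(string.Empty);
//    var info = new SubmissionInfo();
//    // Placeholder name; substitute whichever Validator method consumes the client
//    bool actual = await Validator.MethodUnderTest(mockClient.Object, info, 1);
//    Assert.False(actual);
//}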
[Fact]
public void NormalizeDiscType_InvalidMedia_Untouched()
{
var si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = null }
};
Validator.NormalizeDiscType(si);
Assert.Null(si.CommonDiscInfo.Media);
}
[Fact]
public void NormalizeDiscType_InvalidSizeChecksums_Untouched()
{
DiscType expected = DiscType.CD;
var si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = DiscType.CD },
SizeAndChecksums = new(),
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Fact]
public void NormalizeDiscType_UnformattedType_Fixed()
{
DiscType expected = DiscType.CD;
var si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = DiscType.CD },
SizeAndChecksums = new SizeAndChecksumsSection(),
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.DVD5)]
[InlineData(DiscType.DVD9)]
public void NormalizeDiscType_DVD9_Fixed(DiscType type)
{
DiscType expected = DiscType.DVD9;
var si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection { Layerbreak = 12345 },
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.DVD5)]
[InlineData(DiscType.DVD9)]
public void NormalizeDiscType_DVD5_Fixed(DiscType type)
{
DiscType expected = DiscType.DVD5;
var si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection(),
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD128_Fixed(DiscType type)
{
DiscType expected = DiscType.BD128;
var si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection { Layerbreak3 = 12345 },
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD100_Fixed(DiscType type)
{
DiscType expected = DiscType.BD100;
var si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection { Layerbreak2 = 12345 },
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD66PIC_Fixed(DiscType type)
{
DiscType expected = DiscType.BD66;
var si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection
{
Layerbreak = 12345,
PICIdentifier = "BDU",
},
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD66Size_Fixed(DiscType type)
{
DiscType expected = DiscType.BD66;
var si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection
{
Layerbreak = 12345,
Size = 50_050_629_633,
},
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD50_Fixed(DiscType type)
{
DiscType expected = DiscType.BD50;
var si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection { Layerbreak = 12345 },
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD33PIC_Fixed(DiscType type)
{
DiscType expected = DiscType.BD33;
var si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection
{
PICIdentifier = "BDU",
},
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD33Size_Fixed(DiscType type)
{
DiscType expected = DiscType.BD33;
var si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection
{
Size = 25_025_314_817,
},
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD25_Fixed(DiscType type)
{
DiscType expected = DiscType.BD25;
var si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection(),
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.UMDSL)]
[InlineData(DiscType.UMDDL)]
public void NormalizeDiscType_UMDDL_Fixed(DiscType type)
{
DiscType expected = DiscType.UMDDL;
var si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection { Layerbreak = 12345 },
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.UMDSL)]
[InlineData(DiscType.UMDDL)]
public void NormalizeDiscType_UMDSL_Fixed(DiscType type)
{
DiscType expected = DiscType.UMDSL;
var si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection(),
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
}
}

View File

@@ -7,6 +7,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SabreTools.RedumpLib", "Sab
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SabreTools.RedumpLib.Test", "SabreTools.RedumpLib.Test\SabreTools.RedumpLib.Test.csproj", "{63519DEA-0C3D-4F0E-95EB-E9B6E1D55378}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "RedumpTool", "RedumpTool\RedumpTool.csproj", "{556F7AA0-00D7-4D5B-96EB-D436FEFD9810}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -24,5 +26,9 @@ Global
{63519DEA-0C3D-4F0E-95EB-E9B6E1D55378}.Debug|Any CPU.Build.0 = Debug|Any CPU
{63519DEA-0C3D-4F0E-95EB-E9B6E1D55378}.Release|Any CPU.ActiveCfg = Release|Any CPU
{63519DEA-0C3D-4F0E-95EB-E9B6E1D55378}.Release|Any CPU.Build.0 = Release|Any CPU
{556F7AA0-00D7-4D5B-96EB-D436FEFD9810}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{556F7AA0-00D7-4D5B-96EB-D436FEFD9810}.Debug|Any CPU.Build.0 = Debug|Any CPU
{556F7AA0-00D7-4D5B-96EB-D436FEFD9810}.Release|Any CPU.ActiveCfg = Release|Any CPU
{556F7AA0-00D7-4D5B-96EB-D436FEFD9810}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
EndGlobal

View File

@@ -1,5 +1,4 @@
using System;
using System.Linq;
namespace SabreTools.RedumpLib.Attributes
{
@@ -13,36 +12,36 @@ namespace SabreTools.RedumpLib.Attributes
public static HumanReadableAttribute? GetAttribute(T value)
{
// Null value in, null value out
if (value == null)
if (value is null)
return null;
// Current enumeration type
var enumType = typeof(T);
if (Nullable.GetUnderlyingType(enumType) != null)
if (Nullable.GetUnderlyingType(enumType) is not null)
enumType = Nullable.GetUnderlyingType(enumType);
// If the value returns a null on ToString, just return null
string? valueStr = value?.ToString();
if (string.IsNullOrEmpty(valueStr))
return null;
// Get the member info array
var memberInfos = enumType?.GetMember(valueStr);
if (memberInfos == null)
if (memberInfos is null)
return null;
// Get the enum value info from the array, if possible
var enumValueMemberInfo = memberInfos.FirstOrDefault(m => m.DeclaringType == enumType);
if (enumValueMemberInfo == null)
var enumValueMemberInfo = Array.Find(memberInfos, m => m.DeclaringType == enumType);
if (enumValueMemberInfo is null)
return null;
// Try to get the relevant attribute
var attributes = enumValueMemberInfo.GetCustomAttributes(typeof(HumanReadableAttribute), true);
if (attributes == null)
if (attributes is null || attributes.Length == 0)
return null;
// Return the first attribute, if possible
return attributes.FirstOrDefault() as HumanReadableAttribute;
return attributes[0] as HumanReadableAttribute;
}
}
}
}

View File

@@ -22,4 +22,4 @@ namespace SabreTools.RedumpLib.Attributes
/// </summary>
public string? ShortName { get; set; }
}
}
}

View File

@@ -23,4 +23,4 @@ namespace SabreTools.RedumpLib.Attributes
/// </summary>
public string? ThreeLetterCodeAlt { get; set; }
}
}
}

View File

@@ -52,4 +52,4 @@ namespace SabreTools.RedumpLib.Attributes
/// </summary>
public bool HasSbi { get; set; } = false;
}
}
}

View File

@@ -1,14 +1,16 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
#if NET40_OR_GREATER || NETCOREAPP || NETSTANDARD2_0_OR_GREATER
using System.Net;
#endif
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using System.Xml;
using Newtonsoft.Json;
using SabreTools.RedumpLib.Data;
using SabreTools.RedumpLib.Data.Sections;
using SabreTools.RedumpLib.Web;
namespace SabreTools.RedumpLib
@@ -52,6 +54,7 @@ namespace SabreTools.RedumpLib
/// <param name="discData">String containing the HTML disc data</param>
/// <returns>Filled SubmissionInfo object on success, null on error</returns>
/// <remarks>Not currently working</remarks>
#pragma warning disable IDE0051
private static SubmissionInfo? CreateFromID(string discData)
{
var info = new SubmissionInfo()
@@ -76,7 +79,7 @@ namespace SabreTools.RedumpLib
// Get the body node, if possible
var bodyNode = redumpPage["html"]?["body"];
if (bodyNode == null || !bodyNode.HasChildNodes)
if (bodyNode is null || !bodyNode.HasChildNodes)
return null;
// Loop through and get the main node, if possible
@@ -84,7 +87,7 @@ namespace SabreTools.RedumpLib
foreach (XmlNode? tempNode in bodyNode.ChildNodes)
{
// Invalid nodes are skipped
if (tempNode == null)
if (tempNode is null)
continue;
// We only care about div elements
@@ -92,7 +95,7 @@ namespace SabreTools.RedumpLib
continue;
// We only care if it has attributes
if (tempNode.Attributes == null)
if (tempNode.Attributes is null)
continue;
// The main node has a class of "main"
@@ -104,14 +107,14 @@ namespace SabreTools.RedumpLib
}
// If the main node is invalid, we can't do anything
if (mainNode == null || !mainNode.HasChildNodes)
if (mainNode is null || !mainNode.HasChildNodes)
return null;
// Try to find elements as we're going
foreach (XmlNode? childNode in mainNode.ChildNodes)
{
// Invalid nodes are skipped
if (childNode == null)
if (childNode is null)
continue;
// The title is the only thing in h1 tags
@@ -123,7 +126,7 @@ namespace SabreTools.RedumpLib
continue;
// Only 2 of the internal divs have classes attached and one is not used here
if (childNode.Attributes != null && string.Equals(childNode.Attributes["class"]?.Value, "game",
if (childNode.Attributes is not null && string.Equals(childNode.Attributes["class"]?.Value, "game",
StringComparison.OrdinalIgnoreCase))
{
// If we don't have children nodes, skip this one over
@@ -134,14 +137,14 @@ namespace SabreTools.RedumpLib
foreach (XmlNode? gameNode in childNode.ChildNodes)
{
// Invalid nodes are skipped
if (gameNode == null)
if (gameNode is null)
continue;
// Table elements contain multiple other parts of information
if (string.Equals(gameNode.Name, "table", StringComparison.OrdinalIgnoreCase))
{
// All tables have some attribute we can use
if (gameNode.Attributes == null)
if (gameNode.Attributes is null)
continue;
// The gameinfo node contains most of the major information
@@ -156,7 +159,7 @@ namespace SabreTools.RedumpLib
foreach (XmlNode? gameInfoNode in gameNode.ChildNodes)
{
// Invalid nodes are skipped
if (gameInfoNode == null)
if (gameInfoNode is null)
continue;
// If we run into anything not a row, ignore it
@@ -164,13 +167,13 @@ namespace SabreTools.RedumpLib
continue;
// If we don't have the required nodes, ignore it
if (gameInfoNode["th"] == null || gameInfoNode["td"] == null)
if (gameInfoNode["th"] is null || gameInfoNode["td"] is null)
continue;
var gameInfoNodeHeader = gameInfoNode["th"];
var gameInfoNodeData = gameInfoNode["td"];
if (gameInfoNodeHeader == null || gameInfoNodeData == null)
if (gameInfoNodeHeader is null || gameInfoNodeData is null)
{
// No-op for invalid data
}
@@ -242,6 +245,7 @@ namespace SabreTools.RedumpLib
return info;
}
#pragma warning restore IDE0051
/// <summary>
/// Fill out an existing SubmissionInfo object based on a disc page
@@ -250,10 +254,8 @@ namespace SabreTools.RedumpLib
/// <param name="info">Existing SubmissionInfo object to fill</param>
/// <param name="id">Redump disc ID to retrieve</param>
/// <param name="includeAllData">True to include all pullable information, false to do bare minimum</param>
public async static Task<bool> FillFromId(RedumpClient rc, SubmissionInfo info, int id, bool includeAllData)
public static async Task<bool> FillFromId(RedumpClient rc, SubmissionInfo info, int id, bool includeAllData)
{
// Ensure that required sections exist
info = EnsureAllSections(info);
var discData = await rc.DownloadSingleSiteID(id);
if (string.IsNullOrEmpty(discData))
return false;
@@ -266,25 +268,36 @@ namespace SabreTools.RedumpLib
// If we have parenthesis, title is everything before the first one
int firstParenLocation = title?.IndexOf(" (") ?? -1;
if (title != null && firstParenLocation >= 0)
if (title is not null && firstParenLocation >= 0)
{
#if NETCOREAPP || NETSTANDARD2_1_OR_GREATER
info.CommonDiscInfo!.Title = title[..firstParenLocation];
#else
info.CommonDiscInfo!.Title = title.Substring(0, firstParenLocation);
var subMatches = Constants.DiscNumberLetterRegex.Matches(title);
foreach (Match subMatch in subMatches.Cast<Match>())
#endif
var submatches = Constants.DiscNumberLetterRegex.Matches(title);
foreach (Match? submatch in submatches)
{
var subMatchValue = subMatch.Groups[1].Value;
if (submatch is null)
continue;
var submatchValue = submatch.Groups[1].Value;
// Disc number or letter
if (subMatchValue.StartsWith("Disc"))
info.CommonDiscInfo.DiscNumberLetter = subMatchValue.Remove(0, "Disc ".Length);
if (submatchValue.StartsWith("Disc"))
#if NETCOREAPP || NETSTANDARD2_1_OR_GREATER
info.CommonDiscInfo.DiscNumberLetter = submatchValue["Disc ".Length..];
#else
info.CommonDiscInfo.DiscNumberLetter = submatchValue.Remove(0, "Disc ".Length);
#endif
// Issue number
else if (subMatchValue.All(c => char.IsNumber(c)))
info.CommonDiscInfo.Title += $" ({subMatchValue})";
else if (ulong.TryParse(submatchValue, out _))
info.CommonDiscInfo.Title += $" ({submatchValue})";
// Disc title
else
info.CommonDiscInfo.DiscTitle = subMatchValue;
info.CommonDiscInfo.DiscTitle = submatchValue;
}
}
// Otherwise, leave the title as-is
@@ -298,18 +311,16 @@ namespace SabreTools.RedumpLib
match = Constants.ForeignTitleRegex.Match(discData);
if (match.Success)
info.CommonDiscInfo!.ForeignTitleNonLatin = WebUtility.HtmlDecode(match.Groups[1].Value);
else
info.CommonDiscInfo!.ForeignTitleNonLatin = null;
// Category
match = Constants.CategoryRegex.Match(discData);
if (match.Success)
info.CommonDiscInfo.Category = Extensions.ToDiscCategory(match.Groups[1].Value);
info.CommonDiscInfo!.Category = Extensions.ToDiscCategory(match.Groups[1].Value);
else
info.CommonDiscInfo.Category = DiscCategory.Games;
info.CommonDiscInfo!.Category = DiscCategory.Games;
// Region
if (info.CommonDiscInfo.Region == null)
if (info.CommonDiscInfo.Region is null)
{
match = Constants.RegionRegex.Match(discData);
if (match.Success)
@@ -321,12 +332,17 @@ namespace SabreTools.RedumpLib
if (matches.Count > 0)
{
var tempLanguages = new List<Language?>();
foreach (Match submatch in matches.Cast<Match>())
foreach (Match? submatch in matches)
{
tempLanguages.Add(Extensions.ToLanguage(submatch.Groups[1].Value));
if (submatch is null)
continue;
var language = Extensions.ToLanguage(submatch.Groups[1].Value);
if (language is not null)
tempLanguages.Add(language);
}
info.CommonDiscInfo.Languages = tempLanguages.Where(l => l != null).ToArray();
info.CommonDiscInfo.Languages = [.. tempLanguages];
}
// Serial
@@ -347,7 +363,7 @@ namespace SabreTools.RedumpLib
}
// Version
if (info.VersionAndEditions!.Version == null)
if (info.VersionAndEditions!.Version is null)
{
match = Constants.VersionRegex.Match(discData);
if (match.Success)
@@ -360,16 +376,19 @@ namespace SabreTools.RedumpLib
{
// Start with any currently listed dumpers
var tempDumpers = new List<string>();
if (info.DumpersAndStatus!.Dumpers != null && info.DumpersAndStatus.Dumpers.Length > 0)
if (info.DumpersAndStatus!.Dumpers is not null && info.DumpersAndStatus.Dumpers.Length > 0)
{
foreach (string dumper in info.DumpersAndStatus.Dumpers)
tempDumpers.Add(dumper);
}
foreach (Match submatch in matches.Cast<Match>())
foreach (Match? submatch in matches)
{
if (submatch is null)
continue;
string? dumper = WebUtility.HtmlDecode(submatch.Groups[1].Value);
if (dumper != null)
if (dumper is not null)
tempDumpers.Add(dumper);
}
@@ -430,12 +449,12 @@ namespace SabreTools.RedumpLib
foreach (SiteCode? siteCode in Enum.GetValues(typeof(SiteCode)))
{
// If we have a null site code, just skip
if (siteCode == null)
if (siteCode is null)
continue;
// If the line doesn't contain this tag, just skip
var shortName = siteCode.ShortName();
if (shortName == null || !commentLine.Contains(shortName))
if (shortName is null || !commentLine.Contains(shortName))
continue;
// Mark as having found a tag
@@ -448,36 +467,8 @@ namespace SabreTools.RedumpLib
addToLast = siteCode.IsMultiLine();
// Skip certain site codes because of data issues
switch (siteCode)
{
// Multiple
case SiteCode.InternalSerialName:
case SiteCode.Multisession:
case SiteCode.VolumeLabel:
continue;
// Audio CD
case SiteCode.RingNonZeroDataStart:
case SiteCode.UniversalHash:
continue;
// Microsoft Xbox and Xbox 360
case SiteCode.DMIHash:
case SiteCode.PFIHash:
case SiteCode.SSHash:
case SiteCode.SSVersion:
case SiteCode.XMID:
case SiteCode.XeMID:
continue;
// Microsoft Xbox One and Series X/S
case SiteCode.Filename:
continue;
// Nintendo Gamecube
case SiteCode.InternalName:
continue;
}
if (ShouldSkipSiteCode(siteCode))
continue;
// If we don't already have this site code, add it to the dictionary
if (!info.CommonDiscInfo.CommentsSpecialFields!.ContainsKey(siteCode.Value))
@@ -493,14 +484,14 @@ namespace SabreTools.RedumpLib
// If we didn't find a known tag, just add the line, just in case
if (!foundTag)
{
if (addToLast && lastSiteCode != null)
if (addToLast && lastSiteCode is not null && !ShouldSkipSiteCode(lastSiteCode))
{
if (!string.IsNullOrEmpty(info.CommonDiscInfo.CommentsSpecialFields![lastSiteCode.Value]))
info.CommonDiscInfo.CommentsSpecialFields[lastSiteCode.Value] += "\n";
info.CommonDiscInfo.CommentsSpecialFields[lastSiteCode.Value] += commentLine;
}
else
else if (!addToLast || lastSiteCode is null)
{
newComments += $"{commentLine}\n";
}
@@ -555,12 +546,12 @@ namespace SabreTools.RedumpLib
foreach (SiteCode? siteCode in Enum.GetValues(typeof(SiteCode)))
{
// If we have a null site code, just skip
if (siteCode == null)
if (siteCode is null)
continue;
// If the line doesn't contain this tag, just skip
var shortName = siteCode.ShortName();
if (shortName == null || !contentLine.Contains(shortName))
if (shortName is null || !contentLine.Contains(shortName))
continue;
// Cache the current site code
@@ -581,14 +572,14 @@ namespace SabreTools.RedumpLib
// If we didn't find a known tag, just add the line, just in case
if (!foundTag)
{
if (addToLast && lastSiteCode != null)
if (addToLast && lastSiteCode is not null && !ShouldSkipSiteCode(lastSiteCode))
{
if (!string.IsNullOrEmpty(info.CommonDiscInfo.ContentsSpecialFields![lastSiteCode.Value]))
info.CommonDiscInfo.ContentsSpecialFields[lastSiteCode.Value] += "\n";
info.CommonDiscInfo.ContentsSpecialFields[lastSiteCode.Value] += contentLine;
}
else
else if (!addToLast || lastSiteCode is null)
{
newContents += $"{contentLine}\n";
}
@@ -624,100 +615,106 @@ namespace SabreTools.RedumpLib
}
/// <summary>
/// Ensure all required sections in a submission info exist
/// Inject information from a seed SubmissionInfo into the existing one
/// </summary>
/// <param name="info">SubmissionInfo object to verify</param>
public static SubmissionInfo EnsureAllSections(SubmissionInfo? info)
/// <param name="info">Existing submission information</param>
/// <param name="seed">User-supplied submission information</param>
public static SubmissionInfo? InjectSubmissionInformation(SubmissionInfo? info, SubmissionInfo? seed)
{
// If there's no info, create one
// If we have any invalid info
if (seed is null)
return info;
// Otherwise, inject information as necessary
info ??= new SubmissionInfo();
// Ensure all sections
info.CommonDiscInfo ??= new CommonDiscInfoSection();
info.VersionAndEditions ??= new VersionAndEditionsSection();
info.EDC ??= new EDCSection();
info.ParentCloneRelationship ??= new ParentCloneRelationshipSection();
info.Extras ??= new ExtrasSection();
info.CopyProtection ??= new CopyProtectionSection();
info.DumpersAndStatus ??= new DumpersAndStatusSection();
info.TracksAndWriteOffsets ??= new TracksAndWriteOffsetsSection();
info.SizeAndChecksums ??= new SizeAndChecksumsSection();
info.DumpingInfo ??= new DumpingInfoSection();
// Info that only overwrites if supplied
if (!string.IsNullOrEmpty(seed.CommonDiscInfo.Title)) info.CommonDiscInfo.Title = seed.CommonDiscInfo.Title;
if (!string.IsNullOrEmpty(seed.CommonDiscInfo.ForeignTitleNonLatin)) info.CommonDiscInfo.ForeignTitleNonLatin = seed.CommonDiscInfo.ForeignTitleNonLatin;
if (!string.IsNullOrEmpty(seed.CommonDiscInfo.DiscNumberLetter)) info.CommonDiscInfo.DiscNumberLetter = seed.CommonDiscInfo.DiscNumberLetter;
if (!string.IsNullOrEmpty(seed.CommonDiscInfo.DiscTitle)) info.CommonDiscInfo.DiscTitle = seed.CommonDiscInfo.DiscTitle;
if (seed.CommonDiscInfo.Category is not null) info.CommonDiscInfo.Category = seed.CommonDiscInfo.Category;
if (seed.CommonDiscInfo.Region is not null) info.CommonDiscInfo.Region = seed.CommonDiscInfo.Region;
if (seed.CommonDiscInfo.Languages is not null) info.CommonDiscInfo.Languages = seed.CommonDiscInfo.Languages;
if (seed.CommonDiscInfo.LanguageSelection is not null) info.CommonDiscInfo.LanguageSelection = seed.CommonDiscInfo.LanguageSelection;
if (!string.IsNullOrEmpty(seed.CommonDiscInfo.Serial)) info.CommonDiscInfo.Serial = seed.CommonDiscInfo.Serial;
if (!string.IsNullOrEmpty(seed.CommonDiscInfo.Barcode)) info.CommonDiscInfo.Barcode = seed.CommonDiscInfo.Barcode;
if (!string.IsNullOrEmpty(seed.CommonDiscInfo.Comments)) info.CommonDiscInfo.Comments = seed.CommonDiscInfo.Comments;
if (seed.CommonDiscInfo.CommentsSpecialFields is not null) info.CommonDiscInfo.CommentsSpecialFields = seed.CommonDiscInfo.CommentsSpecialFields;
if (!string.IsNullOrEmpty(seed.CommonDiscInfo.Contents)) info.CommonDiscInfo.Contents = seed.CommonDiscInfo.Contents;
if (seed.CommonDiscInfo.ContentsSpecialFields is not null) info.CommonDiscInfo.ContentsSpecialFields = seed.CommonDiscInfo.ContentsSpecialFields;
// Ensure special dictionaries
info.CommonDiscInfo.CommentsSpecialFields ??= [];
info.CommonDiscInfo.ContentsSpecialFields ??= [];
// Info that always overwrites
info.CommonDiscInfo.Layer0MasteringRing = seed.CommonDiscInfo.Layer0MasteringRing;
info.CommonDiscInfo.Layer0MasteringSID = seed.CommonDiscInfo.Layer0MasteringSID;
info.CommonDiscInfo.Layer0ToolstampMasteringCode = seed.CommonDiscInfo.Layer0ToolstampMasteringCode;
info.CommonDiscInfo.Layer0MouldSID = seed.CommonDiscInfo.Layer0MouldSID;
info.CommonDiscInfo.Layer0AdditionalMould = seed.CommonDiscInfo.Layer0AdditionalMould;
info.CommonDiscInfo.Layer1MasteringRing = seed.CommonDiscInfo.Layer1MasteringRing;
info.CommonDiscInfo.Layer1MasteringSID = seed.CommonDiscInfo.Layer1MasteringSID;
info.CommonDiscInfo.Layer1ToolstampMasteringCode = seed.CommonDiscInfo.Layer1ToolstampMasteringCode;
info.CommonDiscInfo.Layer1MouldSID = seed.CommonDiscInfo.Layer1MouldSID;
info.CommonDiscInfo.Layer1AdditionalMould = seed.CommonDiscInfo.Layer1AdditionalMould;
info.CommonDiscInfo.Layer2MasteringRing = seed.CommonDiscInfo.Layer2MasteringRing;
info.CommonDiscInfo.Layer2MasteringSID = seed.CommonDiscInfo.Layer2MasteringSID;
info.CommonDiscInfo.Layer2ToolstampMasteringCode = seed.CommonDiscInfo.Layer2ToolstampMasteringCode;
info.CommonDiscInfo.Layer3MasteringRing = seed.CommonDiscInfo.Layer3MasteringRing;
info.CommonDiscInfo.Layer3MasteringSID = seed.CommonDiscInfo.Layer3MasteringSID;
info.CommonDiscInfo.Layer3ToolstampMasteringCode = seed.CommonDiscInfo.Layer3ToolstampMasteringCode;
// Info that only overwrites if supplied
if (!string.IsNullOrEmpty(seed.VersionAndEditions.Version)) info.VersionAndEditions.Version = seed.VersionAndEditions.Version;
if (!string.IsNullOrEmpty(seed.VersionAndEditions.OtherEditions)) info.VersionAndEditions.OtherEditions = seed.VersionAndEditions.OtherEditions;
// Info that only overwrites if supplied
if (!string.IsNullOrEmpty(seed.CopyProtection.Protection)) info.CopyProtection.Protection = seed.CopyProtection.Protection;
return info;
}
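A minimal usage sketch of the merged injection helper above. "Builder" is a stand-in for the containing static class (its name is not visible in this hunk), and the sketch assumes SubmissionInfo initializes its section properties, since the seed's sections are dereferenced directly.
var existing = new SubmissionInfo();
var seed = new SubmissionInfo
{
    // Only the title is seeded here; empty or null seed fields leave the
    // existing values untouched.
    CommonDiscInfo = new CommonDiscInfoSection { Title = "Seeded Title" },
};
// Hypothetical call site: "Builder" is an assumed class name.
SubmissionInfo? merged = Builder.InjectSubmissionInformation(existing, seed);
// merged.CommonDiscInfo.Title is now "Seeded Title"; the mastering ring and
// mould fields are copied from the seed unconditionally.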
/// <summary>
/// Inject information from a seed SubmissionInfo into the existing one
/// Determine if a site code should be skipped on pulling
/// </summary>
/// <param name="info">Existing submission information</param>
/// <param name="seed">User-supplied submission information</param>
public static void InjectSubmissionInformation(SubmissionInfo? info, SubmissionInfo? seed)
private static bool ShouldSkipSiteCode(SiteCode? siteCode)
{
// If we have any invalid info
if (seed == null)
return;
// Ensure that required sections exist
info = EnsureAllSections(info);
// Otherwise, inject information as necessary
if (info.CommonDiscInfo != null && seed.CommonDiscInfo != null)
#pragma warning disable IDE0072
return siteCode switch
{
// Info that only overwrites if supplied
if (!string.IsNullOrEmpty(seed.CommonDiscInfo.Title)) info.CommonDiscInfo.Title = seed.CommonDiscInfo.Title;
if (!string.IsNullOrEmpty(seed.CommonDiscInfo.ForeignTitleNonLatin)) info.CommonDiscInfo.ForeignTitleNonLatin = seed.CommonDiscInfo.ForeignTitleNonLatin;
if (!string.IsNullOrEmpty(seed.CommonDiscInfo.DiscNumberLetter)) info.CommonDiscInfo.DiscNumberLetter = seed.CommonDiscInfo.DiscNumberLetter;
if (!string.IsNullOrEmpty(seed.CommonDiscInfo.DiscTitle)) info.CommonDiscInfo.DiscTitle = seed.CommonDiscInfo.DiscTitle;
if (seed.CommonDiscInfo.Category != null) info.CommonDiscInfo.Category = seed.CommonDiscInfo.Category;
if (seed.CommonDiscInfo.Region != null) info.CommonDiscInfo.Region = seed.CommonDiscInfo.Region;
if (seed.CommonDiscInfo.Languages != null) info.CommonDiscInfo.Languages = seed.CommonDiscInfo.Languages;
if (seed.CommonDiscInfo.LanguageSelection != null) info.CommonDiscInfo.LanguageSelection = seed.CommonDiscInfo.LanguageSelection;
if (!string.IsNullOrEmpty(seed.CommonDiscInfo.Serial)) info.CommonDiscInfo.Serial = seed.CommonDiscInfo.Serial;
if (!string.IsNullOrEmpty(seed.CommonDiscInfo.Barcode)) info.CommonDiscInfo.Barcode = seed.CommonDiscInfo.Barcode;
if (!string.IsNullOrEmpty(seed.CommonDiscInfo.Comments)) info.CommonDiscInfo.Comments = seed.CommonDiscInfo.Comments;
if (seed.CommonDiscInfo.CommentsSpecialFields != null) info.CommonDiscInfo.CommentsSpecialFields = seed.CommonDiscInfo.CommentsSpecialFields;
if (!string.IsNullOrEmpty(seed.CommonDiscInfo.Contents)) info.CommonDiscInfo.Contents = seed.CommonDiscInfo.Contents;
if (seed.CommonDiscInfo.ContentsSpecialFields != null) info.CommonDiscInfo.ContentsSpecialFields = seed.CommonDiscInfo.ContentsSpecialFields;
// Multiple
SiteCode.HighSierraVolumeDescriptor
or SiteCode.InternalSerialName
or SiteCode.Multisession
or SiteCode.VolumeLabel => true,
// Info that always overwrites
info.CommonDiscInfo.Layer0MasteringRing = seed.CommonDiscInfo.Layer0MasteringRing;
info.CommonDiscInfo.Layer0MasteringSID = seed.CommonDiscInfo.Layer0MasteringSID;
info.CommonDiscInfo.Layer0ToolstampMasteringCode = seed.CommonDiscInfo.Layer0ToolstampMasteringCode;
info.CommonDiscInfo.Layer0MouldSID = seed.CommonDiscInfo.Layer0MouldSID;
info.CommonDiscInfo.Layer0AdditionalMould = seed.CommonDiscInfo.Layer0AdditionalMould;
// Audio CD
SiteCode.RingNonZeroDataStart
or SiteCode.RingPerfectAudioOffset
or SiteCode.UniversalHash => true,
info.CommonDiscInfo.Layer1MasteringRing = seed.CommonDiscInfo.Layer1MasteringRing;
info.CommonDiscInfo.Layer1MasteringSID = seed.CommonDiscInfo.Layer1MasteringSID;
info.CommonDiscInfo.Layer1ToolstampMasteringCode = seed.CommonDiscInfo.Layer1ToolstampMasteringCode;
info.CommonDiscInfo.Layer1MouldSID = seed.CommonDiscInfo.Layer1MouldSID;
info.CommonDiscInfo.Layer1AdditionalMould = seed.CommonDiscInfo.Layer1AdditionalMould;
// Microsoft Xbox and Xbox 360
SiteCode.DMIHash
or SiteCode.PFIHash
or SiteCode.SSHash
or SiteCode.SSVersion
or SiteCode.XMID
or SiteCode.XeMID => true,
info.CommonDiscInfo.Layer2MasteringRing = seed.CommonDiscInfo.Layer2MasteringRing;
info.CommonDiscInfo.Layer2MasteringSID = seed.CommonDiscInfo.Layer2MasteringSID;
info.CommonDiscInfo.Layer2ToolstampMasteringCode = seed.CommonDiscInfo.Layer2ToolstampMasteringCode;
// Microsoft Xbox One and Series X/S
SiteCode.Filename => true,
SiteCode.TitleID => true,
info.CommonDiscInfo.Layer3MasteringRing = seed.CommonDiscInfo.Layer3MasteringRing;
info.CommonDiscInfo.Layer3MasteringSID = seed.CommonDiscInfo.Layer3MasteringSID;
info.CommonDiscInfo.Layer3ToolstampMasteringCode = seed.CommonDiscInfo.Layer3ToolstampMasteringCode;
}
// Nintendo Gamecube
SiteCode.InternalName => true,
if (info.VersionAndEditions != null && seed.VersionAndEditions != null)
{
// Info that only overwrites if supplied
if (!string.IsNullOrEmpty(seed.VersionAndEditions.Version)) info.VersionAndEditions.Version = seed.VersionAndEditions.Version;
if (!string.IsNullOrEmpty(seed.VersionAndEditions.OtherEditions)) info.VersionAndEditions.OtherEditions = seed.VersionAndEditions.OtherEditions;
}
// Protection
SiteCode.Protection => true,
if (info.CopyProtection != null && seed.CopyProtection != null)
{
// Info that only overwrites if supplied
if (!string.IsNullOrEmpty(seed.CopyProtection.Protection)) info.CopyProtection.Protection = seed.CopyProtection.Protection;
}
_ => false,
};
#pragma warning restore IDE0072
}
#endregion
@@ -729,9 +726,10 @@ namespace SabreTools.RedumpLib
/// </summary>
/// <param name="text">Text block to process</param>
/// <returns>Processed text block, if possible</returns>
private static string ReplaceHtmlWithSiteCodes(this string text)
internal static string ReplaceHtmlWithSiteCodes(this string text)
{
if (string.IsNullOrEmpty(text))
// Empty strings are ignored
if (text.Length == 0)
return text;
foreach (SiteCode? siteCode in Enum.GetValues(typeof(SiteCode)))

View File

@@ -10,11 +10,20 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class DiscCategoryConverter : JsonConverter<DiscCategory?>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override DiscCategory? ReadJson(JsonReader reader, Type objectType, DiscCategory? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue;
// Read the value
if (reader.Value is not string value)
return null;
// Try to parse the value
return Data.Extensions.ToDiscCategory(value);
}
public override void WriteJson(JsonWriter writer, DiscCategory? value, JsonSerializer serializer)
@@ -23,4 +32,4 @@ namespace SabreTools.RedumpLib.Converters
t.WriteTo(writer);
}
}
}
}
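A rough round-trip sketch for the now-readable converter; the "Games" string and the surrounding JSON are illustrative, and the exact accepted values depend on Data.Extensions.ToDiscCategory.
using System;
using Newtonsoft.Json;
using SabreTools.RedumpLib.Data.Sections;
// CommonDiscInfoSection.Category is decorated with DiscCategoryConverter,
// so deserializing "d_category" now goes through ReadJson instead of throwing.
var section = JsonConvert.DeserializeObject<CommonDiscInfoSection>(
    "{\"d_category\": \"Games\"}");
// Unknown or non-string values come back as null rather than failing.
Console.WriteLine(section?.Category);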

View File

@@ -10,11 +10,20 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class DiscTypeConverter : JsonConverter<DiscType?>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override DiscType? ReadJson(JsonReader reader, Type objectType, DiscType? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue;
// Read the value
if (reader.Value is not string value)
return null;
// Try to parse the value
return Data.Extensions.ToDiscType(value);
}
public override void WriteJson(JsonWriter writer, DiscType? value, JsonSerializer serializer)
@@ -23,4 +32,4 @@ namespace SabreTools.RedumpLib.Converters
t.WriteTo(writer);
}
}
}
}

View File

@@ -1,4 +1,5 @@
using System;
using System.Collections.Generic;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using SabreTools.RedumpLib.Data;
@@ -10,19 +11,38 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class LanguageConverter : JsonConverter<Language?[]>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override Language?[] ReadJson(JsonReader reader, Type objectType, Language?[]? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue ?? [];
// Get the current depth for checking
int currentDepth = reader.Depth;
// Read the array while it exists
List<Language> languages = [];
while (reader.Read() && reader.Depth > currentDepth)
{
if (reader.Value is not string value)
continue;
Language? lang = Data.Extensions.ToLanguage(value);
if (lang is not null)
languages.Add(lang.Value);
}
return [.. languages];
}
public override void WriteJson(JsonWriter writer, Language?[]? value, JsonSerializer serializer)
{
if (value == null)
if (value is null)
return;
JArray array = new JArray();
JArray array = [];
foreach (var val in value)
{
JToken t = JToken.FromObject(val.ShortName() ?? string.Empty);
@@ -32,4 +52,4 @@ namespace SabreTools.RedumpLib.Converters
array.WriteTo(writer);
}
}
}
}
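The array-valued converter follows the same pattern but walks the JSON array by reader depth and silently drops entries that Data.Extensions.ToLanguage does not recognize. A short sketch, with illustrative language strings:
using Newtonsoft.Json;
using SabreTools.RedumpLib.Data.Sections;
var section = JsonConvert.DeserializeObject<CommonDiscInfoSection>(
    "{\"d_languages\": [\"English\", \"not-a-language\"]}");
// Only values recognized by ToLanguage survive the read loop.
int count = section?.Languages?.Length ?? 0;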

View File

@@ -1,4 +1,5 @@
using System;
using System.Collections.Generic;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using SabreTools.RedumpLib.Data;
@@ -10,19 +11,38 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class LanguageSelectionConverter : JsonConverter<LanguageSelection?[]>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override LanguageSelection?[] ReadJson(JsonReader reader, Type objectType, LanguageSelection?[]? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue ?? [];
// Get the current depth for checking
int currentDepth = reader.Depth;
// Read the array while it exists
List<LanguageSelection> selections = [];
while (reader.Read() && reader.Depth > currentDepth)
{
if (reader.Value is not string value)
continue;
LanguageSelection? sel = Data.Extensions.ToLanguageSelection(value);
if (sel is not null)
selections.Add(sel.Value);
}
return [.. selections];
}
public override void WriteJson(JsonWriter writer, LanguageSelection?[]? value, JsonSerializer serializer)
{
if (value == null)
if (value is null)
return;
JArray array = new JArray();
JArray array = [];
foreach (var val in value)
{
JToken t = JToken.FromObject(val.LongName() ?? string.Empty);
@@ -32,4 +52,4 @@ namespace SabreTools.RedumpLib.Converters
array.WriteTo(writer);
}
}
}
}

View File

@@ -10,11 +10,20 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class RegionConverter : JsonConverter<Region?>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override Region? ReadJson(JsonReader reader, Type objectType, Region? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue;
// Read the value
if (reader.Value is not string value)
return null;
// Try to parse the value
return Data.Extensions.ToRegion(value);
}
public override void WriteJson(JsonWriter writer, Region? value, JsonSerializer serializer)
@@ -23,4 +32,4 @@ namespace SabreTools.RedumpLib.Converters
t.WriteTo(writer);
}
}
}
}

View File

@@ -10,11 +10,20 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class SystemConverter : JsonConverter<RedumpSystem?>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override RedumpSystem? ReadJson(JsonReader reader, Type objectType, RedumpSystem? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue;
// Read the value
if (reader.Value is not string value)
return null;
// Try to parse the value
return Data.Extensions.ToRedumpSystem(value);
}
public override void WriteJson(JsonWriter writer, RedumpSystem? value, JsonSerializer serializer)
@@ -23,4 +32,4 @@ namespace SabreTools.RedumpLib.Converters
t.WriteTo(writer);
}
}
}
}

View File

@@ -10,11 +10,21 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class YesNoConverter : JsonConverter<YesNo?>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override YesNo? ReadJson(JsonReader reader, Type objectType, YesNo? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue;
// Read the value
if (reader.Value is bool bVal)
return Data.Extensions.ToYesNo(bVal);
else if (reader.Value is string sVal)
return Data.Extensions.ToYesNo(sVal);
return null;
}
public override void WriteJson(JsonWriter writer, YesNo? value, JsonSerializer serializer)
@@ -23,4 +33,4 @@ namespace SabreTools.RedumpLib.Converters
t.WriteTo(writer);
}
}
}
}

View File

@@ -4,173 +4,172 @@ namespace SabreTools.RedumpLib.Data
{
public static class Constants
{
// TODO: Add RegexOptions.Compiled
#region Regular Expressions
/// <summary>
/// Regex matching the added field on a disc page
/// </summary>
public static Regex AddedRegex = new(@"<tr><th>Added</th><td>(.*?)</td></tr>");
public static readonly Regex AddedRegex = new(@"<tr><th>Added</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the barcode field on a disc page
/// </summary>
public static Regex BarcodeRegex = new(@"<tr><th>Barcode</th></tr><tr><td>(.*?)</td></tr>");
public static readonly Regex BarcodeRegex = new(@"<tr><th>Barcode</th></tr><tr><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the BCA field on a disc page
/// </summary>
public static Regex BcaRegex = new(@"<h3>BCA .*?/></h3></td><td .*?></td></tr>"
public static readonly Regex BcaRegex = new(@"<h3>BCA .*?/></h3></td><td .*?></td></tr>"
+ "<tr><th>Row</th><th>Contents</th><th>ASCII</th></tr>"
+ "<tr><td>(?<row1number>.*?)</td><td>(?<row1contents>.*?)</td><td>(?<row1ascii>.*?)</td></tr>"
+ "<tr><td>(?<row2number>.*?)</td><td>(?<row2contents>.*?)</td><td>(?<row2ascii>.*?)</td></tr>"
+ "<tr><td>(?<row3number>.*?)</td><td>(?<row3contents>.*?)</td><td>(?<row3ascii>.*?)</td></tr>"
+ "<tr><td>(?<row4number>.*?)</td><td>(?<row4contents>.*?)</td><td>(?<row4ascii>.*?)</td></tr>", RegexOptions.Singleline);
+ "<tr><td>(?<row4number>.*?)</td><td>(?<row4contents>.*?)</td><td>(?<row4ascii>.*?)</td></tr>", RegexOptions.Compiled | RegexOptions.Singleline);
/// <summary>
/// Regex matching the category field on a disc page
/// </summary>
public static Regex CategoryRegex = new(@"<tr><th>Category</th><td>(.*?)</td></tr>");
public static readonly Regex CategoryRegex = new(@"<tr><th>Category</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the comments field on a disc page
/// </summary>
public static Regex CommentsRegex = new(@"<tr><th>Comments</th></tr><tr><td>(.*?)</td></tr>", RegexOptions.Singleline);
public static readonly Regex CommentsRegex = new(@"<tr><th>Comments</th></tr><tr><td>(.*?)</td></tr>", RegexOptions.Compiled | RegexOptions.Singleline);
/// <summary>
/// Regex matching the contents field on a disc page
/// </summary>
public static Regex ContentsRegex = new(@"<tr><th>Contents</th></tr><tr .*?><td>(.*?)</td></tr>", RegexOptions.Singleline);
public static readonly Regex ContentsRegex = new(@"<tr><th>Contents</th></tr><tr .*?><td>(.*?)</td></tr>", RegexOptions.Compiled | RegexOptions.Singleline);
/// <summary>
/// Regex matching individual disc links on a results page
/// </summary>
public static Regex DiscRegex = new(@"<a href=""/disc/(\d+)/"">");
public static readonly Regex DiscRegex = new(@"<a href=""/disc/(\d+)/"">", RegexOptions.Compiled);
/// <summary>
/// Regex matching the disc number or letter field on a disc page
/// </summary>
public static Regex DiscNumberLetterRegex = new(@"\((.*?)\)");
public static readonly Regex DiscNumberLetterRegex = new(@"\((.*?)\)", RegexOptions.Compiled);
/// <summary>
/// Regex matching the dumpers on a disc page
/// </summary>
public static Regex DumpersRegex = new(@"<a href=""/discs/dumper/(.*?)/"">");
public static readonly Regex DumpersRegex = new(@"<a href=""/discs/dumper/(.*?)/"">", RegexOptions.Compiled);
/// <summary>
/// Regex matching the edition field on a disc page
/// </summary>
public static Regex EditionRegex = new(@"<tr><th>Edition</th><td>(.*?)</td></tr>");
public static readonly Regex EditionRegex = new(@"<tr><th>Edition</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the error count field on a disc page
/// </summary>
public static Regex ErrorCountRegex = new(@"<tr><th>Errors count</th><td>(.*?)</td></tr>");
public static readonly Regex ErrorCountRegex = new(@"<tr><th>Errors count</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the foreign title field on a disc page
/// </summary>
public static Regex ForeignTitleRegex = new(@"<h2>(.*?)</h2>");
public static readonly Regex ForeignTitleRegex = new(@"<h2>(.*?)</h2>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the "full match" ID list from a WIP disc page
/// </summary>
public static Regex FullMatchRegex = new(@"<td class=""static"">full match ids: (.*?)</td>");
public static readonly Regex FullMatchRegex = new(@"<td class=""static"">full match ids: (.*?)</td>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the languages field on a disc page
/// </summary>
public static Regex LanguagesRegex = new(@"<img src=""/images/languages/(.*?)\.png"" alt="".*?"" title="".*?"" />\s*");
public static readonly Regex LanguagesRegex = new(@"<img src=""/images/languages/(.*?)\.png"" alt="".*?"" title="".*?"" />\s*", RegexOptions.Compiled);
/// <summary>
/// Regex matching the last modified field on a disc page
/// </summary>
public static Regex LastModifiedRegex = new(@"<tr><th>Last modified</th><td>(.*?)</td></tr>");
public static readonly Regex LastModifiedRegex = new(@"<tr><th>Last modified</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the media field on a disc page
/// </summary>
public static Regex MediaRegex = new(@"<tr><th>Media</th><td>(.*?)</td></tr>");
public static readonly Regex MediaRegex = new(@"<tr><th>Media</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching individual WIP disc links on a results page
/// </summary>
public static Regex NewDiscRegex = new(@"<a (style=.*)?href=""/newdisc/(\d+)/"">");
public static readonly Regex NewDiscRegex = new(@"<a (style=.*)?href=""/newdisc/(\d+)/"">", RegexOptions.Compiled);
/// <summary>
/// Regex matching the "partial match" ID list from a WIP disc page
/// </summary>
public static Regex PartialMatchRegex = new(@"<td class=""static"">partial match ids: (.*?)</td>");
public static readonly Regex PartialMatchRegex = new(@"<td class=""static"">partial match ids: (.*?)</td>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the disc key on a PS3 disc page
/// </summary>
public static Regex PS3DiscKey = new(@"<th>Disc Key</th><th>Disc ID</th><th>Permanent Information & Control \(PIC\)</th></tr><tr><td>(.*?)</td><td>");
public static readonly Regex PS3DiscKey = new(@"<th>Disc Key</th><th>Disc ID</th><th>Permanent Information & Control \(PIC\)</th></tr><tr><td>(.*?)</td><td>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the PVD field on a disc page
/// </summary>
public static Regex PvdRegex = new(@"<h3>Primary Volume Descriptor (PVD) <img .*?/></h3></td><td .*?></td></tr>"
public static readonly Regex PvdRegex = new(@"<h3>Primary Volume Descriptor (PVD) <img .*?/></h3></td><td .*?></td></tr>"
+ @"<tr><th>Record / Entry</th><th>Contents</th><th>Date</th><th>Time</th><th>GMT</th></tr>"
+ @"<tr><td>Creation</td><td>(?<creationbytes>.*?)</td><td>(?<creationdate>.*?)</td><td>(?<creationtime>.*?)</td><td>(?<creationtimezone>.*?)</td></tr>"
+ @"<tr><td>Modification</td><td>(?<modificationbytes>.*?)</td><td>(?<modificationdate>.*?)</td><td>(?<modificationtime>.*?)</td><td>(?<modificationtimezone>.*?)</td></tr>"
+ @"<tr><td>Expiration</td><td>(?<expirationbytes>.*?)</td><td>(?<expirationdate>.*?)</td><td>(?<expirationtime>.*?)</td><td>(?<expirationtimezone>.*?)</td></tr>"
+ @"<tr><td>Effective</td><td>(?<effectivebytes>.*?)</td><td>(?<effectivedate>.*?)</td><td>(?<effectivetime>.*?)</td><td>(?<effectivetimezone>.*?)</td></tr>", RegexOptions.Singleline);
+ @"<tr><td>Effective</td><td>(?<effectivebytes>.*?)</td><td>(?<effectivedate>.*?)</td><td>(?<effectivetime>.*?)</td><td>(?<effectivetimezone>.*?)</td></tr>", RegexOptions.Compiled | RegexOptions.Singleline);
/// <summary>
/// Regex matching the region field on a disc page
/// </summary>
public static Regex RegionRegex = new(@"<tr><th>Region</th><td><a href=""/discs/region/(.*?)/"">");
public static readonly Regex RegionRegex = new(@"<tr><th>Region</th><td><a href=""/discs/region/(.*?)/"">", RegexOptions.Compiled);
/// <summary>
/// Regex matching a double-layer disc ringcode information
/// </summary>
public static Regex RingCodeDoubleRegex = new(@"", RegexOptions.Singleline); // Varies based on available fields, like Addtional Mould
public static readonly Regex RingCodeDoubleRegex = new(@"", RegexOptions.Compiled | RegexOptions.Singleline); // Varies based on available fields, like Addtional Mould
/// <summary>
/// Regex matching a single-layer disc ringcode information
/// </summary>
public static Regex RingCodeSingleRegex = new(@"", RegexOptions.Singleline); // Varies based on available fields, like Addtional Mould
public static readonly Regex RingCodeSingleRegex = new(@"", RegexOptions.Compiled | RegexOptions.Singleline); // Varies based on available fields, like Addtional Mould
/// <summary>
/// Regex matching the serial field on a disc page
/// </summary>
public static Regex SerialRegex = new(@"<tr><th>Serial</th><td>(.*?)</td></tr>");
public static readonly Regex SerialRegex = new(@"<tr><th>Serial</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the system field on a disc page
/// </summary>
public static Regex SystemRegex = new(@"<tr><th>System</th><td><a href=""/discs/system/(.*?)/"">");
public static readonly Regex SystemRegex = new(@"<tr><th>System</th><td><a href=""/discs/system/(.*?)/"">", RegexOptions.Compiled);
/// <summary>
/// Regex matching the title field on a disc page
/// </summary>
public static Regex TitleRegex = new(@"<h1>(.*?)</h1>");
public static readonly Regex TitleRegex = new(@"<h1>(.*?)</h1>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the current nonce token for login
/// </summary>
public static Regex TokenRegex = new(@"<input type=""hidden"" name=""csrf_token"" value=""(.*?)"" />");
public static readonly Regex TokenRegex = new(@"<input type=""hidden"" name=""csrf_token"" value=""(.*?)"" />", RegexOptions.Compiled);
/// <summary>
/// Regex matching a single track on a disc page
/// </summary>
public static Regex TrackRegex = new(@"<tr><td>(?<number>.*?)</td><td>(?<type>.*?)</td><td>(?<pregap>.*?)</td><td>(?<length>.*?)</td><td>(?<sectors>.*?)</td><td>(?<size>.*?)</td><td>(?<crc32>.*?)</td><td>(?<md5>.*?)</td><td>(?<sha1>.*?)</td></tr>", RegexOptions.Singleline);
public static readonly Regex TrackRegex = new(@"<tr><td>(?<number>.*?)</td><td>(?<type>.*?)</td><td>(?<pregap>.*?)</td><td>(?<length>.*?)</td><td>(?<sectors>.*?)</td><td>(?<size>.*?)</td><td>(?<crc32>.*?)</td><td>(?<md5>.*?)</td><td>(?<sha1>.*?)</td></tr>", RegexOptions.Compiled | RegexOptions.Singleline);
/// <summary>
/// Regex matching the track count on a disc page
/// </summary>
public static Regex TrackCountRegex = new(@"<tr><th>Number of tracks</th><td>(.*?)</td></tr>");
public static readonly Regex TrackCountRegex = new(@"<tr><th>Number of tracks</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the version field on a disc page
/// </summary>
public static Regex VersionRegex = new(@"<tr><th>Version</th><td>(.*?)</td></tr>");
public static readonly Regex VersionRegex = new(@"<tr><th>Version</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the write offset field on a disc page
/// </summary>
public static Regex WriteOffsetRegex = new(@"<tr><th>Write offset</th><td>(.*?)</td></tr>");
public static readonly Regex WriteOffsetRegex = new(@"<tr><th>Write offset</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
#endregion
@@ -307,4 +306,4 @@ namespace SabreTools.RedumpLib.Data
#endregion
}
}
}
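A quick sketch of how these precompiled patterns are consumed; the HTML fragment is invented for illustration and is not a real Redump page.
using System;
using SabreTools.RedumpLib.Data;
string html = "<tr><th>Serial</th><td>SLUS-00594</td></tr>";
// static readonly + RegexOptions.Compiled means each pattern is built once
// and reused for every page parse, and callers cannot swap it at run time.
var match = Constants.SerialRegex.Match(html);
if (match.Success)
    Console.WriteLine(match.Groups[1].Value); // "SLUS-00594"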

View File

@@ -281,7 +281,7 @@ namespace SabreTools.RedumpLib.Data
[Language(LongName = "Bini; Edo", ThreeLetterCode = "bin")]
Bini,
[Language(LongName = "Bislama", TwoLetterCode = "bla", ThreeLetterCode = "bis")]
[Language(LongName = "Bislama", TwoLetterCode = "bi", ThreeLetterCode = "bis")]
Bislama,
// Blin; Bilin
@@ -2008,8 +2008,8 @@ namespace SabreTools.RedumpLib.Data
[System(Category = SystemCategory.DiscBasedConsole, LongName = "Philips CD-i", ShortName = "cdi", HasCues = true, HasDat = true)]
PhilipsCDi,
[System(Category = SystemCategory.DiscBasedConsole, LongName = "Philips CD-i Digital Video", ShortName = "cdi-video", IsBanned = true)]
PhilipsCDiDigitalVideo,
[System(Category = SystemCategory.DiscBasedConsole, Available = false, LongName = "Playmaji Polymega")]
PlaymajiPolymega,
[System(Category = SystemCategory.DiscBasedConsole, Available = false, LongName = "Pioneer LaserActive")]
PioneerLaserActive,
@@ -2268,6 +2268,9 @@ namespace SabreTools.RedumpLib.Data
[System(Category = SystemCategory.Arcade, LongName = "funworld Photo Play", ShortName = "fpp", HasCues = true, HasDat = true)]
funworldPhotoPlay,
[System(Category = SystemCategory.Arcade, Available = false, LongName = "FuRyu & Omron Purikura")]
FuRyuOmronPurikura,
[System(Category = SystemCategory.Arcade, Available = false, LongName = "Global VR PC-based Systems")]
GlobalVRVarious,
@@ -2331,6 +2334,9 @@ namespace SabreTools.RedumpLib.Data
[System(Category = SystemCategory.Arcade, Available = false, LongName = "Merit Industries MegaTouch XL")]
MeritIndustriesMegaTouchXL,
[System(Category = SystemCategory.Arcade, Available = false, LongName = "Namco Purikura")]
NamcoPurikura,
[System(Category = SystemCategory.Arcade, LongName = "Namco · Sega · Nintendo Triforce", ShortName = "trf", HasCues = true, HasDat = true, HasGdi = true)]
NamcoSegaNintendoTriforce,
@@ -2379,9 +2385,21 @@ namespace SabreTools.RedumpLib.Data
[System(Category = SystemCategory.Arcade, LongName = "Sega Naomi 2", ShortName = "naomi2", HasCues = true, HasDat = true, HasGdi = true)]
SegaNaomi2,
[System(Category = SystemCategory.Arcade, Available = false, LongName = "Sega NAOMI Satellite Terminal PC")]
SegaNaomiSatelliteTerminalPC,
[System(Category = SystemCategory.Arcade, Available = false, LongName = "Sega Nu")]
SegaNu,
[System(Category = SystemCategory.Arcade, Available = false, LongName = "Sega Nu 1.1")]
SegaNu11,
[System(Category = SystemCategory.Arcade, Available = false, LongName = "Sega Nu 2")]
SegaNu2,
[System(Category = SystemCategory.Arcade, Available = false, LongName = "Sega Nu SX")]
SegaNuSX,
[System(Category = SystemCategory.Arcade, LongName = "Sega RingEdge", ShortName = "sre", HasDat = true)]
SegaRingEdge,
@@ -2449,12 +2467,18 @@ namespace SabreTools.RedumpLib.Data
[System(Category = SystemCategory.Other, LongName = "Pocket PC", ShortName = "ppc", HasCues = true, HasDat = true)]
PocketPC,
[System(Category = SystemCategory.Other, Available = false, LongName = "Psion")]
Psion,
[System(Category = SystemCategory.Other, Available = false, LongName = "Rainbow Disc")]
RainbowDisc,
[System(Category = SystemCategory.Other, LongName = "Sega Prologue 21 Multimedia Karaoke System", ShortName = "sp21", HasCues = true, HasDat = true)]
SegaPrologue21MultimediaKaraokeSystem,
[System(Category = SystemCategory.Other, Available = false, LongName = "Sharp Zaurus")]
SharpZaurus,
[System(Category = SystemCategory.Other, Available = false, LongName = "Sony Electronic Book")]
SonyElectronicBook,
@@ -2481,15 +2505,112 @@ namespace SabreTools.RedumpLib.Data
/// </summary>
/// <remarks>
/// https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2
///
///
/// Because of how Redump stores region IDs, the second letter of each
/// code is lower-cased. In any other system, both letters would be
/// capitalized properly.
/// </remarks>
public enum Region
{
// TODO: Should "regions" and multi-country sets be phased out?
// TODO: Should "regions" be moved to the end?
#region Aggregates - Redump Only
[HumanReadable(LongName = "Asia", ShortName = "A")]
Asia,
[HumanReadable(LongName = "Asia, Europe", ShortName = "A,E")]
AsiaEurope,
[HumanReadable(LongName = "Asia, USA", ShortName = "A,U")]
AsiaUSA,
[HumanReadable(LongName = "Australia, Germany", ShortName = "Au,G")]
AustraliaGermany,
[HumanReadable(LongName = "Australia, New Zealand", ShortName = "Au,Nz")]
AustraliaNewZealand,
[HumanReadable(LongName = "Austria, Switzerland", ShortName = "At,Ch")]
AustriaSwitzerland,
[HumanReadable(LongName = "Belgium, Netherlands", ShortName = "Be,N")]
BelgiumNetherlands,
[HumanReadable(LongName = "Europe", ShortName = "E")]
Europe,
[HumanReadable(LongName = "Europe, Asia", ShortName = "E,A")]
EuropeAsia,
[HumanReadable(LongName = "Europe, Australia", ShortName = "E,Au")]
EuropeAustralia,
[HumanReadable(LongName = "Europe, Canada", ShortName = "E,Ca")]
EuropeCanada,
[HumanReadable(LongName = "Europe, Germany", ShortName = "E,G")]
EuropeGermany,
[HumanReadable(LongName = "Export", ShortName = "Ex")]
Export,
[HumanReadable(LongName = "France, Spain", ShortName = "F,S")]
FranceSpain,
[HumanReadable(LongName = "Greater China", ShortName = "GC")]
GreaterChina,
[HumanReadable(LongName = "Japan, Asia", ShortName = "J,A")]
JapanAsia,
[HumanReadable(LongName = "Japan, Europe", ShortName = "J,E")]
JapanEurope,
[HumanReadable(LongName = "Japan, Korea", ShortName = "J,K")]
JapanKorea,
[HumanReadable(LongName = "Japan, USA", ShortName = "J,U")]
JapanUSA,
[HumanReadable(LongName = "Latin America", ShortName = "LAm")]
LatinAmerica,
[HumanReadable(LongName = "Scandinavia", ShortName = "Sca")]
Scandinavia,
[HumanReadable(LongName = "Spain, Portugal", ShortName = "S,Pt")]
SpainPortugal,
[HumanReadable(LongName = "UK, Australia", ShortName = "Uk,Au")]
UKAustralia,
[HumanReadable(LongName = "USA, Asia", ShortName = "U,A")]
USAAsia,
[HumanReadable(LongName = "USA, Australia", ShortName = "U,Au")]
USAAustralia,
[HumanReadable(LongName = "USA, Brazil", ShortName = "U,B")]
USABrazil,
[HumanReadable(LongName = "USA, Canada", ShortName = "U,Ca")]
USACanada,
[HumanReadable(LongName = "USA, Europe", ShortName = "U,E")]
USAEurope,
[HumanReadable(LongName = "USA, Germany", ShortName = "U,G")]
USAGermany,
[HumanReadable(LongName = "USA, Japan", ShortName = "U,J")]
USAJapan,
[HumanReadable(LongName = "USA, Korea", ShortName = "U,K")]
USAKorea,
[HumanReadable(LongName = "World", ShortName = "W")]
World,
#endregion
#region A
@@ -2535,30 +2656,12 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Ascension Island", ShortName = "Ac")]
AscensionIsland,
[HumanReadable(LongName = "Asia", ShortName = "A")]
Asia,
[HumanReadable(LongName = "Asia, Europe", ShortName = "A,E")]
AsiaEurope,
[HumanReadable(LongName = "Asia, USA", ShortName = "A,U")]
AsiaUSA,
[HumanReadable(LongName = "Australia", ShortName = "Au")]
Australia,
[HumanReadable(LongName = "Australia, Germany", ShortName = "Au,G")]
AustraliaGermany,
[HumanReadable(LongName = "Australia, New Zealand", ShortName = "Au,Nz")]
AustraliaNewZealand,
[HumanReadable(LongName = "Austria", ShortName = "At")]
Austria,
[HumanReadable(LongName = "Austria, Switzerland", ShortName = "At,Ch")]
AustriaSwitzerland,
[HumanReadable(LongName = "Azerbaijan", ShortName = "Az")]
Azerbaijan,
@@ -2584,9 +2687,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Belgium", ShortName = "Be")]
Belgium,
[HumanReadable(LongName = "Belgium, Netherlands", ShortName = "Be,N")]
BelgiumNetherlands,
[HumanReadable(LongName = "Belize", ShortName = "Bz")]
Belize,
@@ -2767,21 +2867,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Ethiopia", ShortName = "Et")]
Ethiopia,
[HumanReadable(LongName = "Europe", ShortName = "E")]
Europe,
[HumanReadable(LongName = "Europe, Asia", ShortName = "E,A")]
EuropeAsia,
[HumanReadable(LongName = "Europe, Australia", ShortName = "E,Au")]
EuropeAustralia,
[HumanReadable(LongName = "Europe, Canada", ShortName = "E,Ca")]
EuropeCanada,
[HumanReadable(LongName = "Europe, Germany", ShortName = "E,G")]
EuropeGermany,
// Commented out to avoid confusion
//[HumanReadable(LongName = "European Union", ShortName = "Eu")]
//EuropeanUnion,
@@ -2790,9 +2875,6 @@ namespace SabreTools.RedumpLib.Data
//[HumanReadable(LongName = "Eurozone", ShortName = "Ez")]
//Eurozone,
[HumanReadable(LongName = "Export", ShortName = "Ex")]
Export,
#endregion
#region F
@@ -2821,9 +2903,6 @@ namespace SabreTools.RedumpLib.Data
//[HumanReadable(LongName = "France, Metropolitan", ShortName = "Fx")]
//FranceMetropolitan,
[HumanReadable(LongName = "France, Spain", ShortName = "F,S")]
FranceSpain,
[HumanReadable(LongName = "French Guiana", ShortName = "Gf")]
FrenchGuiana,
@@ -2856,9 +2935,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Gibraltar", ShortName = "Gi")]
Gibraltar,
[HumanReadable(LongName = "Greater China", ShortName = "GC")]
GreaterChina,
[HumanReadable(LongName = "Greece", ShortName = "Gr")]
Greece,
@@ -2958,18 +3034,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Japan", ShortName = "J")]
Japan,
[HumanReadable(LongName = "Japan, Asia", ShortName = "J,A")]
JapanAsia,
[HumanReadable(LongName = "Japan, Europe", ShortName = "J,E")]
JapanEurope,
[HumanReadable(LongName = "Japan, Korea", ShortName = "J,K")]
JapanKorea,
[HumanReadable(LongName = "Japan, USA", ShortName = "J,U")]
JapanUSA,
[HumanReadable(LongName = "Jersey", ShortName = "Je")]
Jersey,
@@ -3009,9 +3073,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "(Laos) Lao People's Democratic Republic", ShortName = "La")]
Laos,
[HumanReadable(LongName = "Latin America", ShortName = "LAm")]
LatinAmerica,
[HumanReadable(LongName = "Latvia", ShortName = "Lv")]
Latvia,
@@ -3264,9 +3325,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Saudi Arabia", ShortName = "Sa")]
SaudiArabia,
[HumanReadable(LongName = "Scandinavia", ShortName = "Sca")]
Scandinavia,
[HumanReadable(LongName = "Senegal", ShortName = "Sn")]
Senegal,
@@ -3310,9 +3368,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Spain", ShortName = "S")]
Spain,
[HumanReadable(LongName = "Spain, Portugal", ShortName = "S,Pt")]
SpainPortugal,
[HumanReadable(LongName = "Sri Lanka", ShortName = "Lk")]
SriLanka,
@@ -3351,7 +3406,7 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Thailand", ShortName = "Th")]
Thailand,
// East Timor was "Tp"
// East Timor was "Tp"
[HumanReadable(LongName = "Timor-Leste (East Timor)", ShortName = "Tl")]
TimorLeste,
@@ -3397,9 +3452,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "UK", ShortName = "Uk")]
UnitedKingdom,
[HumanReadable(LongName = "UK, Australia", ShortName = "Uk,Au")]
UKAustralia,
[HumanReadable(LongName = "Ukraine", ShortName = "Ue")]
Ukraine,
@@ -3424,30 +3476,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "USA", ShortName = "U")]
UnitedStatesOfAmerica,
[HumanReadable(LongName = "USA, Asia", ShortName = "U,A")]
USAAsia,
[HumanReadable(LongName = "USA, Australia", ShortName = "U,Au")]
USAAustralia,
[HumanReadable(LongName = "USA, Brazil", ShortName = "U,B")]
USABrazil,
[HumanReadable(LongName = "USA, Canada", ShortName = "U,Ca")]
USACanada,
[HumanReadable(LongName = "USA, Europe", ShortName = "U,E")]
USAEurope,
[HumanReadable(LongName = "USA, Germany", ShortName = "U,G")]
USAGermany,
[HumanReadable(LongName = "USA, Japan", ShortName = "U,J")]
USAJapan,
[HumanReadable(LongName = "USA, Korea", ShortName = "U,K")]
USAKorea,
[HumanReadable(LongName = "USSR", ShortName = "Su")]
USSR,
@@ -3483,9 +3511,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Western Sahara", ShortName = "Eh")]
WesternSahara,
[HumanReadable(LongName = "World", ShortName = "W")]
World,
#endregion
#region Y
@@ -3526,7 +3551,7 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:ALTF]", LongName = "<b>Alternative Foreign Title</b>:")]
AlternativeForeignTitle,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Applications</b>:", LongName = "<b>Applications</b>:")]
Applications,
@@ -3536,30 +3561,38 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:BBFC]", LongName = "<b>BBFC Reg. No.</b>:")]
BBFCRegistrationNumber,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Bethesda ID</b>:", LongName = "<b>Bethesda ID</b>:")]
BethesdaID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>CD Projekt ID</b>:", LongName = "<b>CD Projekt ID</b>:")]
CDProjektID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Compatible OS</b>:", LongName = "<b>Compatible OS</b>:")]
CompatibleOS,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Disc Hologram ID</b>:", LongName = "<b>Disc Hologram ID</b>:")]
DiscHologramID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Disc Title (non-Latin)</b>:", LongName = "<b>Disc Title (non-Latin)</b>:")]
DiscTitleNonLatin,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>DMI</b>:", LongName = "<b>DMI</b>:")]
DMIHash,
[HumanReadable(ShortName = "[T:DNAS]", LongName = "<b>DNAS Disc ID</b>:")]
DNASDiscID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Edition (non-Latin)</b>:", LongName = "<b>Edition (non-Latin)</b>:")]
EditionNonLatin,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Eidos ID</b>:", LongName = "<b>Eidos ID</b>:")]
EidosID,
@@ -3569,7 +3602,7 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:X]", LongName = "<b>Extras</b>:")]
Extras,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Filename</b>:", LongName = "<b>Filename</b>:")]
Filename,
@@ -3579,7 +3612,7 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:GF]", LongName = "<b>Game Footage</b>:")]
GameFootage,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Games</b>:", LongName = "<b>Games</b>:")]
Games,
@@ -3589,13 +3622,21 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:GTID]", LongName = "<b>GT Interactive ID</b>:")]
GTInteractiveID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>High Sierra Volume Descriptor</b>:", LongName = "<b>High Sierra Volume Descriptor</b>:")]
HighSierraVolumeDescriptor,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Internal Name</b>:", LongName = "<b>Internal Name</b>:")]
InternalName,
[HumanReadable(ShortName = "[T:ISN]", LongName = "<b>Internal Serial</b>:")]
InternalSerialName,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Interplay ID</b>:", LongName = "<b>Interplay ID</b>:")]
InterplayID,
[HumanReadable(ShortName = "[T:ISBN]", LongName = "<b>ISBN</b>:")]
ISBN,
@@ -3614,14 +3655,18 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:KID]", LongName = "<b>Konami ID</b>:")]
KonamiID,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Logs Link</b>:", LongName = "<b>Logs Link</b>:")]
LogsLink,
[HumanReadable(ShortName = "[T:LAID]", LongName = "<b>Lucas Arts ID</b>:")]
LucasArtsID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Microsoft ID</b>:", LongName = "<b>Microsoft ID</b>:")]
MicrosoftID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Multisession</b>:", LongName = "<b>Multisession</b>:")]
Multisession,
@@ -3643,7 +3688,12 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:P]", LongName = "<b>Patches</b>:")]
Patches,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
/// <remarks>No text value after</remarks>
[HumanReadable(ShortName = "PC/Mac Hybrid", LongName = "PC/Mac Hybrid")]
PCMacHybrid,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>PFI</b>:", LongName = "<b>PFI</b>:")]
PFIHash,
@@ -3653,16 +3703,25 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:PCID]", LongName = "<b>Pony Canyon ID</b>:")]
PonyCanyonID,
/// <remarks>No text value after</remarks>
[HumanReadable(ShortName = "[T:PT2]", LongName = "<b>Postgap type</b>: Form 2")]
PostgapType,
[HumanReadable(ShortName = "[T:PPN]", LongName = "<b>PPN</b>:")]
PPN,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag for some systems yet
[HumanReadable(ShortName = "<b>Protection</b>:", LongName = "<b>Protection</b>:")]
Protection,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Ring non-zero data start</b>:", LongName = "<b>Ring non-zero data start</b>:")]
RingNonZeroDataStart,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Ring Perfect Audio Offset</b>:", LongName = "<b>Ring Perfect Audio Offset</b>:")]
RingPerfectAudioOffset,
[HumanReadable(ShortName = "[T:RD]", LongName = "<b>Rolling Demos</b>:")]
RollingDemos,
@@ -3678,28 +3737,48 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:S]", LongName = "<b>Series</b>:")]
Series,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Sierra ID</b>:", LongName = "<b>Sierra ID</b>:")]
SierraID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>SS</b>:", LongName = "<b>SS</b>:")]
SSHash,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>SS version</b>:", LongName = "<b>SS version</b>:")]
SSVersion,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Steam App ID</b>:", LongName = "<b>Steam AppID</b>:")]
SteamAppID,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Steam Depot ID (.sis/.csm/.csd)</b>:", LongName = "<b>Steam Depot ID (.sis/.csm/.csd)</b>:")]
SteamCsmCsdDepotID,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Steam Depot ID (.sis/.sim/.sid)</b>:", LongName = "<b>Steam Depot ID (.sis/.sim/.sid)</b>:")]
SteamSimSidDepotID,
[HumanReadable(ShortName = "[T:TID]", LongName = "<b>Taito ID</b>:")]
TaitoID,
[HumanReadable(ShortName = "[T:TD]", LongName = "<b>Tech Demos</b>:")]
TechDemos,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Title ID</b>:", LongName = "<b>Title ID</b>:")]
TitleID,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>2K Games ID</b>:", LongName = "<b>2K Games ID</b>:")]
TwoKGamesID,
[HumanReadable(ShortName = "[T:UID]", LongName = "<b>Ubisoft ID</b>:")]
UbisoftID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Universal Hash (SHA-1)</b>:", LongName = "<b>Universal Hash (SHA-1)</b>:")]
UniversalHash,
@@ -3715,14 +3794,15 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:VOL]", LongName = "<b>Volume Label</b>:")]
VolumeLabel,
/// <remarks>No text value after</remarks>
[HumanReadable(ShortName = "[T:VCD]", LongName = "<b>V-CD</b>")]
VCD,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>XeMID</b>:", LongName = "<b>XeMID</b>:")]
XeMID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>XMID</b>:", LongName = "<b>XMID</b>:")]
XMID,
}
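For context, a minimal sketch of how these tags are consumed by the comment/contents scan earlier in this diff; the comment line and ISBN value are invented for illustration.
using System;
using SabreTools.RedumpLib.Data;
string commentLine = "[T:ISBN] 978-1-23456-789-7";
foreach (SiteCode? siteCode in Enum.GetValues(typeof(SiteCode)))
{
    if (siteCode is null)
        continue;
    // ShortName() surfaces the HumanReadable attribute on each member.
    var shortName = siteCode.ShortName();
    if (shortName is null || !commentLine.Contains(shortName))
        continue;
    Console.WriteLine($"Line tagged as {siteCode}");
    break;
}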

File diff suppressed because it is too large

View File

@@ -0,0 +1,202 @@
using System;
using System.Collections.Generic;
using Newtonsoft.Json;
using SabreTools.RedumpLib.Converters;
namespace SabreTools.RedumpLib.Data.Sections
{
/// <summary>
/// Common disc info section of New Disc Form
/// </summary>
public class CommonDiscInfoSection : ICloneable
{
// Name not defined by Redump
[JsonProperty(PropertyName = "d_system", DefaultValueHandling = DefaultValueHandling.Include)]
[JsonConverter(typeof(SystemConverter))]
public RedumpSystem? System { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_media", DefaultValueHandling = DefaultValueHandling.Include)]
[JsonConverter(typeof(DiscTypeConverter))]
public DiscType? Media { get; set; }
[JsonProperty(PropertyName = "d_title", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Title { get; set; }
[JsonProperty(PropertyName = "d_title_foreign", DefaultValueHandling = DefaultValueHandling.Ignore)]
public string? ForeignTitleNonLatin { get; set; }
[JsonProperty(PropertyName = "d_number", NullValueHandling = NullValueHandling.Ignore)]
public string? DiscNumberLetter { get; set; }
[JsonProperty(PropertyName = "d_label", NullValueHandling = NullValueHandling.Ignore)]
public string? DiscTitle { get; set; }
[JsonProperty(PropertyName = "d_category", DefaultValueHandling = DefaultValueHandling.Include)]
[JsonConverter(typeof(DiscCategoryConverter))]
public DiscCategory? Category { get; set; }
[JsonProperty(PropertyName = "d_region", DefaultValueHandling = DefaultValueHandling.Include)]
[JsonConverter(typeof(RegionConverter))]
public Region? Region { get; set; }
[JsonProperty(PropertyName = "d_languages", DefaultValueHandling = DefaultValueHandling.Include)]
[JsonConverter(typeof(LanguageConverter))]
public Language?[]? Languages { get; set; }
[JsonProperty(PropertyName = "d_languages_selection", NullValueHandling = NullValueHandling.Ignore, DefaultValueHandling = DefaultValueHandling.Ignore)]
[JsonConverter(typeof(LanguageSelectionConverter))]
public LanguageSelection?[]? LanguageSelection { get; set; }
[JsonProperty(PropertyName = "d_serial", NullValueHandling = NullValueHandling.Ignore)]
public string? Serial { get; set; }
[JsonProperty(PropertyName = "d_ring", NullValueHandling = NullValueHandling.Ignore)]
public string? Ring { get; private set; }
[JsonProperty(PropertyName = "d_ring_0_id", NullValueHandling = NullValueHandling.Ignore)]
public string? RingId { get; private set; }
[JsonProperty(PropertyName = "d_ring_0_ma1", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Layer0MasteringRing { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma1_sid", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer0MasteringSID { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ts1", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer0ToolstampMasteringCode { get; set; }
[JsonProperty(PropertyName = "d_ring_0_mo1_sid", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer0MouldSID { get; set; }
[JsonProperty(PropertyName = "d_ring_0_mo1", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer0AdditionalMould { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma2", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Layer1MasteringRing { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma2_sid", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer1MasteringSID { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ts2", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer1ToolstampMasteringCode { get; set; }
[JsonProperty(PropertyName = "d_ring_0_mo2_sid", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer1MouldSID { get; set; }
[JsonProperty(PropertyName = "d_ring_0_mo2", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer1AdditionalMould { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma3", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Layer2MasteringRing { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma3_sid", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer2MasteringSID { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ts3", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer2ToolstampMasteringCode { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma4", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Layer3MasteringRing { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma4_sid", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer3MasteringSID { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ts4", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer3ToolstampMasteringCode { get; set; }
[JsonProperty(PropertyName = "d_ring_0_offsets", NullValueHandling = NullValueHandling.Ignore)]
public string RingOffsetsHidden { get { return "1"; } }
[JsonProperty(PropertyName = "d_ring_0_0_id", NullValueHandling = NullValueHandling.Ignore)]
public string? RingZeroId { get; private set; }
[JsonProperty(PropertyName = "d_ring_0_0_density", NullValueHandling = NullValueHandling.Ignore)]
public string? RingZeroDensity { get; private set; }
[JsonProperty(PropertyName = "d_ring_0_0_value", NullValueHandling = NullValueHandling.Ignore)]
public string? RingWriteOffset { get; set; }
[JsonProperty(PropertyName = "d_ring_count", NullValueHandling = NullValueHandling.Ignore)]
public string RingCount { get { return "1"; } }
[JsonProperty(PropertyName = "d_barcode", NullValueHandling = NullValueHandling.Ignore)]
public string? Barcode { get; set; }
[JsonProperty(PropertyName = "d_date", NullValueHandling = NullValueHandling.Ignore)]
public string? EXEDateBuildDate { get; set; }
[JsonProperty(PropertyName = "d_errors", NullValueHandling = NullValueHandling.Ignore)]
public string? ErrorsCount { get; set; }
[JsonProperty(PropertyName = "d_comments", NullValueHandling = NullValueHandling.Ignore)]
public string? Comments { get; set; }
[JsonIgnore]
public Dictionary<SiteCode, string> CommentsSpecialFields { get; set; } = [];
[JsonProperty(PropertyName = "d_contents", NullValueHandling = NullValueHandling.Ignore)]
public string? Contents { get; set; }
[JsonIgnore]
public Dictionary<SiteCode, string> ContentsSpecialFields { get; set; } = [];
public object Clone()
{
Dictionary<SiteCode, string> commentsSpecialFields = [];
foreach (var kvp in this.CommentsSpecialFields)
{
commentsSpecialFields[kvp.Key] = kvp.Value;
}
Dictionary<SiteCode, string> contentsSpecialFields = [];
foreach (var kvp in this.ContentsSpecialFields)
{
contentsSpecialFields[kvp.Key] = kvp.Value;
}
return new CommonDiscInfoSection
{
System = this.System,
Media = this.Media,
Title = this.Title,
ForeignTitleNonLatin = this.ForeignTitleNonLatin,
DiscNumberLetter = this.DiscNumberLetter,
DiscTitle = this.DiscTitle,
Category = this.Category,
Region = this.Region,
Languages = this.Languages?.Clone() as Language?[],
LanguageSelection = this.LanguageSelection?.Clone() as LanguageSelection?[],
Serial = this.Serial,
Ring = this.Ring,
RingId = this.RingId,
Layer0MasteringRing = this.Layer0MasteringRing,
Layer0MasteringSID = this.Layer0MasteringSID,
Layer0ToolstampMasteringCode = this.Layer0ToolstampMasteringCode,
Layer0MouldSID = this.Layer0MouldSID,
Layer0AdditionalMould = this.Layer0AdditionalMould,
Layer1MasteringRing = this.Layer1MasteringRing,
Layer1MasteringSID = this.Layer1MasteringSID,
Layer1ToolstampMasteringCode = this.Layer1ToolstampMasteringCode,
Layer1MouldSID = this.Layer1MouldSID,
Layer1AdditionalMould = this.Layer1AdditionalMould,
Layer2MasteringRing = this.Layer2MasteringRing,
Layer2MasteringSID = this.Layer2MasteringSID,
Layer2ToolstampMasteringCode = this.Layer2ToolstampMasteringCode,
Layer3MasteringRing = this.Layer3MasteringRing,
Layer3MasteringSID = this.Layer3MasteringSID,
Layer3ToolstampMasteringCode = this.Layer3ToolstampMasteringCode,
RingZeroId = this.RingZeroId,
RingZeroDensity = this.RingZeroDensity,
RingWriteOffset = this.RingWriteOffset,
Barcode = this.Barcode,
EXEDateBuildDate = this.EXEDateBuildDate,
ErrorsCount = this.ErrorsCount,
Comments = this.Comments,
CommentsSpecialFields = commentsSpecialFields,
Contents = this.Contents,
ContentsSpecialFields = contentsSpecialFields,
};
}
}
}
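The attribute mix above drives what the serialized New Disc form looks like: fields marked NullValueHandling.Ignore drop out of the JSON when unset, while DefaultValueHandling.Include fields such as d_title are always emitted. A minimal serialization sketch, assuming the converters handle writing as well as reading and that RedumpSystem.SonyPlayStation exists as in current library versions:

using System;
using Newtonsoft.Json;
using SabreTools.RedumpLib.Data;
using SabreTools.RedumpLib.Data.Sections;

var info = new CommonDiscInfoSection
{
    System = RedumpSystem.SonyPlayStation, // assumed enum member, for illustration only
    Title = "Example Title",
    // Serial stays null, so d_serial (NullValueHandling.Ignore) is omitted from the output
};

// d_title and the other DefaultValueHandling.Include fields are emitted even when null
string json = JsonConvert.SerializeObject(info, Formatting.Indented);
Console.WriteLine(json);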

View File

@@ -0,0 +1,56 @@
using System;
using System.Collections.Generic;
using Newtonsoft.Json;
using SabreTools.RedumpLib.Converters;
namespace SabreTools.RedumpLib.Data.Sections
{
/// <summary>
/// Copy protection section of New Disc form
/// </summary>
public class CopyProtectionSection : ICloneable
{
[JsonProperty(PropertyName = "d_protection_a", NullValueHandling = NullValueHandling.Ignore)]
[JsonConverter(typeof(YesNoConverter))]
public YesNo? AntiModchip { get; set; }
[JsonProperty(PropertyName = "d_protection_1", NullValueHandling = NullValueHandling.Ignore)]
[JsonConverter(typeof(YesNoConverter))]
public YesNo? LibCrypt { get; set; }
[JsonProperty(PropertyName = "d_libcrypt", NullValueHandling = NullValueHandling.Ignore)]
public string? LibCryptData { get; set; }
[JsonProperty(PropertyName = "d_protection", NullValueHandling = NullValueHandling.Ignore)]
public string? Protection { get; set; }
[JsonIgnore]
public Dictionary<string, List<string>?>? FullProtections { get; set; }
[JsonProperty(PropertyName = "d_securom", NullValueHandling = NullValueHandling.Ignore)]
public string? SecuROMData { get; set; }
public object Clone()
{
Dictionary<string, List<string>?>? fullProtections = null;
if (this.FullProtections is not null)
{
fullProtections = [];
foreach (var kvp in this.FullProtections)
{
fullProtections[kvp.Key] = kvp.Value;
}
}
return new CopyProtectionSection
{
AntiModchip = this.AntiModchip,
LibCrypt = this.LibCrypt,
LibCryptData = this.LibCryptData,
Protection = this.Protection,
FullProtections = fullProtections,
SecuROMData = this.SecuROMData,
};
}
}
}
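Note that Clone here rebuilds the FullProtections dictionary but copies the inner List<string> references as-is, so the original and the clone share those lists. A small illustrative sketch with hypothetical values, not library data:

using System;
using System.Collections.Generic;
using SabreTools.RedumpLib.Data.Sections;

var original = new CopyProtectionSection
{
    FullProtections = new Dictionary<string, List<string>?>
    {
        ["game.exe"] = new List<string> { "SafeDisc 2.51.021" },
    },
};

var copy = (CopyProtectionSection)original.Clone();

// The dictionary instances differ, but the backing lists are shared
original.FullProtections!["game.exe"]!.Add("added after cloning");
Console.WriteLine(copy.FullProtections!["game.exe"]![1]); // prints "added after cloning"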

View File

@@ -0,0 +1,30 @@
using System;
using Newtonsoft.Json;
namespace SabreTools.RedumpLib.Data.Sections
{
/// <summary>
/// Dumpers and status section of New Disc form (Moderator only)
/// </summary>
public class DumpersAndStatusSection : ICloneable
{
[JsonProperty(PropertyName = "d_status", NullValueHandling = NullValueHandling.Ignore)]
public DumpStatus Status { get; set; }
[JsonProperty(PropertyName = "d_dumpers", NullValueHandling = NullValueHandling.Ignore)]
public string[]? Dumpers { get; set; }
[JsonProperty(PropertyName = "d_dumpers_text", NullValueHandling = NullValueHandling.Ignore)]
public string? OtherDumpers { get; set; }
public object Clone()
{
return new DumpersAndStatusSection
{
Status = this.Status,
Dumpers = this.Dumpers?.Clone() as string[],
OtherDumpers = this.OtherDumpers,
};
}
}
}

View File

@@ -0,0 +1,63 @@
using System;
using Newtonsoft.Json;
namespace SabreTools.RedumpLib.Data.Sections
{
/// <summary>
/// Dumping info section for moderation
/// </summary>
public class DumpingInfoSection : ICloneable
{
// Name not defined by Redump -- Only used with MPF
[JsonProperty(PropertyName = "d_frontend_version", DefaultValueHandling = DefaultValueHandling.Include)]
public string? FrontendVersion { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_dumping_program", DefaultValueHandling = DefaultValueHandling.Include)]
public string? DumpingProgram { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_dumping_date", DefaultValueHandling = DefaultValueHandling.Include)]
public string? DumpingDate { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_dumping_params", DefaultValueHandling = DefaultValueHandling.Include)]
public string? DumpingParameters { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_drive_manufacturer", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Manufacturer { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_drive_model", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Model { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_drive_firmware", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Firmware { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_reported_disc_type", DefaultValueHandling = DefaultValueHandling.Include)]
public string? ReportedDiscType { get; set; }
// Name not defined by Redump -- Only used with Redumper
[JsonProperty(PropertyName = "d_errors_c2", NullValueHandling = NullValueHandling.Ignore)]
public string? C2ErrorsCount { get; set; }
public object Clone()
{
return new DumpingInfoSection
{
FrontendVersion = this.FrontendVersion,
DumpingProgram = this.DumpingProgram,
DumpingDate = this.DumpingDate,
DumpingParameters = this.DumpingParameters,
Manufacturer = this.Manufacturer,
Model = this.Model,
Firmware = this.Firmware,
ReportedDiscType = this.ReportedDiscType,
C2ErrorsCount = this.C2ErrorsCount,
};
}
}
}

View File

@@ -0,0 +1,24 @@
using System;
using Newtonsoft.Json;
using SabreTools.RedumpLib.Converters;
namespace SabreTools.RedumpLib.Data.Sections
{
/// <summary>
/// EDC section of New Disc form (PSX only)
/// </summary>
public class EDCSection : ICloneable
{
[JsonProperty(PropertyName = "d_edc", NullValueHandling = NullValueHandling.Ignore)]
[JsonConverter(typeof(YesNoConverter))]
public YesNo? EDC { get; set; }
public object Clone()
{
return new EDCSection
{
EDC = this.EDC,
};
}
}
}

View File

@@ -0,0 +1,46 @@
using System;
using Newtonsoft.Json;
namespace SabreTools.RedumpLib.Data.Sections
{
/// <summary>
/// Extras section of New Disc form
/// </summary>
public class ExtrasSection : ICloneable
{
[JsonProperty(PropertyName = "d_pvd", NullValueHandling = NullValueHandling.Ignore)]
public string? PVD { get; set; }
[JsonProperty(PropertyName = "d_d1_key", NullValueHandling = NullValueHandling.Ignore)]
public string? DiscKey { get; set; }
[JsonProperty(PropertyName = "d_d2_key", NullValueHandling = NullValueHandling.Ignore)]
public string? DiscID { get; set; }
[JsonProperty(PropertyName = "d_pic_data", NullValueHandling = NullValueHandling.Ignore)]
public string? PIC { get; set; }
[JsonProperty(PropertyName = "d_header", NullValueHandling = NullValueHandling.Ignore)]
public string? Header { get; set; }
[JsonProperty(PropertyName = "d_bca", NullValueHandling = NullValueHandling.Ignore)]
public string? BCA { get; set; }
[JsonProperty(PropertyName = "d_ssranges", NullValueHandling = NullValueHandling.Ignore)]
public string? SecuritySectorRanges { get; set; }
public object Clone()
{
return new ExtrasSection
{
PVD = this.PVD,
DiscKey = this.DiscKey,
DiscID = this.DiscID,
PIC = this.PIC,
Header = this.Header,
BCA = this.BCA,
SecuritySectorRanges = this.SecuritySectorRanges,
};
}
}
}

View File

@@ -0,0 +1,26 @@
using System;
using Newtonsoft.Json;
namespace SabreTools.RedumpLib.Data.Sections
{
/// <summary>
/// Parent/Clone relationship section of New Disc form
/// </summary>
public class ParentCloneRelationshipSection : ICloneable
{
[JsonProperty(PropertyName = "d_parent_id", NullValueHandling = NullValueHandling.Ignore)]
public string? ParentID { get; set; }
[JsonProperty(PropertyName = "d_is_regional_parent", NullValueHandling = NullValueHandling.Ignore)]
public bool RegionalParent { get; set; }
public object Clone()
{
return new ParentCloneRelationshipSection
{
ParentID = this.ParentID,
RegionalParent = this.RegionalParent,
};
}
}
}

View File

@@ -0,0 +1,50 @@
using System;
using Newtonsoft.Json;
namespace SabreTools.RedumpLib.Data.Sections
{
/// <summary>
/// Size &amp; checksums section of New Disc form (DVD/BD/UMD-based)
/// </summary>
public class SizeAndChecksumsSection : ICloneable
{
[JsonProperty(PropertyName = "d_layerbreak", NullValueHandling = NullValueHandling.Ignore)]
public long Layerbreak { get; set; }
[JsonProperty(PropertyName = "d_layerbreak_2", NullValueHandling = NullValueHandling.Ignore)]
public long Layerbreak2 { get; set; }
[JsonProperty(PropertyName = "d_layerbreak_3", NullValueHandling = NullValueHandling.Ignore)]
public long Layerbreak3 { get; set; }
[JsonProperty(PropertyName = "d_pic_identifier", NullValueHandling = NullValueHandling.Ignore)]
public string? PICIdentifier { get; set; }
[JsonProperty(PropertyName = "d_size", NullValueHandling = NullValueHandling.Ignore)]
public long Size { get; set; }
[JsonProperty(PropertyName = "d_crc32", NullValueHandling = NullValueHandling.Ignore)]
public string? CRC32 { get; set; }
[JsonProperty(PropertyName = "d_md5", NullValueHandling = NullValueHandling.Ignore)]
public string? MD5 { get; set; }
[JsonProperty(PropertyName = "d_sha1", NullValueHandling = NullValueHandling.Ignore)]
public string? SHA1 { get; set; }
public object Clone()
{
return new SizeAndChecksumsSection
{
Layerbreak = this.Layerbreak,
Layerbreak2 = this.Layerbreak2,
Layerbreak3 = this.Layerbreak3,
PICIdentifier = this.PICIdentifier,
Size = this.Size,
CRC32 = this.CRC32,
MD5 = this.MD5,
SHA1 = this.SHA1,
};
}
}
}

View File

@@ -0,0 +1,38 @@
using System;
using Newtonsoft.Json;
namespace SabreTools.RedumpLib.Data.Sections
{
/// <summary>
/// Tracks and write offsets section of New Disc form (CD/GD-based)
/// </summary>
public class TracksAndWriteOffsetsSection : ICloneable
{
[JsonProperty(PropertyName = "d_tracks", NullValueHandling = NullValueHandling.Ignore)]
public string? ClrMameProData { get; set; }
[JsonProperty(PropertyName = "d_cue", NullValueHandling = NullValueHandling.Ignore)]
public string? Cuesheet { get; set; }
[JsonProperty(PropertyName = "d_cue_raw", NullValueHandling = NullValueHandling.Ignore)]
public byte[]? CuesheetRaw { get; set; }
[JsonProperty(PropertyName = "d_offset", NullValueHandling = NullValueHandling.Ignore)]
public int[]? CommonWriteOffsets { get; set; }
[JsonProperty(PropertyName = "d_offset_text", NullValueHandling = NullValueHandling.Ignore)]
public string? OtherWriteOffsets { get; set; }
public object Clone()
{
return new TracksAndWriteOffsetsSection
{
ClrMameProData = this.ClrMameProData,
Cuesheet = this.Cuesheet,
CuesheetRaw = this.CuesheetRaw?.Clone() as byte[],
CommonWriteOffsets = this.CommonWriteOffsets?.Clone() as int[],
OtherWriteOffsets = this.OtherWriteOffsets,
};
}
}
}
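With the raw cuesheet stored as a byte[], Json.NET writes d_cue_raw as a Base64 string by default, so the exact bytes round-trip alongside the text cuesheet. A quick sketch under default serializer settings:

using System;
using System.Text;
using Newtonsoft.Json;
using SabreTools.RedumpLib.Data.Sections;

var tracks = new TracksAndWriteOffsetsSection
{
    Cuesheet = "FILE \"track01.bin\" BINARY",
    CuesheetRaw = Encoding.ASCII.GetBytes("FILE \"track01.bin\" BINARY"),
};

string json = JsonConvert.SerializeObject(tracks);
Console.WriteLine(json); // d_cue_raw appears as a Base64-encoded string

var roundTrip = JsonConvert.DeserializeObject<TracksAndWriteOffsetsSection>(json);
Console.WriteLine(Encoding.ASCII.GetString(roundTrip!.CuesheetRaw!)); // original bytes restored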

View File

@@ -0,0 +1,34 @@
using System;
using Newtonsoft.Json;
namespace SabreTools.RedumpLib.Data.Sections
{
/// <summary>
/// Version and editions section of New Disc form
/// </summary>
public class VersionAndEditionsSection : ICloneable
{
[JsonProperty(PropertyName = "d_version", NullValueHandling = NullValueHandling.Ignore)]
public string? Version { get; set; }
[JsonProperty(PropertyName = "d_version_datfile", NullValueHandling = NullValueHandling.Ignore)]
public string? VersionDatfile { get; set; }
[JsonProperty(PropertyName = "d_editions", NullValueHandling = NullValueHandling.Ignore)]
public string[]? CommonEditions { get; set; }
[JsonProperty(PropertyName = "d_editions_text", NullValueHandling = NullValueHandling.Ignore)]
public string? OtherEditions { get; set; }
public object Clone()
{
return new VersionAndEditionsSection
{
Version = this.Version,
VersionDatfile = this.VersionDatfile,
CommonEditions = this.CommonEditions,
OtherEditions = this.OtherEditions,
};
}
}
}

View File

@@ -1,8 +1,7 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Newtonsoft.Json;
using SabreTools.RedumpLib.Converters;
using SabreTools.RedumpLib.Data.Sections;
namespace SabreTools.RedumpLib.Data
{
@@ -39,40 +38,46 @@ namespace SabreTools.RedumpLib.Data
public DateTime? LastModified { get; set; }
[JsonProperty(PropertyName = "common_disc_info", DefaultValueHandling = DefaultValueHandling.Ignore)]
public CommonDiscInfoSection? CommonDiscInfo { get; set; } = new CommonDiscInfoSection();
public CommonDiscInfoSection CommonDiscInfo { get; set; } = new CommonDiscInfoSection();
[JsonProperty(PropertyName = "versions_and_editions", DefaultValueHandling = DefaultValueHandling.Ignore)]
public VersionAndEditionsSection? VersionAndEditions { get; set; } = new VersionAndEditionsSection();
public VersionAndEditionsSection VersionAndEditions { get; set; } = new VersionAndEditionsSection();
[JsonProperty(PropertyName = "edc", DefaultValueHandling = DefaultValueHandling.Ignore)]
public EDCSection? EDC { get; set; } = new EDCSection();
public EDCSection EDC { get; set; } = new EDCSection();
[JsonProperty(PropertyName = "parent_clone_relationship", DefaultValueHandling = DefaultValueHandling.Ignore)]
public ParentCloneRelationshipSection? ParentCloneRelationship { get; set; } = new ParentCloneRelationshipSection();
public ParentCloneRelationshipSection ParentCloneRelationship { get; set; } = new ParentCloneRelationshipSection();
[JsonProperty(PropertyName = "extras", DefaultValueHandling = DefaultValueHandling.Ignore)]
public ExtrasSection? Extras { get; set; } = new ExtrasSection();
public ExtrasSection Extras { get; set; } = new ExtrasSection();
[JsonProperty(PropertyName = "copy_protection", DefaultValueHandling = DefaultValueHandling.Ignore)]
public CopyProtectionSection? CopyProtection { get; set; } = new CopyProtectionSection();
public CopyProtectionSection CopyProtection { get; set; } = new CopyProtectionSection();
[JsonProperty(PropertyName = "dumpers_and_status", DefaultValueHandling = DefaultValueHandling.Ignore)]
public DumpersAndStatusSection? DumpersAndStatus { get; set; } = new DumpersAndStatusSection();
public DumpersAndStatusSection DumpersAndStatus { get; set; } = new DumpersAndStatusSection();
[JsonProperty(PropertyName = "tracks_and_write_offsets", DefaultValueHandling = DefaultValueHandling.Ignore)]
public TracksAndWriteOffsetsSection? TracksAndWriteOffsets { get; set; } = new TracksAndWriteOffsetsSection();
public TracksAndWriteOffsetsSection TracksAndWriteOffsets { get; set; } = new TracksAndWriteOffsetsSection();
[JsonProperty(PropertyName = "size_and_checksums", DefaultValueHandling = DefaultValueHandling.Ignore)]
public SizeAndChecksumsSection? SizeAndChecksums { get; set; } = new SizeAndChecksumsSection();
public SizeAndChecksumsSection SizeAndChecksums { get; set; } = new SizeAndChecksumsSection();
[JsonProperty(PropertyName = "dumping_info", DefaultValueHandling = DefaultValueHandling.Ignore)]
public DumpingInfoSection? DumpingInfo { get; set; } = new DumpingInfoSection();
public DumpingInfoSection DumpingInfo { get; set; } = new DumpingInfoSection();
[JsonProperty(PropertyName = "artifacts", DefaultValueHandling = DefaultValueHandling.Ignore)]
public Dictionary<string, string>? Artifacts { get; set; } = [];
public Dictionary<string, string> Artifacts { get; set; } = [];
public object Clone()
{
Dictionary<string, string> artifacts = [];
foreach (var kvp in this.Artifacts)
{
artifacts[kvp.Key] = kvp.Value;
}
return new SubmissionInfo
{
SchemaVersion = this.SchemaVersion,
@@ -80,505 +85,17 @@ namespace SabreTools.RedumpLib.Data
PartiallyMatchedIDs = this.PartiallyMatchedIDs,
Added = this.Added,
LastModified = this.LastModified,
CommonDiscInfo = this.CommonDiscInfo?.Clone() as CommonDiscInfoSection,
VersionAndEditions = this.VersionAndEditions?.Clone() as VersionAndEditionsSection,
EDC = this.EDC?.Clone() as EDCSection,
ParentCloneRelationship = this.ParentCloneRelationship?.Clone() as ParentCloneRelationshipSection,
Extras = this.Extras?.Clone() as ExtrasSection,
CopyProtection = this.CopyProtection?.Clone() as CopyProtectionSection,
DumpersAndStatus = this.DumpersAndStatus?.Clone() as DumpersAndStatusSection,
TracksAndWriteOffsets = this.TracksAndWriteOffsets?.Clone() as TracksAndWriteOffsetsSection,
SizeAndChecksums = this.SizeAndChecksums?.Clone() as SizeAndChecksumsSection,
DumpingInfo = this.DumpingInfo?.Clone() as DumpingInfoSection,
Artifacts = this.Artifacts?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
};
}
}
/// <summary>
/// Common disc info section of New Disc Form
/// </summary>
public class CommonDiscInfoSection : ICloneable
{
// Name not defined by Redump
[JsonProperty(PropertyName = "d_system", Required = Required.AllowNull)]
[JsonConverter(typeof(SystemConverter))]
public RedumpSystem? System { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_media", Required = Required.AllowNull)]
[JsonConverter(typeof(DiscTypeConverter))]
public DiscType? Media { get; set; }
[JsonProperty(PropertyName = "d_title", Required = Required.AllowNull)]
public string? Title { get; set; }
[JsonProperty(PropertyName = "d_title_foreign", DefaultValueHandling = DefaultValueHandling.Ignore)]
public string? ForeignTitleNonLatin { get; set; }
[JsonProperty(PropertyName = "d_number", NullValueHandling = NullValueHandling.Ignore)]
public string? DiscNumberLetter { get; set; }
[JsonProperty(PropertyName = "d_label", NullValueHandling = NullValueHandling.Ignore)]
public string? DiscTitle { get; set; }
[JsonProperty(PropertyName = "d_category", Required = Required.AllowNull)]
[JsonConverter(typeof(DiscCategoryConverter))]
public DiscCategory? Category { get; set; }
[JsonProperty(PropertyName = "d_region", Required = Required.AllowNull)]
[JsonConverter(typeof(RegionConverter))]
public Region? Region { get; set; }
[JsonProperty(PropertyName = "d_languages", Required = Required.AllowNull)]
[JsonConverter(typeof(LanguageConverter))]
public Language?[]? Languages { get; set; }
[JsonProperty(PropertyName = "d_languages_selection", NullValueHandling = NullValueHandling.Ignore, DefaultValueHandling = DefaultValueHandling.Ignore)]
[JsonConverter(typeof(LanguageSelectionConverter))]
public LanguageSelection?[]? LanguageSelection { get; set; }
[JsonProperty(PropertyName = "d_serial", NullValueHandling = NullValueHandling.Ignore)]
public string? Serial { get; set; }
[JsonProperty(PropertyName = "d_ring", NullValueHandling = NullValueHandling.Ignore)]
public string? Ring { get; private set; }
[JsonProperty(PropertyName = "d_ring_0_id", NullValueHandling = NullValueHandling.Ignore)]
public string? RingId { get; private set; }
[JsonProperty(PropertyName = "d_ring_0_ma1", Required = Required.AllowNull)]
public string? Layer0MasteringRing { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma1_sid", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer0MasteringSID { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ts1", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer0ToolstampMasteringCode { get; set; }
[JsonProperty(PropertyName = "d_ring_0_mo1_sid", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer0MouldSID { get; set; }
[JsonProperty(PropertyName = "d_ring_0_mo1", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer0AdditionalMould { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma2", Required = Required.AllowNull)]
public string? Layer1MasteringRing { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma2_sid", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer1MasteringSID { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ts2", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer1ToolstampMasteringCode { get; set; }
[JsonProperty(PropertyName = "d_ring_0_mo2_sid", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer1MouldSID { get; set; }
[JsonProperty(PropertyName = "d_ring_0_mo2", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer1AdditionalMould { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma3", Required = Required.AllowNull)]
public string? Layer2MasteringRing { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma3_sid", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer2MasteringSID { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ts3", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer2ToolstampMasteringCode { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma4", Required = Required.AllowNull)]
public string? Layer3MasteringRing { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma4_sid", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer3MasteringSID { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ts4", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer3ToolstampMasteringCode { get; set; }
[JsonProperty(PropertyName = "d_ring_0_offsets", NullValueHandling = NullValueHandling.Ignore)]
public string RingOffsetsHidden { get { return "1"; } }
[JsonProperty(PropertyName = "d_ring_0_0_id", NullValueHandling = NullValueHandling.Ignore)]
public string? RingZeroId { get; private set; }
[JsonProperty(PropertyName = "d_ring_0_0_density", NullValueHandling = NullValueHandling.Ignore)]
public string? RingZeroDensity { get; private set; }
[JsonProperty(PropertyName = "d_ring_0_0_value", NullValueHandling = NullValueHandling.Ignore)]
public string? RingWriteOffset { get; set; }
[JsonProperty(PropertyName = "d_ring_count", NullValueHandling = NullValueHandling.Ignore)]
public string RingCount { get { return "1"; } }
[JsonProperty(PropertyName = "d_barcode", NullValueHandling = NullValueHandling.Ignore)]
public string? Barcode { get; set; }
[JsonProperty(PropertyName = "d_date", NullValueHandling = NullValueHandling.Ignore)]
public string? EXEDateBuildDate { get; set; }
[JsonProperty(PropertyName = "d_errors", NullValueHandling = NullValueHandling.Ignore)]
public string? ErrorsCount { get; set; }
[JsonProperty(PropertyName = "d_comments", NullValueHandling = NullValueHandling.Ignore)]
public string? Comments { get; set; }
[JsonIgnore]
public Dictionary<SiteCode, string>? CommentsSpecialFields { get; set; }
[JsonProperty(PropertyName = "d_contents", NullValueHandling = NullValueHandling.Ignore)]
public string? Contents { get; set; }
[JsonIgnore]
public Dictionary<SiteCode, string>? ContentsSpecialFields { get; set; }
public object Clone()
{
return new CommonDiscInfoSection
{
System = this.System,
Media = this.Media,
Title = this.Title,
ForeignTitleNonLatin = this.ForeignTitleNonLatin,
DiscNumberLetter = this.DiscNumberLetter,
DiscTitle = this.DiscTitle,
Category = this.Category,
Region = this.Region,
Languages = this.Languages?.Clone() as Language?[],
LanguageSelection = this.LanguageSelection?.Clone() as LanguageSelection?[],
Serial = this.Serial,
Ring = this.Ring,
RingId = this.RingId,
Layer0MasteringRing = this.Layer0MasteringRing,
Layer0MasteringSID = this.Layer0MasteringSID,
Layer0ToolstampMasteringCode = this.Layer0ToolstampMasteringCode,
Layer0MouldSID = this.Layer0MouldSID,
Layer0AdditionalMould = this.Layer0AdditionalMould,
Layer1MasteringRing = this.Layer1MasteringRing,
Layer1MasteringSID = this.Layer1MasteringSID,
Layer1ToolstampMasteringCode = this.Layer1ToolstampMasteringCode,
Layer1MouldSID = this.Layer1MouldSID,
Layer1AdditionalMould = this.Layer1AdditionalMould,
Layer2MasteringRing = this.Layer2MasteringRing,
Layer2MasteringSID = this.Layer2MasteringSID,
Layer2ToolstampMasteringCode = this.Layer2ToolstampMasteringCode,
Layer3MasteringRing = this.Layer3MasteringRing,
Layer3MasteringSID = this.Layer3MasteringSID,
Layer3ToolstampMasteringCode = this.Layer3ToolstampMasteringCode,
RingZeroId = this.RingZeroId,
RingZeroDensity = this.RingZeroDensity,
RingWriteOffset = this.RingWriteOffset,
Barcode = this.Barcode,
EXEDateBuildDate = this.EXEDateBuildDate,
ErrorsCount = this.ErrorsCount,
Comments = this.Comments,
CommentsSpecialFields = this.CommentsSpecialFields?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
Contents = this.Contents,
ContentsSpecialFields = this.ContentsSpecialFields?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
};
}
}
/// <summary>
/// Version and editions section of New Disc form
/// </summary>
public class VersionAndEditionsSection : ICloneable
{
[JsonProperty(PropertyName = "d_version", NullValueHandling = NullValueHandling.Ignore)]
public string? Version { get; set; }
[JsonProperty(PropertyName = "d_version_datfile", NullValueHandling = NullValueHandling.Ignore)]
public string? VersionDatfile { get; set; }
[JsonProperty(PropertyName = "d_editions", NullValueHandling = NullValueHandling.Ignore)]
public string[]? CommonEditions { get; set; }
[JsonProperty(PropertyName = "d_editions_text", NullValueHandling = NullValueHandling.Ignore)]
public string? OtherEditions { get; set; }
public object Clone()
{
return new VersionAndEditionsSection
{
Version = this.Version,
VersionDatfile = this.VersionDatfile,
CommonEditions = this.CommonEditions,
OtherEditions = this.OtherEditions,
};
}
}
/// <summary>
/// EDC section of New Disc form (PSX only)
/// </summary>
public class EDCSection : ICloneable
{
[JsonProperty(PropertyName = "d_edc", NullValueHandling = NullValueHandling.Ignore)]
[JsonConverter(typeof(YesNoConverter))]
public YesNo? EDC { get; set; }
public object Clone()
{
return new EDCSection
{
EDC = this.EDC,
};
}
}
/// <summary>
/// Parent/Clone relationship section of New Disc form
/// </summary>
public class ParentCloneRelationshipSection : ICloneable
{
[JsonProperty(PropertyName = "d_parent_id", NullValueHandling = NullValueHandling.Ignore)]
public string? ParentID { get; set; }
[JsonProperty(PropertyName = "d_is_regional_parent", NullValueHandling = NullValueHandling.Ignore)]
public bool RegionalParent { get; set; }
public object Clone()
{
return new ParentCloneRelationshipSection
{
ParentID = this.ParentID,
RegionalParent = this.RegionalParent,
};
}
}
/// <summary>
/// Extras section of New Disc form
/// </summary>
public class ExtrasSection : ICloneable
{
[JsonProperty(PropertyName = "d_pvd", NullValueHandling = NullValueHandling.Ignore)]
public string? PVD { get; set; }
[JsonProperty(PropertyName = "d_d1_key", NullValueHandling = NullValueHandling.Ignore)]
public string? DiscKey { get; set; }
[JsonProperty(PropertyName = "d_d2_key", NullValueHandling = NullValueHandling.Ignore)]
public string? DiscID { get; set; }
[JsonProperty(PropertyName = "d_pic_data", NullValueHandling = NullValueHandling.Ignore)]
public string? PIC { get; set; }
[JsonProperty(PropertyName = "d_header", NullValueHandling = NullValueHandling.Ignore)]
public string? Header { get; set; }
[JsonProperty(PropertyName = "d_bca", NullValueHandling = NullValueHandling.Ignore)]
public string? BCA { get; set; }
[JsonProperty(PropertyName = "d_ssranges", NullValueHandling = NullValueHandling.Ignore)]
public string? SecuritySectorRanges { get; set; }
public object Clone()
{
return new ExtrasSection
{
PVD = this.PVD,
DiscKey = this.DiscKey,
DiscID = this.DiscID,
PIC = this.PIC,
Header = this.Header,
BCA = this.BCA,
SecuritySectorRanges = this.SecuritySectorRanges,
};
}
}
/// <summary>
/// Copy protection section of New Disc form
/// </summary>
public class CopyProtectionSection : ICloneable
{
[JsonProperty(PropertyName = "d_protection_a", NullValueHandling = NullValueHandling.Ignore)]
[JsonConverter(typeof(YesNoConverter))]
public YesNo? AntiModchip { get; set; }
[JsonProperty(PropertyName = "d_protection_1", NullValueHandling = NullValueHandling.Ignore)]
[JsonConverter(typeof(YesNoConverter))]
public YesNo? LibCrypt { get; set; }
[JsonProperty(PropertyName = "d_libcrypt", NullValueHandling = NullValueHandling.Ignore)]
public string? LibCryptData { get; set; }
[JsonProperty(PropertyName = "d_protection", NullValueHandling = NullValueHandling.Ignore)]
public string? Protection { get; set; }
[JsonIgnore]
public Dictionary<string, List<string>?>? FullProtections { get; set; }
[JsonProperty(PropertyName = "d_securom", NullValueHandling = NullValueHandling.Ignore)]
public string? SecuROMData { get; set; }
public object Clone()
{
return new CopyProtectionSection
{
AntiModchip = this.AntiModchip,
LibCrypt = this.LibCrypt,
LibCryptData = this.LibCryptData,
Protection = this.Protection,
FullProtections = this.FullProtections?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
SecuROMData = this.SecuROMData,
};
}
}
/// <summary>
/// Dumpers and status section of New Disc form (Moderator only)
/// </summary>
public class DumpersAndStatusSection : ICloneable
{
[JsonProperty(PropertyName = "d_status", NullValueHandling = NullValueHandling.Ignore)]
public DumpStatus Status { get; set; }
[JsonProperty(PropertyName = "d_dumpers", NullValueHandling = NullValueHandling.Ignore)]
public string[]? Dumpers { get; set; }
[JsonProperty(PropertyName = "d_dumpers_text", NullValueHandling = NullValueHandling.Ignore)]
public string? OtherDumpers { get; set; }
public object Clone()
{
return new DumpersAndStatusSection
{
Status = this.Status,
Dumpers = this.Dumpers?.Clone() as string[],
OtherDumpers = this.OtherDumpers,
};
}
}
/// <summary>
/// Tracks and write offsets section of New Disc form (CD/GD-based)
/// </summary>
public class TracksAndWriteOffsetsSection : ICloneable
{
[JsonProperty(PropertyName = "d_tracks", NullValueHandling = NullValueHandling.Ignore)]
public string? ClrMameProData { get; set; }
[JsonProperty(PropertyName = "d_cue", NullValueHandling = NullValueHandling.Ignore)]
public string? Cuesheet { get; set; }
[JsonProperty(PropertyName = "d_offset", NullValueHandling = NullValueHandling.Ignore)]
public int[]? CommonWriteOffsets { get; set; }
[JsonProperty(PropertyName = "d_offset_text", NullValueHandling = NullValueHandling.Ignore)]
public string? OtherWriteOffsets { get; set; }
public object Clone()
{
return new TracksAndWriteOffsetsSection
{
ClrMameProData = this.ClrMameProData,
Cuesheet = this.Cuesheet,
CommonWriteOffsets = this.CommonWriteOffsets?.Clone() as int[],
OtherWriteOffsets = this.OtherWriteOffsets,
};
}
}
/// <summary>
/// Size &amp; checksums section of New Disc form (DVD/BD/UMD-based)
/// </summary>
public class SizeAndChecksumsSection : ICloneable
{
[JsonProperty(PropertyName = "d_layerbreak", NullValueHandling = NullValueHandling.Ignore)]
public long Layerbreak { get; set; }
[JsonProperty(PropertyName = "d_layerbreak_2", NullValueHandling = NullValueHandling.Ignore)]
public long Layerbreak2 { get; set; }
[JsonProperty(PropertyName = "d_layerbreak_3", NullValueHandling = NullValueHandling.Ignore)]
public long Layerbreak3 { get; set; }
[JsonProperty(PropertyName = "d_pic_identifier", NullValueHandling = NullValueHandling.Ignore)]
public string? PICIdentifier { get; set; }
[JsonProperty(PropertyName = "d_size", NullValueHandling = NullValueHandling.Ignore)]
public long Size { get; set; }
[JsonProperty(PropertyName = "d_crc32", NullValueHandling = NullValueHandling.Ignore)]
public string? CRC32 { get; set; }
[JsonProperty(PropertyName = "d_md5", NullValueHandling = NullValueHandling.Ignore)]
public string? MD5 { get; set; }
[JsonProperty(PropertyName = "d_sha1", NullValueHandling = NullValueHandling.Ignore)]
public string? SHA1 { get; set; }
public object Clone()
{
return new SizeAndChecksumsSection
{
Layerbreak = this.Layerbreak,
Layerbreak2 = this.Layerbreak2,
Layerbreak3 = this.Layerbreak3,
PICIdentifier = this.PICIdentifier,
Size = this.Size,
CRC32 = this.CRC32,
MD5 = this.MD5,
SHA1 = this.SHA1,
};
}
}
/// <summary>
/// Dumping info section for moderation
/// </summary>
public class DumpingInfoSection : ICloneable
{
// Name not defined by Redump -- Only used with MPF
[JsonProperty(PropertyName = "d_frontend_version", Required = Required.AllowNull)]
public string? FrontendVersion { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_dumping_program", Required = Required.AllowNull)]
public string? DumpingProgram { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_dumping_date", Required = Required.AllowNull)]
public string? DumpingDate { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_dumping_params", Required = Required.AllowNull)]
public string? DumpingParameters { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_drive_manufacturer", Required = Required.AllowNull)]
public string? Manufacturer { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_drive_model", Required = Required.AllowNull)]
public string? Model { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_drive_firmware", Required = Required.AllowNull)]
public string? Firmware { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_reported_disc_type", Required = Required.AllowNull)]
public string? ReportedDiscType { get; set; }
// Name not defined by Redump -- Only used with Redumper
[JsonProperty(PropertyName = "d_errors_c2", NullValueHandling = NullValueHandling.Ignore)]
public string? C2ErrorsCount { get; set; }
public object Clone()
{
return new DumpingInfoSection
{
FrontendVersion = this.FrontendVersion,
DumpingProgram = this.DumpingProgram,
DumpingDate = this.DumpingDate,
DumpingParameters = this.DumpingParameters,
Manufacturer = this.Manufacturer,
Model = this.Model,
Firmware = this.Firmware,
ReportedDiscType = this.ReportedDiscType,
C2ErrorsCount = this.C2ErrorsCount,
CommonDiscInfo = this.CommonDiscInfo?.Clone() as CommonDiscInfoSection ?? new CommonDiscInfoSection(),
VersionAndEditions = this.VersionAndEditions?.Clone() as VersionAndEditionsSection ?? new VersionAndEditionsSection(),
EDC = this.EDC?.Clone() as EDCSection ?? new EDCSection(),
ParentCloneRelationship = this.ParentCloneRelationship?.Clone() as ParentCloneRelationshipSection ?? new ParentCloneRelationshipSection(),
Extras = this.Extras?.Clone() as ExtrasSection ?? new ExtrasSection(),
CopyProtection = this.CopyProtection?.Clone() as CopyProtectionSection ?? new CopyProtectionSection(),
DumpersAndStatus = this.DumpersAndStatus?.Clone() as DumpersAndStatusSection ?? new DumpersAndStatusSection(),
TracksAndWriteOffsets = this.TracksAndWriteOffsets?.Clone() as TracksAndWriteOffsetsSection ?? new TracksAndWriteOffsetsSection(),
SizeAndChecksums = this.SizeAndChecksums?.Clone() as SizeAndChecksumsSection ?? new SizeAndChecksumsSection(),
DumpingInfo = this.DumpingInfo?.Clone() as DumpingInfoSection ?? new DumpingInfoSection(),
Artifacts = artifacts,
};
}
}

View File

@@ -1,4 +1,5 @@
using System.Threading.Tasks;
using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
using SabreTools.RedumpLib.Web;
@@ -51,6 +52,11 @@ namespace SabreTools.RedumpLib
/// </summary>
public bool OnlyList { get; set; }
/// <summary>
/// Don't replace forward slashes with `-` in queries
/// </summary>
public bool NoSlash { get; set; }
/// <summary>
/// Force continuing downloads until user cancels or pages run out
/// </summary>
@@ -97,49 +103,86 @@ namespace SabreTools.RedumpLib
/// <summary>
/// Run the downloads for the selected feature
/// </summary>
/// <returns>True if there was a valid download type, false otherwise</returns>
public async Task<bool> Download()
/// <returns>List of IDs that were processed on success, empty on error</returns>
/// <remarks>The Packs feature always returns an empty list</remarks>
public async Task<List<int>> Download()
{
// Login to Redump, if possible
if (!_client.LoggedIn)
await _client.Login(Username ?? string.Empty, Password ?? string.Empty);
// Create output list
List<int> processedIds = [];
switch (Feature)
{
case Feature.Site:
if (OnlyNew)
await Discs.DownloadLastModified(_client, OutDir, Force);
else
await Discs.DownloadSiteRange(_client, OutDir, MinimumId, MaximumId);
break;
case Feature.WIP:
if (OnlyNew)
await WIP.DownloadLastSubmitted(_client, OutDir);
else
await WIP.DownloadWIPRange(_client, OutDir, MinimumId, MaximumId);
break;
case Feature.Packs:
await Packs.DownloadPacks(_client, OutDir, UseSubfolders);
break;
case Feature.User:
if (OnlyList)
await User.ListUser(_client, Username);
else if (OnlyNew)
await User.DownloadUserLastModified(_client, Username, OutDir);
else
await User.DownloadUser(_client, Username, OutDir);
break;
case Feature.Quicksearch:
if (OnlyList)
await Search.ListSearchResults(_client, QueryString);
else
await Search.DownloadSearchResults(_client, QueryString, OutDir);
processedIds = await ProcessQuicksearch();
break;
case Feature.Site:
processedIds = await ProcessSite();
break;
case Feature.User:
processedIds = await ProcessUser();
break;
case Feature.WIP:
processedIds = await ProcessWIP();
break;
case Feature.NONE:
default:
return false;
return [];
}
return true;
return processedIds;
}
/// <summary>
/// Process the Quicksearch feature
/// </summary>
private async Task<List<int>> ProcessQuicksearch()
{
if (OnlyList)
return await Search.ListSearchResults(_client, QueryString, NoSlash);
else
return await Search.DownloadSearchResults(_client, QueryString, OutDir, NoSlash);
}
/// <summary>
/// Process the Site feature
/// </summary>
private async Task<List<int>> ProcessSite()
{
if (OnlyNew)
return await Discs.DownloadLastModified(_client, OutDir, Force);
else
return await Discs.DownloadSiteRange(_client, OutDir, MinimumId, MaximumId);
}
/// <summary>
/// Process the User feature
/// </summary>
private async Task<List<int>> ProcessUser()
{
if (OnlyList)
return await User.ListUser(_client, Username);
else if (OnlyNew)
return await User.DownloadUserLastModified(_client, Username, OutDir);
else
return await User.DownloadUser(_client, Username, OutDir);
}
/// <summary>
/// Process the WIP feature
/// </summary>
private async Task<List<int>> ProcessWIP()
{
if (OnlyNew)
return await WIP.DownloadLastSubmitted(_client, OutDir);
else
return await WIP.DownloadWIPRange(_client, OutDir, MinimumId, MaximumId);
}
}
}
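With Download() now returning the processed IDs instead of a bare boolean, callers can branch on the list directly; an empty list covers unsupported features, errors, and no-match cases alike. A hedged consumption sketch, where the downloader instance is assumed to be constructed and configured elsewhere (only members visible above are used):

using System;
using System.Collections.Generic;

// Inside an async method; `downloader` is an already-configured instance of the class above.
List<int> processed = await downloader.Download();
if (processed.Count == 0)
    Console.WriteLine("Nothing was processed (unsupported feature, error, or no matches).");
else
    Console.WriteLine($"Processed {processed.Count} entries: {string.Join(", ", processed)}");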

View File

@@ -0,0 +1,9 @@
#if NET20
namespace System.Runtime.CompilerServices
{
[AttributeUsage(AttributeTargets.Assembly | AttributeTargets.Class | AttributeTargets.Method)]
internal sealed class ExtensionAttribute : Attribute {}
}
#endif
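This shim exists only so the C# compiler can emit extension methods when targeting .NET 2.0, where the real ExtensionAttribute (introduced with .NET 3.5) is unavailable. A hypothetical helper compiled against the net20 target, for illustration only and not part of the library:

#if NET20
// Compiles on net20 because the attribute above stands in for the missing framework type.
internal static class CharArrayExtensions
{
    public static string AsString(this char[] chars)
    {
        return new string(chars);
    }
}
#endif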

File diff suppressed because it is too large

View File

@@ -3,7 +3,6 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Text;
namespace SabreTools.RedumpLib
@@ -40,8 +39,13 @@ namespace SabreTools.RedumpLib
new StringBuilder(256) :
new StringBuilder(value.Length);
sb.Append(valueSpan.Take(index).ToArray());
HtmlDecode(valueSpan.Skip(index).ToArray(), ref sb);
char[] take = new char[index];
Array.Copy(valueSpan, take, index);
sb.Append(take);
char[] skip = new char[valueSpan.Length - index];
Array.Copy(valueSpan, index, skip, 0, skip.Length);
HtmlDecode(skip, ref sb);
return sb.ToString();
}
@@ -57,7 +61,8 @@ namespace SabreTools.RedumpLib
// We found a '&'. Now look for the next ';' or '&'. The idea is that
// if we find another '&' before finding a ';', then this is not an entity,
// and the next '&' might start a real entity (VSWhidbey 275184)
char[] inputSlice = input.Skip(i + 1).ToArray();
char[] inputSlice = new char[input.Length - (i + 1)];
Array.Copy(input, i + 1, inputSlice, 0, inputSlice.Length);
int semicolonPos = Array.IndexOf(inputSlice, ';');
int ampersandPos = Array.IndexOf(inputSlice, '&');
@@ -81,9 +86,13 @@ namespace SabreTools.RedumpLib
// &#xE5; --> same char in hex
// See http://www.w3.org/TR/REC-html40/charset.html#entities
int offset = inputSlice[1] == 'x' || inputSlice[1] == 'X' ? 2 : 1;
char[] inputSliceNoPrefix = new char[entityLength - offset];
Array.Copy(inputSlice, offset, inputSliceNoPrefix, 0, inputSliceNoPrefix.Length);
bool parsedSuccessfully = inputSlice[1] == 'x' || inputSlice[1] == 'X'
? uint.TryParse(new string(inputSlice.Skip(2).Take(entityLength - 2).ToArray()), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out uint parsedValue)
: uint.TryParse(new string(inputSlice.Skip(1).Take(entityLength - 1).ToArray()), NumberStyles.Integer, CultureInfo.InvariantCulture, out parsedValue);
? uint.TryParse(new string(inputSliceNoPrefix), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out uint parsedValue)
: uint.TryParse(new string(inputSliceNoPrefix), NumberStyles.Integer, CultureInfo.InvariantCulture, out parsedValue);
if (parsedSuccessfully)
{
@@ -112,7 +121,8 @@ namespace SabreTools.RedumpLib
}
else
{
char[] entity = inputSlice.Take(entityLength).ToArray();
char[] entity = new char[entityLength];
Array.Copy(inputSlice, entity, entityLength);
i = entityEndPosition; // already looked at everything until semicolon
char entityChar = HtmlEntities.Lookup(entity);
@@ -414,7 +424,10 @@ namespace SabreTools.RedumpLib
ulong key = BitConverter.ToUInt64(tableData, 0);
char value = (char)BitConverter.ToUInt16(tableData, sizeof(ulong));
dictionary[key] = value;
tableData = tableData.Skip((sizeof(ulong) + sizeof(char))).ToArray();
byte[] tempTableData = new byte[tableData.Length - (sizeof(ulong) + sizeof(char))];
Array.Copy(tableData, (sizeof(ulong) + sizeof(char)), tempTableData, 0, tempTableData.Length);
tableData = tempTableData;
}
return dictionary;
}
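The change above repeats one pattern: every LINQ Skip/Take slice over an array becomes an explicit Array.Copy into a right-sized buffer, which removes the System.Linq dependency for the oldest target frameworks. The pattern in isolation, with illustrative values only:

using System;

char[] input = "abcdef".ToCharArray();
int start = 2, length = 3;

// Equivalent of input.Skip(start).Take(length).ToArray(), without LINQ
char[] slice = new char[length];
Array.Copy(input, start, slice, 0, length);
Console.WriteLine(new string(slice)); // "cde"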

View File

@@ -1,46 +1,39 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<!-- Assembly Properties -->
<TargetFrameworks>net20;net35;net40;net452;net462;net472;net48;netcoreapp3.1;net5.0;net6.0;net7.0;net8.0</TargetFrameworks>
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64;osx-arm64</RuntimeIdentifiers>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.4.0</Version>
<PropertyGroup>
<!-- Assembly Properties -->
<TargetFrameworks>net20;net35;net40;net452;net462;net472;net48;netcoreapp3.1;net5.0;net6.0;net7.0;net8.0;net9.0;net10.0;netstandard2.0;netstandard2.1</TargetFrameworks>
<IncludeSymbols>true</IncludeSymbols>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<SymbolPackageFormat>snupkg</SymbolPackageFormat>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.9.1</Version>
<!-- Package Properties -->
<Authors>Matt Nadareski</Authors>
<Description>Code to interact with redump.org</Description>
<Copyright>Copyright (c) Matt Nadareski 2020-2024</Copyright>
<PackageProjectUrl>https://github.com/SabreTools/</PackageProjectUrl>
<PackageReadmeFile>README.md</PackageReadmeFile>
<RepositoryUrl>https://github.com/SabreTools/SabreTools.RedumpLib</RepositoryUrl>
<RepositoryType>git</RepositoryType>
<PackageTags>web client redump</PackageTags>
<PackageLicenseExpression>MIT</PackageLicenseExpression>
</PropertyGroup>
<!-- Package Properties -->
<Authors>Matt Nadareski</Authors>
<Description>Code to interact with redump.org</Description>
<Copyright>Copyright (c) Matt Nadareski 2020-2025</Copyright>
<PackageProjectUrl>https://github.com/SabreTools/</PackageProjectUrl>
<PackageReadmeFile>README.md</PackageReadmeFile>
<RepositoryUrl>https://github.com/SabreTools/SabreTools.RedumpLib</RepositoryUrl>
<RepositoryType>git</RepositoryType>
<PackageTags>web client redump</PackageTags>
<PackageLicenseExpression>MIT</PackageLicenseExpression>
</PropertyGroup>
<ItemGroup>
<None Include="../README.md" Pack="true" PackagePath="" />
</ItemGroup>
<ItemGroup>
<InternalsVisibleTo Include="SabreTools.RedumpLib.Test" />
</ItemGroup>
<!-- Support for old .NET versions -->
<ItemGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`))">
<PackageReference Include="MinValueTupleBridge" Version="0.2.1" />
</ItemGroup>
<ItemGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR $(TargetFramework.StartsWith(`net40`))">
<PackageReference Include="MinAsyncBridge" Version="0.12.4" />
<PackageReference Include="MinThreadingBridge" Version="0.11.4" />
<PackageReference Include="Net30.LinqBridge" Version="1.3.0" />
</ItemGroup>
<ItemGroup Condition="$(TargetFramework.StartsWith(`net4`))">
<PackageReference Include="System.ValueTuple" Version="4.5.0" />
</ItemGroup>
<ItemGroup>
<None Include="../README.md" Pack="true" PackagePath="" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.Models" Version="1.4.8" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="MinAsyncBridge" Version="0.12.4" Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR $(TargetFramework.StartsWith(`net40`))" />
<PackageReference Include="Net35.Actions" Version="1.4.0" Condition="$(TargetFramework.StartsWith(`net2`))" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.4" />
</ItemGroup>
</Project>

View File

@@ -1,6 +1,5 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
using SabreTools.RedumpLib.Web;
@@ -17,11 +16,20 @@ namespace SabreTools.RedumpLib
public static void NormalizeDiscType(SubmissionInfo info)
{
// If we have nothing valid, do nothing
if (info?.CommonDiscInfo?.Media == null || info?.SizeAndChecksums == null)
if (info.CommonDiscInfo.Media is null || info.SizeAndChecksums == default)
return;
#pragma warning disable IDE0010
switch (info.CommonDiscInfo.Media)
{
case DiscType.DVD5:
case DiscType.DVD9:
if (info.SizeAndChecksums.Layerbreak != default)
info.CommonDiscInfo.Media = DiscType.DVD9;
else
info.CommonDiscInfo.Media = DiscType.DVD5;
break;
case DiscType.BD25:
case DiscType.BD33:
case DiscType.BD50:
@@ -32,13 +40,13 @@ namespace SabreTools.RedumpLib
info.CommonDiscInfo.Media = DiscType.BD128;
else if (info.SizeAndChecksums.Layerbreak2 != default)
info.CommonDiscInfo.Media = DiscType.BD100;
else if (info.SizeAndChecksums.Layerbreak != default && info.SizeAndChecksums.PICIdentifier == SabreTools.Models.PIC.Constants.DiscTypeIdentifierROMUltra)
else if (info.SizeAndChecksums.Layerbreak != default && info.SizeAndChecksums.PICIdentifier == "BDU")
info.CommonDiscInfo.Media = DiscType.BD66;
else if (info.SizeAndChecksums.Layerbreak != default && info.SizeAndChecksums.Size > 50_050_629_632)
info.CommonDiscInfo.Media = DiscType.BD66;
else if (info.SizeAndChecksums.Layerbreak != default)
info.CommonDiscInfo.Media = DiscType.BD50;
else if (info.SizeAndChecksums.PICIdentifier == SabreTools.Models.PIC.Constants.DiscTypeIdentifierROMUltra)
else if (info.SizeAndChecksums.PICIdentifier == "BDU")
info.CommonDiscInfo.Media = DiscType.BD33;
else if (info.SizeAndChecksums.Size > 25_025_314_816)
info.CommonDiscInfo.Media = DiscType.BD33;
@@ -46,14 +54,6 @@ namespace SabreTools.RedumpLib
info.CommonDiscInfo.Media = DiscType.BD25;
break;
case DiscType.DVD5:
case DiscType.DVD9:
if (info.SizeAndChecksums.Layerbreak != default)
info.CommonDiscInfo.Media = DiscType.DVD9;
else
info.CommonDiscInfo.Media = DiscType.DVD5;
break;
case DiscType.HDDVDSL:
case DiscType.HDDVDDL:
if (info.SizeAndChecksums.Layerbreak != default)
@@ -74,6 +74,7 @@ namespace SabreTools.RedumpLib
default:
break;
}
#pragma warning restore IDE0010
}
/// <summary>
@@ -83,7 +84,7 @@ namespace SabreTools.RedumpLib
/// <param name="query">Query string to attempt to search for</param>
/// <param name="filterForwardSlashes">True to filter forward slashes, false otherwise</param>
/// <returns>All disc IDs for the given query, null on error</returns>
public async static Task<List<int>?> ListSearchResults(RedumpClient rc, string? query, bool filterForwardSlashes = true)
public static async Task<List<int>?> ListSearchResults(RedumpClient rc, string? query, bool filterForwardSlashes = true)
{
// If there is an invalid query
if (string.IsNullOrEmpty(query))
@@ -109,7 +110,10 @@ namespace SabreTools.RedumpLib
int pageNumber = 1;
while (true)
{
List<int> pageIds = await rc.CheckSingleSitePage(string.Format(Constants.QuickSearchUrl, query, pageNumber++));
List<int>? pageIds = await rc.CheckSingleSitePage(string.Format(Constants.QuickSearchUrl, query, pageNumber++));
if (pageIds is null)
return null;
ids.AddRange(pageIds);
if (pageIds.Count <= 1)
break;
@@ -130,27 +134,27 @@ namespace SabreTools.RedumpLib
/// <param name="rc">RedumpClient for making the connection</param>
/// <param name="info">Existing SubmissionInfo object to fill</param>
/// <param name="sha1">SHA-1 hash to check against</param>
/// <returns>True if the track was found, false otherwise; List of found values, if possible</returns>
public async static Task<(bool, List<int>?, string?)> ValidateSingleTrack(RedumpClient rc, SubmissionInfo info, string? sha1)
/// <returns>List of found values, if possible</returns>
public static async Task<List<int>?> ValidateSingleTrack(RedumpClient rc, SubmissionInfo info, string? sha1)
{
// Get all matching IDs for the track
var newIds = await ListSearchResults(rc, sha1);
// If we got null back, there was an error
if (newIds == null)
return (false, null, "There was an unknown error retrieving information from Redump");
if (newIds is null)
return null;
// If no IDs match, just return
if (!newIds.Any())
return (false, null, $"There were no matching IDs for track with SHA-1 of '{sha1}'");
// If no IDs match, return an empty list
if (newIds.Count == 0)
return [];
// Join the list of found IDs to the existing list, if possible
if (info.PartiallyMatchedIDs != null && info.PartiallyMatchedIDs.Any())
if (info.PartiallyMatchedIDs is not null && info.PartiallyMatchedIDs.Count > 0)
info.PartiallyMatchedIDs.AddRange(newIds);
else
info.PartiallyMatchedIDs = newIds;
return (true, newIds, $"There were matching ID(s) found for track with SHA-1 of '{sha1}'");
return newIds;
}
/// <summary>
@@ -158,40 +162,43 @@ namespace SabreTools.RedumpLib
/// </summary>
/// <param name="rc">RedumpClient for making the connection</param>
/// <param name="info">Existing SubmissionInfo object to fill</param>
/// <param name="resultProgress">Optional result progress callback</param>
/// <returns>True if the track was found, false otherwise; List of found values, if possible</returns>
public async static Task<(bool, List<int>?, string?)> ValidateUniversalHash(RedumpClient rc, SubmissionInfo info)
/// <returns>List of found values, if possible</returns>
public static async Task<List<int>?> ValidateUniversalHash(RedumpClient rc, SubmissionInfo info)
{
// If we don't have special fields
if (info.CommonDiscInfo?.CommentsSpecialFields == null)
return (false, null, "Universal hash was missing");
if (info.CommonDiscInfo.CommentsSpecialFields is null)
return null;
// If we don't have a universal hash
string? universalHash = info.CommonDiscInfo.CommentsSpecialFields[SiteCode.UniversalHash];
if (string.IsNullOrEmpty(universalHash))
return (false, null, "Universal hash was missing");
return null;
// Format the universal hash for finding within the comments
#if NETCOREAPP || NETSTANDARD2_1_OR_GREATER
string universalHashQuery = $"{universalHash[..^1]}/comments/only";
#else
string universalHashQuery = $"{universalHash.Substring(0, universalHash.Length - 1)}/comments/only";
#endif
// Get all matching IDs for the hash
var newIds = await ListSearchResults(rc, universalHashQuery, filterForwardSlashes: false);
// If we got null back, there was an error
if (newIds == null)
return (false, null, "There was an unknown error retrieving information from Redump");
if (newIds is null)
return null;
// If no IDs match, just return
if (!newIds.Any())
return (false, null, $"There were no matching IDs for universal hash of '{universalHash}'");
// If no IDs match, just return an empty list
if (newIds.Count == 0)
return [];
// Join the list of found IDs to the existing list, if possible
if (info.PartiallyMatchedIDs != null && info.PartiallyMatchedIDs.Any())
if (info.PartiallyMatchedIDs is not null && info.PartiallyMatchedIDs.Count > 0)
info.PartiallyMatchedIDs.AddRange(newIds);
else
info.PartiallyMatchedIDs = newIds;
return (true, newIds, $"There were matching ID(s) found for universal hash of '{universalHash}'");
return newIds;
}
/// <summary>
@@ -201,7 +208,7 @@ namespace SabreTools.RedumpLib
/// <param name="id">Redump disc ID to retrieve</param>
/// <param name="localCount">Local count of tracks for the current disc</param>
/// <returns>True if the track count matches, false otherwise</returns>
public async static Task<bool> ValidateTrackCount(RedumpClient rc, int id, int localCount)
public static async Task<bool> ValidateTrackCount(RedumpClient rc, int id, int localCount)
{
// If we can't pull the remote data, we can't match
string? discData = await rc.DownloadSingleSiteID(id);
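With the tuple return gone, callers distinguish the three outcomes by the list itself: null means the Redump lookup failed, an empty list means no IDs matched the hash, and a non-empty list holds the matches (which are also merged into info.PartiallyMatchedIDs). A hedged sketch of a caller in the same static class, so the method can be invoked unqualified; rc, info, and sha1 are assumed to be in scope:

// Hedged sketch; `rc`, `info`, and `sha1` come from the surrounding context.
List<int>? matches = await ValidateSingleTrack(rc, info, sha1);
if (matches is null)
    Console.WriteLine("There was an unknown error retrieving information from Redump");
else if (matches.Count == 0)
    Console.WriteLine($"No matching IDs for track with SHA-1 of '{sha1}'");
else
    Console.WriteLine($"Matching IDs: {string.Join(", ", matches)}");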

View File

@@ -1,11 +1,18 @@
using System;
using System.Net;
#pragma warning disable SYSLIB0014 // 'WebClient.WebClient()' is obsolete
namespace SabreTools.RedumpLib.Web
{
#if NET6_0_OR_GREATER
#pragma warning disable SYSLIB0014
#endif
internal class CookieWebClient : WebClient
{
/// <summary>
/// The timespan to wait before the request times out.
/// </summary>
public TimeSpan Timeout { get; set; }
// https://stackoverflow.com/questions/1777221/using-cookiecontainer-with-webclient-class
private readonly CookieContainer _container = new();
@@ -15,7 +22,7 @@ namespace SabreTools.RedumpLib.Web
public string? GetLastFilename()
{
// If the response headers are null or empty
if (ResponseHeaders == null || ResponseHeaders.Count == 0)
if (ResponseHeaders is null || ResponseHeaders.Count == 0)
return null;
// If we don't have the response header we care about
@@ -24,7 +31,11 @@ namespace SabreTools.RedumpLib.Web
return null;
// Extract the filename from the value
#if NETCOREAPP || NETSTANDARD2_1_OR_GREATER
return headerValue[(headerValue.IndexOf("filename=") + 9)..].Replace("\"", "");
#else
return headerValue.Substring(headerValue.IndexOf("filename=") + 9).Replace("\"", "");
#endif
}
/// <inheritdoc/>
@@ -32,9 +43,15 @@ namespace SabreTools.RedumpLib.Web
{
WebRequest request = base.GetWebRequest(address);
if (request is HttpWebRequest webRequest)
{
webRequest.Timeout = (int)Timeout.TotalMilliseconds;
webRequest.CookieContainer = _container;
}
return request;
}
}
}
#if NET6_0_OR_GREATER
#pragma warning restore SYSLIB0014
#endif
}
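As a standalone illustration of the `filename=` extraction above (the header value is made-up example data; only the string handling is being demonstrated):

    // A Content-Disposition style header value (example data only)
    string headerValue = "attachment; filename=\"example_pack.zip\"";
    // Same logic as GetLastFilename: skip past "filename=" (9 characters) and strip quotes
    string fileName = headerValue.Substring(headerValue.IndexOf("filename=") + 9).Replace("\"", "");
    // fileName is now "example_pack.zip"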

View File

@@ -18,4 +18,4 @@ namespace SabreTools.RedumpLib.Web
Thread.Sleep(delay * 100);
}
}
}
}

View File

@@ -1,4 +1,5 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
@@ -15,17 +16,22 @@ namespace SabreTools.RedumpLib.Web
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="force">Force continuation of download</param>
public static async Task<bool> DownloadLastModified(RedumpClient rc, string? outDir, bool force)
/// <returns>All disc IDs in last modified range, empty on error</returns>
public static async Task<List<int>> DownloadLastModified(RedumpClient rc, string? outDir, bool force)
{
List<int> ids = [];
// Keep getting last modified pages until there are none left
int pageNumber = 1;
while (true)
{
if (!await rc.CheckSingleSitePage(string.Format(Constants.LastModifiedUrl, pageNumber++), outDir, !force))
var pageIds = await rc.CheckSingleSitePage(string.Format(Constants.LastModifiedUrl, pageNumber++), outDir, !force);
ids.AddRange(pageIds);
if (pageIds.Count == 0)
break;
}
return true;
return ids;
}
/// <summary>
@@ -35,21 +41,25 @@ namespace SabreTools.RedumpLib.Web
/// <param name="outDir">Output directory to save data to</param>
/// <param name="minId">Starting ID for the range</param>
/// <param name="maxId">Ending ID for the range (inclusive)</param>
public static async Task<bool> DownloadSiteRange(RedumpClient rc, string? outDir, int minId = 0, int maxId = 0)
/// <returns>All disc IDs in the requested site ID range, empty on error</returns>
public static async Task<List<int>> DownloadSiteRange(RedumpClient rc, string? outDir, int minId = 0, int maxId = 0)
{
List<int> ids = [];
if (!rc.LoggedIn)
{
Console.WriteLine("Site download functionality is only available to Redump members");
return false;
return ids;
}
for (int id = minId; id <= maxId; id++)
{
ids.Add(id);
if (await rc.DownloadSingleSiteID(id, outDir, true))
DelayHelper.DelayRandom(); // Intentional sleep here so we don't flood the server
}
return true;
return ids;
}
}
}
}
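A minimal sketch of how a caller might consume the new List<int> return values. The enclosing static class is not shown in these hunks, so `Downloader` is used purely as a placeholder, and the output directory is an example value:

    var rc = new RedumpClient();
    // IDs seen on the last-modified pages; an empty list means nothing was found or an error occurred
    List<int> modifiedIds = await Downloader.DownloadLastModified(rc, outDir: "dumps", force: false);
    // IDs attempted in an explicit range; requires a logged-in client, otherwise the list comes back empty
    List<int> rangeIds = await Downloader.DownloadSiteRange(rc, outDir: "dumps", minId: 1000, maxId: 1010);
    Console.WriteLine($"Last modified: {modifiedIds.Count}, range: {rangeIds.Count}");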

View File

@@ -1,5 +1,4 @@
using System;
using System.Linq;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
@@ -18,19 +17,15 @@ namespace SabreTools.RedumpLib.Web
/// <param name="useSubfolders">True to use named subfolders to store downloads, false to store directly in the output directory</param>
public static async Task<bool> DownloadPacks(RedumpClient rc, string? outDir, bool useSubfolders)
{
#if NETFRAMEWORK || NETCOREAPP3_1
var systems = Enum.GetValues(typeof(RedumpSystem)).OfType<RedumpSystem>().Select(s => new Nullable<RedumpSystem>(s));
#else
var systems = Enum.GetValues<RedumpSystem>().Select(s => new RedumpSystem?(s));
#endif
var systems = (RedumpSystem[])Enum.GetValues(typeof(RedumpSystem));
await rc.DownloadPacks(Constants.PackCuesUrl, systems.Where(s => s.HasCues()).ToArray(), "CUEs", outDir, useSubfolders ? "cue" : null);
await rc.DownloadPacks(Constants.PackDatfileUrl, systems.Where(s => s.HasDat()).ToArray(), "DATs", outDir, useSubfolders ? "dat" : null);
await rc.DownloadPacks(Constants.PackDkeysUrl, systems.Where(s => s.HasDkeys()).ToArray(), "Decrypted KEYS", outDir, useSubfolders ? "dkey" : null);
await rc.DownloadPacks(Constants.PackGdiUrl, systems.Where(s => s.HasGdi()).ToArray(), "GDIs", outDir, useSubfolders ? "gdi" : null);
await rc.DownloadPacks(Constants.PackKeysUrl, systems.Where(s => s.HasKeys()).ToArray(), "KEYS", outDir, useSubfolders ? "keys" : null);
await rc.DownloadPacks(Constants.PackLsdUrl, systems.Where(s => s.HasLsd()).ToArray(), "LSD", outDir, useSubfolders ? "lsd" : null);
await rc.DownloadPacks(Constants.PackSbiUrl, systems.Where(s => s.HasSbi()).ToArray(), "SBIs", outDir, useSubfolders ? "sbi" : null);
await rc.DownloadPacks(Constants.PackCuesUrl, Array.FindAll(systems, s => s.HasCues()), "CUEs", outDir, useSubfolders ? "cue" : null);
await rc.DownloadPacks(Constants.PackDatfileUrl, Array.FindAll(systems, s => s.HasDat()), "DATs", outDir, useSubfolders ? "dat" : null);
await rc.DownloadPacks(Constants.PackDkeysUrl, Array.FindAll(systems, s => s.HasDkeys()), "Decrypted KEYS", outDir, useSubfolders ? "dkey" : null);
await rc.DownloadPacks(Constants.PackGdiUrl, Array.FindAll(systems, s => s.HasGdi()), "GDIs", outDir, useSubfolders ? "gdi" : null);
await rc.DownloadPacks(Constants.PackKeysUrl, Array.FindAll(systems, s => s.HasKeys()), "KEYS", outDir, useSubfolders ? "keys" : null);
await rc.DownloadPacks(Constants.PackLsdUrl, Array.FindAll(systems, s => s.HasLsd()), "LSD", outDir, useSubfolders ? "lsd" : null);
await rc.DownloadPacks(Constants.PackSbiUrl, Array.FindAll(systems, s => s.HasSbi()), "SBIs", outDir, useSubfolders ? "sbi" : null);
return true;
}
@@ -44,7 +39,10 @@ namespace SabreTools.RedumpLib.Web
/// <param name="useSubfolders">True to use named subfolders to store downloads, false to store directly in the output directory</param>
public static async Task<bool> DownloadPacksForSystem(RedumpClient rc, RedumpSystem? system, string? outDir, bool useSubfolders)
{
var systemAsArray = new RedumpSystem?[] { system };
if (system is null)
return false;
var systemAsArray = new RedumpSystem[] { system.Value };
if (system.HasCues())
await rc.DownloadPacks(Constants.PackCuesUrl, systemAsArray, "CUEs", outDir, useSubfolders ? "cue" : null);
@@ -70,4 +68,4 @@ namespace SabreTools.RedumpLib.Web
return true;
}
}
}
}
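A short sketch of the new null guard in DownloadPacksForSystem (the enclosing static class name is not visible here either, so `Packs` is only a placeholder; RedumpSystem comes from SabreTools.RedumpLib.Data):

    var rc = new RedumpClient();
    RedumpSystem? system = null; // e.g. parsed from user input, so it may legitimately be null
    bool ok = await Packs.DownloadPacksForSystem(rc, system, outDir: "packs", useSubfolders: true);
    // With the guard above, a null system now short-circuits to false instead of passing null downstream
    Console.WriteLine(ok ? "Packs downloaded" : "Nothing downloaded (null system or nothing available)");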

View File

@@ -1,7 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
#if NETCOREAPP
using System.Net.Http;
@@ -32,15 +31,20 @@ namespace SabreTools.RedumpLib.Web
/// <summary>
/// Maximum retry count for any operation
/// </summary>
public int RetryCount { get; private set; } = 3;
public int RetryCount { get; }
/// <summary>
/// Maximum number of seconds for a retry
/// </summary>
public int TimeoutSeconds { get; }
/// <summary>
/// Internal client for interaction
/// </summary>
#if NETFRAMEWORK
private CookieWebClient _internalClient;
#if NETFRAMEWORK || NETSTANDARD2_0_OR_GREATER
private readonly CookieWebClient _internalClient;
#else
private HttpClient _internalClient;
private readonly HttpClient _internalClient;
#endif
#endregion
@@ -48,25 +52,24 @@ namespace SabreTools.RedumpLib.Web
/// <summary>
/// Constructor
/// </summary>
public RedumpClient()
{
#if NETFRAMEWORK
_internalClient = new CookieWebClient();
#else
_internalClient = new HttpClient(new HttpClientHandler { UseCookies = true });
#endif
}
/// <summary>
/// Constructor
/// </summary>
public RedumpClient(int retryCount) : this()
public RedumpClient(int retryCount = 3, int timeoutSeconds = 30)
{
// Ensure there are a positive number of retries
if (retryCount <= 0)
retryCount = 3;
// Ensure a positive timespan
if (timeoutSeconds <= 0)
timeoutSeconds = 30;
RetryCount = retryCount;
TimeoutSeconds = timeoutSeconds;
#if NETFRAMEWORK || NETSTANDARD2_0_OR_GREATER
_internalClient = new CookieWebClient() { Timeout = TimeSpan.FromSeconds(TimeoutSeconds) };
#else
_internalClient = new HttpClient(new HttpClientHandler { UseCookies = true }) { Timeout = TimeSpan.FromSeconds(TimeoutSeconds) };
#endif
}
#region Credentials
@@ -74,22 +77,22 @@ namespace SabreTools.RedumpLib.Web
/// <summary>
/// Validate supplied credentials
/// </summary>
public async static Task<(bool?, string?)> ValidateCredentials(string username, string password)
public static async Task<bool?> ValidateCredentials(string username, string password)
{
// If options are invalid or we're missing something key, just return
if (string.IsNullOrEmpty(username) || string.IsNullOrEmpty(password))
return (false, null);
return false;
// Try logging in with the supplied credentials otherwise
var redumpClient = new RedumpClient();
bool? loggedIn = await redumpClient.Login(username, password);
if (loggedIn == true)
return (true, "Redump username and password accepted!");
return true;
else if (loggedIn == false)
return (false, "Redump username and password denied!");
return false;
else
return (null, "An error occurred validating your credentials!");
return null;
}
/// <summary>
@@ -129,10 +132,10 @@ namespace SabreTools.RedumpLib.Web
try
{
// Get the current token from the login page
var loginPage = await DownloadStringWithRetries(Constants.LoginUrl);
var loginPage = await DownloadString(Constants.LoginUrl);
string token = Constants.TokenRegex.Match(loginPage ?? string.Empty).Groups[1].Value;
#if NETFRAMEWORK
#if NETFRAMEWORK || NETSTANDARD2_0_OR_GREATER
// Construct the login request
_internalClient.Headers[HttpRequestHeader.ContentType] = "application/x-www-form-urlencoded";
_internalClient.Encoding = Encoding.UTF8;
@@ -147,7 +150,7 @@ namespace SabreTools.RedumpLib.Web
// Send the login request and get the result
var response = await _internalClient.PostAsync(Constants.LoginUrl, postContent);
string? responseContent = null;
if (response?.Content != null)
if (response?.Content is not null)
responseContent = await response.Content.ReadAsStringAsync();
#endif
@@ -187,6 +190,108 @@ namespace SabreTools.RedumpLib.Web
#endregion
#region Generic Helpers
/// <summary>
/// Download from a URI to a byte array
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <returns>Byte array from the URI, null on error</returns>
public async Task<byte[]?> DownloadData(string uri)
{
// Only retry a positive number of times
if (RetryCount <= 0)
return null;
for (int i = 0; i < RetryCount; i++)
{
try
{
#if NET40
return await Task.Factory.StartNew(() => _internalClient.DownloadData(uri));
#elif NETFRAMEWORK || NETSTANDARD2_0_OR_GREATER
return await Task.Run(() => _internalClient.DownloadData(uri));
#else
return await _internalClient.GetByteArrayAsync(uri);
#endif
}
catch { }
// Sleep for 100ms if the last attempt failed
Thread.Sleep(100);
}
return null;
}
/// <summary>
/// Download from a URI to a local file
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <param name="fileName">Filename to write to</param>
/// <returns>The remote filename from the URI, null on error</returns>
public async Task<string?> DownloadFile(string uri, string fileName)
{
#if NET40
await Task.Factory.StartNew(() => { _internalClient.DownloadFile(uri, fileName); return true; });
return _internalClient.GetLastFilename();
#elif NETFRAMEWORK || NETSTANDARD2_0_OR_GREATER
await Task.Run(() => _internalClient.DownloadFile(uri, fileName));
return _internalClient.GetLastFilename();
#else
// Make the call to get the file
var response = await _internalClient.GetAsync(uri);
if (response?.Content?.Headers is null || !response.IsSuccessStatusCode)
{
Console.WriteLine($"Could not download {uri}");
return null;
}
// Copy the data to a local temp file
using (var responseStream = await response.Content.ReadAsStreamAsync())
using (var tempFileStream = File.OpenWrite(fileName))
{
responseStream.CopyTo(tempFileStream);
}
return response.Content.Headers.ContentDisposition?.FileName?.Replace("\"", "");
#endif
}
/// <summary>
/// Download from a URI to a string
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <returns>String from the URI, null on error</returns>
public async Task<string?> DownloadString(string uri)
{
// Only retry a positive number of times
if (RetryCount <= 0)
return null;
for (int i = 0; i < RetryCount; i++)
{
try
{
#if NET40
return await Task.Factory.StartNew(() => _internalClient.DownloadString(uri));
#elif NETFRAMEWORK || NETSTANDARD2_0_OR_GREATER
return await Task.Run(() => _internalClient.DownloadString(uri));
#else
return await _internalClient.GetStringAsync(uri);
#endif
}
catch { }
// Sleep for 100ms if the last attempt failed
Thread.Sleep(100);
}
return null;
}
#endregion
#region Single Page Helpers
/// <summary>
@@ -194,15 +299,19 @@ namespace SabreTools.RedumpLib.Web
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <returns>List of IDs from the page, empty on error</returns>
public async Task<List<int>> CheckSingleSitePage(string url)
public async Task<List<int>?> CheckSingleSitePage(string url)
{
List<int> ids = [];
// Try to retrieve the data
string? dumpsPage = await DownloadStringWithRetries(url);
string? dumpsPage = await DownloadString(url);
// If the web client failed, return null
if (dumpsPage is null)
return null;
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
if (dumpsPage.Contains("No discs found."))
return ids;
// If we have a single disc page already
@@ -217,9 +326,9 @@ namespace SabreTools.RedumpLib.Web
// Otherwise, traverse each dump on the page
var matches = Constants.DiscRegex.Matches(dumpsPage);
foreach (Match? match in matches.Cast<Match?>())
foreach (Match? match in matches)
{
if (match == null)
if (match is null)
continue;
try
@@ -243,15 +352,17 @@ namespace SabreTools.RedumpLib.Web
/// <param name="url">Base URL to download using</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="failOnSingle">True to return on first error, false otherwise</param>
/// <returns>True if the page could be downloaded, false otherwise</returns>
public async Task<bool> CheckSingleSitePage(string url, string? outDir, bool failOnSingle)
/// <returns>List of IDs that were found on success, empty on error</returns>
public async Task<List<int>> CheckSingleSitePage(string url, string? outDir, bool failOnSingle)
{
List<int> ids = [];
// Try to retrieve the data
string? dumpsPage = await DownloadStringWithRetries(url);
string? dumpsPage = await DownloadString(url);
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
return false;
if (dumpsPage is null || dumpsPage.Contains("No discs found."))
return ids;
// If we have a single disc page already
if (dumpsPage.Contains("<b>Download:</b>"))
@@ -261,17 +372,19 @@ namespace SabreTools.RedumpLib.Web
{
bool downloaded = await DownloadSingleSiteID(id, outDir, false);
if (!downloaded && failOnSingle)
return false;
return ids;
ids.Add(id);
}
return false;
return ids;
}
// Otherwise, traverse each dump on the page
var matches = Constants.DiscRegex.Matches(dumpsPage);
foreach (Match? match in matches.Cast<Match?>())
foreach (Match? match in matches)
{
if (match == null)
if (match is null)
continue;
try
@@ -280,7 +393,9 @@ namespace SabreTools.RedumpLib.Web
{
bool downloaded = await DownloadSingleSiteID(value, outDir, false);
if (!downloaded && failOnSingle)
return false;
return ids;
ids.Add(value);
}
}
catch (Exception ex)
@@ -290,7 +405,7 @@ namespace SabreTools.RedumpLib.Web
}
}
return true;
return ids;
}
/// <summary>
@@ -303,17 +418,17 @@ namespace SabreTools.RedumpLib.Web
List<int> ids = [];
// Try to retrieve the data
string? dumpsPage = await DownloadStringWithRetries(url);
string? dumpsPage = await DownloadString(url);
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
if (dumpsPage is null || dumpsPage.Contains("No discs found."))
return ids;
// Otherwise, traverse each dump on the page
var matches = Constants.NewDiscRegex.Matches(dumpsPage);
foreach (Match? match in matches.Cast<Match?>())
foreach (Match? match in matches)
{
if (match == null)
if (match is null)
continue;
try
@@ -337,30 +452,33 @@ namespace SabreTools.RedumpLib.Web
/// <param name="wc">RedumpWebClient to access the packs</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="failOnSingle">True to return on first error, false otherwise</param>
/// <returns>True if the page could be downloaded, false otherwise</returns>
public async Task<bool> CheckSingleWIPPage(string url, string? outDir, bool failOnSingle)
/// <returns>List of IDs that were found on success, empty on error</returns>
public async Task<List<int>> CheckSingleWIPPage(string url, string? outDir, bool failOnSingle)
{
List<int> ids = [];
// Try to retrieve the data
string? dumpsPage = await DownloadStringWithRetries(url);
string? dumpsPage = await DownloadString(url);
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
return false;
if (dumpsPage is null || dumpsPage.Contains("No discs found."))
return ids;
// Otherwise, traverse each dump on the page
var matches = Constants.NewDiscRegex.Matches(dumpsPage);
foreach (Match? match in matches.Cast<Match?>())
foreach (Match? match in matches)
{
if (match == null)
if (match is null)
continue;
try
{
if (int.TryParse(match.Groups[2].Value, out int value))
{
ids.Add(value);
bool downloaded = await DownloadSingleWIPID(value, outDir, false);
if (!downloaded && failOnSingle)
return false;
return ids;
}
}
catch (Exception ex)
@@ -370,7 +488,7 @@ namespace SabreTools.RedumpLib.Web
}
}
return true;
return ids;
}
#endregion
@@ -389,7 +507,7 @@ namespace SabreTools.RedumpLib.Web
{
#if NET40
return await Task.Factory.StartNew(() => _internalClient.DownloadData(string.Format(url, system.ShortName())));
#elif NETFRAMEWORK
#elif NETFRAMEWORK || NETSTANDARD2_0_OR_GREATER
return await Task.Run(() => _internalClient.DownloadData(string.Format(url, system.ShortName())));
#else
return await _internalClient.GetByteArrayAsync(string.Format(url, system.ShortName()));
@@ -445,9 +563,9 @@ namespace SabreTools.RedumpLib.Web
{
// Try to retrieve the data
string discPageUri = string.Format(Constants.DiscPageUrl, +id);
string? discPage = await DownloadStringWithRetries(discPageUri);
string? discPage = await DownloadString(discPageUri);
if (discPage == null || discPage.Contains($"Disc with ID \"{id}\" doesn't exist"))
if (discPage is null || discPage.Contains($"Disc with ID \"{id}\" doesn't exist"))
{
Console.WriteLine($"ID {paddedId} could not be found!");
return null;
@@ -483,9 +601,9 @@ namespace SabreTools.RedumpLib.Web
{
// Try to retrieve the data
string discPageUri = string.Format(Constants.DiscPageUrl, +id);
string? discPage = await DownloadStringWithRetries(discPageUri);
string? discPage = await DownloadString(discPageUri);
if (discPage == null || discPage.Contains($"Disc with ID \"{id}\" doesn't exist"))
if (discPage is null || discPage.Contains($"Disc with ID \"{id}\" doesn't exist"))
{
try
{
@@ -607,9 +725,9 @@ namespace SabreTools.RedumpLib.Web
{
// Try to retrieve the data
string discPageUri = string.Format(Constants.WipDiscPageUrl, +id);
string? discPage = await DownloadStringWithRetries(discPageUri);
string? discPage = await DownloadString(discPageUri);
if (discPage == null || discPage.Contains($"WIP disc with ID \"{id}\" doesn't exist"))
if (discPage is null || discPage.Contains($"WIP disc with ID \"{id}\" doesn't exist"))
{
Console.WriteLine($"ID {paddedId} could not be found!");
return null;
@@ -645,9 +763,9 @@ namespace SabreTools.RedumpLib.Web
{
// Try to retrieve the data
string discPageUri = string.Format(Constants.WipDiscPageUrl, +id);
string? discPage = await DownloadStringWithRetries(discPageUri);
string? discPage = await DownloadString(discPageUri);
if (discPage == null || discPage.Contains($"WIP disc with ID \"{id}\" doesn't exist"))
if (discPage is null || discPage.Contains($"WIP disc with ID \"{id}\" doesn't exist"))
{
try
{
@@ -719,7 +837,7 @@ namespace SabreTools.RedumpLib.Web
/// <param name="url">Base URL to download using</param>
/// <param name="system">Systems to download packs for</param>
/// <param name="title">Name of the pack that is downloading</param>
public async Task<Dictionary<RedumpSystem, byte[]>> DownloadPacks(string url, RedumpSystem?[] systems, string title)
public async Task<Dictionary<RedumpSystem, byte[]>> DownloadPacks(string url, RedumpSystem[] systems, string title)
{
var packsDictionary = new Dictionary<RedumpSystem, byte[]>();
@@ -727,7 +845,7 @@ namespace SabreTools.RedumpLib.Web
foreach (var system in systems)
{
// If the system is invalid, we can't do anything
if (system == null || !system.IsAvailable())
if (!system.IsAvailable())
continue;
// If we didn't have credentials
@@ -741,8 +859,8 @@ namespace SabreTools.RedumpLib.Web
Console.Write($"\r{longName}{new string(' ', Console.BufferWidth - longName!.Length - 1)}");
byte[]? pack = await DownloadSinglePack(url, system);
if (pack != null)
packsDictionary.Add(system.Value, pack);
if (pack is not null)
packsDictionary.Add(system, pack);
}
Console.Write($"\rComplete!{new string(' ', Console.BufferWidth - 10)}");
@@ -753,19 +871,19 @@ namespace SabreTools.RedumpLib.Web
/// <summary>
/// Download a set of packs
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="systems">Systems to download packs for</param>
/// <param name="title">Name of the pack that is downloading</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="subfolder">Named subfolder for the pack, used optionally</param>
public async Task<bool> DownloadPacks(string url, RedumpSystem?[] systems, string title, string? outDir, string? subfolder)
public async Task<bool> DownloadPacks(string url, RedumpSystem[] systems, string title, string? outDir, string? subfolder)
{
Console.WriteLine($"Downloading {title}");
foreach (var system in systems)
{
// If the system is invalid, we can't do anything
if (system == null || !system.IsAvailable())
if (!system.IsAvailable())
continue;
// If we didn't have credentials
@@ -786,72 +904,6 @@ namespace SabreTools.RedumpLib.Web
return true;
}
/// <summary>
/// Download from a URI to a local file
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <param name="fileName">Filename to write to</param>
/// <returns>The remote filename from the URI, null on error</returns>
private async Task<string?> DownloadFile(string uri, string fileName)
{
#if NET40
await Task.Factory.StartNew(() => { _internalClient.DownloadFile(uri, fileName); return true; });
return _internalClient.GetLastFilename();
#elif NETFRAMEWORK
await Task.Run(() => _internalClient.DownloadFile(uri, fileName));
return _internalClient.GetLastFilename();
#else
// Make the call to get the file
var response = await _internalClient.GetAsync(uri);
if (response?.Content?.Headers == null || !response.IsSuccessStatusCode)
{
Console.WriteLine($"Could not download {uri}");
return null;
}
// Copy the data to a local temp file
using (var responseStream = await response.Content.ReadAsStreamAsync())
using (var tempFileStream = File.OpenWrite(fileName))
{
responseStream.CopyTo(tempFileStream);
}
return response.Content.Headers.ContentDisposition?.FileName?.Replace("\"", "");
#endif
}
/// <summary>
/// Download from a URI to a string
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <returns>String from the URI, null on error</returns>
private async Task<string?> DownloadStringWithRetries(string uri)
{
// Only retry a positive number of times
if (RetryCount <= 0)
return null;
for (int i = 0; i < RetryCount; i++)
{
try
{
#if NET40
return await Task.Factory.StartNew(() => _internalClient.DownloadString(uri));
#elif NETFRAMEWORK
return await Task.Run(() => _internalClient.DownloadString(uri));
#else
return await _internalClient.GetStringAsync(uri);
#endif
}
catch { }
// Sleep for 100ms if the last attempt failed
Thread.Sleep(100);
}
return null;
}
/// <summary>
/// Move a tempfile to a new name unless it already exists, in which case, delete the tempfile
/// </summary>
@@ -886,4 +938,4 @@ namespace SabreTools.RedumpLib.Web
#endregion
}
}
}
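A minimal usage sketch for the reworked constructor and the now-public download helpers (the URL is a placeholder; non-positive arguments fall back to the defaults):

    // Retry count and timeout are clamped back to 3 retries / 30 seconds if non-positive values are passed
    var rc = new RedumpClient(retryCount: 5, timeoutSeconds: 60);
    // DownloadString retries up to RetryCount times and returns null if every attempt fails
    string? page = await rc.DownloadString("http://redump.org/discs/"); // placeholder URL
    // CheckSingleSitePage now distinguishes a failed download (null) from "no discs found" (empty list)
    List<int>? ids = await rc.CheckSingleSitePage("http://redump.org/discs/"); // placeholder URL
    Console.WriteLine(ids is null ? "Page could not be retrieved" : $"Found {ids.Count} disc IDs");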

View File

@@ -15,12 +15,13 @@ namespace SabreTools.RedumpLib.Web
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="query">Query string to attempt to search for</param>
/// <returns>All disc IDs for the given query, null on error</returns>
public static async Task<List<int>?> ListSearchResults(RedumpClient rc, string? query)
/// <param name="noSlash">Don't replace slashes with `-` in queries</param>
/// <returns>All disc IDs for the given query, empty on error</returns>
public static async Task<List<int>> ListSearchResults(RedumpClient rc, string? query, bool noSlash)
{
// If the query is invalid
if (string.IsNullOrEmpty(query))
return null;
return [];
List<int> ids = [];
@@ -29,8 +30,9 @@ namespace SabreTools.RedumpLib.Web
// Special characters become dashes
query = query.Replace(' ', '-');
query = query.Replace('/', '-');
query = query.Replace('\\', '/');
query = query.Replace('\\', '-');
if (!noSlash)
query = query.Replace('/', '-');
// Lowercase is defined per language
query = query.ToLowerInvariant();
@@ -41,7 +43,10 @@ namespace SabreTools.RedumpLib.Web
int pageNumber = 1;
while (true)
{
List<int> pageIds = await rc.CheckSingleSitePage(string.Format(Constants.QuickSearchUrl, query, pageNumber++));
var pageIds = await rc.CheckSingleSitePage(string.Format(Constants.QuickSearchUrl, query, pageNumber++));
if (pageIds is null)
return [];
ids.AddRange(pageIds);
if (pageIds.Count <= 1)
break;
@@ -50,7 +55,7 @@ namespace SabreTools.RedumpLib.Web
catch (Exception ex)
{
Console.WriteLine($"An exception occurred while trying to log in: {ex}");
return null;
return [];
}
return ids;
@@ -62,19 +67,24 @@ namespace SabreTools.RedumpLib.Web
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="query">Query string to attempt to search for</param>
/// <param name="outDir">Output directory to save data to</param>
public static async Task<bool> DownloadSearchResults(RedumpClient rc, string? query, string? outDir)
/// <param name="noSlash">Don't replace slashes with `-` in queries</param>
/// <returns>All disc IDs for the given query, empty on error</returns>
public static async Task<List<int>> DownloadSearchResults(RedumpClient rc, string? query, string? outDir, bool noSlash)
{
List<int> ids = [];
// If the query is invalid
if (string.IsNullOrEmpty(query))
return false;
return ids;
// Strip quotes
query = query!.Trim('"', '\'');
// Special characters become dashes
query = query.Replace(' ', '-');
query = query.Replace('/', '-');
query = query.Replace('\\', '/');
query = query.Replace('\\', '-');
if (!noSlash)
query = query.Replace('/', '-');
// Lowercase is defined per language
query = query.ToLowerInvariant();
@@ -83,11 +93,13 @@ namespace SabreTools.RedumpLib.Web
int pageNumber = 1;
while (true)
{
if (!await rc.CheckSingleSitePage(string.Format(Constants.QuickSearchUrl, query, pageNumber++), outDir, false))
var pageIds = await rc.CheckSingleSitePage(string.Format(Constants.QuickSearchUrl, query, pageNumber++), outDir, false);
ids.AddRange(pageIds);
if (pageIds.Count == 0)
break;
}
return true;
return ids;
}
}
}
}
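A caller-side sketch of the updated search helpers. The static class name is not shown in the hunk, so `Search` is a placeholder; the new noSlash flag controls whether forward slashes in the query are rewritten to dashes:

    var rc = new RedumpClient();
    // Default behaviour: spaces, slashes, and backslashes in the query become dashes
    List<int> ids = await Search.ListSearchResults(rc, "some game title", noSlash: false);
    // noSlash: true keeps forward slashes intact, e.g. for pre-built query fragments
    List<int> rawIds = await Search.ListSearchResults(rc, "0123456789abcdef/comments/only", noSlash: true);
    // Empty lists indicate either an error or no matches
    Console.WriteLine($"{ids.Count} dashed-query results, {rawIds.Count} raw-query results");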

View File

@@ -16,23 +16,28 @@ namespace SabreTools.RedumpLib.Web
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="username">Username to check discs for</param>
/// <param name="outDir">Output directory to save data to</param>
public static async Task<bool> DownloadUser(RedumpClient rc, string? username, string? outDir)
/// <returns>All disc IDs for the given user, empty on error</returns>
public static async Task<List<int>> DownloadUser(RedumpClient rc, string? username, string? outDir)
{
List<int> ids = [];
if (!rc.LoggedIn || string.IsNullOrEmpty(username))
{
Console.WriteLine("User download functionality is only available to Redump members");
return false;
return ids;
}
// Keep getting user pages until there are none left
int pageNumber = 1;
while (true)
{
if (!await rc.CheckSingleSitePage(string.Format(Constants.UserDumpsUrl, username, pageNumber++), outDir, false))
var pageIds = await rc.CheckSingleSitePage(string.Format(Constants.UserDumpsUrl, username, pageNumber++), outDir, false);
ids.AddRange(pageIds);
if (pageIds.Count == 0)
break;
}
return true;
return ids;
}
/// <summary>
@@ -41,32 +46,37 @@ namespace SabreTools.RedumpLib.Web
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="username">Username to check discs for</param>
/// <param name="outDir">Output directory to save data to</param>
public static async Task<bool> DownloadUserLastModified(RedumpClient rc, string? username, string? outDir)
/// <returns>All disc IDs for the given user, empty on error</returns>
public static async Task<List<int>> DownloadUserLastModified(RedumpClient rc, string? username, string? outDir)
{
List<int> ids = [];
if (!rc.LoggedIn || string.IsNullOrEmpty(username))
{
Console.WriteLine("User download functionality is only available to Redump members");
return false;
return ids;
}
// Keep getting last modified user pages until there are none left
int pageNumber = 1;
while (true)
{
if (!await rc.CheckSingleSitePage(string.Format(Constants.UserDumpsLastModifiedUrl, username, pageNumber++), outDir, true))
var pageIds = await rc.CheckSingleSitePage(string.Format(Constants.UserDumpsLastModifiedUrl, username, pageNumber++), outDir, true);
ids.AddRange(pageIds);
if (pageIds.Count == 0)
break;
}
return true;
return ids;
}
/// <summary>
/// List the disc IDs associated with the given user
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="username">Username to check discs for</param>
/// <returns>All disc IDs for the given user, null on error</returns>
public static async Task<List<int>?> ListUser(RedumpClient rc, string? username)
/// <returns>All disc IDs for the given user, empty on error</returns>
public static async Task<List<int>> ListUser(RedumpClient rc, string? username)
{
List<int> ids = [];
@@ -82,7 +92,10 @@ namespace SabreTools.RedumpLib.Web
int pageNumber = 1;
while (true)
{
List<int> pageIds = await rc.CheckSingleSitePage(string.Format(Constants.UserDumpsUrl, username, pageNumber++));
var pageIds = await rc.CheckSingleSitePage(string.Format(Constants.UserDumpsUrl, username, pageNumber++));
if (pageIds is null)
return [];
ids.AddRange(pageIds);
if (pageIds.Count <= 1)
break;
@@ -91,10 +104,10 @@ namespace SabreTools.RedumpLib.Web
catch (Exception ex)
{
Console.WriteLine($"An exception occurred while trying to log in: {ex}");
return null;
return [];
}
return ids;
}
}
}
}
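A brief sketch for the user-centric helpers (placeholder class name `User`, placeholder username; empty lists signal an error, no matches, or missing permissions):

    var rc = new RedumpClient();
    List<int> listed = await User.ListUser(rc, "example-user"); // placeholder username
    // The download variants additionally require rc.LoggedIn == true
    List<int> downloaded = await User.DownloadUser(rc, "example-user", outDir: "dumps");
    Console.WriteLine($"Listed {listed.Count} dumps, downloaded {downloaded.Count}");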

View File

@@ -1,4 +1,5 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
@@ -14,7 +15,8 @@ namespace SabreTools.RedumpLib.Web
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="outDir">Output directory to save data to</param>
public static async Task<bool> DownloadLastSubmitted(RedumpClient rc, string? outDir)
/// <returns>All disc IDs in last submitted range, empty on error</returns>
public static async Task<List<int>> DownloadLastSubmitted(RedumpClient rc, string? outDir)
{
return await rc.CheckSingleWIPPage(Constants.WipDumpsUrl, outDir, false);
}
@@ -26,21 +28,25 @@ namespace SabreTools.RedumpLib.Web
/// <param name="outDir">Output directory to save data to</param>
/// <param name="minId">Starting ID for the range</param>
/// <param name="maxId">Ending ID for the range (inclusive)</param>
public static async Task<bool> DownloadWIPRange(RedumpClient rc, string? outDir, int minId = 0, int maxId = 0)
/// <returns>All disc IDs in the requested WIP range, empty on error</returns>
public static async Task<List<int>> DownloadWIPRange(RedumpClient rc, string? outDir, int minId = 0, int maxId = 0)
{
List<int> ids = [];
if (!rc.LoggedIn || !rc.IsStaff)
{
Console.WriteLine("WIP download functionality is only available to Redump moderators");
return false;
return ids;
}
for (int id = minId; id <= maxId; id++)
{
ids.Add(id);
if (await rc.DownloadSingleWIPID(id, outDir, true))
DelayHelper.DelayRandom(); // Intentional sleep here so we don't flood the server
}
return true;
return ids;
}
}
}
}
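And a matching sketch for the WIP helpers (placeholder class name `Wip`; DownloadWIPRange requires a logged-in staff client, otherwise it returns an empty list):

    var rc = new RedumpClient();
    List<int> lastSubmitted = await Wip.DownloadLastSubmitted(rc, outDir: "wip");
    // Returns the attempted ID range; comes back empty if the client is not logged in as staff
    List<int> wipIds = await Wip.DownloadWIPRange(rc, outDir: "wip", minId: 500, maxId: 510);
    Console.WriteLine($"Last submitted: {lastSubmitted.Count}, range: {wipIds.Count}");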

publish-nix.sh (executable file, 151 lines)
View File

@@ -0,0 +1,151 @@
#! /bin/bash
# This shell script assumes the following:
# - .NET 10.0 (or newer) SDK is installed and in PATH
#
# If any of these are not satisfied, the operation may fail
# in an unpredictable way and result in an incomplete output.
# Optional parameters
USE_ALL=false
INCLUDE_DEBUG=false
NO_BUILD=false
NO_ARCHIVE=false
while getopts "udba" OPTION; do
case $OPTION in
u)
USE_ALL=true
;;
d)
INCLUDE_DEBUG=true
;;
b)
NO_BUILD=true
;;
a)
NO_ARCHIVE=true
;;
*)
echo "Invalid option provided"
exit 1
;;
esac
done
# Set the current directory as a variable
BUILD_FOLDER=$PWD
# Set the current commit hash
COMMIT=`git log --pretty=%H -1`
# Output the selected options
echo "Selected Options:"
echo " Use all frameworks (-u) $USE_ALL"
echo " Include debug builds (-d) $INCLUDE_DEBUG"
echo " No build (-b) $NO_BUILD"
echo " No archive (-a) $NO_ARCHIVE"
echo " "
# Create the build matrix arrays
FRAMEWORKS=("net10.0")
RUNTIMES=("win-x86" "win-x64" "win-arm64" "linux-x64" "linux-arm64" "osx-x64" "osx-arm64")
# Use expanded lists, if requested
if [ $USE_ALL = true ]
then
FRAMEWORKS=("net20" "net35" "net40" "net452" "net462" "net472" "net48" "netcoreapp3.1" "net5.0" "net6.0" "net7.0" "net8.0" "net9.0" "net10.0")
fi
# Create the filter arrays
SINGLE_FILE_CAPABLE=("net5.0" "net6.0" "net7.0" "net8.0" "net9.0" "net10.0")
VALID_APPLE_FRAMEWORKS=("net6.0" "net7.0" "net8.0" "net9.0" "net10.0")
VALID_CROSS_PLATFORM_FRAMEWORKS=("netcoreapp3.1" "net5.0" "net6.0" "net7.0" "net8.0" "net9.0" "net10.0")
VALID_CROSS_PLATFORM_RUNTIMES=("win-arm64" "linux-x64" "linux-arm64" "osx-x64" "osx-arm64")
# Only build if requested
if [ $NO_BUILD = false ]
then
# Restore Nuget packages for all builds
echo "Restoring Nuget packages"
dotnet restore
# Create Nuget Package
dotnet pack SabreTools.RedumpLib/SabreTools.RedumpLib.csproj --output $BUILD_FOLDER
# Build Tool
for FRAMEWORK in "${FRAMEWORKS[@]}"; do
for RUNTIME in "${RUNTIMES[@]}"; do
# Output the current build
echo "===== Build Tool - $FRAMEWORK, $RUNTIME ====="
# If we have an invalid combination of framework and runtime
if [[ ! $(echo ${VALID_CROSS_PLATFORM_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ]]; then
if [[ $(echo ${VALID_CROSS_PLATFORM_RUNTIMES[@]} | fgrep -w $RUNTIME) ]]; then
echo "Skipped due to invalid combination"
continue
fi
fi
# If we have Apple silicon but an unsupported framework
if [[ ! $(echo ${VALID_APPLE_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ]]; then
if [ $RUNTIME = "osx-arm64" ]; then
echo "Skipped due to no Apple Silicon support"
continue
fi
fi
# Only .NET 5 and above can publish to a single file
if [[ $(echo ${SINGLE_FILE_CAPABLE[@]} | fgrep -w $FRAMEWORK) ]]; then
# Only include Debug if set
if [ $INCLUDE_DEBUG = true ]; then
dotnet publish RedumpTool/RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true
fi
dotnet publish RedumpTool/RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true -p:DebugType=None -p:DebugSymbols=false
else
# Only include Debug if set
if [ $INCLUDE_DEBUG = true ]; then
dotnet publish RedumpTool/RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT
fi
dotnet publish RedumpTool/RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:DebugType=None -p:DebugSymbols=false
fi
done
done
fi
# Only create archives if requested
if [ $NO_ARCHIVE = false ]; then
# Create Tool archives
for FRAMEWORK in "${FRAMEWORKS[@]}"; do
for RUNTIME in "${RUNTIMES[@]}"; do
# Output the current build
echo "===== Archive Tool - $FRAMEWORK, $RUNTIME ====="
# If we have an invalid combination of framework and runtime
if [[ ! $(echo ${VALID_CROSS_PLATFORM_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ]]; then
if [[ $(echo ${VALID_CROSS_PLATFORM_RUNTIMES[@]} | fgrep -w $RUNTIME) ]]; then
echo "Skipped due to invalid combination"
continue
fi
fi
# If we have Apple silicon but an unsupported framework
if [[ ! $(echo ${VALID_APPLE_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ]]; then
if [ $RUNTIME = "osx-arm64" ]; then
echo "Skipped due to no Apple Silicon support"
continue
fi
fi
# Only include Debug if set
if [ $INCLUDE_DEBUG = true ]; then
cd $BUILD_FOLDER/RedumpTool/bin/Debug/${FRAMEWORK}/${RUNTIME}/publish/
zip -r $BUILD_FOLDER/RedumpTool_${FRAMEWORK}_${RUNTIME}_debug.zip .
fi
cd $BUILD_FOLDER/RedumpTool/bin/Release/${FRAMEWORK}/${RUNTIME}/publish/
zip -r $BUILD_FOLDER/RedumpTool_${FRAMEWORK}_${RUNTIME}_release.zip .
done
done
# Reset the directory
cd $BUILD_FOLDER
fi

publish-win.ps1 (normal file, 135 lines)
View File

@@ -0,0 +1,135 @@
# This PowerShell script assumes the following:
# - .NET 10.0 (or newer) SDK is installed and in PATH
#
# If any of these are not satisfied, the operation may fail
# in an unpredictable way and result in an incomplete output.
# Optional parameters
param(
[Parameter(Mandatory = $false)]
[Alias("UseAll")]
[switch]$USE_ALL,
[Parameter(Mandatory = $false)]
[Alias("IncludeDebug")]
[switch]$INCLUDE_DEBUG,
[Parameter(Mandatory = $false)]
[Alias("NoBuild")]
[switch]$NO_BUILD,
[Parameter(Mandatory = $false)]
[Alias("NoArchive")]
[switch]$NO_ARCHIVE
)
# Set the current directory as a variable
$BUILD_FOLDER = $PSScriptRoot
# Set the current commit hash
$COMMIT = git log --pretty=format:"%H" -1
# Output the selected options
Write-Host "Selected Options:"
Write-Host " Use all frameworks (-UseAll) $USE_ALL"
Write-Host " Include debug builds (-IncludeDebug) $INCLUDE_DEBUG"
Write-Host " No build (-NoBuild) $NO_BUILD"
Write-Host " No archive (-NoArchive) $NO_ARCHIVE"
Write-Host " "
# Create the build matrix arrays
$FRAMEWORKS = @('net10.0')
$RUNTIMES = @('win-x86', 'win-x64', 'win-arm64', 'linux-x64', 'linux-arm64', 'osx-x64', 'osx-arm64')
# Use expanded lists, if requested
if ($USE_ALL.IsPresent)
{
$FRAMEWORKS = @('net20', 'net35', 'net40', 'net452', 'net462', 'net472', 'net48', 'netcoreapp3.1', 'net5.0', 'net6.0', 'net7.0', 'net8.0', 'net9.0', 'net10.0')
}
# Create the filter arrays
$SINGLE_FILE_CAPABLE = @('net5.0', 'net6.0', 'net7.0', 'net8.0', 'net9.0', 'net10.0')
$VALID_APPLE_FRAMEWORKS = @('net6.0', 'net7.0', 'net8.0', 'net9.0', 'net10.0')
$VALID_CROSS_PLATFORM_FRAMEWORKS = @('netcoreapp3.1', 'net5.0', 'net6.0', 'net7.0', 'net8.0', 'net9.0', 'net10.0')
$VALID_CROSS_PLATFORM_RUNTIMES = @('win-arm64', 'linux-x64', 'linux-arm64', 'osx-x64', 'osx-arm64')
# Only build if requested
if (!$NO_BUILD.IsPresent)
{
# Restore Nuget packages for all builds
Write-Host "Restoring Nuget packages"
dotnet restore
# Create Nuget Package
dotnet pack SabreTools.RedumpLib\SabreTools.RedumpLib.csproj --output $BUILD_FOLDER
# Build Tool
foreach ($FRAMEWORK in $FRAMEWORKS) {
foreach ($RUNTIME in $RUNTIMES) {
# Output the current build
Write-Host "===== Build Tool - $FRAMEWORK, $RUNTIME ====="
# If we have an invalid combination of framework and runtime
if ($VALID_CROSS_PLATFORM_FRAMEWORKS -notcontains $FRAMEWORK -and $VALID_CROSS_PLATFORM_RUNTIMES -contains $RUNTIME) {
Write-Host "Skipped due to invalid combination"
continue
}
# If we have Apple silicon but an unsupported framework
if ($VALID_APPLE_FRAMEWORKS -notcontains $FRAMEWORK -and $RUNTIME -eq 'osx-arm64') {
Write-Host "Skipped due to no Apple Silicon support"
continue
}
# Only .NET 5 and above can publish to a single file
if ($SINGLE_FILE_CAPABLE -contains $FRAMEWORK) {
# Only include Debug if set
if ($INCLUDE_DEBUG.IsPresent) {
dotnet publish RedumpTool\RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true
}
dotnet publish RedumpTool\RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true -p:DebugType=None -p:DebugSymbols=false
}
else {
# Only include Debug if set
if ($INCLUDE_DEBUG.IsPresent) {
dotnet publish RedumpTool\RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT
}
dotnet publish RedumpTool\RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:DebugType=None -p:DebugSymbols=false
}
}
}
}
# Only create archives if requested
if (!$NO_ARCHIVE.IsPresent) {
# Create Tool archives
foreach ($FRAMEWORK in $FRAMEWORKS) {
foreach ($RUNTIME in $RUNTIMES) {
# Output the current build
Write-Host "===== Archive Tool - $FRAMEWORK, $RUNTIME ====="
# If we have an invalid combination of framework and runtime
if ($VALID_CROSS_PLATFORM_FRAMEWORKS -notcontains $FRAMEWORK -and $VALID_CROSS_PLATFORM_RUNTIMES -contains $RUNTIME) {
Write-Host "Skipped due to invalid combination"
continue
}
# If we have Apple silicon but an unsupported framework
if ($VALID_APPLE_FRAMEWORKS -notcontains $FRAMEWORK -and $RUNTIME -eq 'osx-arm64') {
Write-Host "Skipped due to no Apple Silicon support"
continue
}
# Only include Debug if set
if ($INCLUDE_DEBUG.IsPresent) {
Set-Location -Path $BUILD_FOLDER\RedumpTool\bin\Debug\${FRAMEWORK}\${RUNTIME}\publish\
7z a -tzip $BUILD_FOLDER\RedumpTool_${FRAMEWORK}_${RUNTIME}_debug.zip *
}
Set-Location -Path $BUILD_FOLDER\RedumpTool\bin\Release\${FRAMEWORK}\${RUNTIME}\publish\
7z a -tzip $BUILD_FOLDER\RedumpTool_${FRAMEWORK}_${RUNTIME}_release.zip *
}
}
# Reset the directory
Set-Location -Path $PSScriptRoot
}