Compare commits


118 Commits
3.2.1 ... 3.2.3

Author SHA1 Message Date
Matt Nadareski
8a048c8a57 Bump version 2024-11-06 15:36:56 -05:00
Matt Nadareski
d51db072fc Force rebuild of rolling 2024-11-05 15:33:45 -05:00
Matt Nadareski
468c9937da Reduce null use in BaseProcessor 2024-11-05 15:30:54 -05:00
Matt Nadareski
206ac76633 Fix missed GetOutputFiles invocation 2024-11-05 15:23:03 -05:00
Matt Nadareski
339b0d93d1 Ensure debug symbols are stripped 2024-11-05 14:13:18 -05:00
Matt Nadareski
fd5da5de74 Add conf to build matrix 2024-11-05 13:53:05 -05:00
Matt Nadareski
42146f991d Attempt to reduce nesting in GHA builds 2024-11-05 13:46:44 -05:00
Matt Nadareski
f3f05eee48 Attempt to reduce nesting in GHA builds 2024-11-05 13:45:40 -05:00
Matt Nadareski
61bf2f69aa Attempt to reduce nesting in GHA builds 2024-11-05 13:40:05 -05:00
Matt Nadareski
546aa70b69 Update BinaryObjectScanner to 3.1.16 2024-11-05 13:17:49 -05:00
Matt Nadareski
65cd1cede3 Remove last instances of ValueTuple usage 2024-11-03 23:14:22 -05:00
Matt Nadareski
dffa16139a Remove tupling from everything except CLI programs 2024-11-03 23:09:54 -05:00
Matt Nadareski
98bacf11fc Reduce tupling even more 2024-11-03 22:51:24 -05:00
Matt Nadareski
ee0ccecfb2 Replace user info func with ProcessUserInfoDelegate 2024-11-03 22:29:26 -05:00
Matt Nadareski
a0825f276b Use new ProtectionDictionary type 2024-11-03 22:15:42 -05:00
Matt Nadareski
66d61e20de Reduce call complexity for login result 2024-11-03 21:59:22 -05:00
Matt Nadareski
6455ebd434 Simplify GetMediaType return 2024-11-03 21:55:04 -05:00
Matt Nadareski
f4436fddfa Remove unused CompilerServices.Unsafe library 2024-11-03 21:06:14 -05:00
Matt Nadareski
317777cf93 Remove old .NET version of ValueTuple where possible 2024-11-03 20:40:36 -05:00
Matt Nadareski
da6dbe136e Remove unnecessary System.ValueTuple usage 2024-11-03 20:38:35 -05:00
Matt Nadareski
13d7cf8124 Remove unused IndexRange library 2024-11-03 20:16:48 -05:00
Matt Nadareski
bb8fea625b Update BinaryObjectScanner to 3.1.15 2024-11-03 20:13:38 -05:00
Matt Nadareski
15a0659867 Use rolling release, not AppVeyor, in issue templates 2024-10-22 12:39:51 -04:00
Matt Nadareski
988a5f6d2b Update RedumpLib to 1.4.4 2024-10-18 13:04:53 -04:00
Matt Nadareski
1941639473 No directory means no files 2024-10-16 12:04:39 -04:00
Matt Nadareski
d1772f743e Reduce cleverness in output file code 2024-10-16 11:45:06 -04:00
Matt Nadareski
870c2d1cec Add separate field for Regex; assorted cleanup 2024-10-16 02:48:39 -04:00
Matt Nadareski
f7f6ae1eee Ensure Regex directories are unescaped 2024-10-16 02:31:11 -04:00
Matt Nadareski
dd9e527592 Use new output file logic in processors 2024-10-16 01:57:31 -04:00
Matt Nadareski
e1122fa976 Ensure consistency in output file path checking (fixes #755) 2024-10-16 01:56:48 -04:00
Matt Nadareski
8a44fa3355 Ensure that the full base path is being used 2024-10-14 21:47:05 -04:00
Matt Nadareski
51a9e3005f Use fake filename for Redumper DAT 2024-10-14 21:23:16 -04:00
Matt Nadareski
413b6da24b Fix trimming of header output (fixes #754) 2024-10-13 11:00:07 -04:00
Matt Nadareski
172a0fb5dc Update Redumper to build 416 2024-10-12 20:13:27 -04:00
Matt Nadareski
83a189a5d3 Format CleanRip BCA with 2-byte blocks (fixes #743) 2024-10-10 12:02:22 -04:00
Matt Nadareski
60c27ec89b Sum track errors in Redumper (fixes #745) 2024-10-10 11:19:27 -04:00
Matt Nadareski
51733557cd Remove ReadLine in list commands 2024-10-10 11:11:54 -04:00
Matt Nadareski
93d964c603 Make .NET 8 the default in issue reports 2024-10-09 12:37:21 -04:00
Matt Nadareski
2925f2262b Change multiple offset delimiter 2024-10-08 22:04:03 -04:00
Matt Nadareski
5b211a7345 Fix faulty offset dedupe logic 2024-10-08 21:53:06 -04:00
Matt Nadareski
ed4bd24fcb Include all DIC write offsets (fixes #750) 2024-10-08 15:47:24 -04:00
Matt Nadareski
8a7761753b Update to DIC 20241001 2024-10-01 22:44:41 -04:00
Matt Nadareski
1b8cca9999 Update changelog 2024-10-01 10:55:33 -04:00
TheRogueArchivist
b75391b1c6 Fix SafeDisc filtering (#749)
* Start updating filtering for SafeDisc

This will need more work, as it is currently incomplete, unoptimized, and untested.

* Further updates to SafeDisc Filtering

Still not done, but most, if not all, of the major edge cases should be accounted for. It mostly just needs testing to make sure I didn't accidentally break something along the way, plus further polishing of the code and outputs.

* Further update to SafeDisc Filtering

More cleanly covers another specific case.

* Hopefully final main additions to SafeDisc filtering

* Update SafeDisc matching for newest BOS

Fix things that broke with the BOS update, and update a few comments.
2024-10-01 10:53:58 -04:00
Matt Nadareski
e9c2fd9245 Update BinaryObjectScanner to 3.1.14 2024-09-28 13:21:37 -04:00
Matt Nadareski
fb24bbd8a5 Update to DIC 20240901 2024-09-26 10:56:50 -04:00
Matt Nadareski
4e3083c8e6 Fix date 2024-09-24 14:11:02 -04:00
Matt Nadareski
05738b7c11 Bump version 2024-09-24 13:43:18 -04:00
Matt Nadareski
f963db67b1 Update changelog 2024-09-07 01:25:28 -04:00
TurnedToast
de64631c00 Add _drive.txt file to GetOutputFiles for UmdImageCreator (#748)
* add _drive.txt file to GetOutputFiles for UmdImageCreator

* remove required from _drive

* disc -> drive
2024-09-07 01:24:49 -04:00
Matt Nadareski
c8adef78c2 Update changelog 2024-09-06 22:53:02 -04:00
TurnedToast
7b116e7a04 Ensure manufacturer files starting from 0 are zipped in redumper DVD … (#747)
* Ensure manufacturer files starting from 0 are zipped in redumper DVD processing

* Remove extraneous DVD manufacturer/physical, add needed physical for bluray
2024-09-06 22:52:20 -04:00
Matt Nadareski
fb7b6ff1be Fix typo in publisher identifiers 2024-09-05 19:47:07 -04:00
Matt Nadareski
7fb8e44c31 Forgot to assume directories don't exist 2024-08-29 01:53:49 -04:00
Matt Nadareski
239ad4c4bc Handle XGD required files 2024-08-23 21:44:54 -04:00
Matt Nadareski
9834d0ea3e Fix access permissions of output file classes 2024-08-23 21:39:50 -04:00
Matt Nadareski
a35c13bd10 Add and use CustomOutputFile 2024-08-23 21:36:04 -04:00
Matt Nadareski
5e1777a7c7 Add future XGD output files 2024-08-23 21:24:12 -04:00
Matt Nadareski
66570300df Forgot the other locations 2024-08-23 21:20:55 -04:00
Matt Nadareski
4ac1fb201e Less confusing implementation of DatfileExists 2024-08-23 21:19:07 -04:00
Matt Nadareski
cba8daa010 Add archive override for RegexOutputFile 2024-08-23 21:08:43 -04:00
Matt Nadareski
ba24a4b21a Create and use RegexOutputFile 2024-08-23 20:58:55 -04:00
Matt Nadareski
91c6fdac82 Use simplified CheckAllOutputFilesExist 2024-08-23 20:39:30 -04:00
Matt Nadareski
416656c457 Rename new method to CheckRequiredFiles 2024-08-23 20:29:48 -04:00
Matt Nadareski
fdd75818c4 Rename new method to 2024-08-23 20:29:36 -04:00
Matt Nadareski
ac302626c2 Add new, unused CheckAllOutputFilesExist variant 2024-08-23 20:27:58 -04:00
Matt Nadareski
428f3cc547 Minor tweaks to existing code 2024-08-23 19:47:10 -04:00
Matt Nadareski
66fc36fe3c Add runtime error for improperly created artifacts 2024-08-23 17:07:24 -04:00
Matt Nadareski
9dddf1c9b6 Use new func in Redumper 2024-08-23 16:49:27 -04:00
Matt Nadareski
5dbb955d26 Pass in new func for OutputFile 2024-08-23 16:42:04 -04:00
Matt Nadareski
2f2958bdea Add unused passable func to OutputFile 2024-08-23 16:32:33 -04:00
Matt Nadareski
c91f6ebbce Fix recursive issue in AddToArchive 2024-08-23 16:27:51 -04:00
Matt Nadareski
22fdd036eb Fix new AddToArchive methods 2024-08-23 16:23:45 -04:00
Matt Nadareski
3f12c6acb9 Rearrange some BaseProcessor methods 2024-08-23 16:17:22 -04:00
Matt Nadareski
1dbae18da6 Fix broken build 2024-08-23 13:51:10 -04:00
Matt Nadareski
6370e2dd6a Fix up some file path methods 2024-08-23 13:47:08 -04:00
Matt Nadareski
0c8879bc66 Split new output file methods 2024-08-23 13:34:55 -04:00
Matt Nadareski
6be34414fe Make GenerateArtifacts return a dictionary 2024-08-22 15:07:13 -04:00
Matt Nadareski
f15fc989c8 Add artifact keys for all relevant files 2024-08-22 15:03:36 -04:00
Matt Nadareski
0fc53cb534 Define new ArtifactKey field 2024-08-22 14:46:50 -04:00
Matt Nadareski
dc0909808a Replace GenerateArtifacts with common code 2024-08-22 14:43:09 -04:00
Matt Nadareski
00401d1282 Move GetLogFilePaths to better location 2024-08-22 14:27:12 -04:00
Matt Nadareski
b9d0d5d8f6 Replace GetLogFilePaths with common code 2024-08-22 14:26:43 -04:00
Matt Nadareski
22a6b77d27 Hook up GetOutputFiles in debug way 2024-08-22 14:22:37 -04:00
Matt Nadareski
bc4fe17fab Add unused GetOutputFiles method 2024-08-22 14:13:52 -04:00
Matt Nadareski
4b4027f285 Make helper class more robust 2024-08-22 13:18:47 -04:00
Matt Nadareski
d28257b2b7 Create currently-unused helper class 2024-08-22 12:57:07 -04:00
Matt Nadareski
669ef47f32 Start preparing for better output file checks 2024-08-22 12:27:00 -04:00
Matt Nadareski
be224800bc Remove redundant drive calls 2024-08-21 00:45:06 -04:00
Matt Nadareski
8dbb589d42 Create some PlayStation helper methods 2024-08-21 00:35:54 -04:00
Matt Nadareski
7b2fd5bf35 Fix minor inconsistencies 2024-08-20 23:28:25 -04:00
Matt Nadareski
95fa651074 Move MSXC parsing to PhysicalTool 2024-08-20 23:26:42 -04:00
Matt Nadareski
a0a155eb9b Preemptively update Redumper Saturn support 2024-08-20 23:12:01 -04:00
Matt Nadareski
72339b18df Remove GD-ROM version fallback method 2024-08-20 22:57:46 -04:00
Matt Nadareski
95c9c7706d Include serial for UMD (fixes #742) 2024-08-20 21:16:49 -04:00
Matt Nadareski
135bb43cdf Use new BEE method in code 2024-08-20 14:12:39 -04:00
Matt Nadareski
cfc75ca84d Move BEE method to better location 2024-08-20 14:08:51 -04:00
Matt Nadareski
33c35b63d7 Add bus encryption enabled method 2024-08-20 14:04:51 -04:00
Matt Nadareski
851a43d46f Rename 2 XGD helper methods 2024-08-20 13:53:09 -04:00
Matt Nadareski
a88bef481d Make GD-ROM LD code nicer to read 2024-08-20 13:01:46 -04:00
Matt Nadareski
781fec2b57 Futureproof GD-ROM LD in Redumper 2024-08-20 12:51:31 -04:00
Matt Nadareski
ee96367a45 Support GD-ROM info for Redumper (fixes #741) 2024-08-20 12:30:46 -04:00
Matt Nadareski
9f9bfc0888 Hash DMI and PFI files for XGD in Redumper 2024-08-18 13:53:49 -04:00
Matt Nadareski
c6cc697320 Prepare Redumper for XGD support 2024-08-18 13:40:39 -04:00
Deterous
5e3f7f740b Fix cleaning XGD3 SS (#740) 2024-08-18 10:41:44 -04:00
Matt Nadareski
e17a8f4708 Allow separate mounted path for Linux (fixes #739) 2024-08-16 20:25:39 -04:00
Matt Nadareski
ff4771a74a Quote input paths if needed (fixes #738) 2024-08-16 19:40:51 -04:00
Matt Nadareski
426717102d Add more verbose requirement to CLI help 2024-08-16 14:44:03 -04:00
Matt Nadareski
126bae33a4 Fix some CLI issues (fixes #736, fixes #737) 2024-08-16 14:29:42 -04:00
Matt Nadareski
11b8dd44bb Fix config location in OptionsLoader 2024-08-08 13:59:21 -04:00
Deterous
cbbb8aaa8c Fix XGD3 SS ranges (#733)
* Fix XGD3 SS ranges

* Changelog
2024-08-06 22:34:33 -04:00
Matt Nadareski
9ee7cd7fd7 Move two extensions to a better location 2024-08-06 20:47:52 -04:00
Matt Nadareski
324c1fcee3 Fix build for older .NET 2024-08-06 16:59:04 -04:00
Matt Nadareski
06776a6093 Add physical drive extensions to new tool 2024-08-06 16:52:37 -04:00
Matt Nadareski
43a079bb28 Fix usings ordering in ItemHelper 2024-08-06 16:18:39 -04:00
Matt Nadareski
d45345d338 Add comments around default options object 2024-08-06 14:18:08 -04:00
Matt Nadareski
1ff0340cae Add Check flags for protection scan extras 2024-08-06 14:16:44 -04:00
Matt Nadareski
278c86f9f4 Clean up some Check options, add IRD option 2024-08-06 14:09:05 -04:00
51 changed files with 3323 additions and 2337 deletions

View File

@@ -9,7 +9,7 @@ assignees: mnadareski
**Before You Submit**
- Remember to try the [latest WIP build](https://ci.appveyor.com/project/mnadareski/mpf/build/artifacts) to see if the feature already exists.
- Remember to try the [latest WIP build](https://github.com/SabreTools/MPF/releases/tag/rolling) to see if the feature already exists.
- Is it copy protection related? If so, report the issue [here](https://github.com/SabreTools/BinaryObjectScanner/issues) instead.
- Check [previous issues](https://github.com/SabreTools/MPF/issues) to see if any of those are related to what you're about to ask for.

View File

@@ -9,7 +9,7 @@ assignees: mnadareski
**Before You Submit**
- Remember to try the [latest WIP build](https://ci.appveyor.com/project/mnadareski/mpf/build/artifacts) to see if the feature already exists.
- Remember to try the [latest WIP build](https://github.com/SabreTools/MPF/releases/tag/rolling) to see if the feature already exists.
- Is it copy protection related? If so, report the issue [here](https://github.com/SabreTools/BinaryObjectScanner/issues) instead.
- Check [previous issues](https://github.com/SabreTools/MPF/issues) to see if any of those are related to what you're about to ask for.

View File

@@ -9,7 +9,7 @@ assignees: mnadareski
**Before You Submit**
- Remember to try the [latest WIP build](https://ci.appveyor.com/project/mnadareski/mpf/build/artifacts) to see if the issue has already been addressed.
- Remember to try the [latest WIP build](https://github.com/SabreTools/MPF/releases/tag/rolling) to see if the issue has already been addressed.
- Is it copy protection related? If so, report the issue [here](https://github.com/SabreTools/BinaryObjectScanner/issues) instead.
- Check multiple discs to help narrow down the issue
- Check the Options to see if changing any of those affects your issue.
@@ -25,7 +25,6 @@ What version are you using?
**Build**
What runtime version are you using?
- [ ] .NET 6.0 running on (Operating System)
- [ ] .NET 8.0 running on (Operating System)
**Describe the issue**

View File

@@ -13,6 +13,7 @@ jobs:
project: [MPF.Check]
runtime: [win-x86, win-x64, win-arm64, linux-x64, linux-arm64, osx-x64, osx-arm64]
framework: [net8.0] #[net20, net35, net40, net452, net472, net48, netcoreapp3.1, net5.0, net6.0, net7.0, net8.0]
conf: [Debug] #[Release, Debug]
steps:
- uses: actions/checkout@v4
@@ -26,22 +27,24 @@ jobs:
run: dotnet restore
- name: Build
run: dotnet publish ${{ matrix.project }}/${{ matrix.project }}.csproj -f ${{ matrix.framework }} -r ${{ matrix.runtime }} -c Debug --self-contained true --version-suffix ${{ github.sha }} ${{ (startsWith(matrix.framework, 'net5') || startsWith(matrix.framework, 'net6') || startsWith(matrix.framework, 'net7') || startsWith(matrix.framework, 'net8')) && '-p:PublishSingleFile=true' || ''}}
run: dotnet publish ${{ matrix.project }}/${{ matrix.project }}.csproj -f ${{ matrix.framework }} -r ${{ matrix.runtime }} -c ${{ matrix.conf == 'Release' && 'Release -p:DebugType=None -p:DebugSymbols=false' || 'Debug'}} --self-contained true --version-suffix ${{ github.sha }} ${{ (startsWith(matrix.framework, 'net5') || startsWith(matrix.framework, 'net6') || startsWith(matrix.framework, 'net7') || startsWith(matrix.framework, 'net8')) && '-p:PublishSingleFile=true' || ''}}
- name: Archive build
run: zip -r ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_debug.zip ${{ matrix.project }}/bin/Debug/${{ matrix.framework }}/${{ matrix.runtime }}/publish/
run: |
cd ${{ matrix.project }}/bin/${{ matrix.conf }}/${{ matrix.framework }}/${{ matrix.runtime }}/publish/
zip -r ${{ github.workspace }}/${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip ./
- name: Upload build
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_debug
path: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_debug.zip
name: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}
path: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip
- name: Upload to rolling
uses: ncipollo/release-action@v1.14.0
with:
allowUpdates: True
artifacts: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_debug.zip
artifacts: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip
body: 'Last built commit: ${{ github.sha }}'
name: 'Rolling Release'
prerelease: True

View File

@@ -13,6 +13,7 @@ jobs:
project: [MPF.CLI]
runtime: [win-x86, win-x64, win-arm64, linux-x64, linux-arm64, osx-x64, osx-arm64]
framework: [net8.0] #[net20, net35, net40, net452, net472, net48, netcoreapp3.1, net5.0, net6.0, net7.0, net8.0]
conf: [Debug] #[Release, Debug]
steps:
- uses: actions/checkout@v4
@@ -26,22 +27,24 @@ jobs:
run: dotnet restore
- name: Build
run: dotnet publish ${{ matrix.project }}/${{ matrix.project }}.csproj -f ${{ matrix.framework }} -r ${{ matrix.runtime }} -c Debug --self-contained true --version-suffix ${{ github.sha }} ${{ (startsWith(matrix.framework, 'net5') || startsWith(matrix.framework, 'net6') || startsWith(matrix.framework, 'net7') || startsWith(matrix.framework, 'net8')) && '-p:PublishSingleFile=true' || ''}}
run: dotnet publish ${{ matrix.project }}/${{ matrix.project }}.csproj -f ${{ matrix.framework }} -r ${{ matrix.runtime }} -c ${{ matrix.conf == 'Release' && 'Release -p:DebugType=None -p:DebugSymbols=false' || 'Debug'}} --self-contained true --version-suffix ${{ github.sha }} ${{ (startsWith(matrix.framework, 'net5') || startsWith(matrix.framework, 'net6') || startsWith(matrix.framework, 'net7') || startsWith(matrix.framework, 'net8')) && '-p:PublishSingleFile=true' || ''}}
- name: Archive build
run: zip -r ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_debug.zip ${{ matrix.project }}/bin/Debug/${{ matrix.framework }}/${{ matrix.runtime }}/publish/
run: |
cd ${{ matrix.project }}/bin/${{ matrix.conf }}/${{ matrix.framework }}/${{ matrix.runtime }}/publish/
zip -r ${{ github.workspace }}/${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip ./
- name: Upload build
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_debug
path: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_debug.zip
name: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}
path: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip
- name: Upload to rolling
uses: ncipollo/release-action@v1.14.0
with:
allowUpdates: True
artifacts: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_debug.zip
artifacts: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip
body: 'Last built commit: ${{ github.sha }}'
name: 'Rolling Release'
prerelease: True

View File

@@ -13,6 +13,7 @@ jobs:
project: [MPF.UI]
runtime: [win-x86, win-x64]
framework: [net8.0-windows] #[net40, net452, net472, net48, netcoreapp3.1, net5.0-windows, net6.0-windows, net7.0-windows, net8.0-windows]
conf: [Debug] #[Release, Debug]
steps:
- uses: actions/checkout@v4
@@ -26,36 +27,38 @@ jobs:
run: dotnet restore
- name: Build
run: dotnet publish ${{ matrix.project }}/${{ matrix.project }}.csproj -f ${{ matrix.framework }} -r ${{ matrix.runtime }} -c Debug --self-contained true --version-suffix ${{ github.sha }} ${{ (startsWith(matrix.framework, 'net5') || startsWith(matrix.framework, 'net6') || startsWith(matrix.framework, 'net7') || startsWith(matrix.framework, 'net8')) && '-p:PublishSingleFile=true' || ''}}
run: dotnet publish ${{ matrix.project }}/${{ matrix.project }}.csproj -f ${{ matrix.framework }} -r ${{ matrix.runtime }} -c ${{ matrix.conf == 'Release' && 'Release -p:DebugType=None -p:DebugSymbols=false' || 'Debug'}} --self-contained true --version-suffix ${{ github.sha }} ${{ (startsWith(matrix.framework, 'net5') || startsWith(matrix.framework, 'net6') || startsWith(matrix.framework, 'net7') || startsWith(matrix.framework, 'net8')) && '-p:PublishSingleFile=true' || ''}}
- name: Bundle DiscImageCreator
run: |
wget https://github.com/user-attachments/files/15521936/DiscImageCreator_20240601.zip
unzip -u DiscImageCreator_20240601.zip
mkdir -p MPF.UI/bin/Debug/${{ matrix.framework }}/${{ matrix.runtime }}/publish/Programs/Creator
mv Release_ANSI/* MPF.UI/bin/Debug/${{ matrix.framework }}/${{ matrix.runtime }}/publish/Programs/Creator/
wget https://github.com/user-attachments/files/17211434/DiscImageCreator_20241001.zip
unzip -u DiscImageCreator_20241001.zip
mkdir -p MPF.UI/bin/${{ matrix.conf }}/${{ matrix.framework }}/${{ matrix.runtime }}/publish/Programs/Creator
mv Release_ANSI/* MPF.UI/bin/${{ matrix.conf }}/${{ matrix.framework }}/${{ matrix.runtime }}/publish/Programs/Creator/
- name: Bundle Redumper
run: |
wget https://github.com/superg/redumper/releases/download/build_371/redumper-2024.05.27_build371-win64.zip
unzip redumper-2024.05.27_build371-win64.zip
mkdir -p MPF.UI/bin/Debug/${{ matrix.framework }}/${{ matrix.runtime }}/publish/Programs/Redumper
mv redumper-2024.05.27_build371-win64/bin/redumper.exe MPF.UI/bin/Debug/${{ matrix.framework }}/${{ matrix.runtime }}/publish/Programs/Redumper/
wget https://github.com/superg/redumper/releases/download/build_416/redumper-2024.10.12_build416-win64.zip
unzip redumper-2024.10.12_build416-win64.zip
mkdir -p MPF.UI/bin/${{ matrix.conf }}/${{ matrix.framework }}/${{ matrix.runtime }}/publish/Programs/Redumper
mv redumper-2024.10.12_build416-win64/bin/redumper.exe MPF.UI/bin/${{ matrix.conf }}/${{ matrix.framework }}/${{ matrix.runtime }}/publish/Programs/Redumper/
- name: Archive build
run: zip -r ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_debug.zip ${{ matrix.project }}/bin/Debug/${{ matrix.framework }}/${{ matrix.runtime }}/publish/
run: |
cd ${{ matrix.project }}/bin/${{ matrix.conf }}/${{ matrix.framework }}/${{ matrix.runtime }}/publish/
zip -r ${{ github.workspace }}/${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip ./
- name: Upload build
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_debug
path: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_debug.zip
name: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}
path: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip
- name: Upload to rolling
uses: ncipollo/release-action@v1.14.0
with:
allowUpdates: True
artifacts: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_debug.zip
artifacts: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip
body: 'Last built commit: ${{ github.sha }}'
name: 'Rolling Release'
prerelease: True

View File

@@ -1,3 +1,118 @@
### 3.2.3 (2024-11-06)
- Update to DIC 20240901
- Update BinaryObjectScanner to 3.1.14
- Fix SafeDisc filtering (TheRogueArchivist)
- Update to DIC 20241001
- Include all DIC write offsets
- Fix faulty offset dedupe logic
- Change multiple offset delimiter
- Make .NET 8 the default in issue reports
- Remove ReadLine in list commands
- Sum track errors in Redumper
- Format CleanRip BCA with 2-byte blocks
- Update Redumper to build 416
- Fix trimming of header output
- Use fake filename for Redumper DAT
- Ensure that the full base path is being used
- Ensure consistency in output file path checking
- Use new output file logic in processors
- Ensure Regex directories are unescaped
- Add separate field for Regex; assorted cleanup
- Reduce cleverness in output file code
- No directory means no files
- Update RedumpLib to 1.4.4
- Use rolling release, not AppVeyor, in issue templates
- Update BinaryObjectScanner to 3.1.15
- Remove unused IndexRange library
- Remove unnecessary System.ValueTuple usage
- Remove old .NET version of ValueTuple where possible
- Remove unused CompilerServices.Unsafe library
- Simplify GetMediaType return
- Reduce call complexity for login result
- Use new ProtectionDictionary type
- Replace user info func with ProcessUserInfoDelegate
- Reduce tupling even more
- Remove tupling from everything except CLI programs
- Remove last instances of ValueTuple usage
- Update BinaryObjectScanner to 3.1.16
- Attempt to reduce nesting in GHA builds
- Add conf to build matrix
- Ensure debug symbols are stripped
- Fix missed GetOutputFiles invocation
- Reduce null use in BaseProcessor
- Force rebuild of rolling
### 3.2.2 (2024-09-24)
- Clean up some Check options, add IRD option
- Add Check flags for protection scan extras
- Add comments around default options object
- Fix usings ordering in ItemHelper
- Add physical drive extensions to new tool
- Fix build for older .NET
- Move two extensions to a better location
- Fix XGD3 SS ranges
- Fix config location in OptionsLoader
- Fix some CLI issues
- Add more verbose requirement to CLI help
- Quote input paths if needed
- Allow separate mounted path for Linux
- Fix cleaning XGD3 SS
- Prepare Redumper for XGD support
- Hash DMI and PFI files for XGD in Redumper
- Support GD-ROM info for Redumper
- Futureproof GD-ROM LD in Redumper
- Make GD-ROM LD code nicer to read
- Rename 2 XGD helper methods
- Add bus encryption enabled method
- Move BEE method to better location
- Use new BEE method in code
- Include serial for UMD
- Remove GD-ROM version fallback method
- Preemptively update Redumper Saturn support
- Move MSXC parsing to PhysicalTool
- Fix minor inconsistencies
- Create some PlayStation helper methods
- Remove redundant drive calls
- Start preparing for better output file checks
- Create currently-unused helper class
- Make helper class more robust
- Add unused GetOutputFiles method
- Hook up GetOutputFiles in debug way
- Replace GetLogFilePaths with common code
- Move GetLogFilePaths to better location
- Replace GenerateArtifacts with common code
- Define new ArtifactKey field
- Add artifact keys for all relevant files
- Make GenerateArtifacts return a dictionary
- Split new output file methods
- Fix up some file path methods
- Fix broken build
- Rearrange some BaseProcessor methods
- Fix new AddToArchive methods
- Fix recursive issue in AddToArchive
- Add unused passable func to OutputFile
- Pass in new func for OutputFile
- Use new func in Redumper
- Add runtime error for improperly created artifacts
- Minor tweaks to existing code
- Add new, unused CheckAllOutputFilesExist variant
- Rename new method to CheckRequiredFiles
- Use simplified CheckAllOutputFilesExist
- Create and use RegexOutputFile
- Add archive override for RegexOutputFile
- Less confusing implementation of DatfileExists
- Forgot the other locations
- Add future XGD output files
- Add and use CustomOutputFile
- Fix access permissions of output file classes
- Handle XGD required files
- Forgot to assume directories don't exist
- Fix typo in publisher identifiers
- Ensure manufacturer files starting from 0 are zipped in redumper DVD (TurnedToast)
- Add _drive.txt file to GetOutputFiles for UmdImageCreator (TurnedToast)
### 3.2.1 (2024-08-05)
- Add nuget packing for processors and contexts

View File

@@ -1,4 +1,4 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<!-- Assembly Properties -->
@@ -10,7 +10,7 @@
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<VersionPrefix>3.2.1</VersionPrefix>
<VersionPrefix>3.2.3</VersionPrefix>
<!-- Package Properties -->
<Title>MPF CLI</Title>
@@ -36,6 +36,10 @@
<TargetFrameworks>net6.0;net7.0;net8.0</TargetFrameworks>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\MPF.Frontend\MPF.Frontend.csproj" />
</ItemGroup>
<!-- Support for old .NET versions -->
<ItemGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR $(TargetFramework.StartsWith(`net40`))">
<PackageReference Include="MinAsyncBridge" Version="0.12.4" />
@@ -44,14 +48,10 @@
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\MPF.Frontend\MPF.Frontend.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="BinaryObjectScanner" PrivateAssets="build; analyzers" ExcludeAssets="contentFiles" Version="3.1.13" GeneratePathProperty="true">
<PackageReference Include="BinaryObjectScanner" PrivateAssets="build; analyzers" ExcludeAssets="contentFiles" Version="3.1.16" GeneratePathProperty="true">
<IncludeAssets>runtime; compile; build; native; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.1" />
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.4" />
</ItemGroup>
<ItemGroup>

View File

@@ -1,7 +1,6 @@
using System;
using System.IO;
#if NET40
using System.Threading;
using System.Threading.Tasks;
#endif
using BinaryObjectScanner;
@@ -40,21 +39,30 @@ namespace MPF.CLI
return;
}
// Check for the minimum number of arguments
if (args.Length < 4)
{
DisplayHelp("Not enough arguments have been provided, exiting...");
return;
}
// Try processing the common arguments
(bool success, MediaType mediaType, RedumpSystem? knownSystem, var error) = OptionsLoader.ProcessCommonArguments(args);
bool success = OptionsLoader.ProcessCommonArguments(args, out MediaType mediaType, out RedumpSystem? knownSystem, out var error);
if (!success)
{
DisplayHelp(error);
return;
}
// Validate the supplied credentials
bool? validated = RedumpClient.ValidateCredentials(options.RedumpUsername ?? string.Empty, options.RedumpPassword ?? string.Empty).GetAwaiter().GetResult();
string message = validated switch
{
true => "Redump username and password accepted!",
false => "Redump username and password denied!",
null => "An error occurred validating your credentials!",
};
if (!string.IsNullOrEmpty(message))
Console.WriteLine(message);
// Process any custom parameters
int startIndex = 2;
CommandOptions opts = LoadFromArguments(args, options, ref startIndex);
// Validate the internal program
switch (options.InternalProgram)
{
@@ -87,22 +95,8 @@ namespace MPF.CLI
break;
}
// Make new Progress objects
var resultProgress = new Progress<ResultEventArgs>();
resultProgress.ProgressChanged += ConsoleLogger.ProgressUpdated;
var protectionProgress = new Progress<ProtectionProgress>();
protectionProgress.ProgressChanged += ConsoleLogger.ProgressUpdated;
// Validate the supplied credentials
(bool? _, string? message) = RedumpClient.ValidateCredentials(options.RedumpUsername ?? string.Empty, options.RedumpPassword ?? string.Empty).GetAwaiter().GetResult();
if (!string.IsNullOrEmpty(message))
Console.WriteLine(message);
// Process any custom parameters
(CommandOptions opts, int startIndex) = LoadFromArguments(args, options, startIndex: 2);
// Ensure we have the values we need
if (opts.CustomParams == null && (opts.DevicePath == null || opts.DevicePath == null))
if (opts.CustomParams == null && (opts.DevicePath == null || opts.FilePath == null))
{
DisplayHelp("Both a device path and file path need to be supplied, exiting...");
return;
@@ -113,7 +107,13 @@ namespace MPF.CLI
// Populate an environment
var drive = Drive.Create(null, opts.DevicePath ?? string.Empty);
var env = new DumpEnvironment(options, opts.FilePath, drive, knownSystem, mediaType, options.InternalProgram, parameters: null);
var env = new DumpEnvironment(options,
opts.FilePath,
drive,
knownSystem,
mediaType,
options.InternalProgram,
parameters: null);
// Process the parameters
string? paramStr = opts.CustomParams ?? env.GetFullParameters(speed);
@@ -124,6 +124,12 @@ namespace MPF.CLI
}
env.SetExecutionContext(paramStr);
// Make new Progress objects
var resultProgress = new Progress<ResultEventArgs>();
resultProgress.ProgressChanged += ConsoleLogger.ProgressUpdated;
var protectionProgress = new Progress<ProtectionProgress>();
protectionProgress.ProgressChanged += ConsoleLogger.ProgressUpdated;
// Invoke the dumping program
Console.WriteLine($"Invoking {options.InternalProgram} using '{paramStr}'");
var dumpResult = env.Run(resultProgress).GetAwaiter().GetResult();
@@ -138,6 +144,19 @@ namespace MPF.CLI
return;
}
// If we have a mounted path, replace the environment
if (opts.MountedPath != null && Directory.Exists(opts.MountedPath))
{
drive = Drive.Create(null, opts.MountedPath);
env = new DumpEnvironment(options,
opts.FilePath,
drive,
knownSystem,
mediaType,
internalProgram: null,
parameters: null);
}
// Finally, attempt to do the output dance
#if NET40
var verifyTask = env.VerifyAndSaveDumpOutput(resultProgress, protectionProgress);
@@ -172,31 +191,40 @@ namespace MPF.CLI
Console.WriteLine("CLI Options:");
Console.WriteLine("-u, --use <program> Override default dumping program");
Console.WriteLine("-d, --device <devicepath> Physical drive path (Required if no custom parameters set)");
Console.WriteLine("-m, --mounted <dirpath> Mounted filesystem path for additional checks");
Console.WriteLine("-f, --file \"<filepath>\" Output file path (Required if no custom parameters set)");
Console.WriteLine("-s, --speed <speed> Override default dumping speed");
Console.WriteLine("-c, --custom \"<params>\" Custom parameters to use");
Console.WriteLine();
Console.WriteLine("Custom parameters, if used, will fully replace the default parameters.");
Console.WriteLine("All parameters need to be supplied if doing this.");
Console.WriteLine("Custom dumping parameters, if used, will fully replace the default parameters.");
Console.WriteLine("All dumping parameters need to be supplied if doing this.");
Console.WriteLine("Otherwise, both a drive path and output file path are required.");
Console.WriteLine();
Console.WriteLine("Mounted filesystem path is only recommended on OSes that require block");
Console.WriteLine("device dumping, usually Linux and macOS.");
Console.WriteLine();
}
/// <summary>
/// Load the current set of options from application arguments
/// </summary>
private static (CommandOptions, int) LoadFromArguments(string[] args, Frontend.Options options, int startIndex = 0)
private static CommandOptions LoadFromArguments(string[] args, Frontend.Options options, ref int startIndex)
{
// Create return values
var opts = new CommandOptions();
// If we have no arguments, just return
if (args == null || args.Length == 0)
return (opts, 0);
{
startIndex = 0;
return opts;
}
// If we have an invalid start index, just return
if (startIndex < 0 || startIndex >= args.Length)
return (opts, startIndex);
return opts;
// Loop through the arguments and parse out values
for (; startIndex < args.Length; startIndex++)
@@ -225,6 +253,17 @@ namespace MPF.CLI
startIndex++;
}
// Use a mounted path for physical checks
else if (args[startIndex].StartsWith("-m=") || args[startIndex].StartsWith("--mounted="))
{
opts.MountedPath = args[startIndex].Split('=')[1];
}
else if (args[startIndex] == "-m" || args[startIndex] == "--mounted")
{
opts.MountedPath = args[startIndex + 1];
startIndex++;
}
// Use a file path
else if (args[startIndex].StartsWith("-f=") || args[startIndex].StartsWith("--file="))
{
@@ -271,7 +310,7 @@ namespace MPF.CLI
}
}
return (opts, startIndex);
return opts;
}
/// <summary>
@@ -285,6 +324,12 @@ namespace MPF.CLI
/// <remarks>Required if custom parameters are not set</remarks>
public string? DevicePath { get; set; } = null;
/// <summary>
/// Path to the mounted filesystem to check
/// </summary>
/// <remarks>Should only be used when the device path is not readable</remarks>
public string? MountedPath { get; set; } = null;
/// <summary>
/// Path to the output file
/// </summary>

View File

@@ -1,4 +1,4 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<!-- Assembly Properties -->
@@ -10,7 +10,7 @@
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<VersionPrefix>3.2.1</VersionPrefix>
<VersionPrefix>3.2.3</VersionPrefix>
<!-- Package Properties -->
<Title>MPF Check</Title>
@@ -40,6 +40,10 @@
<None Include="App.config" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\MPF.Frontend\MPF.Frontend.csproj" />
</ItemGroup>
<!-- Support for old .NET versions -->
<ItemGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR $(TargetFramework.StartsWith(`net40`))">
<PackageReference Include="MinAsyncBridge" Version="0.12.4" />
@@ -48,14 +52,10 @@
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\MPF.Frontend\MPF.Frontend.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="BinaryObjectScanner" PrivateAssets="build; analyzers" ExcludeAssets="contentFiles" Version="3.1.13" GeneratePathProperty="true">
<PackageReference Include="BinaryObjectScanner" PrivateAssets="build; analyzers" ExcludeAssets="contentFiles" Version="3.1.16" GeneratePathProperty="true">
<IncludeAssets>runtime; compile; build; native; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.1" />
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.4" />
</ItemGroup>
<ItemGroup>

View File

@@ -20,14 +20,29 @@ namespace MPF.Check
// Create a default options object
var options = new Frontend.Options()
{
RedumpUsername = null,
RedumpPassword = null,
// Internal Program
InternalProgram = InternalProgram.NONE,
// Extra Dumping Options
ScanForProtection = false,
AddPlaceholders = true,
PullAllInformation = false,
AddFilenameSuffix = false,
OutputSubmissionJSON = false,
IncludeArtifacts = false,
CompressLogFiles = false,
DeleteUnnecessaryFiles = false,
CreateIRDAfterDumping = false,
// Protection Scanning Options
ScanArchivesForProtection = true,
ScanPackersForProtection = false,
IncludeDebugProtectionInformation = false,
HideDriveLetters = false,
// Redump Login Information
RedumpUsername = null,
RedumpPassword = null,
};
// Try processing the standalone arguments
@@ -40,7 +55,7 @@ namespace MPF.Check
}
// Try processing the common arguments
(bool success, MediaType mediaType, RedumpSystem? knownSystem, var error) = OptionsLoader.ProcessCommonArguments(args);
bool success = OptionsLoader.ProcessCommonArguments(args, out MediaType mediaType, out RedumpSystem? knownSystem, out var error);
if (!success)
{
DisplayHelp(error);
@@ -48,7 +63,8 @@ namespace MPF.Check
}
// Loop through and process options
(CommandOptions opts, int startIndex) = LoadFromArguments(args, options, startIndex: 2);
int startIndex = 2;
CommandOptions opts = LoadFromArguments(args, options, ref startIndex);
if (options.InternalProgram == InternalProgram.NONE)
{
DisplayHelp("A program name needs to be provided");
@@ -62,7 +78,14 @@ namespace MPF.Check
protectionProgress.ProgressChanged += ConsoleLogger.ProgressUpdated;
// Validate the supplied credentials
(bool? _, string? message) = RedumpClient.ValidateCredentials(options.RedumpUsername ?? string.Empty, options.RedumpPassword ?? string.Empty).GetAwaiter().GetResult();
bool? validated = RedumpClient.ValidateCredentials(options.RedumpUsername ?? string.Empty, options.RedumpPassword ?? string.Empty).GetAwaiter().GetResult();
string message = validated switch
{
true => "Redump username and password accepted!",
false => "Redump username and password denied!",
null => "An error occurred validating your credentials!",
};
if (!string.IsNullOrEmpty(message))
Console.WriteLine(message);
@@ -114,12 +137,17 @@ namespace MPF.Check
Console.WriteLine("Check Options:");
Console.WriteLine("-u, --use <program> Dumping program output type [REQUIRED]");
Console.WriteLine(" --load-seed <path> Load a seed submission JSON for user information");
Console.WriteLine(" --no-placeholders Disable placeholder values in submission info");
Console.WriteLine(" --create-ird Create IRD from output files (PS3 only)");
Console.WriteLine("-c, --credentials <user> <pw> Redump username and password");
Console.WriteLine(" --pull-all Pull all information from Redump (requires --credentials)");
Console.WriteLine("-p, --path <drivepath> Physical drive path for additional checks");
Console.WriteLine("-s, --scan Enable copy protection scan (requires --path)");
Console.WriteLine(" --disable-archives Disable scanning archives (requires --scan)");
Console.WriteLine(" --enable-packers Enable scanning for packers (requires --scan)");
Console.WriteLine(" --enable-debug Enable debug protection information (requires --scan)");
Console.WriteLine(" --hide-drive-letters Hide drive letters from scan output (requires --scan)");
Console.WriteLine("-l, --load-seed <path> Load a seed submission JSON for user information");
Console.WriteLine("-x, --suffix Enable adding filename suffix");
Console.WriteLine("-j, --json Enable submission JSON output");
Console.WriteLine(" --include-artifacts Include artifacts in JSON (requires --json)");
@@ -131,21 +159,28 @@ namespace MPF.Check
/// <summary>
/// Load the current set of options from application arguments
/// </summary>
private static (CommandOptions, int) LoadFromArguments(string[] args, Frontend.Options options, int startIndex = 0)
private static CommandOptions LoadFromArguments(string[] args, Frontend.Options options, ref int startIndex)
{
// Create return values
var opts = new CommandOptions();
// These values require multiple parts to be active
bool scan = false, hideDriveLetters = false;
bool scan = false,
enableArchives = true,
enablePackers = false,
enableDebug = false,
hideDriveLetters = false;
// If we have no arguments, just return
if (args == null || args.Length == 0)
return (opts, 0);
{
startIndex = 0;
return opts;
}
// If we have an invalid start index, just return
if (startIndex < 0 || startIndex >= args.Length)
return (opts, startIndex);
return opts;
// Loop through the arguments and parse out values
for (; startIndex < args.Length; startIndex++)
@@ -163,6 +198,31 @@ namespace MPF.Check
startIndex++;
}
// Include seed info file
else if (args[startIndex].StartsWith("--load-seed="))
{
string seedInfo = args[startIndex].Split('=')[1];
opts.Seed = Builder.CreateFromFile(seedInfo);
}
else if (args[startIndex] == "--load-seed")
{
string seedInfo = args[startIndex + 1];
opts.Seed = Builder.CreateFromFile(seedInfo);
startIndex++;
}
// Disable placeholder values in submission info
else if (args[startIndex].Equals("--no-placeholders"))
{
options.AddPlaceholders = false;
}
// Create IRD from output files (PS3 only)
else if (args[startIndex].Equals("--create-ird"))
{
options.CreateIRDAfterDumping = true;
}
// Redump login
else if (args[startIndex].StartsWith("-c=") || args[startIndex].StartsWith("--credentials="))
{
@@ -200,25 +260,30 @@ namespace MPF.Check
scan = true;
}
// Disable scanning archives (requires --scan)
else if (args[startIndex].Equals("--disable-archives"))
{
enableArchives = false;
}
// Enable scanning for packers (requires --scan)
else if (args[startIndex].Equals("--enable-packers"))
{
enablePackers = true;
}
// Enable debug protection information (requires --scan)
else if (args[startIndex].Equals("--enable-debug"))
{
enableDebug = true;
}
// Hide drive letters from scan output (requires --scan)
else if (args[startIndex].Equals("--hide-drive-letters"))
{
hideDriveLetters = true;
}
// Include seed info file
else if (args[startIndex].StartsWith("-l=") || args[startIndex].StartsWith("--load-seed="))
{
string seedInfo = args[startIndex].Split('=')[1];
opts.Seed = Builder.CreateFromFile(seedInfo);
}
else if (args[startIndex] == "-l" || args[startIndex] == "--load-seed")
{
string seedInfo = args[startIndex + 1];
opts.Seed = Builder.CreateFromFile(seedInfo);
startIndex++;
}
// Add filename suffix
else if (args[startIndex].Equals("-x") || args[startIndex].Equals("--suffix"))
{
@@ -258,9 +323,12 @@ namespace MPF.Check
// Now deal with the complex options
options.ScanForProtection = scan && !string.IsNullOrEmpty(opts.DevicePath);
options.ScanArchivesForProtection = enableArchives && scan && !string.IsNullOrEmpty(opts.DevicePath);
options.ScanPackersForProtection = enablePackers && scan && !string.IsNullOrEmpty(opts.DevicePath);
options.IncludeDebugProtectionInformation = enableDebug && scan && !string.IsNullOrEmpty(opts.DevicePath);
options.HideDriveLetters = hideDriveLetters && scan && !string.IsNullOrEmpty(opts.DevicePath);
return (opts, startIndex);
return opts;
}
/// <summary>

View File

@@ -14,10 +14,10 @@ namespace MPF.ExecutionContexts.Aaru
#region Generic Dumping Information
/// <inheritdoc/>
public override string? InputPath => InputValue;
public override string? InputPath => InputValue?.Trim('"');
/// <inheritdoc/>
public override string? OutputPath => OutputValue;
public override string? OutputPath => OutputValue?.Trim('"');
/// <inheritdoc/>
public override int? Speed
@@ -1082,7 +1082,11 @@ namespace MPF.ExecutionContexts.Aaru
if (string.IsNullOrEmpty(InputValue))
return null;
parameters.Add(InputValue!.TrimEnd('\\'));
if (InputValue.Contains(' '))
parameters.Add($"\"{InputValue!.TrimEnd('\\')}\"");
else
parameters.Add(InputValue!.TrimEnd('\\'));
break;
// Two input values

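The quoting change above, mirrored later in the DiscImageCreator and Redumper contexts, follows a single pattern: wrap a path in double quotes only when it contains a space before adding it to the argument list, and trim the quotes back off when the stored value is read through InputPath/OutputPath. Below is a minimal, self-contained sketch of that pattern; QuoteIfNeeded is a hypothetical helper used for illustration, not an MPF API.

```csharp
// Sketch of the quote-if-needed behavior; QuoteIfNeeded is illustrative only.
using System;
using System.Collections.Generic;

static class QuotingDemo
{
    // Wrap the value in quotes only when it contains a space.
    static string QuoteIfNeeded(string path) =>
        path.Contains(' ') ? $"\"{path}\"" : path;

    static void Main()
    {
        var parameters = new List<string>();
        string inputValue = @"D:\My Dumps\disc";

        // Building the argument list, as in the context diff above.
        parameters.Add(QuoteIfNeeded(inputValue.TrimEnd('\\')));
        Console.WriteLine(string.Join(" ", parameters)); // "D:\My Dumps\disc"

        // Reading the value back strips any surrounding quotes,
        // mirroring InputPath => InputValue?.Trim('"').
        Console.WriteLine(parameters[0].Trim('"'));       // D:\My Dumps\disc
    }
}
```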
View File

@@ -332,7 +332,7 @@ namespace MPF.ExecutionContexts
/// <returns>True if it's a valid byte, false otherwise</returns>
protected static bool IsValidInt8(string parameter, sbyte lowerBound = -1, sbyte upperBound = -1)
{
(string value, long _) = ExtractFactorFromValue(parameter);
string value = ExtractFactorFromValue(parameter, out _);
if (!sbyte.TryParse(value, out sbyte temp))
return false;
else if (lowerBound != -1 && temp < lowerBound)
@@ -352,7 +352,7 @@ namespace MPF.ExecutionContexts
/// <returns>True if it's a valid Int16, false otherwise</returns>
protected static bool IsValidInt16(string parameter, short lowerBound = -1, short upperBound = -1)
{
(string value, long _) = ExtractFactorFromValue(parameter);
string value = ExtractFactorFromValue(parameter, out _);
if (!short.TryParse(value, out short temp))
return false;
else if (lowerBound != -1 && temp < lowerBound)
@@ -372,7 +372,7 @@ namespace MPF.ExecutionContexts
/// <returns>True if it's a valid Int32, false otherwise</returns>
protected static bool IsValidInt32(string parameter, int lowerBound = -1, int upperBound = -1)
{
(string value, long _) = ExtractFactorFromValue(parameter);
string value = ExtractFactorFromValue(parameter, out _);
if (!int.TryParse(value, out int temp))
return false;
else if (lowerBound != -1 && temp < lowerBound)
@@ -392,7 +392,7 @@ namespace MPF.ExecutionContexts
/// <returns>True if it's a valid Int64, false otherwise</returns>
protected static bool IsValidInt64(string parameter, long lowerBound = -1, long upperBound = -1)
{
(string value, long _) = ExtractFactorFromValue(parameter);
string value = ExtractFactorFromValue(parameter, out _);
if (!long.TryParse(value, out long temp))
return false;
else if (lowerBound != -1 && temp < lowerBound)
@@ -568,7 +568,7 @@ namespace MPF.ExecutionContexts
this[longFlagString] = true;
i++;
(string value, long factor) = ExtractFactorFromValue(parts[i]);
string value = ExtractFactorFromValue(parts[i], out long factor);
if (sbyte.TryParse(value, out sbyte sByteValue))
return (sbyte)(sByteValue * factor);
string hexValue = RemoveHexIdentifier(value);
@@ -588,7 +588,7 @@ namespace MPF.ExecutionContexts
string valuePart = commandParts[1];
this[longFlagString] = true;
(string value, long factor) = ExtractFactorFromValue(valuePart);
string value = ExtractFactorFromValue(valuePart, out long factor);
if (sbyte.TryParse(value, out sbyte sByteValue))
return (sbyte)(sByteValue * factor);
string hexValue = RemoveHexIdentifier(value);
@@ -655,7 +655,7 @@ namespace MPF.ExecutionContexts
this[longFlagString] = true;
i++;
(string value, long factor) = ExtractFactorFromValue(parts[i]);
string value = ExtractFactorFromValue(parts[i], out long factor);
if (short.TryParse(value, out short shortValue))
return (short)(shortValue * factor);
string hexValue = RemoveHexIdentifier(value);
@@ -675,7 +675,7 @@ namespace MPF.ExecutionContexts
string valuePart = commandParts[1];
this[longFlagString] = true;
(string value, long factor) = ExtractFactorFromValue(valuePart);
string value = ExtractFactorFromValue(valuePart, out long factor);
if (short.TryParse(value, out short shortValue))
return (short)(shortValue * factor);
string hexValue = RemoveHexIdentifier(value);
@@ -742,7 +742,7 @@ namespace MPF.ExecutionContexts
this[longFlagString] = true;
i++;
(string value, long factor) = ExtractFactorFromValue(parts[i]);
string value = ExtractFactorFromValue(parts[i], out long factor);
if (int.TryParse(value, out int intValue))
return (int)(intValue * factor);
string hexValue = RemoveHexIdentifier(value);
@@ -762,7 +762,7 @@ namespace MPF.ExecutionContexts
string valuePart = commandParts[1];
this[longFlagString] = true;
(string value, long factor) = ExtractFactorFromValue(valuePart);
string value = ExtractFactorFromValue(valuePart, out long factor);
if (int.TryParse(value, out int intValue))
return (int)(intValue * factor);
string hexValue = RemoveHexIdentifier(value);
@@ -829,7 +829,7 @@ namespace MPF.ExecutionContexts
this[longFlagString] = true;
i++;
(string value, long factor) = ExtractFactorFromValue(parts[i]);
string value = ExtractFactorFromValue(parts[i], out long factor);
if (long.TryParse(value, out long longValue))
return (long)(longValue * factor);
string hexValue = RemoveHexIdentifier(value);
@@ -849,7 +849,7 @@ namespace MPF.ExecutionContexts
string valuePart = commandParts[1];
this[longFlagString] = true;
(string value, long factor) = ExtractFactorFromValue(valuePart);
string value = ExtractFactorFromValue(valuePart, out long factor);
if (long.TryParse(value, out long longValue))
return (long)(longValue * factor);
string hexValue = RemoveHexIdentifier(value);
@@ -990,7 +990,7 @@ namespace MPF.ExecutionContexts
this[longFlagString] = true;
i++;
(string value, long factor) = ExtractFactorFromValue(parts[i]);
string value = ExtractFactorFromValue(parts[i], out long factor);
if (byte.TryParse(value, out byte byteValue))
return (byte)(byteValue * factor);
string hexValue = RemoveHexIdentifier(value);
@@ -1010,7 +1010,7 @@ namespace MPF.ExecutionContexts
string valuePart = commandParts[1];
this[longFlagString] = true;
(string value, long factor) = ExtractFactorFromValue(valuePart);
string value = ExtractFactorFromValue(valuePart, out long factor);
if (byte.TryParse(value, out byte byteValue))
return (byte)(byteValue * factor);
string hexValue = RemoveHexIdentifier(value);
@@ -1027,10 +1027,10 @@ namespace MPF.ExecutionContexts
/// </summary>
/// <param name="value">String value to treat as suffixed number</param>
/// <returns>Trimmed value and multiplication factor</returns>
private static (string trimmed, long factor) ExtractFactorFromValue(string value)
private static string ExtractFactorFromValue(string value, out long factor)
{
value = value.Trim('"');
long factor = 1;
factor = 1;
// Characters
if (value.EndsWith("c", StringComparison.Ordinal))
@@ -1081,7 +1081,7 @@ namespace MPF.ExecutionContexts
value = value.TrimEnd('G');
}
return (value, factor);
return value;
}
/// <summary>

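The repeated edits above are one mechanical refactor: ExtractFactorFromValue now returns only the trimmed string and reports the multiplier through an out parameter instead of a ValueTuple, in line with the changelog's removal of tuple returns. A stand-alone sketch of the new call shape follows; the suffix handling here (treating a trailing "k" as 1024) is an assumption for illustration only, not the project's actual factor table.

```csharp
// Illustrative stand-in for the tuple-to-out refactor; not the MPF implementation.
using System;

static class FactorDemo
{
    // New shape: return the trimmed value, report the multiplier via `out`.
    static string ExtractFactorFromValue(string value, out long factor)
    {
        value = value.Trim('"');
        factor = 1;
        if (value.EndsWith("k", StringComparison.OrdinalIgnoreCase))
        {
            factor = 1024; // assumed multiplier, for illustration only
            value = value.Substring(0, value.Length - 1);
        }
        return value;
    }

    static void Main()
    {
        // Callers parse the trimmed value, then apply the factor,
        // matching the pattern used in the integer helpers above.
        string value = ExtractFactorFromValue("64k", out long factor);
        if (long.TryParse(value, out long parsed))
            Console.WriteLine(parsed * factor); // 65536 under the assumed table
    }
}
```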
View File

@@ -15,10 +15,10 @@ namespace MPF.ExecutionContexts.DiscImageCreator
#region Generic Dumping Information
/// <inheritdoc/>
public override string? InputPath => DrivePath;
public override string? InputPath => DrivePath?.Trim('"');
/// <inheritdoc/>
public override string? OutputPath => Filename;
public override string? OutputPath => Filename?.Trim('"');
/// <inheritdoc/>
/// <inheritdoc/>
@@ -204,6 +204,7 @@ namespace MPF.ExecutionContexts.DiscImageCreator
FlagStrings.DisableBeep,
FlagStrings.DVDReread,
FlagStrings.ForceUnitAccess,
FlagStrings.Range,
FlagStrings.UseAnchorVolumeDescriptorPointer,
],
@@ -232,6 +233,7 @@ namespace MPF.ExecutionContexts.DiscImageCreator
FlagStrings.ScanSectorProtect,
FlagStrings.SeventyFour,
FlagStrings.SubchannelReadLevel,
FlagStrings.TryReadingPregap,
FlagStrings.VideoNow,
FlagStrings.VideoNowColor,
FlagStrings.VideoNowXP,
@@ -340,6 +342,7 @@ namespace MPF.ExecutionContexts.DiscImageCreator
FlagStrings.ScanSectorProtect,
FlagStrings.SeventyFour,
FlagStrings.SubchannelReadLevel,
FlagStrings.TryReadingPregap,
FlagStrings.VideoNow,
FlagStrings.VideoNowColor,
FlagStrings.VideoNowXP,
@@ -419,9 +422,16 @@ namespace MPF.ExecutionContexts.DiscImageCreator
|| BaseCommand == CommandStrings.XGD3Swap)
{
if (DrivePath != null)
parameters.Add(DrivePath);
{
if (DrivePath.Contains(' '))
parameters.Add($"\"{DrivePath}\"");
else
parameters.Add(DrivePath);
}
else
{
return null;
}
}
// Filename
@@ -845,6 +855,13 @@ namespace MPF.ExecutionContexts.DiscImageCreator
parameters.Add(FlagStrings.Tages);
}
// Try Reading Pregap
if (IsFlagSupported(FlagStrings.TryReadingPregap))
{
if (this[FlagStrings.TryReadingPregap] == true)
parameters.Add(FlagStrings.TryReadingPregap);
}
// Use Anchor Volume Descriptor Pointer
if (IsFlagSupported(FlagStrings.UseAnchorVolumeDescriptorPointer))
{

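The new /trp flag above follows the same pattern as the context's other flags: a constant in FlagStrings, registration in the per-command support lists, and conditional emission into the argument list when the flag is set. A simplified, self-contained sketch of that flow; the dictionary here is a stand-in for the context's flag storage, not the real class.

```csharp
// Simplified stand-in for how a supported, enabled flag reaches the parameter list.
using System;
using System.Collections.Generic;

static class PregapFlagDemo
{
    const string TryReadingPregap = "/trp"; // matches FlagStrings.TryReadingPregap

    static void Main()
    {
        // Stand-in for the context's flag storage (this[FlagStrings.TryReadingPregap]).
        var flags = new Dictionary<string, bool?> { [TryReadingPregap] = true };
        var parameters = new List<string>();

        // Emit the flag only when it is explicitly enabled, as in the diff above.
        if (flags.TryGetValue(TryReadingPregap, out bool? enabled) && enabled == true)
            parameters.Add(TryReadingPregap);

        Console.WriteLine(string.Join(" ", parameters)); // /trp
    }
}
```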
View File

@@ -37,6 +37,7 @@ namespace MPF.ExecutionContexts.DiscImageCreator
public const string SkipSector = "/sk";
public const string SubchannelReadLevel = "/s";
public const string Tages = "/t";
public const string TryReadingPregap = "/trp";
public const string UseAnchorVolumeDescriptorPointer = "/avdp";
public const string VideoNow = "/vn";
public const string VideoNowColor = "/vnc";

View File

@@ -9,7 +9,7 @@
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<VersionPrefix>3.2.1</VersionPrefix>
<VersionPrefix>3.2.3</VersionPrefix>
<WarningsNotAsErrors>NU5104</WarningsNotAsErrors>
<!-- Package Properties -->
@@ -45,15 +45,9 @@
<PackageReference Include="MinTasksExtensionsBridge" Version="0.3.4" />
<PackageReference Include="MinThreadingBridge" Version="0.11.4" />
</ItemGroup>
<ItemGroup Condition="$(TargetFramework.StartsWith(`net4`)) AND !$(TargetFramework.StartsWith(`net40`))">
<PackageReference Include="IndexRange" Version="1.0.3" />
</ItemGroup>
<ItemGroup Condition="!$(TargetFramework.StartsWith(`net2`)) AND !$(TargetFramework.StartsWith(`net3`)) AND !$(TargetFramework.StartsWith(`net40`)) AND !$(TargetFramework.StartsWith(`net452`))">
<PackageReference Include="System.Runtime.CompilerServices.Unsafe" Version="6.0.0" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.1" />
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.4" />
</ItemGroup>
</Project>

View File

@@ -15,10 +15,13 @@ namespace MPF.ExecutionContexts.Redumper
#region Generic Dumping Information
/// <inheritdoc/>
public override string? InputPath => DriveValue;
public override string? InputPath => DriveValue?.Trim('"');
/// <inheritdoc/>
public override string? OutputPath => Path.Combine(ImagePathValue?.Trim('"') ?? string.Empty, ImageNameValue?.Trim('"') ?? string.Empty) + GetDefaultExtension(this.Type);
public override string? OutputPath => Path.Combine(
ImagePathValue?.Trim('"') ?? string.Empty,
ImageNameValue?.Trim('"') ?? string.Empty)
+ GetDefaultExtension(this.Type);
/// <inheritdoc/>
public override int? Speed => SpeedValue;
@@ -264,7 +267,12 @@ namespace MPF.ExecutionContexts.Redumper
if (this[FlagStrings.Drive] == true)
{
if (DriveValue != null)
parameters.Add($"{FlagStrings.Drive}={DriveValue}");
{
if (DriveValue.Contains(' '))
parameters.Add($"{FlagStrings.Drive}=\"{DriveValue}\"");
else
parameters.Add($"{FlagStrings.Drive}={DriveValue}");
}
}
// Speed

View File

@@ -1,17 +1,10 @@
using System;
using System.Collections.Generic;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
#if NET462_OR_GREATER || NETCOREAPP
using Microsoft.Management.Infrastructure;
using Microsoft.Management.Infrastructure.Generic;
#endif
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using MPF.Processors;
using SabreTools.IO;
using SabreTools.RedumpLib.Data;
namespace MPF.Frontend
@@ -150,19 +143,19 @@ namespace MPF.Frontend
/// <summary>
/// Get the current media type from drive letter
/// </summary>
/// <param name="system"></param>
/// <returns></returns>
public (MediaType?, string?) GetMediaType(RedumpSystem? system)
/// <param name="system">Currently selected system</param>
/// <returns>The detected media type, if possible</returns>
public MediaType? GetMediaType(RedumpSystem? system)
{
// Take care of the non-optical stuff first
switch (InternalDriveType)
{
case Frontend.InternalDriveType.Floppy:
return (MediaType.FloppyDisk, null);
return MediaType.FloppyDisk;
case Frontend.InternalDriveType.HardDisk:
return (MediaType.HardDisk, null);
return MediaType.HardDisk;
case Frontend.InternalDriveType.Removable:
return (MediaType.FlashDrive, null);
return MediaType.FlashDrive;
}
// Some systems should default to certain media types
@@ -175,18 +168,18 @@ namespace MPF.Frontend
case RedumpSystem.SegaSaturn:
case RedumpSystem.SonyPlayStation:
case RedumpSystem.VideoCD:
return (MediaType.CDROM, null);
return MediaType.CDROM;
// DVD
case RedumpSystem.DVDAudio:
case RedumpSystem.DVDVideo:
case RedumpSystem.MicrosoftXbox:
case RedumpSystem.MicrosoftXbox360:
return (MediaType.DVD, null);
return MediaType.DVD;
// HD-DVD
case RedumpSystem.HDDVDVideo:
return (MediaType.HDDVD, null);
return MediaType.HDDVD;
// Blu-ray
case RedumpSystem.BDVideo:
@@ -195,34 +188,34 @@ namespace MPF.Frontend
case RedumpSystem.SonyPlayStation3:
case RedumpSystem.SonyPlayStation4:
case RedumpSystem.SonyPlayStation5:
return (MediaType.BluRay, null);
return MediaType.BluRay;
// GameCube
case RedumpSystem.NintendoGameCube:
return (MediaType.NintendoGameCubeGameDisc, null);
return MediaType.NintendoGameCubeGameDisc;
// Wii
case RedumpSystem.NintendoWii:
return (MediaType.NintendoWiiOpticalDisc, null);
return MediaType.NintendoWiiOpticalDisc;
// WiiU
case RedumpSystem.NintendoWiiU:
return (MediaType.NintendoWiiUOpticalDisc, null);
return MediaType.NintendoWiiUOpticalDisc;
// PSP
case RedumpSystem.SonyPlayStationPortable:
return (MediaType.UMD, null);
return MediaType.UMD;
}
// Handle optical media by size and filesystem
if (TotalSize >= 0 && TotalSize <= 800_000_000 && (DriveFormat == "CDFS" || DriveFormat == "UDF"))
return (MediaType.CDROM, null);
return MediaType.CDROM;
else if (TotalSize > 800_000_000 && TotalSize <= 8_540_000_000 && (DriveFormat == "CDFS" || DriveFormat == "UDF"))
return (MediaType.DVD, null);
return MediaType.DVD;
else if (TotalSize > 8_540_000_000)
return (MediaType.BluRay, null);
return MediaType.BluRay;
return (null, "Could not determine media type!");
return null;
}
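GetMediaType now returns a bare MediaType? instead of a (MediaType?, string?) tuple, so callers only null-check the result. A hedged usage sketch; the `drive` instance and console output are illustrative, not taken from MPF:

// Sketch only: consume the simplified return value.
MediaType? detected = drive.GetMediaType(RedumpSystem.SonyPlayStation);
if (detected == null)
    Console.WriteLine("Could not determine media type!");
else
    Console.WriteLine($"Detected media type: {detected}");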
/// <summary>
@@ -236,463 +229,6 @@ namespace MPF.Frontend
#endregion
#region Information Extraction
/// <summary>
/// Get the EXE name from a PlayStation disc, if possible
/// </summary>
/// <returns>Executable name on success, null otherwise</returns>
public string? GetPlayStationExecutableName()
{
// If there's no drive path, we can't get exe name
if (string.IsNullOrEmpty(Name))
return null;
// If the folder no longer exists, we can't get exe name
if (!Directory.Exists(Name))
return null;
// Get the two paths that we will need to check
string psxExePath = Path.Combine(Name, "PSX.EXE");
string systemCnfPath = Path.Combine(Name, "SYSTEM.CNF");
// Read the CNF file as an INI file
var systemCnf = new IniFile(systemCnfPath);
string? bootValue = string.Empty;
// PlayStation uses "BOOT" as the key
if (systemCnf.ContainsKey("BOOT"))
bootValue = systemCnf["BOOT"];
// PlayStation 2 uses "BOOT2" as the key
if (systemCnf.ContainsKey("BOOT2"))
bootValue = systemCnf["BOOT2"];
// If we had any boot value, parse it and get the executable name
if (!string.IsNullOrEmpty(bootValue))
{
var match = Regex.Match(bootValue, @"cdrom.?:\\?(.*)", RegexOptions.Compiled);
if (match.Groups.Count > 1)
{
string? serial = match.Groups[1].Value;
// Some games may have the EXE in a subfolder
serial = Path.GetFileName(serial);
return serial;
}
}
// If the SYSTEM.CNF value can't be found, try PSX.EXE
if (File.Exists(psxExePath))
return "PSX.EXE";
// If neither can be found, we return null
return null;
}
/// <summary>
/// Get the EXE date from a PlayStation disc, if possible
/// </summary>
/// <param name="serial">Internal disc serial, if possible</param>
/// <param name="region">Output region, if possible</param>
/// <param name="date">Output EXE date in "yyyy-mm-dd" format if possible, null on error</param>
/// <returns>True if information could be determined, false otherwise</returns>
public bool GetPlayStationExecutableInfo(out string? serial, out Region? region, out string? date)
{
serial = null; region = null; date = null;
// If there's no drive path, we can't do this part
if (string.IsNullOrEmpty(Name))
return false;
// If the folder no longer exists, we can't do this part
if (!Directory.Exists(Name))
return false;
// Get the executable name
string? exeName = GetPlayStationExecutableName();
// If no executable found, we can't do this part
if (exeName == null)
return false;
// EXE name may have a trailing `;` after
// EXE name should always be in all caps
exeName = exeName
.Split(';')[0]
.ToUpperInvariant();
// Serial is most of the EXE name normalized
serial = exeName
.Replace('_', '-')
.Replace(".", string.Empty);
// Get the region, if possible
region = ProcessingTool.GetPlayStationRegion(exeName);
// Now that we have the EXE name, try to get the fileinfo for it
string exePath = Path.Combine(Name, exeName);
if (!File.Exists(exePath))
return false;
// Fix the Y2K timestamp issue
var fi = new FileInfo(exePath);
var dt = new DateTime(fi.LastWriteTimeUtc.Year >= 1900 && fi.LastWriteTimeUtc.Year < 1920 ? 2000 + fi.LastWriteTimeUtc.Year % 100 : fi.LastWriteTimeUtc.Year,
fi.LastWriteTimeUtc.Month, fi.LastWriteTimeUtc.Day);
date = dt.ToString("yyyy-MM-dd");
return true;
}
/// <summary>
/// Get the version from a PlayStation 2 disc, if possible
/// </summary>
/// <returns>Game version if possible, null on error</returns>
public string? GetPlayStation2Version()
{
// If there's no drive path, we can't do this part
if (string.IsNullOrEmpty(Name))
return null;
// If the folder no longer exists, we can't do this part
if (!Directory.Exists(Name))
return null;
// Get the SYSTEM.CNF path to check
string systemCnfPath = Path.Combine(Name, "SYSTEM.CNF");
// Try to parse the SYSTEM.CNF file
var systemCnf = new IniFile(systemCnfPath);
if (systemCnf.ContainsKey("VER"))
return systemCnf["VER"];
// If "VER" can't be found, we can't do much
return null;
}
/// <summary>
/// Get the internal serial from a PlayStation 3 disc, if possible
/// </summary>
/// <returns>Internal disc serial if possible, null on error</returns>
public string? GetPlayStation3Serial()
{
// If there's no drive path, we can't do this part
if (string.IsNullOrEmpty(Name))
return null;
// If the folder no longer exists, we can't do this part
if (!Directory.Exists(Name))
return null;
// Attempt to use PS3_DISC.SFB
string sfbPath = Path.Combine(Name, "PS3_DISC.SFB");
if (File.Exists(sfbPath))
{
try
{
using var br = new BinaryReader(File.OpenRead(sfbPath));
br.BaseStream.Seek(0x220, SeekOrigin.Begin);
return new string(br.ReadChars(0x10)).TrimEnd('\0');
}
catch
{
// We don't care what the error was
return null;
}
}
// Attempt to use PARAM.SFO
#if NET20 || NET35
string sfoPath = Path.Combine(Path.Combine(Name, "PS3_GAME"), "PARAM.SFO");
#else
string sfoPath = Path.Combine(Name, "PS3_GAME", "PARAM.SFO");
#endif
if (File.Exists(sfoPath))
{
try
{
using var br = new BinaryReader(File.OpenRead(sfoPath));
br.BaseStream.Seek(-0x18, SeekOrigin.End);
return new string(br.ReadChars(9)).TrimEnd('\0').Insert(4, "-");
}
catch
{
// We don't care what the error was
return null;
}
}
return null;
}
/// <summary>
/// Get the version from a PlayStation 3 disc, if possible
/// </summary>
/// <returns>Game version if possible, null on error</returns>
public string? GetPlayStation3Version()
{
// If there's no drive path, we can't do this part
if (string.IsNullOrEmpty(Name))
return null;
// If the folder no longer exists, we can't do this part
if (!Directory.Exists(Name))
return null;
// Attempt to use PS3_DISC.SFB
string sfbPath = Path.Combine(Name, "PS3_DISC.SFB");
if (File.Exists(sfbPath))
{
try
{
using var br = new BinaryReader(File.OpenRead(sfbPath));
br.BaseStream.Seek(0x230, SeekOrigin.Begin);
var discVersion = new string(br.ReadChars(0x10)).TrimEnd('\0');
if (!string.IsNullOrEmpty(discVersion))
return discVersion;
}
catch
{
// We don't care what the error was
return null;
}
}
// Attempt to use PARAM.SFO
#if NET20 || NET35
string sfoPath = Path.Combine(Path.Combine(Name, "PS3_GAME"), "PARAM.SFO");
#else
string sfoPath = Path.Combine(Name, "PS3_GAME", "PARAM.SFO");
#endif
if (File.Exists(sfoPath))
{
try
{
using var br = new BinaryReader(File.OpenRead(sfoPath));
br.BaseStream.Seek(-0x08, SeekOrigin.End);
return new string(br.ReadChars(5)).TrimEnd('\0');
}
catch
{
// We don't care what the error was
return null;
}
}
return null;
}
/// <summary>
/// Get the firmware version from a PlayStation 3 disc, if possible
/// </summary>
/// <returns>Firmware version if possible, null on error</returns>
public string? GetPlayStation3FirmwareVersion()
{
// If there's no drive path, we can't do this part
if (string.IsNullOrEmpty(Name))
return null;
// If the folder no longer exists, we can't do this part
if (!Directory.Exists(Name))
return null;
// Attempt to read from /PS3_UPDATE/PS3UPDAT.PUP
#if NET20 || NET35
string pupPath = Path.Combine(Path.Combine(Name, "PS3_UPDATE"), "PS3UPDAT.PUP");
#else
string pupPath = Path.Combine(Name, "PS3_UPDATE", "PS3UPDAT.PUP");
#endif
if (!File.Exists(pupPath))
return null;
try
{
using var br = new BinaryReader(File.OpenRead(pupPath));
br.BaseStream.Seek(0x3E, SeekOrigin.Begin);
byte[] buf = new byte[2];
br.Read(buf, 0, 2);
Array.Reverse(buf);
short location = BitConverter.ToInt16(buf, 0);
br.BaseStream.Seek(location, SeekOrigin.Begin);
return new string(br.ReadChars(4));
}
catch
{
// We don't care what the error was
return null;
}
}
/// <summary>
/// Get the internal serial from a PlayStation 4 disc, if possible
/// </summary>
/// <returns>Internal disc serial if possible, null on error</returns>
public string? GetPlayStation4Serial()
{
// If there's no drive path, we can't do this part
if (string.IsNullOrEmpty(Name))
return null;
// If the folder no longer exists, we can't do this part
if (!Directory.Exists(Name))
return null;
// If we can't find param.sfo, we don't have a PlayStation 4 disc
#if NET20 || NET35
string paramSfoPath = Path.Combine(Path.Combine(Name, "bd"), "param.sfo");
#else
string paramSfoPath = Path.Combine(Name, "bd", "param.sfo");
#endif
if (!File.Exists(paramSfoPath))
return null;
// Let's try reading param.sfo to find the serial at the end of the file
try
{
using var br = new BinaryReader(File.OpenRead(paramSfoPath));
br.BaseStream.Seek(-0x14, SeekOrigin.End);
return new string(br.ReadChars(9)).Insert(4, "-");
}
catch
{
// We don't care what the error was
return null;
}
}
/// <summary>
/// Get the version from a PlayStation 4 disc, if possible
/// </summary>
/// <returns>Game version if possible, null on error</returns>
public string? GetPlayStation4Version()
{
// If there's no drive path, we can't do this part
if (string.IsNullOrEmpty(Name))
return null;
// If the folder no longer exists, we can't do this part
if (!Directory.Exists(Name))
return null;
// If we can't find param.sfo, we don't have a PlayStation 4 disc
#if NET20 || NET35
string paramSfoPath = Path.Combine(Path.Combine(Name, "bd"), "param.sfo");
#else
string paramSfoPath = Path.Combine(Name, "bd", "param.sfo");
#endif
if (!File.Exists(paramSfoPath))
return null;
// Let's try reading param.sfo to find the version at the end of the file
try
{
using var br = new BinaryReader(File.OpenRead(paramSfoPath));
br.BaseStream.Seek(-0x08, SeekOrigin.End);
return new string(br.ReadChars(5));
}
catch
{
// We don't care what the error was
return null;
}
}
/// <summary>
/// Get the internal serial from a PlayStation 5 disc, if possible
/// </summary>
/// <returns>Internal disc serial if possible, null on error</returns>
public string? GetPlayStation5Serial()
{
// Attempt to get the param.json file
var json = GetPlayStation5ParamsJsonFromDrive();
if (json == null)
return null;
try
{
return json["disc"]?[0]?["masterDataId"]?.Value<string>()?.Insert(4, "-");
}
catch
{
// We don't care what the error was
return null;
}
}
/// <summary>
/// Get the version from a PlayStation 5 disc, if possible
/// </summary>
/// <returns>Game version if possible, null on error</returns>
public string? GetPlayStation5Version()
{
// Attempt to get the param.json file
var json = GetPlayStation5ParamsJsonFromDrive();
if (json == null)
return null;
try
{
return json["masterVersion"]?.Value<string>();
}
catch
{
// We don't care what the error was
return null;
}
}
/// <summary>
/// Get the params.json file from a drive path, if possible
/// </summary>
/// <returns>JObject representing the JSON on success, null on error</returns>
private JObject? GetPlayStation5ParamsJsonFromDrive()
{
// If there's no drive path, we can't do this part
if (string.IsNullOrEmpty(Name))
return null;
// If the folder no longer exists, we can't do this part
if (!Directory.Exists(Name))
return null;
// If we can't find param.json, we don't have a PlayStation 5 disc
#if NET20 || NET35
string paramJsonPath = Path.Combine(Path.Combine(Name, "bd"), "param.json");
#else
string paramJsonPath = Path.Combine(Name, "bd", "param.json");
#endif
return GetPlayStation5ParamsJsonFromFile(paramJsonPath);
}
/// <summary>
/// Get the params.json file from a filename, if possible
/// </summary>
/// <param name="filename">Filename to check</param>
/// <returns>JObject representing the JSON on success, null on error</returns>
private static JObject? GetPlayStation5ParamsJsonFromFile(string? filename)
{
// If the file doesn't exist
if (string.IsNullOrEmpty(filename) || !File.Exists(filename))
return null;
// Let's try reading param.json to find the version in the unencrypted JSON
try
{
using var br = new BinaryReader(File.OpenRead(filename));
br.BaseStream.Seek(0x800, SeekOrigin.Begin);
byte[] jsonBytes = br.ReadBytes((int)(br.BaseStream.Length - 0x800));
return JsonConvert.DeserializeObject(Encoding.ASCII.GetString(jsonBytes)) as JObject;
}
catch
{
// We don't care what the error was
return null;
}
}
#endregion
#region Helpers
/// <summary>

View File

@@ -158,22 +158,22 @@ namespace MPF.Frontend
if (programFound == null && _internalProgram != InternalProgram.Aaru)
{
var processor = new Processors.Aaru(_system, _type);
(bool foundOtherFiles, _) = processor.FoundAllFiles(outputDirectory, outputFilename, true);
if (foundOtherFiles)
var missingFiles = processor.FoundAllFiles(outputDirectory, outputFilename);
if (missingFiles.Count == 0)
programFound = InternalProgram.Aaru;
}
if (programFound == null && _internalProgram != InternalProgram.DiscImageCreator)
{
var processor = new Processors.DiscImageCreator(_system, _type);
(bool foundOtherFiles, _) = processor.FoundAllFiles(outputDirectory, outputFilename, true);
if (foundOtherFiles)
var missingFiles = processor.FoundAllFiles(outputDirectory, outputFilename);
if (missingFiles.Count == 0)
programFound = InternalProgram.DiscImageCreator;
}
if (programFound == null && _internalProgram != InternalProgram.Redumper)
{
var processor = new Processors.Redumper(_system, _type);
(bool foundOtherFiles, _) = processor.FoundAllFiles(outputDirectory, outputFilename, true);
if (foundOtherFiles)
var missingFiles = processor.FoundAllFiles(outputDirectory, outputFilename);
if (missingFiles.Count == 0)
programFound = InternalProgram.Redumper;
}
@@ -294,13 +294,13 @@ namespace MPF.Frontend
};
}
/// <inheritdoc cref="BaseProcessor.FoundAllFiles(string?, string, bool)"/>
public bool FoundAllFiles(string? outputDirectory, string outputFilename, bool preCheck)
/// <inheritdoc cref="BaseProcessor.FoundAllFiles(string?, string)"/>
public bool FoundAllFiles(string? outputDirectory, string outputFilename)
{
if (_processor == null)
return false;
return _processor.FoundAllFiles(outputDirectory, outputFilename, preCheck).Item1;
return _processor.FoundAllFiles(outputDirectory, outputFilename).Count == 0;
}
/// <inheritdoc cref="BaseExecutionContext.GetDefaultExtension(MediaType?)"/>
@@ -424,7 +424,7 @@ namespace MPF.Frontend
public async Task<ResultEventArgs> VerifyAndSaveDumpOutput(
IProgress<ResultEventArgs>? resultProgress = null,
IProgress<ProtectionProgress>? protectionProgress = null,
Func<SubmissionInfo?, (bool?, SubmissionInfo?)>? processUserInfo = null,
ProcessUserInfoDelegate? processUserInfo = null,
SubmissionInfo? seedInfo = null)
{
if (_processor == null)
@@ -437,8 +437,8 @@ namespace MPF.Frontend
var outputFilename = Path.GetFileName(OutputPath);
// Check to make sure that the output had all the correct files
(bool foundFiles, List<string> missingFiles) = _processor.FoundAllFiles(outputDirectory, outputFilename, false);
if (!foundFiles)
List<string> missingFiles = _processor.FoundAllFiles(outputDirectory, outputFilename);
if (missingFiles.Count > 0)
{
resultProgress?.Report(ResultEventArgs.Failure($"There were files missing from the output:\n{string.Join("\n", [.. missingFiles])}"));
return ResultEventArgs.Failure("Error! Please check output directory as dump may be incomplete!");
@@ -470,8 +470,7 @@ namespace MPF.Frontend
{
resultProgress?.Report(ResultEventArgs.Success("Waiting for additional disc information..."));
bool? filledInfo;
(filledInfo, submissionInfo) = processUserInfo(submissionInfo);
bool? filledInfo = processUserInfo(ref submissionInfo);
if (filledInfo == true)
resultProgress?.Report(ResultEventArgs.Success("Additional disc information added!"));
@@ -486,7 +485,7 @@ namespace MPF.Frontend
// Format the information for the text output
resultProgress?.Report(ResultEventArgs.Success("Formatting information..."));
(var formattedValues, var formatResult) = Formatter.FormatOutputData(submissionInfo, _options.EnableRedumpCompatibility);
var formattedValues = Formatter.FormatOutputData(submissionInfo, _options.EnableRedumpCompatibility, out string? formatResult);
if (formattedValues == null)
resultProgress?.Report(ResultEventArgs.Failure(formatResult));
else
@@ -497,7 +496,7 @@ namespace MPF.Frontend
// Write the text output
resultProgress?.Report(ResultEventArgs.Success("Writing submission information file..."));
(bool txtSuccess, string txtResult) = WriteOutputData(outputDirectory, filenameSuffix, formattedValues);
bool txtSuccess = WriteOutputData(outputDirectory, filenameSuffix, formattedValues, out string txtResult);
if (txtSuccess)
resultProgress?.Report(ResultEventArgs.Success(txtResult));
else
@@ -532,33 +531,47 @@ namespace MPF.Frontend
if (_options.CompressLogFiles)
{
resultProgress?.Report(ResultEventArgs.Success("Compressing log files..."));
(bool compressSuccess, string compressResult) = _processor?.CompressLogFiles(outputDirectory, filenameSuffix, outputFilename) ?? (false, "No processor provided!");
if (compressSuccess)
resultProgress?.Report(ResultEventArgs.Success(compressResult));
if (_processor == null)
{
resultProgress?.Report(ResultEventArgs.Failure("No processor provided!"));
}
else
resultProgress?.Report(ResultEventArgs.Failure(compressResult));
{
bool compressSuccess = _processor.CompressLogFiles(outputDirectory, filenameSuffix, outputFilename, out string compressResult);
if (compressSuccess)
resultProgress?.Report(ResultEventArgs.Success(compressResult));
else
resultProgress?.Report(ResultEventArgs.Failure(compressResult));
}
}
// Delete unnecessary files, if required
if (_options.DeleteUnnecessaryFiles)
{
resultProgress?.Report(ResultEventArgs.Success("Deleting unnecessary files..."));
(bool deleteSuccess, string deleteResult) = _processor?.DeleteUnnecessaryFiles(outputDirectory, outputFilename) ?? (false, "No processor provided!");
if (deleteSuccess)
resultProgress?.Report(ResultEventArgs.Success(deleteResult));
if (_processor == null)
{
resultProgress?.Report(ResultEventArgs.Failure("No processor provided!"));
}
else
resultProgress?.Report(ResultEventArgs.Failure(deleteResult));
{
bool deleteSuccess = _processor.DeleteUnnecessaryFiles(outputDirectory, outputFilename, out string deleteResult);
if (deleteSuccess)
resultProgress?.Report(ResultEventArgs.Success(deleteResult));
else
resultProgress?.Report(ResultEventArgs.Failure(deleteResult));
}
}
// Create PS3 IRD, if required
if (_options.CreateIRDAfterDumping && _system == RedumpSystem.SonyPlayStation3 && _type == MediaType.BluRay)
{
resultProgress?.Report(ResultEventArgs.Success("Creating IRD... please wait!"));
(bool deleteSuccess, string deleteResult) = await WriteIRD(OutputPath, submissionInfo?.Extras?.DiscKey, submissionInfo?.Extras?.DiscID, submissionInfo?.Extras?.PIC, submissionInfo?.SizeAndChecksums?.Layerbreak, submissionInfo?.SizeAndChecksums?.CRC32);
bool deleteSuccess = await WriteIRD(OutputPath, submissionInfo?.Extras?.DiscKey, submissionInfo?.Extras?.DiscID, submissionInfo?.Extras?.PIC, submissionInfo?.SizeAndChecksums?.Layerbreak, submissionInfo?.SizeAndChecksums?.CRC32);
if (deleteSuccess)
resultProgress?.Report(ResultEventArgs.Success(deleteResult));
resultProgress?.Report(ResultEventArgs.Success("IRD created!"));
else
resultProgress?.Report(ResultEventArgs.Failure(deleteResult));
resultProgress?.Report(ResultEventArgs.Failure("Failed to create IRD"));
}
resultProgress?.Report(ResultEventArgs.Success("Submission information process complete!"));
@@ -626,11 +639,14 @@ namespace MPF.Frontend
/// <param name="filenameSuffix">Optional suffix to append to the filename</param>
/// <param name="lines">Preformatted list of lines to write out to the file</param>
/// <returns>True on success, false on error</returns>
private static (bool, string) WriteOutputData(string? outputDirectory, string? filenameSuffix, List<string>? lines)
private static bool WriteOutputData(string? outputDirectory, string? filenameSuffix, List<string>? lines, out string status)
{
// Check to see if the inputs are valid
if (lines == null)
return (false, "No formatted data found to write!");
{
status = "No formatted data found to write!";
return false;
}
// Now write out to a generic file
try
@@ -654,10 +670,12 @@ namespace MPF.Frontend
}
catch (Exception ex)
{
return (false, $"Writing could not complete: {ex}");
status = $"Writing could not complete: {ex}";
return false;
}
return (true, "Writing complete!");
status = "Writing complete!";
return true;
}
// MOVE TO REDUMPLIB
@@ -786,7 +804,12 @@ namespace MPF.Frontend
/// <param name="filenameSuffix">Optional suffix to append to the filename</param>
/// <param name="outputFilename">Output filename to use as the base path</param>
/// <returns>True on success, false on error</returns>
private static async Task<(bool, string)> WriteIRD(string isoPath, string? discKeyString, string? discIDString, string? picString, long? layerbreak, string? crc32)
private static async Task<bool> WriteIRD(string isoPath,
string? discKeyString,
string? discIDString,
string? picString,
long? layerbreak,
string? crc32)
{
try
{
@@ -796,7 +819,7 @@ namespace MPF.Frontend
// Parse disc key from submission info (Required)
byte[]? discKey = ProcessingTool.ParseHexKey(discKeyString);
if (discKey == null)
return (false, "Failed to create IRD: No key provided");
return false;
// Parse Disc ID from submission info (Optional)
byte[]? discID = ProcessingTool.ParseDiscID(discIDString);
@@ -825,12 +848,12 @@ namespace MPF.Frontend
// Write IRD to file
ird.Write(irdPath);
return (true, "IRD created!");
return true;
}
catch (Exception)
{
// We don't care what the error is
return (false, "Failed to create IRD");
return false;
}
}
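Several helpers in this file trade a (bool, string) return tuple for a bool result plus an out status string (WriteOutputData above; CompressLogFiles and DeleteUnnecessaryFiles on the processor side). A minimal sketch of the pattern with hypothetical names, assuming `using System;`:

// Hypothetical example of the tuple-to-out refactor applied throughout this file.
static bool TryDoWork(out string status)
{
    try
    {
        // ... do the actual work here ...
        status = "Work complete!";
        return true;
    }
    catch (Exception ex)
    {
        status = $"Work could not complete: {ex}";
        return false;
    }
}

// Caller pattern, mirroring the progress reporting above:
// bool ok = TryDoWork(out string status);
// resultProgress?.Report(ok ? ResultEventArgs.Success(status) : ResultEventArgs.Failure(status));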

View File

@@ -183,5 +183,39 @@ namespace MPF.Frontend
}
#endregion
#region Functionality Support
/// <summary>
/// Get if a system requires an anti-modchip scan
/// </summary>
public static bool SupportsAntiModchipScans(this RedumpSystem? system)
{
return system switch
{
RedumpSystem.SonyPlayStation => true,
_ => false,
};
}
/// <summary>
/// Get if a system requires a copy protection scan
/// </summary>
public static bool SupportsCopyProtectionScans(this RedumpSystem? system)
{
return system switch
{
RedumpSystem.AppleMacintosh => true,
RedumpSystem.EnhancedCD => true,
RedumpSystem.IBMPCcompatible => true,
RedumpSystem.PalmOS => true,
RedumpSystem.PocketPC => true,
RedumpSystem.RainbowDisc => true,
RedumpSystem.SonyElectronicBook => true,
_ => false,
};
}
#endregion
}
}
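A brief usage sketch of the two new extension methods; the variable names and surrounding option handling are illustrative:

// Illustrative only: gate the optional scans on the new extension methods.
RedumpSystem? system = RedumpSystem.SonyPlayStation;
bool antiModchipScan    = system.SupportsAntiModchipScans();    // true for PlayStation
bool copyProtectionScan = system.SupportsCopyProtectionScans(); // false for PlayStation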

View File

@@ -1,4 +1,4 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<!-- Assembly Properties -->
@@ -9,7 +9,7 @@
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<VersionPrefix>3.2.1</VersionPrefix>
<VersionPrefix>3.2.3</VersionPrefix>
<!-- Package Properties -->
<Authors>Matt Nadareski;ReignStumble;Jakz</Authors>
@@ -49,25 +49,21 @@
<PackageReference Include="MinTasksExtensionsBridge" Version="0.3.4" />
<PackageReference Include="MinThreadingBridge" Version="0.11.4" />
</ItemGroup>
<ItemGroup Condition="$(TargetFramework.StartsWith(`net4`)) AND !$(TargetFramework.StartsWith(`net40`))">
<PackageReference Include="IndexRange" Version="1.0.3" />
</ItemGroup>
<ItemGroup Condition="$(TargetFramework.StartsWith(`net452`))">
<PackageReference Include="Microsoft.Net.Http" Version="2.2.29" />
</ItemGroup>
<ItemGroup Condition="!$(TargetFramework.StartsWith(`net2`)) AND !$(TargetFramework.StartsWith(`net3`)) AND !$(TargetFramework.StartsWith(`net40`)) AND !$(TargetFramework.StartsWith(`net452`))">
<PackageReference Include="Microsoft.Management.Infrastructure" Version="3.0.0" />
<PackageReference Include="System.Net.Http" Version="4.3.4" />
<PackageReference Include="System.Runtime.CompilerServices.Unsafe" Version="6.0.0" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="BinaryObjectScanner" PrivateAssets="build; analyzers" ExcludeAssets="contentFiles" Version="3.1.13" GeneratePathProperty="true">
<PackageReference Include="BinaryObjectScanner" PrivateAssets="build; analyzers" ExcludeAssets="contentFiles" Version="3.1.16" GeneratePathProperty="true">
<IncludeAssets>runtime; compile; build; native; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="LibIRD" Version="0.9.1" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.1" />
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.4" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,11 @@
using SabreTools.RedumpLib.Data;
namespace MPF.Frontend
{
/// <summary>
/// Determines how user information is processed, if at all
/// </summary>
/// <param name="info">Submission info that may be overwritten</param>
/// <returns>True for successful updating, false or null otherwise</returns>
public delegate bool? ProcessUserInfoDelegate(ref SubmissionInfo? info);
}
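A minimal sketch of implementing and invoking the new delegate; the method body below is an assumption, only the signature comes from the definition above:

// Hypothetical implementation matching ProcessUserInfoDelegate.
static bool? PromptUser(ref SubmissionInfo? info)
{
    if (info == null)
        return null;

    // ... show a dialog, possibly replacing `info` with an edited copy ...
    return true;
}

// Passed where a Func<SubmissionInfo?, (bool?, SubmissionInfo?)> used to go:
// ProcessUserInfoDelegate processUserInfo = PromptUser;
// bool? filledInfo = processUserInfo(ref submissionInfo);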

View File

@@ -561,32 +561,37 @@ namespace MPF.Frontend.Tools
/// String representing the message to display to the user.
/// String representing the new release URL.
/// </returns>
public static (bool different, string message, string? url) CheckForNewVersion()
public static void CheckForNewVersion(out bool different, out string message, out string? url)
{
try
{
// Get current assembly version
var assemblyVersion = Assembly.GetEntryAssembly()?.GetName()?.Version;
if (assemblyVersion == null)
return (false, "Assembly version could not be determined", null);
{
different = false;
message = "Assembly version could not be determined";
url = null;
return;
}
string version = $"{assemblyVersion.Major}.{assemblyVersion.Minor}.{assemblyVersion.Build}";
// Get the latest tag from GitHub
var (tag, url) = GetRemoteVersionAndUrl();
bool different = version != tag && tag != null;
_ = GetRemoteVersionAndUrl(out string? tag, out url);
different = version != tag && tag != null;
string message = $"Local version: {version}"
message = $"Local version: {version}"
+ $"{Environment.NewLine}Remote version: {tag}"
+ (different
? $"{Environment.NewLine}The update URL has been added copied to your clipboard"
: $"{Environment.NewLine}You have the newest version!");
return (different, message, url);
}
catch (Exception ex)
{
return (false, ex.ToString(), null);
different = false;
message = ex.ToString();
url = null;
}
}
@@ -613,11 +618,12 @@ namespace MPF.Frontend.Tools
/// <summary>
/// Get the latest version of MPF from GitHub and the release URL
/// </summary>
private static (string? tag, string? url) GetRemoteVersionAndUrl()
private static bool GetRemoteVersionAndUrl(out string? tag, out string? url)
{
tag = null; url = null;
#if NET20 || NET35 || NET40
// Not supported in .NET Frameworks 2.0, 3.5, or 4.0
return (null, null);
return false;
#else
using var hc = new System.Net.Http.HttpClient();
#if NET452
@@ -625,22 +631,22 @@ namespace MPF.Frontend.Tools
#endif
// TODO: Figure out a better way than having this hardcoded...
string url = "https://api.github.com/repos/SabreTools/MPF/releases/latest";
var message = new System.Net.Http.HttpRequestMessage(System.Net.Http.HttpMethod.Get, url);
string releaseUrl = "https://api.github.com/repos/SabreTools/MPF/releases/latest";
var message = new System.Net.Http.HttpRequestMessage(System.Net.Http.HttpMethod.Get, releaseUrl);
message.Headers.Add("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:64.0) Gecko/20100101 Firefox/64.0");
var latestReleaseJsonString = hc.SendAsync(message)?.ConfigureAwait(false).GetAwaiter().GetResult()
.Content?.ReadAsStringAsync().ConfigureAwait(false).GetAwaiter().GetResult();
if (latestReleaseJsonString == null)
return (null, null);
return false;
var latestReleaseJson = Newtonsoft.Json.Linq.JObject.Parse(latestReleaseJsonString);
if (latestReleaseJson == null)
return (null, null);
return false;
var latestTag = latestReleaseJson["tag_name"]?.ToString();
var releaseUrl = latestReleaseJson["html_url"]?.ToString();
tag = latestReleaseJson["tag_name"]?.ToString();
url = latestReleaseJson["html_url"]?.ToString();
return (latestTag, releaseUrl);
return true;
#endif
}
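Callers of CheckForNewVersion now supply three out variables instead of unpacking a tuple. A hedged sketch, written as if it runs inside (or with access to) the class that defines the method, since the class name is not shown in this hunk:

// Sketch only: consume the new out-parameter signature.
CheckForNewVersion(out bool different, out string message, out string? url);
Console.WriteLine(message);
if (different && url != null)
{
    // the message above suggests the URL is also copied to the clipboard
}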

View File

@@ -9,7 +9,14 @@ namespace MPF.Frontend.Tools
{
public static class OptionsLoader
{
private const string ConfigurationPath = "config.json";
/// <summary>
/// Full path to the configuration file used by the program
/// </summary>
#if NET20 || NET35 || NET40 || NET452
private static string ConfigurationPath => Path.Combine(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location), "config.json");
#else
private static string ConfigurationPath => Path.Combine(AppContext.BaseDirectory, "config.json");
#endif
#region Arguments
@@ -21,7 +28,7 @@ namespace MPF.Frontend.Tools
{
// Help options
if (args.Length == 0 || args[0] == "-h" || args[0] == "-?")
return false;
return null;
// List options
if (args[0] == "-lc" || args[0] == "--listcodes")
@@ -31,7 +38,7 @@ namespace MPF.Frontend.Tools
{
Console.WriteLine(siteCode);
}
Console.ReadLine();
return true;
}
else if (args[0] == "-lm" || args[0] == "--listmedia")
@@ -41,7 +48,7 @@ namespace MPF.Frontend.Tools
{
Console.WriteLine(mediaType);
}
Console.ReadLine();
return true;
}
else if (args[0] == "-lp" || args[0] == "--listprograms")
@@ -51,7 +58,7 @@ namespace MPF.Frontend.Tools
{
Console.WriteLine(program);
}
Console.ReadLine();
return true;
}
else if (args[0] == "-ls" || args[0] == "--listsystems")
@@ -61,7 +68,7 @@ namespace MPF.Frontend.Tools
{
Console.WriteLine(system);
}
Console.ReadLine();
return true;
}
@@ -72,23 +79,36 @@ namespace MPF.Frontend.Tools
/// Process common arguments for all functionality
/// </summary>
/// <returns>True if all arguments pass, false otherwise</returns>
public static (bool, MediaType, RedumpSystem?, string?) ProcessCommonArguments(string[] args)
public static bool ProcessCommonArguments(string[] args, out MediaType mediaType, out RedumpSystem? system, out string? message)
{
// All other use requires at least 3 arguments
if (args.Length < 3)
return (false, MediaType.NONE, null, "Invalid number of arguments");
{
mediaType = MediaType.NONE;
system = null;
message = "Invalid number of arguments";
return false;
}
// Check the MediaType
var mediaType = ToMediaType(args[0].Trim('"'));
mediaType = ToMediaType(args[0].Trim('"'));
if (mediaType == MediaType.NONE)
return (false, MediaType.NONE, null, $"{args[0]} is not a recognized media type");
{
system = null;
message = $"{args[0]} is not a recognized media type";
return false;
}
// Check the RedumpSystem
var knownSystem = Extensions.ToRedumpSystem(args[1].Trim('"'));
if (knownSystem == null)
return (false, MediaType.NONE, null, $"{args[1]} is not a recognized system");
system = Extensions.ToRedumpSystem(args[1].Trim('"'));
if (system == null)
{
message = $"{args[1]} is not a recognized system";
return false;
}
return (true, mediaType, knownSystem, null);
message = null;
return true;
}
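ProcessCommonArguments follows the same out-parameter style. A short driver sketch; the console output and method name are illustrative only:

// Sketch only: hand CLI arguments to the reworked helper.
static void HandleArgs(string[] args)
{
    if (!OptionsLoader.ProcessCommonArguments(args, out MediaType mediaType, out RedumpSystem? system, out string? message))
    {
        Console.WriteLine(message);
        return;
    }

    Console.WriteLine($"Media type: {mediaType}, system: {system}");
}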
/// <summary>

View File

@@ -0,0 +1,591 @@
using System;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using SabreTools.IO;
namespace MPF.Frontend.Tools
{
public static class PhysicalTool
{
#region Generic
/// <summary>
/// Get the last modified date for a file from a physical disc, if possible
/// </summary>
/// <param name="drive">Drive to extract information from</param>
/// <param name="filePath">Relative file path</param>
/// <returns>Output last modified date in "yyyy-mm-dd" format if possible, null on error</returns>
public static string? GetFileDate(Drive? drive, string? filePath, bool fixTwoDigitYear = false)
{
// If there's no drive path, we can't do this part
if (string.IsNullOrEmpty(drive?.Name))
return null;
// If the folder no longer exists, we can't do this part
if (!Directory.Exists(drive!.Name))
return null;
// If the executable name is invalid, we can't do this part
if (string.IsNullOrEmpty(filePath))
return null;
// Now that we have the EXE name, try to get the fileinfo for it
string exePath = Path.Combine(drive.Name, filePath);
if (!File.Exists(exePath))
return null;
// Get the last modified time
var fi = new FileInfo(exePath);
var lastModified = fi.LastWriteTimeUtc;
int year = lastModified.Year;
int month = lastModified.Month;
int day = lastModified.Day;
// Fix the Y2K timestamp issue, if required
if (fixTwoDigitYear)
year = year >= 1900 && year < 1920 ? 2000 + year % 100 : year;
// Format and return the string
var dt = new DateTime(year, month, day);
return dt.ToString("yyyy-MM-dd");
}
#endregion
#region BD-Video
/// <summary>
/// Get if the Bus Encryption Enabled (BEE) flag is set in a path
/// </summary>
/// <param name="drive">Drive to extract information from</param>
/// <returns>Bus encryption enabled status if possible, false otherwise</returns>
public static bool GetBusEncryptionEnabled(Drive? drive)
{
// If there's no drive path, we can't get BEE flag
if (string.IsNullOrEmpty(drive?.Name))
return false;
// If the folder no longer exists, we can't get the BEE flag
if (!Directory.Exists(drive!.Name))
return false;
// Get the two possible file paths
#if NET20 || NET35
string content000 = Path.Combine(Path.Combine(drive.Name, "AACS"), "Content000.cer");
string content001 = Path.Combine(Path.Combine(drive.Name, "AACS"), "Content001.cer");
#else
string content000 = Path.Combine(drive.Name, "AACS", "Content000.cer");
string content001 = Path.Combine(drive.Name, "AACS", "Content001.cer");
#endif
try
{
// Check the required files
if (File.Exists(content000) && new FileInfo(content000).Length > 1)
{
using var fs = File.OpenRead(content000);
_ = fs.ReadByte(); // Skip the first byte
return fs.ReadByte() > 127;
}
else if (File.Exists(content001) && new FileInfo(content001).Length > 1)
{
using var fs = File.OpenRead(content001);
_ = fs.ReadByte(); // Skip the first byte
return fs.ReadByte() > 127;
}
// False if neither file fits the criteria
return false;
}
catch
{
// We don't care what the error is right now
return false;
}
}
#endregion
#region PlayStation
/// <summary>
/// Get the EXE name from a PlayStation disc, if possible
/// </summary>
/// <param name="drive">Drive to extract information from</param>
/// <returns>Executable name on success, null otherwise</returns>
public static string? GetPlayStationExecutableName(Drive? drive)
{
// If there's no drive path, we can't get exe name
if (string.IsNullOrEmpty(drive?.Name))
return null;
// If the folder no longer exists, we can't get exe name
if (!Directory.Exists(drive!.Name))
return null;
// Get the two paths that we will need to check
string psxExePath = Path.Combine(drive.Name, "PSX.EXE");
string systemCnfPath = Path.Combine(drive.Name, "SYSTEM.CNF");
// Read the CNF file as an INI file
var systemCnf = new IniFile(systemCnfPath);
string? bootValue = string.Empty;
// PlayStation uses "BOOT" as the key
if (systemCnf.ContainsKey("BOOT"))
bootValue = systemCnf["BOOT"];
// PlayStation 2 uses "BOOT2" as the key
if (systemCnf.ContainsKey("BOOT2"))
bootValue = systemCnf["BOOT2"];
// If we had any boot value, parse it and get the executable name
if (!string.IsNullOrEmpty(bootValue))
{
var match = Regex.Match(bootValue, @"cdrom.?:\\?(.*)", RegexOptions.Compiled);
if (match.Groups.Count > 1)
{
string? serial = match.Groups[1].Value;
// Some games may have the EXE in a subfolder
serial = Path.GetFileName(serial);
return serial;
}
}
// If the SYSTEM.CNF value can't be found, try PSX.EXE
if (File.Exists(psxExePath))
return "PSX.EXE";
// If neither can be found, we return null
return null;
}
/// <summary>
/// Get the serial from a PlayStation disc, if possible
/// </summary>
/// <param name="drive">Drive to extract information from</param>
/// <returns>Serial on success, null otherwise</returns>
public static string? GetPlayStationSerial(Drive? drive)
{
// Try to get the executable name
string? exeName = GetPlayStationExecutableName(drive);
if (string.IsNullOrEmpty(exeName))
return null;
// Handle generic PSX.EXE
if (exeName == "PSX.EXE")
return null;
// EXE name may have a trailing `;` after it
// EXE name should always be in all caps
exeName = exeName!
.Split(';')[0]
.ToUpperInvariant();
// Serial is most of the EXE name normalized
return exeName
.Replace('_', '-')
.Replace(".", string.Empty);
}
/// <summary>
/// Get the version from a PlayStation 2 disc, if possible
/// </summary>
/// <param name="drive">Drive to extract information from</param>
/// <returns>Game version if possible, null on error</returns>
public static string? GetPlayStation2Version(Drive? drive)
{
// If there's no drive path, we can't do this part
if (string.IsNullOrEmpty(drive?.Name))
return null;
// If the folder no longer exists, we can't do this part
if (!Directory.Exists(drive!.Name))
return null;
// Get the SYSTEM.CNF path to check
string systemCnfPath = Path.Combine(drive.Name, "SYSTEM.CNF");
// Try to parse the SYSTEM.CNF file
var systemCnf = new IniFile(systemCnfPath);
if (systemCnf.ContainsKey("VER"))
return systemCnf["VER"];
// If "VER" can't be found, we can't do much
return null;
}
/// <summary>
/// Get the internal serial from a PlayStation 3 disc, if possible
/// </summary>
/// <param name="drive">Drive to extract information from</param>
/// <returns>Internal disc serial if possible, null on error</returns>
public static string? GetPlayStation3Serial(Drive? drive)
{
// If there's no drive path, we can't do this part
if (string.IsNullOrEmpty(drive?.Name))
return null;
// If the folder no longer exists, we can't do this part
if (!Directory.Exists(drive!.Name))
return null;
// Attempt to use PS3_DISC.SFB
string sfbPath = Path.Combine(drive.Name, "PS3_DISC.SFB");
if (File.Exists(sfbPath))
{
try
{
using var br = new BinaryReader(File.OpenRead(sfbPath));
br.BaseStream.Seek(0x220, SeekOrigin.Begin);
return new string(br.ReadChars(0x10)).TrimEnd('\0');
}
catch
{
// We don't care what the error was
return null;
}
}
// Attempt to use PARAM.SFO
#if NET20 || NET35
string sfoPath = Path.Combine(Path.Combine(drive.Name, "PS3_GAME"), "PARAM.SFO");
#else
string sfoPath = Path.Combine(drive.Name, "PS3_GAME", "PARAM.SFO");
#endif
if (File.Exists(sfoPath))
{
try
{
using var br = new BinaryReader(File.OpenRead(sfoPath));
br.BaseStream.Seek(-0x18, SeekOrigin.End);
return new string(br.ReadChars(9)).TrimEnd('\0').Insert(4, "-");
}
catch
{
// We don't care what the error was
return null;
}
}
return null;
}
/// <summary>
/// Get the version from a PlayStation 3 disc, if possible
/// </summary>
/// <param name="drive">Drive to extract information from</param>
/// <returns>Game version if possible, null on error</returns>
public static string? GetPlayStation3Version(Drive? drive)
{
// If there's no drive path, we can't do this part
if (string.IsNullOrEmpty(drive?.Name))
return null;
// If the folder no longer exists, we can't do this part
if (!Directory.Exists(drive!.Name))
return null;
// Attempt to use PS3_DISC.SFB
string sfbPath = Path.Combine(drive.Name, "PS3_DISC.SFB");
if (File.Exists(sfbPath))
{
try
{
using var br = new BinaryReader(File.OpenRead(sfbPath));
br.BaseStream.Seek(0x230, SeekOrigin.Begin);
var discVersion = new string(br.ReadChars(0x10)).TrimEnd('\0');
if (!string.IsNullOrEmpty(discVersion))
return discVersion;
}
catch
{
// We don't care what the error was
return null;
}
}
// Attempt to use PARAM.SFO
#if NET20 || NET35
string sfoPath = Path.Combine(Path.Combine(drive.Name, "PS3_GAME"), "PARAM.SFO");
#else
string sfoPath = Path.Combine(drive.Name, "PS3_GAME", "PARAM.SFO");
#endif
if (File.Exists(sfoPath))
{
try
{
using var br = new BinaryReader(File.OpenRead(sfoPath));
br.BaseStream.Seek(-0x08, SeekOrigin.End);
return new string(br.ReadChars(5)).TrimEnd('\0');
}
catch
{
// We don't care what the error was
return null;
}
}
return null;
}
/// <summary>
/// Get the firmware version from a PlayStation 3 disc, if possible
/// </summary>
/// <param name="drive">Drive to extract information from</param>
/// <returns>Firmware version if possible, null on error</returns>
public static string? GetPlayStation3FirmwareVersion(Drive? drive)
{
// If there's no drive path, we can't do this part
if (string.IsNullOrEmpty(drive?.Name))
return null;
// If the folder no longer exists, we can't do this part
if (!Directory.Exists(drive!.Name))
return null;
// Attempt to read from /PS3_UPDATE/PS3UPDAT.PUP
#if NET20 || NET35
string pupPath = Path.Combine(Path.Combine(drive.Name, "PS3_UPDATE"), "PS3UPDAT.PUP");
#else
string pupPath = Path.Combine(drive.Name, "PS3_UPDATE", "PS3UPDAT.PUP");
#endif
if (!File.Exists(pupPath))
return null;
try
{
using var br = new BinaryReader(File.OpenRead(pupPath));
br.BaseStream.Seek(0x3E, SeekOrigin.Begin);
byte[] buf = new byte[2];
br.Read(buf, 0, 2);
Array.Reverse(buf);
short location = BitConverter.ToInt16(buf, 0);
br.BaseStream.Seek(location, SeekOrigin.Begin);
return new string(br.ReadChars(4));
}
catch
{
// We don't care what the error was
return null;
}
}
/// <summary>
/// Get the internal serial from a PlayStation 4 disc, if possible
/// </summary>
/// <param name="drive">Drive to extract information from</param>
/// <returns>Internal disc serial if possible, null on error</returns>
public static string? GetPlayStation4Serial(Drive? drive)
{
// If there's no drive path, we can't do this part
if (string.IsNullOrEmpty(drive?.Name))
return null;
// If the folder no longer exists, we can't do this part
if (!Directory.Exists(drive!.Name))
return null;
// If we can't find param.sfo, we don't have a PlayStation 4 disc
#if NET20 || NET35
string paramSfoPath = Path.Combine(Path.Combine(drive.Name, "bd"), "param.sfo");
#else
string paramSfoPath = Path.Combine(drive.Name, "bd", "param.sfo");
#endif
if (!File.Exists(paramSfoPath))
return null;
// Let's try reading param.sfo to find the serial at the end of the file
try
{
using var br = new BinaryReader(File.OpenRead(paramSfoPath));
br.BaseStream.Seek(-0x14, SeekOrigin.End);
return new string(br.ReadChars(9)).Insert(4, "-");
}
catch
{
// We don't care what the error was
return null;
}
}
/// <summary>
/// Get the version from a PlayStation 4 disc, if possible
/// </summary>
/// <param name="drive">Drive to extract information from</param>
/// <returns>Game version if possible, null on error</returns>
public static string? GetPlayStation4Version(Drive? drive)
{
// If there's no drive path, we can't do this part
if (string.IsNullOrEmpty(drive?.Name))
return null;
// If the folder no longer exists, we can't do this part
if (!Directory.Exists(drive!.Name))
return null;
// If we can't find param.sfo, we don't have a PlayStation 4 disc
#if NET20 || NET35
string paramSfoPath = Path.Combine(Path.Combine(drive.Name, "bd"), "param.sfo");
#else
string paramSfoPath = Path.Combine(drive.Name, "bd", "param.sfo");
#endif
if (!File.Exists(paramSfoPath))
return null;
// Let's try reading param.sfo to find the version at the end of the file
try
{
using var br = new BinaryReader(File.OpenRead(paramSfoPath));
br.BaseStream.Seek(-0x08, SeekOrigin.End);
return new string(br.ReadChars(5));
}
catch
{
// We don't care what the error was
return null;
}
}
/// <summary>
/// Get the internal serial from a PlayStation 5 disc, if possible
/// </summary>
/// <param name="drive">Drive to extract information from</param>
/// <returns>Internal disc serial if possible, null on error</returns>
public static string? GetPlayStation5Serial(Drive? drive)
{
// Attempt to get the param.json file
var json = GetPlayStation5ParamsJsonFromDrive(drive);
if (json == null)
return null;
try
{
return json["disc"]?[0]?["masterDataId"]?.Value<string>()?.Insert(4, "-");
}
catch
{
// We don't care what the error was
return null;
}
}
/// <summary>
/// Get the version from a PlayStation 5 disc, if possible
/// </summary>
/// <param name="drive">Drive to extract information from</param>
/// <returns>Game version if possible, null on error</returns>
public static string? GetPlayStation5Version(Drive? drive)
{
// Attempt to get the param.json file
var json = GetPlayStation5ParamsJsonFromDrive(drive);
if (json == null)
return null;
try
{
return json["masterVersion"]?.Value<string>();
}
catch
{
// We don't care what the error was
return null;
}
}
/// <summary>
/// Get the params.json file from a drive path, if possible
/// </summary>
/// <param name="drive">Drive to extract information from</param>
/// <returns>JObject representing the JSON on success, null on error</returns>
private static JObject? GetPlayStation5ParamsJsonFromDrive(Drive? drive)
{
// If there's no drive path, we can't do this part
if (string.IsNullOrEmpty(drive?.Name))
return null;
// If the folder no longer exists, we can't do this part
if (!Directory.Exists(drive!.Name))
return null;
// If we can't find param.json, we don't have a PlayStation 5 disc
#if NET20 || NET35
string paramJsonPath = Path.Combine(Path.Combine(drive.Name, "bd"), "param.json");
#else
string paramJsonPath = Path.Combine(drive.Name, "bd", "param.json");
#endif
return GetPlayStation5ParamsJsonFromFile(paramJsonPath);
}
/// <summary>
/// Get the params.json file from a filename, if possible
/// </summary>
/// <param name="filename">Filename to check</param>
/// <returns>JObject representing the JSON on success, null on error</returns>
private static JObject? GetPlayStation5ParamsJsonFromFile(string? filename)
{
// If the file doesn't exist
if (string.IsNullOrEmpty(filename) || !File.Exists(filename))
return null;
// Let's try reading param.json to find the version in the unencrypted JSON
try
{
using var br = new BinaryReader(File.OpenRead(filename));
br.BaseStream.Seek(0x800, SeekOrigin.Begin);
byte[] jsonBytes = br.ReadBytes((int)(br.BaseStream.Length - 0x800));
return JsonConvert.DeserializeObject(Encoding.ASCII.GetString(jsonBytes)) as JObject;
}
catch
{
// We don't care what the error was
return null;
}
}
#endregion
#region Xbox
/// <summary>
/// Get all filenames for Xbox One and Xbox Series X
/// </summary>
/// <param name="drive">Drive to extract information from</param>
/// <returns>Filenames if possible, null on error</returns>
public static string? GetXboxFilenames(Drive? drive)
{
// If there's no drive path, we can't get filenames
if (string.IsNullOrEmpty(drive?.Name))
return null;
// If the folder no longer exists, we can't get filenames
if (!Directory.Exists(drive!.Name))
return null;
// Get the MSXC directory path
string msxc = Path.Combine(drive.Name, "MSXC");
if (!Directory.Exists(msxc))
return null;
try
{
var files = Directory.GetFiles(msxc, "*", SearchOption.TopDirectoryOnly);
var filenames = files.Select(Path.GetFileName).ToArray();
return string.Join("\n", filenames);
}
catch
{
// We don't care what the error is right now
return null;
}
}
#endregion
}
}
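PhysicalTool now hosts the disc-reading helpers that were removed from Drive above. A hedged usage sketch; how `drive` is obtained is an assumption, only the method signatures come from this file:

// Sketch only: pull PlayStation details off a mounted disc.
string? serial  = PhysicalTool.GetPlayStationSerial(drive);
string? exeName = PhysicalTool.GetPlayStationExecutableName(drive);
string? exeDate = PhysicalTool.GetFileDate(drive, exeName, fixTwoDigitYear: true);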

View File

@@ -6,32 +6,10 @@ using System.Text.RegularExpressions;
using System.Threading.Tasks;
using BinaryObjectScanner;
#pragma warning disable SYSLIB1045 // Convert to 'GeneratedRegexAttribute'.
namespace MPF.Frontend.Tools
{
public static class ProtectionTool
{
/// <summary>
/// Get the current detected copy protection(s), if possible
/// </summary>
/// <param name="drive">Drive object representing the current drive</param>
/// <param name="options">Options object that determines what to scan</param>
/// <param name="progress">Optional progress callback</param>
/// <returns>Detected copy protection(s) if possible, null on error</returns>
public static async Task<(string?, Dictionary<string, List<string>>?)> GetCopyProtection(Drive? drive,
Frontend.Options options,
IProgress<ProtectionProgress>? progress = null)
{
if (options.ScanForProtection && drive?.Name != null)
{
(var protection, _) = await RunProtectionScanOnPath(drive.Name, options, progress);
return (FormatProtections(protection), protection);
}
return ("(CHECK WITH PROTECTIONID)", null);
}
/// <summary>
/// Run protection scan on a given path
/// </summary>
@@ -39,68 +17,37 @@ namespace MPF.Frontend.Tools
/// <param name="options">Options object that determines what to scan</param>
/// <param name="progress">Optional progress callback</param>
/// <returns>Set of all detected copy protections with an optional error string</returns>
public static async Task<(Dictionary<string, List<string>>?, string?)> RunProtectionScanOnPath(string path,
public static async Task<ProtectionDictionary> RunProtectionScanOnPath(string path,
Frontend.Options options,
IProgress<ProtectionProgress>? progress = null)
{
try
{
#if NET40
var found = await Task.Factory.StartNew(() =>
{
var scanner = new Scanner(
options.ScanArchivesForProtection,
scanContents: true, // Hardcoded value to avoid issues
scanGameEngines: false, // Hardcoded value to avoid issues
options.ScanPackersForProtection,
scanPaths: true, // Hardcoded value to avoid issues
options.IncludeDebugProtectionInformation,
progress);
return scanner.GetProtections(path);
});
var found = await Task.Factory.StartNew(() =>
#else
var found = await Task.Run(() =>
{
var scanner = new Scanner(
options.ScanArchivesForProtection,
scanContents: true, // Hardcoded value to avoid issues
scanGameEngines: false, // Hardcoded value to avoid issues
options.ScanPackersForProtection,
scanPaths: true, // Hardcoded value to avoid issues
options.IncludeDebugProtectionInformation,
progress);
return scanner.GetProtections(path);
});
var found = await Task.Run(() =>
#endif
// If nothing was returned, return
#if NET20 || NET35
if (found == null || found.Count == 0)
#else
if (found == null || found.IsEmpty)
#endif
return (null, null);
// Filter out any empty protections
var filteredProtections = found
#if NET20 || NET35
.Where(kvp => kvp.Value != null && kvp.Value.Count > 0)
#else
.Where(kvp => kvp.Value != null && !kvp.Value.IsEmpty)
#endif
.ToDictionary(
kvp => kvp.Key,
kvp => kvp.Value.OrderBy(s => s).ToList());
// Return the filtered set of protections
return (filteredProtections, null);
}
catch (Exception ex)
{
return (null, ex.ToString());
}
var scanner = new Scanner(
options.ScanArchivesForProtection,
scanContents: true, // Hardcoded value to avoid issues
scanGameEngines: false, // Hardcoded value to avoid issues
options.ScanPackersForProtection,
scanPaths: true, // Hardcoded value to avoid issues
options.IncludeDebugProtectionInformation,
progress);
return scanner.GetProtections(path);
});
// If nothing was returned, return
if (found == null || found.Count == 0)
return [];
// Filter out any empty protections
found.ClearEmptyKeys();
// Return the filtered set of protections
return found;
}
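RunProtectionScanOnPath now returns a ProtectionDictionary directly instead of a tuple with an error string. A hedged caller sketch, assuming an async context and that `path`, `options`, and `progress` come from the caller:

// Sketch only: scan a path and format the result for submission info.
var protections = await ProtectionTool.RunProtectionScanOnPath(path, options, progress);
string? formatted = ProtectionTool.FormatProtections(protections);
// e.g. "None found [OMIT FROM SUBMISSION]" when the dictionary is empty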
/// <summary>
@@ -108,10 +55,12 @@ namespace MPF.Frontend.Tools
/// </summary>
/// <param name="protections">Dictionary of file to list of protection mappings</param>
/// <returns>Detected protections, if any</returns>
public static string? FormatProtections(Dictionary<string, List<string>>? protections)
public static string? FormatProtections(ProtectionDictionary? protections)
{
// If the filtered list is empty in some way, return
if (protections == null || !protections.Any())
if (protections == null)
return "(CHECK WITH PROTECTIONID)";
else if (protections.Count == 0)
return "None found [OMIT FROM SUBMISSION]";
// Get an ordered list of distinct found protections
@@ -304,55 +253,115 @@ namespace MPF.Frontend.Tools
// SafeDisc
if (foundProtections.Any(p => p.StartsWith("SafeDisc")))
{
if (foundProtections.Any(p => Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}", RegexOptions.Compiled) && !p.StartsWith("Macrovision Protection File")))
// Confirmed this set of checks works with Redump entries 10430, 11347, 13230, 18614, 28257, 31149, 31824, 52606, 57721, 58455, 58573, 62935, 63941, 64255, 65569, 66005, 70504, 73502, 74520, 78048, 79729, 83468, 98589, and 101261.
// Best case scenario for SafeDisc 2+: A full SafeDisc version is found in a line starting with "Macrovision Protected Application". All other SafeDisc detections can be safely scrubbed.
// TODO: Scrub "Macrovision Protected Application, " from before the SafeDisc version.
if (foundProtections.Any(p => Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}", RegexOptions.Compiled) && p.StartsWith("Macrovision Protected Application") && !p.Contains("SRV Tool APP")))
{
foundProtections = foundProtections.Where(p => !p.StartsWith("Macrovision Protected Application"))
.Where(p => !p.StartsWith("Macrovision Protection File"))
foundProtections = foundProtections.Where(p => !p.StartsWith("Macrovision Protection File"))
.Where(p => !p.StartsWith("Macrovision Security Driver"))
.Where(p => !p.Contains("SRV Tool APP"))
.Where(p => p != "SafeDisc")
.Where(p => !p.StartsWith("Macrovision Protected Application [Version Expunged]"))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}-[0-9]\.[0-9]{2}\.[0-9]{3}", RegexOptions.Compiled)))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}/+", RegexOptions.Compiled)))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}\+", RegexOptions.Compiled)))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}\/4\+", RegexOptions.Compiled)))
.Where(p => p != "SafeDisc 1/Lite")
.Where(p => p != "SafeDisc 2+");
.Where(p => p != "SafeDisc 2+")
.Where(p => p != "SafeDisc 3+ (DVD)");
}
else if (foundProtections.Any(p => Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}-[0-9]\.[0-9]{2}\.[0-9]{3}", RegexOptions.Compiled) && !p.StartsWith("Macrovision Protection File")))
// Next best case for SafeDisc 2+: A full SafeDisc version is found from the "SafeDisc SRV Tool APP".
else if (foundProtections.Any(p => Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}", RegexOptions.Compiled) && p.StartsWith("Macrovision Protected Application") && p.Contains("SRV Tool APP")))
{
foundProtections = foundProtections.Where(p => !p.StartsWith("Macrovision Protected Application"))
.Where(p => !p.StartsWith("Macrovision Protection File"))
foundProtections = foundProtections.Where(p => !p.StartsWith("Macrovision Protection File"))
.Where(p => !p.StartsWith("Macrovision Security Driver"))
.Where(p => p != "SafeDisc")
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}/+", RegexOptions.Compiled)))
.Where(p => !p.StartsWith("Macrovision Protected Application [Version Expunged]"))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}-[0-9]\.[0-9]{2}\.[0-9]{3}", RegexOptions.Compiled)))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}\+", RegexOptions.Compiled)))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}\/4\+", RegexOptions.Compiled)))
.Where(p => p != "SafeDisc 1/Lite")
.Where(p => p != "SafeDisc 2+");
.Where(p => p != "SafeDisc 2+")
.Where(p => p != "SafeDisc 3+ (DVD)");
}
else if (foundProtections.Any(p => Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}/+", RegexOptions.Compiled) && !p.StartsWith("Macrovision Protection File")))
// Covers specific edge cases where older drivers are erroneously placed in discs with a newer version of SafeDisc, and the specific SafeDisc version is expunged.
else if (foundProtections.Any(p => Regex.IsMatch(p, @"SafeDisc [1-2]\.[0-9]{2}\.[0-9]{3}-[1-2]\.[0-9]{2}\.[0-9]{3}$", RegexOptions.Compiled) || Regex.IsMatch(p, @"SafeDisc [1-2]\.[0-9]{2}\.[0-9]{3}$", RegexOptions.Compiled)) && foundProtections.Any(p => p == "SafeDisc 3+ (DVD)"))
{
foundProtections = foundProtections.Where(p => !p.StartsWith("Macrovision Protected Application"))
.Where(p => !p.StartsWith("Macrovision Protection File"))
.Where(p => !p.StartsWith("Macrovision Security Driver"))
.Where(p => p != "SafeDisc")
.Where(p => p != "SafeDisc 1/Lite")
.Where(p => p != "SafeDisc 2+");
foundProtections = foundProtections.Where(p => !p.StartsWith("Macrovision Protection File"))
.Where(p => !p.StartsWith("Macrovision Protected Application [Version Expunged]"))
.Where(p => !p.StartsWith("Macrovision Security Driver"))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [1-2]\.[0-9]{2}\.[0-9]{3}\+", RegexOptions.Compiled)))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [1-2]\.[0-9]{2}\.[0-9]{3}-[1-2]\.[0-9]{2}\.[0-9]{3}", RegexOptions.Compiled)))
.Where(p => p != "SafeDisc")
.Where(p => p != "SafeDisc 1/Lite")
.Where(p => p != "SafeDisc 2+");
}
// Best case for SafeDisc 1.X: A full SafeDisc version is found that isn't part of a version range.
else if (foundProtections.Any(p => Regex.IsMatch(p, @"SafeDisc 1\.[0-9]{2}\.[0-9]{3}$", RegexOptions.Compiled) && !(Regex.IsMatch(p, @"SafeDisc 1\.[0-9]{2}\.[0-9]{3}-[0-9]\.[0-9]{2}\.[0-9]{3}", RegexOptions.Compiled))))
{
foundProtections = foundProtections.Where(p => !p.StartsWith("Macrovision Protection File"))
.Where(p => !p.StartsWith("Macrovision Security Driver"))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}-[0-9]\.[0-9]{2}\.[0-9]{3}", RegexOptions.Compiled)))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}\+", RegexOptions.Compiled)))
.Where(p => p != "SafeDisc")
.Where(p => p != "SafeDisc 1")
.Where(p => p != "SafeDisc 1/Lite");
}
// Next best case for SafeDisc 1: A SafeDisc version range is found from "SECDRV.SYS".
// TODO: Scrub "Macrovision Security Driver {Version}" from before the SafeDisc version.
else if (foundProtections.Any(p => p.StartsWith("Macrovision Security Driver") && Regex.IsMatch(p, @"SafeDisc 1\.[0-9]{2}\.[0-9]{3}-[1-2]\.[0-9]{2}\.[0-9]{3}", RegexOptions.Compiled) || Regex.IsMatch(p, @"SafeDisc 1\.[0-9]{2}\.[0-9]{3}$")))
{
foundProtections = foundProtections.Where(p => !p.StartsWith("Macrovision Protection File"))
.Where(p => !p.StartsWith("Macrovision Protected Application [Version Expunged]"))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}\+", RegexOptions.Compiled)))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc 1\.[0-9]{2}\.[0-9]{3}-[0-9]\.[0-9]{2}\.[0-9]{3}", RegexOptions.Compiled)))
.Where(p => p != "SafeDisc")
.Where(p => p != "SafeDisc 1")
.Where(p => p != "SafeDisc 1/Lite");
}
// Next best case for SafeDisc 2+: A SafeDisc version range is found from "SECDRV.SYS".
// TODO: Scrub "Macrovision Security Driver {Version}" from before the SafeDisc version.
else if (foundProtections.Any(p => p.StartsWith("Macrovision Security Driver")))
{
foundProtections = foundProtections.Where(p => !p.StartsWith("Macrovision Protected Application"))
.Where(p => !p.StartsWith("Macrovision Protection File"))
foundProtections = foundProtections.Where(p => !p.StartsWith("Macrovision Protection File"))
.Where(p => !p.StartsWith("Macrovision Protected Application [Version Expunged]"))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}\+", RegexOptions.Compiled)))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc 1\.[0-9]{2}\.[0-9]{3}-[0-9]\.[0-9]{2}\.[0-9]{3}", RegexOptions.Compiled)))
.Where(p => p != "SafeDisc")
.Where(p => p != "SafeDisc 1")
.Where(p => p != "SafeDisc 1/Lite")
.Where(p => p != "SafeDisc 2+");
.Where(p => p != "SafeDisc 2+")
.Where(p => p != "SafeDisc 3+ (DVD)");
}
// Only SafeDisc Lite is found.
else if (foundProtections.Any(p => p == "SafeDisc Lite"))
{
foundProtections = foundProtections.Where(p => p != "SafeDisc")
.Where(p => !(Regex.IsMatch(p, @"SafeDisc 1\.[0-9]{2}\.[0-9]{3}-1\.[0-9]{2}\.[0-9]{3}\/Lite", RegexOptions.Compiled)));
}
// Only SafeDisc 3+ is found.
else if (foundProtections.Any(p => p == "SafeDisc 3+ (DVD)"))
{
foundProtections = foundProtections.Where(p => p != "SafeDisc")
.Where(p => p != "SafeDisc 2+")
.Where(p => !p.StartsWith("Macrovision Protection File"))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}\+", RegexOptions.Compiled)));
}
// Only SafeDisc 2+ is found.
else if (foundProtections.Any(p => p == "SafeDisc 2+"))
{
foundProtections = foundProtections.Where(p => !p.StartsWith("Macrovision Protected Application"))
foundProtections = foundProtections.Where(p => p != "SafeDisc")
.Where(p => !p.StartsWith("Macrovision Protection File"))
.Where(p => p != "SafeDisc");
}
else if (foundProtections.Any(p => p == "SafeDisc 1/Lite"))
{
foundProtections = foundProtections.Where(p => !p.StartsWith("Macrovision Protected Application"))
.Where(p => !p.StartsWith("Macrovision Protection File"))
.Where(p => p != "SafeDisc");
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}\+", RegexOptions.Compiled)));
}
}
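As a rough illustration of the consolidation pattern used throughout the block above, the sketch below (not project code; the version string is made up, and usings for System.Linq and System.Text.RegularExpressions are assumed) drops generic SafeDisc labels once a fully versioned detection is present:
// Minimal sketch of the consolidation idea, assuming a plain list of detection strings.
// "SafeDisc 2.30.033" is a hypothetical specific hit; generic labels are dropped in its favor.
List<string> found = ["SafeDisc", "SafeDisc 2+", "SafeDisc 2.30.033"];
bool hasSpecific = found.Any(p => Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}$"));
if (hasSpecific)
    found = found.Where(p => p != "SafeDisc" && p != "SafeDisc 2+").ToList();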

View File

@@ -57,8 +57,8 @@ namespace MPF.Frontend.Tools
string outputFilename = Path.GetFileName(outputPath);
// Check that all of the relevant files are there
(bool foundFiles, List<string> missingFiles) = processor.FoundAllFiles(outputDirectory, outputFilename, false);
if (!foundFiles)
List<string> missingFiles = processor.FoundAllFiles(outputDirectory, outputFilename);
if (missingFiles.Count > 0)
{
resultProgress?.Report(ResultEventArgs.Failure($"There were files missing from the output:\n{string.Join("\n", [.. missingFiles])}"));
resultProgress?.Report(ResultEventArgs.Failure($"This may indicate an issue with the hardware or media, including unsupported devices.\nPlease see dumping program documentation for more details."));
@@ -81,7 +81,7 @@ namespace MPF.Frontend.Tools
// Get specific tool output handling
processor?.GenerateSubmissionInfo(info, combinedBase, options.EnableRedumpCompatibility);
if (options.IncludeArtifacts)
processor?.GenerateArtifacts(info, combinedBase);
info.Artifacts = processor?.GenerateArtifacts(combinedBase);
// Get a list of matching IDs for each line in the DAT
if (!string.IsNullOrEmpty(info.TracksAndWriteOffsets!.ClrMameProData) && options.HasRedumpLogin)
@@ -107,7 +107,7 @@ namespace MPF.Frontend.Tools
ProcessSystem(info, system, drive, options.AddPlaceholders, processor is DiscImageCreator, combinedBase);
// Run anti-modchip check, if necessary
if (drive != null && SupportsAntiModchipScans(system) && info.CopyProtection!.AntiModchip == YesNo.NULL)
if (drive != null && system.SupportsAntiModchipScans() && info.CopyProtection!.AntiModchip == YesNo.NULL)
{
resultProgress?.Report(ResultEventArgs.Success("Checking for anti-modchip strings... this might take a while!"));
info.CopyProtection.AntiModchip = await ProtectionTool.GetPlayStationAntiModchipDetected(drive?.Name) ? YesNo.Yes : YesNo.No;
@@ -115,14 +115,26 @@ namespace MPF.Frontend.Tools
}
// Run copy protection, if possible or necessary
if (SupportsCopyProtectionScans(system))
if (system.SupportsCopyProtectionScans())
{
resultProgress?.Report(ResultEventArgs.Success("Running copy protection scan... this might take a while!"));
var (protectionString, fullProtections) = await ProtectionTool.GetCopyProtection(drive, options, protectionProgress);
info.CopyProtection!.Protection += protectionString;
info.CopyProtection.FullProtections = fullProtections as Dictionary<string, List<string>?> ?? [];
resultProgress?.Report(ResultEventArgs.Success("Copy protection scan complete!"));
ProtectionDictionary? protections = null;
try
{
if (options.ScanForProtection && drive?.Name != null)
protections = await ProtectionTool.RunProtectionScanOnPath(drive.Name, options, protectionProgress);
var protectionString = ProtectionTool.FormatProtections(protections);
info.CopyProtection!.Protection += protectionString;
info.CopyProtection.FullProtections = ReformatProtectionDictionary(protections);
resultProgress?.Report(ResultEventArgs.Success("Copy protection scan complete!"));
}
catch (Exception ex)
{
resultProgress?.Report(ResultEventArgs.Failure(ex.ToString()));
}
}
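For reference, the revised scan flow above can be read as the following hedged sketch, assuming drive.Name, options, progress, and an async context are available:
// Hedged sketch of the revised scan flow; all calls mirror the lines shown in this diff.
ProtectionDictionary? protections = await ProtectionTool.RunProtectionScanOnPath(drive.Name, options, progress);
string output = ProtectionTool.FormatProtections(protections);
info.CopyProtection!.Protection += output;
info.CopyProtection.FullProtections = ReformatProtectionDictionary(protections);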
// Set fields that may have automatic filling otherwise
@@ -174,7 +186,7 @@ namespace MPF.Frontend.Tools
// Setup the checks
bool allFound = true;
List<int[]> foundIdSets = [];
// Loop through all of the hashdata to find matching IDs
resultProgress?.Report(ResultEventArgs.Success("Finding disc matches on Redump..."));
@@ -212,17 +224,19 @@ namespace MPF.Frontend.Tools
continue;
}
(bool singleFound, var foundIds, string? result) = await Validator.ValidateSingleTrack(wc, info, sha1);
if (singleFound)
resultProgress?.Report(ResultEventArgs.Success(result));
var foundIds = await Validator.ValidateSingleTrack(wc, info, sha1);
if (foundIds != null && foundIds.Count == 1)
resultProgress?.Report(ResultEventArgs.Success($"Single match found for {sha1}"));
else if (foundIds != null && foundIds.Count != 1)
resultProgress?.Report(ResultEventArgs.Success($"Multiple matches found for {sha1}"));
else
resultProgress?.Report(ResultEventArgs.Failure(result));
resultProgress?.Report(ResultEventArgs.Failure($"No matches found for {sha1}"));
// Add the found IDs to the map
foundIdSets.Add(foundIds?.ToArray() ?? []);
// Ensure that all tracks are found
allFound &= singleFound;
allFound &= (foundIds != null && foundIds.Count == 1);
}
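A self-contained illustration of how the per-track ID sets collapse to fully matched IDs (the ID values are made up; System.Linq and System.Collections.Generic usings are assumed):
// Illustrative only: three tracks whose candidate ID sets intersect to a single fully matched disc ID.
List<int[]> idSets = [[12345, 67890], [12345], [12345, 11111]];
List<int>? fullyMatched = null;
foreach (int[] set in idSets)
{
    fullyMatched = fullyMatched == null ? new List<int>(set) : fullyMatched.Intersect(set).ToList();
    if (fullyMatched.Count == 0)
        break;
}
// fullyMatched is now [12345]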
// If all tracks were found, check if there are any fully-matched IDs
@@ -238,7 +252,7 @@ namespace MPF.Frontend.Tools
fullyMatchedIDs = [.. set];
continue;
}
// Try to intersect with all known IDs
fullyMatchedIDs = fullyMatchedIDs.Intersect(set).ToList();
if (!fullyMatchedIDs.Any())
@@ -249,23 +263,20 @@ namespace MPF.Frontend.Tools
// If we don't have any matches but we have a universal hash
if (!info.PartiallyMatchedIDs.Any() && info.CommonDiscInfo?.CommentsSpecialFields?.ContainsKey(SiteCode.UniversalHash) == true)
{
#if NET40
var validateTask = Validator.ValidateUniversalHash(wc, info);
validateTask.Wait();
(bool singleFound, var foundIds, string? result) = validateTask.Result;
#else
(bool singleFound, var foundIds, string? result) = await Validator.ValidateUniversalHash(wc, info);
#endif
if (singleFound)
resultProgress?.Report(ResultEventArgs.Success(result));
string sha1 = info.CommonDiscInfo.CommentsSpecialFields[SiteCode.UniversalHash];
var foundIds = await Validator.ValidateUniversalHash(wc, info);
if (foundIds != null && foundIds.Count == 1)
resultProgress?.Report(ResultEventArgs.Success($"Single match found for universal hash {sha1}"));
else if (foundIds != null && foundIds.Count != 1)
resultProgress?.Report(ResultEventArgs.Success($"Multiple matches found for universal hash {sha1}"));
else
resultProgress?.Report(ResultEventArgs.Failure(result));
resultProgress?.Report(ResultEventArgs.Failure($"No matches found for universal hash {sha1}"));
// Ensure that the hash is found
allFound = singleFound;
allFound = (foundIds != null && foundIds.Count == 1);
// If we found a match, then the disc is a match
if (singleFound && foundIds != null)
if (foundIds != null && foundIds.Count == 1)
fullyMatchedIDs = foundIds;
else
fullyMatchedIDs = [];
@@ -543,7 +554,7 @@ namespace MPF.Frontend.Tools
info.CommonDiscInfo.Layer0MouldSID = addPlaceholders ? RequiredIfExistsValue : string.Empty;
info.CommonDiscInfo.Layer1MouldSID = addPlaceholders ? RequiredIfExistsValue : string.Empty;
info.CommonDiscInfo.Layer0AdditionalMould = addPlaceholders ? RequiredIfExistsValue : string.Empty;
info.Extras!.BCA ??= (addPlaceholders ? RequiredValue : string.Empty);
info.Extras!.BCA ??= addPlaceholders ? RequiredValue : string.Empty;
break;
case MediaType.NintendoWiiOpticalDisc:
@@ -574,7 +585,7 @@ namespace MPF.Frontend.Tools
}
info.Extras!.DiscKey = addPlaceholders ? RequiredValue : string.Empty;
info.Extras.BCA = info.Extras.BCA ?? (addPlaceholders ? RequiredValue : string.Empty);
info.Extras.BCA ??= addPlaceholders ? RequiredValue : string.Empty;
break;
@@ -624,6 +635,17 @@ namespace MPF.Frontend.Tools
break;
case RedumpSystem.BDVideo:
info.CommonDiscInfo!.Category ??= DiscCategory.Video;
bool bee = PhysicalTool.GetBusEncryptionEnabled(drive);
if (bee && string.IsNullOrEmpty(info.CopyProtection!.Protection))
info.CopyProtection.Protection = "Bus encryption enabled flag set";
else if (bee)
info.CopyProtection!.Protection += "\nBus encryption enabled flag set";
else
info.CopyProtection!.Protection ??= addPlaceholders ? RequiredIfExistsValue : string.Empty;
break;
case RedumpSystem.DVDVideo:
case RedumpSystem.HDDVDVideo:
info.CommonDiscInfo!.Category ??= DiscCategory.Video;
@@ -679,25 +701,19 @@ namespace MPF.Frontend.Tools
break;
case RedumpSystem.KonamiPython2:
string? kp2Exe = PhysicalTool.GetPlayStationExecutableName(drive);
// TODO: Remove this hack when DIC supports build date output
if (isDiscImageCreator)
info.CommonDiscInfo!.EXEDateBuildDate = DiscImageCreator.GetPlayStationEXEDate($"{basePath}_volDesc.txt", drive?.GetPlayStationExecutableName());
info.CommonDiscInfo!.EXEDateBuildDate = DiscImageCreator.GetPlayStationEXEDate($"{basePath}_volDesc.txt", kp2Exe);
if (info.CommonDiscInfo!.CommentsSpecialFields!.TryGetValue(SiteCode.InternalSerialName, out string? kp2Exe) && !string.IsNullOrEmpty(kp2Exe))
info.CommonDiscInfo.Region = ProcessingTool.GetPlayStationRegion(kp2Exe);
SetCommentFieldIfNotExists(info, SiteCode.InternalSerialName, drive, PhysicalTool.GetPlayStationSerial);
info.CommonDiscInfo!.EXEDateBuildDate ??= PhysicalTool.GetFileDate(drive, kp2Exe, fixTwoDigitYear: true);
if (drive?.GetPlayStationExecutableInfo(out var kp2Serial, out Region? kp2Region, out var kp2Date) == true)
{
if (!info.CommonDiscInfo!.CommentsSpecialFields!.TryGetValue(SiteCode.InternalSerialName, out string? value) || string.IsNullOrEmpty(value))
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = kp2Serial ?? string.Empty;
info.CommonDiscInfo.Region ??= kp2Region;
info.CommonDiscInfo.EXEDateBuildDate ??= kp2Date;
}
if (string.IsNullOrEmpty(info.VersionAndEditions!.Version))
info.VersionAndEditions!.Version = drive?.GetPlayStation2Version() ?? string.Empty;
if (CommentFieldExists(info, SiteCode.InternalSerialName, out kp2Exe))
info.CommonDiscInfo!.Region = ProcessingTool.GetPlayStationRegion(kp2Exe);
SetVersionIfNotExists(info, drive, PhysicalTool.GetPlayStation2Version);
break;
case RedumpSystem.KonamiSystemGV:
@@ -717,29 +733,11 @@ namespace MPF.Frontend.Tools
break;
case RedumpSystem.MicrosoftXboxOne:
if (drive?.Name != null)
{
string xboxOneMsxcPath = Path.Combine(drive.Name, "MSXC");
if (drive != null && Directory.Exists(xboxOneMsxcPath))
{
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.Filename] = string.Join("\n",
Directory.GetFiles(xboxOneMsxcPath, "*", SearchOption.TopDirectoryOnly).Select(Path.GetFileName).ToArray());
}
}
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.Filename] = PhysicalTool.GetXboxFilenames(drive) ?? string.Empty;
break;
case RedumpSystem.MicrosoftXboxSeriesXS:
if (drive?.Name != null)
{
string xboxSeriesXMsxcPath = Path.Combine(drive.Name, "MSXC");
if (drive != null && Directory.Exists(xboxSeriesXMsxcPath))
{
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.Filename] = string.Join("\n",
Directory.GetFiles(xboxSeriesXMsxcPath, "*", SearchOption.TopDirectoryOnly).Select(Path.GetFileName).ToArray());
}
}
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.Filename] = PhysicalTool.GetXboxFilenames(drive) ?? string.Empty;
break;
case RedumpSystem.NamcoSegaNintendoTriforce:
@@ -793,83 +791,54 @@ namespace MPF.Frontend.Tools
break;
case RedumpSystem.SonyPlayStation:
string? ps1Exe = PhysicalTool.GetPlayStationExecutableName(drive);
// TODO: Remove this hack when DIC supports build date output
if (isDiscImageCreator)
info.CommonDiscInfo!.EXEDateBuildDate = DiscImageCreator.GetPlayStationEXEDate($"{basePath}_volDesc.txt", drive?.GetPlayStationExecutableName(), psx: true);
info.CommonDiscInfo!.EXEDateBuildDate = DiscImageCreator.GetPlayStationEXEDate($"{basePath}_volDesc.txt", ps1Exe, psx: true);
if (info.CommonDiscInfo!.CommentsSpecialFields!.TryGetValue(SiteCode.InternalSerialName, out string? psxExe) && !string.IsNullOrEmpty(psxExe))
info.CommonDiscInfo.Region = ProcessingTool.GetPlayStationRegion(psxExe);
SetCommentFieldIfNotExists(info, SiteCode.InternalSerialName, drive, PhysicalTool.GetPlayStationSerial);
info.CommonDiscInfo!.EXEDateBuildDate ??= PhysicalTool.GetFileDate(drive, ps1Exe, fixTwoDigitYear: true);
if (drive?.GetPlayStationExecutableInfo(out var psxSerial, out Region? psxRegion, out var psxDate) == true)
{
if (!info.CommonDiscInfo!.CommentsSpecialFields!.TryGetValue(SiteCode.InternalSerialName, out string? value) || string.IsNullOrEmpty(value))
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = psxSerial ?? string.Empty;
info.CommonDiscInfo.Region ??= psxRegion;
info.CommonDiscInfo.EXEDateBuildDate ??= psxDate;
}
if (CommentFieldExists(info, SiteCode.InternalSerialName, out ps1Exe))
info.CommonDiscInfo!.Region = ProcessingTool.GetPlayStationRegion(ps1Exe);
break;
case RedumpSystem.SonyPlayStation2:
info.CommonDiscInfo!.LanguageSelection ??= [];
string? ps2Exe = PhysicalTool.GetPlayStationExecutableName(drive);
// TODO: Remove this hack when DIC supports build date output
if (isDiscImageCreator)
info.CommonDiscInfo!.EXEDateBuildDate = DiscImageCreator.GetPlayStationEXEDate($"{basePath}_volDesc.txt", drive?.GetPlayStationExecutableName());
info.CommonDiscInfo!.EXEDateBuildDate = DiscImageCreator.GetPlayStationEXEDate($"{basePath}_volDesc.txt", ps2Exe);
if (info.CommonDiscInfo!.CommentsSpecialFields!.TryGetValue(SiteCode.InternalSerialName, out string? ps2Exe) && !string.IsNullOrEmpty(ps2Exe))
SetCommentFieldIfNotExists(info, SiteCode.InternalSerialName, drive, PhysicalTool.GetPlayStationSerial);
info.CommonDiscInfo!.EXEDateBuildDate ??= PhysicalTool.GetFileDate(drive, ps2Exe, fixTwoDigitYear: true);
if (CommentFieldExists(info, SiteCode.InternalSerialName, out ps2Exe))
info.CommonDiscInfo.Region = ProcessingTool.GetPlayStationRegion(ps2Exe);
if (drive?.GetPlayStationExecutableInfo(out var ps2Serial, out Region? ps2Region, out var ps2Date) == true)
{
if (!info.CommonDiscInfo!.CommentsSpecialFields!.TryGetValue(SiteCode.InternalSerialName, out string? value) || string.IsNullOrEmpty(value))
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = ps2Serial ?? string.Empty;
info.CommonDiscInfo.Region ??= ps2Region;
info.CommonDiscInfo.EXEDateBuildDate ??= ps2Date;
}
if (string.IsNullOrEmpty(info.VersionAndEditions!.Version))
info.VersionAndEditions!.Version = drive?.GetPlayStation2Version() ?? string.Empty;
SetVersionIfNotExists(info, drive, PhysicalTool.GetPlayStation2Version);
break;
case RedumpSystem.SonyPlayStation3:
info.Extras!.DiscKey ??= addPlaceholders ? RequiredValue : string.Empty;
info.Extras.DiscID ??= addPlaceholders ? RequiredValue : string.Empty;
if (!info.CommonDiscInfo!.CommentsSpecialFields!.TryGetValue(SiteCode.InternalSerialName, out string? ps3Serial) || string.IsNullOrEmpty(ps3Serial))
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = drive?.GetPlayStation3Serial() ?? string.Empty;
if (string.IsNullOrEmpty(info.VersionAndEditions!.Version))
info.VersionAndEditions!.Version = drive?.GetPlayStation3Version() ?? string.Empty;
if (!info.CommonDiscInfo!.CommentsSpecialFields!.TryGetValue(SiteCode.Patches, out string? ps3Firmware) || string.IsNullOrEmpty(ps3Firmware))
{
string? firmwareVersion = drive?.GetPlayStation3FirmwareVersion();
if (firmwareVersion != null)
info.CommonDiscInfo!.ContentsSpecialFields![SiteCode.Patches] = $"PS3 Firmware {firmwareVersion}";
}
SetCommentFieldIfNotExists(info, SiteCode.InternalSerialName, drive, PhysicalTool.GetPlayStation3Serial);
SetVersionIfNotExists(info, drive, PhysicalTool.GetPlayStation3Version);
SetCommentFieldIfNotExists(info, SiteCode.Patches, drive, FormatPlayStation3FirmwareVersion);
break;
case RedumpSystem.SonyPlayStation4:
if (!info.CommonDiscInfo!.CommentsSpecialFields!.TryGetValue(SiteCode.InternalSerialName, out string? ps4Serial) || string.IsNullOrEmpty(ps4Serial))
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = drive?.GetPlayStation4Serial() ?? string.Empty;
if (string.IsNullOrEmpty(info.VersionAndEditions!.Version))
info.VersionAndEditions!.Version = drive?.GetPlayStation4Version() ?? string.Empty;
SetCommentFieldIfNotExists(info, SiteCode.InternalSerialName, drive, PhysicalTool.GetPlayStation4Serial);
SetVersionIfNotExists(info, drive, PhysicalTool.GetPlayStation4Version);
break;
case RedumpSystem.SonyPlayStation5:
if (!info.CommonDiscInfo!.CommentsSpecialFields!.TryGetValue(SiteCode.InternalSerialName, out string? ps5Serial) || string.IsNullOrEmpty(ps5Serial))
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = drive?.GetPlayStation5Serial() ?? string.Empty;
if (string.IsNullOrEmpty(info.VersionAndEditions!.Version))
info.VersionAndEditions!.Version = drive?.GetPlayStation5Version() ?? string.Empty;
SetCommentFieldIfNotExists(info, SiteCode.InternalSerialName, drive, PhysicalTool.GetPlayStation5Serial);
SetVersionIfNotExists(info, drive, PhysicalTool.GetPlayStation5Version);
break;
case RedumpSystem.TomyKissSite:
@@ -886,33 +855,81 @@ namespace MPF.Frontend.Tools
}
/// <summary>
/// Helper to determine if a system requires an anti-modchip scan
/// Get a preformatted string for the PS3 firmware version, if possible
/// </summary>
private static bool SupportsAntiModchipScans(RedumpSystem? system)
private static string? FormatPlayStation3FirmwareVersion(Drive? drive)
{
return system switch
{
RedumpSystem.SonyPlayStation => true,
_ => false,
};
string? firmwareVersion = PhysicalTool.GetPlayStation3FirmwareVersion(drive);
if (string.IsNullOrEmpty(firmwareVersion))
return string.Empty;
return $"PS3 Firmware {firmwareVersion}";
}
/// <summary>
/// Helper to determine if a system requires a copy protection scan
/// Determine if a comment field exists based on key
/// </summary>
private static bool SupportsCopyProtectionScans(RedumpSystem? system)
private static bool CommentFieldExists(SubmissionInfo info, SiteCode key, out string? value)
{
return system switch
// Ensure the comments fields exist
if (info.CommonDiscInfo!.CommentsSpecialFields == null)
info.CommonDiscInfo.CommentsSpecialFields = [];
// Check if the field exists
if (!info.CommonDiscInfo.CommentsSpecialFields.TryGetValue(key, out value))
return false;
if (string.IsNullOrEmpty(value))
return false;
// The value is valid
return true;
}
/// <summary>
/// Set a comment field if it doesn't already have a value
/// </summary>
private static void SetCommentFieldIfNotExists(SubmissionInfo info, SiteCode key, Drive? drive, Func<Drive?, string?> valueFunc)
{
// If the field has a valid value, skip
if (CommentFieldExists(info, key, out _))
return;
// Set the value
info.CommonDiscInfo!.CommentsSpecialFields![key] = valueFunc(drive) ?? string.Empty;
}
/// <summary>
/// Set the version if it doesn't already have a value
/// </summary>
private static void SetVersionIfNotExists(SubmissionInfo info, Drive? drive, Func<Drive?, string?> valueFunc)
{
// If the version already exists, skip
if (!string.IsNullOrEmpty(info.VersionAndEditions!.Version))
return;
// Set the version
info.VersionAndEditions.Version = valueFunc(drive) ?? string.Empty;
}
/// <summary>
/// Reformat a protection dictionary for submission info
/// </summary>
/// <param name="oldDict">ProtectionDictionary to format</param>
/// <returns>Reformatted dictionary on success, empty on error</returns>
private static Dictionary<string, List<string>?> ReformatProtectionDictionary(ProtectionDictionary? oldDict)
{
// Null or empty protections return empty
if (oldDict == null || oldDict.Count == 0)
return [];
// Reformat each set into a List
var newDict = new Dictionary<string, List<string>?>();
foreach (string key in oldDict.Keys)
{
RedumpSystem.AppleMacintosh => true,
RedumpSystem.EnhancedCD => true,
RedumpSystem.IBMPCcompatible => true,
RedumpSystem.PalmOS => true,
RedumpSystem.PocketPC => true,
RedumpSystem.RainbowDisc => true,
RedumpSystem.SonyElectronicBook => true,
_ => false,
};
newDict[key] = [.. oldDict[key]];
}
return newDict;
}
#endregion
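A hedged usage sketch of the new helpers, mirroring the call shapes used elsewhere in this diff (info and drive are assumed to be in scope):
// Usage mirroring the PlayStation 4 case above: fill fields only when they are still empty.
SetCommentFieldIfNotExists(info, SiteCode.InternalSerialName, drive, PhysicalTool.GetPlayStation4Serial);
SetVersionIfNotExists(info, drive, PhysicalTool.GetPlayStation4Version);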

View File

@@ -445,7 +445,7 @@ namespace MPF.Frontend.ViewModels
/// Performs MPF.Check functionality
/// </summary>
/// <returns>An error message if failed, otherwise string.Empty/null</returns>
public async Task<string?> CheckDump(Func<SubmissionInfo?, (bool?, SubmissionInfo?)> processUserInfo)
public async Task<string?> CheckDump(ProcessUserInfoDelegate processUserInfo)
{
if (string.IsNullOrEmpty(InputPath))
return "Invalid Input path";

View File

@@ -68,7 +68,7 @@ namespace MPF.Frontend.ViewModels
/// <summary>
/// Function to process user information
/// </summary>
private Func<SubmissionInfo?, (bool?, SubmissionInfo?)>? _processUserInfo;
private ProcessUserInfoDelegate? _processUserInfo;
#endregion
@@ -566,7 +566,7 @@ namespace MPF.Frontend.ViewModels
public void Init(
Action<LogLevel, string> loggerAction,
Func<string, string, int, bool, bool?> displayUserMessage,
Func<SubmissionInfo?, (bool?, SubmissionInfo?)> processUserInfo)
ProcessUserInfoDelegate processUserInfo)
{
// Set the callbacks
_logger = loggerAction;
@@ -773,15 +773,13 @@ namespace MPF.Frontend.ViewModels
/// <summary>
/// Check for available updates
/// </summary>
public (bool, string, string?) CheckForUpdates()
public void CheckForUpdates(out bool different, out string message, out string? url)
{
(bool different, string message, var url) = FrontendTool.CheckForNewVersion();
FrontendTool.CheckForNewVersion(out different, out message, out url);
SecretLogLn(message);
if (url == null)
message = "An exception occurred while checking for versions, please try again later. See the log window for more details.";
return (different, message, url);
}
/// <summary>
@@ -1189,16 +1187,12 @@ namespace MPF.Frontend.ViewModels
else if (this.CurrentDrive.MarkedActive)
{
VerboseLog($"Trying to detect media type for drive {this.CurrentDrive.Name} [{this.CurrentDrive.DriveFormat}] using size and filesystem.. ");
(MediaType? detectedMediaType, var errorMessage) = this.CurrentDrive.GetMediaType(this.CurrentSystem);
// If we got an error message, post it to the log
if (errorMessage != null)
VerboseLogLn($"Message from detecting media type: {errorMessage}");
MediaType? detectedMediaType = this.CurrentDrive.GetMediaType(this.CurrentSystem);
// If we got either an error or no media, default to the current System default
if (detectedMediaType == null)
{
VerboseLogLn($"Unable to detect, defaulting to {defaultMediaType.LongName()}.");
VerboseLogLn($"Could not detect media type, defaulting to {defaultMediaType.LongName()}.");
CurrentMediaType = defaultMediaType;
}
else
@@ -1838,9 +1832,9 @@ namespace MPF.Frontend.ViewModels
/// Scan and show copy protection for the current disc
/// </summary>
#if NET40
public (string?, string?) ScanAndShowProtection()
public string? ScanAndShowProtection()
#else
public async Task<(string?, string?)> ScanAndShowProtection()
public async Task<string?> ScanAndShowProtection()
#endif
{
// Determine current environment, just in case
@@ -1848,7 +1842,10 @@ namespace MPF.Frontend.ViewModels
// If we don't have a valid drive
if (this.CurrentDrive?.Name == null)
return (null, "No valid drive found!");
{
ErrorLogLn("No valid drive found!");
return null;
}
VerboseLogLn($"Scanning for copy protection in {this.CurrentDrive.Name}");
@@ -1861,35 +1858,32 @@ namespace MPF.Frontend.ViewModels
var progress = new Progress<ProtectionProgress>();
progress.ProgressChanged += ProgressUpdated;
try
{
#if NET40
var protectionTask = ProtectionTool.RunProtectionScanOnPath(this.CurrentDrive.Name, this.Options, progress);
protectionTask.Wait();
var (protections, error) = protectionTask.Result;
var protectionTask = ProtectionTool.RunProtectionScanOnPath(this.CurrentDrive.Name, this.Options, progress);
protectionTask.Wait();
var protections = protectionTask.Result;
#else
var (protections, error) = await ProtectionTool.RunProtectionScanOnPath(this.CurrentDrive.Name, this.Options, progress);
var protections = await ProtectionTool.RunProtectionScanOnPath(this.CurrentDrive.Name, this.Options, progress);
#endif
var output = ProtectionTool.FormatProtections(protections);
// If SmartE is detected on the current disc, remove `/sf` from the flags for DIC only -- Disabled until further notice
//if (Env.InternalProgram == InternalProgram.DiscImageCreator && output.Contains("SmartE"))
//{
// ((ExecutionContexts.DiscImageCreator.ExecutionContext)Env.ExecutionContext)[ExecutionContexts.DiscImageCreator.FlagStrings.ScanFileProtect] = false;
// if (this.Options.VerboseLogging)
// this.Logger.VerboseLogLn($"SmartE detected, removing {ExecutionContexts.DiscImageCreator.FlagStrings.ScanFileProtect} from parameters");
//}
if (string.IsNullOrEmpty(error))
var output = ProtectionTool.FormatProtections(protections);
LogLn($"Detected the following protections in {this.CurrentDrive.Name}:\r\n\r\n{output}");
else
ErrorLogLn($"Path could not be scanned! Exception information:\r\n\r\n{error}");
this.Status = tempContent;
this.StartStopButtonEnabled = ShouldEnableDumpingButton();
this.MediaScanButtonEnabled = true;
this.UpdateVolumeLabelEnabled = true;
this.CopyProtectScanButtonEnabled = true;
this.Status = tempContent;
this.StartStopButtonEnabled = ShouldEnableDumpingButton();
this.MediaScanButtonEnabled = true;
this.UpdateVolumeLabelEnabled = true;
this.CopyProtectScanButtonEnabled = true;
return (output, error);
return output;
}
catch (Exception ex)
{
ErrorLogLn($"Path could not be scanned! Exception information:\r\n\r\n{ex}");
return null;
}
}
/// <summary>
@@ -1917,7 +1911,7 @@ namespace MPF.Frontend.ViewModels
{
case RedumpSystem.SonyPlayStation:
case RedumpSystem.SonyPlayStation2:
drive.GetPlayStationExecutableInfo(out string? serial, out _, out _);
string? serial = PhysicalTool.GetPlayStationSerial(drive);
volumeLabel = serial ?? "track";
break;
@@ -2206,7 +2200,7 @@ namespace MPF.Frontend.ViewModels
string outputFilename = Path.GetFileName(_environment.OutputPath);
// If a complete dump already exists
bool foundFiles = _environment.FoundAllFiles(outputDirectory, outputFilename, true);
bool foundFiles = _environment.FoundAllFiles(outputDirectory, outputFilename);
if (foundFiles && _displayUserMessage != null)
{
bool? mbresult = _displayUserMessage("Overwrite?", "A complete dump already exists! Are you sure you want to overwrite?", 2, true);

View File

@@ -119,11 +119,16 @@ namespace MPF.Frontend.ViewModels
#region UI Commands
/// <summary>
/// Test Redump login credentials
/// Get the human-readable message for a Redump login result
/// </summary>
public static async Task<(bool?, string?)> TestRedumpLogin(string username, string password)
public static string GetRedumpLoginResult(bool? success)
{
return await RedumpClient.ValidateCredentials(username, password);
return success switch
{
true => "Redump username and password accepted!",
false => "Redump username and password denied!",
null => "An error occurred validating your credentials!",
};
}
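A hedged sketch of how the new mapping pairs with the existing credential check, whose tuple return is taken from the removed line above (username and password are assumed to be in scope):
// Validate the credentials, then map the bool? outcome to a display message.
var (success, _) = await RedumpClient.ValidateCredentials(username, password);
string message = GetRedumpLoginResult(success);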
/// <summary>

View File

@@ -27,77 +27,6 @@ namespace MPF.Processors
#region BaseProcessor Implementations
/// <inheritdoc/>
public override (bool, List<string>) CheckAllOutputFilesExist(string basePath, bool preCheck)
{
var missingFiles = new List<string>();
switch (Type)
{
case MediaType.CDROM:
if (!File.Exists($"{basePath}_logs.zip") || !preCheck)
{
if (!File.Exists($"{basePath}.cicm.xml"))
missingFiles.Add($"{basePath}.cicm.xml");
if (!File.Exists($"{basePath}.ibg"))
missingFiles.Add($"{basePath}.ibg");
if (!File.Exists($"{basePath}.log"))
missingFiles.Add($"{basePath}.log");
if (!File.Exists($"{basePath}.mhddlog.bin"))
missingFiles.Add($"{basePath}.mhddlog.bin");
if (!File.Exists($"{basePath}.resume.xml"))
missingFiles.Add($"{basePath}.resume.xml");
if (!File.Exists($"{basePath}.sub.log"))
missingFiles.Add($"{basePath}.sub.log");
}
break;
case MediaType.DVD:
case MediaType.HDDVD:
case MediaType.BluRay:
if (!File.Exists($"{basePath}_logs.zip") || !preCheck)
{
if (!File.Exists($"{basePath}.cicm.xml"))
missingFiles.Add($"{basePath}.cicm.xml");
if (!File.Exists($"{basePath}.ibg"))
missingFiles.Add($"{basePath}.ibg");
if (!File.Exists($"{basePath}.log"))
missingFiles.Add($"{basePath}.log");
if (!File.Exists($"{basePath}.mhddlog.bin"))
missingFiles.Add($"{basePath}.mhddlog.bin");
if (!File.Exists($"{basePath}.resume.xml"))
missingFiles.Add($"{basePath}.resume.xml");
}
break;
default:
missingFiles.Add("Media and system combination not supported for Aaru");
break;
}
return (!missingFiles.Any(), missingFiles);
}
/// <inheritdoc/>
public override void GenerateArtifacts(SubmissionInfo info, string basePath)
{
info.Artifacts ??= [];
if (File.Exists(basePath + ".cicm.xml"))
info.Artifacts["cicm"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile(basePath + ".cicm.xml")) ?? string.Empty;
if (File.Exists(basePath + ".ibg"))
info.Artifacts["ibg"] = Convert.ToBase64String(File.ReadAllBytes(basePath + ".ibg"));
if (File.Exists(basePath + ".log"))
info.Artifacts["log"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile(basePath + ".log")) ?? string.Empty;
if (File.Exists(basePath + ".mhddlog.bin"))
info.Artifacts["mhddlog_bin"] = Convert.ToBase64String(File.ReadAllBytes(basePath + ".mhddlog.bin"));
if (File.Exists(basePath + ".resume.xml"))
info.Artifacts["resume"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile(basePath + ".resume.xml")) ?? string.Empty;
if (File.Exists(basePath + ".sub.log"))
info.Artifacts["sub_log"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile(basePath + ".sub.log")) ?? string.Empty;
}
/// <inheritdoc/>
public override void GenerateSubmissionInfo(SubmissionInfo info, string basePath, bool redumpCompat)
{
@@ -257,49 +186,74 @@ namespace MPF.Processors
}
/// <inheritdoc/>
public override List<string> GetLogFilePaths(string basePath)
internal override List<OutputFile> GetOutputFiles(string? baseDirectory, string baseFilename)
{
var logFiles = new List<string>();
switch (Type)
{
case MediaType.CDROM:
if (File.Exists($"{basePath}.cicm.xml"))
logFiles.Add($"{basePath}.cicm.xml");
if (File.Exists($"{basePath}.error.log"))
logFiles.Add($"{basePath}.error.log");
if (File.Exists($"{basePath}.ibg"))
logFiles.Add($"{basePath}.ibg");
if (File.Exists($"{basePath}.log"))
logFiles.Add($"{basePath}.log");
if (File.Exists($"{basePath}.mhddlog.bin"))
logFiles.Add($"{basePath}.mhddlog.bin");
if (File.Exists($"{basePath}.resume.xml"))
logFiles.Add($"{basePath}.resume.xml");
if (File.Exists($"{basePath}.sub.log"))
logFiles.Add($"{basePath}.sub.log");
break;
return [
new($"{baseFilename}.aaruf", OutputFileFlags.Required),
new($"{baseFilename}.cicm.xml", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"cicm"),
new($"{baseFilename}.error.log", OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"error_log"),
new($"{baseFilename}.ibg", OutputFileFlags.Required
| OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"ibg"),
new($"{baseFilename}.log", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"log"),
new($"{baseFilename}.mhddlog.bin", OutputFileFlags.Required
| OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"mhddlog"),
new($"{baseFilename}.resume.xml", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"resume"),
new($"{baseFilename}.sub.log", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"sub_log"),
];
case MediaType.DVD:
case MediaType.HDDVD:
case MediaType.BluRay:
if (File.Exists($"{basePath}.cicm.xml"))
logFiles.Add($"{basePath}.cicm.xml");
if (File.Exists($"{basePath}.error.log"))
logFiles.Add($"{basePath}.error.log");
if (File.Exists($"{basePath}.ibg"))
logFiles.Add($"{basePath}.ibg");
if (File.Exists($"{basePath}.log"))
logFiles.Add($"{basePath}.log");
if (File.Exists($"{basePath}.mhddlog.bin"))
logFiles.Add($"{basePath}.mhddlog.bin");
if (File.Exists($"{basePath}.resume.xml"))
logFiles.Add($"{basePath}.resume.xml");
break;
return [
new($"{baseFilename}.aaruf", OutputFileFlags.Required),
new($"{baseFilename}.cicm.xml", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"cicm"),
new($"{baseFilename}.error.log", OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"error_log"),
new($"{baseFilename}.ibg", OutputFileFlags.Required
| OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"ibg"),
new($"{baseFilename}.log", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"log"),
new($"{baseFilename}.mhddlog.bin", OutputFileFlags.Required
| OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"mhddlog"),
new($"{baseFilename}.resume.xml", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"resume"),
];
}
return logFiles;
return [];
}
#endregion
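A minimal sketch of the declarative entry shape used above, assuming the single-filename OutputFile constructor shown in this diff:
// A required image plus a required, zippable text artifact keyed as "log"; filenames are placeholders.
var files = new List<OutputFile>
{
    new("disc.aaruf", OutputFileFlags.Required),
    new("disc.log", OutputFileFlags.Required | OutputFileFlags.Artifact | OutputFileFlags.Zippable, "log"),
};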

View File

@@ -44,21 +44,6 @@ namespace MPF.Processors
#region Abstract Methods
/// <summary>
/// Validate if all required output files exist
/// </summary>
/// <param name="basePath">Base filename and path to use for checking</param>
/// <param name="preCheck">True if this is a check done before a dump, false if done after</param>
/// <returns>Tuple of true if all required files exist, false otherwise and a list representing missing files</returns>
public abstract (bool, List<string>) CheckAllOutputFilesExist(string basePath, bool preCheck);
/// <summary>
/// Generate artifacts and add to the SubmissionInfo
/// </summary>
/// <param name="submissionInfo">Base submission info to fill in specifics for</param>
/// <param name="basePath">Base filename and path to use for checking</param>
public abstract void GenerateArtifacts(SubmissionInfo submissionInfo, string basePath);
/// <summary>
/// Generate a SubmissionInfo for the output files
/// </summary>
@@ -67,27 +52,17 @@ namespace MPF.Processors
/// <param name="redumpCompat">Determines if outputs are processed according to Redump specifications</param>
public abstract void GenerateSubmissionInfo(SubmissionInfo submissionInfo, string basePath, bool redumpCompat);
/// <summary>
/// Generate a list of all log files generated
/// <summary>
/// Generate a list of all output files generated
/// </summary>
/// <param name="basePath">Base filename and path to use for checking</param>
/// <returns>List of all log file paths, empty otherwise</returns>
public abstract List<string> GetLogFilePaths(string basePath);
/// <param name="baseDirectory">Base filename and path to use for checking</param>
/// <param name="baseFilename">Base filename and path to use for checking</param>
/// <returns>List of all output files, empty otherwise</returns>
internal abstract List<OutputFile> GetOutputFiles(string? baseDirectory, string baseFilename);
#endregion
#region Virtual Methods
/// <summary>
/// Generate a list of all deleteable files generated
/// </summary>
/// <param name="basePath">Base filename and path to use for checking</param>
/// <returns>List of all deleteable file paths, empty otherwise</returns>
public virtual List<string> GetDeleteableFilePaths(string basePath) => [];
#endregion
#region Shared Methods
#region Output Files
/// <summary>
/// Compress log files to save space
@@ -97,12 +72,12 @@ namespace MPF.Processors
/// <param name="outputFilename">Output filename to use as the base path</param>
/// <param name="processor">Processor object representing how to process the outputs</param>
/// <returns>True if the process succeeded, false otherwise</returns>
public (bool, string) CompressLogFiles(string? outputDirectory, string? filenameSuffix, string outputFilename)
public bool CompressLogFiles(string? outputDirectory, string? filenameSuffix, string outputFilename, out string status)
{
#if NET20 || NET35 || NET40
return (false, "Log compression is not available for this framework version");
status = "Log compression is not available for this framework version";
return false;
#else
// Prepare the necessary paths
outputFilename = Path.GetFileNameWithoutExtension(outputFilename);
string combinedBase;
@@ -111,17 +86,19 @@ namespace MPF.Processors
else
combinedBase = Path.Combine(outputDirectory, outputFilename);
string archiveName = combinedBase + "_logs.zip";
// Generate the archive filename
string archiveName = $"{combinedBase}_logs.zip";
// Get the list of log files from the parameters object
var files = GetLogFilePaths(combinedBase);
// Get the lists of zippable files
var zippableFiles = GetZippableFilePaths(combinedBase);
var generatedFiles = GetGeneratedFilePaths(outputDirectory, filenameSuffix);
// Add on generated log files if they exist
var mpfFiles = GetGeneratedFilePaths(outputDirectory, filenameSuffix);
files.AddRange(mpfFiles);
if (!files.Any())
return (true, "No files to compress!");
// Don't create an archive if there are no paths
if (!zippableFiles.Any() && !generatedFiles.Any())
{
status = "No files to compress!";
return true;
}
// If the file already exists, we want to delete the old one
try
@@ -131,7 +108,8 @@ namespace MPF.Processors
}
catch
{
return (false, "Could not delete old archive!");
status = "Could not delete old archive!";
return false;
}
// Add the log files to the archive and delete the uncompressed file after
@@ -139,39 +117,17 @@ namespace MPF.Processors
try
{
zf = ZipFile.Open(archiveName, ZipArchiveMode.Create);
foreach (string file in files)
{
if (string.IsNullOrEmpty(outputDirectory))
{
zf.CreateEntryFromFile(file, file, CompressionLevel.Optimal);
}
else
{
string entryName = file[outputDirectory!.Length..].TrimStart(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar);
#if NETFRAMEWORK || NETCOREAPP3_1 || NET5_0
zf.CreateEntryFromFile(file, entryName, CompressionLevel.Optimal);
#else
zf.CreateEntryFromFile(file, entryName, CompressionLevel.SmallestSize);
#endif
}
_ = AddToArchive(zf, zippableFiles, outputDirectory, true);
_ = AddToArchive(zf, generatedFiles, outputDirectory, false);
// If the file is MPF-specific, don't delete
if (mpfFiles.Contains(file))
continue;
try
{
File.Delete(file);
}
catch { }
}
return (true, "Compression complete!");
status = "Compression complete!";
return true;
}
catch (Exception ex)
{
return (false, $"Compression could not complete: {ex}");
status = $"Compression could not complete: {ex}";
return false;
}
finally
{
@@ -187,7 +143,7 @@ namespace MPF.Processors
/// <param name="outputFilename">Output filename to use as the base path</param>
/// <param name="processor">Processor object representing how to process the outputs</param>
/// <returns>True if the process succeeded, false otherwise</returns>
public (bool, string) DeleteUnnecessaryFiles(string? outputDirectory, string outputFilename)
public bool DeleteUnnecessaryFiles(string? outputDirectory, string outputFilename, out string status)
{
// Prepare the necessary paths
outputFilename = Path.GetFileNameWithoutExtension(outputFilename);
@@ -201,26 +157,23 @@ namespace MPF.Processors
var files = GetDeleteableFilePaths(combinedBase);
if (!files.Any())
return (true, "No files to delete!");
{
status = "No files to delete!";
return true;
}
// Attempt to delete all of the files
try
foreach (string file in files)
{
foreach (string file in files)
try
{
try
{
File.Delete(file);
}
catch { }
File.Delete(file);
}
catch { }
}
return (true, "Deletion complete!");
}
catch (Exception ex)
{
return (false, $"Deletion could not complete: {ex}");
}
status = "Deletion complete!";
return true;
}
/// <summary>
@@ -229,11 +182,10 @@ namespace MPF.Processors
/// <param name="outputDirectory">Output folder to write to</param>
/// <param name="outputFilename">Output filename to use as the base path</param>
/// <param name="processor">Processor object representing how to process the outputs</param>
/// <param name="preCheck">True if this is a check done before a dump, false if done after</param>
/// <returns>Tuple of true if all required files exist, false otherwise and a list representing missing files</returns>
public (bool, List<string>) FoundAllFiles(string? outputDirectory, string outputFilename, bool preCheck)
/// <returns>A list representing missing files, empty if none</returns>
public List<string> FoundAllFiles(string? outputDirectory, string outputFilename)
{
// First, sanitized the output filename to strip off any potential extension
// Sanitize the output filename to strip off any potential extension
outputFilename = Path.GetFileNameWithoutExtension(outputFilename);
// Then get the base path for all checking
@@ -244,9 +196,366 @@ namespace MPF.Processors
basePath = Path.Combine(outputDirectory, outputFilename);
// Finally, let the parameters say if all files exist
return CheckAllOutputFilesExist(basePath, preCheck);
return CheckRequiredFiles(basePath);
}
/// <summary>
/// Generate artifacts and return them as a dictionary
/// </summary>
/// <param name="basePath">Base filename and path to use for checking</param>
/// <returns>Dictionary of artifact keys to Base64-encoded values, if possible</returns>
public Dictionary<string, string> GenerateArtifacts(string basePath)
{
// Split the base path for matching
string baseDirectory = Path.GetDirectoryName(basePath) ?? string.Empty;
string baseFilename = Path.GetFileNameWithoutExtension(basePath);
// Get the list of output files
var outputFiles = GetOutputFiles(baseDirectory, baseFilename);
if (outputFiles.Count == 0)
return [];
// Create the artifacts dictionary
var artifacts = new Dictionary<string, string>();
// Only try to create artifacts for files that exist
foreach (var outputFile in outputFiles)
{
// Skip non-artifact files
if (!outputFile.IsArtifact || outputFile.ArtifactKey == null)
continue;
// Skip non-existent files
foreach (string filename in outputFile.Filenames)
{
string possibleFile = Path.Combine(baseDirectory, filename);
if (!File.Exists(possibleFile))
continue;
// Get binary artifacts as a byte array
if (outputFile.IsBinaryArtifact)
{
byte[] data = File.ReadAllBytes(possibleFile);
string str = Convert.ToBase64String(data);
artifacts.Add(outputFile.ArtifactKey, str);
}
else
{
string? data = ProcessingTool.GetFullFile(possibleFile);
string str = ProcessingTool.GetBase64(data) ?? string.Empty;
artifacts.Add(outputFile.ArtifactKey, str);
}
break;
}
}
return artifacts;
}
#if NET452_OR_GREATER || NETCOREAPP
/// <summary>
/// Try to add a set of files to an existing archive
/// </summary>
/// <param name="archive">Archive to add the file to</param>
/// <param name="files">Full path to a set of existing files</param>
/// <param name="outputDirectory">Directory that the existing files live in</param>
/// <param name="delete">Indicates if the files should be deleted after adding</param>
/// <returns>True if all files were added successfully, false otherwise</returns>
private static bool AddToArchive(ZipArchive archive, List<string> files, string? outputDirectory, bool delete)
{
// An empty list means success
if (files.Count == 0)
return true;
// Loop through and add all files
bool allAdded = true;
foreach (string file in files)
{
allAdded &= AddToArchive(archive, file, outputDirectory, delete);
}
return allAdded;
}
/// <summary>
/// Try to add a file to an existing archive
/// </summary>
/// <param name="archive">Archive to add the file to</param>
/// <param name="file">Full path to an existing file</param>
/// <param name="outputDirectory">Directory that the existing file lives in</param>
/// <param name="delete">Indicates if the file should be deleted after adding</param>
/// <returns>True if the file was added successfully, false otherwise</returns>
private static bool AddToArchive(ZipArchive archive, string file, string? outputDirectory, bool delete)
{
// Check if the file exists
if (!File.Exists(file))
return false;
// Get the entry name from the file
string entryName = file;
if (!string.IsNullOrEmpty(outputDirectory))
entryName = entryName.Substring(outputDirectory!.Length);
// Ensure the entry is formatted correctly
entryName = entryName.TrimStart(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar);
// Create and add the entry
try
{
#if NETFRAMEWORK || NETCOREAPP3_1 || NET5_0
archive.CreateEntryFromFile(file, entryName, CompressionLevel.Optimal);
#else
archive.CreateEntryFromFile(file, entryName, CompressionLevel.SmallestSize);
#endif
}
catch
{
return false;
}
// Try to delete the file if requested
if (delete)
{
try { File.Delete(file); } catch { }
}
return true;
}
#endif
/// <summary>
/// Validate if all required output files exist
/// </summary>
/// <param name="basePath">Base filename and path to use for checking</param>
/// <returns>A list representing missing files, empty if none</returns>
private List<string> CheckRequiredFiles(string basePath)
{
// Split the base path for matching
string baseDirectory = Path.GetDirectoryName(basePath) ?? string.Empty;
string baseFilename = Path.GetFileNameWithoutExtension(basePath);
// Get the list of output files
var outputFiles = GetOutputFiles(baseDirectory, baseFilename);
if (outputFiles.Count == 0)
return ["Media and system combination not supported"];
// Check for the log file
bool logArchiveExists = false;
#if NET452_OR_GREATER || NETCOREAPP
ZipArchive? logArchive = null;
#endif
if (File.Exists($"{basePath}_logs.zip"))
{
logArchiveExists = true;
#if NET452_OR_GREATER || NETCOREAPP
try
{
// Try to open the archive
logArchive = ZipFile.OpenRead($"{basePath}_logs.zip");
}
catch
{
logArchiveExists = false;
}
#endif
}
// Get a list of all missing required files
var missingFiles = new List<string>();
foreach (var outputFile in outputFiles)
{
// Only check required files
if (!outputFile.IsRequired)
continue;
// Use the built-in existence function
if (outputFile.Exists(baseDirectory))
continue;
// If the log archive doesn't exist
if (!logArchiveExists)
{
missingFiles.Add(outputFile.Filenames[0]);
continue;
}
#if NET20 || NET35 || NET40
// Assume the zipfile has the file in it
continue;
#else
// Check the log archive
if (outputFile.Exists(logArchive))
continue;
// Add the file to the missing list
missingFiles.Add(outputFile.Filenames[0]);
#endif
}
return missingFiles;
}
/// <summary>
/// Generate a list of all deleteable filenames
/// </summary>
/// <param name="basePath">Base filename and path to use for checking</param>
/// <returns>List of all deleteable filenames, empty otherwise</returns>
private List<string> GetDeleteableFilenames(string basePath)
{
// Split the base path for matching
string baseDirectory = Path.GetDirectoryName(basePath) ?? string.Empty;
string baseFilename = Path.GetFileNameWithoutExtension(basePath);
// Get the list of output files
var outputFiles = GetOutputFiles(baseDirectory, baseFilename);
if (outputFiles.Count == 0)
return [];
// Filter down to deleteable files
var deleteableFiles = outputFiles.Where(of => of.IsDeleteable);
return deleteableFiles.SelectMany(of => of.Filenames).ToList();
}
/// <summary>
/// Generate a list of all deleteable file paths
/// </summary>
/// <param name="basePath">Base filename and path to use for checking</param>
/// <returns>List of all deleteable file paths, empty otherwise</returns>
private List<string> GetDeleteableFilePaths(string basePath)
{
// Split the base path for matching
string baseDirectory = Path.GetDirectoryName(basePath) ?? string.Empty;
// Get the list of deleteable files
var deleteableFilenames = GetDeleteableFilenames(basePath);
if (deleteableFilenames.Count == 0)
return [];
// Return only files that exist
var deleteableFiles = new List<string>();
foreach (var filename in deleteableFilenames)
{
// Skip non-existent files
string possiblePath = Path.Combine(baseDirectory, filename);
if (!File.Exists(possiblePath))
continue;
deleteableFiles.Add(possiblePath);
}
return deleteableFiles;
}
/// <summary>
/// Generate a list of all MPF-generated filenames
/// </summary>
/// <param name="filenameSuffix">Optional suffix to append to the filename</param>
/// <returns>List of all MPF-generated filenames, empty otherwise</returns>
private static List<string> GetGeneratedFilenames(string? filenameSuffix)
{
// Set the base file path names
const string submissionInfoBase = "!submissionInfo";
const string protectionInfoBase = "!protectionInfo";
// Ensure the filename suffix is formatted correctly
filenameSuffix = string.IsNullOrEmpty(filenameSuffix) ? string.Empty : $"_{filenameSuffix}";
// Define the output filenames
return [
$"{protectionInfoBase}{filenameSuffix}.txt",
$"{submissionInfoBase}{filenameSuffix}.json",
$"{submissionInfoBase}{filenameSuffix}.json.gz",
$"{submissionInfoBase}{filenameSuffix}.txt",
];
}
/// <summary>
/// Generate a list of all MPF-specific log files generated
/// </summary>
/// <param name="basePath">Base directory to use for checking</param>
/// <param name="filenameSuffix">Optional suffix to append to the filename</param>
/// <returns>List of all log file paths, empty otherwise</returns>
private static List<string> GetGeneratedFilePaths(string? baseDirectory, string? filenameSuffix)
{
// Get the list of generated files
var generatedFilenames = GetGeneratedFilenames(filenameSuffix);
if (generatedFilenames.Count == 0)
return [];
// Ensure the output directory
baseDirectory ??= string.Empty;
// Return only files that exist
var generatedFiles = new List<string>();
foreach (var filename in generatedFilenames)
{
// Skip non-existent files
string possiblePath = Path.Combine(baseDirectory, filename);
if (!File.Exists(possiblePath))
continue;
generatedFiles.Add(possiblePath);
}
return generatedFiles;
}
/// <summary>
/// Generate a list of all zippable filenames
/// </summary>
/// <param name="basePath">Base filename and path to use for checking</param>
/// <returns>List of all zippable filenames, empty otherwise</returns>
private List<string> GetZippableFilenames(string basePath)
{
// Split the base path for matching
string baseDirectory = Path.GetDirectoryName(basePath) ?? string.Empty;
string baseFilename = Path.GetFileNameWithoutExtension(basePath);
// Get the list of output files
var outputFiles = GetOutputFiles(baseDirectory, baseFilename);
if (outputFiles.Count == 0)
return [];
// Filter down to zippable files
var zippableFiles = outputFiles.Where(of => of.IsZippable);
return zippableFiles.SelectMany(of => of.Filenames).ToList();
}
/// <summary>
/// Generate a list of all zippable file paths
/// </summary>
/// <param name="basePath">Base filename and path to use for checking</param>
/// <returns>List of all zippable file paths, empty otherwise</returns>
private List<string> GetZippableFilePaths(string basePath)
{
// Split the base path for matching
string baseDirectory = Path.GetDirectoryName(basePath) ?? string.Empty;
// Get the list of zippable files
var zippableFilenames = GetZippableFilenames(basePath);
if (zippableFilenames.Count == 0)
return [];
// Return only files that exist
var zippableFiles = new List<string>();
foreach (var filename in zippableFilenames)
{
// Skip non-existent files
string possiblePath = Path.Combine(baseDirectory, filename);
if (!File.Exists(possiblePath))
continue;
zippableFiles.Add(possiblePath);
}
return zippableFiles;
}
#endregion
#region Shared Methods
/// <summary>
/// Get the hex contents of the PIC file
/// </summary>
@@ -334,64 +643,6 @@ namespace MPF.Processors
}
}
/// <summary>
/// Generate a list of all MPF-specific log files generated
/// </summary>
/// <param name="outputDirectory">Output folder to write to</param>
/// <param name="filenameSuffix">Optional suffix to append to the filename</param>
/// <returns>List of all log file paths, empty otherwise</returns>
private static List<string> GetGeneratedFilePaths(string? outputDirectory, string? filenameSuffix)
{
var files = new List<string>();
if (string.IsNullOrEmpty(outputDirectory) && string.IsNullOrEmpty(filenameSuffix))
{
if (File.Exists("!submissionInfo.txt"))
files.Add("!submissionInfo.txt");
if (File.Exists("!submissionInfo.json"))
files.Add("!submissionInfo.json");
if (File.Exists("!submissionInfo.json.gz"))
files.Add("!submissionInfo.json.gz");
if (File.Exists("!protectionInfo.txt"))
files.Add("!protectionInfo.txt");
}
else if (string.IsNullOrEmpty(outputDirectory) && !string.IsNullOrEmpty(filenameSuffix))
{
if (File.Exists($"!submissionInfo_{filenameSuffix}.txt"))
files.Add($"!submissionInfo_{filenameSuffix}.txt");
if (File.Exists($"!submissionInfo_{filenameSuffix}.json"))
files.Add($"!submissionInfo_{filenameSuffix}.json");
if (File.Exists($"!submissionInfo_{filenameSuffix}.json.gz"))
files.Add($"!submissionInfo_{filenameSuffix}.json.gz");
if (File.Exists($"!protectionInfo_{filenameSuffix}.txt"))
files.Add($"!protectionInfo_{filenameSuffix}.txt");
}
else if (!string.IsNullOrEmpty(outputDirectory) && string.IsNullOrEmpty(filenameSuffix))
{
if (File.Exists(Path.Combine(outputDirectory, "!submissionInfo.txt")))
files.Add(Path.Combine(outputDirectory, "!submissionInfo.txt"));
if (File.Exists(Path.Combine(outputDirectory, "!submissionInfo.json")))
files.Add(Path.Combine(outputDirectory, "!submissionInfo.json"));
if (File.Exists(Path.Combine(outputDirectory, "!submissionInfo.json.gz")))
files.Add(Path.Combine(outputDirectory, "!submissionInfo.json.gz"));
if (File.Exists(Path.Combine(outputDirectory, "!protectionInfo.txt")))
files.Add(Path.Combine(outputDirectory, "!protectionInfo.txt"));
}
else if (!string.IsNullOrEmpty(outputDirectory) && !string.IsNullOrEmpty(filenameSuffix))
{
if (File.Exists(Path.Combine(outputDirectory, $"!submissionInfo_{filenameSuffix}.txt")))
files.Add(Path.Combine(outputDirectory, $"!submissionInfo_{filenameSuffix}.txt"));
if (File.Exists(Path.Combine(outputDirectory, $"!submissionInfo_{filenameSuffix}.json")))
files.Add(Path.Combine(outputDirectory, $"!submissionInfo_{filenameSuffix}.json"));
if (File.Exists(Path.Combine(outputDirectory, $"!submissionInfo_{filenameSuffix}.json.gz")))
files.Add(Path.Combine(outputDirectory, $"!submissionInfo_{filenameSuffix}.json.gz"));
if (File.Exists(Path.Combine(outputDirectory, $"!protectionInfo_{filenameSuffix}.txt")))
files.Add(Path.Combine(outputDirectory, $"!protectionInfo_{filenameSuffix}.txt"));
}
return files;
}
#endregion
}
}
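Putting the reworked BaseProcessor surface together, a hedged sketch of the post-dump flow under the new signatures (processor, outputDirectory, and outputFilename are assumed to be in scope):
// Check for missing required files, then compress and clean up, using the list/out-parameter forms from this diff.
List<string> missing = processor.FoundAllFiles(outputDirectory, outputFilename);
if (missing.Count > 0)
    Console.WriteLine($"Missing files: {string.Join(", ", missing)}");

if (!processor.CompressLogFiles(outputDirectory, null, outputFilename, out string status))
    Console.WriteLine(status);

processor.DeleteUnnecessaryFiles(outputDirectory, outputFilename, out status);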

View File

@@ -1,6 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using SabreTools.Hashing;
using SabreTools.Models.Logiqx;
@@ -19,44 +18,6 @@ namespace MPF.Processors
#region BaseProcessor Implementations
/// <inheritdoc/>
public override (bool, List<string>) CheckAllOutputFilesExist(string basePath, bool preCheck)
{
var missingFiles = new List<string>();
switch (Type)
{
case MediaType.DVD: // Only added here to help users; not strictly correct
case MediaType.NintendoGameCubeGameDisc:
case MediaType.NintendoWiiOpticalDisc:
if (!File.Exists($"{basePath}_logs.zip") || !preCheck)
{
if (!File.Exists($"{basePath}-dumpinfo.txt"))
missingFiles.Add($"{basePath}-dumpinfo.txt");
if (!File.Exists($"{basePath}.bca"))
missingFiles.Add($"{basePath}.bca");
}
break;
default:
missingFiles.Add("Media and system combination not supported for CleanRip");
break;
}
return (!missingFiles.Any(), missingFiles);
}
/// <inheritdoc/>
public override void GenerateArtifacts(SubmissionInfo info, string basePath)
{
info.Artifacts ??= [];
if (File.Exists(basePath + ".bca"))
info.Artifacts["bca"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile(basePath + ".bca", binary: true)) ?? string.Empty;
if (File.Exists(basePath + "-dumpinfo.txt"))
info.Artifacts["dumpinfo"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile(basePath + "-dumpinfo.txt")) ?? string.Empty;
}
/// <inheritdoc/>
public override void GenerateSubmissionInfo(SubmissionInfo info, string basePath, bool redumpCompat)
{
@@ -105,23 +66,28 @@ namespace MPF.Processors
}
/// <inheritdoc/>
public override List<string> GetLogFilePaths(string basePath)
internal override List<OutputFile> GetOutputFiles(string? baseDirectory, string baseFilename)
{
var logFiles = new List<string>();
switch (Type)
{
case MediaType.DVD: // Only added here to help users; not strictly correct
case MediaType.NintendoGameCubeGameDisc:
case MediaType.NintendoWiiOpticalDisc:
if (File.Exists($"{basePath}-dumpinfo.txt"))
logFiles.Add($"{basePath}-dumpinfo.txt");
if (File.Exists($"{basePath}.bca"))
logFiles.Add($"{basePath}.bca");
return [
new($"{baseFilename}.bca", OutputFileFlags.Required
| OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"bca"),
new($"{baseFilename}.iso", OutputFileFlags.Required),
break;
new($"{baseFilename}-dumpinfo.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"dumpinfo"),
];
}
return logFiles;
return [];
}
#endregion
@@ -207,7 +173,9 @@ namespace MPF.Processors
if (hex == null)
return null;
return Regex.Replace(hex, ".{32}", "$0\n");
// First separate into 32-character lines, then space into blocks of 4 hex digits (2 bytes)
string bca = Regex.Replace(hex, ".{32}", "$0\n");
return Regex.Replace(bca, "[0-9a-fA-F]{4}", "$0 ");
}
catch
{

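As a point of reference for the two-pass Regex.Replace change above, here is a minimal, self-contained sketch; the sample hex string is illustrative, not taken from a real BCA:

using System;
using System.Text.RegularExpressions;

class BcaFormatSketch
{
    static void Main()
    {
        // 64 hex digits standing in for a CleanRip BCA dump (illustrative only)
        string hex = "000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F";

        // First pass: break into 32-character (16-byte) lines
        string lines = Regex.Replace(hex, ".{32}", "$0\n");
        // Second pass: space each 4-hex-digit (2-byte) block
        string formatted = Regex.Replace(lines, "[0-9a-fA-F]{4}", "$0 ");

        Console.Write(formatted);
        // 0001 0203 0405 0607 0809 0A0B 0C0D 0E0F
        // 1011 1213 1415 1617 1819 1A1B 1C1D 1E1F
        // (each line keeps a trailing space from the replacement)
    }
}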
View File

@@ -0,0 +1,92 @@
using System;
using System.IO;
#if NET452_OR_GREATER || NETCOREAPP
using System.IO.Compression;
#endif
namespace MPF.Processors
{
/// <summary>
/// Represents a single output file with custom detection rules
/// </summary>
internal class CustomOutputFile : OutputFile
{
/// <summary>
/// Optional func for determining if a file exists
/// </summary>
private readonly Func<string, bool> _existsFunc;
/// <summary>
/// Create a CustomOutputFile with a single filename
/// </summary>
public CustomOutputFile(string filename, OutputFileFlags flags, Func<string, bool> existsFunc)
: base([filename], flags)
{
_existsFunc = existsFunc;
}
/// <summary>
/// Create a CustomOutputFile with a single filename
/// </summary>
public CustomOutputFile(string filename, OutputFileFlags flags, string artifactKey, Func<string, bool> existsFunc)
: base([filename], flags, artifactKey)
{
_existsFunc = existsFunc;
}
/// <summary>
/// Create a CustomOutputFile with a set of filenames
/// </summary>
public CustomOutputFile(string[] filenames, OutputFileFlags flags, Func<string, bool> existsFunc)
: base(filenames, flags)
{
_existsFunc = existsFunc;
}
/// <summary>
/// Create a CustomOutputFile with a set of filenames
/// </summary>
public CustomOutputFile(string[] filenames, OutputFileFlags flags, string artifactKey, Func<string, bool> existsFunc)
: base(filenames, flags, artifactKey)
{
_existsFunc = existsFunc;
}
/// <inheritdoc/>
public override bool Exists(string baseDirectory)
{
// Ensure the directory exists
if (!Directory.Exists(baseDirectory))
return false;
foreach (string filename in Filenames)
{
// Check for invalid filenames
if (string.IsNullOrEmpty(filename))
continue;
try
{
string possibleFile = Path.Combine(baseDirectory, filename);
if (_existsFunc(possibleFile))
return true;
}
catch { }
}
return false;
}
#if NET452_OR_GREATER || NETCOREAPP
/// <summary>
/// Indicates if an output file exists in an archive
/// </summary>
/// <param name="archive">Zip archive to check in</param>
public override bool Exists(ZipArchive? archive)
{
// Files aren't extracted so this check can't be done
return false;
}
#endif
}
}
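A short usage sketch for the class above; the filename, flags, and size check are hypothetical, and since the type is internal this would live alongside the processors (e.g. in a test):

using System.IO;

// Treat the log as present only when it exists and is non-empty
var discLog = new CustomOutputFile("disc.log",
    OutputFileFlags.Required | OutputFileFlags.Artifact | OutputFileFlags.Zippable,
    "disc_log",
    path => File.Exists(path) && new FileInfo(path).Length > 0);

bool present = discLog.Exists(@"C:\dumps"); // true only for a non-empty C:\dumps\disc.log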

View File

@@ -8,6 +8,52 @@ using SabreTools.Models.Logiqx;
using SabreTools.RedumpLib;
using SabreTools.RedumpLib.Data;
/*
If there are no external helper programs, such as the error checking tool, DIC
outputs a slightly different set of files. This reduced set needs to be documented
so that special use cases, such as self-built versions of DIC or removed helper
programs, can be detected to the best of our ability. Below is the list of files
that are generated in that case:
.bin
.c2
.ccd
.cue
.img/.imgtmp
.scm/.scmtmp
.sub/.subtmp
_cmd.txt (formerly)
_img.cue
This list needs to be translated into the minimum viable set of information
so that things like error checking can be passed back as a flag or by some
similar mechanism.
Here are some notes about the various output files and what they represent:
- bin - Final split output disc image (CD/GD only)
- c2 - Represents each byte per sector as one bit; 0 means no error, 1 means error
- c2Error - Human-readable version of `c2`; only errors are printed
- ccd - CloneCD control file referencing the `img` file
- cmd - Represents the commandline that was run
- cue - CDRWIN cuesheet referencing the `bin` file(s)
- dat - Logiqx datfile referencing the `bin` file(s)
- disc - Disc metadata and information
- drive - Drive metadata and information
- img - CloneCD output disc image (CD/GD only)
- img.cue - CDRWIN cuesheet referencing the `img` file
- img_EdcEcc - ECC check output as run on the `img` file
- iso - Final output disc image (DVD/BD only)
- mainError - Read, drive, or system errors
- mainInfo - ISOBuster-formatted sector information
- scm - Scrambled disc image
- sub - Binary subchannel data as read from the disc
- subError - Subchannel read errors
- subInfo - Subchannel informational messages
- subIntention - Subchannel intentional error information
- subReadable - Human-readable version of `sub`
- toc - Binary representation of the table of contents
- volDesc - Volume descriptor information
*/
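One concrete consequence of the `c2` encoding noted above (one bit per sector byte, 1 meaning an error) is that an error count can be recovered by counting set bits. A minimal sketch under that assumption, not MPF's own parsing code:

using System.IO;

// Count flagged bytes in a DIC .c2 file; assumes the file is small enough to read whole
static long CountC2Errors(string c2Path)
{
    long errors = 0;
    foreach (byte b in File.ReadAllBytes(c2Path))
        for (int bit = 0; bit < 8; bit++)
            if ((b & (1 << bit)) != 0)
                errors++;
    return errors;
}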
namespace MPF.Processors
{
/// <summary>
@@ -21,270 +67,6 @@ namespace MPF.Processors
#region BaseProcessor Implementations
/// <inheritdoc/>
public override (bool, List<string>) CheckAllOutputFilesExist(string basePath, bool preCheck)
{
/*
If there are no external programs, such as error checking, etc., DIC outputs
a slightly different set of files. This reduced set needs to be documented in
order for special use cases, such as self-built versions of DIC or removed
helper programs, can be detected to the best of our ability. Below is the list
of files that are generated in that case:
.bin
.c2
.ccd
.cue
.img/.imgtmp
.scm/.scmtmp
.sub/.subtmp
_cmd.txt (formerly)
_img.cue
This list needs to be translated into the minimum viable set of information
such that things like error checking can be passed back as a flag, or some
similar method.
Here are some notes about the various output files and what they represent:
- bin - Final split output disc image (CD/GD only)
- c2 - Represents each byte per sector as one bit; 0 means no error, 1 means error
- c2Error - Human-readable version of `c2`; only errors are printed
- ccd - CloneCD control file referencing the `img` file
- cmd - Represents the commandline that was run
- cue - CDRWIN cuesheet referencing the `bin` file(s)
- dat - Logiqx datfile referencing the `bin` file(s)
- disc - Disc metadata and information
- drive - Drive metadata and information
- img - CloneCD output disc image (CD/GD only)
- img.cue - CDRWIN cuesheet referencing the `img` file
- img_EdcEcc - ECC check output as run on the `img` file
- iso - Final output disc image (DVD/BD only)
- mainError - Read, drive, or system errors
- mainInfo - ISOBuster-formatted sector information
- scm - Scrambled disc image
- sub - Binary subchannel data as read from the disc
- subError - Subchannel read errors
- subInfo - Subchannel informational messages
- subIntention - Subchannel intentional error information
- subReadable - Human-readable version of `sub`
- toc - Binary representation of the table of contents
- volDesc - Volume descriptor information
*/
var missingFiles = new List<string>();
switch (Type)
{
case MediaType.CDROM:
case MediaType.GDROM:
if (!File.Exists($"{basePath}.cue"))
missingFiles.Add($"{basePath}.cue");
if (!File.Exists($"{basePath}.img") && !File.Exists($"{basePath}.imgtmp"))
missingFiles.Add($"{basePath}.img");
// Audio-only discs don't output these files
if (!System.IsAudio())
{
if (!File.Exists($"{basePath}.scm") && !File.Exists($"{basePath}.scmtmp"))
missingFiles.Add($"{basePath}.scm");
}
if (!File.Exists($"{basePath}_logs.zip") || !preCheck)
{
// GD-ROM and GD-R don't output this for the HD area
if (Type != MediaType.GDROM)
{
if (!File.Exists($"{basePath}.ccd"))
missingFiles.Add($"{basePath}.ccd");
}
if (!File.Exists($"{basePath}.dat"))
missingFiles.Add($"{basePath}.dat");
if (!File.Exists($"{basePath}.sub") && !File.Exists($"{basePath}.subtmp"))
missingFiles.Add($"{basePath}.sub");
if (!File.Exists($"{basePath}_disc.txt"))
missingFiles.Add($"{basePath}_disc.txt");
if (!File.Exists($"{basePath}_drive.txt"))
missingFiles.Add($"{basePath}_drive.txt");
if (!File.Exists($"{basePath}_img.cue"))
missingFiles.Add($"{basePath}_img.cue");
if (!File.Exists($"{basePath}_mainError.txt"))
missingFiles.Add($"{basePath}_mainError.txt");
if (!File.Exists($"{basePath}_mainInfo.txt"))
missingFiles.Add($"{basePath}_mainInfo.txt");
if (!File.Exists($"{basePath}_subError.txt"))
missingFiles.Add($"{basePath}_subError.txt");
if (!File.Exists($"{basePath}_subInfo.txt"))
missingFiles.Add($"{basePath}_subInfo.txt");
if (!File.Exists($"{basePath}_subReadable.txt") && !File.Exists($"{basePath}_sub.txt"))
missingFiles.Add($"{basePath}_subReadable.txt");
if (!File.Exists($"{basePath}_volDesc.txt"))
missingFiles.Add($"{basePath}_volDesc.txt");
// Audio-only discs don't output these files
if (!System.IsAudio())
{
if (!File.Exists($"{basePath}.img_EdcEcc.txt") && !File.Exists($"{basePath}.img_EccEdc.txt"))
missingFiles.Add($"{basePath}.img_EdcEcc.txt");
}
}
// Removed or inconsistent files
//{
// // Doesn't output on Linux
// if (!File.Exists($"{basePath}.c2"))
// missingFiles.Add($"{basePath}.c2");
// // Doesn't output on Linux
// if (!File.Exists($"{basePath}_c2Error.txt"))
// missingFiles.Add($"{basePath}_c2Error.txt");
// // Replaced by timestamp-named file
// if (!File.Exists($"{basePath}_cmd.txt"))
// missingFiles.Add($"{basePath}_cmd.txt");
// // Not guaranteed output
// if (!File.Exists($"{basePath}_subIntention.txt"))
// missingFiles.Add($"{basePath}_subIntention.txt");
// // Not guaranteed output
// if (File.Exists($"{basePath}_suppl.dat"))
// missingFiles.Add($"{basePath}_suppl.dat");
// // Not guaranteed output (at least PCE)
// if (!File.Exists($"{basePath}.toc"))
// missingFiles.Add($"{basePath}.toc");
//}
break;
case MediaType.DVD:
case MediaType.HDDVD:
case MediaType.BluRay:
case MediaType.NintendoGameCubeGameDisc:
case MediaType.NintendoWiiOpticalDisc:
if (!File.Exists($"{basePath}_logs.zip") || !preCheck)
{
if (!File.Exists($"{basePath}.dat"))
missingFiles.Add($"{basePath}.dat");
if (!File.Exists($"{basePath}_disc.txt"))
missingFiles.Add($"{basePath}_disc.txt");
if (!File.Exists($"{basePath}_drive.txt"))
missingFiles.Add($"{basePath}_drive.txt");
if (!File.Exists($"{basePath}_mainError.txt"))
missingFiles.Add($"{basePath}_mainError.txt");
if (!File.Exists($"{basePath}_mainInfo.txt"))
missingFiles.Add($"{basePath}_mainInfo.txt");
if (!File.Exists($"{basePath}_volDesc.txt"))
missingFiles.Add($"{basePath}_volDesc.txt");
}
// Removed or inconsistent files
//{
// // Replaced by timestamp-named file
// if (!File.Exists($"{basePath}_cmd.txt"))
// missingFiles.Add($"{basePath}_cmd.txt");
// // Not guaranteed output
// if (File.Exists($"{basePath}_CSSKey.txt"))
// missingFiles.Add($"{basePath}_CSSKey.txt");
// // Only output for some parameters
// if (File.Exists($"{basePath}.raw"))
// missingFiles.Add($"{basePath}.raw");
// // Not guaranteed output
// if (File.Exists($"{basePath}_suppl.dat"))
// missingFiles.Add($"{basePath}_suppl.dat");
//}
break;
case MediaType.FloppyDisk:
case MediaType.HardDisk:
// TODO: Determine what outputs come out from a HDD, SD, etc.
if (!File.Exists($"{basePath}_logs.zip") || !preCheck)
{
if (!File.Exists($"{basePath}.dat"))
missingFiles.Add($"{basePath}.dat");
if (!File.Exists($"{basePath}_disc.txt"))
missingFiles.Add($"{basePath}_disc.txt");
}
// Removed or inconsistent files
//{
// // Replaced by timestamp-named file
// if (!File.Exists($"{basePath}_cmd.txt"))
// missingFiles.Add($"{basePath}_cmd.txt");
//}
break;
default:
missingFiles.Add("Media and system combination not supported for DiscImageCreator");
break;
}
return (!missingFiles.Any(), missingFiles);
}
/// <inheritdoc/>
public override void GenerateArtifacts(SubmissionInfo info, string basePath)
{
info.Artifacts ??= [];
//if (File.Exists($"{basePath}.c2"))
// info.Artifacts["c2"] = Convert.ToBase64String(File.ReadAllBytes($"{basePath}.c2")) ?? string.Empty;
if (File.Exists($"{basePath}_c2Error.txt"))
info.Artifacts["c2Error"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}_c2Error.txt")) ?? string.Empty;
if (File.Exists($"{basePath}.ccd"))
info.Artifacts["ccd"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}.ccd")) ?? string.Empty;
if (File.Exists($"{basePath}_cmd.txt")) // TODO: Figure out how to read in the timestamp-named file
info.Artifacts["cmd"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}_cmd.txt")) ?? string.Empty;
if (File.Exists($"{basePath}_CSSKey.txt"))
info.Artifacts["csskey"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}_CSSKey.txt")) ?? string.Empty;
if (File.Exists($"{basePath}.cue"))
info.Artifacts["cue"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}.cue")) ?? string.Empty;
if (File.Exists($"{basePath}.dat"))
info.Artifacts["dat"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}.dat")) ?? string.Empty;
if (File.Exists($"{basePath}_disc.txt"))
info.Artifacts["disc"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}_disc.txt")) ?? string.Empty;
//if (File.Exists(Path.Combine(outputDirectory, $"{basePath}_DMI.bin")))
// info.Artifacts["dmi"] = Convert.ToBase64String(File.ReadAllBytes(Path.Combine(outputDirectory, $"{basePath}_DMI.bin"))) ?? string.Empty;
if (File.Exists($"{basePath}_drive.txt"))
info.Artifacts["drive"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}_drive.txt")) ?? string.Empty;
if (File.Exists($"{basePath}_img.cue"))
info.Artifacts["img_cue"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}_img.cue")) ?? string.Empty;
if (File.Exists($"{basePath}.img_EdcEcc.txt"))
info.Artifacts["img_EdcEcc"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}.img_EdcEcc.txt")) ?? string.Empty;
if (File.Exists($"{basePath}.img_EccEdc.txt"))
info.Artifacts["img_EdcEcc"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}.img_EccEdc.txt")) ?? string.Empty;
if (File.Exists($"{basePath}_mainError.txt"))
info.Artifacts["mainError"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}_mainError.txt")) ?? string.Empty;
if (File.Exists($"{basePath}_mainInfo.txt"))
info.Artifacts["mainInfo"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}_mainInfo.txt")) ?? string.Empty;
//if (File.Exists($"{basePath}_PFI.bin"))
// info.Artifacts["pfi"] = Convert.ToBase64String(File.ReadAllBytes($"{basePath}_PFI.bin")) ?? string.Empty;
//if (File.Exists($"{basePath}_PIC.bin"))
// info.Artifacts["pic"] = Convert.ToBase64String(File.ReadAllBytes($"{basePath}_PIC.bin")) ?? string.Empty;
//if (File.Exists($"{basePath}_SS.bin"))
// info.Artifacts["ss"] = Convert.ToBase64String(File.ReadAllBytes($"{basePath}_SS.bin")) ?? string.Empty;
if (File.Exists($"{basePath}.sub"))
info.Artifacts["sub"] = Convert.ToBase64String(File.ReadAllBytes($"{basePath}.sub")) ?? string.Empty;
if (File.Exists($"{basePath}_subError.txt"))
info.Artifacts["subError"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}_subError.txt")) ?? string.Empty;
if (File.Exists($"{basePath}_subInfo.txt"))
info.Artifacts["subInfo"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}_subInfo.txt")) ?? string.Empty;
if (File.Exists($"{basePath}_subIntention.txt"))
info.Artifacts["subIntention"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}_subIntention.txt")) ?? string.Empty;
//if (File.Exists($"{basePath}_sub.txt"))
// info.Artifacts["subReadable"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}_sub.txt")) ?? string.Empty;
//if (File.Exists($"{basePath}_subReadable.txt"))
// info.Artifacts["subReadable"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}_subReadable.txt")) ?? string.Empty;
if (File.Exists($"{basePath}_volDesc.txt"))
info.Artifacts["volDesc"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}_volDesc.txt")) ?? string.Empty;
}
/// <inheritdoc/>
/// <remarks>Determining the PSX/PS2 executable name is the last use of drive in this method</remarks>
public override void GenerateSubmissionInfo(SubmissionInfo info, string basePath, bool redumpCompat)
{
var outputDirectory = Path.GetDirectoryName(basePath);
@@ -293,7 +75,7 @@ namespace MPF.Processors
info = Builder.EnsureAllSections(info);
// Get the dumping program and version
var (dicCmd, dicVersion) = GetCommandFilePathAndVersion(basePath);
var dicVersion = GetCommandFilePathAndVersion(basePath, out var dicCmd);
info.DumpingInfo!.DumpingProgram ??= string.Empty;
info.DumpingInfo.DumpingProgram += $" {dicVersion ?? "Unknown Version"}";
info.DumpingInfo.DumpingDate = ProcessingTool.GetFileModifiedDate(dicCmd)?.ToString("yyyy-MM-dd HH:mm:ss");
@@ -450,7 +232,7 @@ namespace MPF.Processors
break;
case RedumpSystem.MicrosoftXbox:
string xmidString = ProcessingTool.GetXGD1XMID($"{basePath}_DMI.bin");
string xmidString = ProcessingTool.GetXMID($"{basePath}_DMI.bin");
var xmid = SabreTools.Serialization.Wrappers.XMID.Create(xmidString);
if (xmid != null)
{
@@ -494,7 +276,7 @@ namespace MPF.Processors
break;
case RedumpSystem.MicrosoftXbox360:
string xemidString = ProcessingTool.GetXGD23XeMID($"{basePath}_DMI.bin");
string xemidString = ProcessingTool.GetXeMID($"{basePath}_DMI.bin");
var xemid = SabreTools.Serialization.Wrappers.XeMID.Create(xemidString);
if (xemid != null)
{
@@ -546,12 +328,15 @@ namespace MPF.Processors
if (!string.IsNullOrEmpty(info.Extras.Header))
info.Extras.Header = string.Join("\n", info.Extras.Header.Split('\n').Take(16).ToArray());
if (GetGDROMBuildInfo(info.Extras.Header, out var gdSerial, out var gdVersion, out var gdDate))
if (GetGDROMBuildInfo(info.Extras.Header,
out var serial,
out var version,
out var date))
{
// Ensure internal serial is pulled from local data
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = gdSerial ?? string.Empty;
info.VersionAndEditions!.Version = gdVersion ?? string.Empty;
info.CommonDiscInfo.EXEDateBuildDate = gdDate ?? string.Empty;
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = serial ?? string.Empty;
info.VersionAndEditions!.Version = version ?? string.Empty;
info.CommonDiscInfo.EXEDateBuildDate = date ?? string.Empty;
}
}
@@ -582,12 +367,15 @@ namespace MPF.Processors
if (!string.IsNullOrEmpty(info.Extras.Header))
info.Extras.Header = string.Join("\n", info.Extras.Header.Split('\n').Take(16).ToArray());
if (GetGDROMBuildInfo(info.Extras.Header, out var gdSerial, out var gdVersion, out var gdDate))
if (GetGDROMBuildInfo(info.Extras.Header,
out var serial,
out var version,
out var date))
{
// Ensure internal serial is pulled from local data
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = gdSerial ?? string.Empty;
info.VersionAndEditions!.Version = gdVersion ?? string.Empty;
info.CommonDiscInfo.EXEDateBuildDate = gdDate ?? string.Empty;
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = serial ?? string.Empty;
info.VersionAndEditions!.Version = version ?? string.Empty;
info.CommonDiscInfo.EXEDateBuildDate = date ?? string.Empty;
}
}
@@ -602,12 +390,15 @@ namespace MPF.Processors
if (!string.IsNullOrEmpty(info.Extras.Header))
info.Extras.Header = string.Join("\n", info.Extras.Header.Split('\n').Take(16).ToArray());
if (GetGDROMBuildInfo(info.Extras.Header, out var gdSerial, out var gdVersion, out var gdDate))
if (GetGDROMBuildInfo(info.Extras.Header,
out var serial,
out var version,
out var date))
{
// Ensure internal serial is pulled from local data
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = gdSerial ?? string.Empty;
info.VersionAndEditions!.Version = gdVersion ?? string.Empty;
info.CommonDiscInfo.EXEDateBuildDate = gdDate ?? string.Empty;
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = serial ?? string.Empty;
info.VersionAndEditions!.Version = version ?? string.Empty;
info.CommonDiscInfo.EXEDateBuildDate = date ?? string.Empty;
}
}
@@ -622,12 +413,15 @@ namespace MPF.Processors
if (!string.IsNullOrEmpty(info.Extras.Header))
info.Extras.Header = string.Join("\n", info.Extras.Header.Split('\n').Take(16).ToArray());
if (GetGDROMBuildInfo(info.Extras.Header, out var gdSerial, out var gdVersion, out var gdDate))
if (GetGDROMBuildInfo(info.Extras.Header,
out var serial,
out var version,
out var date))
{
// Ensure internal serial is pulled from local data
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = gdSerial ?? string.Empty;
info.VersionAndEditions!.Version = gdVersion ?? string.Empty;
info.CommonDiscInfo.EXEDateBuildDate = gdDate ?? string.Empty;
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = serial ?? string.Empty;
info.VersionAndEditions!.Version = version ?? string.Empty;
info.CommonDiscInfo.EXEDateBuildDate = date ?? string.Empty;
}
}
@@ -642,12 +436,15 @@ namespace MPF.Processors
if (!string.IsNullOrEmpty(info.Extras.Header))
info.Extras.Header = string.Join("\n", info.Extras.Header.Split('\n').Take(16).ToArray());
if (GetGDROMBuildInfo(info.Extras.Header, out var gdSerial, out var gdVersion, out var gdDate))
if (GetGDROMBuildInfo(info.Extras.Header,
out var serial,
out var version,
out var date))
{
// Ensure internal serial is pulled from local data
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = gdSerial ?? string.Empty;
info.VersionAndEditions!.Version = gdVersion ?? string.Empty;
info.CommonDiscInfo.EXEDateBuildDate = gdDate ?? string.Empty;
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = serial ?? string.Empty;
info.VersionAndEditions!.Version = version ?? string.Empty;
info.CommonDiscInfo.EXEDateBuildDate = date ?? string.Empty;
}
}
@@ -698,180 +495,229 @@ namespace MPF.Processors
}
/// <inheritdoc/>
public override List<string> GetDeleteableFilePaths(string basePath)
internal override List<OutputFile> GetOutputFiles(string? baseDirectory, string baseFilename)
{
var deleteableFiles = new List<string>();
switch (Type)
{
// TODO: Handle (Pregap) files -- need examples
case MediaType.CDROM:
return [
new($"{baseFilename}.c2", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"c2"), // Doesn't output on Linux
new($"{baseFilename}.ccd", OutputFileFlags.Required
| OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"ccd"),
new($"{baseFilename}.cue", OutputFileFlags.Required),
new($"{baseFilename}.dat", OutputFileFlags.Required
| OutputFileFlags.Zippable),
new($"{baseFilename}.img", OutputFileFlags.Required
| OutputFileFlags.Deleteable),
new([$"{baseFilename}.img_EdcEcc.txt", $"{baseFilename}.img_EccEdc.txt"], System.IsAudio()
? OutputFileFlags.Artifact | OutputFileFlags.Zippable
: OutputFileFlags.Required | OutputFileFlags.Artifact | OutputFileFlags.Zippable,
"img_edcecc"),
new([$"{baseFilename}.scm", $"{baseFilename}.scmtmp"], System.IsAudio()
? OutputFileFlags.Deleteable
: OutputFileFlags.Required | OutputFileFlags.Deleteable),
new([$"{baseFilename}.sub", $"{baseFilename}.subtmp"], OutputFileFlags.Required
| OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"sub"),
new($"{baseFilename}.toc", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"toc"),
new($"{baseFilename}_c2Error.txt", OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"c2_error"), // Doesn't output on Linux
new RegexOutputFile(Regex.Escape(baseFilename) + @"_(\d{8})T\d{6}\.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"cmd"),
new($"{baseFilename}_cmd.txt", OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"cmd"),
new($"{baseFilename}_disc.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"disc"),
new($"{baseFilename}_drive.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"drive"),
new($"{baseFilename}_img.cue", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"img_cue"),
new($"{baseFilename}_mainError.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"main_error"),
new($"{baseFilename}_mainInfo.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"main_info"),
new([$"{baseFilename}_sub.txt", $"{baseFilename}_subReadable.txt"], OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"sub_readable"),
new($"{baseFilename}_subError.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"sub_error"),
new($"{baseFilename}_subInfo.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"sub_info"),
new($"{baseFilename}_subIntention.txt", OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"sub_intention"),
new($"{baseFilename}_suppl.dat", OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"suppl_dat"),
new($"{baseFilename}_volDesc.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"vol_desc"),
new([$"{baseFilename} (Track 0).sub", $"{baseFilename} (Track 00).sub"], OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"track00_sub"),
new([$"{baseFilename} (Track 1)(-LBA).sub", $"{baseFilename} (Track 01)(-LBA).sub"], OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"track1_lba_sub"),
new([$"{baseFilename} (Track AA).sub", $"{baseFilename} (Lead-out)(Track AA).sub"], OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"track_aa_sub"),
];
// TODO: Confirm GD-ROM HD area outputs
case MediaType.GDROM:
if (File.Exists($"{basePath}.img"))
deleteableFiles.Add($"{basePath}.img");
if (File.Exists($"{basePath} (Track 0).img"))
deleteableFiles.Add($"{basePath} (Track 0).img");
if (File.Exists($"{basePath} (Track 00).img"))
deleteableFiles.Add($"{basePath} (Track 00).img");
if (File.Exists($"{basePath} (Track 1)(-LBA).img"))
deleteableFiles.Add($"{basePath} (Track 1)(-LBA).img");
if (File.Exists($"{basePath} (Track 01)(-LBA).img"))
deleteableFiles.Add($"{basePath} (Track 01)(-LBA).img");
if (File.Exists($"{basePath} (Track AA).img"))
deleteableFiles.Add($"{basePath} (Track AA).img");
return [
new($"{baseFilename}.dat", OutputFileFlags.Required
| OutputFileFlags.Zippable),
new($"{baseFilename}.toc", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"toc"),
if (File.Exists($"{basePath}.scm"))
deleteableFiles.Add($"{basePath}.scm");
if (File.Exists($"{basePath} (Track 0).scm"))
deleteableFiles.Add($"{basePath} (Track 0).scm");
if (File.Exists($"{basePath} (Track 00).scm"))
deleteableFiles.Add($"{basePath} (Track 00).scm");
if (File.Exists($"{basePath} (Track 1)(-LBA).scm"))
deleteableFiles.Add($"{basePath} (Track 1)(-LBA).scm");
if (File.Exists($"{basePath} (Track 01)(-LBA).scm"))
deleteableFiles.Add($"{basePath} (Track 01)(-LBA).scm");
if (File.Exists($"{basePath} (Track AA).scm"))
deleteableFiles.Add($"{basePath} (Track AA).scm");
break;
new RegexOutputFile(Regex.Escape(baseFilename) + @"_(\d{8})T\d{6}\.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"cmd"),
new($"{baseFilename}_cmd.txt", OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"cmd"),
new($"{baseFilename}_disc.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"disc"),
new($"{baseFilename}_drive.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"drive"),
new($"{baseFilename}_mainError.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"main_error"),
new($"{baseFilename}_mainInfo.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"main_info"),
new($"{baseFilename}_suppl.dat", OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"suppl_dat"),
new($"{baseFilename}_volDesc.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"vol_desc"),
];
case MediaType.DVD:
case MediaType.HDDVD:
case MediaType.BluRay:
case MediaType.NintendoGameCubeGameDisc:
case MediaType.NintendoWiiOpticalDisc:
if (File.Exists($"{basePath}.raw"))
deleteableFiles.Add($"{basePath}.raw");
return [
new($"{baseFilename}.dat", OutputFileFlags.Required
| OutputFileFlags.Zippable),
new($"{baseFilename}.raw", OutputFileFlags.None),
new($"{baseFilename}.toc", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"toc"),
break;
}
new RegexOutputFile(Regex.Escape(baseFilename) + @"_(\d{8})T\d{6}\.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"cmd"),
new($"{baseFilename}_cmd.txt", OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"cmd"),
new($"{baseFilename}_CSSKey.txt", OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"css_key"),
new($"{baseFilename}_disc.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"disc"),
new($"{baseFilename}_drive.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"drive"),
new($"{baseFilename}_mainError.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"main_error"),
new($"{baseFilename}_mainInfo.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"main_info"),
new($"{baseFilename}_suppl.dat", OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"suppl_dat"),
new($"{baseFilename}_volDesc.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"vol_desc"),
return deleteableFiles;
}
/// <inheritdoc/>
public override List<string> GetLogFilePaths(string basePath)
{
(var cmdPath, _) = GetCommandFilePathAndVersion(basePath);
var logFiles = new List<string>();
switch (Type)
{
case MediaType.CDROM:
case MediaType.GDROM:
if (File.Exists($"{basePath}.c2"))
logFiles.Add($"{basePath}.c2");
if (File.Exists($"{basePath}_c2Error.txt"))
logFiles.Add($"{basePath}_c2Error.txt");
if (File.Exists($"{basePath}.ccd"))
logFiles.Add($"{basePath}.ccd");
if (cmdPath != null && File.Exists(cmdPath))
logFiles.Add(cmdPath);
if (File.Exists($"{basePath}_cmd.txt"))
logFiles.Add($"{basePath}_cmd.txt");
if (File.Exists($"{basePath}.dat"))
logFiles.Add($"{basePath}.dat");
if (File.Exists($"{basePath}.sub"))
logFiles.Add($"{basePath}.sub");
if (File.Exists($"{basePath} (Track 0).sub"))
logFiles.Add($"{basePath} (Track 0).sub");
if (File.Exists($"{basePath} (Track 00).sub"))
logFiles.Add($"{basePath} (Track 00).sub");
if (File.Exists($"{basePath} (Track 1)(-LBA).sub"))
logFiles.Add($"{basePath} (Track 1)(-LBA).sub");
if (File.Exists($"{basePath} (Track 01)(-LBA).sub"))
logFiles.Add($"{basePath} (Track 01)(-LBA).sub");
if (File.Exists($"{basePath} (Track AA).sub"))
logFiles.Add($"{basePath} (Track AA).sub");
if (File.Exists($"{basePath}.subtmp"))
logFiles.Add($"{basePath}.subtmp");
if (File.Exists($"{basePath}.toc"))
logFiles.Add($"{basePath}.toc");
if (File.Exists($"{basePath}_disc.txt"))
logFiles.Add($"{basePath}_disc.txt");
if (File.Exists($"{basePath}_drive.txt"))
logFiles.Add($"{basePath}_drive.txt");
if (File.Exists($"{basePath}_img.cue"))
logFiles.Add($"{basePath}_img.cue");
if (File.Exists($"{basePath}.img_EdcEcc.txt"))
logFiles.Add($"{basePath}.img_EdcEcc.txt");
if (File.Exists($"{basePath}.img_EccEdc.txt"))
logFiles.Add($"{basePath}.img_EccEdc.txt");
if (File.Exists($"{basePath}_mainError.txt"))
logFiles.Add($"{basePath}_mainError.txt");
if (File.Exists($"{basePath}_mainInfo.txt"))
logFiles.Add($"{basePath}_mainInfo.txt");
if (File.Exists($"{basePath}_sub.txt"))
logFiles.Add($"{basePath}_sub.txt");
if (File.Exists($"{basePath}_subError.txt"))
logFiles.Add($"{basePath}_subError.txt");
if (File.Exists($"{basePath}_subInfo.txt"))
logFiles.Add($"{basePath}_subInfo.txt");
if (File.Exists($"{basePath}_subIntention.txt"))
logFiles.Add($"{basePath}_subIntention.txt");
if (File.Exists($"{basePath}_subReadable.txt"))
logFiles.Add($"{basePath}_subReadable.txt");
if (File.Exists($"{basePath}_suppl.dat"))
logFiles.Add($"{basePath}_suppl.dat");
if (File.Exists($"{basePath}_volDesc.txt"))
logFiles.Add($"{basePath}_volDesc.txt");
break;
case MediaType.DVD:
case MediaType.HDDVD:
case MediaType.BluRay:
case MediaType.NintendoGameCubeGameDisc:
case MediaType.NintendoWiiOpticalDisc:
if (cmdPath != null && File.Exists(cmdPath))
logFiles.Add(cmdPath);
if (File.Exists($"{basePath}_cmd.txt"))
logFiles.Add($"{basePath}_cmd.txt");
if (File.Exists($"{basePath}_CSSKey.txt"))
logFiles.Add($"{basePath}_CSSKey.txt");
if (File.Exists($"{basePath}.dat"))
logFiles.Add($"{basePath}.dat");
if (File.Exists($"{basePath}.toc"))
logFiles.Add($"{basePath}.toc");
if (File.Exists($"{basePath}_disc.txt"))
logFiles.Add($"{basePath}_disc.txt");
if (File.Exists($"{basePath}_drive.txt"))
logFiles.Add($"{basePath}_drive.txt");
if (File.Exists($"{basePath}_mainError.txt"))
logFiles.Add($"{basePath}_mainError.txt");
if (File.Exists($"{basePath}_mainInfo.txt"))
logFiles.Add($"{basePath}_mainInfo.txt");
if (File.Exists($"{basePath}_suppl.dat"))
logFiles.Add($"{basePath}_suppl.dat");
if (File.Exists($"{basePath}_volDesc.txt"))
logFiles.Add($"{basePath}_volDesc.txt");
if (File.Exists($"{basePath}_DMI.bin"))
logFiles.Add($"{basePath}_DMI.bin");
if (File.Exists($"{basePath}_PFI.bin"))
logFiles.Add($"{basePath}_PFI.bin");
if (File.Exists($"{basePath}_PIC.bin"))
logFiles.Add($"{basePath}_PIC.bin");
if (File.Exists($"{basePath}_SS.bin"))
logFiles.Add($"{basePath}_SS.bin");
break;
// TODO: Figure out when these are required
new($"{baseFilename}_DMI.bin", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"dmi"),
new($"{baseFilename}_PFI.bin", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"pfi"),
new($"{baseFilename}_PIC.bin", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"pic"),
new($"{baseFilename}_SS.bin", System.IsXGD()
? OutputFileFlags.Required | OutputFileFlags.Binary | OutputFileFlags.Zippable
: OutputFileFlags.Binary | OutputFileFlags.Zippable,
"ss"),
];
case MediaType.FloppyDisk:
case MediaType.HardDisk:
// TODO: Determine what outputs come out from a HDD, SD, etc.
if (cmdPath != null && File.Exists(cmdPath))
logFiles.Add(cmdPath);
if (File.Exists($"{basePath}_cmd.txt"))
logFiles.Add($"{basePath}_cmd.txt");
if (File.Exists($"{basePath}.dat"))
logFiles.Add($"{basePath}.dat");
if (File.Exists($"{basePath}_disc.txt"))
logFiles.Add($"{basePath}_disc.txt");
return [
new($"{baseFilename}.dat", OutputFileFlags.Required
| OutputFileFlags.Zippable),
break;
new RegexOutputFile(Regex.Escape(baseFilename) + @"_(\d{8})T\d{6}\.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"cmd"),
new($"{baseFilename}_cmd.txt", OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"cmd"),
new($"{baseFilename}_disc.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"disc"),
];
}
return logFiles;
return [];
}
#endregion
@@ -882,31 +728,31 @@ namespace MPF.Processors
/// Get the command file path and extract the version from it
/// </summary>
/// <param name="basePath">Base filename and path to use for checking</param>
/// <returns>Tuple of file path and version as strings, both null on error</returns>
private static (string?, string?) GetCommandFilePathAndVersion(string basePath)
/// <returns>The version as a string, null on error</returns>
private static string? GetCommandFilePathAndVersion(string basePath, out string? commandPath)
{
// If we have an invalid base path, we can do nothing
commandPath = null;
if (string.IsNullOrEmpty(basePath))
return (null, null);
return null;
// Generate the matching regex based on the base path
string basePathFileName = Path.GetFileName(basePath);
var cmdFilenameRegex = new Regex(Regex.Escape(basePathFileName) + @"_(\d{8})T\d{6}\.txt");
string baseFilename = Path.GetFileName(basePath);
var cmdFilenameRegex = new Regex(Regex.Escape(baseFilename) + @"_(\d{8})T\d{6}\.txt");
// Find the first match for the command file
var parentDirectory = Path.GetDirectoryName(basePath);
if (string.IsNullOrEmpty(parentDirectory))
return (null, null);
return null;
var currentFiles = Directory.GetFiles(parentDirectory);
var commandPath = currentFiles.FirstOrDefault(f => cmdFilenameRegex.IsMatch(f));
if (string.IsNullOrEmpty(commandPath))
return (null, null);
commandPath = currentFiles.FirstOrDefault(f => cmdFilenameRegex.IsMatch(f));
if (string.IsNullOrEmpty(value: commandPath))
return null;
// Extract the version string
var match = cmdFilenameRegex.Match(commandPath);
string version = match.Groups[1].Value;
return (commandPath, version);
return match.Groups[1].Value;
}
#endregion
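For reference, a small sketch of the timestamped command filename that the regex above targets; the base name "disc" and the timestamp are hypothetical:

using System;
using System.Text.RegularExpressions;

var cmdRegex = new Regex(Regex.Escape("disc") + @"_(\d{8})T\d{6}\.txt");
Match match = cmdRegex.Match("disc_20241105T153045.txt");
Console.WriteLine(match.Success ? match.Groups[1].Value : "no match"); // prints 20241105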
@@ -2056,15 +1902,36 @@ namespace MPF.Processors
try
{
// Fast forward to the offsets
// Get a list for all found offsets
var offsets = new List<string>();
// Loop over all possible offsets
using var sr = File.OpenText(disc);
while (sr.ReadLine()?.Trim()?.StartsWith("========== Offset") == false) ;
sr.ReadLine(); // Combined Offset
sr.ReadLine(); // Drive Offset
sr.ReadLine(); // Separator line
while (!sr.EndOfStream)
{
// Fast forward to the offsets
while (sr.ReadLine()?.Trim()?.StartsWith("========== Offset") == false) ;
if (sr.EndOfStream)
break;
sr.ReadLine(); // Combined Offset
sr.ReadLine(); // Drive Offset
sr.ReadLine(); // Separator line
// Now that we're at the offsets, attempt to get the sample offset
string offset = sr.ReadLine()?.Split(' ')?.LastOrDefault() ?? string.Empty;
offsets.Add(offset);
}
// Deduplicate the offsets
offsets = offsets
.Where(s => !string.IsNullOrEmpty(s))
.Distinct()
.ToList();
// Now that we're at the offsets, attempt to get the sample offset
return sr.ReadLine()?.Split(' ')?.LastOrDefault();
return string.Join("; ", [.. offsets]);
}
catch
{

View File

@@ -1,4 +1,4 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<!-- Assembly Properties -->
@@ -9,7 +9,7 @@
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<VersionPrefix>3.2.1</VersionPrefix>
<VersionPrefix>3.2.3</VersionPrefix>
<WarningsNotAsErrors>NU5104</WarningsNotAsErrors>
<!-- Package Properties -->
@@ -48,20 +48,16 @@
<ItemGroup Condition="!$(TargetFramework.StartsWith(`net2`)) AND !$(TargetFramework.StartsWith(`net3`)) AND !$(TargetFramework.StartsWith(`net40`))">
<PackageReference Include="System.IO.Compression" Version="4.3.0" />
</ItemGroup>
<ItemGroup Condition="$(TargetFramework.StartsWith(`net4`)) AND !$(TargetFramework.StartsWith(`net40`))">
<PackageReference Include="IndexRange" Version="1.0.3" />
</ItemGroup>
<ItemGroup Condition="!$(TargetFramework.StartsWith(`net2`)) AND !$(TargetFramework.StartsWith(`net3`)) AND !$(TargetFramework.StartsWith(`net40`)) AND !$(TargetFramework.StartsWith(`net452`))">
<PackageReference Include="System.IO.Compression.ZipFile" Version="4.3.0" />
<PackageReference Include="System.Runtime.CompilerServices.Unsafe" Version="6.0.0" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="psxt001z.Library" Version="0.21.0-rc1" />
<PackageReference Include="SabreTools.Hashing" Version="1.2.0" />
<PackageReference Include="SabreTools.Models" Version="1.4.8" />
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.1" />
<PackageReference Include="SabreTools.Serialization" Version="1.6.5" />
<PackageReference Include="SabreTools.Hashing" Version="1.2.2" />
<PackageReference Include="SabreTools.Models" Version="1.4.11" />
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.4" />
<PackageReference Include="SabreTools.Serialization" Version="1.6.9" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,250 @@
using System;
using System.IO;
#if NET452_OR_GREATER || NETCOREAPP
using System.IO.Compression;
using System.Linq;
#endif
namespace MPF.Processors
{
/// <summary>
/// Represents attributes about an <see cref="OutputFile"/>
/// </summary>
[Flags]
internal enum OutputFileFlags : ushort
{
/// <summary>
/// Default state
/// </summary>
None = 0x0000,
/// <summary>
/// File is required to exist
/// </summary>
Required = 0x0001,
/// <summary>
/// File is included as an artifact
/// </summary>
Artifact = 0x0002,
/// <summary>
/// File is included as a binary artifact
/// </summary>
Binary = 0x0004,
/// <summary>
/// File can be deleted after processing
/// </summary>
Deleteable = 0x0008,
/// <summary>
/// File can be zipped after processing
/// </summary>
Zippable = 0x0010,
}
/// <summary>
/// Represents a single output file
/// </summary>
internal class OutputFile
{
/// <summary>
/// Set of all filename variants
/// </summary>
public string[] Filenames { get; private set; }
/// <summary>
/// Key used when creating an artifact
/// </summary>
public string? ArtifactKey { get; private set; }
/// <summary>
/// Indicates if the file is required
/// </summary>
public bool IsRequired
{
get
{
#if NET20 || NET35
return (_flags & OutputFileFlags.Required) != 0;
#else
return _flags.HasFlag(OutputFileFlags.Required);
#endif
}
}
/// <summary>
/// Indicates if the file is an artifact
/// </summary>
public bool IsArtifact
{
get
{
#if NET20 || NET35
return (_flags & OutputFileFlags.Artifact) != 0
|| (_flags & OutputFileFlags.Binary) != 0;
#else
return _flags.HasFlag(OutputFileFlags.Artifact)
|| _flags.HasFlag(OutputFileFlags.Binary);
#endif
}
}
/// <summary>
/// Indicates if the file is a binary artifact
/// </summary>
public bool IsBinaryArtifact
{
get
{
#if NET20 || NET35
return (_flags & OutputFileFlags.Binary) != 0;
#else
return _flags.HasFlag(OutputFileFlags.Binary);
#endif
}
}
/// <summary>
/// Indicates if the file is deleteable after processing
/// </summary>
public bool IsDeleteable
{
get
{
#if NET20 || NET35
return (_flags & OutputFileFlags.Deleteable) != 0;
#else
return _flags.HasFlag(OutputFileFlags.Deleteable);
#endif
}
}
/// <summary>
/// Indicates if the file is zippable after processing
/// </summary>
public bool IsZippable
{
get
{
#if NET20 || NET35
return (_flags & OutputFileFlags.Zippable) != 0;
#else
return _flags.HasFlag(OutputFileFlags.Zippable);
#endif
}
}
/// <summary>
/// Represents attributes about the current file
/// </summary>
protected readonly OutputFileFlags _flags;
/// <summary>
/// Create an OutputFile with a single filename
/// </summary>
public OutputFile(string filename, OutputFileFlags flags)
: this([filename], flags)
{
}
/// <summary>
/// Create an OutputFile with a single filename
/// </summary>
public OutputFile(string filename, OutputFileFlags flags, string artifactKey)
: this([filename], flags, artifactKey)
{
}
/// <summary>
/// Create an OutputFile with a set of filenames
/// </summary>
public OutputFile(string[] filenames, OutputFileFlags flags)
{
Filenames = filenames;
ArtifactKey = null;
_flags = flags;
// Validate the inputs
if (filenames.Length == 0)
throw new ArgumentException($"{nameof(filenames)} must contain at least one value");
if (IsArtifact && string.IsNullOrEmpty(ArtifactKey))
throw new ArgumentException($"{nameof(flags)} should not contain the Artifact or Binary flag");
}
/// <summary>
/// Create an OutputFile with a set of filenames
/// </summary>
public OutputFile(string[] filenames, OutputFileFlags flags, string artifactKey)
{
Filenames = filenames;
ArtifactKey = artifactKey;
_flags = flags;
// Validate the inputs
if (filenames.Length == 0)
throw new ArgumentException($"{nameof(filenames)} must contain at least one value");
if (IsArtifact && string.IsNullOrEmpty(ArtifactKey))
throw new ArgumentException($"{nameof(flags)} should not contain the Artifact or Binary flag");
}
/// <summary>
/// Indicates if an output file exists in a base directory
/// </summary>
/// <param name="baseDirectory">Base directory to check in</param>
public virtual bool Exists(string baseDirectory)
{
// Ensure the directory exists
if (!Directory.Exists(baseDirectory))
return false;
foreach (string filename in Filenames)
{
// Check for invalid filenames
if (string.IsNullOrEmpty(filename))
continue;
try
{
string possibleFile = Path.Combine(baseDirectory, filename);
if (File.Exists(possibleFile))
return true;
}
catch { }
}
return false;
}
#if NET452_OR_GREATER || NETCOREAPP
/// <summary>
/// Indicates if an output file exists in an archive
/// </summary>
/// <param name="archive">Zip archive to check in</param>
public virtual bool Exists(ZipArchive? archive)
{
// If the archive is invalid
if (archive == null)
return false;
foreach (string filename in Filenames)
{
// Check for invalid filenames
if (string.IsNullOrEmpty(filename))
continue;
try
{
// Check all entries on filename alone
if (archive.Entries.Any(e => e.Name == filename))
return true;
}
catch { }
}
return false;
}
#endif
}
}
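A short usage sketch for the flag-driven checks above; the file names and directory are hypothetical, and the real lists come from each processor's GetOutputFiles:

using System;
using System.Collections.Generic;

List<OutputFile> files =
[
    new("dump.iso", OutputFileFlags.Required),
    new("dump_disc.txt", OutputFileFlags.Required | OutputFileFlags.Artifact | OutputFileFlags.Zippable, "disc"),
    new("dump.img", OutputFileFlags.Required | OutputFileFlags.Deleteable),
];

foreach (var file in files)
{
    if (file.IsRequired && !file.Exists(@"C:\dumps"))
        Console.WriteLine($"Missing required file: {string.Join(", ", file.Filenames)}");
}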

View File

@@ -18,40 +18,6 @@ namespace MPF.Processors
#region BaseProcessor Implementations
/// <inheritdoc/>
public override (bool, List<string>) CheckAllOutputFilesExist(string basePath, bool preCheck)
{
var missingFiles = new List<string>();
if (Type != MediaType.BluRay || System != RedumpSystem.SonyPlayStation3)
{
missingFiles.Add("Media and system combination not supported for PS3 CFW");
}
else
{
string? getKeyBasePath = GetCFWBasePath(basePath);
if (!File.Exists($"{getKeyBasePath}.getkey.log"))
missingFiles.Add($"{getKeyBasePath}.getkey.log");
if (!File.Exists($"{getKeyBasePath}.disc.pic"))
missingFiles.Add($"{getKeyBasePath}.disc.pic");
}
return (missingFiles.Count == 0, missingFiles);
}
/// <inheritdoc/>
public override void GenerateArtifacts(SubmissionInfo info, string basePath)
{
info.Artifacts ??= [];
string? getKeyBasePath = GetCFWBasePath(basePath);
if (File.Exists(getKeyBasePath + ".disc.pic"))
info.Artifacts["discpic"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile(getKeyBasePath + ".disc.pic", binary: true)) ?? string.Empty;
if (File.Exists(getKeyBasePath + ".getkey.log"))
info.Artifacts["getkeylog"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile(getKeyBasePath + ".getkey.log")) ?? string.Empty;
}
/// <inheritdoc/>
public override void GenerateSubmissionInfo(SubmissionInfo info, string basePath, bool redumpCompat)
{
@@ -110,26 +76,25 @@ namespace MPF.Processors
}
/// <inheritdoc/>
public override List<string> GetLogFilePaths(string basePath)
internal override List<OutputFile> GetOutputFiles(string? baseDirectory, string baseFilename)
{
var logFiles = new List<string>();
string? getKeyBasePath = GetCFWBasePath(basePath);
if (System != RedumpSystem.SonyPlayStation3)
return logFiles;
switch (Type)
{
case MediaType.BluRay:
if (File.Exists($"{getKeyBasePath}.getkey.log"))
logFiles.Add($"{getKeyBasePath}.getkey.log");
if (File.Exists($"{getKeyBasePath}.disc.pic"))
logFiles.Add($"{getKeyBasePath}.disc.pic");
break;
return [
new($"{baseFilename}.iso", OutputFileFlags.Required),
new($"{baseFilename}.getkey.log", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"getkey_log"),
new($"{baseFilename}.disc.pic", OutputFileFlags.Required
| OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"disc_pic"),
];
}
return logFiles;
return [];
}
#endregion

View File

@@ -816,7 +816,7 @@ namespace MPF.Processors
/// </summary>
/// <param name="dmi">DMI.bin file location</param>
/// <returns>String representation of the XGD1 DMI information, empty string on error</returns>
public static string GetXGD1XMID(string dmi)
public static string GetXMID(string dmi)
{
if (!File.Exists(dmi))
return string.Empty;
@@ -838,7 +838,7 @@ namespace MPF.Processors
/// </summary>
/// <param name="dmi">DMI.bin file location</param>
/// <returns>String representation of the XGD2/3 DMI information, empty string on error</returns>
public static string GetXGD23XeMID(string dmi)
public static string GetXeMID(string dmi)
{
if (!File.Exists(dmi))
return string.Empty;
@@ -1073,7 +1073,7 @@ namespace MPF.Processors
case 3:
// Determine if XGD3 SS.bin is SSv1 (Kreon) or SSv2 (0800)
bool ssv2 = ss.Skip(32).Take(72).All(x => x == 0);
bool ssv2 = ss.Skip(32).Take(72).Any(x => x != 0);
if (ssv2)
{
@@ -1174,12 +1174,18 @@ namespace MPF.Processors
startLBA[i] = (1913776 + 0x030000) * 2 - (startPSN ^ 0xFFFFFF) - 0x030000 - 1;
endLBA[i] = (1913776 + 0x030000) * 2 - (endPSN ^ 0xFFFFFF) - 0x030000 - 1;
}
else if (xgdType > 1 && startPSN >= (1913760 + 0x030000))
else if (xgdType == 2 && startPSN >= (1913760 + 0x030000))
{
// Layer 1 of XGD2 or XGD3
// Layer 1 of XGD2
startLBA[i] = (1913760 + 0x030000) * 2 - (startPSN ^ 0xFFFFFF) - 0x030000 - 1;
endLBA[i] = (1913760 + 0x030000) * 2 - (endPSN ^ 0xFFFFFF) - 0x030000 - 1;
}
else if (xgdType == 3 && startPSN >= (2133520 + 0x030000))
{
// Layer 1 of XGD3
startLBA[i] = (2133520 + 0x030000) * 2 - (startPSN ^ 0xFFFFFF) - 0x030000 - 1;
endLBA[i] = (2133520 + 0x030000) * 2 - (endPSN ^ 0xFFFFFF) - 0x030000 - 1;
}
else
{
// Layer 0

View File

@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using SabreTools.Hashing;
using SabreTools.Models.CueSheets;
using SabreTools.RedumpLib;
using SabreTools.RedumpLib.Data;
@@ -19,172 +20,6 @@ namespace MPF.Processors
#region BaseProcessor Implementations
/// <inheritdoc/>
public override (bool, List<string>) CheckAllOutputFilesExist(string basePath, bool preCheck)
{
var missingFiles = new List<string>();
switch (Type)
{
case MediaType.CDROM:
if (!File.Exists($"{basePath}.cue"))
missingFiles.Add($"{basePath}.cue");
if (!File.Exists($"{basePath}.scram") && !File.Exists($"{basePath}.scrap"))
missingFiles.Add($"{basePath}.scram");
if (!File.Exists($"{basePath}_logs.zip") || !preCheck)
{
if (!File.Exists($"{basePath}.fulltoc"))
missingFiles.Add($"{basePath}.fulltoc");
if (!File.Exists($"{basePath}.log"))
missingFiles.Add($"{basePath}.log");
else if (GetDatfile($"{basePath}.log") == null)
missingFiles.Add($"{basePath}.log (dat section)");
if (!File.Exists($"{basePath}.state"))
missingFiles.Add($"{basePath}.state");
if (!File.Exists($"{basePath}.subcode"))
missingFiles.Add($"{basePath}.subcode");
if (!File.Exists($"{basePath}.toc"))
missingFiles.Add($"{basePath}.toc");
}
// Removed or inconsistent files
//{
// // Depends on the disc
// if (!File.Exists($"{basePath}.cdtext"))
// missingFiles.Add($"{basePath}.cdtext");
//
// // Not available in all versions
// if (!File.Exists($"{basePath}.asus"))
// missingFiles.Add($"{basePath}.asus");
// if (!File.Exists($"{basePath}.atip"))
// missingFiles.Add($"{basePath}.atip");
// if (!File.Exists($"{basePath}.hash"))
// missingFiles.Add($"{basePath}.hash");
// // Also: "{basePath} (Track X).hash" (get from cuesheet)
// if (!File.Exists($"{basePath}.pma"))
// missingFiles.Add($"{basePath}.pma");
// if (!File.Exists($"{basePath}.skeleton"))
// missingFiles.Add($"{basePath}.skeleton");
// // Also: "{basePath} (Track X).skeleton" (get from cuesheet)
//}
break;
case MediaType.DVD:
if (!File.Exists($"{basePath}_logs.zip") || !preCheck)
{
if (!File.Exists($"{basePath}.log"))
missingFiles.Add($"{basePath}.log");
else if (GetDatfile($"{basePath}.log") == null)
missingFiles.Add($"{basePath}.dat");
if (!File.Exists($"{basePath}.manufacturer") && !File.Exists($"{basePath}.1.manufacturer") && !File.Exists($"{basePath}.2.manufacturer"))
missingFiles.Add($"{basePath}.manufacturer");
if (!File.Exists($"{basePath}.physical") && !File.Exists($"{basePath}.0.physical") && !File.Exists($"{basePath}.1.physical") && !File.Exists($"{basePath}.2.physical"))
missingFiles.Add($"{basePath}.physical");
if (!File.Exists($"{basePath}.state"))
missingFiles.Add($"{basePath}.state");
}
// Removed or inconsistent files
//{
// // Not available in all versions
// if (!File.Exists($"{basePath}.asus"))
// missingFiles.Add($"{basePath}.asus");
// if (!File.Exists($"{basePath}.hash"))
// missingFiles.Add($"{basePath}.hash");
// if (!File.Exists($"{basePath}.skeleton"))
// missingFiles.Add($"{basePath}.skeleton");
//}
break;
case MediaType.HDDVD: // TODO: Verify that this is output
case MediaType.BluRay:
if (!File.Exists($"{basePath}_logs.zip") || !preCheck)
{
if (!File.Exists($"{basePath}.log"))
missingFiles.Add($"{basePath}.log");
else if (GetDatfile($"{basePath}.log") == null)
missingFiles.Add($"{basePath}.dat");
if (!File.Exists($"{basePath}.physical") && !File.Exists($"{basePath}.0.physical") && !File.Exists($"{basePath}.1.physical") && !File.Exists($"{basePath}.2.physical"))
missingFiles.Add($"{basePath}.physical");
if (!File.Exists($"{basePath}.state"))
missingFiles.Add($"{basePath}.state");
}
// Removed or inconsistent files
//{
// // Not available in all versions
// if (!File.Exists($"{basePath}.asus"))
// missingFiles.Add($"{basePath}.asus");
// if (!File.Exists($"{basePath}.hash"))
// missingFiles.Add($"{basePath}.hash");
// if (!File.Exists($"{basePath}.skeleton"))
// missingFiles.Add($"{basePath}.skeleton");
//}
break;
default:
missingFiles.Add("Media and system combination not supported for Redumper");
break;
}
return (!missingFiles.Any(), missingFiles);
}
/// <inheritdoc/>
public override void GenerateArtifacts(SubmissionInfo info, string basePath)
{
info.Artifacts ??= [];
if (File.Exists($"{basePath}.asus"))
info.Artifacts["asus"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}.asus")) ?? string.Empty;
if (File.Exists($"{basePath}.atip"))
info.Artifacts["atip"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}.atip")) ?? string.Empty;
if (File.Exists($"{basePath}.cdtext"))
info.Artifacts["cdtext"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}.cdtext")) ?? string.Empty;
if (File.Exists($"{basePath}.cue"))
info.Artifacts["cue"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}.cue")) ?? string.Empty;
if (File.Exists($"{basePath}.fulltoc"))
info.Artifacts["fulltoc"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}.fulltoc")) ?? string.Empty;
if (File.Exists($"{basePath}.hash"))
info.Artifacts["hash"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}.hash")) ?? string.Empty;
// TODO: "{basePath} (Track X).hash" (get from cuesheet)
if (File.Exists($"{basePath}.log"))
info.Artifacts["log"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}.log")) ?? string.Empty;
if (File.Exists($"{basePath}.manufacturer"))
info.Artifacts["manufacturer"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}.manufacturer")) ?? string.Empty;
if (File.Exists($"{basePath}.1.manufacturer"))
info.Artifacts["manufacturer1"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}.1.manufacturer")) ?? string.Empty;
if (File.Exists($"{basePath}.2.manufacturer"))
info.Artifacts["manufacturer2"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}.2.manufacturer")) ?? string.Empty;
if (File.Exists($"{basePath}.physical"))
info.Artifacts["physical"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}.physical")) ?? string.Empty;
if (File.Exists($"{basePath}.0.physical"))
info.Artifacts["physical0"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}.0.physical")) ?? string.Empty;
if (File.Exists($"{basePath}.1.physical"))
info.Artifacts["physical1"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}.1.physical")) ?? string.Empty;
if (File.Exists($"{basePath}.2.physical"))
info.Artifacts["physical2"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}.2.physical")) ?? string.Empty;
if (File.Exists($"{basePath}.pma"))
info.Artifacts["pma"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}.pma")) ?? string.Empty;
// if (File.Exists($"{basePath}.skeleton"))
// info.Artifacts["skeleton"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}.skeleton")) ?? string.Empty;
// // Also: "{basePath} (Track X).skeleton" (get from cuesheet)
// if (File.Exists($"{basePath}.scram"))
// info.Artifacts["scram"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}.scram")) ?? string.Empty;
// if (File.Exists($"{basePath}.scrap"))
// info.Artifacts["scrap"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}.scrap")) ?? string.Empty;
if (File.Exists($"{basePath}.state"))
info.Artifacts["state"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}.state")) ?? string.Empty;
if (File.Exists($"{basePath}.subcode"))
info.Artifacts["subcode"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}.subcode")) ?? string.Empty;
if (File.Exists($"{basePath}.toc"))
info.Artifacts["toc"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}.toc")) ?? string.Empty;
}
/// <inheritdoc/>
public override void GenerateSubmissionInfo(SubmissionInfo info, string basePath, bool redumpCompat)
{
@@ -338,15 +173,60 @@ namespace MPF.Processors
break;
case RedumpSystem.MicrosoftXbox:
// TODO: Support DMI and additional file information when generated
string xmidString = ProcessingTool.GetXMID($"{basePath}.manufacturer");
var xmid = SabreTools.Serialization.Wrappers.XMID.Create(xmidString);
if (xmid != null)
{
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.XMID] = xmidString?.TrimEnd('\0') ?? string.Empty;
info.CommonDiscInfo.Serial = xmid.Serial ?? string.Empty;
if (!redumpCompat)
info.VersionAndEditions!.Version = xmid.Version ?? string.Empty;
info.CommonDiscInfo.Region = ProcessingTool.GetXGDRegion(xmid.Model.RegionIdentifier);
}
if (HashTool.GetStandardHashes($"{basePath}.manufacturer", out _, out string? dmi1Crc, out _, out _))
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.DMIHash] = dmi1Crc ?? string.Empty;
if (HashTool.GetStandardHashes($"{basePath}.physical", out _, out string? pfi1Crc, out _, out _))
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.PFIHash] = pfi1Crc ?? string.Empty;
// TODO: Support SS information when generated
break;
case RedumpSystem.MicrosoftXbox360:
// TODO: Support DMI and additional file information when generated
string xemidString = ProcessingTool.GetXeMID($"{basePath}.manufacturer");
var xemid = SabreTools.Serialization.Wrappers.XeMID.Create(xemidString);
if (xemid != null)
{
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.XeMID] = xemidString?.TrimEnd('\0') ?? string.Empty;
info.CommonDiscInfo.Serial = xemid.Serial ?? string.Empty;
if (!redumpCompat)
info.VersionAndEditions!.Version = xemid.Version ?? string.Empty;
info.CommonDiscInfo.Region = ProcessingTool.GetXGDRegion(xemid.Model.RegionIdentifier);
}
if (HashTool.GetStandardHashes($"{basePath}.manufacturer", out _, out string? dmi23Crc, out _, out _))
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.DMIHash] = dmi23Crc ?? string.Empty;
if (HashTool.GetStandardHashes($"{basePath}.physical", out _, out string? pfi23Crc, out _, out _))
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.PFIHash] = pfi23Crc ?? string.Empty;
// TODO: Support SS information when generated
break;
case RedumpSystem.NamcoSegaNintendoTriforce:
// TODO: Support header information and GD-ROM info when generated
if (Type == MediaType.CDROM)
{
info.Extras!.Header = GetGDROMHeader($"{basePath}.log",
out string? buildDate,
out string? serial,
out _,
out string? version) ?? string.Empty;
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = serial ?? string.Empty;
info.CommonDiscInfo.EXEDateBuildDate = buildDate ?? string.Empty;
// TODO: Support region setting from parsed value
info.VersionAndEditions!.Version = version ?? string.Empty;
}
break;
case RedumpSystem.SegaMegaCDSegaCD:
@@ -357,36 +237,75 @@ namespace MPF.Processors
break;
case RedumpSystem.SegaChihiro:
// TODO: Support header information and GD-ROM info when generated
if (Type == MediaType.CDROM)
{
info.Extras!.Header = GetGDROMHeader($"{basePath}.log",
out string? buildDate,
out string? serial,
out _,
out string? version) ?? string.Empty;
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = serial ?? string.Empty;
info.CommonDiscInfo.EXEDateBuildDate = buildDate ?? string.Empty;
// TODO: Support region setting from parsed value
info.VersionAndEditions!.Version = version ?? string.Empty;
}
break;
case RedumpSystem.SegaDreamcast:
// TODO: Support header information and GD-ROM info when generated
if (Type == MediaType.CDROM)
{
info.Extras!.Header = GetGDROMHeader($"{basePath}.log",
out string? buildDate,
out string? serial,
out _,
out string? version) ?? string.Empty;
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = serial ?? string.Empty;
info.CommonDiscInfo.EXEDateBuildDate = buildDate ?? string.Empty;
// TODO: Support region setting from parsed value
info.VersionAndEditions!.Version = version ?? string.Empty;
}
break;
case RedumpSystem.SegaNaomi:
// TODO: Support header information and GD-ROM info when generated
if (Type == MediaType.CDROM)
{
info.Extras!.Header = GetGDROMHeader($"{basePath}.log",
out string? buildDate,
out string? serial,
out _,
out string? version) ?? string.Empty;
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = serial ?? string.Empty;
info.CommonDiscInfo.EXEDateBuildDate = buildDate ?? string.Empty;
// TODO: Support region setting from parsed value
info.VersionAndEditions!.Version = version ?? string.Empty;
}
break;
case RedumpSystem.SegaNaomi2:
// TODO: Support header information and GD-ROM info when generated
if (Type == MediaType.CDROM)
{
info.Extras!.Header = GetGDROMHeader($"{basePath}.log",
out string? buildDate,
out string? serial,
out _,
out string? version) ?? string.Empty;
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = serial ?? string.Empty;
info.CommonDiscInfo.EXEDateBuildDate = buildDate ?? string.Empty;
// TODO: Support region setting from parsed value
info.VersionAndEditions!.Version = version ?? string.Empty;
}
break;
case RedumpSystem.SegaSaturn:
info.Extras!.Header = GetSaturnHeader($"{basePath}.log") ?? string.Empty;
// Take only the first 16 lines for Saturn
if (!string.IsNullOrEmpty(info.Extras.Header))
info.Extras.Header = string.Join("\n", info.Extras.Header.Split('\n').Take(16).ToArray());
if (GetSaturnBuildInfo(info.Extras.Header, out var saturnSerial, out var saturnVersion, out var buildDate))
{
// Ensure internal serial is pulled from local data
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = saturnSerial ?? string.Empty;
info.VersionAndEditions!.Version = saturnVersion ?? string.Empty;
info.CommonDiscInfo.EXEDateBuildDate = buildDate ?? string.Empty;
}
info.Extras!.Header = GetSaturnHeader($"{basePath}.log",
out string? saturnBuildDate,
out string? saturnSerial,
out _,
out string? saturnVersion) ?? string.Empty;
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = saturnSerial ?? string.Empty;
info.CommonDiscInfo.EXEDateBuildDate = saturnBuildDate ?? string.Empty;
// TODO: Support region setting from parsed value
info.VersionAndEditions!.Version = saturnVersion ?? string.Empty;
break;
case RedumpSystem.SonyPlayStation:
@@ -442,128 +361,194 @@ namespace MPF.Processors
}
/// <inheritdoc/>
public override List<string> GetDeleteableFilePaths(string basePath)
internal override List<OutputFile> GetOutputFiles(string? baseDirectory, string baseFilename)
{
var deleteableFiles = new List<string>();
if (File.Exists($"{basePath}.scram"))
deleteableFiles.Add($"{basePath}.scram");
if (File.Exists($"{basePath}.scrap"))
deleteableFiles.Add($"{basePath}.scrap");
return deleteableFiles;
}
/// <inheritdoc/>
public override List<string> GetLogFilePaths(string basePath)
{
var logFiles = new List<string>();
switch (Type)
{
case MediaType.CDROM:
if (File.Exists($"{basePath}.asus"))
logFiles.Add($"{basePath}.asus");
if (File.Exists($"{basePath}.atip"))
logFiles.Add($"{basePath}.atip");
if (File.Exists($"{basePath}.cdtext"))
logFiles.Add($"{basePath}.cdtext");
if (File.Exists($"{basePath}.fulltoc"))
logFiles.Add($"{basePath}.fulltoc");
if (File.Exists($"{basePath}.log"))
logFiles.Add($"{basePath}.log");
if (File.Exists($"{basePath}.pma"))
logFiles.Add($"{basePath}.pma");
if (File.Exists($"{basePath}.state"))
logFiles.Add($"{basePath}.state");
if (File.Exists($"{basePath}.subcode"))
logFiles.Add($"{basePath}.subcode");
if (File.Exists($"{basePath}.toc"))
logFiles.Add($"{basePath}.toc");
List<OutputFile> cdrom = [
new($"{baseFilename}.asus", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"asus"),
new($"{baseFilename}.atip", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"atip"),
new($"{baseFilename}.cdtext", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"cdtext"),
new($"{baseFilename}.cue", OutputFileFlags.Required),
new($"{baseFilename}.fulltoc", OutputFileFlags.Required
| OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"fulltoc"),
new($"{baseFilename}.log", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"log"),
new CustomOutputFile([$"{baseFilename}.dat", $"{baseFilename}.log"], OutputFileFlags.Required,
DatfileExists),
new($"{baseFilename}.pma", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"pma"),
new([$"{baseFilename}.scram", $"{baseFilename}.scrap"], OutputFileFlags.Required
| OutputFileFlags.Deleteable),
new($"{baseFilename}.state", OutputFileFlags.Required
| OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"state"),
new($"{baseFilename}.subcode", OutputFileFlags.Required
| OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"subcode"),
new($"{baseFilename}.toc", OutputFileFlags.Required
| OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"toc"),
];
// Get the base path for cuesheet reading
string basePath;
if (string.IsNullOrEmpty(baseDirectory))
basePath = baseFilename;
else
basePath = Path.Combine(baseDirectory, baseFilename);
// Include .hash and .skeleton for all files in cuesheet
var cueSheet = SabreTools.Serialization.Deserializers.CueSheet.DeserializeFile($"{basePath}.cue");
string? baseDir = Path.GetDirectoryName(basePath);
if (cueSheet?.Files != null && baseDir != null)
if (cueSheet?.Files != null)
{
int trackId = 1;
foreach (CueFile? file in cueSheet.Files)
{
string? trackName = Path.GetFileNameWithoutExtension(file?.FileName);
if (trackName == null)
continue;
string trackPath = Path.Combine(baseDir, trackName);
if (File.Exists($"{trackPath}.hash"))
logFiles.Add($"{trackPath}.hash");
if (File.Exists($"{trackPath}.skeleton"))
logFiles.Add($"{trackPath}.skeleton");
cdrom.Add(new($"{trackName}.hash", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
$"hash_{trackId}"));
cdrom.Add(new($"{trackName}.skeleton", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
$"skeleton_{trackId}"));
trackId++;
}
}
else
{
if (File.Exists($"{basePath}.hash"))
logFiles.Add($"{basePath}.hash");
if (File.Exists($"{basePath}.skeleton"))
logFiles.Add($"{basePath}.skeleton");
cdrom.Add(new($"{baseFilename}.hash", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"hash"));
cdrom.Add(new($"{baseFilename}.skeleton", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"skeleton"));
}
break;
return cdrom;
case MediaType.DVD:
if (File.Exists($"{basePath}.asus"))
logFiles.Add($"{basePath}.asus");
if (File.Exists($"{basePath}.hash"))
logFiles.Add($"{basePath}.hash");
if (File.Exists($"{basePath}.log"))
logFiles.Add($"{basePath}.log");
if (File.Exists($"{basePath}.manufacturer"))
logFiles.Add($"{basePath}.manufacturer");
if (File.Exists($"{basePath}.1.manufacturer"))
logFiles.Add($"{basePath}.1.manufacturer");
if (File.Exists($"{basePath}.2.manufacturer"))
logFiles.Add($"{basePath}.2.manufacturer");
if (File.Exists($"{basePath}.physical"))
logFiles.Add($"{basePath}.physical");
if (File.Exists($"{basePath}.0.physical"))
logFiles.Add($"{basePath}.0.physical");
if (File.Exists($"{basePath}.1.physical"))
logFiles.Add($"{basePath}.1.physical");
if (File.Exists($"{basePath}.2.physical"))
logFiles.Add($"{basePath}.2.physical");
if (File.Exists($"{basePath}.skeleton"))
logFiles.Add($"{basePath}.skeleton");
if (File.Exists($"{basePath}.state"))
logFiles.Add($"{basePath}.state");
break;
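// Note: .security below is marked Required only when System.IsXGD() is true (presumably Xbox
// game disc systems); for all other systems it remains optional.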
return [
new($"{baseFilename}.asus", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"asus"),
new($"{baseFilename}.hash", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"hash"),
new($"{baseFilename}.log", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"log"),
new CustomOutputFile([$"{baseFilename}.dat", $"{baseFilename}.log"], OutputFileFlags.Required,
DatfileExists),
new([$"{baseFilename}.manufacturer", $"{baseFilename}.0.manufacturer"], OutputFileFlags.Required
| OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"manufacturer_0"),
new($"{baseFilename}.1.manufacturer", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"manufacturer_1"),
new([$"{baseFilename}.physical", $"{baseFilename}.0.physical"], OutputFileFlags.Required
| OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"physical_0"),
new($"{baseFilename}.1.physical", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"physical_1"),
new($"{baseFilename}.security", System.IsXGD()
? OutputFileFlags.Required | OutputFileFlags.Binary | OutputFileFlags.Zippable
: OutputFileFlags.Binary | OutputFileFlags.Zippable,
"security"),
new($"{baseFilename}.skeleton", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"skeleton"),
new($"{baseFilename}.ss", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"ss"),
new($"{baseFilename}.ssv1", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"ssv1"),
new($"{baseFilename}.ssv2", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"ssv2"),
new($"{baseFilename}.state", OutputFileFlags.Required
| OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"state"),
];
case MediaType.HDDVD: // TODO: Confirm that this information outputs
case MediaType.BluRay:
if (File.Exists($"{basePath}.asus"))
logFiles.Add($"{basePath}.asus");
if (File.Exists($"{basePath}.hash"))
logFiles.Add($"{basePath}.hash");
if (File.Exists($"{basePath}.log"))
logFiles.Add($"{basePath}.log");
if (File.Exists($"{basePath}.physical"))
logFiles.Add($"{basePath}.physical");
if (File.Exists($"{basePath}.0.physical"))
logFiles.Add($"{basePath}.0.physical");
if (File.Exists($"{basePath}.1.physical"))
logFiles.Add($"{basePath}.1.physical");
if (File.Exists($"{basePath}.2.physical"))
logFiles.Add($"{basePath}.2.physical");
if (File.Exists($"{basePath}.skeleton"))
logFiles.Add($"{basePath}.skeleton");
if (File.Exists($"{basePath}.state"))
logFiles.Add($"{basePath}.state");
break;
return [
new($"{baseFilename}.asus", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"asus"),
new($"{baseFilename}.hash", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"hash"),
new($"{baseFilename}.log", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"log"),
new CustomOutputFile([$"{baseFilename}.dat", $"{baseFilename}.log"], OutputFileFlags.Required,
DatfileExists),
new([$"{baseFilename}.physical", $"{baseFilename}.0.physical"], OutputFileFlags.Required
| OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"physical_0"),
new($"{baseFilename}.1.physical", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"physical_1"),
new($"{baseFilename}.2.physical", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"physical_2"),
new($"{baseFilename}.3.physical", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"physical_3"),
new($"{baseFilename}.skeleton", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"skeleton"),
new($"{baseFilename}.state", OutputFileFlags.Required
| OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"state"),
];
}
return logFiles;
return [];
}
#endregion
#region Private Extra Methods
/// <summary>
/// Check whether the datfile exists in the log
/// </summary>
/// <param name="log">Log file location</param>
private static bool DatfileExists(string log)
=> GetDatfile(log) != null;
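// Used as the existence check for the CustomOutputFile entries above: the virtual .dat output
// counts as present once a datfile section can be parsed out of the .log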
#endregion
#region Information Extraction Methods
/// <summary>
@@ -797,17 +782,18 @@ namespace MPF.Processors
/// </summary>
/// <param name="log">Log file location</param>
/// <returns>True if error counts could be retrieved, false otherwise</returns>
public static bool GetErrorCount(string log, out long redumpErrors, out long c2Errors)
private static bool GetErrorCount(string log, out long redumpErrors, out long c2Errors)
{
// Set the default values for error counts
redumpErrors = -1; c2Errors = -1;
// If the file doesn't exist, we can't get info from it
if (!File.Exists(log))
{
redumpErrors = -1; c2Errors = -1;
return false;
}
try
{
redumpErrors = 0; c2Errors = 0;
using var sr = File.OpenText(log);
// Find the error counts
@@ -821,7 +807,9 @@ namespace MPF.Processors
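// Per-track error counts are summed across the log; a single unparsable count marks the total as invalid (-1)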
if (line.StartsWith("C2:"))
{
string[] parts = line.Split(' ');
if (!long.TryParse(parts[1], out c2Errors))
if (long.TryParse(parts[1], out long c2TrackErrors))
c2Errors += c2TrackErrors;
else
c2Errors = -1;
}
@@ -829,21 +817,101 @@ namespace MPF.Processors
else if (line.StartsWith("REDUMP.ORG errors:"))
{
string[] parts = line!.Split(' ');
if (!long.TryParse(parts[2], out redumpErrors))
if (long.TryParse(parts[2], out long redumpTrackErrors))
redumpErrors += redumpTrackErrors;
else
redumpErrors = -1;
}
// If either value is -1, exit the loop
if (c2Errors == -1 || redumpErrors == -1)
break;
}
// If the Redump error count is -1, then an issue occurred
return redumpErrors != -1;
// If either error count is -1, then an issue occurred
return c2Errors != -1 && redumpErrors != -1;
}
catch
{
// We don't care what the exception is right now
redumpErrors = -1; c2Errors = -1;
return false;
}
}
/// <summary>
/// Get the header from a GD-ROM LD area, if possible
/// </summary>
/// <param name="log">Log file location</param>
/// <returns>Header as a string if possible, null on error</returns>
private static string? GetGDROMHeader(string log, out string? buildDate, out string? serial, out string? region, out string? version)
{
// Set the default values
buildDate = null; serial = null; region = null; version = null;
// If the file doesn't exist, we can't get info from it
if (!File.Exists(log))
return null;
try
{
// Fast forward to the DC line
using var sr = File.OpenText(log);
while (!sr.EndOfStream && sr.ReadLine()?.TrimStart()?.StartsWith("DC [") == false) ;
if (sr.EndOfStream)
return null;
string? line, headerString = string.Empty;
while (!sr.EndOfStream)
{
line = sr.ReadLine()?.TrimStart();
if (line == null)
break;
if (line.StartsWith("build date:"))
{
buildDate = line.Substring("build date: ".Length).Trim();
}
else if (line.StartsWith("version:"))
{
version = line.Substring("version: ".Length).Trim();
}
else if (line.StartsWith("serial:"))
{
serial = line.Substring("serial: ".Length).Trim();
}
else if (line.StartsWith("region:"))
{
region = line.Substring("region: ".Length).Trim();
}
else if (line.StartsWith("regions:"))
{
region = line.Substring("regions: ".Length).Trim();
}
else if (line.StartsWith("header:"))
{
line = sr.ReadLine()?.TrimStart();
while (line?.StartsWith("00") == true)
{
headerString += line + "\n";
line = sr.ReadLine()?.TrimStart();
}
}
else
{
break;
}
}
return headerString.TrimEnd('\n');
}
catch
{
// We don't care what the exception is right now
return null;
}
}
/// <summary>
/// Get hardware information from the input file, if possible
/// </summary>
@@ -1135,11 +1203,11 @@ namespace MPF.Processors
if (line.StartsWith("anti-modchip:"))
{
// Valid but skip
// Valid but skip
}
else if (line.StartsWith("EXE:"))
{
// Valid but skip
// Valid but skip
}
else if (line.StartsWith("EXE date:"))
{
@@ -1147,11 +1215,11 @@ namespace MPF.Processors
}
else if (line.StartsWith("libcrypt:"))
{
// Valid but skip
// Valid but skip
}
else if (line.StartsWith("region:"))
{
// Valid but skip
// Valid but skip
}
else if (line.StartsWith("serial:"))
{
@@ -1324,9 +1392,9 @@ namespace MPF.Processors
/// <param name="segaHeader">String representing a formatted variant of the Saturn header</param>
/// <returns>True on successful extraction of info, false otherwise</returns>
/// TODO: Remove when Redumper gets native reading support
private static bool GetSaturnBuildInfo(string? segaHeader, out string? serial, out string? version, out string? date)
private static bool GetSaturnBuildInfo(string? segaHeader, out string? buildDate, out string? serial, out string? version)
{
serial = null; version = null; date = null;
buildDate = null; serial = null; version = null;
// If the input header is null, we can't do a thing
if (string.IsNullOrEmpty(segaHeader))
@@ -1340,8 +1408,8 @@ namespace MPF.Processors
string dateLine = header[3].Substring(58);
serial = serialVersionLine.Substring(0, 10).Trim();
version = serialVersionLine.Substring(10, 6).TrimStart('V', 'v');
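// Reformat the raw YYYYMMDD build date as YYYY-MM-DD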
date = dateLine.Substring(0, 8);
date = $"{date[0]}{date[1]}{date[2]}{date[3]}-{date[4]}{date[5]}-{date[6]}{date[7]}";
buildDate = dateLine.Substring(0, 8);
buildDate = $"{buildDate[0]}{buildDate[1]}{buildDate[2]}{buildDate[3]}-{buildDate[4]}{buildDate[5]}-{buildDate[6]}{buildDate[7]}";
return true;
}
catch
@@ -1356,8 +1424,11 @@ namespace MPF.Processors
/// </summary>
/// <param name="log">Log file location</param>
/// <returns>Header as a string if possible, null on error</returns>
private static string? GetSaturnHeader(string log)
private static string? GetSaturnHeader(string log, out string? buildDate, out string? serial, out string? region, out string? version)
{
// Set the default values
buildDate = null; serial = null; region = null; version = null;
// If the file doesn't exist, we can't get info from it
if (!File.Exists(log))
return null;
@@ -1374,13 +1445,36 @@ namespace MPF.Processors
while (!sr.EndOfStream)
{
line = sr.ReadLine()?.TrimStart();
if (line?.StartsWith("header:") == true)
if (line == null)
break;
if (line.StartsWith("build date:"))
{
buildDate = line.Substring("build date: ".Length).Trim();
}
else if (line.StartsWith("version:"))
{
version = line.Substring("version: ".Length).Trim();
}
else if (line.StartsWith("serial:"))
{
serial = line.Substring("serial: ".Length).Trim();
}
else if (line.StartsWith("region:"))
{
region = line.Substring("region: ".Length).Trim();
}
else if (line.StartsWith("regions:"))
{
region = line.Substring("regions: ".Length).Trim();
}
else if (line?.StartsWith("header:") == true)
{
line = sr.ReadLine()?.TrimStart();
while (line?.StartsWith("00") == true)
{
headerString += line + "\n";
line = sr.ReadLine()?.Trim();
line = sr.ReadLine()?.TrimStart();
}
}
else
@@ -1389,7 +1483,18 @@ namespace MPF.Processors
}
}
return headerString.TrimEnd('\n');
// Trim the header
headerString = headerString.TrimEnd('\n');
// Fallback if any info could not be found
if (GetSaturnBuildInfo(headerString, out string? buildDateP, out string? serialP, out string? versionP))
{
buildDate ??= buildDateP;
serial ??= serialP;
version ??= versionP;
}
return headerString;
}
catch
{
@@ -1543,38 +1648,6 @@ namespace MPF.Processors
}
}
/// <summary>
/// Get the write offset from the input file, if possible
/// </summary>
/// <param name="log">Log file location</param>
/// <returns>Sample write offset if possible, null on error</returns>
private static string? GetWriteOffset(string log)
{
// If the file doesn't exist, we can't get info from it
if (!File.Exists(log))
return null;
try
{
// If we find the disc write offset line, return the offset
using var sr = File.OpenText(log);
while (!sr.EndOfStream)
{
string? line = sr.ReadLine()?.TrimStart();
if (line?.StartsWith("disc write offset") == true)
return line.Substring("disc write offset: ".Length).Trim();
}
// We couldn't detect it then
return null;
}
catch
{
// We don't care what the exception is right now
return null;
}
}
/// <summary>
/// Get the version, if possible
/// </summary>
@@ -1673,6 +1746,38 @@ namespace MPF.Processors
}
}
/// <summary>
/// Get the write offset from the input file, if possible
/// </summary>
/// <param name="log">Log file location</param>
/// <returns>Sample write offset if possible, null on error</returns>
private static string? GetWriteOffset(string log)
{
// If the file doesn't exist, we can't get info from it
if (!File.Exists(log))
return null;
try
{
// If we find the disc write offset line, return the offset
using var sr = File.OpenText(log);
while (!sr.EndOfStream)
{
string? line = sr.ReadLine()?.TrimStart();
if (line?.StartsWith("disc write offset") == true)
return line.Substring("disc write offset: ".Length).Trim();
}
// We couldn't detect it then
return null;
}
catch
{
// We don't care what the exception is right now
return null;
}
}
#endregion
}
}

View File

@@ -0,0 +1,88 @@
using System.IO;
#if NET452_OR_GREATER || NETCOREAPP
using System.IO.Compression;
#endif
using System.Linq;
using System.Text.RegularExpressions;
namespace MPF.Processors
{
/// <summary>
/// Represents a single output file with a Regex-matched name
/// </summary>
internal class RegexOutputFile : OutputFile
{
/// <summary>
/// Create an OutputFile with a single filename
/// </summary>
public RegexOutputFile(string filename, OutputFileFlags flags)
: base([filename], flags)
{
}
/// <summary>
/// Create an OutputFile with a single filename
/// </summary>
public RegexOutputFile(string filename, OutputFileFlags flags, string artifactKey)
: base([filename], flags, artifactKey)
{
}
/// <summary>
/// Create an OutputFile with set of filenames
/// </summary>
public RegexOutputFile(string[] filenames, OutputFileFlags flags)
: base(filenames, flags)
{
}
/// <summary>
/// Create an OutputFile with set of filenames
/// </summary>
public RegexOutputFile(string[] filenames, OutputFileFlags flags, string artifactKey)
: base(filenames, flags, artifactKey)
{
}
/// <inheritdoc/>
public override bool Exists(string baseDirectory)
{
// Ensure the directory exists
if (!Directory.Exists(baseDirectory))
return false;
// Get list of all files in directory
var directoryFiles = Directory.GetFiles(baseDirectory);
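// Note: Directory.GetFiles returns full paths, so each Regex pattern is matched
// against the full path rather than just the filename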
foreach (string file in directoryFiles)
{
if (Filenames.Any(pattern => Regex.IsMatch(file, pattern)))
return true;
}
return false;
}
#if NET452_OR_GREATER || NETCOREAPP
/// <summary>
/// Indicates if an output file exists in an archive
/// </summary>
/// <param name="archive">Zip archive to check in</param>
public override bool Exists(ZipArchive? archive)
{
// If the archive is invalid
if (archive == null)
return false;
// Get list of all files in archive
var archiveFiles = archive.Entries.Select(e => e.Name).ToList();
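// Archive entries are compared by entry name only (no directory component)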
foreach (string file in archiveFiles)
{
if (Filenames.Any(pattern => Regex.IsMatch(file, pattern)))
return true;
}
return false;
}
#endif
}
}

View File

@@ -1,7 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SabreTools.Hashing;
using SabreTools.Models.Logiqx;
using SabreTools.RedumpLib;
@@ -19,54 +18,6 @@ namespace MPF.Processors
#region BaseProcessor Implementations
/// <inheritdoc/>
public override (bool, List<string>) CheckAllOutputFilesExist(string basePath, bool preCheck)
{
var missingFiles = new List<string>();
switch (Type)
{
case MediaType.UMD:
if (!File.Exists($"{basePath}_logs.zip") || !preCheck)
{
if (!File.Exists($"{basePath}_disc.txt"))
missingFiles.Add($"{basePath}_disc.txt");
if (!File.Exists($"{basePath}_mainError.txt"))
missingFiles.Add($"{basePath}_mainError.txt");
if (!File.Exists($"{basePath}_mainInfo.txt"))
missingFiles.Add($"{basePath}_mainInfo.txt");
if (!File.Exists($"{basePath}_volDesc.txt"))
missingFiles.Add($"{basePath}_volDesc.txt");
}
break;
default:
missingFiles.Add("Media and system combination not supported for UmdImageCreator");
break;
}
return (!missingFiles.Any(), missingFiles);
}
/// <inheritdoc/>
public override void GenerateArtifacts(SubmissionInfo info, string basePath)
{
info.Artifacts ??= [];
if (File.Exists($"{basePath}_disc.txt"))
info.Artifacts["disc"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}_disc.txt")) ?? string.Empty;
if (File.Exists($"{basePath}_drive.txt"))
info.Artifacts["drive"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}_drive.txt")) ?? string.Empty;
if (File.Exists($"{basePath}_mainError.txt"))
info.Artifacts["mainError"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}_mainError.txt")) ?? string.Empty;
if (File.Exists($"{basePath}_mainInfo.txt"))
info.Artifacts["mainInfo"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}_mainInfo.txt")) ?? string.Empty;
//if (File.Exists($"{basePath}_PFI.bin"))
// info.Artifacts["pfi"] = Convert.ToBase64String(File.ReadAllBytes($"{basePath}_PFI.bin")) ?? string.Empty;
if (File.Exists($"{basePath}_volDesc.txt"))
info.Artifacts["volDesc"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}_volDesc.txt")) ?? string.Empty;
}
/// <inheritdoc/>
public override void GenerateSubmissionInfo(SubmissionInfo info, string basePath, bool redumpCompat)
{
@@ -103,15 +54,22 @@ namespace MPF.Processors
info.SizeAndChecksums.SHA1 = sha1;
}
if (GetUMDAuxInfo(basePath + "_disc.txt", out var title, out DiscCategory? umdcat, out var umdversion, out var umdlayer, out long umdsize))
if (GetUMDAuxInfo(basePath + "_disc.txt",
out var title,
out DiscCategory? category,
out string? serial,
out var version,
out var layer,
out long size))
{
info.CommonDiscInfo!.Title = title ?? string.Empty;
info.CommonDiscInfo.Category = umdcat ?? DiscCategory.Games;
info.VersionAndEditions!.Version = umdversion ?? string.Empty;
info.SizeAndChecksums!.Size = umdsize;
info.CommonDiscInfo.Category = category ?? DiscCategory.Games;
info.CommonDiscInfo.CommentsSpecialFields![SiteCode.InternalSerialName] = serial ?? string.Empty;
info.VersionAndEditions!.Version = version ?? string.Empty;
info.SizeAndChecksums!.Size = size;
if (!string.IsNullOrEmpty(umdlayer))
info.SizeAndChecksums.Layerbreak = Int64.Parse(umdlayer ?? "-1");
if (!string.IsNullOrEmpty(layer))
info.SizeAndChecksums.Layerbreak = Int64.Parse(layer ?? "-1");
}
break;
@@ -119,30 +77,40 @@ namespace MPF.Processors
}
/// <inheritdoc/>
public override List<string> GetLogFilePaths(string basePath)
internal override List<OutputFile> GetOutputFiles(string? baseDirectory, string baseFilename)
{
var logFiles = new List<string>();
switch (Type)
{
case MediaType.UMD:
if (File.Exists($"{basePath}_disc.txt"))
logFiles.Add($"{basePath}_disc.txt");
if (File.Exists($"{basePath}_drive.txt"))
logFiles.Add($"{basePath}_drive.txt");
if (File.Exists($"{basePath}_mainError.txt"))
logFiles.Add($"{basePath}_mainError.txt");
if (File.Exists($"{basePath}_mainInfo.txt"))
logFiles.Add($"{basePath}_mainInfo.txt");
if (File.Exists($"{basePath}_volDesc.txt"))
logFiles.Add($"{basePath}_volDesc.txt");
return [
new($"{baseFilename}.iso", OutputFileFlags.Required),
if (File.Exists($"{basePath}_PFI.bin"))
logFiles.Add($"{basePath}_PFI.bin");
break;
new($"{baseFilename}_disc.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"disc"),
new($"{baseFilename}_drive.txt", OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"drive"),
new($"{baseFilename}_mainError.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"main_error"),
new($"{baseFilename}_mainInfo.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"main_info"),
new($"{baseFilename}_PFI.bin", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"pfi"),
new($"{baseFilename}_volDesc.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"vol_desc"),
];
}
return logFiles;
return [];
}
#endregion
@@ -188,9 +156,17 @@ namespace MPF.Processors
/// </summary>
/// <param name="disc">_disc.txt file location</param>
/// <returns>True on successful extraction of info, false otherwise</returns>
private static bool GetUMDAuxInfo(string disc, out string? title, out DiscCategory? umdcat, out string? umdversion, out string? umdlayer, out long umdsize)
private static bool GetUMDAuxInfo(string disc,
out string? title,
out DiscCategory? category,
out string? serial,
out string? version,
out string? layer,
out long size)
{
title = null; umdcat = null; umdversion = null; umdlayer = null; umdsize = -1;
title = null; serial = null; version = null; layer = null;
category = null;
size = -1;
// If the file doesn't exist, we can't get info from it
if (!File.Exists(disc))
@@ -207,20 +183,26 @@ namespace MPF.Processors
break;
if (line.StartsWith("TITLE") && title == null)
title = line.Substring("TITLE: ".Length);
else if (line.StartsWith("DISC_VERSION") && umdversion == null)
umdversion = line.Split(' ')[1];
title = line.Split(' ')[1];
else if (line.StartsWith("DISC_ID") && version == null)
serial = line.Split(' ')[1];
else if (line.StartsWith("DISC_VERSION") && version == null)
version = line.Split(' ')[1];
else if (line.StartsWith("pspUmdTypes"))
umdcat = ProcessingTool.GetUMDCategory(line.Split(' ')[1]);
category = ProcessingTool.GetUMDCategory(line.Split(' ')[1]);
else if (line.StartsWith("L0 length"))
umdlayer = line.Split(' ')[2];
layer = line.Split(' ')[2];
else if (line.StartsWith("FileSize:"))
umdsize = Int64.Parse(line.Split(' ')[1]);
size = Int64.Parse(line.Split(' ')[1]);
}
// If we have a serial, format it
if (!string.IsNullOrEmpty(serial) && serial!.Length > 4)
serial = serial.Substring(0, 4) + "-" + serial.Substring(4);
// If the L0 length is the size of the full disc, there's no layerbreak
if (Int64.TryParse(umdlayer, out long umdlayerValue) && umdlayerValue * 2048 == umdsize)
umdlayer = null;
if (Int64.TryParse(layer, out long umdlayerValue) && umdlayerValue * 2048 == size)
layer = null;
return true;
}

View File

@@ -1,6 +1,7 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using SabreTools.Hashing;
using SabreTools.Models.Logiqx;
using SabreTools.RedumpLib;
@@ -18,64 +19,6 @@ namespace MPF.Processors
#region BaseProcessor Implementations
/// <inheritdoc/>
public override (bool, List<string>) CheckAllOutputFilesExist(string basePath, bool preCheck)
{
var missingFiles = new List<string>();
switch (Type)
{
case MediaType.DVD:
if (!File.Exists($"{basePath}_logs.zip") || !preCheck)
{
string baseDir = Path.GetDirectoryName(basePath) + Path.DirectorySeparatorChar;
string? logPath = GetLogName(baseDir);
if (string.IsNullOrEmpty(logPath))
missingFiles.Add($"{baseDir}Log.txt");
if (!File.Exists($"{baseDir}DMI.bin"))
missingFiles.Add($"{baseDir}DMI.bin");
if (!File.Exists($"{baseDir}PFI.bin"))
missingFiles.Add($"{baseDir}PFI.bin");
if (!File.Exists($"{baseDir}SS.bin"))
missingFiles.Add($"{baseDir}SS.bin");
// Not required from XBC
//if (!File.Exists($"{basePath}.dvd"))
// missingFiles.Add($"{basePath}.dvd");
}
break;
default:
missingFiles.Add("Media and system combination not supported for XboxBackupCreator");
break;
}
return (!missingFiles.Any(), missingFiles);
}
/// <inheritdoc/>
public override void GenerateArtifacts(SubmissionInfo info, string basePath)
{
info.Artifacts ??= [];
string baseDir = Path.GetDirectoryName(basePath) + Path.DirectorySeparatorChar;
string? logPath = GetLogName(baseDir);
if (File.Exists(logPath))
info.Artifacts["log"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile(logPath!)) ?? string.Empty;
if (File.Exists($"{basePath}.dvd"))
info.Artifacts["dvd"] = ProcessingTool.GetBase64(ProcessingTool.GetFullFile($"{basePath}.dvd")) ?? string.Empty;
//if (File.Exists($"{baseDir}DMI.bin"))
// info.Artifacts["dmi"] = Convert.ToBase64String(File.ReadAllBytes($"{baseDir}DMI.bin")) ?? string.Empty;
// TODO: Include PFI artifact only if the hash doesn't match known PFI hashes
//if (File.Exists($"{baseDir}PFI.bin"))
// info.Artifacts["pfi"] = Convert.ToBase64String(File.ReadAllBytes($"{baseDir}PFI.bin")) ?? string.Empty;
//if (File.Exists($"{baseDir}SS.bin"))
// info.Artifacts["ss"] = Convert.ToBase64String(File.ReadAllBytes($"{baseDir}SS.bin")) ?? string.Empty;
//if (File.Exists($"{baseDir}RawSS.bin"))
// info.Artifacts["rawss"] = Convert.ToBase64String(File.ReadAllBytes($"{baseDir}RawSS.bin")) ?? string.Empty;
}
/// <inheritdoc/>
public override void GenerateSubmissionInfo(SubmissionInfo info, string basePath, bool redumpCompat)
{
@@ -83,10 +26,10 @@ namespace MPF.Processors
info = Builder.EnsureAllSections(info);
// Get base directory
string baseDir = Path.GetDirectoryName(basePath) + Path.DirectorySeparatorChar;
string baseDirectory = Path.GetDirectoryName(basePath) ?? string.Empty;
// Get log filename
string? logPath = GetLogName(baseDir);
string? logPath = GetLogName(baseDirectory);
if (string.IsNullOrEmpty(logPath))
return;
@@ -132,7 +75,7 @@ namespace MPF.Processors
case RedumpSystem.MicrosoftXbox:
// Parse DMI.bin
string xmidString = ProcessingTool.GetXGD1XMID($"{baseDir}DMI.bin");
string xmidString = ProcessingTool.GetXMID(Path.Combine(baseDirectory, "DMI.bin"));
var xmid = SabreTools.Serialization.Wrappers.XMID.Create(xmidString);
if (xmid != null)
{
@@ -156,7 +99,7 @@ namespace MPF.Processors
//string? mediaID = GetMediaID(logPath);
// Parse DMI.bin
string xemidString = ProcessingTool.GetXGD23XeMID($"{baseDir}DMI.bin");
string xemidString = ProcessingTool.GetXeMID(Path.Combine(baseDirectory, "DMI.bin"));
var xemid = SabreTools.Serialization.Wrappers.XeMID.Create(xemidString);
if (xemid != null)
{
@@ -171,11 +114,16 @@ namespace MPF.Processors
break;
}
// Get the output file paths
string dmiPath = Path.Combine(baseDirectory, "DMI.bin");
string pfiPath = Path.Combine(baseDirectory, "PFI.bin");
string ssPath = Path.Combine(baseDirectory, "SS.bin");
// Deal with SS.bin
if (File.Exists($"{baseDir}SS.bin"))
if (File.Exists(ssPath))
{
// Save security sector ranges
string? ranges = ProcessingTool.GetSSRanges($"{baseDir}SS.bin");
string? ranges = ProcessingTool.GetSSRanges(ssPath);
if (!string.IsNullOrEmpty(ranges))
info.Extras!.SecuritySectorRanges = ranges;
@@ -183,50 +131,59 @@ namespace MPF.Processors
//info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.SSVersion] =
// Recreate RawSS.bin
RecreateSS(logPath!, $"{baseDir}SS.bin", $"{baseDir}RawSS.bin");
RecreateSS(logPath!, ssPath, Path.Combine(baseDirectory, "RawSS.bin"));
// Run ss_sector_range to get repeatable SS hash
ProcessingTool.CleanSS($"{baseDir}SS.bin", $"{baseDir}SS.bin");
ProcessingTool.CleanSS(ssPath, ssPath);
}
// DMI/PFI/SS CRC32 hashes
if (File.Exists($"{baseDir}DMI.bin"))
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.DMIHash] = HashTool.GetFileHash($"{baseDir}DMI.bin", HashType.CRC32)?.ToUpperInvariant() ?? string.Empty;
if (File.Exists($"{baseDir}PFI.bin"))
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.PFIHash] = HashTool.GetFileHash($"{baseDir}PFI.bin", HashType.CRC32)?.ToUpperInvariant() ?? string.Empty;
if (File.Exists($"{baseDir}SS.bin"))
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.SSHash] = HashTool.GetFileHash($"{baseDir}SS.bin", HashType.CRC32)?.ToUpperInvariant() ?? string.Empty;
if (File.Exists(dmiPath))
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.DMIHash] = HashTool.GetFileHash(dmiPath, HashType.CRC32)?.ToUpperInvariant() ?? string.Empty;
if (File.Exists(pfiPath))
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.PFIHash] = HashTool.GetFileHash(pfiPath, HashType.CRC32)?.ToUpperInvariant() ?? string.Empty;
if (File.Exists(ssPath))
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.SSHash] = HashTool.GetFileHash(ssPath, HashType.CRC32)?.ToUpperInvariant() ?? string.Empty;
break;
}
}
/// <inheritdoc/>
public override List<string> GetLogFilePaths(string basePath)
internal override List<OutputFile> GetOutputFiles(string? baseDirectory, string baseFilename)
{
var logFiles = new List<string>();
string baseDir = Path.GetDirectoryName(basePath) + Path.DirectorySeparatorChar;
switch (Type)
{
case MediaType.DVD:
string? logPath = GetLogName(baseDir);
if (!string.IsNullOrEmpty(logPath))
logFiles.Add(logPath!);
if (File.Exists($"{basePath}.dvd"))
logFiles.Add($"{basePath}.dvd");
if (File.Exists($"{baseDir}DMI.bin"))
logFiles.Add($"{baseDir}DMI.bin");
if (File.Exists($"{baseDir}PFI.bin"))
logFiles.Add($"{baseDir}PFI.bin");
if (File.Exists($"{baseDir}SS.bin"))
logFiles.Add($"{baseDir}SS.bin");
if (File.Exists($"{baseDir}RawSS.bin"))
logFiles.Add($"{baseDir}RawSS.bin");
break;
return [
new($"{baseFilename}.dvd", OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"dvd"),
new($"{baseFilename}.iso", OutputFileFlags.Required),
new("DMI.bin", OutputFileFlags.Required
| OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"dmi"),
new RegexOutputFile("[lL]og.txt", OutputFileFlags.Required
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"log"),
new("PFI.bin", OutputFileFlags.Required
| OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"pfi"),
new("RawSS.bin", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"raw_ss"),
new("SS.bin", OutputFileFlags.Required
| OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"ss"),
];
}
return logFiles;
return [];
}
#endregion
@@ -240,8 +197,8 @@ namespace MPF.Processors
/// <returns>Log path if found, null otherwise</returns>
private static string? GetLogName(string baseDir)
{
if (IsSuccessfulLog($"{baseDir}Log.txt"))
return $"{baseDir}Log.txt";
if (IsSuccessfulLog(Path.Combine(baseDir, "Log.txt")))
return Path.Combine(baseDir, "Log.txt");
// Search for a renamed log file (assume there is only one)
string[] files = Directory.GetFiles(baseDir, "*.txt", SearchOption.TopDirectoryOnly);

View File

@@ -1,4 +1,4 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net6.0;net8.0</TargetFrameworks>
@@ -15,22 +15,22 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.CodeCoverage" Version="17.10.0-release-24177-07" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.10.0-release-24177-07" />
<PackageReference Include="Microsoft.CodeCoverage" Version="17.11.1" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.11.1" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.1" />
<PackageReference Include="xunit" Version="2.8.0" />
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.4" />
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.abstractions" Version="2.0.3" />
<PackageReference Include="xunit.analyzers" Version="1.13.0" />
<PackageReference Include="xunit.assert" Version="2.8.0" />
<PackageReference Include="xunit.core" Version="2.8.0" />
<PackageReference Include="xunit.extensibility.core" Version="2.8.0" />
<PackageReference Include="xunit.extensibility.execution" Version="2.8.0" />
<PackageReference Include="xunit.runner.console" Version="2.8.0">
<PackageReference Include="xunit.analyzers" Version="1.16.0" />
<PackageReference Include="xunit.assert" Version="2.9.2" />
<PackageReference Include="xunit.core" Version="2.9.2" />
<PackageReference Include="xunit.extensibility.core" Version="2.9.2" />
<PackageReference Include="xunit.extensibility.execution" Version="2.9.2" />
<PackageReference Include="xunit.runner.console" Version="2.9.2">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.0">
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>

View File

@@ -1,6 +1,6 @@
using System.Windows.Controls;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Media;
using System.Windows;
namespace MPF.UI
{

View File

@@ -1,4 +1,4 @@
<Project Sdk="Microsoft.NET.Sdk.WindowsDesktop">
<Project Sdk="Microsoft.NET.Sdk.WindowsDesktop">
<PropertyGroup>
<!-- Assembly Properties -->
@@ -16,7 +16,7 @@
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<UseWindowsForms>true</UseWindowsForms>
<UseWPF>true</UseWPF>
<VersionPrefix>3.2.1</VersionPrefix>
<VersionPrefix>3.2.3</VersionPrefix>
<!-- Package Properties -->
<AssemblyName>MPF</AssemblyName>
@@ -37,8 +37,7 @@
<ReferenceWpfLunaTheme>false</ReferenceWpfLunaTheme>
<ReferenceWpfRoyaleTheme>false</ReferenceWpfRoyaleTheme>
</PropertyGroup>
<PropertyGroup
Condition="$(TargetFramework.StartsWith(`netcoreapp`)) OR $(TargetFramework.StartsWith(`net5`)) OR $(TargetFramework.StartsWith(`net6`)) OR $(TargetFramework.StartsWith(`net7`)) OR $(TargetFramework.StartsWith(`net8`))">
<PropertyGroup Condition="$(TargetFramework.StartsWith(`netcoreapp`)) OR $(TargetFramework.StartsWith(`net5`)) OR $(TargetFramework.StartsWith(`net6`)) OR $(TargetFramework.StartsWith(`net7`)) OR $(TargetFramework.StartsWith(`net8`))">
<ReferenceWpfAeroTheme>false</ReferenceWpfAeroTheme>
<ReferenceWpfAero2Theme>true</ReferenceWpfAero2Theme>
<ReferenceWpfAeroLiteTheme>false</ReferenceWpfAeroLiteTheme>
@@ -73,10 +72,10 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="BinaryObjectScanner" PrivateAssets="build; analyzers" ExcludeAssets="contentFiles" Version="3.1.13" GeneratePathProperty="true">
<PackageReference Include="BinaryObjectScanner" PrivateAssets="build; analyzers" ExcludeAssets="contentFiles" Version="3.1.16" GeneratePathProperty="true">
<IncludeAssets>runtime; compile; build; native; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.1" />
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.4" />
</ItemGroup>
<ItemGroup>

View File

@@ -181,7 +181,7 @@ namespace MPF.UI.Windows
/// </summary>
/// <param name="submissionInfo">SubmissionInfo object to display and possibly change</param>
/// <returns>Dialog open result</returns>
public (bool?, SubmissionInfo?) ShowDiscInformationWindow(SubmissionInfo? submissionInfo)
public bool? ShowDiscInformationWindow(ref SubmissionInfo? submissionInfo)
{
var discInformationWindow = new DiscInformationWindow(CheckDumpViewModel.Options, submissionInfo)
{
@@ -199,7 +199,7 @@ namespace MPF.UI.Windows
if (result == true)
submissionInfo = (discInformationWindow.DiscInformationViewModel.SubmissionInfo.Clone() as SubmissionInfo)!;
return (result, submissionInfo!);
return result;
}
#endregion

View File

@@ -237,7 +237,7 @@
Text="{Binding Path=SubmissionInfo.CommonDiscInfo.CommentsSpecialFields[(redump:SiteCode)ActivisionID], Mode=TwoWay}"/>
<controls:UserInput x:Name="BandaiIDTextBox" Label="Bandai ID"
Text="{Binding Path=SubmissionInfo.CommonDiscInfo.CommentsSpecialFields[(redump:SiteCode)BandaiID], Mode=TwoWay}"/>
<controls:UserInput x:Name="BethesdaIDTextBox" Label="Bandai ID"
<controls:UserInput x:Name="BethesdaIDTextBox" Label="Bethesda ID"
Text="{Binding Path=SubmissionInfo.CommonDiscInfo.CommentsSpecialFields[(redump:SiteCode)BethesdaID], Mode=TwoWay}"/>
<controls:UserInput x:Name="CDProjektIDTextBox" Label="CD Projekt ID"
Text="{Binding Path=SubmissionInfo.CommonDiscInfo.CommentsSpecialFields[(redump:SiteCode)CDProjektID], Mode=TwoWay}"/>

View File

@@ -237,7 +237,7 @@ namespace MPF.UI.Windows
/// <param name="showIfSame">True to show the box even if it's the same, false to only show if it's different</param>
public void CheckForUpdates(bool showIfSame)
{
(bool different, string message, var url) = MainViewModel.CheckForUpdates();
MainViewModel.CheckForUpdates(out bool different, out string message, out var url);
// If we have a new version, put it in the clipboard
if (different && !string.IsNullOrEmpty(url))
@@ -287,8 +287,8 @@ namespace MPF.UI.Windows
public void ShowDebugDiscInfoWindow()
{
var submissionInfo = MainViewModel.CreateDebugSubmissionInfo();
var result = ShowDiscInformationWindow(submissionInfo);
Formatter.ProcessSpecialFields(result.Item2);
_ = ShowDiscInformationWindow(ref submissionInfo);
Formatter.ProcessSpecialFields(submissionInfo);
}
/// <summary>
@@ -296,7 +296,7 @@ namespace MPF.UI.Windows
/// </summary>
/// <param name="submissionInfo">SubmissionInfo object to display and possibly change</param>
/// <returns>Dialog open result</returns>
public (bool?, SubmissionInfo?) ShowDiscInformationWindow(SubmissionInfo? submissionInfo)
public bool? ShowDiscInformationWindow(ref SubmissionInfo? submissionInfo)
{
if (MainViewModel.Options.ShowDiscEjectReminder)
CustomMessageBox.Show(this, "It is now safe to eject the disc", "Eject", MessageBoxButton.OK, MessageBoxImage.Information);
@@ -317,7 +317,7 @@ namespace MPF.UI.Windows
if (result == true)
submissionInfo = (discInformationWindow.DiscInformationViewModel.SubmissionInfo.Clone() as SubmissionInfo)!;
return (result, submissionInfo!);
return result;
}
/// <summary>
@@ -337,7 +337,8 @@ namespace MPF.UI.Windows
WindowStartupLocation = WindowStartupLocation.CenterOwner,
};
checkDumpWindow.Closed += delegate {
checkDumpWindow.Closed += delegate
{
// Unhide Main window after Check window has been closed
this.Show();
this.Activate();
@@ -362,7 +363,8 @@ namespace MPF.UI.Windows
WindowStartupLocation = WindowStartupLocation.CenterOwner,
};
createIRDWindow.Closed += delegate {
createIRDWindow.Closed += delegate
{
// Unhide Main window after Create IRD window has been closed
this.Show();
this.Activate();
@@ -540,14 +542,14 @@ namespace MPF.UI.Windows
#endif
{
#if NET40
var (output, error) = MainViewModel.ScanAndShowProtection();
var output = MainViewModel.ScanAndShowProtection();
#else
var (output, error) = await MainViewModel.ScanAndShowProtection();
var output = await MainViewModel.ScanAndShowProtection();
#endif
if (!MainViewModel.LogPanelExpanded)
{
if (!string.IsNullOrEmpty(output) && string.IsNullOrEmpty(error))
if (!string.IsNullOrEmpty(output))
CustomMessageBox.Show(this, output, "Detected Protection(s)", MessageBoxButton.OK, MessageBoxImage.Information);
else
CustomMessageBox.Show(this, "An exception occurred, see the log for details", "Error!", MessageBoxButton.OK, MessageBoxImage.Error);

View File

@@ -6,6 +6,7 @@ using System.Windows.Controls;
using System.Windows.Forms;
using MPF.Frontend;
using MPF.Frontend.ViewModels;
using SabreTools.RedumpLib.Web;
using WPFCustomMessageBox;
#pragma warning disable IDE1006 // Naming Styles
@@ -201,10 +202,11 @@ namespace MPF.UI.Windows
private async Task ValidateRedumpCredentials()
{
#if NET35
(bool? success, string? message) = await OptionsViewModel.TestRedumpLogin(_RedumpUsernameTextBox!.Text, _RedumpPasswordBox!.Password);
bool? success = await RedumpClient.ValidateCredentials(_RedumpUsernameTextBox!.Text, _RedumpPasswordBox!.Password);
#else
(bool? success, string? message) = await OptionsViewModel.TestRedumpLogin(RedumpUsernameTextBox.Text, RedumpPasswordBox.Password);
bool? success = await RedumpClient.ValidateCredentials(RedumpUsernameTextBox.Text, RedumpPasswordBox.Password);
#endif
string message = OptionsViewModel.GetRedumpLoginResult(success);
if (success == true)
CustomMessageBox.Show(this, message, "Success", MessageBoxButton.OK, MessageBoxImage.Information);

View File

@@ -1,5 +1,5 @@
# version format
version: 3.2.1-{build}
version: 3.2.3-{build}
# pull request template
pull_requests: