Compare commits


47 Commits
3.2.2 ... 3.2.3

Author SHA1 Message Date
Matt Nadareski
8a048c8a57 Bump version 2024-11-06 15:36:56 -05:00
Matt Nadareski
d51db072fc Force rebuild of rolling 2024-11-05 15:33:45 -05:00
Matt Nadareski
468c9937da Reduce null use in BaseProcessor 2024-11-05 15:30:54 -05:00
Matt Nadareski
206ac76633 Fix missed GetOutputFiles invocation 2024-11-05 15:23:03 -05:00
Matt Nadareski
339b0d93d1 Ensure debug symbols are stripped 2024-11-05 14:13:18 -05:00
Matt Nadareski
fd5da5de74 Add conf to build matrix 2024-11-05 13:53:05 -05:00
Matt Nadareski
42146f991d Attempt to reduce nesting in GHA builds 2024-11-05 13:46:44 -05:00
Matt Nadareski
f3f05eee48 Attempt to reduce nesting in GHA builds 2024-11-05 13:45:40 -05:00
Matt Nadareski
61bf2f69aa Attempt to reduce nesting in GHA builds 2024-11-05 13:40:05 -05:00
Matt Nadareski
546aa70b69 Update BinaryObjectScanner to 3.1.16 2024-11-05 13:17:49 -05:00
Matt Nadareski
65cd1cede3 Remove last instances of ValueTuple usage 2024-11-03 23:14:22 -05:00
Matt Nadareski
dffa16139a Remove tupling from everything except CLI programs 2024-11-03 23:09:54 -05:00
Matt Nadareski
98bacf11fc Reduce tupling even more 2024-11-03 22:51:24 -05:00
Matt Nadareski
ee0ccecfb2 Replace user info func with ProcessUserInfoDelegate 2024-11-03 22:29:26 -05:00
Matt Nadareski
a0825f276b Use new ProtectionDictionary type 2024-11-03 22:15:42 -05:00
Matt Nadareski
66d61e20de Reduce call complexity for login result 2024-11-03 21:59:22 -05:00
Matt Nadareski
6455ebd434 Simplify GetMediaType return 2024-11-03 21:55:04 -05:00
Matt Nadareski
f4436fddfa Remove unused CompilerServices.Unsafe library 2024-11-03 21:06:14 -05:00
Matt Nadareski
317777cf93 Remove old .NET version of ValueTuple where possible 2024-11-03 20:40:36 -05:00
Matt Nadareski
da6dbe136e Remove unnecessary System.ValueTuple usage 2024-11-03 20:38:35 -05:00
Matt Nadareski
13d7cf8124 Remove unused IndexRange library 2024-11-03 20:16:48 -05:00
Matt Nadareski
bb8fea625b Update BinaryObjectScanner to 3.1.15 2024-11-03 20:13:38 -05:00
Matt Nadareski
15a0659867 Use rolling release, not AppVeyor, in issue templates 2024-10-22 12:39:51 -04:00
Matt Nadareski
988a5f6d2b Update RedumpLib to 1.4.4 2024-10-18 13:04:53 -04:00
Matt Nadareski
1941639473 No directory means no files 2024-10-16 12:04:39 -04:00
Matt Nadareski
d1772f743e Reduce cleverness in output file code 2024-10-16 11:45:06 -04:00
Matt Nadareski
870c2d1cec Add separate field for Regex; assorted cleanup 2024-10-16 02:48:39 -04:00
Matt Nadareski
f7f6ae1eee Ensure Regex directories are unescaped 2024-10-16 02:31:11 -04:00
Matt Nadareski
dd9e527592 Use new output file logic in processors 2024-10-16 01:57:31 -04:00
Matt Nadareski
e1122fa976 Ensure consistency in output file path checking (fixes #755) 2024-10-16 01:56:48 -04:00
Matt Nadareski
8a44fa3355 Ensure that the full base path is being used 2024-10-14 21:47:05 -04:00
Matt Nadareski
51a9e3005f Use fake filename for Redumper DAT 2024-10-14 21:23:16 -04:00
Matt Nadareski
413b6da24b Fix trimming of header output (fixes #754) 2024-10-13 11:00:07 -04:00
Matt Nadareski
172a0fb5dc Update Redumper to build 416 2024-10-12 20:13:27 -04:00
Matt Nadareski
83a189a5d3 Format CleanRip BCA wtih 2-byte blocks (fixes #743) 2024-10-10 12:02:22 -04:00
Matt Nadareski
60c27ec89b Sum track errors in Redumper (fixes #745) 2024-10-10 11:19:27 -04:00
Matt Nadareski
51733557cd Remove ReadLine in list commands 2024-10-10 11:11:54 -04:00
Matt Nadareski
93d964c603 Make .NET 8 the default in issue reports 2024-10-09 12:37:21 -04:00
Matt Nadareski
2925f2262b Change multiple offset delimiter 2024-10-08 22:04:03 -04:00
Matt Nadareski
5b211a7345 Fix faulty offset dedupe logic 2024-10-08 21:53:06 -04:00
Matt Nadareski
ed4bd24fcb Include all DIC write offsets (fixes #750) 2024-10-08 15:47:24 -04:00
Matt Nadareski
8a7761753b Update to DIC 20241001 2024-10-01 22:44:41 -04:00
Matt Nadareski
1b8cca9999 Update changelog 2024-10-01 10:55:33 -04:00
TheRogueArchivist
b75391b1c6 Fix SafeDisc filtering (#749)
* Start updating filtering for SafeDisc

This will need more work, as it is currently incomplete, unoptimized, and untested.

* Further updates to SafeDisc Filtering

Still not done, but most, if not all, of the major edge cases should be accounted for. Mostly it just needs testing to make sure I didn't accidentally break something along the way, plus further polishing of the code and outputs.

* Further update to SafeDisc Filtering

More cleanly covers another specific case.

* Hopefully final main additions to SafeDisc filtering

* Update SafeDisc matching for newest BOS

Fix things that broke with the BOS update, and update a few comments.
2024-10-01 10:53:58 -04:00
Matt Nadareski
e9c2fd9245 Update BinaryObjectScanner to 3.1.14 2024-09-28 13:21:37 -04:00
Matt Nadareski
fb24bbd8a5 Update to DIC 20240901 2024-09-26 10:56:50 -04:00
Matt Nadareski
4e3083c8e6 Fix date 2024-09-24 14:11:02 -04:00
44 changed files with 760 additions and 530 deletions

View File

@@ -9,7 +9,7 @@ assignees: mnadareski
**Before You Submit**
- Remember to try the [latest WIP build](https://ci.appveyor.com/project/mnadareski/mpf/build/artifacts) to see if the feature already exists.
- Remember to try the [latest WIP build](https://github.com/SabreTools/MPF/releases/tag/rolling) to see if the feature already exists.
- Is it copy protection related? If so, report the issue [here](https://github.com/SabreTools/BinaryObjectScanner/issues) instead.
- Check [previous issues](https://github.com/SabreTools/MPF/issues) to see if any of those are related to what you're about to ask for.

View File

@@ -9,7 +9,7 @@ assignees: mnadareski
**Before You Submit**
- Remember to try the [latest WIP build](https://ci.appveyor.com/project/mnadareski/mpf/build/artifacts) to see if the feature already exists.
- Remember to try the [latest WIP build](https://github.com/SabreTools/MPF/releases/tag/rolling) to see if the feature already exists.
- Is it copy protection related? If so, report the issue [here](https://github.com/SabreTools/BinaryObjectScanner/issues) instead.
- Check [previous issues](https://github.com/SabreTools/MPF/issues) to see if any of those are related to what you're about to ask for.

View File

@@ -9,7 +9,7 @@ assignees: mnadareski
**Before You Submit**
- Remember to try the [latest WIP build](https://ci.appveyor.com/project/mnadareski/mpf/build/artifacts) to see if the issue has already been addressed.
- Remember to try the [latest WIP build](https://github.com/SabreTools/MPF/releases/tag/rolling) to see if the issue has already been addressed.
- Is it copy protection related? If so, report the issue [here](https://github.com/SabreTools/BinaryObjectScanner/issues) instead.
- Check multiple discs to help narrow down the issue
- Check the Options to see if changing any of those affects your issue.
@@ -25,7 +25,6 @@ What version are you using?
**Build**
What runtime version are you using?
- [ ] .NET 6.0 running on (Operating System)
- [ ] .NET 8.0 running on (Operating System)
**Describe the issue**

View File

@@ -13,6 +13,7 @@ jobs:
project: [MPF.Check]
runtime: [win-x86, win-x64, win-arm64, linux-x64, linux-arm64, osx-x64, osx-arm64]
framework: [net8.0] #[net20, net35, net40, net452, net472, net48, netcoreapp3.1, net5.0, net6.0, net7.0, net8.0]
conf: [Debug] #[Release, Debug]
steps:
- uses: actions/checkout@v4
@@ -26,22 +27,24 @@ jobs:
run: dotnet restore
- name: Build
run: dotnet publish ${{ matrix.project }}/${{ matrix.project }}.csproj -f ${{ matrix.framework }} -r ${{ matrix.runtime }} -c Debug --self-contained true --version-suffix ${{ github.sha }} ${{ (startsWith(matrix.framework, 'net5') || startsWith(matrix.framework, 'net6') || startsWith(matrix.framework, 'net7') || startsWith(matrix.framework, 'net8')) && '-p:PublishSingleFile=true' || ''}}
run: dotnet publish ${{ matrix.project }}/${{ matrix.project }}.csproj -f ${{ matrix.framework }} -r ${{ matrix.runtime }} -c ${{ matrix.conf == 'Release' && 'Release -p:DebugType=None -p:DebugSymbols=false' || 'Debug'}} --self-contained true --version-suffix ${{ github.sha }} ${{ (startsWith(matrix.framework, 'net5') || startsWith(matrix.framework, 'net6') || startsWith(matrix.framework, 'net7') || startsWith(matrix.framework, 'net8')) && '-p:PublishSingleFile=true' || ''}}
- name: Archive build
run: zip -r ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_debug.zip ${{ matrix.project }}/bin/Debug/${{ matrix.framework }}/${{ matrix.runtime }}/publish/
run: |
cd ${{ matrix.project }}/bin/${{ matrix.conf }}/${{ matrix.framework }}/${{ matrix.runtime }}/publish/
zip -r ${{ github.workspace }}/${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip ./
- name: Upload build
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_debug
path: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_debug.zip
name: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}
path: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip
- name: Upload to rolling
uses: ncipollo/release-action@v1.14.0
with:
allowUpdates: True
artifacts: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_debug.zip
artifacts: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip
body: 'Last built commit: ${{ github.sha }}'
name: 'Rolling Release'
prerelease: True

View File

@@ -13,6 +13,7 @@ jobs:
project: [MPF.CLI]
runtime: [win-x86, win-x64, win-arm64, linux-x64, linux-arm64, osx-x64, osx-arm64]
framework: [net8.0] #[net20, net35, net40, net452, net472, net48, netcoreapp3.1, net5.0, net6.0, net7.0, net8.0]
conf: [Debug] #[Release, Debug]
steps:
- uses: actions/checkout@v4
@@ -26,22 +27,24 @@ jobs:
run: dotnet restore
- name: Build
run: dotnet publish ${{ matrix.project }}/${{ matrix.project }}.csproj -f ${{ matrix.framework }} -r ${{ matrix.runtime }} -c Debug --self-contained true --version-suffix ${{ github.sha }} ${{ (startsWith(matrix.framework, 'net5') || startsWith(matrix.framework, 'net6') || startsWith(matrix.framework, 'net7') || startsWith(matrix.framework, 'net8')) && '-p:PublishSingleFile=true' || ''}}
run: dotnet publish ${{ matrix.project }}/${{ matrix.project }}.csproj -f ${{ matrix.framework }} -r ${{ matrix.runtime }} -c ${{ matrix.conf == 'Release' && 'Release -p:DebugType=None -p:DebugSymbols=false' || 'Debug'}} --self-contained true --version-suffix ${{ github.sha }} ${{ (startsWith(matrix.framework, 'net5') || startsWith(matrix.framework, 'net6') || startsWith(matrix.framework, 'net7') || startsWith(matrix.framework, 'net8')) && '-p:PublishSingleFile=true' || ''}}
- name: Archive build
run: zip -r ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_debug.zip ${{ matrix.project }}/bin/Debug/${{ matrix.framework }}/${{ matrix.runtime }}/publish/
run: |
cd ${{ matrix.project }}/bin/${{ matrix.conf }}/${{ matrix.framework }}/${{ matrix.runtime }}/publish/
zip -r ${{ github.workspace }}/${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip ./
- name: Upload build
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_debug
path: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_debug.zip
name: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}
path: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip
- name: Upload to rolling
uses: ncipollo/release-action@v1.14.0
with:
allowUpdates: True
artifacts: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_debug.zip
artifacts: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip
body: 'Last built commit: ${{ github.sha }}'
name: 'Rolling Release'
prerelease: True

View File

@@ -13,6 +13,7 @@ jobs:
project: [MPF.UI]
runtime: [win-x86, win-x64]
framework: [net8.0-windows] #[net40, net452, net472, net48, netcoreapp3.1, net5.0-windows, net6.0-windows, net7.0-windows, net8.0-windows]
conf: [Debug] #[Release, Debug]
steps:
- uses: actions/checkout@v4
@@ -26,36 +27,38 @@ jobs:
run: dotnet restore
- name: Build
run: dotnet publish ${{ matrix.project }}/${{ matrix.project }}.csproj -f ${{ matrix.framework }} -r ${{ matrix.runtime }} -c Debug --self-contained true --version-suffix ${{ github.sha }} ${{ (startsWith(matrix.framework, 'net5') || startsWith(matrix.framework, 'net6') || startsWith(matrix.framework, 'net7') || startsWith(matrix.framework, 'net8')) && '-p:PublishSingleFile=true' || ''}}
run: dotnet publish ${{ matrix.project }}/${{ matrix.project }}.csproj -f ${{ matrix.framework }} -r ${{ matrix.runtime }} -c ${{ matrix.conf == 'Release' && 'Release -p:DebugType=None -p:DebugSymbols=false' || 'Debug'}} --self-contained true --version-suffix ${{ github.sha }} ${{ (startsWith(matrix.framework, 'net5') || startsWith(matrix.framework, 'net6') || startsWith(matrix.framework, 'net7') || startsWith(matrix.framework, 'net8')) && '-p:PublishSingleFile=true' || ''}}
- name: Bundle DiscImageCreator
run: |
wget https://github.com/user-attachments/files/15521936/DiscImageCreator_20240601.zip
unzip -u DiscImageCreator_20240601.zip
mkdir -p MPF.UI/bin/Debug/${{ matrix.framework }}/${{ matrix.runtime }}/publish/Programs/Creator
mv Release_ANSI/* MPF.UI/bin/Debug/${{ matrix.framework }}/${{ matrix.runtime }}/publish/Programs/Creator/
wget https://github.com/user-attachments/files/17211434/DiscImageCreator_20241001.zip
unzip -u DiscImageCreator_20241001.zip
mkdir -p MPF.UI/bin/${{ matrix.conf }}/${{ matrix.framework }}/${{ matrix.runtime }}/publish/Programs/Creator
mv Release_ANSI/* MPF.UI/bin/${{ matrix.conf }}/${{ matrix.framework }}/${{ matrix.runtime }}/publish/Programs/Creator/
- name: Bundle Redumper
run: |
wget https://github.com/superg/redumper/releases/download/build_371/redumper-2024.05.27_build371-win64.zip
unzip redumper-2024.05.27_build371-win64.zip
mkdir -p MPF.UI/bin/Debug/${{ matrix.framework }}/${{ matrix.runtime }}/publish/Programs/Redumper
mv redumper-2024.05.27_build371-win64/bin/redumper.exe MPF.UI/bin/Debug/${{ matrix.framework }}/${{ matrix.runtime }}/publish/Programs/Redumper/
wget https://github.com/superg/redumper/releases/download/build_416/redumper-2024.10.12_build416-win64.zip
unzip redumper-2024.10.12_build416-win64.zip
mkdir -p MPF.UI/bin/${{ matrix.conf }}/${{ matrix.framework }}/${{ matrix.runtime }}/publish/Programs/Redumper
mv redumper-2024.10.12_build416-win64/bin/redumper.exe MPF.UI/bin/${{ matrix.conf }}/${{ matrix.framework }}/${{ matrix.runtime }}/publish/Programs/Redumper/
- name: Archive build
run: zip -r ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_debug.zip ${{ matrix.project }}/bin/Debug/${{ matrix.framework }}/${{ matrix.runtime }}/publish/
run: |
cd ${{ matrix.project }}/bin/${{ matrix.conf }}/${{ matrix.framework }}/${{ matrix.runtime }}/publish/
zip -r ${{ github.workspace }}/${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip ./
- name: Upload build
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_debug
path: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_debug.zip
name: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}
path: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip
- name: Upload to rolling
uses: ncipollo/release-action@v1.14.0
with:
allowUpdates: True
artifacts: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_debug.zip
artifacts: ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip
body: 'Last built commit: ${{ github.sha }}'
name: 'Rolling Release'
prerelease: True

View File

@@ -1,4 +1,49 @@
### 3.2.2 (2024-09-xx)
### 3.2.3 (2024-11-06)
- Update to DIC 20240901
- Update BinaryObjectScanner to 3.1.14
- Fix SafeDisc filtering (TheRogueArchivist)
- Update to DIC 20241001
- Include all DIC write offsets
- Fix faulty offset dedupe logic
- Change multiple offset delimiter
- Make .NET 8 the default in issue reports
- Remove ReadLine in list commands
- Sum track errors in Redumper
- Format CleanRip BCA wtih 2-byte blocks
- Update Redumper to build 416
- Fix trimming of header output
- Use fake filename for Redumper DAT
- Ensure that the full base path is being used
- Ensure consistency in output file path checking
- Use new output file logic in processors
- Ensure Regex directories are unescaped
- Add separate field for Regex; assorted cleanup
- Reduce cleverness in output file code
- No directory means no files
- Update RedumpLib to 1.4.4
- Use rolling release, not AppVeyor, in issue templates
- Update BinaryObjectScanner to 3.1.15
- Remove unused IndexRange library
- Remove unnecessary System.ValueTuple usage
- Remove old .NET version of ValueTuple where possible
- Remove unused CompilerServices.Unsafe library
- Simplify GetMediaType return
- Reduce call complexity for login result
- Use new ProtectionDictionary type
- Replace user info func with ProcessUserInfoDelegate
- Reduce tupling even more
- Remove tupling from everything except CLI programs
- Remove last instances of ValueTuple usage
- Update BinaryObjectScanner to 3.1.16
- Attempt to reduce nesting in GHA builds
- Add conf to build matrix
- Ensure debug symbols are stripped
- Fix missed GetOutputFiles invocation
- Reduce null use in BaseProcessor
- Force rebuild of rolling
### 3.2.2 (2024-09-24)
- Clean up some Check options, add IRD option
- Add Check flags for protection scan extras

View File

@@ -1,4 +1,4 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<!-- Assembly Properties -->
@@ -10,7 +10,7 @@
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<VersionPrefix>3.2.2</VersionPrefix>
<VersionPrefix>3.2.3</VersionPrefix>
<!-- Package Properties -->
<Title>MPF CLI</Title>
@@ -36,6 +36,10 @@
<TargetFrameworks>net6.0;net7.0;net8.0</TargetFrameworks>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\MPF.Frontend\MPF.Frontend.csproj" />
</ItemGroup>
<!-- Support for old .NET versions -->
<ItemGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR $(TargetFramework.StartsWith(`net40`))">
<PackageReference Include="MinAsyncBridge" Version="0.12.4" />
@@ -44,14 +48,10 @@
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\MPF.Frontend\MPF.Frontend.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="BinaryObjectScanner" PrivateAssets="build; analyzers" ExcludeAssets="contentFiles" Version="3.1.13" GeneratePathProperty="true">
<PackageReference Include="BinaryObjectScanner" PrivateAssets="build; analyzers" ExcludeAssets="contentFiles" Version="3.1.16" GeneratePathProperty="true">
<IncludeAssets>runtime; compile; build; native; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.1" />
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.4" />
</ItemGroup>
<ItemGroup>

View File

@@ -40,7 +40,7 @@ namespace MPF.CLI
}
// Try processing the common arguments
(bool success, MediaType mediaType, RedumpSystem? knownSystem, var error) = OptionsLoader.ProcessCommonArguments(args);
bool success = OptionsLoader.ProcessCommonArguments(args, out MediaType mediaType, out RedumpSystem? knownSystem, out var error);
if (!success)
{
DisplayHelp(error);
@@ -48,12 +48,20 @@ namespace MPF.CLI
}
// Validate the supplied credentials
(bool? _, string? message) = RedumpClient.ValidateCredentials(options.RedumpUsername ?? string.Empty, options.RedumpPassword ?? string.Empty).GetAwaiter().GetResult();
bool? validated = RedumpClient.ValidateCredentials(options.RedumpUsername ?? string.Empty, options.RedumpPassword ?? string.Empty).GetAwaiter().GetResult();
string message = validated switch
{
true => "Redump username and password accepted!",
false => "Redump username and password denied!",
null => "An error occurred validating your credentials!",
};
if (!string.IsNullOrEmpty(message))
Console.WriteLine(message);
// Process any custom parameters
(CommandOptions opts, int startIndex) = LoadFromArguments(args, options, startIndex: 2);
int startIndex = 2;
CommandOptions opts = LoadFromArguments(args, options, ref startIndex);
// Validate the internal program
switch (options.InternalProgram)
@@ -202,18 +210,21 @@ namespace MPF.CLI
/// <summary>
/// Load the current set of options from application arguments
/// </summary>
private static (CommandOptions, int) LoadFromArguments(string[] args, Frontend.Options options, int startIndex = 0)
private static CommandOptions LoadFromArguments(string[] args, Frontend.Options options, ref int startIndex)
{
// Create return values
var opts = new CommandOptions();
// If we have no arguments, just return
if (args == null || args.Length == 0)
return (opts, 0);
{
startIndex = 0;
return opts;
}
// If we have an invalid start index, just return
if (startIndex < 0 || startIndex >= args.Length)
return (opts, startIndex);
return opts;
// Loop through the arguments and parse out values
for (; startIndex < args.Length; startIndex++)
@@ -299,7 +310,7 @@ namespace MPF.CLI
}
}
return (opts, startIndex);
return opts;
}
/// <summary>
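
For context, a minimal, self-contained sketch of the credential-message pattern the refactored CLI (and Check) programs now use: the real `RedumpClient.ValidateCredentials` is asynchronous and returns `bool?`; the local `ValidateCredentials` stub below is only a stand-in so the example compiles on its own.

```csharp
using System;

class CredentialsMessageSketch
{
    // Stand-in for RedumpClient.ValidateCredentials: the real call is async and
    // returns bool? (true = accepted, false = denied, null = error).
    static bool? ValidateCredentials(string username, string password)
    {
        if (string.IsNullOrEmpty(username) || string.IsNullOrEmpty(password))
            return null;
        return username == "demo" && password == "password";
    }

    static void Main()
    {
        bool? validated = ValidateCredentials("demo", "password");

        // The switch expression replaces the old (bool?, string?) tuple return.
        string message = validated switch
        {
            true => "Redump username and password accepted!",
            false => "Redump username and password denied!",
            null => "An error occurred validating your credentials!",
        };

        if (!string.IsNullOrEmpty(message))
            Console.WriteLine(message);
    }
}
```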

View File

@@ -1,4 +1,4 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<!-- Assembly Properties -->
@@ -10,7 +10,7 @@
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<VersionPrefix>3.2.2</VersionPrefix>
<VersionPrefix>3.2.3</VersionPrefix>
<!-- Package Properties -->
<Title>MPF Check</Title>
@@ -40,6 +40,10 @@
<None Include="App.config" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\MPF.Frontend\MPF.Frontend.csproj" />
</ItemGroup>
<!-- Support for old .NET versions -->
<ItemGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR $(TargetFramework.StartsWith(`net40`))">
<PackageReference Include="MinAsyncBridge" Version="0.12.4" />
@@ -48,14 +52,10 @@
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\MPF.Frontend\MPF.Frontend.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="BinaryObjectScanner" PrivateAssets="build; analyzers" ExcludeAssets="contentFiles" Version="3.1.13" GeneratePathProperty="true">
<PackageReference Include="BinaryObjectScanner" PrivateAssets="build; analyzers" ExcludeAssets="contentFiles" Version="3.1.16" GeneratePathProperty="true">
<IncludeAssets>runtime; compile; build; native; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.1" />
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.4" />
</ItemGroup>
<ItemGroup>

View File

@@ -55,7 +55,7 @@ namespace MPF.Check
}
// Try processing the common arguments
(bool success, MediaType mediaType, RedumpSystem? knownSystem, var error) = OptionsLoader.ProcessCommonArguments(args);
bool success = OptionsLoader.ProcessCommonArguments(args, out MediaType mediaType, out RedumpSystem? knownSystem, out var error);
if (!success)
{
DisplayHelp(error);
@@ -63,7 +63,8 @@ namespace MPF.Check
}
// Loop through and process options
(CommandOptions opts, int startIndex) = LoadFromArguments(args, options, startIndex: 2);
int startIndex = 2;
CommandOptions opts = LoadFromArguments(args, options, ref startIndex);
if (options.InternalProgram == InternalProgram.NONE)
{
DisplayHelp("A program name needs to be provided");
@@ -77,7 +78,14 @@ namespace MPF.Check
protectionProgress.ProgressChanged += ConsoleLogger.ProgressUpdated;
// Validate the supplied credentials
(bool? _, string? message) = RedumpClient.ValidateCredentials(options.RedumpUsername ?? string.Empty, options.RedumpPassword ?? string.Empty).GetAwaiter().GetResult();
bool? validated = RedumpClient.ValidateCredentials(options.RedumpUsername ?? string.Empty, options.RedumpPassword ?? string.Empty).GetAwaiter().GetResult();
string message = validated switch
{
true => "Redump username and password accepted!",
false => "Redump username and password denied!",
null => "An error occurred validating your credentials!",
};
if (!string.IsNullOrEmpty(message))
Console.WriteLine(message);
@@ -151,7 +159,7 @@ namespace MPF.Check
/// <summary>
/// Load the current set of options from application arguments
/// </summary>
private static (CommandOptions, int) LoadFromArguments(string[] args, Frontend.Options options, int startIndex = 0)
private static CommandOptions LoadFromArguments(string[] args, Frontend.Options options, ref int startIndex)
{
// Create return values
var opts = new CommandOptions();
@@ -165,11 +173,14 @@ namespace MPF.Check
// If we have no arguments, just return
if (args == null || args.Length == 0)
return (opts, 0);
{
startIndex = 0;
return opts;
}
// If we have an invalid start index, just return
if (startIndex < 0 || startIndex >= args.Length)
return (opts, startIndex);
return opts;
// Loop through the arguments and parse out values
for (; startIndex < args.Length; startIndex++)
@@ -317,7 +328,7 @@ namespace MPF.Check
options.IncludeDebugProtectionInformation = enableDebug && scan && !string.IsNullOrEmpty(opts.DevicePath);
options.HideDriveLetters = hideDriveLetters && scan && !string.IsNullOrEmpty(opts.DevicePath);
return (opts, startIndex);
return opts;
}
/// <summary>
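
A small sketch of the `ref startIndex` pattern that replaces the old `(CommandOptions, int)` tuple return in `LoadFromArguments`; `CommandOptions` here is a pared-down stand-in and the parsing loop is illustrative only.

```csharp
using System;
using System.Collections.Generic;

class RefStartIndexSketch
{
    // Pared-down stand-in for the CommandOptions type used above.
    class CommandOptions
    {
        public List<string> Values = new List<string>();
    }

    // Same shape as the refactored LoadFromArguments: the caller seeds startIndex
    // and the method advances it in place instead of returning it in a tuple.
    static CommandOptions LoadFromArguments(string[] args, ref int startIndex)
    {
        var opts = new CommandOptions();

        // If we have no arguments, just return
        if (args == null || args.Length == 0)
        {
            startIndex = 0;
            return opts;
        }

        // If we have an invalid start index, just return
        if (startIndex < 0 || startIndex >= args.Length)
            return opts;

        // Loop through the arguments and parse out values (illustrative parsing)
        for (; startIndex < args.Length; startIndex++)
            opts.Values.Add(args[startIndex]);

        return opts;
    }

    static void Main()
    {
        string[] args = { "cd", "IBM PC", "-x", "--flag" };
        int startIndex = 2;
        CommandOptions opts = LoadFromArguments(args, ref startIndex);
        Console.WriteLine($"Parsed {opts.Values.Count} values; startIndex is now {startIndex}");
    }
}
```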

View File

@@ -332,7 +332,7 @@ namespace MPF.ExecutionContexts
/// <returns>True if it's a valid byte, false otherwise</returns>
protected static bool IsValidInt8(string parameter, sbyte lowerBound = -1, sbyte upperBound = -1)
{
(string value, long _) = ExtractFactorFromValue(parameter);
string value = ExtractFactorFromValue(parameter, out _);
if (!sbyte.TryParse(value, out sbyte temp))
return false;
else if (lowerBound != -1 && temp < lowerBound)
@@ -352,7 +352,7 @@ namespace MPF.ExecutionContexts
/// <returns>True if it's a valid Int16, false otherwise</returns>
protected static bool IsValidInt16(string parameter, short lowerBound = -1, short upperBound = -1)
{
(string value, long _) = ExtractFactorFromValue(parameter);
string value = ExtractFactorFromValue(parameter, out _);
if (!short.TryParse(value, out short temp))
return false;
else if (lowerBound != -1 && temp < lowerBound)
@@ -372,7 +372,7 @@ namespace MPF.ExecutionContexts
/// <returns>True if it's a valid Int32, false otherwise</returns>
protected static bool IsValidInt32(string parameter, int lowerBound = -1, int upperBound = -1)
{
(string value, long _) = ExtractFactorFromValue(parameter);
string value = ExtractFactorFromValue(parameter, out _);
if (!int.TryParse(value, out int temp))
return false;
else if (lowerBound != -1 && temp < lowerBound)
@@ -392,7 +392,7 @@ namespace MPF.ExecutionContexts
/// <returns>True if it's a valid Int64, false otherwise</returns>
protected static bool IsValidInt64(string parameter, long lowerBound = -1, long upperBound = -1)
{
(string value, long _) = ExtractFactorFromValue(parameter);
string value = ExtractFactorFromValue(parameter, out _);
if (!long.TryParse(value, out long temp))
return false;
else if (lowerBound != -1 && temp < lowerBound)
@@ -568,7 +568,7 @@ namespace MPF.ExecutionContexts
this[longFlagString] = true;
i++;
(string value, long factor) = ExtractFactorFromValue(parts[i]);
string value = ExtractFactorFromValue(parts[i], out long factor);
if (sbyte.TryParse(value, out sbyte sByteValue))
return (sbyte)(sByteValue * factor);
string hexValue = RemoveHexIdentifier(value);
@@ -588,7 +588,7 @@ namespace MPF.ExecutionContexts
string valuePart = commandParts[1];
this[longFlagString] = true;
(string value, long factor) = ExtractFactorFromValue(valuePart);
string value = ExtractFactorFromValue(valuePart, out long factor);
if (sbyte.TryParse(value, out sbyte sByteValue))
return (sbyte)(sByteValue * factor);
string hexValue = RemoveHexIdentifier(value);
@@ -655,7 +655,7 @@ namespace MPF.ExecutionContexts
this[longFlagString] = true;
i++;
(string value, long factor) = ExtractFactorFromValue(parts[i]);
string value = ExtractFactorFromValue(parts[i], out long factor);
if (short.TryParse(value, out short shortValue))
return (short)(shortValue * factor);
string hexValue = RemoveHexIdentifier(value);
@@ -675,7 +675,7 @@ namespace MPF.ExecutionContexts
string valuePart = commandParts[1];
this[longFlagString] = true;
(string value, long factor) = ExtractFactorFromValue(valuePart);
string value = ExtractFactorFromValue(valuePart, out long factor);
if (short.TryParse(value, out short shortValue))
return (short)(shortValue * factor);
string hexValue = RemoveHexIdentifier(value);
@@ -742,7 +742,7 @@ namespace MPF.ExecutionContexts
this[longFlagString] = true;
i++;
(string value, long factor) = ExtractFactorFromValue(parts[i]);
string value = ExtractFactorFromValue(parts[i], out long factor);
if (int.TryParse(value, out int intValue))
return (int)(intValue * factor);
string hexValue = RemoveHexIdentifier(value);
@@ -762,7 +762,7 @@ namespace MPF.ExecutionContexts
string valuePart = commandParts[1];
this[longFlagString] = true;
(string value, long factor) = ExtractFactorFromValue(valuePart);
string value = ExtractFactorFromValue(valuePart, out long factor);
if (int.TryParse(value, out int intValue))
return (int)(intValue * factor);
string hexValue = RemoveHexIdentifier(value);
@@ -829,7 +829,7 @@ namespace MPF.ExecutionContexts
this[longFlagString] = true;
i++;
(string value, long factor) = ExtractFactorFromValue(parts[i]);
string value = ExtractFactorFromValue(parts[i], out long factor);
if (long.TryParse(value, out long longValue))
return (long)(longValue * factor);
string hexValue = RemoveHexIdentifier(value);
@@ -849,7 +849,7 @@ namespace MPF.ExecutionContexts
string valuePart = commandParts[1];
this[longFlagString] = true;
(string value, long factor) = ExtractFactorFromValue(valuePart);
string value = ExtractFactorFromValue(valuePart, out long factor);
if (long.TryParse(value, out long longValue))
return (long)(longValue * factor);
string hexValue = RemoveHexIdentifier(value);
@@ -990,7 +990,7 @@ namespace MPF.ExecutionContexts
this[longFlagString] = true;
i++;
(string value, long factor) = ExtractFactorFromValue(parts[i]);
string value = ExtractFactorFromValue(parts[i], out long factor);
if (byte.TryParse(value, out byte byteValue))
return (byte)(byteValue * factor);
string hexValue = RemoveHexIdentifier(value);
@@ -1010,7 +1010,7 @@ namespace MPF.ExecutionContexts
string valuePart = commandParts[1];
this[longFlagString] = true;
(string value, long factor) = ExtractFactorFromValue(valuePart);
string value = ExtractFactorFromValue(valuePart, out long factor);
if (byte.TryParse(value, out byte byteValue))
return (byte)(byteValue * factor);
string hexValue = RemoveHexIdentifier(value);
@@ -1027,10 +1027,10 @@ namespace MPF.ExecutionContexts
/// </summary>
/// <param name="value">String value to treat as suffixed number</param>
/// <returns>Trimmed value and multiplication factor</returns>
private static (string trimmed, long factor) ExtractFactorFromValue(string value)
private static string ExtractFactorFromValue(string value, out long factor)
{
value = value.Trim('"');
long factor = 1;
factor = 1;
// Characters
if (value.EndsWith("c", StringComparison.Ordinal))
@@ -1081,7 +1081,7 @@ namespace MPF.ExecutionContexts
value = value.TrimEnd('G');
}
return (value, factor);
return value;
}
/// <summary>
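
A sketch of how callers combine the trimmed value with the `out` factor after the `ExtractFactorFromValue` refactor; the suffix table below is assumed for illustration only (the real method handles more suffixes, including 'c' and 'G' as shown above).

```csharp
using System;

class FactorSketch
{
    // Simplified stand-in for the refactored ExtractFactorFromValue: the numeric
    // part is returned and the multiplier comes back through an out parameter.
    static string ExtractFactorFromValue(string value, out long factor)
    {
        value = value.Trim('"');
        factor = 1;

        if (value.EndsWith("k", StringComparison.Ordinal))
        {
            factor = 1024;            // assumed multiplier for this sketch
            value = value.TrimEnd('k');
        }
        else if (value.EndsWith("M", StringComparison.Ordinal))
        {
            factor = 1024L * 1024;    // assumed multiplier for this sketch
            value = value.TrimEnd('M');
        }

        return value;
    }

    static void Main()
    {
        // Callers parse the trimmed value and multiply by the factor, as in the diff.
        string value = ExtractFactorFromValue("\"64k\"", out long factor);
        if (long.TryParse(value, out long parsed))
            Console.WriteLine(parsed * factor); // 65536
    }
}
```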

View File

@@ -204,6 +204,7 @@ namespace MPF.ExecutionContexts.DiscImageCreator
FlagStrings.DisableBeep,
FlagStrings.DVDReread,
FlagStrings.ForceUnitAccess,
FlagStrings.Range,
FlagStrings.UseAnchorVolumeDescriptorPointer,
],
@@ -232,6 +233,7 @@ namespace MPF.ExecutionContexts.DiscImageCreator
FlagStrings.ScanSectorProtect,
FlagStrings.SeventyFour,
FlagStrings.SubchannelReadLevel,
FlagStrings.TryReadingPregap,
FlagStrings.VideoNow,
FlagStrings.VideoNowColor,
FlagStrings.VideoNowXP,
@@ -340,6 +342,7 @@ namespace MPF.ExecutionContexts.DiscImageCreator
FlagStrings.ScanSectorProtect,
FlagStrings.SeventyFour,
FlagStrings.SubchannelReadLevel,
FlagStrings.TryReadingPregap,
FlagStrings.VideoNow,
FlagStrings.VideoNowColor,
FlagStrings.VideoNowXP,
@@ -852,6 +855,13 @@ namespace MPF.ExecutionContexts.DiscImageCreator
parameters.Add(FlagStrings.Tages);
}
// Try Reading Pregap
if (IsFlagSupported(FlagStrings.TryReadingPregap))
{
if (this[FlagStrings.TryReadingPregap] == true)
parameters.Add(FlagStrings.TryReadingPregap);
}
// Use Anchor Volume Descriptor Pointer
if (IsFlagSupported(FlagStrings.UseAnchorVolumeDescriptorPointer))
{

View File

@@ -37,6 +37,7 @@ namespace MPF.ExecutionContexts.DiscImageCreator
public const string SkipSector = "/sk";
public const string SubchannelReadLevel = "/s";
public const string Tages = "/t";
public const string TryReadingPregap = "/trp";
public const string UseAnchorVolumeDescriptorPointer = "/avdp";
public const string VideoNow = "/vn";
public const string VideoNowColor = "/vnc";

View File

@@ -9,7 +9,7 @@
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<VersionPrefix>3.2.2</VersionPrefix>
<VersionPrefix>3.2.3</VersionPrefix>
<WarningsNotAsErrors>NU5104</WarningsNotAsErrors>
<!-- Package Properties -->
@@ -45,15 +45,9 @@
<PackageReference Include="MinTasksExtensionsBridge" Version="0.3.4" />
<PackageReference Include="MinThreadingBridge" Version="0.11.4" />
</ItemGroup>
<ItemGroup Condition="$(TargetFramework.StartsWith(`net4`)) AND !$(TargetFramework.StartsWith(`net40`))">
<PackageReference Include="IndexRange" Version="1.0.3" />
</ItemGroup>
<ItemGroup Condition="!$(TargetFramework.StartsWith(`net2`)) AND !$(TargetFramework.StartsWith(`net3`)) AND !$(TargetFramework.StartsWith(`net40`)) AND !$(TargetFramework.StartsWith(`net452`))">
<PackageReference Include="System.Runtime.CompilerServices.Unsafe" Version="6.0.0" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.1" />
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.4" />
</ItemGroup>
</Project>

View File

@@ -143,19 +143,19 @@ namespace MPF.Frontend
/// <summary>
/// Get the current media type from drive letter
/// </summary>
/// <param name="system"></param>
/// <returns></returns>
public (MediaType?, string?) GetMediaType(RedumpSystem? system)
/// <param name="system">Currently selected system</param>
/// <returns>The detected media type, if possible</returns>
public MediaType? GetMediaType(RedumpSystem? system)
{
// Take care of the non-optical stuff first
switch (InternalDriveType)
{
case Frontend.InternalDriveType.Floppy:
return (MediaType.FloppyDisk, null);
return MediaType.FloppyDisk;
case Frontend.InternalDriveType.HardDisk:
return (MediaType.HardDisk, null);
return MediaType.HardDisk;
case Frontend.InternalDriveType.Removable:
return (MediaType.FlashDrive, null);
return MediaType.FlashDrive;
}
// Some systems should default to certain media types
@@ -168,18 +168,18 @@ namespace MPF.Frontend
case RedumpSystem.SegaSaturn:
case RedumpSystem.SonyPlayStation:
case RedumpSystem.VideoCD:
return (MediaType.CDROM, null);
return MediaType.CDROM;
// DVD
case RedumpSystem.DVDAudio:
case RedumpSystem.DVDVideo:
case RedumpSystem.MicrosoftXbox:
case RedumpSystem.MicrosoftXbox360:
return (MediaType.DVD, null);
return MediaType.DVD;
// HD-DVD
case RedumpSystem.HDDVDVideo:
return (MediaType.HDDVD, null);
return MediaType.HDDVD;
// Blu-ray
case RedumpSystem.BDVideo:
@@ -188,34 +188,34 @@ namespace MPF.Frontend
case RedumpSystem.SonyPlayStation3:
case RedumpSystem.SonyPlayStation4:
case RedumpSystem.SonyPlayStation5:
return (MediaType.BluRay, null);
return MediaType.BluRay;
// GameCube
case RedumpSystem.NintendoGameCube:
return (MediaType.NintendoGameCubeGameDisc, null);
return MediaType.NintendoGameCubeGameDisc;
// Wii
case RedumpSystem.NintendoWii:
return (MediaType.NintendoWiiOpticalDisc, null);
return MediaType.NintendoWiiOpticalDisc;
// WiiU
case RedumpSystem.NintendoWiiU:
return (MediaType.NintendoWiiUOpticalDisc, null);
return MediaType.NintendoWiiUOpticalDisc;
// PSP
case RedumpSystem.SonyPlayStationPortable:
return (MediaType.UMD, null);
return MediaType.UMD;
}
// Handle optical media by size and filesystem
if (TotalSize >= 0 && TotalSize <= 800_000_000 && (DriveFormat == "CDFS" || DriveFormat == "UDF"))
return (MediaType.CDROM, null);
return MediaType.CDROM;
else if (TotalSize > 800_000_000 && TotalSize <= 8_540_000_000 && (DriveFormat == "CDFS" || DriveFormat == "UDF"))
return (MediaType.DVD, null);
return MediaType.DVD;
else if (TotalSize > 8_540_000_000)
return (MediaType.BluRay, null);
return MediaType.BluRay;
return (null, "Could not determine media type!");
return null;
}
/// <summary>
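
The size/filesystem fallback at the end of the refactored `GetMediaType`, condensed into a standalone sketch; the enum and method names below are local stand-ins, and `null` now signals "could not determine" instead of a `(null, message)` tuple.

```csharp
using System;

class MediaTypeHeuristicSketch
{
    enum MediaType { CDROM, DVD, BluRay }

    // The size/filesystem heuristic from the refactored GetMediaType.
    static MediaType? GuessFromSize(long totalSize, string driveFormat)
    {
        bool opticalFs = driveFormat == "CDFS" || driveFormat == "UDF";

        if (totalSize >= 0 && totalSize <= 800_000_000 && opticalFs)
            return MediaType.CDROM;
        if (totalSize > 800_000_000 && totalSize <= 8_540_000_000 && opticalFs)
            return MediaType.DVD;
        if (totalSize > 8_540_000_000)
            return MediaType.BluRay;

        return null; // caller decides how to report "could not determine media type"
    }

    static void Main()
    {
        Console.WriteLine(GuessFromSize(700_000_000, "CDFS"));        // CDROM
        Console.WriteLine(GuessFromSize(4_700_000_000, "UDF"));       // DVD
        Console.WriteLine(GuessFromSize(25_000_000_000, "UDF"));      // BluRay
        Console.WriteLine(GuessFromSize(1_000, "NTFS") == null);      // True
    }
}
```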

View File

@@ -158,22 +158,22 @@ namespace MPF.Frontend
if (programFound == null && _internalProgram != InternalProgram.Aaru)
{
var processor = new Processors.Aaru(_system, _type);
(bool foundOtherFiles, _) = processor.FoundAllFiles(outputDirectory, outputFilename);
if (foundOtherFiles)
var missingFiles = processor.FoundAllFiles(outputDirectory, outputFilename);
if (missingFiles.Count == 0)
programFound = InternalProgram.Aaru;
}
if (programFound == null && _internalProgram != InternalProgram.DiscImageCreator)
{
var processor = new Processors.DiscImageCreator(_system, _type);
(bool foundOtherFiles, _) = processor.FoundAllFiles(outputDirectory, outputFilename);
if (foundOtherFiles)
var missingFiles = processor.FoundAllFiles(outputDirectory, outputFilename);
if (missingFiles.Count == 0)
programFound = InternalProgram.DiscImageCreator;
}
if (programFound == null && _internalProgram != InternalProgram.Redumper)
{
var processor = new Processors.Redumper(_system, _type);
(bool foundOtherFiles, _) = processor.FoundAllFiles(outputDirectory, outputFilename);
if (foundOtherFiles)
var missingFiles = processor.FoundAllFiles(outputDirectory, outputFilename);
if (missingFiles.Count == 0)
programFound = InternalProgram.Redumper;
}
@@ -300,7 +300,7 @@ namespace MPF.Frontend
if (_processor == null)
return false;
return _processor.FoundAllFiles(outputDirectory, outputFilename).Item1;
return _processor.FoundAllFiles(outputDirectory, outputFilename).Count == 0;
}
/// <inheritdoc cref="BaseExecutionContext.GetDefaultExtension(MediaType?)"/>
@@ -424,7 +424,7 @@ namespace MPF.Frontend
public async Task<ResultEventArgs> VerifyAndSaveDumpOutput(
IProgress<ResultEventArgs>? resultProgress = null,
IProgress<ProtectionProgress>? protectionProgress = null,
Func<SubmissionInfo?, (bool?, SubmissionInfo?)>? processUserInfo = null,
ProcessUserInfoDelegate? processUserInfo = null,
SubmissionInfo? seedInfo = null)
{
if (_processor == null)
@@ -437,8 +437,8 @@ namespace MPF.Frontend
var outputFilename = Path.GetFileName(OutputPath);
// Check to make sure that the output had all the correct files
(bool foundFiles, List<string> missingFiles) = _processor.FoundAllFiles(outputDirectory, outputFilename);
if (!foundFiles)
List<string> missingFiles = _processor.FoundAllFiles(outputDirectory, outputFilename);
if (missingFiles.Count > 0)
{
resultProgress?.Report(ResultEventArgs.Failure($"There were files missing from the output:\n{string.Join("\n", [.. missingFiles])}"));
return ResultEventArgs.Failure("Error! Please check output directory as dump may be incomplete!");
@@ -470,8 +470,7 @@ namespace MPF.Frontend
{
resultProgress?.Report(ResultEventArgs.Success("Waiting for additional disc information..."));
bool? filledInfo;
(filledInfo, submissionInfo) = processUserInfo(submissionInfo);
bool? filledInfo = processUserInfo(ref submissionInfo);
if (filledInfo == true)
resultProgress?.Report(ResultEventArgs.Success("Additional disc information added!"));
@@ -486,7 +485,7 @@ namespace MPF.Frontend
// Format the information for the text output
resultProgress?.Report(ResultEventArgs.Success("Formatting information..."));
(var formattedValues, var formatResult) = Formatter.FormatOutputData(submissionInfo, _options.EnableRedumpCompatibility);
var formattedValues = Formatter.FormatOutputData(submissionInfo, _options.EnableRedumpCompatibility, out string? formatResult);
if (formattedValues == null)
resultProgress?.Report(ResultEventArgs.Failure(formatResult));
else
@@ -497,7 +496,7 @@ namespace MPF.Frontend
// Write the text output
resultProgress?.Report(ResultEventArgs.Success("Writing submission information file..."));
(bool txtSuccess, string txtResult) = WriteOutputData(outputDirectory, filenameSuffix, formattedValues);
bool txtSuccess = WriteOutputData(outputDirectory, filenameSuffix, formattedValues, out string txtResult);
if (txtSuccess)
resultProgress?.Report(ResultEventArgs.Success(txtResult));
else
@@ -532,33 +531,47 @@ namespace MPF.Frontend
if (_options.CompressLogFiles)
{
resultProgress?.Report(ResultEventArgs.Success("Compressing log files..."));
(bool compressSuccess, string compressResult) = _processor?.CompressLogFiles(outputDirectory, filenameSuffix, outputFilename) ?? (false, "No processor provided!");
if (compressSuccess)
resultProgress?.Report(ResultEventArgs.Success(compressResult));
if (_processor == null)
{
resultProgress?.Report(ResultEventArgs.Failure("No processor provided!"));
}
else
resultProgress?.Report(ResultEventArgs.Failure(compressResult));
{
bool compressSuccess = _processor.CompressLogFiles(outputDirectory, filenameSuffix, outputFilename, out string compressResult);
if (compressSuccess)
resultProgress?.Report(ResultEventArgs.Success(compressResult));
else
resultProgress?.Report(ResultEventArgs.Failure(compressResult));
}
}
// Delete unnecessary files, if required
if (_options.DeleteUnnecessaryFiles)
{
resultProgress?.Report(ResultEventArgs.Success("Deleting unnecessary files..."));
(bool deleteSuccess, string deleteResult) = _processor?.DeleteUnnecessaryFiles(outputDirectory, outputFilename) ?? (false, "No processor provided!");
if (deleteSuccess)
resultProgress?.Report(ResultEventArgs.Success(deleteResult));
if (_processor == null)
{
resultProgress?.Report(ResultEventArgs.Failure("No processor provided!"));
}
else
resultProgress?.Report(ResultEventArgs.Failure(deleteResult));
{
bool deleteSuccess = _processor.DeleteUnnecessaryFiles(outputDirectory, outputFilename, out string deleteResult);
if (deleteSuccess)
resultProgress?.Report(ResultEventArgs.Success(deleteResult));
else
resultProgress?.Report(ResultEventArgs.Failure(deleteResult));
}
}
// Create PS3 IRD, if required
if (_options.CreateIRDAfterDumping && _system == RedumpSystem.SonyPlayStation3 && _type == MediaType.BluRay)
{
resultProgress?.Report(ResultEventArgs.Success("Creating IRD... please wait!"));
(bool deleteSuccess, string deleteResult) = await WriteIRD(OutputPath, submissionInfo?.Extras?.DiscKey, submissionInfo?.Extras?.DiscID, submissionInfo?.Extras?.PIC, submissionInfo?.SizeAndChecksums?.Layerbreak, submissionInfo?.SizeAndChecksums?.CRC32);
bool deleteSuccess = await WriteIRD(OutputPath, submissionInfo?.Extras?.DiscKey, submissionInfo?.Extras?.DiscID, submissionInfo?.Extras?.PIC, submissionInfo?.SizeAndChecksums?.Layerbreak, submissionInfo?.SizeAndChecksums?.CRC32);
if (deleteSuccess)
resultProgress?.Report(ResultEventArgs.Success(deleteResult));
resultProgress?.Report(ResultEventArgs.Success("IRD created!"));
else
resultProgress?.Report(ResultEventArgs.Failure(deleteResult));
resultProgress?.Report(ResultEventArgs.Failure("Failed to create IRD"));
}
resultProgress?.Report(ResultEventArgs.Success("Submission information process complete!"));
@@ -626,11 +639,14 @@ namespace MPF.Frontend
/// <param name="filenameSuffix">Optional suffix to append to the filename</param>
/// <param name="lines">Preformatted list of lines to write out to the file</param>
/// <returns>True on success, false on error</returns>
private static (bool, string) WriteOutputData(string? outputDirectory, string? filenameSuffix, List<string>? lines)
private static bool WriteOutputData(string? outputDirectory, string? filenameSuffix, List<string>? lines, out string status)
{
// Check to see if the inputs are valid
if (lines == null)
return (false, "No formatted data found to write!");
{
status = "No formatted data found to write!";
return false;
}
// Now write out to a generic file
try
@@ -654,10 +670,12 @@ namespace MPF.Frontend
}
catch (Exception ex)
{
return (false, $"Writing could not complete: {ex}");
status = $"Writing could not complete: {ex}";
return false;
}
return (true, "Writing complete!");
status = "Writing complete!";
return true;
}
// MOVE TO REDUMPLIB
@@ -786,7 +804,12 @@ namespace MPF.Frontend
/// <param name="filenameSuffix">Optional suffix to append to the filename</param>
/// <param name="outputFilename">Output filename to use as the base path</param>
/// <returns>True on success, false on error</returns>
private static async Task<(bool, string)> WriteIRD(string isoPath, string? discKeyString, string? discIDString, string? picString, long? layerbreak, string? crc32)
private static async Task<bool> WriteIRD(string isoPath,
string? discKeyString,
string? discIDString,
string? picString,
long? layerbreak,
string? crc32)
{
try
{
@@ -796,7 +819,7 @@ namespace MPF.Frontend
// Parse disc key from submission info (Required)
byte[]? discKey = ProcessingTool.ParseHexKey(discKeyString);
if (discKey == null)
return (false, "Failed to create IRD: No key provided");
return false;
// Parse Disc ID from submission info (Optional)
byte[]? discID = ProcessingTool.ParseDiscID(discIDString);
@@ -825,12 +848,12 @@ namespace MPF.Frontend
// Write IRD to file
ird.Write(irdPath);
return (true, "IRD created!");
return true;
}
catch (Exception)
{
// We don't care what the error is
return (false, "Failed to create IRD");
return false;
}
}
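
A compact sketch of the "(bool, string) tuple → bool return plus out string status" pattern applied to `WriteOutputData` above; the file path and content are placeholders.

```csharp
#nullable enable
using System;
using System.Collections.Generic;
using System.IO;

class OutParamStatusSketch
{
    // Same shape as the refactored WriteOutputData: success is the return value
    // and the human-readable status comes back through an out parameter.
    static bool WriteOutputData(string path, List<string>? lines, out string status)
    {
        if (lines == null)
        {
            status = "No formatted data found to write!";
            return false;
        }

        try
        {
            File.WriteAllLines(path, lines);
        }
        catch (Exception ex)
        {
            status = $"Writing could not complete: {ex}";
            return false;
        }

        status = "Writing complete!";
        return true;
    }

    static void Main()
    {
        // Placeholder path and content for illustration.
        bool txtSuccess = WriteOutputData("output.txt", new List<string> { "Title: Example" }, out string txtResult);
        Console.WriteLine(txtSuccess ? txtResult : $"Error: {txtResult}");
    }
}
```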

View File

@@ -1,4 +1,4 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<!-- Assembly Properties -->
@@ -9,7 +9,7 @@
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<VersionPrefix>3.2.2</VersionPrefix>
<VersionPrefix>3.2.3</VersionPrefix>
<!-- Package Properties -->
<Authors>Matt Nadareski;ReignStumble;Jakz</Authors>
@@ -49,25 +49,21 @@
<PackageReference Include="MinTasksExtensionsBridge" Version="0.3.4" />
<PackageReference Include="MinThreadingBridge" Version="0.11.4" />
</ItemGroup>
<ItemGroup Condition="$(TargetFramework.StartsWith(`net4`)) AND !$(TargetFramework.StartsWith(`net40`))">
<PackageReference Include="IndexRange" Version="1.0.3" />
</ItemGroup>
<ItemGroup Condition="$(TargetFramework.StartsWith(`net452`))">
<PackageReference Include="Microsoft.Net.Http" Version="2.2.29" />
</ItemGroup>
<ItemGroup Condition="!$(TargetFramework.StartsWith(`net2`)) AND !$(TargetFramework.StartsWith(`net3`)) AND !$(TargetFramework.StartsWith(`net40`)) AND !$(TargetFramework.StartsWith(`net452`))">
<PackageReference Include="Microsoft.Management.Infrastructure" Version="3.0.0" />
<PackageReference Include="System.Net.Http" Version="4.3.4" />
<PackageReference Include="System.Runtime.CompilerServices.Unsafe" Version="6.0.0" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="BinaryObjectScanner" PrivateAssets="build; analyzers" ExcludeAssets="contentFiles" Version="3.1.13" GeneratePathProperty="true">
<PackageReference Include="BinaryObjectScanner" PrivateAssets="build; analyzers" ExcludeAssets="contentFiles" Version="3.1.16" GeneratePathProperty="true">
<IncludeAssets>runtime; compile; build; native; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="LibIRD" Version="0.9.1" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.1" />
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.4" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,11 @@
using SabreTools.RedumpLib.Data;
namespace MPF.Frontend
{
/// <summary>
/// Determines how user information is processed, if at all
/// </summary>
/// <param name="info">Submission info that may be overwritten</param>
/// <returns>True for successful updating, false or null otherwise</returns>
public delegate bool? ProcessUserInfoDelegate(ref SubmissionInfo? info);
}
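
A hypothetical implementation of the new `ProcessUserInfoDelegate`; `SubmissionInfo` is stubbed locally so the example compiles without SabreTools.RedumpLib.

```csharp
#nullable enable
using System;

class ProcessUserInfoSketch
{
    // Local stand-in for SabreTools.RedumpLib.Data.SubmissionInfo.
    class SubmissionInfo
    {
        public string? Title;
    }

    // Same shape as the new ProcessUserInfoDelegate: the implementation may
    // replace the SubmissionInfo instance via ref and returns true on success,
    // false or null otherwise.
    delegate bool? ProcessUserInfoDelegate(ref SubmissionInfo? info);

    static void Main()
    {
        ProcessUserInfoDelegate processUserInfo = (ref SubmissionInfo? info) =>
        {
            info ??= new SubmissionInfo();
            info.Title = "Example Disc";
            return true;
        };

        SubmissionInfo? submissionInfo = null;
        bool? filledInfo = processUserInfo(ref submissionInfo);
        Console.WriteLine($"Filled: {filledInfo}, Title: {submissionInfo?.Title}");
    }
}
```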

View File

@@ -561,32 +561,37 @@ namespace MPF.Frontend.Tools
/// String representing the message to display the the user.
/// String representing the new release URL.
/// </returns>
public static (bool different, string message, string? url) CheckForNewVersion()
public static void CheckForNewVersion(out bool different, out string message, out string? url)
{
try
{
// Get current assembly version
var assemblyVersion = Assembly.GetEntryAssembly()?.GetName()?.Version;
if (assemblyVersion == null)
return (false, "Assembly version could not be determined", null);
{
different = false;
message = "Assembly version could not be determined";
url = null;
return;
}
string version = $"{assemblyVersion.Major}.{assemblyVersion.Minor}.{assemblyVersion.Build}";
// Get the latest tag from GitHub
var (tag, url) = GetRemoteVersionAndUrl();
bool different = version != tag && tag != null;
_ = GetRemoteVersionAndUrl(out string? tag, out url);
different = version != tag && tag != null;
string message = $"Local version: {version}"
message = $"Local version: {version}"
+ $"{Environment.NewLine}Remote version: {tag}"
+ (different
? $"{Environment.NewLine}The update URL has been added copied to your clipboard"
: $"{Environment.NewLine}You have the newest version!");
return (different, message, url);
}
catch (Exception ex)
{
return (false, ex.ToString(), null);
different = false;
message = ex.ToString();
url = null;
}
}
@@ -613,11 +618,12 @@ namespace MPF.Frontend.Tools
/// <summary>
/// Get the latest version of MPF from GitHub and the release URL
/// </summary>
private static (string? tag, string? url) GetRemoteVersionAndUrl()
private static bool GetRemoteVersionAndUrl(out string? tag, out string? url)
{
tag = null; url = null;
#if NET20 || NET35 || NET40
// Not supported in .NET Frameworks 2.0, 3.5, or 4.0
return (null, null);
return false;
#else
using var hc = new System.Net.Http.HttpClient();
#if NET452
@@ -625,22 +631,22 @@ namespace MPF.Frontend.Tools
#endif
// TODO: Figure out a better way than having this hardcoded...
string url = "https://api.github.com/repos/SabreTools/MPF/releases/latest";
var message = new System.Net.Http.HttpRequestMessage(System.Net.Http.HttpMethod.Get, url);
string releaseUrl = "https://api.github.com/repos/SabreTools/MPF/releases/latest";
var message = new System.Net.Http.HttpRequestMessage(System.Net.Http.HttpMethod.Get, releaseUrl);
message.Headers.Add("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:64.0) Gecko/20100101 Firefox/64.0");
var latestReleaseJsonString = hc.SendAsync(message)?.ConfigureAwait(false).GetAwaiter().GetResult()
.Content?.ReadAsStringAsync().ConfigureAwait(false).GetAwaiter().GetResult();
if (latestReleaseJsonString == null)
return (null, null);
return false;
var latestReleaseJson = Newtonsoft.Json.Linq.JObject.Parse(latestReleaseJsonString);
if (latestReleaseJson == null)
return (null, null);
return false;
var latestTag = latestReleaseJson["tag_name"]?.ToString();
var releaseUrl = latestReleaseJson["html_url"]?.ToString();
tag = latestReleaseJson["tag_name"]?.ToString();
url = latestReleaseJson["html_url"]?.ToString();
return (latestTag, releaseUrl);
return true;
#endif
}
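
An offline sketch of the refactored `CheckForNewVersion` shape (out parameters instead of a tuple); the containing class is not shown in the diff, the remote tag is passed in rather than fetched from GitHub, and the release URL below is illustrative only.

```csharp
#nullable enable
using System;
using System.Reflection;

class VersionCheckSketch
{
    // Mirrors the refactored signature: all results come back through out parameters.
    static void CheckForNewVersion(string? remoteTag, out bool different, out string message, out string? url)
    {
        var assemblyVersion = Assembly.GetEntryAssembly()?.GetName()?.Version;
        if (assemblyVersion == null)
        {
            different = false;
            message = "Assembly version could not be determined";
            url = null;
            return;
        }

        string version = $"{assemblyVersion.Major}.{assemblyVersion.Minor}.{assemblyVersion.Build}";
        different = version != remoteTag && remoteTag != null;
        url = different ? "https://github.com/SabreTools/MPF/releases/latest" : null; // illustrative URL only
        message = $"Local version: {version}"
            + $"{Environment.NewLine}Remote version: {remoteTag}"
            + (different
                ? $"{Environment.NewLine}The update URL has been copied to your clipboard"
                : $"{Environment.NewLine}You have the newest version!");
    }

    static void Main()
    {
        CheckForNewVersion("3.2.3", out bool different, out string message, out string? url);
        Console.WriteLine(message);
        if (different && url != null)
            Console.WriteLine(url);
    }
}
```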

View File

@@ -28,7 +28,7 @@ namespace MPF.Frontend.Tools
{
// Help options
if (args.Length == 0 || args[0] == "-h" || args[0] == "-?")
return false;
return null;
// List options
if (args[0] == "-lc" || args[0] == "--listcodes")
@@ -38,7 +38,7 @@ namespace MPF.Frontend.Tools
{
Console.WriteLine(siteCode);
}
Console.ReadLine();
return true;
}
else if (args[0] == "-lm" || args[0] == "--listmedia")
@@ -48,7 +48,7 @@ namespace MPF.Frontend.Tools
{
Console.WriteLine(mediaType);
}
Console.ReadLine();
return true;
}
else if (args[0] == "-lp" || args[0] == "--listprograms")
@@ -58,7 +58,7 @@ namespace MPF.Frontend.Tools
{
Console.WriteLine(program);
}
Console.ReadLine();
return true;
}
else if (args[0] == "-ls" || args[0] == "--listsystems")
@@ -68,7 +68,7 @@ namespace MPF.Frontend.Tools
{
Console.WriteLine(system);
}
Console.ReadLine();
return true;
}
@@ -79,23 +79,36 @@ namespace MPF.Frontend.Tools
/// Process common arguments for all functionality
/// </summary>
/// <returns>True if all arguments pass, false otherwise</returns>
public static (bool, MediaType, RedumpSystem?, string?) ProcessCommonArguments(string[] args)
public static bool ProcessCommonArguments(string[] args, out MediaType mediaType, out RedumpSystem? system, out string? message)
{
// All other use requires at least 3 arguments
if (args.Length < 3)
return (false, MediaType.NONE, null, "Invalid number of arguments");
{
mediaType = MediaType.NONE;
system = null;
message = "Invalid number of arguments";
return false;
}
// Check the MediaType
var mediaType = ToMediaType(args[0].Trim('"'));
mediaType = ToMediaType(args[0].Trim('"'));
if (mediaType == MediaType.NONE)
return (false, MediaType.NONE, null, $"{args[0]} is not a recognized media type");
{
system = null;
message = $"{args[0]} is not a recognized media type";
return false;
}
// Check the RedumpSystem
var knownSystem = Extensions.ToRedumpSystem(args[1].Trim('"'));
if (knownSystem == null)
return (false, MediaType.NONE, null, $"{args[1]} is not a recognized system");
system = Extensions.ToRedumpSystem(args[1].Trim('"'));
if (system == null)
{
message = $"{args[1]} is not a recognized system";
return false;
}
return (true, mediaType, knownSystem, null);
message = null;
return true;
}
/// <summary>

View File

@@ -6,32 +6,10 @@ using System.Text.RegularExpressions;
using System.Threading.Tasks;
using BinaryObjectScanner;
#pragma warning disable SYSLIB1045 // Convert to 'GeneratedRegexAttribute'.
namespace MPF.Frontend.Tools
{
public static class ProtectionTool
{
/// <summary>
/// Get the current detected copy protection(s), if possible
/// </summary>
/// <param name="drive">Drive object representing the current drive</param>
/// <param name="options">Options object that determines what to scan</param>
/// <param name="progress">Optional progress callback</param>
/// <returns>Detected copy protection(s) if possible, null on error</returns>
public static async Task<(string?, Dictionary<string, List<string>>?)> GetCopyProtection(Drive? drive,
Frontend.Options options,
IProgress<ProtectionProgress>? progress = null)
{
if (options.ScanForProtection && drive?.Name != null)
{
(var protection, _) = await RunProtectionScanOnPath(drive.Name, options, progress);
return (FormatProtections(protection), protection);
}
return ("(CHECK WITH PROTECTIONID)", null);
}
/// <summary>
/// Run protection scan on a given path
/// </summary>
@@ -39,68 +17,37 @@ namespace MPF.Frontend.Tools
/// <param name="options">Options object that determines what to scan</param>
/// <param name="progress">Optional progress callback</param>
/// <returns>Set of all detected copy protections with an optional error string</returns>
public static async Task<(Dictionary<string, List<string>>?, string?)> RunProtectionScanOnPath(string path,
public static async Task<ProtectionDictionary> RunProtectionScanOnPath(string path,
Frontend.Options options,
IProgress<ProtectionProgress>? progress = null)
{
try
{
#if NET40
var found = await Task.Factory.StartNew(() =>
{
var scanner = new Scanner(
options.ScanArchivesForProtection,
scanContents: true, // Hardcoded value to avoid issues
scanGameEngines: false, // Hardcoded value to avoid issues
options.ScanPackersForProtection,
scanPaths: true, // Hardcoded value to avoid issues
options.IncludeDebugProtectionInformation,
progress);
return scanner.GetProtections(path);
});
var found = await Task.Factory.StartNew(() =>
#else
var found = await Task.Run(() =>
{
var scanner = new Scanner(
options.ScanArchivesForProtection,
scanContents: true, // Hardcoded value to avoid issues
scanGameEngines: false, // Hardcoded value to avoid issues
options.ScanPackersForProtection,
scanPaths: true, // Hardcoded value to avoid issues
options.IncludeDebugProtectionInformation,
progress);
return scanner.GetProtections(path);
});
var found = await Task.Run(() =>
#endif
// If nothing was returned, return
#if NET20 || NET35
if (found == null || found.Count == 0)
#else
if (found == null || found.IsEmpty)
#endif
return (null, null);
// Filter out any empty protections
var filteredProtections = found
#if NET20 || NET35
.Where(kvp => kvp.Value != null && kvp.Value.Count > 0)
#else
.Where(kvp => kvp.Value != null && !kvp.Value.IsEmpty)
#endif
.ToDictionary(
kvp => kvp.Key,
kvp => kvp.Value.OrderBy(s => s).ToList());
// Return the filtered set of protections
return (filteredProtections, null);
}
catch (Exception ex)
{
return (null, ex.ToString());
}
var scanner = new Scanner(
options.ScanArchivesForProtection,
scanContents: true, // Hardcoded value to avoid issues
scanGameEngines: false, // Hardcoded value to avoid issues
options.ScanPackersForProtection,
scanPaths: true, // Hardcoded value to avoid issues
options.IncludeDebugProtectionInformation,
progress);
return scanner.GetProtections(path);
});
// If nothing was returned, return
if (found == null || found.Count == 0)
return [];
// Filter out any empty protections
found.ClearEmptyKeys();
// Return the filtered set of protections
return found;
}
/// <summary>
@@ -108,10 +55,12 @@ namespace MPF.Frontend.Tools
/// </summary>
/// <param name="protections">Dictionary of file to list of protection mappings</param>
/// <returns>Detected protections, if any</returns>
public static string? FormatProtections(Dictionary<string, List<string>>? protections)
public static string? FormatProtections(ProtectionDictionary? protections)
{
// If the filtered list is empty in some way, return
if (protections == null || !protections.Any())
if (protections == null)
return "(CHECK WITH PROTECTIONID)";
else if (protections.Count == 0)
return "None found [OMIT FROM SUBMISSION]";
// Get an ordered list of distinct found protections
@@ -304,55 +253,115 @@ namespace MPF.Frontend.Tools
// SafeDisc
if (foundProtections.Any(p => p.StartsWith("SafeDisc")))
{
if (foundProtections.Any(p => Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}", RegexOptions.Compiled) && !p.StartsWith("Macrovision Protection File")))
// Confirmed this set of checks works with Redump entries 10430, 11347, 13230, 18614, 28257, 31149, 31824, 52606, 57721, 58455, 58573, 62935, 63941, 64255, 65569, 66005, 70504, 73502, 74520, 78048, 79729, 83468, 98589, and 101261.
// Best case scenario for SafeDisc 2+: A full SafeDisc version is found in a line starting with "Macrovision Protected Application". All other SafeDisc detections can be safely scrubbed.
// TODO: Scrub "Macrovision Protected Application, " from before the SafeDisc version.
if (foundProtections.Any(p => Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}", RegexOptions.Compiled) && p.StartsWith("Macrovision Protected Application") && !p.Contains("SRV Tool APP")))
{
foundProtections = foundProtections.Where(p => !p.StartsWith("Macrovision Protected Application"))
.Where(p => !p.StartsWith("Macrovision Protection File"))
foundProtections = foundProtections.Where(p => !p.StartsWith("Macrovision Protection File"))
.Where(p => !p.StartsWith("Macrovision Security Driver"))
.Where(p => !p.Contains("SRV Tool APP"))
.Where(p => p != "SafeDisc")
.Where(p => !p.StartsWith("Macrovision Protected Application [Version Expunged]"))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}-[0-9]\.[0-9]{2}\.[0-9]{3}", RegexOptions.Compiled)))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}/+", RegexOptions.Compiled)))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}\+", RegexOptions.Compiled)))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}\/4\+", RegexOptions.Compiled)))
.Where(p => p != "SafeDisc 1/Lite")
.Where(p => p != "SafeDisc 2+");
.Where(p => p != "SafeDisc 2+")
.Where(p => p != "SafeDisc 3+ (DVD)");
}
else if (foundProtections.Any(p => Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}-[0-9]\.[0-9]{2}\.[0-9]{3}", RegexOptions.Compiled) && !p.StartsWith("Macrovision Protection File")))
// Next best case for SafeDisc 2+: A full SafeDisc version is found from the "SafeDisc SRV Tool APP".
else if (foundProtections.Any(p => Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}", RegexOptions.Compiled) && p.StartsWith("Macrovision Protected Application") && p.Contains("SRV Tool APP")))
{
foundProtections = foundProtections.Where(p => !p.StartsWith("Macrovision Protected Application"))
.Where(p => !p.StartsWith("Macrovision Protection File"))
foundProtections = foundProtections.Where(p => !p.StartsWith("Macrovision Protection File"))
.Where(p => !p.StartsWith("Macrovision Security Driver"))
.Where(p => p != "SafeDisc")
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}/+", RegexOptions.Compiled)))
.Where(p => !p.StartsWith("Macrovision Protected Application [Version Expunged]"))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}-[0-9]\.[0-9]{2}\.[0-9]{3}", RegexOptions.Compiled)))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}\+", RegexOptions.Compiled)))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}\/4\+", RegexOptions.Compiled)))
.Where(p => p != "SafeDisc 1/Lite")
.Where(p => p != "SafeDisc 2+");
.Where(p => p != "SafeDisc 2+")
.Where(p => p != "SafeDisc 3+ (DVD)");
}
else if (foundProtections.Any(p => Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}/+", RegexOptions.Compiled) && !p.StartsWith("Macrovision Protection File")))
// Covers specific edge cases where older drivers are erroneously placed in discs with a newer version of SafeDisc, and the specific SafeDisc version is expunged.
else if (foundProtections.Any(p => Regex.IsMatch(p, @"SafeDisc [1-2]\.[0-9]{2}\.[0-9]{3}-[1-2]\.[0-9]{2}\.[0-9]{3}$", RegexOptions.Compiled) || Regex.IsMatch(p, @"SafeDisc [1-2]\.[0-9]{2}\.[0-9]{3}$", RegexOptions.Compiled)) && foundProtections.Any(p => p == "SafeDisc 3+ (DVD)"))
{
foundProtections = foundProtections.Where(p => !p.StartsWith("Macrovision Protected Application"))
.Where(p => !p.StartsWith("Macrovision Protection File"))
.Where(p => !p.StartsWith("Macrovision Security Driver"))
.Where(p => p != "SafeDisc")
.Where(p => p != "SafeDisc 1/Lite")
.Where(p => p != "SafeDisc 2+");
foundProtections = foundProtections.Where(p => !p.StartsWith("Macrovision Protection File"))
.Where(p => !p.StartsWith("Macrovision Protected Application [Version Expunged]"))
.Where(p => !p.StartsWith("Macrovision Security Driver"))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [1-2]\.[0-9]{2}\.[0-9]{3}\+", RegexOptions.Compiled)))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [1-2]\.[0-9]{2}\.[0-9]{3}-[1-2]\.[0-9]{2}\.[0-9]{3}", RegexOptions.Compiled)))
.Where(p => p != "SafeDisc")
.Where(p => p != "SafeDisc 1/Lite")
.Where(p => p != "SafeDisc 2+");
}
// Best case for SafeDisc 1.X: A full SafeDisc version is found that isn't part of a version range.
else if (foundProtections.Any(p => Regex.IsMatch(p, @"SafeDisc 1\.[0-9]{2}\.[0-9]{3}$", RegexOptions.Compiled) && !(Regex.IsMatch(p, @"SafeDisc 1\.[0-9]{2}\.[0-9]{3}-[0-9]\.[0-9]{2}\.[0-9]{3}", RegexOptions.Compiled))))
{
foundProtections = foundProtections.Where(p => !p.StartsWith("Macrovision Protection File"))
.Where(p => !p.StartsWith("Macrovision Security Driver"))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}-[0-9]\.[0-9]{2}\.[0-9]{3}", RegexOptions.Compiled)))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}\+", RegexOptions.Compiled)))
.Where(p => p != "SafeDisc")
.Where(p => p != "SafeDisc 1")
.Where(p => p != "SafeDisc 1/Lite");
}
// Next best case for SafeDisc 1: A SafeDisc version range is found from "SECDRV.SYS".
// TODO: Scrub "Macrovision Security Driver {Version}" from before the SafeDisc version.
else if (foundProtections.Any(p => p.StartsWith("Macrovision Security Driver") && Regex.IsMatch(p, @"SafeDisc 1\.[0-9]{2}\.[0-9]{3}-[1-2]\.[0-9]{2}\.[0-9]{3}", RegexOptions.Compiled) || Regex.IsMatch(p, @"SafeDisc 1\.[0-9]{2}\.[0-9]{3}$")))
{
foundProtections = foundProtections.Where(p => !p.StartsWith("Macrovision Protection File"))
.Where(p => !p.StartsWith("Macrovision Protected Application [Version Expunged]"))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}\+", RegexOptions.Compiled)))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc 1\.[0-9]{2}\.[0-9]{3}-[0-9]\.[0-9]{2}\.[0-9]{3}", RegexOptions.Compiled)))
.Where(p => p != "SafeDisc")
.Where(p => p != "SafeDisc 1")
.Where(p => p != "SafeDisc 1/Lite");
}
// Next best case for SafeDisc 2+: A SafeDisc version range is found from "SECDRV.SYS".
// TODO: Scrub "Macrovision Security Driver {Version}" from before the SafeDisc version.
else if (foundProtections.Any(p => p.StartsWith("Macrovision Security Driver")))
{
foundProtections = foundProtections.Where(p => !p.StartsWith("Macrovision Protected Application"))
.Where(p => !p.StartsWith("Macrovision Protection File"))
foundProtections = foundProtections.Where(p => !p.StartsWith("Macrovision Protection File"))
.Where(p => !p.StartsWith("Macrovision Protected Application [Version Expunged]"))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}\+", RegexOptions.Compiled)))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc 1\.[0-9]{2}\.[0-9]{3}-[0-9]\.[0-9]{2}\.[0-9]{3}", RegexOptions.Compiled)))
.Where(p => p != "SafeDisc")
.Where(p => p != "SafeDisc 1")
.Where(p => p != "SafeDisc 1/Lite")
.Where(p => p != "SafeDisc 2+");
.Where(p => p != "SafeDisc 2+")
.Where(p => p != "SafeDisc 3+ (DVD)");
}
// Only SafeDisc Lite is found.
else if (foundProtections.Any(p => p == "SafeDisc Lite"))
{
foundProtections = foundProtections.Where(p => p != "SafeDisc")
.Where(p => !(Regex.IsMatch(p, @"SafeDisc 1\.[0-9]{2}\.[0-9]{3}-1\.[0-9]{2}\.[0-9]{3}\/Lite", RegexOptions.Compiled)));
}
// Only SafeDisc 3+ is found.
else if (foundProtections.Any(p => p == "SafeDisc 3+ (DVD)"))
{
foundProtections = foundProtections.Where(p => p != "SafeDisc")
.Where(p => p != "SafeDisc 2+")
.Where(p => !p.StartsWith("Macrovision Protection File"))
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}\+", RegexOptions.Compiled)));
}
// Only SafeDisc 2+ is found.
else if (foundProtections.Any(p => p == "SafeDisc 2+"))
{
foundProtections = foundProtections.Where(p => !p.StartsWith("Macrovision Protected Application"))
foundProtections = foundProtections.Where(p => p != "SafeDisc")
.Where(p => !p.StartsWith("Macrovision Protection File"))
.Where(p => p != "SafeDisc");
}
else if (foundProtections.Any(p => p == "SafeDisc 1/Lite"))
{
foundProtections = foundProtections.Where(p => !p.StartsWith("Macrovision Protected Application"))
.Where(p => !p.StartsWith("Macrovision Protection File"))
.Where(p => p != "SafeDisc");
.Where(p => !(Regex.IsMatch(p, @"SafeDisc [0-9]\.[0-9]{2}\.[0-9]{3}\+", RegexOptions.Compiled)));
}
}
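With GetCopyProtection removed, callers now combine RunProtectionScanOnPath and FormatProtections themselves. A minimal sketch of the new call pattern, using only the signatures shown in the hunks above (the wrapper method and variable names are illustrative):
private static async Task<string?> ScanAndFormatAsync(Drive? drive, Frontend.Options options, IProgress<ProtectionProgress>? progress)
{
    // Only scan when enabled and a drive path is available
    ProtectionDictionary? protections = null;
    if (options.ScanForProtection && drive?.Name != null)
        protections = await ProtectionTool.RunProtectionScanOnPath(drive.Name, options, progress);

    // Null means the scan did not run; an empty dictionary means nothing was found
    return ProtectionTool.FormatProtections(protections);
}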

View File

@@ -57,8 +57,8 @@ namespace MPF.Frontend.Tools
string outputFilename = Path.GetFileName(outputPath);
// Check that all of the relevant files are there
(bool foundFiles, List<string> missingFiles) = processor.FoundAllFiles(outputDirectory, outputFilename);
if (!foundFiles)
List<string> missingFiles = processor.FoundAllFiles(outputDirectory, outputFilename);
if (missingFiles.Count > 0)
{
resultProgress?.Report(ResultEventArgs.Failure($"There were files missing from the output:\n{string.Join("\n", [.. missingFiles])}"));
resultProgress?.Report(ResultEventArgs.Failure($"This may indicate an issue with the hardware or media, including unsupported devices.\nPlease see dumping program documentation for more details."));
@@ -118,11 +118,23 @@ namespace MPF.Frontend.Tools
if (system.SupportsCopyProtectionScans())
{
resultProgress?.Report(ResultEventArgs.Success("Running copy protection scan... this might take a while!"));
var (protectionString, fullProtections) = await ProtectionTool.GetCopyProtection(drive, options, protectionProgress);
info.CopyProtection!.Protection += protectionString;
info.CopyProtection.FullProtections = fullProtections as Dictionary<string, List<string>?> ?? [];
resultProgress?.Report(ResultEventArgs.Success("Copy protection scan complete!"));
ProtectionDictionary? protections = null;
try
{
if (options.ScanForProtection && drive?.Name != null)
protections = await ProtectionTool.RunProtectionScanOnPath(drive.Name, options, protectionProgress);
var protectionString = ProtectionTool.FormatProtections(protections);
info.CopyProtection!.Protection += protectionString;
info.CopyProtection.FullProtections = ReformatProtectionDictionary(protections);
resultProgress?.Report(ResultEventArgs.Success("Copy protection scan complete!"));
}
catch (Exception ex)
{
resultProgress?.Report(ResultEventArgs.Failure(ex.ToString()));
}
}
// Set fields that may have automatic filling otherwise
@@ -212,17 +224,19 @@ namespace MPF.Frontend.Tools
continue;
}
(bool singleFound, var foundIds, string? result) = await Validator.ValidateSingleTrack(wc, info, sha1);
if (singleFound)
resultProgress?.Report(ResultEventArgs.Success(result));
var foundIds = await Validator.ValidateSingleTrack(wc, info, sha1);
if (foundIds != null && foundIds.Count == 1)
resultProgress?.Report(ResultEventArgs.Success($"Single match found for {sha1}"));
else if (foundIds != null && foundIds.Count != 1)
resultProgress?.Report(ResultEventArgs.Success($"Multiple matches found for {sha1}"));
else
resultProgress?.Report(ResultEventArgs.Failure(result));
resultProgress?.Report(ResultEventArgs.Failure($"No matches found for {sha1}"));
// Add the found IDs to the map
foundIdSets.Add(foundIds?.ToArray() ?? []);
// Ensure that all tracks are found
allFound &= singleFound;
allFound &= (foundIds != null && foundIds.Count == 1);
}
// If all tracks were found, check if there are any fully-matched IDs
@@ -249,23 +263,20 @@ namespace MPF.Frontend.Tools
// If we don't have any matches but we have a universal hash
if (!info.PartiallyMatchedIDs.Any() && info.CommonDiscInfo?.CommentsSpecialFields?.ContainsKey(SiteCode.UniversalHash) == true)
{
#if NET40
var validateTask = Validator.ValidateUniversalHash(wc, info);
validateTask.Wait();
(bool singleFound, var foundIds, string? result) = validateTask.Result;
#else
(bool singleFound, var foundIds, string? result) = await Validator.ValidateUniversalHash(wc, info);
#endif
if (singleFound)
resultProgress?.Report(ResultEventArgs.Success(result));
string sha1 = info.CommonDiscInfo.CommentsSpecialFields[SiteCode.UniversalHash];
var foundIds = await Validator.ValidateUniversalHash(wc, info);
if (foundIds != null && foundIds.Count == 1)
resultProgress?.Report(ResultEventArgs.Success($"Single match found for universal hash {sha1}"));
else if (foundIds != null && foundIds.Count != 1)
resultProgress?.Report(ResultEventArgs.Success($"Multiple matches found for universal hash {sha1}"));
else
resultProgress?.Report(ResultEventArgs.Failure(result));
resultProgress?.Report(ResultEventArgs.Failure($"No matches found for universal hash {sha1}"));
// Ensure that the hash is found
allFound = singleFound;
allFound = (foundIds != null && foundIds.Count == 1);
// If we found a match, then the disc is a match
if (singleFound && foundIds != null)
if (foundIds != null && foundIds.Count == 1)
fullyMatchedIDs = foundIds;
else
fullyMatchedIDs = [];
@@ -900,6 +911,27 @@ namespace MPF.Frontend.Tools
info.VersionAndEditions.Version = valueFunc(drive) ?? string.Empty;
}
/// <summary>
/// Reformat a protection dictionary for submission info
/// </summary>
/// <param name="oldDict">ProtectionDictionary to format</param>
/// <returns>Reformatted dictionary on success, empty on error</returns>
private static Dictionary<string, List<string>?> ReformatProtectionDictionary(ProtectionDictionary? oldDict)
{
// Null or empty protections return empty
if (oldDict == null || oldDict.Count == 0)
return [];
// Reformat each set into a List
var newDict = new Dictionary<string, List<string>?>();
foreach (string key in oldDict.Keys)
{
newDict[key] = [.. oldDict[key]];
}
return newDict;
}
#endregion
}
}

View File

@@ -445,7 +445,7 @@ namespace MPF.Frontend.ViewModels
/// Performs MPF.Check functionality
/// </summary>
/// <returns>An error message if failed, otherwise string.Empty/null</returns>
public async Task<string?> CheckDump(Func<SubmissionInfo?, (bool?, SubmissionInfo?)> processUserInfo)
public async Task<string?> CheckDump(ProcessUserInfoDelegate processUserInfo)
{
if (string.IsNullOrEmpty(InputPath))
return "Invalid Input path";

View File

@@ -68,7 +68,7 @@ namespace MPF.Frontend.ViewModels
/// <summary>
/// Function to process user information
/// </summary>
private Func<SubmissionInfo?, (bool?, SubmissionInfo?)>? _processUserInfo;
private ProcessUserInfoDelegate? _processUserInfo;
#endregion
@@ -566,7 +566,7 @@ namespace MPF.Frontend.ViewModels
public void Init(
Action<LogLevel, string> loggerAction,
Func<string, string, int, bool, bool?> displayUserMessage,
Func<SubmissionInfo?, (bool?, SubmissionInfo?)> processUserInfo)
ProcessUserInfoDelegate processUserInfo)
{
// Set the callbacks
_logger = loggerAction;
@@ -773,15 +773,13 @@ namespace MPF.Frontend.ViewModels
/// <summary>
/// Check for available updates
/// </summary>
public (bool, string, string?) CheckForUpdates()
public void CheckForUpdates(out bool different, out string message, out string? url)
{
(bool different, string message, var url) = FrontendTool.CheckForNewVersion();
FrontendTool.CheckForNewVersion(out different, out message, out url);
SecretLogLn(message);
if (url == null)
message = "An exception occurred while checking for versions, please try again later. See the log window for more details.";
return (different, message, url);
}
/// <summary>
@@ -1189,16 +1187,12 @@ namespace MPF.Frontend.ViewModels
else if (this.CurrentDrive.MarkedActive)
{
VerboseLog($"Trying to detect media type for drive {this.CurrentDrive.Name} [{this.CurrentDrive.DriveFormat}] using size and filesystem.. ");
(MediaType? detectedMediaType, var errorMessage) = this.CurrentDrive.GetMediaType(this.CurrentSystem);
// If we got an error message, post it to the log
if (errorMessage != null)
VerboseLogLn($"Message from detecting media type: {errorMessage}");
MediaType? detectedMediaType = this.CurrentDrive.GetMediaType(this.CurrentSystem);
// If we got either an error or no media, default to the current System default
if (detectedMediaType == null)
{
VerboseLogLn($"Unable to detect, defaulting to {defaultMediaType.LongName()}.");
VerboseLogLn($"Could not detect media type, defaulting to {defaultMediaType.LongName()}.");
CurrentMediaType = defaultMediaType;
}
else
@@ -1838,9 +1832,9 @@ namespace MPF.Frontend.ViewModels
/// Scan and show copy protection for the current disc
/// </summary>
#if NET40
public (string?, string?) ScanAndShowProtection()
public string? ScanAndShowProtection()
#else
public async Task<(string?, string?)> ScanAndShowProtection()
public async Task<string?> ScanAndShowProtection()
#endif
{
// Determine current environment, just in case
@@ -1848,7 +1842,10 @@ namespace MPF.Frontend.ViewModels
// If we don't have a valid drive
if (this.CurrentDrive?.Name == null)
return (null, "No valid drive found!");
{
ErrorLogLn("No valid drive found!");
return null;
}
VerboseLogLn($"Scanning for copy protection in {this.CurrentDrive.Name}");
@@ -1861,35 +1858,32 @@ namespace MPF.Frontend.ViewModels
var progress = new Progress<ProtectionProgress>();
progress.ProgressChanged += ProgressUpdated;
try
{
#if NET40
var protectionTask = ProtectionTool.RunProtectionScanOnPath(this.CurrentDrive.Name, this.Options, progress);
protectionTask.Wait();
var (protections, error) = protectionTask.Result;
var protectionTask = ProtectionTool.RunProtectionScanOnPath(this.CurrentDrive.Name, this.Options, progress);
protectionTask.Wait();
var protections = protectionTask.Result;
#else
var (protections, error) = await ProtectionTool.RunProtectionScanOnPath(this.CurrentDrive.Name, this.Options, progress);
var protections = await ProtectionTool.RunProtectionScanOnPath(this.CurrentDrive.Name, this.Options, progress);
#endif
var output = ProtectionTool.FormatProtections(protections);
// If SmartE is detected on the current disc, remove `/sf` from the flags for DIC only -- Disabled until further notice
//if (Env.InternalProgram == InternalProgram.DiscImageCreator && output.Contains("SmartE"))
//{
// ((ExecutionContexts.DiscImageCreator.ExecutionContext)Env.ExecutionContext)[ExecutionContexts.DiscImageCreator.FlagStrings.ScanFileProtect] = false;
// if (this.Options.VerboseLogging)
// this.Logger.VerboseLogLn($"SmartE detected, removing {ExecutionContexts.DiscImageCreator.FlagStrings.ScanFileProtect} from parameters");
//}
if (string.IsNullOrEmpty(error))
var output = ProtectionTool.FormatProtections(protections);
LogLn($"Detected the following protections in {this.CurrentDrive.Name}:\r\n\r\n{output}");
else
ErrorLogLn($"Path could not be scanned! Exception information:\r\n\r\n{error}");
this.Status = tempContent;
this.StartStopButtonEnabled = ShouldEnableDumpingButton();
this.MediaScanButtonEnabled = true;
this.UpdateVolumeLabelEnabled = true;
this.CopyProtectScanButtonEnabled = true;
this.Status = tempContent;
this.StartStopButtonEnabled = ShouldEnableDumpingButton();
this.MediaScanButtonEnabled = true;
this.UpdateVolumeLabelEnabled = true;
this.CopyProtectScanButtonEnabled = true;
return (output, error);
return output;
}
catch (Exception ex)
{
ErrorLogLn($"Path could not be scanned! Exception information:\r\n\r\n{ex}");
return null;
}
}
/// <summary>

View File

@@ -119,11 +119,16 @@ namespace MPF.Frontend.ViewModels
#region UI Commands
/// <summary>
/// Test Redump login credentials
/// Get the human-readable message for a Redump login result
/// </summary>
public static async Task<(bool?, string?)> TestRedumpLogin(string username, string password)
public static string GetRedumpLoginResult(bool? success)
{
return await RedumpClient.ValidateCredentials(username, password);
return success switch
{
true => "Redump username and password accepted!",
false => "Redump username and password denied!",
null => "An error occurred validating your credentials!",
};
}
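A short sketch of how the new helper is consumed: the caller obtains the nullable login result itself (previously done inside TestRedumpLogin) and passes only that result in; how the bool? is produced is outside this hunk:
// Illustrative only; the credential check itself is not shown in this diff
bool? loginResult = null; // e.g. the outcome of validating Redump credentials elsewhere
string message = GetRedumpLoginResult(loginResult);
// message == "An error occurred validating your credentials!"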
/// <summary>

View File

@@ -186,7 +186,7 @@ namespace MPF.Processors
}
/// <inheritdoc/>
internal override List<OutputFile> GetOutputFiles(string baseFilename)
internal override List<OutputFile> GetOutputFiles(string? baseDirectory, string baseFilename)
{
switch (Type)
{

View File

@@ -55,9 +55,10 @@ namespace MPF.Processors
/// <summary>
/// Generate a list of all output files generated
/// </summary>
/// <param name="baseFilename">Base filename to use for checking</param>
/// <param name="baseDirectory">Base filename and path to use for checking</param>
/// <param name="baseFilename">Base filename and path to use for checking</param>
/// <returns>List of all output files, empty otherwise</returns>
internal abstract List<OutputFile> GetOutputFiles(string baseFilename);
internal abstract List<OutputFile> GetOutputFiles(string? baseDirectory, string baseFilename);
#endregion
@@ -71,10 +72,11 @@ namespace MPF.Processors
/// <param name="outputFilename">Output filename to use as the base path</param>
/// <param name="processor">Processor object representing how to process the outputs</param>
/// <returns>True if the process succeeded, false otherwise</returns>
public (bool, string) CompressLogFiles(string? outputDirectory, string? filenameSuffix, string outputFilename)
public bool CompressLogFiles(string? outputDirectory, string? filenameSuffix, string outputFilename, out string status)
{
#if NET20 || NET35 || NET40
return (false, "Log compression is not available for this framework version");
status = "Log compression is not available for this framework version";
return false;
#else
// Prepare the necessary paths
outputFilename = Path.GetFileNameWithoutExtension(outputFilename);
@@ -93,7 +95,10 @@ namespace MPF.Processors
// Don't create an archive if there are no paths
if (!zippableFiles.Any() && !generatedFiles.Any())
return (true, "No files to compress!");
{
status = "No files to compress!";
return true;
}
// If the file already exists, we want to delete the old one
try
@@ -103,7 +108,8 @@ namespace MPF.Processors
}
catch
{
return (false, "Could not delete old archive!");
status = "Could not delete old archive!";
return false;
}
// Add the log files to the archive and delete the uncompressed file after
@@ -115,11 +121,13 @@ namespace MPF.Processors
_ = AddToArchive(zf, zippableFiles, outputDirectory, true);
_ = AddToArchive(zf, generatedFiles, outputDirectory, false);
return (true, "Compression complete!");
status = "Compression complete!";
return true;
}
catch (Exception ex)
{
return (false, $"Compression could not complete: {ex}");
status = $"Compression could not complete: {ex}";
return false;
}
finally
{
@@ -135,7 +143,7 @@ namespace MPF.Processors
/// <param name="outputFilename">Output filename to use as the base path</param>
/// <param name="processor">Processor object representing how to process the outputs</param>
/// <returns>True if the process succeeded, false otherwise</returns>
public (bool, string) DeleteUnnecessaryFiles(string? outputDirectory, string outputFilename)
public bool DeleteUnnecessaryFiles(string? outputDirectory, string outputFilename, out string status)
{
// Prepare the necessary paths
outputFilename = Path.GetFileNameWithoutExtension(outputFilename);
@@ -149,7 +157,10 @@ namespace MPF.Processors
var files = GetDeleteableFilePaths(combinedBase);
if (!files.Any())
return (true, "No files to delete!");
{
status = "No files to delete!";
return true;
}
// Attempt to delete all of the files
foreach (string file in files)
@@ -161,7 +172,8 @@ namespace MPF.Processors
catch { }
}
return (true, "Deletion complete!");
status = "Deletion complete!";
return true;
}
/// <summary>
@@ -170,10 +182,10 @@ namespace MPF.Processors
/// <param name="outputDirectory">Output folder to write to</param>
/// <param name="outputFilename">Output filename to use as the base path</param>
/// <param name="processor">Processor object representing how to process the outputs</param>
/// <returns>Tuple of true if all required files exist, false otherwise and a list representing missing files</returns>
public (bool, List<string>) FoundAllFiles(string? outputDirectory, string outputFilename)
/// <returns>A list representing missing files, empty if none</returns>
public List<string> FoundAllFiles(string? outputDirectory, string outputFilename)
{
// First, sanitized the output filename to strip off any potential extension
// Sanitize the output filename to strip off any potential extension
outputFilename = Path.GetFileNameWithoutExtension(outputFilename);
// Then get the base path for all checking
@@ -194,12 +206,12 @@ namespace MPF.Processors
/// <returns>Dictionary of artifact keys to Base64-encoded values, if possible</returns>
public Dictionary<string, string> GenerateArtifacts(string basePath)
{
// Get the base filename and directory from the base path
string baseFilename = Path.GetFileName(basePath);
// Split the base path for matching
string baseDirectory = Path.GetDirectoryName(basePath) ?? string.Empty;
string baseFilename = Path.GetFileNameWithoutExtension(basePath);
// Get the list of output files
var outputFiles = GetOutputFiles(baseFilename);
var outputFiles = GetOutputFiles(baseDirectory, baseFilename);
if (outputFiles.Count == 0)
return [];
@@ -216,20 +228,20 @@ namespace MPF.Processors
// Skip non-existent files
foreach (string filename in outputFile.Filenames)
{
string outputFilePath = Path.Combine(baseDirectory, filename);
if (!File.Exists(outputFilePath))
string possibleFile = Path.Combine(baseDirectory, filename);
if (!File.Exists(possibleFile))
continue;
// Get binary artifacts as a byte array
if (outputFile.IsBinaryArtifact)
{
byte[] data = File.ReadAllBytes(filename);
byte[] data = File.ReadAllBytes(possibleFile);
string str = Convert.ToBase64String(data);
artifacts.Add(outputFile.ArtifactKey, str);
}
else
{
string? data = ProcessingTool.GetFullFile(filename);
string? data = ProcessingTool.GetFullFile(possibleFile);
string str = ProcessingTool.GetBase64(data) ?? string.Empty;
artifacts.Add(outputFile.ArtifactKey, str);
}
@@ -316,17 +328,17 @@ namespace MPF.Processors
/// Validate if all required output files exist
/// </summary>
/// <param name="basePath">Base filename and path to use for checking</param>
/// <returns>Tuple of true if all required files exist, false otherwise and a list representing missing files</returns>
private (bool, List<string>) CheckRequiredFiles(string basePath)
/// <returns>A list representing missing files, empty if none</returns>
private List<string> CheckRequiredFiles(string basePath)
{
// Get the base filename and directory from the base path
string baseFilename = Path.GetFileName(basePath);
// Split the base path for matching
string baseDirectory = Path.GetDirectoryName(basePath) ?? string.Empty;
string baseFilename = Path.GetFileNameWithoutExtension(basePath);
// Get the list of output files
var outputFiles = GetOutputFiles(baseFilename);
var outputFiles = GetOutputFiles(baseDirectory, baseFilename);
if (outputFiles.Count == 0)
return (false, ["Media and system combination not supported"]);
return ["Media and system combination not supported"];
// Check for the log file
bool logArchiveExists = false;
@@ -381,18 +393,22 @@ namespace MPF.Processors
#endif
}
return (!missingFiles.Any(), missingFiles);
return missingFiles;
}
/// <summary>
/// Generate a list of all deleteable filenames
/// </summary>
/// <param name="baseFilename">Base filename to use for generation</param>
/// <param name="basePath">Base filename and path to use for checking</param>
/// <returns>List of all deleteable filenames, empty otherwise</returns>
private List<string> GetDeleteableFilenames(string baseFilename)
private List<string> GetDeleteableFilenames(string basePath)
{
// Split the base path for matching
string baseDirectory = Path.GetDirectoryName(basePath) ?? string.Empty;
string baseFilename = Path.GetFileNameWithoutExtension(basePath);
// Get the list of output files
var outputFiles = GetOutputFiles(baseFilename);
var outputFiles = GetOutputFiles(baseDirectory, baseFilename);
if (outputFiles.Count == 0)
return [];
@@ -408,12 +424,11 @@ namespace MPF.Processors
/// <returns>List of all deleteable file paths, empty otherwise</returns>
private List<string> GetDeleteableFilePaths(string basePath)
{
// Get the base filename and directory from the base path
string baseFilename = Path.GetFileName(basePath);
// Split the base path for matching
string baseDirectory = Path.GetDirectoryName(basePath) ?? string.Empty;
// Get the list of deleteable files
var deleteableFilenames = GetDeleteableFilenames(baseFilename);
var deleteableFilenames = GetDeleteableFilenames(basePath);
if (deleteableFilenames.Count == 0)
return [];
@@ -422,11 +437,11 @@ namespace MPF.Processors
foreach (var filename in deleteableFilenames)
{
// Skip non-existent files
string outputFilePath = Path.Combine(baseDirectory, filename);
if (!File.Exists(outputFilePath))
string possiblePath = Path.Combine(baseDirectory, filename);
if (!File.Exists(possiblePath))
continue;
deleteableFiles.Add(outputFilePath);
deleteableFiles.Add(possiblePath);
}
return deleteableFiles;
@@ -458,10 +473,10 @@ namespace MPF.Processors
/// <summary>
/// Generate a list of all MPF-specific log files generated
/// </summary>
/// <param name="outputDirectory">Output folder to write to</param>
/// <param name="basePath">Base directory to use for checking</param>
/// <param name="filenameSuffix">Optional suffix to append to the filename</param>
/// <returns>List of all log file paths, empty otherwise</returns>
private static List<string> GetGeneratedFilePaths(string? outputDirectory, string? filenameSuffix)
private static List<string> GetGeneratedFilePaths(string? baseDirectory, string? filenameSuffix)
{
// Get the list of generated files
var generatedFilenames = GetGeneratedFilenames(filenameSuffix);
@@ -469,18 +484,18 @@ namespace MPF.Processors
return [];
// Ensure the output directory
outputDirectory ??= string.Empty;
baseDirectory ??= string.Empty;
// Return only files that exist
var generatedFiles = new List<string>();
foreach (var filename in generatedFilenames)
{
// Skip non-existent files
string outputFilePath = Path.Combine(outputDirectory, filename);
if (!File.Exists(outputFilePath))
string possiblePath = Path.Combine(baseDirectory, filename);
if (!File.Exists(possiblePath))
continue;
generatedFiles.Add(outputFilePath);
generatedFiles.Add(possiblePath);
}
return generatedFiles;
@@ -489,18 +504,22 @@ namespace MPF.Processors
/// <summary>
/// Generate a list of all zippable filenames
/// </summary>
/// <param name="baseFilename">Base filename to use for generation</param>
/// <param name="basePath">Base filename and path to use for checking</param>
/// <returns>List of all zippable filenames, empty otherwise</returns>
private List<string> GetZippableFilenames(string baseFilename)
private List<string> GetZippableFilenames(string basePath)
{
// Split the base path for matching
string baseDirectory = Path.GetDirectoryName(basePath) ?? string.Empty;
string baseFilename = Path.GetFileNameWithoutExtension(basePath);
// Get the list of output files
var outputFiles = GetOutputFiles(baseFilename);
var outputFiles = GetOutputFiles(baseDirectory, baseFilename);
if (outputFiles.Count == 0)
return [];
// Filter down to deleteable files
var deleteableFiles = outputFiles.Where(of => of.IsZippable);
return deleteableFiles.SelectMany(of => of.Filenames).ToList();
// Filter down to zippable files
var zippableFiles = outputFiles.Where(of => of.IsZippable);
return zippableFiles.SelectMany(of => of.Filenames).ToList();
}
/// <summary>
@@ -510,12 +529,11 @@ namespace MPF.Processors
/// <returns>List of all zippable file paths, empty otherwise</returns>
private List<string> GetZippableFilePaths(string basePath)
{
// Get the base filename and directory from the base path
string baseFilename = Path.GetFileName(basePath);
// Split the base path for matching
string baseDirectory = Path.GetDirectoryName(basePath) ?? string.Empty;
// Get the list of zippable files
var zippableFilenames = GetZippableFilenames(baseFilename);
var zippableFilenames = GetZippableFilenames(basePath);
if (zippableFilenames.Count == 0)
return [];
@@ -524,11 +542,11 @@ namespace MPF.Processors
foreach (var filename in zippableFilenames)
{
// Skip non-existent files
string outputFilePath = Path.Combine(baseDirectory, filename);
if (!File.Exists(outputFilePath))
string possiblePath = Path.Combine(baseDirectory, filename);
if (!File.Exists(possiblePath))
continue;
zippableFiles.Add(outputFilePath);
zippableFiles.Add(possiblePath);
}
return zippableFiles;
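A sketch of consuming the refactored BaseProcessor helpers shown above: FoundAllFiles now returns only the missing-file list, and CompressLogFiles/DeleteUnnecessaryFiles report their status through an out string (the surrounding variables are illustrative):
List<string> missingFiles = processor.FoundAllFiles(outputDirectory, outputFilename);
if (missingFiles.Count > 0)
    Console.WriteLine($"Missing files: {string.Join(", ", missingFiles)}");

if (!processor.CompressLogFiles(outputDirectory, filenameSuffix, outputFilename, out string compressStatus))
    Console.WriteLine($"Compression failed: {compressStatus}");

if (!processor.DeleteUnnecessaryFiles(outputDirectory, outputFilename, out string deleteStatus))
    Console.WriteLine($"Deletion failed: {deleteStatus}");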

View File

@@ -1,6 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using SabreTools.Hashing;
using SabreTools.Models.Logiqx;
@@ -67,7 +66,7 @@ namespace MPF.Processors
}
/// <inheritdoc/>
internal override List<OutputFile> GetOutputFiles(string baseFilename)
internal override List<OutputFile> GetOutputFiles(string? baseDirectory, string baseFilename)
{
switch (Type)
{
@@ -174,7 +173,9 @@ namespace MPF.Processors
if (hex == null)
return null;
return Regex.Replace(hex, ".{32}", "$0\n");
// First separate into lines, then into blocks of 4 hex digits
string bca = Regex.Replace(hex, ".{32}", "$0\n");
return Regex.Replace(bca, "[0-9a-fA-F]{4}", "$0 ");
}
catch
{
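A worked example of the two Regex passes above on a 64-character BCA hex dump (the sample value is illustrative): the first pass breaks the dump into 32-character lines, the second inserts a space after every 2-byte block.
string hex = "000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F";
string bca = Regex.Replace(hex, ".{32}", "$0\n");               // 32 hex chars per line
string formatted = Regex.Replace(bca, "[0-9a-fA-F]{4}", "$0 "); // 4 hex digits (2 bytes) per block
// formatted:
// 0001 0203 0405 0607 0809 0A0B 0C0D 0E0F
// 1011 1213 1415 1617 1819 1A1B 1C1D 1E1F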

View File

@@ -52,18 +52,13 @@ namespace MPF.Processors
_existsFunc = existsFunc;
}
/// <summary>
/// Indicates if an output file exists in a base directory
/// </summary>
/// <param name="baseDirectory">Base directory to check in</param>
/// <inheritdoc/>
public override bool Exists(string baseDirectory)
{
// If the base directory is invalid
if (string.IsNullOrEmpty(baseDirectory))
return false;
// Ensure the directory exists
if (!Directory.Exists(baseDirectory))
return false;
foreach (string filename in Filenames)
{
// Check for invalid filenames
@@ -72,8 +67,8 @@ namespace MPF.Processors
try
{
string possiblePath = Path.Combine(baseDirectory, filename);
if (_existsFunc(possiblePath))
string possibleFile = Path.Combine(baseDirectory, filename);
if (_existsFunc(possibleFile))
return true;
}
catch { }

View File

@@ -75,7 +75,7 @@ namespace MPF.Processors
info = Builder.EnsureAllSections(info);
// Get the dumping program and version
var (dicCmd, dicVersion) = GetCommandFilePathAndVersion(basePath);
var dicVersion = GetCommandFilePathAndVersion(basePath, out var dicCmd);
info.DumpingInfo!.DumpingProgram ??= string.Empty;
info.DumpingInfo.DumpingProgram += $" {dicVersion ?? "Unknown Version"}";
info.DumpingInfo.DumpingDate = ProcessingTool.GetFileModifiedDate(dicCmd)?.ToString("yyyy-MM-dd HH:mm:ss");
@@ -328,12 +328,15 @@ namespace MPF.Processors
if (!string.IsNullOrEmpty(info.Extras.Header))
info.Extras.Header = string.Join("\n", info.Extras.Header.Split('\n').Take(16).ToArray());
if (GetGDROMBuildInfo(info.Extras.Header, out var gdSerial, out var gdVersion, out var gdDate))
if (GetGDROMBuildInfo(info.Extras.Header,
out var serial,
out var version,
out var date))
{
// Ensure internal serial is pulled from local data
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = gdSerial ?? string.Empty;
info.VersionAndEditions!.Version = gdVersion ?? string.Empty;
info.CommonDiscInfo.EXEDateBuildDate = gdDate ?? string.Empty;
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = serial ?? string.Empty;
info.VersionAndEditions!.Version = version ?? string.Empty;
info.CommonDiscInfo.EXEDateBuildDate = date ?? string.Empty;
}
}
@@ -364,12 +367,15 @@ namespace MPF.Processors
if (!string.IsNullOrEmpty(info.Extras.Header))
info.Extras.Header = string.Join("\n", info.Extras.Header.Split('\n').Take(16).ToArray());
if (GetGDROMBuildInfo(info.Extras.Header, out var gdSerial, out var gdVersion, out var gdDate))
if (GetGDROMBuildInfo(info.Extras.Header,
out var serial,
out var version,
out var date))
{
// Ensure internal serial is pulled from local data
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = gdSerial ?? string.Empty;
info.VersionAndEditions!.Version = gdVersion ?? string.Empty;
info.CommonDiscInfo.EXEDateBuildDate = gdDate ?? string.Empty;
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = serial ?? string.Empty;
info.VersionAndEditions!.Version = version ?? string.Empty;
info.CommonDiscInfo.EXEDateBuildDate = date ?? string.Empty;
}
}
@@ -384,12 +390,15 @@ namespace MPF.Processors
if (!string.IsNullOrEmpty(info.Extras.Header))
info.Extras.Header = string.Join("\n", info.Extras.Header.Split('\n').Take(16).ToArray());
if (GetGDROMBuildInfo(info.Extras.Header, out var gdSerial, out var gdVersion, out var gdDate))
if (GetGDROMBuildInfo(info.Extras.Header,
out var serial,
out var version,
out var date))
{
// Ensure internal serial is pulled from local data
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = gdSerial ?? string.Empty;
info.VersionAndEditions!.Version = gdVersion ?? string.Empty;
info.CommonDiscInfo.EXEDateBuildDate = gdDate ?? string.Empty;
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = serial ?? string.Empty;
info.VersionAndEditions!.Version = version ?? string.Empty;
info.CommonDiscInfo.EXEDateBuildDate = date ?? string.Empty;
}
}
@@ -404,12 +413,15 @@ namespace MPF.Processors
if (!string.IsNullOrEmpty(info.Extras.Header))
info.Extras.Header = string.Join("\n", info.Extras.Header.Split('\n').Take(16).ToArray());
if (GetGDROMBuildInfo(info.Extras.Header, out var gdSerial, out var gdVersion, out var gdDate))
if (GetGDROMBuildInfo(info.Extras.Header,
out var serial,
out var version,
out var date))
{
// Ensure internal serial is pulled from local data
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = gdSerial ?? string.Empty;
info.VersionAndEditions!.Version = gdVersion ?? string.Empty;
info.CommonDiscInfo.EXEDateBuildDate = gdDate ?? string.Empty;
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = serial ?? string.Empty;
info.VersionAndEditions!.Version = version ?? string.Empty;
info.CommonDiscInfo.EXEDateBuildDate = date ?? string.Empty;
}
}
@@ -424,12 +436,15 @@ namespace MPF.Processors
if (!string.IsNullOrEmpty(info.Extras.Header))
info.Extras.Header = string.Join("\n", info.Extras.Header.Split('\n').Take(16).ToArray());
if (GetGDROMBuildInfo(info.Extras.Header, out var gdSerial, out var gdVersion, out var gdDate))
if (GetGDROMBuildInfo(info.Extras.Header,
out var serial,
out var version,
out var date))
{
// Ensure internal serial is pulled from local data
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = gdSerial ?? string.Empty;
info.VersionAndEditions!.Version = gdVersion ?? string.Empty;
info.CommonDiscInfo.EXEDateBuildDate = gdDate ?? string.Empty;
info.CommonDiscInfo!.CommentsSpecialFields![SiteCode.InternalSerialName] = serial ?? string.Empty;
info.VersionAndEditions!.Version = version ?? string.Empty;
info.CommonDiscInfo.EXEDateBuildDate = date ?? string.Empty;
}
}
@@ -480,7 +495,7 @@ namespace MPF.Processors
}
/// <inheritdoc/>
internal override List<OutputFile> GetOutputFiles(string baseFilename)
internal override List<OutputFile> GetOutputFiles(string? baseDirectory, string baseFilename)
{
switch (Type)
{
@@ -572,7 +587,7 @@ namespace MPF.Processors
new([$"{baseFilename} (Track 1)(-LBA).sub", $"{baseFilename} (Track 01)(-LBA).sub"], OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"track1_lba_sub"),
new($"{baseFilename} (Track AA).sub", OutputFileFlags.Binary
new([$"{baseFilename} (Track AA).sub", $"{baseFilename} (Lead-out)(Track AA).sub"], OutputFileFlags.Binary
| OutputFileFlags.Zippable,
"track_aa_sub"),
];
@@ -713,12 +728,13 @@ namespace MPF.Processors
/// Get the command file path and extract the version from it
/// </summary>
/// <param name="basePath">Base filename and path to use for checking</param>
/// <returns>Tuple of file path and version as strings, both null on error</returns>
private static (string?, string?) GetCommandFilePathAndVersion(string basePath)
/// <returns>The version as a string, null on error</returns>
private static string? GetCommandFilePathAndVersion(string basePath, out string? commandPath)
{
// If we have an invalid base path, we can do nothing
commandPath = null;
if (string.IsNullOrEmpty(basePath))
return (null, null);
return null;
// Generate the matching regex based on the base path
string baseFilename = Path.GetFileName(basePath);
@@ -727,17 +743,16 @@ namespace MPF.Processors
// Find the first match for the command file
var parentDirectory = Path.GetDirectoryName(basePath);
if (string.IsNullOrEmpty(parentDirectory))
return (null, null);
return null;
var currentFiles = Directory.GetFiles(parentDirectory);
var commandPath = currentFiles.FirstOrDefault(f => cmdFilenameRegex.IsMatch(f));
if (string.IsNullOrEmpty(commandPath))
return (null, null);
commandPath = currentFiles.FirstOrDefault(f => cmdFilenameRegex.IsMatch(f));
if (string.IsNullOrEmpty(commandPath))
return null;
// Extract the version string
var match = cmdFilenameRegex.Match(commandPath);
string version = match.Groups[1].Value;
return (commandPath, version);
return match.Groups[1].Value;
}
#endregion
@@ -1887,15 +1902,36 @@ namespace MPF.Processors
try
{
// Fast forward to the offsets
// Get a list of all found offsets
var offsets = new List<string>();
// Loop over all possible offsets
using var sr = File.OpenText(disc);
while (sr.ReadLine()?.Trim()?.StartsWith("========== Offset") == false) ;
sr.ReadLine(); // Combined Offset
sr.ReadLine(); // Drive Offset
sr.ReadLine(); // Separator line
while (!sr.EndOfStream)
{
// Fast forward to the offsets
while (sr.ReadLine()?.Trim()?.StartsWith("========== Offset") == false) ;
if (sr.EndOfStream)
break;
sr.ReadLine(); // Combined Offset
sr.ReadLine(); // Drive Offset
sr.ReadLine(); // Separator line
// Now that we're at the offsets, attempt to get the sample offset
string offset = sr.ReadLine()?.Split(' ')?.LastOrDefault() ?? string.Empty;
offsets.Add(offset);
}
// Deduplicate the offsets
offsets = offsets
.Where(s => !string.IsNullOrEmpty(s))
.Distinct()
.ToList();
// Now that we're at the offsets, attempt to get the sample offset
return sr.ReadLine()?.Split(' ')?.LastOrDefault();
return string.Join("; ", [.. offsets]);
}
catch
{

View File

@@ -1,4 +1,4 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<!-- Assembly Properties -->
@@ -9,7 +9,7 @@
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<VersionPrefix>3.2.2</VersionPrefix>
<VersionPrefix>3.2.3</VersionPrefix>
<WarningsNotAsErrors>NU5104</WarningsNotAsErrors>
<!-- Package Properties -->
@@ -48,20 +48,16 @@
<ItemGroup Condition="!$(TargetFramework.StartsWith(`net2`)) AND !$(TargetFramework.StartsWith(`net3`)) AND !$(TargetFramework.StartsWith(`net40`))">
<PackageReference Include="System.IO.Compression" Version="4.3.0" />
</ItemGroup>
<ItemGroup Condition="$(TargetFramework.StartsWith(`net4`)) AND !$(TargetFramework.StartsWith(`net40`))">
<PackageReference Include="IndexRange" Version="1.0.3" />
</ItemGroup>
<ItemGroup Condition="!$(TargetFramework.StartsWith(`net2`)) AND !$(TargetFramework.StartsWith(`net3`)) AND !$(TargetFramework.StartsWith(`net40`)) AND !$(TargetFramework.StartsWith(`net452`))">
<PackageReference Include="System.IO.Compression.ZipFile" Version="4.3.0" />
<PackageReference Include="System.Runtime.CompilerServices.Unsafe" Version="6.0.0" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="psxt001z.Library" Version="0.21.0-rc1" />
<PackageReference Include="SabreTools.Hashing" Version="1.2.0" />
<PackageReference Include="SabreTools.Models" Version="1.4.8" />
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.1" />
<PackageReference Include="SabreTools.Serialization" Version="1.6.5" />
<PackageReference Include="SabreTools.Hashing" Version="1.2.2" />
<PackageReference Include="SabreTools.Models" Version="1.4.11" />
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.4" />
<PackageReference Include="SabreTools.Serialization" Version="1.6.9" />
</ItemGroup>
</Project>

View File

@@ -195,12 +195,10 @@ namespace MPF.Processors
/// <param name="baseDirectory">Base directory to check in</param>
public virtual bool Exists(string baseDirectory)
{
// If the base directory is invalid
if (string.IsNullOrEmpty(baseDirectory))
return false;
// Ensure the directory exists
if (!Directory.Exists(baseDirectory))
return false;
foreach (string filename in Filenames)
{
// Check for invalid filenames
@@ -209,8 +207,8 @@ namespace MPF.Processors
try
{
string possiblePath = Path.Combine(baseDirectory, filename);
if (File.Exists(possiblePath))
string possibleFile = Path.Combine(baseDirectory, filename);
if (File.Exists(possibleFile))
return true;
}
catch { }

View File

@@ -76,7 +76,7 @@ namespace MPF.Processors
}
/// <inheritdoc/>
internal override List<OutputFile> GetOutputFiles(string baseFilename)
internal override List<OutputFile> GetOutputFiles(string? baseDirectory, string baseFilename)
{
switch (Type)
{

View File

@@ -361,7 +361,7 @@ namespace MPF.Processors
}
/// <inheritdoc/>
internal override List<OutputFile> GetOutputFiles(string baseFilename)
internal override List<OutputFile> GetOutputFiles(string? baseDirectory, string baseFilename)
{
switch (Type)
{
@@ -385,7 +385,7 @@ namespace MPF.Processors
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"log"),
new CustomOutputFile($"{baseFilename}.log", OutputFileFlags.Required,
new CustomOutputFile([$"{baseFilename}.dat", $"{baseFilename}.log"], OutputFileFlags.Required,
DatfileExists),
new($"{baseFilename}.pma", OutputFileFlags.Binary
| OutputFileFlags.Zippable,
@@ -406,8 +406,15 @@ namespace MPF.Processors
"toc"),
];
// Get the base path for cuesheet reading
string basePath;
if (string.IsNullOrEmpty(baseDirectory))
basePath = baseFilename;
else
basePath = Path.Combine(baseDirectory, baseFilename);
// Include .hash and .skeleton for all files in cuesheet
var cueSheet = SabreTools.Serialization.Deserializers.CueSheet.DeserializeFile($"{baseFilename}.cue");
var cueSheet = SabreTools.Serialization.Deserializers.CueSheet.DeserializeFile($"{basePath}.cue");
if (cueSheet?.Files != null)
{
int trackId = 1;
@@ -450,7 +457,7 @@ namespace MPF.Processors
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"log"),
new CustomOutputFile($"{baseFilename}.log", OutputFileFlags.Required,
new CustomOutputFile([$"{baseFilename}.dat", $"{baseFilename}.log"], OutputFileFlags.Required,
DatfileExists),
new([$"{baseFilename}.manufacturer", $"{baseFilename}.0.manufacturer"], OutputFileFlags.Required
| OutputFileFlags.Binary
@@ -501,7 +508,7 @@ namespace MPF.Processors
| OutputFileFlags.Artifact
| OutputFileFlags.Zippable,
"log"),
new CustomOutputFile($"{baseFilename}.log", OutputFileFlags.Required,
new CustomOutputFile([$"{baseFilename}.dat", $"{baseFilename}.log"], OutputFileFlags.Required,
DatfileExists),
new([$"{baseFilename}.physical", $"{baseFilename}.0.physical"], OutputFileFlags.Required
| OutputFileFlags.Binary
@@ -777,15 +784,16 @@ namespace MPF.Processors
/// <returns>True if error counts could be retrieved, false otherwise</returns>
private static bool GetErrorCount(string log, out long redumpErrors, out long c2Errors)
{
// Set the default values for error counts
redumpErrors = -1; c2Errors = -1;
// If the file doesn't exist, we can't get info from it
if (!File.Exists(log))
{
redumpErrors = -1; c2Errors = -1;
return false;
}
try
{
redumpErrors = 0; c2Errors = 0;
using var sr = File.OpenText(log);
// Find the error counts
@@ -799,7 +807,9 @@ namespace MPF.Processors
if (line.StartsWith("C2:"))
{
string[] parts = line.Split(' ');
if (!long.TryParse(parts[1], out c2Errors))
if (long.TryParse(parts[1], out long c2TrackErrors))
c2Errors += c2TrackErrors;
else
c2Errors = -1;
}
@@ -807,17 +817,24 @@ namespace MPF.Processors
else if (line.StartsWith("REDUMP.ORG errors:"))
{
string[] parts = line!.Split(' ');
if (!long.TryParse(parts[2], out redumpErrors))
if (long.TryParse(parts[2], out long redumpTrackErrors))
redumpErrors += redumpTrackErrors;
else
redumpErrors = -1;
}
// If either value is -1, exit the loop
if (c2Errors == -1 || redumpErrors == -1)
break;
}
// If the Redump error count is -1, then an issue occurred
return redumpErrors != -1;
// If either error count is -1, then an issue occurred
return c2Errors != -1 && redumpErrors != -1;
}
catch
{
// We don't care what the exception is right now
redumpErrors = -1; c2Errors = -1;
return false;
}
}
@@ -855,6 +872,10 @@ namespace MPF.Processors
{
buildDate = line.Substring("build date: ".Length).Trim();
}
else if (line.StartsWith("version:"))
{
version = line.Substring("version: ".Length).Trim();
}
else if (line.StartsWith("serial:"))
{
serial = line.Substring("serial: ".Length).Trim();
@@ -867,17 +888,13 @@ namespace MPF.Processors
{
region = line.Substring("regions: ".Length).Trim();
}
else if (line.StartsWith("version:"))
{
version = line.Substring("version: ".Length).Trim();
}
else if (line.StartsWith("header:"))
{
line = sr.ReadLine()?.TrimStart();
while (line?.StartsWith("00") == true)
{
headerString += line + "\n";
line = sr.ReadLine()?.Trim();
line = sr.ReadLine()?.TrimStart();
}
}
else
@@ -1435,6 +1452,10 @@ namespace MPF.Processors
{
buildDate = line.Substring("build date: ".Length).Trim();
}
else if (line.StartsWith("version:"))
{
version = line.Substring("version: ".Length).Trim();
}
else if (line.StartsWith("serial:"))
{
serial = line.Substring("serial: ".Length).Trim();
@@ -1447,17 +1468,13 @@ namespace MPF.Processors
{
region = line.Substring("regions: ".Length).Trim();
}
else if (line.StartsWith("version:"))
{
version = line.Substring("version: ".Length).Trim();
}
else if (line?.StartsWith("header:") == true)
{
line = sr.ReadLine()?.TrimStart();
while (line?.StartsWith("00") == true)
{
headerString += line + "\n";
line = sr.ReadLine()?.Trim();
line = sr.ReadLine()?.TrimStart();
}
}
else

View File

@@ -47,12 +47,10 @@ namespace MPF.Processors
/// <inheritdoc/>
public override bool Exists(string baseDirectory)
{
// If the base directory is invalid
if (string.IsNullOrEmpty(baseDirectory))
return false;
// Ensure the directory exists
if (!Directory.Exists(baseDirectory))
return false;
// Get list of all files in directory
var directoryFiles = Directory.GetFiles(baseDirectory);
foreach (string file in directoryFiles)

View File

@@ -1,7 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SabreTools.Hashing;
using SabreTools.Models.Logiqx;
using SabreTools.RedumpLib;
@@ -78,7 +77,7 @@ namespace MPF.Processors
}
/// <inheritdoc/>
internal override List<OutputFile> GetOutputFiles(string baseFilename)
internal override List<OutputFile> GetOutputFiles(string? baseDirectory, string baseFilename)
{
switch (Type)
{

View File

@@ -150,7 +150,7 @@ namespace MPF.Processors
}
/// <inheritdoc/>
internal override List<OutputFile> GetOutputFiles(string baseFilename)
internal override List<OutputFile> GetOutputFiles(string? baseDirectory, string baseFilename)
{
switch (Type)
{

View File

@@ -1,4 +1,4 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net6.0;net8.0</TargetFrameworks>
@@ -15,22 +15,22 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.CodeCoverage" Version="17.10.0-release-24177-07" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.10.0-release-24177-07" />
<PackageReference Include="Microsoft.CodeCoverage" Version="17.11.1" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.11.1" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.1" />
<PackageReference Include="xunit" Version="2.8.0" />
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.4" />
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.abstractions" Version="2.0.3" />
<PackageReference Include="xunit.analyzers" Version="1.13.0" />
<PackageReference Include="xunit.assert" Version="2.8.0" />
<PackageReference Include="xunit.core" Version="2.8.0" />
<PackageReference Include="xunit.extensibility.core" Version="2.8.0" />
<PackageReference Include="xunit.extensibility.execution" Version="2.8.0" />
<PackageReference Include="xunit.runner.console" Version="2.8.0">
<PackageReference Include="xunit.analyzers" Version="1.16.0" />
<PackageReference Include="xunit.assert" Version="2.9.2" />
<PackageReference Include="xunit.core" Version="2.9.2" />
<PackageReference Include="xunit.extensibility.core" Version="2.9.2" />
<PackageReference Include="xunit.extensibility.execution" Version="2.9.2" />
<PackageReference Include="xunit.runner.console" Version="2.9.2">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.0">
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>

View File

@@ -1,4 +1,4 @@
<Project Sdk="Microsoft.NET.Sdk.WindowsDesktop">
<Project Sdk="Microsoft.NET.Sdk.WindowsDesktop">
<PropertyGroup>
<!-- Assembly Properties -->
@@ -16,7 +16,7 @@
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<UseWindowsForms>true</UseWindowsForms>
<UseWPF>true</UseWPF>
<VersionPrefix>3.2.2</VersionPrefix>
<VersionPrefix>3.2.3</VersionPrefix>
<!-- Package Properties -->
<AssemblyName>MPF</AssemblyName>
@@ -37,8 +37,7 @@
<ReferenceWpfLunaTheme>false</ReferenceWpfLunaTheme>
<ReferenceWpfRoyaleTheme>false</ReferenceWpfRoyaleTheme>
</PropertyGroup>
<PropertyGroup
Condition="$(TargetFramework.StartsWith(`netcoreapp`)) OR $(TargetFramework.StartsWith(`net5`)) OR $(TargetFramework.StartsWith(`net6`)) OR $(TargetFramework.StartsWith(`net7`)) OR $(TargetFramework.StartsWith(`net8`))">
<PropertyGroup Condition="$(TargetFramework.StartsWith(`netcoreapp`)) OR $(TargetFramework.StartsWith(`net5`)) OR $(TargetFramework.StartsWith(`net6`)) OR $(TargetFramework.StartsWith(`net7`)) OR $(TargetFramework.StartsWith(`net8`))">
<ReferenceWpfAeroTheme>false</ReferenceWpfAeroTheme>
<ReferenceWpfAero2Theme>true</ReferenceWpfAero2Theme>
<ReferenceWpfAeroLiteTheme>false</ReferenceWpfAeroLiteTheme>
@@ -73,10 +72,10 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="BinaryObjectScanner" PrivateAssets="build; analyzers" ExcludeAssets="contentFiles" Version="3.1.13" GeneratePathProperty="true">
<PackageReference Include="BinaryObjectScanner" PrivateAssets="build; analyzers" ExcludeAssets="contentFiles" Version="3.1.16" GeneratePathProperty="true">
<IncludeAssets>runtime; compile; build; native; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.1" />
<PackageReference Include="SabreTools.RedumpLib" Version="1.4.4" />
</ItemGroup>
<ItemGroup>

View File

@@ -181,7 +181,7 @@ namespace MPF.UI.Windows
/// </summary>
/// <param name="submissionInfo">SubmissionInfo object to display and possibly change</param>
/// <returns>Dialog open result</returns>
- public (bool?, SubmissionInfo?) ShowDiscInformationWindow(SubmissionInfo? submissionInfo)
+ public bool? ShowDiscInformationWindow(ref SubmissionInfo? submissionInfo)
{
var discInformationWindow = new DiscInformationWindow(CheckDumpViewModel.Options, submissionInfo)
{
@@ -199,7 +199,7 @@ namespace MPF.UI.Windows
if (result == true)
submissionInfo = (discInformationWindow.DiscInformationViewModel.SubmissionInfo.Clone() as SubmissionInfo)!;
- return (result, submissionInfo!);
+ return result;
}
#endregion
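
The hunks above replace the (bool?, SubmissionInfo?) tuple with a bool? return plus a ref parameter, so callers read the edited object back out of the variable they passed in. A minimal call-site sketch under the new signature (BuildInitialSubmissionInfo is a hypothetical helper, not part of MPF; the real windows build the object from their view models):

// The SubmissionInfo variable is updated in place through the ref parameter.
SubmissionInfo? submissionInfo = BuildInitialSubmissionInfo(); // hypothetical helper
bool? result = ShowDiscInformationWindow(ref submissionInfo);

// Only the dialog result comes back; submissionInfo already reflects any edits made in the dialog.
if (result == true)
{
    // Use the updated submissionInfo here.
}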

View File

@@ -237,7 +237,7 @@ namespace MPF.UI.Windows
/// <param name="showIfSame">True to show the box even if it's the same, false to only show if it's different</param>
public void CheckForUpdates(bool showIfSame)
{
- (bool different, string message, var url) = MainViewModel.CheckForUpdates();
+ MainViewModel.CheckForUpdates(out bool different, out string message, out var url);
// If we have a new version, put it in the clipboard
if (different && !string.IsNullOrEmpty(url))
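
Here the tuple return becomes out parameters. A call-site sketch of the new shape (the clipboard and dialog handling that follows in the real method is omitted):

// Update-check results now arrive through out parameters rather than a ValueTuple.
MainViewModel.CheckForUpdates(out bool different, out string message, out var url);
// Per the surrounding code, 'different' flags a newer release, 'message' is the display text, and 'url' points at it.
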
@@ -287,8 +287,8 @@ namespace MPF.UI.Windows
public void ShowDebugDiscInfoWindow()
{
var submissionInfo = MainViewModel.CreateDebugSubmissionInfo();
- var result = ShowDiscInformationWindow(submissionInfo);
- Formatter.ProcessSpecialFields(result.Item2);
+ _ = ShowDiscInformationWindow(ref submissionInfo);
+ Formatter.ProcessSpecialFields(submissionInfo);
}
/// <summary>
@@ -296,7 +296,7 @@ namespace MPF.UI.Windows
/// </summary>
/// <param name="submissionInfo">SubmissionInfo object to display and possibly change</param>
/// <returns>Dialog open result</returns>
- public (bool?, SubmissionInfo?) ShowDiscInformationWindow(SubmissionInfo? submissionInfo)
+ public bool? ShowDiscInformationWindow(ref SubmissionInfo? submissionInfo)
{
if (MainViewModel.Options.ShowDiscEjectReminder)
CustomMessageBox.Show(this, "It is now safe to eject the disc", "Eject", MessageBoxButton.OK, MessageBoxImage.Information);
@@ -317,7 +317,7 @@ namespace MPF.UI.Windows
if (result == true)
submissionInfo = (discInformationWindow.DiscInformationViewModel.SubmissionInfo.Clone() as SubmissionInfo)!;
- return (result, submissionInfo!);
+ return result;
}
/// <summary>
@@ -337,7 +337,8 @@ namespace MPF.UI.Windows
WindowStartupLocation = WindowStartupLocation.CenterOwner,
};
- checkDumpWindow.Closed += delegate {
+ checkDumpWindow.Closed += delegate
+ {
// Unhide Main window after Check window has been closed
this.Show();
this.Activate();
@@ -362,7 +363,8 @@ namespace MPF.UI.Windows
WindowStartupLocation = WindowStartupLocation.CenterOwner,
};
- createIRDWindow.Closed += delegate {
+ createIRDWindow.Closed += delegate
+ {
// Unhide Main window after Create IRD window has been closed
this.Show();
this.Activate();
@@ -540,14 +542,14 @@ namespace MPF.UI.Windows
#endif
{
#if NET40
- var (output, error) = MainViewModel.ScanAndShowProtection();
+ var output = MainViewModel.ScanAndShowProtection();
#else
- var (output, error) = await MainViewModel.ScanAndShowProtection();
+ var output = await MainViewModel.ScanAndShowProtection();
#endif
if (!MainViewModel.LogPanelExpanded)
{
- if (!string.IsNullOrEmpty(output) && string.IsNullOrEmpty(error))
+ if (!string.IsNullOrEmpty(output))
CustomMessageBox.Show(this, output, "Detected Protection(s)", MessageBoxButton.OK, MessageBoxImage.Information);
else
CustomMessageBox.Show(this, "An exception occurred, see the log for details", "Error!", MessageBoxButton.OK, MessageBoxImage.Error);

View File

@@ -6,6 +6,7 @@ using System.Windows.Controls;
using System.Windows.Forms;
using MPF.Frontend;
using MPF.Frontend.ViewModels;
+ using SabreTools.RedumpLib.Web;
using WPFCustomMessageBox;
#pragma warning disable IDE1006 // Naming Styles
@@ -201,10 +202,11 @@ namespace MPF.UI.Windows
private async Task ValidateRedumpCredentials()
{
#if NET35
- (bool? success, string? message) = await OptionsViewModel.TestRedumpLogin(_RedumpUsernameTextBox!.Text, _RedumpPasswordBox!.Password);
+ bool? success = await RedumpClient.ValidateCredentials(_RedumpUsernameTextBox!.Text, _RedumpPasswordBox!.Password);
#else
- (bool? success, string? message) = await OptionsViewModel.TestRedumpLogin(RedumpUsernameTextBox.Text, RedumpPasswordBox.Password);
+ bool? success = await RedumpClient.ValidateCredentials(RedumpUsernameTextBox.Text, RedumpPasswordBox.Password);
#endif
+ string message = OptionsViewModel.GetRedumpLoginResult(success);
if (success == true)
CustomMessageBox.Show(this, message, "Success", MessageBoxButton.OK, MessageBoxImage.Information);

View File

@@ -1,5 +1,5 @@
# version format
- version: 3.2.2-{build}
+ version: 3.2.3-{build}
# pull request template
pull_requests: