Compare commits

40 Commits
2.6.3 ... 2.6.4

Author SHA1 Message Date
Matt Nadareski
def499769f Bump version 2023-09-25 22:01:33 -04:00
Matt Nadareski
5e1e61a441 Fix tests that have been broken for a while 2023-09-25 21:53:34 -04:00
Matt Nadareski
4896a12ec5 Add .NET 7 as a build target (not AppVeyor) 2023-09-25 21:49:49 -04:00
Matt Nadareski
b28ad5d3cd Move MainWindow 2023-09-25 21:39:28 -04:00
Matt Nadareski
4cdbbcedf0 Move options into MainViewModel 2023-09-25 21:29:55 -04:00
Matt Nadareski
6ab63ba651 Make logger a local reference 2023-09-25 21:21:51 -04:00
Matt Nadareski
d36c5099f3 Set parent on MainViewModel init 2023-09-25 21:16:43 -04:00
Matt Nadareski
8ba8347a0c Make Options non-cloneable 2023-09-25 21:11:14 -04:00
Matt Nadareski
4c5184eac5 More decoupling App 2023-09-25 21:03:29 -04:00
Matt Nadareski
9914b94716 Fix build 2023-09-25 20:55:52 -04:00
Matt Nadareski
150e69eca5 Begin decoupling App 2023-09-25 16:58:14 -04:00
Matt Nadareski
f09923974b Move OptionsWindow 2023-09-25 15:54:18 -04:00
Matt Nadareski
d98bc28930 Move to csproj tag for internals 2023-09-25 15:47:27 -04:00
Matt Nadareski
ee4a7ab653 Move LogOutput 2023-09-25 15:43:01 -04:00
Matt Nadareski
6b20aee320 Remove log formatting code 2023-09-25 15:38:33 -04:00
Matt Nadareski
a5849325f3 Remove App references from LogViewModel 2023-09-25 15:36:50 -04:00
Matt Nadareski
86d2d83fbe Remove EnableLogFormatting 2023-09-25 15:27:25 -04:00
Matt Nadareski
308e0d6937 Remove EnableProgressProcessing 2023-09-25 15:25:45 -04:00
Matt Nadareski
3541bca2d0 Move WPFCustomMessageBox 2023-09-25 15:18:46 -04:00
Matt Nadareski
2496099532 Move Constants 2023-09-25 15:17:45 -04:00
Matt Nadareski
6001395181 Move RedumpSystemComboBoxItem 2023-09-25 15:12:05 -04:00
Matt Nadareski
fa2192a284 Not building against .NET Standard 2023-09-25 15:09:21 -04:00
Matt Nadareski
f09155936e Update changelog 2023-09-18 15:40:01 -04:00
Markus Persson
a07fca6a7b Fix timestamp (#551)
Timestamp used 12-hour clock. Fixed to 24-hour.
2023-09-18 12:36:29 -07:00
Matt Nadareski
5fe7a1dac8 Migrate to Nuget package for PIC 2023-09-13 16:50:13 -04:00
Matt Nadareski
16ed2f9595 Migrate to Nuget package for cuesheets 2023-09-13 16:36:51 -04:00
Matt Nadareski
9004d2bc7b Migrate to Nuget package for XMID 2023-09-13 16:16:47 -04:00
Matt Nadareski
cc98b38290 Remove dd for Windows (fixes #544) 2023-09-07 10:36:33 -04:00
Matt Nadareski
18ef0cddff Merge branch 'master' of https://github.com/SabreTools/MPF 2023-09-05 09:26:53 -04:00
Deterous
d9ca55d96c Fix serial parsing from #542 (#549)
Serial numbers begin at serial[5]
2023-09-05 06:26:31 -07:00
Matt Nadareski
816c94de58 Migrate to Nuget package for Redump 2023-09-05 00:08:09 -04:00
Matt Nadareski
c2ba58148a Retrofit README 2023-09-02 15:24:03 -04:00
Matt Nadareski
d9533f2448 Update changelog 2023-08-31 11:14:41 -04:00
Deterous
5126d1854c Fix PlayStation serial code region parsing (#542)
* Fix PlayStation serial code region parsing

- Do not assume S_J_ and S_P_ serials are Japanese
- Fix typo: PABX -> PBPX
- PCXC are Japanese
- PUBX are USA

* Remove S_J_ serial case

* Make S_P_ serial case comment clearer

* Retain PABX serial just in case

* Parse S_P_ serials to detect Asia/Korea

* C# style substring

* String literals
2023-08-31 08:10:42 -07:00
Matt Nadareski
6baaf132a7 Fix dumping path in DD 2023-08-29 21:07:01 -04:00
Matt Nadareski
98a30e6558 Swap order of operations for changing program 2023-08-28 12:16:53 -04:00
Matt Nadareski
0912b78568 Handle extension changing only 2023-08-28 11:24:58 -04:00
Matt Nadareski
d92a1d566d Add helper for changing dumping program from UI 2023-08-26 23:17:49 -04:00
Matt Nadareski
ff40d18ed3 Fix speed setting in Aaru (fixes #539) 2023-08-26 23:10:17 -04:00
Matt Nadareski
4d9cd85ba6 Add CD Projekt ID field (fixes #530) 2023-08-26 22:57:44 -04:00
93 changed files with 1144 additions and 12579 deletions

View File

@@ -1,3 +1,41 @@
### 2.6.4 (2023-09-25)
- Add CD Projekt ID field
- Fix speed setting in Aaru
- Add helper for changing dumping program from UI
- Handle extension changing only
- Swap order of operations for changing program
- Fix dumping path in DD
- Fix PlayStation serial code region parsing (Deterous)
- Retrofit README
- Migrate to Nuget package for Redump
- Remove dd for Windows
- Migrate to Nuget package for XMID
- Migrate to Nuget package for cuesheets
- Migrate to Nuget package for PIC
- Fix timestamp (Ragowit)
- Not building against .NET Standard
- Move RedumpSystemComboBoxItem
- Move Constants
- Move WPFCustomMessageBox
- Remove EnableProgressProcessing
- Remove EnableLogFormatting
- Remove App references from LogViewModel
- Remove log formatting code
- Move LogOutput
- Move to csproj tag for internals
- Move OptionsWindow
- Begin decoupling App
- Fix build
- More decoupling App
- Make Options non-cloneable
- Set parent on MainViewModel init
- Make logger a local reference
- Move options into MainViewModel
- Move MainWindow
- Add .NET 7 as a build target (not AppVeyor)
- Fix tests that have been broken for a while
### 2.6.3 (2023-08-15)
- Update redumper to build 195
@@ -824,7 +862,7 @@
- edccchk now run on all CD-ROMs
- Discs unsupported by Windows are now recognized
- Extra \ when accepting default save has been removed.
### 1.02b (2018-05-18)
- Added missing DLL
@@ -837,4 +875,4 @@
### 1.01d (2018-05-18)
-Combine IBM PC-CD options, misc fixes.
-Combine IBM PC-CD options, misc fixes.

View File

@@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net48;net6.0</TargetFrameworks>
<TargetFrameworks>net48;net6.0;net7.0</TargetFrameworks>
<RuntimeIdentifiers>win7-x64;win8-x64;win81-x64;win10-x64;linux-x64;osx-x64</RuntimeIdentifiers>
<OutputType>Exe</OutputType>
<Title>MPF Check</Title>
@@ -10,9 +10,7 @@
<Authors>Matt Nadareski;ReignStumble;Jakz</Authors>
<Copyright>Copyright (c)2019-2023</Copyright>
<RepositoryUrl>https://github.com/SabreTools/MPF</RepositoryUrl>
<Version>2.6.3</Version>
<AssemblyVersion>$(Version)</AssemblyVersion>
<FileVersion>$(Version)</FileVersion>
<Version>2.6.4</Version>
<IncludeSource>true</IncludeSource>
<IncludeSymbols>true</IncludeSymbols>
</PropertyGroup>
@@ -27,10 +25,16 @@
<None Include="App.config" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\MPF.Core\MPF.Core.csproj" />
<ProjectReference Include="..\MPF.Library\MPF.Library.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="BurnOutSharp" PrivateAssets="build; analyzers" ExcludeAssets="contentFiles" Version="2.8.0" GeneratePathProperty="true">
<IncludeAssets>runtime; compile; build; native; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="SabreTools.RedumpLib" Version="1.1.1" />
<PackageReference Include="Unclassified.NetRevisionTask" Version="0.4.3">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
@@ -41,10 +45,4 @@
<Content Include="$(PkgBurnOutSharp)\content\**" PackagePath="contentFiles\any\any;content" CopyToOutputDirectory="Always" PackageCopyToOutput="true" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\MPF.Core\MPF.Core.csproj" />
<ProjectReference Include="..\MPF.Library\MPF.Library.csproj" />
<ProjectReference Include="..\RedumpLib\RedumpLib.csproj" />
</ItemGroup>
</Project>

View File

@@ -5,8 +5,8 @@ using MPF.Core.Converters;
using MPF.Core.Data;
using MPF.Core.Utilities;
using MPF.Library;
using RedumpLib.Data;
using RedumpLib.Web;
using SabreTools.RedumpLib.Data;
using SabreTools.RedumpLib.Web;
namespace MPF.Check
{
@@ -38,7 +38,7 @@ namespace MPF.Check
protectionProgress.ProgressChanged += ProgressUpdated;
// Validate the supplied credentials
#if NET48 || NETSTANDARD2_1
#if NET48
(bool? _, string message) = RedumpWebClient.ValidateCredentials(options?.RedumpUsername, options?.RedumpPassword);
#else
(bool? _, string message) = RedumpHttpClient.ValidateCredentials(options?.RedumpUsername, options?.RedumpPassword).ConfigureAwait(false).GetAwaiter().GetResult();
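The hunk above drops the NETSTANDARD2_1 guard and, on the non-net48 targets, bridges the async RedumpHttpClient.ValidateCredentials call back to synchronous code with ConfigureAwait(false).GetAwaiter().GetResult(). Below is a minimal, self-contained sketch of that sync-over-async pattern only; the ValidateAsync method and the credential check inside it are hypothetical stand-ins, not MPF or RedumpLib code.

using System.Threading.Tasks;

internal static class CredentialCheckSketch
{
    // Hypothetical async validator standing in for RedumpHttpClient.ValidateCredentials
    private static async Task<(bool? valid, string message)> ValidateAsync(string user, string pass)
    {
        await Task.Delay(10); // simulate a network round trip
        return (user == "demo" && pass == "demo", "checked");
    }

    // Synchronous caller, mirroring the #if NET48 / #else split shown above
    internal static (bool? valid, string message) CheckLogin(string user, string pass)
    {
#if NET48
        // On .NET Framework the original code calls a synchronous WebClient-based API instead
        return (null, "synchronous path on net48");
#else
        // On net6.0/net7.0 the async HttpClient-based API is bridged back to sync
        return ValidateAsync(user, pass).ConfigureAwait(false).GetAwaiter().GetResult();
#endif
    }
}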

View File

@@ -6,7 +6,7 @@ using System.Reflection;
using IMAPI2;
#endif
using MPF.Core.Data;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Core.Converters
{
@@ -98,7 +98,7 @@ namespace MPF.Core.Converters
if (!LongNameMethods.TryGetValue(sourceType, out MethodInfo method))
{
method = typeof(RedumpLib.Data.Extensions).GetMethod("LongName", new[] { typeof(Nullable<>).MakeGenericType(sourceType) });
method = typeof(SabreTools.RedumpLib.Data.Extensions).GetMethod("LongName", new[] { typeof(Nullable<>).MakeGenericType(sourceType) });
if (method == null)
method = typeof(EnumConverter).GetMethod("LongName", new[] { typeof(Nullable<>).MakeGenericType(sourceType) });
@@ -130,8 +130,6 @@ namespace MPF.Core.Converters
case InternalProgram.Aaru:
return "Aaru";
case InternalProgram.DD:
return "dd";
case InternalProgram.DiscImageCreator:
return "DiscImageCreator";
case InternalProgram.Redumper:
@@ -182,8 +180,6 @@ namespace MPF.Core.Converters
case "dicreator":
case "discimagecreator":
return InternalProgram.DiscImageCreator;
case "dd":
return InternalProgram.DD;
case "rd":
case "redumper":
return InternalProgram.Redumper;

View File

@@ -1,6 +1,6 @@
using System.Collections.Generic;
using System.Linq;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Core.Data
{

View File

@@ -6,7 +6,7 @@ using Microsoft.Management.Infrastructure;
using Microsoft.Management.Infrastructure.Generic;
using MPF.Core.Converters;
using MPF.Core.Utilities;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
#if NETFRAMEWORK
using IMAPI2;
#endif

View File

@@ -20,7 +20,6 @@
// Dumping support
Aaru,
DD,
DiscImageCreator,
Redumper,

View File

@@ -2,13 +2,13 @@
using System.Collections;
using System.Collections.Generic;
using MPF.Core.Converters;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Core.Data
{
public class Options : IDictionary<string, string>, ICloneable
public class Options : IDictionary<string, string>
{
private readonly Dictionary<string, string> _settings;
private Dictionary<string, string> _settings;
#region Internal Program
@@ -30,15 +30,6 @@ namespace MPF.Core.Data
set { _settings["DiscImageCreatorPath"] = value; }
}
/// <summary>
/// Path to dd for Windows
/// </summary>
public string DDPath
{
get { return GetStringSetting(_settings, "DDPath", "Programs\\DD\\dd.exe"); }
set { _settings["DDPath"] = value; }
}
/// <summary>
/// Path to Redumper
/// </summary>
@@ -529,30 +520,6 @@ namespace MPF.Core.Data
set { _settings["OpenLogWindowAtStartup"] = value.ToString(); }
}
/// <summary>
/// Enable fancy formatting of log statements
/// Disables EnableProgressProcessing if disabled
/// </summary>
/// <remarks>
/// This is mainly for outputting redirected console outputs. Not many
/// other bits of the logs include any specially handled outputs.
/// </remarks>
public bool EnableLogFormatting
{
get { return GetBooleanSetting(_settings, "EnableLogFormatting", false); }
set { _settings["EnableLogFormatting"] = value.ToString(); }
}
/// <summary>
/// Enable progress bar updating based on log text
/// Disabled if EnableLogFormatting is disabled
/// </summary>
public bool EnableProgressProcessing
{
get { return GetBooleanSetting(_settings, "EnableProgressProcessing", false); }
set { _settings["EnableProgressProcessing"] = value.ToString(); }
}
#endregion
#region Redump Login Information
@@ -587,11 +554,21 @@ namespace MPF.Core.Data
}
/// <summary>
/// Create a clone of the object
/// Constructor taking an existing Options object
/// </summary>
public object Clone()
/// <param name="source"></param>
public Options(Options source)
{
return new Options(new Dictionary<string, string>(_settings));
_settings = new Dictionary<string, string>(source._settings);
}
/// <summary>
/// Set all fields from an existing Options object
/// </summary>
/// <param name="source"></param>
public void SetFromExisting(Options source)
{
_settings = new Dictionary<string, string>(source._settings);
}
#region Helpers
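With ICloneable removed, callers copy settings through the new copy constructor and SetFromExisting instead of Clone(). A usage sketch of the new surface follows; the dictionary-based constructor is the one the old Clone() body called, and the path value is a placeholder.

using System.Collections.Generic;
using MPF.Core.Data;

internal static class OptionsCopySketch
{
    internal static void Example()
    {
        // Build an Options instance from a raw settings dictionary,
        // mirroring the constructor the removed Clone() body used
        var original = new Options(new Dictionary<string, string>
        {
            ["DiscImageCreatorPath"] = "Programs\\Creator\\DiscImageCreator.exe", // placeholder path
        });

        // Old: var copy = (Options)original.Clone();
        // New: copy construction
        var copy = new Options(original);

        // Overwrite an existing instance in place instead of swapping references
        var live = new Options(copy);
        live.SetFromExisting(original);
    }
}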

View File

@@ -1,38 +0,0 @@
namespace MPF.Core.Data
{
/// <summary>
/// Disc Information and Emergency Brake data shall be read from the PIC zone. DI units that
/// contain physical information shall be returned. Emergency Brake data shall be returned. The
/// information shall be collected from the layer specified in the Layer field of the CDB. If any data
/// can be returned, 4100 bytes shall be returned.
/// </summary>
/// <see href="https://www.t10.org/ftp/t10/document.05/05-206r0.pdf"/>
/// <see href="https://github.com/aaru-dps/Aaru.Decoders/blob/devel/Bluray/DI.cs"/>
public class PICDiscInformation
{
#region Fields
/// <summary>
/// 2048 bytes for BD-ROM, 3584 bytes for BD-R/RE
/// </summary>
/// <remarks>Big-endian format</remarks>
public ushort DataStructureLength { get; set; }
/// <summary>
/// Should be 0x00
/// </summary>
public byte Reserved0 { get; set; }
/// <summary>
/// Should be 0x00
/// </summary>
public byte Reserved1 { get; set; }
/// <summary>
/// Disc information and emergency brake units
/// </summary>
public PICDiscInformationUnit[] Units { get; set; }
#endregion
}
}

View File

@@ -1,114 +0,0 @@
namespace MPF.Core.Data
{
/// <see href="https://www.t10.org/ftp/t10/document.05/05-206r0.pdf"/>
/// <see href="https://github.com/aaru-dps/Aaru.Decoders/blob/devel/Bluray/DI.cs"/>
public class PICDiscInformationUnit
{
#region Fields
#region Header
/// <summary>
/// Disc Information Identifier "DI"
/// Emergency Brake Identifier "EB"
/// </summary>
public string DiscInformationIdentifier { get; set; }
/// <summary>
/// Disc Information Format
/// </summary>
public byte DiscInformationFormat { get; set; }
/// <summary>
/// Number of DI units in each DI block
/// </summary>
public byte NumberOfUnitsInBlock { get; set; }
/// <summary>
/// Should be 0x00
/// </summary>
public byte Reserved0 { get; set; }
/// <summary>
/// DI unit Sequence Number
/// </summary>
public byte SequenceNumber { get; set; }
/// <summary>
/// Number of bytes in use in this DI unit
/// </summary>
public byte BytesInUse { get; set; }
/// <summary>
/// Should be 0x00
/// </summary>
public byte Reserved1 { get; set; }
#endregion
// TODO: Write models for the dependent contents, if possible
#region Body
/// <summary>
/// Disc Type Identifier
/// = "BDO" for BD-ROM
/// = "BDU" for BD-ROM Ultra
/// = "BDW" for BD-RE
/// = "BDR" for BD-R
/// </summary>
public string DiscTypeIdentifier { get; set; }
/// <summary>
/// Disc Size/Class/Version
/// </summary>
public byte DiscSizeClassVersion { get; set; }
/// <summary>
/// DI Unit Format dependent contents
/// </summary>
/// <remarks>52 bytes for BD-ROM, 100 bytes for BD-R/RE</remarks>
public byte[] FormatDependentContents { get; set; }
#endregion
#region Trailer (BD-R/RE only)
/// <summary>
/// Disc Manufacturer ID
/// </summary>
/// <remarks>6 bytes</remarks>
public byte[] DiscManufacturerID { get; set; }
/// <summary>
/// Media Type ID
/// </summary>
/// <remarks>3 bytes</remarks>
public byte[] MediaTypeID { get; set; }
/// <summary>
/// Time Stamp
/// </summary>
public ushort TimeStamp { get; set; }
/// <summary>
/// Product Revision Number
/// </summary>
public byte ProductRevisionNumber { get; set; }
#endregion
#endregion
#region Constants
public const string DiscTypeIdentifierROM = "BDO";
public const string DiscTypeIdentifierROMUltra = "BDU";
public const string DiscTypeIdentifierReWritable = "BDW";
public const string DiscTypeIdentifierRecordable = "BDR";
#endregion
}
}
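The two removed classes above modelled the Blu-ray Disc Information (PIC) response field by field; per the commit list, 2.6.4 takes these models from a NuGet package instead. A minimal sketch of how the removed in-repo models fit together, using only the fields and constants declared above; the numeric values are illustrative, not read from a real disc.

using MPF.Core.Data;

internal static class PicSketch
{
    internal static PICDiscInformation BuildSample()
    {
        // One "DI" unit for a BD-ROM, using the header/body fields declared above
        var unit = new PICDiscInformationUnit
        {
            DiscInformationIdentifier = "DI",
            DiscInformationFormat = 0x01,           // illustrative
            NumberOfUnitsInBlock = 1,
            SequenceNumber = 0,
            BytesInUse = 64,                        // illustrative
            DiscTypeIdentifier = PICDiscInformationUnit.DiscTypeIdentifierROM, // "BDO"
            DiscSizeClassVersion = 0x00,
            FormatDependentContents = new byte[52], // 52 bytes for BD-ROM per the remarks
        };

        return new PICDiscInformation
        {
            DataStructureLength = 2048,             // 2048 bytes for BD-ROM per the remarks
            Units = new[] { unit },
        };
    }
}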

View File

@@ -1,32 +1,10 @@
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Core.Data
{
/// <summary>
/// Contains information specific to an XGD disc
/// </summary>
/// <remarks>
/// XGD1 XMID Format Information:
///
/// AABBBCCD
/// - AA => The two-ASCII-character publisher identifier (see GetPublisher for details)
/// - BBB => Game ID
/// - CC => Version number
/// - D => Region identifier (see GetRegion for details)
///
/// XGD2/3 XeMID Format Information:
///
/// AABCCCDDEFFGHH(IIIIIIII)
/// - AA => The two-ASCII-character publisher identifier (see GetPublisher for details)
/// - B => Platform identifier; 2 indicates Xbox 360.
/// - CCC => Game ID
/// - DD => SKU number (unique per SKU of a title)
/// - E => Region identifier (see GetRegion for details)
/// - FF => Base version; usually starts at 01 (can be 1 or 2 characters)
/// - G => Media type identifier (see GetMediaSubtype for details)
/// - HH => Disc number stored in [disc number][total discs] format
/// - IIIIIIII => 8-hex-digit certification submission identifier; usually on test discs only
/// </remarks>
public class XgdInfo
{
#region Fields
@@ -39,75 +17,17 @@ namespace MPF.Core.Data
/// <summary>
/// Raw XMID/XeMID string that all other information is derived from
/// </summary>
public string XMID { get; private set; }
public string RawXMID { get; private set; }
/// <summary>
/// 2-character publisher identifier
/// XGD1 XMID
/// </summary>
public string PublisherIdentifier { get; private set; }
public SabreTools.Models.Xbox.XMID XMID { get; private set; }
/// <summary>
/// Platform disc is made for, 2 indicates Xbox 360
/// XGD2/3 XeMID
/// </summary>
public char? PlatformIdentifier { get; private set; }
/// <summary>
/// Game ID
/// </summary>
public string GameID { get; private set; }
/// <summary>
/// For XGD1: Internal version number
/// For XGD2/3: Title-specific SKU
/// </summary>
public string SKU { get; private set; }
/// <summary>
/// Region identifier character
/// </summary>
public char RegionIdentifier { get; private set; }
/// <summary>
/// Base version of executables, usually starts at 01
/// </summary>
/// <remarks>
/// TODO: Check if this is always 2 characters for XGD2/3
/// </remarks>
public string BaseVersion { get; private set; }
/// <summary>
/// Media subtype identifier
/// </summary>
public char MediaSubtypeIdentifier { get; private set; }
/// <summary>
/// Disc number stored in [disc number][total discs] format
/// </summary>
public string DiscNumberIdentifier { get; private set; }
/// <summary>
/// 8-hex-digit certification submission identifier; usually on test discs only
/// </summary>
public string CertificationSubmissionIdentifier { get; private set; }
#endregion
#region Auto-Generated Information
/// <summary>
/// Human-readable name derived from the publisher identifier
/// </summary>
public string PublisherName => GetPublisher(this.PublisherIdentifier);
/// <summary>
/// Internally represented region
/// </summary>
public Region? InternalRegion => GetRegion(this.RegionIdentifier);
/// <summary>
/// Human-readable subtype derived from the media identifier
/// </summary>
public string MediaSubtype => GetMediaSubtype(this.MediaSubtypeIdentifier);
public SabreTools.Models.Xbox.XeMID XeMID { get; private set; }
#endregion
@@ -115,24 +35,23 @@ namespace MPF.Core.Data
/// Populate a set of XGD information from a Master ID (XMID/XeMID) string
/// </summary>
/// <param name="xmid">XMID/XeMID string representing the DMI information</param>
/// <param name="validate">True if value validation should be performed, false otherwise</param>
public XgdInfo(string xmid, bool validate = false)
public XgdInfo(string xmid)
{
this.Initialized = false;
if (string.IsNullOrWhiteSpace(xmid))
return;
this.XMID = xmid.TrimEnd('\0');
if (string.IsNullOrWhiteSpace(this.XMID))
this.RawXMID = xmid.TrimEnd('\0');
if (string.IsNullOrWhiteSpace(this.RawXMID))
return;
// XGD1 information is 8 characters
if (this.XMID.Length == 8)
this.Initialized = ParseXGD1XMID(this.XMID, validate);
if (this.RawXMID.Length == 8)
this.Initialized = ParseXGD1XMID(this.RawXMID);
// XGD2/3 information is semi-variable length
else if (this.XMID.Length == 13 || this.XMID.Length == 14 || this.XMID.Length == 21 || this.XMID.Length == 22)
this.Initialized = ParseXGD23XeMID(this.XMID, validate);
else if (this.RawXMID.Length == 13 || this.RawXMID.Length == 14 || this.RawXMID.Length == 21 || this.RawXMID.Length == 22)
this.Initialized = ParseXGD23XeMID(this.RawXMID);
}
/// <summary>
@@ -147,12 +66,12 @@ namespace MPF.Core.Data
try
{
// XGD1 doesn't use PlatformIdentifier
if (this.PlatformIdentifier == null)
return $"{this.PublisherIdentifier}-{this.GameID}";
if (XMID != null)
return $"{XMID.PublisherIdentifier}-{XMID.GameID}";
// XGD2/3 uses a specific identifier
else if (this.PlatformIdentifier == '2')
return $"{this.PublisherIdentifier}-{this.PlatformIdentifier}{this.GameID}";
else if (XeMID?.PlatformIdentifier == '2')
return $"{XeMID.PublisherIdentifier}-{XeMID.PlatformIdentifier}{XeMID.GameID}";
return null;
}
@@ -175,12 +94,12 @@ namespace MPF.Core.Data
try
{
// XGD1 doesn't use PlatformIdentifier
if (this.PlatformIdentifier == null)
return $"1.{this.SKU}";
if (XMID != null)
return $"1.{XMID.VersionNumber}";
// XGD2/3 uses a specific identifier
else if (this.PlatformIdentifier == '2')
return $"1.{this.SKU}";
else if (XeMID?.PlatformIdentifier == '2')
return $"1.{XeMID.SKU}";
return null;
}
@@ -193,217 +112,55 @@ namespace MPF.Core.Data
/// <summary>
/// Parse an XGD1 XMID string
/// </summary>
/// <param name="xmid">XMID string to attempt to parse</param>
/// <param name="validate">True if value validation should be performed, false otherwise</param>
/// <param name="rawXmid">XMID string to attempt to parse</param>
/// <returns>True if the XMID could be parsed, false otherwise</returns>
private bool ParseXGD1XMID(string xmid, bool validate)
private bool ParseXGD1XMID(string rawXmid)
{
if (xmid == null || xmid.Length != 8)
return false;
try
{
var xmid = new SabreTools.Serialization.Files.XMID().Deserialize(rawXmid);
if (xmid == null)
return false;
this.PublisherIdentifier = xmid.Substring(0, 2);
if (validate && string.IsNullOrEmpty(this.PublisherName))
this.XMID = xmid;
return true;
}
catch
{
return false;
this.GameID = xmid.Substring(2, 3);
this.SKU = xmid.Substring(5, 2);
this.RegionIdentifier = xmid[7];
if (validate && this.InternalRegion == null)
return false;
return true;
}
}
/// <summary>
/// Parse an XGD2/3 XeMID string
/// </summary>
/// <param name="xemid">XeMID string to attempt to parse</param>
/// <param name="validate">True if value validation should be performed, false otherwise</param>
/// <param name="rawXemid">XeMID string to attempt to parse</param>
/// <returns>True if the XeMID could be parsed, false otherwise</returns>
private bool ParseXGD23XeMID(string xemid, bool validate)
private bool ParseXGD23XeMID(string rawXemid)
{
if (xemid == null
|| (xemid.Length != 13 && xemid.Length != 14
&& xemid.Length != 21 && xemid.Length != 22))
return false;
this.PublisherIdentifier = xemid.Substring(0, 2);
if (validate && string.IsNullOrEmpty(this.PublisherName))
return false;
this.PlatformIdentifier = xemid[2];
if (validate && this.PlatformIdentifier != '2')
return false;
this.GameID = xemid.Substring(3, 3);
this.SKU = xemid.Substring(6, 2);
this.RegionIdentifier = xemid[8];
if (validate && this.InternalRegion == null)
return false;
if (xemid.Length == 13 || xemid.Length == 21)
try
{
this.BaseVersion = xemid.Substring(9, 1);
this.MediaSubtypeIdentifier = xemid[10];
if (validate && string.IsNullOrEmpty(this.MediaSubtype))
var xemid = new SabreTools.Serialization.Files.XeMID().Deserialize(rawXemid);
if (xemid == null)
return false;
this.DiscNumberIdentifier = xemid.Substring(11, 2);
this.XeMID = xemid;
return true;
}
else if (xemid.Length == 14 || xemid.Length == 22)
catch
{
this.BaseVersion = xemid.Substring(9, 2);
this.MediaSubtypeIdentifier = xemid[11];
if (validate && string.IsNullOrEmpty(this.MediaSubtype))
return false;
this.DiscNumberIdentifier = xemid.Substring(12, 2);
return false;
}
if (xemid.Length == 21)
this.CertificationSubmissionIdentifier = xemid.Substring(13);
else if (xemid.Length == 22)
this.CertificationSubmissionIdentifier = xemid.Substring(14);
return true;
}
#region Helpers
/// <summary>
/// Determine the XGD type based on the XGD2/3 media type identifier character
/// </summary>
/// <param name="mediaTypeIdentifier">Character denoting the media type</param>
/// <returns>Media subtype as a string, if possible</returns>
private static string GetMediaSubtype(char mediaTypeIdentifier)
{
switch (mediaTypeIdentifier)
{
case 'F': return "XGD3";
case 'X': return "XGD2";
case 'Z': return "Games on Demand / Marketplace Demo";
default: return null;
}
}
/// <summary>
/// Get the full name of the publisher from the 2-character identifier
/// </summary>
/// <param name="publisherIdentifier">Case-sensitive 2-character identifier</param>
/// <returns>Publisher name, if possible</returns>
/// <see cref="https://xboxdevwiki.net/Xbe#Title_ID"/>
private static string GetPublisher(string publisherIdentifier)
{
switch (publisherIdentifier)
{
case "AC": return "Acclaim Entertainment";
case "AH": return "ARUSH Entertainment";
case "AQ": return "Aqua System";
case "AS": return "ASK";
case "AT": return "Atlus";
case "AV": return "Activision";
case "AY": return "Aspyr Media";
case "BA": return "Bandai";
case "BL": return "Black Box";
case "BM": return "BAM! Entertainment";
case "BR": return "Broccoli Co.";
case "BS": return "Bethesda Softworks";
case "BU": return "Bunkasha Co.";
case "BV": return "Buena Vista Games";
case "BW": return "BBC Multimedia";
case "BZ": return "Blizzard";
case "CC": return "Capcom";
case "CK": return "Kemco Corporation"; // TODO: Confirm
case "CM": return "Codemasters";
case "CV": return "Crave Entertainment";
case "DC": return "DreamCatcher Interactive";
case "DX": return "Davilex";
case "EA": return "Electronic Arts (EA)";
case "EC": return "Encore inc";
case "EL": return "Enlight Software";
case "EM": return "Empire Interactive";
case "ES": return "Eidos Interactive";
case "FI": return "Fox Interactive";
case "FS": return "From Software";
case "GE": return "Genki Co.";
case "GV": return "Groove Games";
case "HE": return "Tru Blu (Entertainment division of Home Entertainment Suppliers)";
case "HP": return "Hip games";
case "HU": return "Hudson Soft";
case "HW": return "Highwaystar";
case "IA": return "Mad Catz Interactive";
case "IF": return "Idea Factory";
case "IG": return "Infogrames";
case "IL": return "Interlex Corporation";
case "IM": return "Imagine Media";
case "IO": return "Ignition Entertainment";
case "IP": return "Interplay Entertainment";
case "IX": return "InXile Entertainment"; // TODO: Confirm
case "JA": return "Jaleco";
case "JW": return "JoWooD";
case "KB": return "Kemco"; // TODO: Confirm
case "KI": return "Kids Station Inc."; // TODO: Confirm
case "KN": return "Konami";
case "KO": return "KOEI";
case "KU": return "Kobi and / or GAE (formerly Global A Entertainment)"; // TODO: Confirm
case "LA": return "LucasArts";
case "LS": return "Black Bean Games (publishing arm of Leader S.p.A.)";
case "MD": return "Metro3D";
case "ME": return "Medix";
case "MI": return "Microïds";
case "MJ": return "Majesco Entertainment";
case "MM": return "Myelin Media";
case "MP": return "MediaQuest"; // TODO: Confirm
case "MS": return "Microsoft Game Studios";
case "MW": return "Midway Games";
case "MX": return "Empire Interactive"; // TODO: Confirm
case "NK": return "NewKidCo";
case "NL": return "NovaLogic";
case "NM": return "Namco";
case "OX": return "Oxygen Interactive";
case "PC": return "Playlogic Entertainment";
case "PL": return "Phantagram Co., Ltd.";
case "RA": return "Rage";
case "SA": return "Sammy";
case "SC": return "SCi Games";
case "SE": return "SEGA";
case "SN": return "SNK";
case "SS": return "Simon & Schuster";
case "SU": return "Success Corporation";
case "SW": return "Swing! Deutschland";
case "TA": return "Takara";
case "TC": return "Tecmo";
case "TD": return "The 3DO Company (or just 3DO)";
case "TK": return "Takuyo";
case "TM": return "TDK Mediactive";
case "TQ": return "THQ";
case "TS": return "Titus Interactive";
case "TT": return "Take-Two Interactive Software";
case "US": return "Ubisoft";
case "VC": return "Victor Interactive Software";
case "VN": return "Vivendi Universal (just took Interplays publishing rights)"; // TODO: Confirm
case "VU": return "Vivendi Universal Games";
case "VV": return "Vivendi Universal Games"; // TODO: Confirm
case "WE": return "Wanadoo Edition";
case "WR": return "Warner Bros. Interactive Entertainment"; // TODO: Confirm
case "XI": return "XPEC Entertainment and Idea Factory";
case "XK": return "Xbox kiosk disk?"; // TODO: Confirm
case "XL": return "Xbox special bundled or live demo disk?"; // TODO: Confirm
case "XM": return "Evolved Games"; // TODO: Confirm
case "XP": return "XPEC Entertainment";
case "XR": return "Panorama";
case "YB": return "YBM Sisa (South-Korea)";
case "ZD": return "Zushi Games (formerly Zoo Digital Publishing)";
default: return null;
}
}
/// <summary>
/// Determine the region based on the XGD serial character
/// </summary>
/// <param name="region">Character denoting the region</param>
/// <returns>Region, if possible</returns>
private static Region? GetRegion(char region)
public static Region? GetRegion(char region)
{
switch (region)
{
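After the rewrite above, XgdInfo is a thin wrapper that hands the raw string to SabreTools.Serialization and exposes the parsed XMID/XeMID models. A small usage sketch following the XGD1 layout documented in the removed remarks (2-character publisher, 3-character game ID, 2-character version, 1-character region); the sample string is made up, not a real disc ID.

using System;
using MPF.Core.Data;

internal static class XgdInfoSketch
{
    internal static void Example()
    {
        // 8-character XGD1 XMID in AABBBCCD form; "MS12301W" is a fabricated sample
        var info = new XgdInfo("MS12301W");
        if (!info.Initialized || info.XMID == null)
            return;

        // Mirrors the XGD1 branches in the diff above
        Console.WriteLine($"Serial:  {info.XMID.PublisherIdentifier}-{info.XMID.GameID}");
        Console.WriteLine($"Version: 1.{info.XMID.VersionNumber}");
    }
}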

View File

@@ -1,14 +1,12 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net48;net6.0</TargetFrameworks>
<TargetFrameworks>net48;net6.0;net7.0</TargetFrameworks>
<RuntimeIdentifiers>win7-x64;win8-x64;win81-x64;win10-x64;linux-x64;osx-x64</RuntimeIdentifiers>
<Authors>Matt Nadareski;ReignStumble;Jakz</Authors>
<Copyright>Copyright (c)2019-2023</Copyright>
<RepositoryUrl>https://github.com/SabreTools/MPF</RepositoryUrl>
<Version>2.6.3</Version>
<AssemblyVersion>$(Version)</AssemblyVersion>
<FileVersion>$(Version)</FileVersion>
<Version>2.6.4</Version>
<IncludeSource>true</IncludeSource>
<IncludeSymbols>true</IncludeSymbols>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
@@ -41,13 +39,12 @@
</COMReference>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\RedumpLib\RedumpLib.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Management.Infrastructure" Version="2.0.0" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.Models" Version="1.1.2" />
<PackageReference Include="SabreTools.RedumpLib" Version="1.1.1" />
<PackageReference Include="SabreTools.Serialization" Version="1.1.2" />
<PackageReference Include="System.Configuration.ConfigurationManager" Version="7.0.0" />
<PackageReference Include="System.Runtime.CompilerServices.Unsafe" Version="6.0.0" />
</ItemGroup>

View File

@@ -2,7 +2,7 @@
using System.Collections.Generic;
using MPF.Core.Converters;
using MPF.Core.Data;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Core.Utilities
{

View File

@@ -2,7 +2,7 @@
using System.Reflection;
using MPF.Core.Data;
using Newtonsoft.Json.Linq;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Core.Utilities
{

View File

@@ -1,238 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
/// <remarks>
/// Information sourced from http://web.archive.org/web/20070221154246/http://www.goldenhawk.com/download/cdrwin.pdf
/// </remarks>
namespace MPF.CueSheets
{
/// <summary>
/// The audio or data files filetype
/// </summary>
public enum CueFileType
{
/// <summary>
/// Intel binary file (least significant byte first). Use for data files.
/// </summary>
BINARY,
/// <summary>
/// Motorola binary file (most significant byte first). Use for data files.
/// </summary>
MOTOROLA,
/// <summary>
/// Audio AIFF file (44.1KHz 16-bit stereo)
/// </summary>
AIFF,
/// <summary>
/// Audio WAVE file (44.1KHz 16-bit stereo)
/// </summary>
WAVE,
/// <summary>
/// Audio MP3 file (44.1KHz 16-bit stereo)
/// </summary>
MP3,
}
/// <summary>
/// Represents a single FILE in a cuesheet
/// </summary>
public class CueFile
{
/// <summary>
/// filename
/// </summary>
public string FileName { get; set; }
/// <summary>
/// filetype
/// </summary>
public CueFileType FileType { get; set; }
/// <summary>
/// List of TRACK in FILE
/// </summary>
public List<CueTrack> Tracks { get; set; }
/// <summary>
/// Create an empty FILE
/// </summary>
public CueFile()
{
}
/// <summary>
/// Fill a FILE from an array of lines
/// </summary>
/// <param name="fileName">File name to set</param>
/// <param name="fileType">File type to set</param>
/// <param name="cueLines">Lines array to pull from</param>
/// <param name="i">Reference to index in array</param>
/// <param name="throwOnError">True if errors throw an exception, false otherwise</param>
public CueFile(string fileName, string fileType, string[] cueLines, ref int i, bool throwOnError = false)
{
if (cueLines == null)
{
if (throwOnError)
throw new ArgumentNullException(nameof(cueLines));
return;
}
else if (i < 0 || i > cueLines.Length)
{
if (throwOnError)
throw new IndexOutOfRangeException();
return;
}
// Set the current fields
this.FileName = fileName.Trim('"');
this.FileType = GetFileType(fileType);
// Increment to start
i++;
for (; i < cueLines.Length; i++)
{
string line = cueLines[i].Trim();
string[] splitLine = line.Split(' ');
// If we have an empty line, we skip
if (string.IsNullOrWhiteSpace(line))
continue;
switch (splitLine[0])
{
// Read comments
case "REM":
// We ignore all comments for now
break;
// Read track information
case "TRACK":
if (splitLine.Length < 3)
{
if (throwOnError)
throw new FormatException($"TRACK line malformed: {line}");
continue;
}
if (this.Tracks == null)
this.Tracks = new List<CueTrack>();
var track = new CueTrack(splitLine[1], splitLine[2], cueLines, ref i);
if (track == default)
{
if (throwOnError)
throw new FormatException($"TRACK line malformed: {line}");
continue;
}
this.Tracks.Add(track);
break;
// Default means return
default:
i--;
return;
}
}
}
/// <summary>
/// Write the FILE out to a stream
/// </summary>
/// <param name="sw">StreamWriter to write to</param>
/// <param name="throwOnError">True if errors throw an exception, false otherwise</param>
public void Write(StreamWriter sw, bool throwOnError = false)
{
// If we don't have any tracks, it's invalid
if (this.Tracks == null)
{
if (throwOnError)
throw new ArgumentNullException(nameof(this.Tracks));
return;
}
else if (this.Tracks.Count == 0)
{
if (throwOnError)
throw new ArgumentException("No tracks provided to write");
return;
}
sw.WriteLine($"FILE \"{this.FileName}\" {FromFileType(this.FileType)}");
foreach (var track in Tracks)
{
track.Write(sw);
}
}
/// <summary>
/// Get the file type from a given string
/// </summary>
/// <param name="fileType">String to get value from</param>
/// <returns>CueFileType, if possible</returns>
private CueFileType GetFileType(string fileType)
{
switch (fileType.ToLowerInvariant())
{
case "binary":
return CueFileType.BINARY;
case "motorola":
return CueFileType.MOTOROLA;
case "aiff":
return CueFileType.AIFF;
case "wave":
return CueFileType.WAVE;
case "mp3":
return CueFileType.MP3;
default:
return CueFileType.BINARY;
}
}
/// <summary>
/// Get the string from a given file type
/// </summary>
/// <param name="fileType">CueFileType to get value from</param>
/// <returns>String, if possible (default BINARY)</returns>
private string FromFileType(CueFileType fileType)
{
switch (fileType)
{
case CueFileType.BINARY:
return "BINARY";
case CueFileType.MOTOROLA:
return "MOTOROLA";
case CueFileType.AIFF:
return "AIFF";
case CueFileType.WAVE:
return "WAVE";
case CueFileType.MP3:
return "MP3";
default:
return string.Empty;
}
}
}
}

View File

@@ -1,163 +0,0 @@
using System;
using System.IO;
using System.Linq;
/// <remarks>
/// Information sourced from http://web.archive.org/web/20070221154246/http://www.goldenhawk.com/download/cdrwin.pdf
/// </remarks>
namespace MPF.CueSheets
{
/// <summary>
/// Represents a single INDEX in a TRACK
/// </summary>
public class CueIndex
{
/// <summary>
/// INDEX number, between 0 and 99
/// </summary>
public int Index { get; set; }
/// <summary>
/// Starting time of INDEX in minutes
/// </summary>
public int Minutes { get; set; }
/// <summary>
/// Starting time of INDEX in seconds
/// </summary>
/// <remarks>There are 60 seconds in a minute</remarks>
public int Seconds { get; set; }
/// <summary>
/// Starting time of INDEX in frames.
/// </summary>
/// <remarks>There are 75 frames per second</remarks>
public int Frames { get; set; }
/// <summary>
/// Create an empty INDEX
/// </summary>
public CueIndex()
{
}
/// <summary>
/// Fill a INDEX from an array of lines
/// </summary>
/// <param name="index">Index to set</param>
/// <param name="startTime">Start time to set</param>
/// <param name="throwOnError">True if errors throw an exception, false otherwise</param>
public CueIndex(string index, string startTime, bool throwOnError = false)
{
// Set the current fields
if (!int.TryParse(index, out int parsedIndex))
{
if (throwOnError)
throw new ArgumentException($"Index was not a number: {index}");
return;
}
else if (parsedIndex < 0 || parsedIndex > 99)
{
if (throwOnError)
throw new IndexOutOfRangeException($"Index must be between 0 and 99: {parsedIndex}");
return;
}
// Ignore empty lines
if (string.IsNullOrWhiteSpace(startTime))
{
if (throwOnError)
throw new ArgumentException("Start time was null or whitespace");
return;
}
// Ignore lines that don't contain the correct information
if (startTime.Length != 8 || startTime.Count(c => c == ':') != 2)
{
if (throwOnError)
throw new FormatException($"Start time was not in a recognized format: {startTime}");
return;
}
// Split the line
string[] splitTime = startTime.Split(':');
if (splitTime.Length != 3)
{
if (throwOnError)
throw new FormatException($"Start time was not in a recognized format: {startTime}");
return;
}
// Parse the lengths
int[] lengthSegments = new int[3];
// Minutes
if (!int.TryParse(splitTime[0], out lengthSegments[0]))
{
if (throwOnError)
throw new FormatException($"Minutes segment was not a number: {splitTime[0]}");
return;
}
else if (lengthSegments[0] < 0)
{
if (throwOnError)
throw new IndexOutOfRangeException($"Minutes segment must be 0 or greater: {lengthSegments[0]}");
return;
}
// Seconds
if (!int.TryParse(splitTime[1], out lengthSegments[1]))
{
if (throwOnError)
throw new FormatException($"Seconds segment was not a number: {splitTime[1]}");
return;
}
else if (lengthSegments[1] < 0 || lengthSegments[1] > 60)
{
if (throwOnError)
throw new IndexOutOfRangeException($"Seconds segment must be between 0 and 60: {lengthSegments[1]}");
return;
}
// Frames
if (!int.TryParse(splitTime[2], out lengthSegments[2]))
{
if (throwOnError)
throw new FormatException($"Frames segment was not a number: {splitTime[2]}");
return;
}
else if (lengthSegments[2] < 0 || lengthSegments[2] > 75)
{
if (throwOnError)
throw new IndexOutOfRangeException($"Frames segment must be between 0 and 75: {lengthSegments[2]}");
return;
}
// Set the values
this.Index = parsedIndex;
this.Minutes = lengthSegments[0];
this.Seconds = lengthSegments[1];
this.Frames = lengthSegments[2];
}
/// <summary>
/// Write the INDEX out to a stream
/// </summary>
/// <param name="sw">StreamWriter to write to</param>
public void Write(StreamWriter sw)
{
sw.WriteLine($" INDEX {this.Index:D2} {this.Minutes:D2}:{this.Seconds:D2}:{this.Frames:D2}");
}
}
}
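The CueIndex remarks above fix the MSF convention: 60 seconds per minute and 75 frames per second. Converting an INDEX position to an absolute frame count is then a one-liner, e.g. 03:02:10 gives (3 * 60 + 2) * 75 + 10 = 13660 frames.

internal static class MsfSketch
{
    // Total frames for an mm:ss:ff position, per the CueIndex remarks (75 frames/second)
    internal static int ToFrames(int minutes, int seconds, int frames)
        => (minutes * 60 + seconds) * 75 + frames;
}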

View File

@@ -1,250 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
/// <remarks>
/// Information sourced from http://web.archive.org/web/20070221154246/http://www.goldenhawk.com/download/cdrwin.pdf
/// </remarks>
namespace MPF.CueSheets
{
/// <summary>
/// Represents a single cuesheet
/// </summary>
public class CueSheet
{
/// <summary>
/// CATALOG
/// </summary>
public string Catalog { get; set; }
/// <summary>
/// CDTEXTFILE
/// </summary>
public string CdTextFile { get; set; }
/// <summary>
/// PERFORMER
/// </summary>
public string Performer { get; set; }
/// <summary>
/// SONGWRITER
/// </summary>
public string Songwriter { get; set; }
/// <summary>
/// TITLE
/// </summary>
public string Title { get; set; }
/// <summary>
/// List of FILE in cuesheet
/// </summary>
public List<CueFile> Files { get; set; }
/// <summary>
/// Create an empty cuesheet
/// </summary>
public CueSheet()
{
}
/// <summary>
/// Create a cuesheet from a file, if possible
/// </summary>
/// <param name="filename"></param>
/// <param name="throwOnError">True if errors throw an exception, false otherwise</param>
public CueSheet(string filename, bool throwOnError = false)
{
// Check that the file exists
if (!File.Exists(filename))
return;
// Check the extension
string ext = Path.GetExtension(filename).TrimStart('.');
if (!string.Equals(ext, "cue", StringComparison.OrdinalIgnoreCase)
&& !string.Equals(ext, "txt", StringComparison.OrdinalIgnoreCase))
{
return;
}
// Open the file and begin reading
string[] cueLines = File.ReadAllLines(filename);
for (int i = 0; i < cueLines.Length; i++)
{
string line = cueLines[i].Trim();
// http://stackoverflow.com/questions/554013/regular-expression-to-split-on-spaces-unless-in-quotes
string[] splitLine = Regex
.Matches(line, @"[^\s""]+|""[^""]*""")
.Cast<Match>()
.Select(m => m.Groups[0].Value)
.ToArray();
// If we have an empty line, we skip
if (string.IsNullOrWhiteSpace(line))
continue;
switch (splitLine[0])
{
// Read comments
case "REM":
// We ignore all comments for now
break;
// Read MCN
case "CATALOG":
if (splitLine.Length < 2)
{
if (throwOnError)
throw new FormatException($"CATALOG line malformed: {line}");
continue;
}
this.Catalog = splitLine[1];
break;
// Read external CD-Text file path
case "CDTEXTFILE":
if (splitLine.Length < 2)
{
if (throwOnError)
throw new FormatException($"CDTEXTFILE line malformed: {line}");
continue;
}
this.CdTextFile = splitLine[1];
break;
// Read CD-Text enhanced performer
case "PERFORMER":
if (splitLine.Length < 2)
{
if (throwOnError)
throw new FormatException($"PERFORMER line malformed: {line}");
continue;
}
this.Performer = splitLine[1];
break;
// Read CD-Text enhanced songwriter
case "SONGWRITER":
if (splitLine.Length < 2)
{
if (throwOnError)
throw new FormatException($"SONGWRITER line malformed: {line}");
continue;
}
this.Songwriter = splitLine[1];
break;
// Read CD-Text enhanced title
case "TITLE":
if (splitLine.Length < 2)
{
if (throwOnError)
throw new FormatException($"TITLE line malformed: {line}");
continue;
}
this.Title = splitLine[1];
break;
// Read file information
case "FILE":
if (splitLine.Length < 3)
{
if (throwOnError)
throw new FormatException($"FILE line malformed: {line}");
continue;
}
if (this.Files == null)
this.Files = new List<CueFile>();
var file = new CueFile(splitLine[1], splitLine[2], cueLines, ref i);
if (file == default)
{
if (throwOnError)
throw new FormatException($"FILE line malformed: {line}");
continue;
}
this.Files.Add(file);
break;
}
}
}
/// <summary>
/// Write the cuesheet out to a file
/// </summary>
/// <param name="filename">File path to write to</param>
public void Write(string filename)
{
using (var fs = File.Open(filename, FileMode.Create, FileAccess.Write, FileShare.ReadWrite))
{
Write(fs);
}
}
/// <summary>
/// Write the cuesheet out to a stream
/// </summary>
/// <param name="stream">Stream to write to</param>
/// <param name="throwOnError">True if errors throw an exception, false otherwise</param>
public void Write(Stream stream, bool throwOnError = false)
{
// If we don't have any files, it's invalid
if (this.Files == null)
{
if (throwOnError)
throw new ArgumentNullException(nameof(this.Files));
return;
}
else if (this.Files.Count == 0)
{
if (throwOnError)
throw new ArgumentException("No files provided to write");
return;
}
using (var sw = new StreamWriter(stream, Encoding.ASCII, 1024, true))
{
if (!string.IsNullOrEmpty(this.Catalog))
sw.WriteLine($"CATALOG {this.Catalog}");
if (!string.IsNullOrEmpty(this.CdTextFile))
sw.WriteLine($"CDTEXTFILE {this.CdTextFile}");
if (!string.IsNullOrEmpty(this.Performer))
sw.WriteLine($"PERFORMER {this.Performer}");
if (!string.IsNullOrEmpty(this.Songwriter))
sw.WriteLine($"SONGWRITER {this.Songwriter}");
if (!string.IsNullOrEmpty(this.Title))
sw.WriteLine($"TITLE {this.Title}");
foreach (var file in Files)
{
file.Write(sw);
}
}
}
}
}

View File

@@ -1,554 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
/// <remarks>
/// Information sourced from http://web.archive.org/web/20070221154246/http://www.goldenhawk.com/download/cdrwin.pdf
/// </remarks>
namespace MPF.CueSheets
{
/// <summary>
/// Track datatype
/// </summary>
public enum CueTrackDataType
{
/// <summary>
/// AUDIO, Audio/Music (2352)
/// </summary>
AUDIO,
/// <summary>
/// CDG, Karaoke CD+G (2448)
/// </summary>
CDG,
/// <summary>
/// MODE1/2048, CD-ROM Mode1 Data (cooked)
/// </summary>
MODE1_2048,
/// <summary>
/// MODE1/2352 CD-ROM Mode1 Data (raw)
/// </summary>
MODE1_2352,
/// <summary>
/// MODE2/2336, CD-ROM XA Mode2 Data
/// </summary>
MODE2_2336,
/// <summary>
/// MODE2/2352, CD-ROM XA Mode2 Data
/// </summary>
MODE2_2352,
/// <summary>
/// CDI/2336, CD-I Mode2 Data
/// </summary>
CDI_2336,
/// <summary>
/// CDI/2352, CD-I Mode2 Data
/// </summary>
CDI_2352,
}
/// <summary>
/// Special subcode flags within a track
/// </summary>
[Flags]
public enum CueTrackFlag
{
/// <summary>
/// DCP, Digital copy permitted
/// </summary>
DCP = 1 << 0,
/// <summary>
/// 4CH, Four channel audio
/// </summary>
FourCH = 1 << 1,
/// <summary>
/// PRE, Pre-emphasis enabled (audio tracks only)
/// </summary>
PRE = 1 << 2,
/// <summary>
/// SCMS, Serial Copy Management System (not supported by all recorders)
/// </summary>
SCMS = 1 << 3,
/// <summary>
/// DATA, set for data files. This flag is set automatically based on the tracks filetype
/// </summary>
DATA = 1 << 4,
}
/// <summary>
/// Represents a single TRACK in a FILE
/// </summary>
public class CueTrack
{
/// <summary>
/// Track number. The range is 1 to 99.
/// </summary>
public int Number { get; set; }
/// <summary>
/// Track datatype
/// </summary>
public CueTrackDataType DataType { get; set; }
/// <summary>
/// FLAGS
/// </summary>
public CueTrackFlag Flags { get; set; }
/// <summary>
/// ISRC
/// </summary>
/// <remarks>12 characters in length</remarks>
public string ISRC { get; set; }
/// <summary>
/// PERFORMER
/// </summary>
public string Performer { get; set; }
/// <summary>
/// SONGWRITER
/// </summary>
public string Songwriter { get; set; }
/// <summary>
/// TITLE
/// </summary>
public string Title { get; set; }
/// <summary>
/// PREGAP
/// </summary>
public PreGap PreGap { get; set; }
/// <summary>
/// List of INDEX in TRACK
/// </summary>
/// <remarks>Must start with 0 or 1 and then sequential</remarks>
public List<CueIndex> Indices { get; set; }
/// <summary>
/// POSTGAP
/// </summary>
public PostGap PostGap { get; set; }
/// <summary>
/// Create an empty TRACK
/// </summary>
public CueTrack()
{
}
/// <summary>
/// Fill a TRACK from an array of lines
/// </summary>
/// <param name="number">Number to set</param>
/// <param name="dataType">Data type to set</param>
/// <param name="cueLines">Lines array to pull from</param>
/// <param name="i">Reference to index in array</param>
/// <param name="throwOnError">True if errors throw an exception, false otherwise</param>
public CueTrack(string number, string dataType, string[] cueLines, ref int i, bool throwOnError = false)
{
if (cueLines == null)
{
if (throwOnError)
throw new ArgumentNullException(nameof(cueLines));
return;
}
else if (i < 0 || i > cueLines.Length)
{
if (throwOnError)
throw new IndexOutOfRangeException();
return;
}
// Set the current fields
if (!int.TryParse(number, out int parsedNumber))
{
if (throwOnError)
throw new ArgumentException($"Number was not a number: {number}");
return;
}
else if (parsedNumber < 1 || parsedNumber > 99)
{
if (throwOnError)
throw new IndexOutOfRangeException($"Index must be between 1 and 99: {parsedNumber}");
return;
}
this.Number = parsedNumber;
this.DataType = GetDataType(dataType);
// Increment to start
i++;
for (; i < cueLines.Length; i++)
{
string line = cueLines[i].Trim();
string[] splitLine = line.Split(' ');
// If we have an empty line, we skip
if (string.IsNullOrWhiteSpace(line))
continue;
switch (splitLine[0])
{
// Read comments
case "REM":
// We ignore all comments for now
break;
// Read flag information
case "FLAGS":
if (splitLine.Length < 2)
{
if (throwOnError)
throw new FormatException($"FLAGS line malformed: {line}");
continue;
}
this.Flags = GetFlags(splitLine);
break;
// Read International Standard Recording Code
case "ISRC":
if (splitLine.Length < 2)
{
if (throwOnError)
throw new FormatException($"ISRC line malformed: {line}");
continue;
}
this.ISRC = splitLine[1];
break;
// Read CD-Text enhanced performer
case "PERFORMER":
if (splitLine.Length < 2)
{
if (throwOnError)
throw new FormatException($"PERFORMER line malformed: {line}");
continue;
}
this.Performer = splitLine[1];
break;
// Read CD-Text enhanced songwriter
case "SONGWRITER":
if (splitLine.Length < 2)
{
if (throwOnError)
throw new FormatException($"SONGWRITER line malformed: {line}");
continue;
}
this.Songwriter = splitLine[1];
break;
// Read CD-Text enhanced title
case "TITLE":
if (splitLine.Length < 2)
{
if (throwOnError)
throw new FormatException($"TITLE line malformed: {line}");
continue;
}
this.Title = splitLine[1];
break;
// Read pregap information
case "PREGAP":
if (splitLine.Length < 2)
{
if (throwOnError)
throw new FormatException($"PREGAP line malformed: {line}");
continue;
}
var pregap = new PreGap(splitLine[1]);
if (pregap == default)
{
if (throwOnError)
throw new FormatException($"PREGAP line malformed: {line}");
continue;
}
this.PreGap = pregap;
break;
// Read index information
case "INDEX":
if (splitLine.Length < 3)
{
if (throwOnError)
throw new FormatException($"INDEX line malformed: {line}");
continue;
}
if (this.Indices == null)
this.Indices = new List<CueIndex>();
var index = new CueIndex(splitLine[1], splitLine[2]);
if (index == default)
{
if (throwOnError)
throw new FormatException($"INDEX line malformed: {line}");
continue;
}
this.Indices.Add(index);
break;
// Read postgap information
case "POSTGAP":
if (splitLine.Length < 2)
{
if (throwOnError)
throw new FormatException($"POSTGAP line malformed: {line}");
continue;
}
var postgap = new PostGap(splitLine[1]);
if (postgap == default)
{
if (throwOnError)
throw new FormatException($"POSTGAP line malformed: {line}");
continue;
}
this.PostGap = postgap;
break;
// Default means return
default:
i--;
return;
}
}
}
/// <summary>
/// Write the TRACK out to a stream
/// </summary>
/// <param name="sw">StreamWriter to write to</param
/// <param name="throwOnError">True if errors throw an exception, false otherwise</param>
public void Write(StreamWriter sw, bool throwOnError = false)
{
// If we don't have any indices, it's invalid
if (this.Indices == null)
{
if (throwOnError)
throw new ArgumentNullException(nameof(this.Indices));
return;
}
else if (this.Indices.Count == 0)
{
if (throwOnError)
throw new ArgumentException("No indices provided to write");
return;
}
sw.WriteLine($" TRACK {this.Number:D2} {FromDataType(this.DataType)}");
if (this.Flags != 0)
sw.WriteLine($" FLAGS {FromFlags(this.Flags)}");
if (!string.IsNullOrEmpty(this.ISRC))
sw.WriteLine($"ISRC {this.ISRC}");
if (!string.IsNullOrEmpty(this.Performer))
sw.WriteLine($"PERFORMER {this.Performer}");
if (!string.IsNullOrEmpty(this.Songwriter))
sw.WriteLine($"SONGWRITER {this.Songwriter}");
if (!string.IsNullOrEmpty(this.Title))
sw.WriteLine($"TITLE {this.Title}");
if (this.PreGap != null)
this.PreGap.Write(sw);
foreach (var index in Indices)
{
index.Write(sw);
}
if (this.PostGap != null)
this.PostGap.Write(sw);
}
/// <summary>
/// Get the data type from a given string
/// </summary>
/// <param name="dataType">String to get value from</param>
/// <returns>CueTrackDataType, if possible (default AUDIO)</returns>
private CueTrackDataType GetDataType(string dataType)
{
switch (dataType.ToLowerInvariant())
{
case "audio":
return CueTrackDataType.AUDIO;
case "cdg":
return CueTrackDataType.CDG;
case "mode1/2048":
return CueTrackDataType.MODE1_2048;
case "mode1/2352":
return CueTrackDataType.MODE1_2352;
case "mode2/2336":
return CueTrackDataType.MODE2_2336;
case "mode2/2352":
return CueTrackDataType.MODE2_2352;
case "cdi/2336":
return CueTrackDataType.CDI_2336;
case "cdi/2352":
return CueTrackDataType.CDI_2352;
default:
return CueTrackDataType.AUDIO;
}
}
/// <summary>
/// Get the string from a given data type
/// </summary>
/// <param name="dataType">CueTrackDataType to get value from</param>
/// <returns>string, if possible</returns>
private string FromDataType(CueTrackDataType dataType)
{
switch (dataType)
{
case CueTrackDataType.AUDIO:
return "AUDIO";
case CueTrackDataType.CDG:
return "CDG";
case CueTrackDataType.MODE1_2048:
return "MODE1/2048";
case CueTrackDataType.MODE1_2352:
return "MODE1/2352";
case CueTrackDataType.MODE2_2336:
return "MODE2/2336";
case CueTrackDataType.MODE2_2352:
return "MODE2/2352";
case CueTrackDataType.CDI_2336:
return "CDI/2336";
case CueTrackDataType.CDI_2352:
return "CDI/2352";
default:
return string.Empty;
}
}
/// <summary>
/// Get the flag value for an array of strings
/// </summary>
/// <param name="flagStrings">Possible flags as strings</param>
/// <returns>CueTrackFlag value representing the strings, if possible</returns>
private CueTrackFlag GetFlags(string[] flagStrings)
{
CueTrackFlag flag = 0;
foreach (string flagString in flagStrings)
{
switch (flagString.ToLowerInvariant())
{
case "flags":
// No-op since this is the start of the line
break;
case "dcp":
flag |= CueTrackFlag.DCP;
break;
case "4ch":
flag |= CueTrackFlag.FourCH;
break;
case "pre":
flag |= CueTrackFlag.PRE;
break;
case "scms":
flag |= CueTrackFlag.SCMS;
break;
case "data":
flag |= CueTrackFlag.DATA;
break;
}
}
return flag;
}
/// <summary>
/// Get the string value for a set of track flags
/// </summary>
/// <param name="flags">CueTrackFlag to get value from</param>
/// <returns>String value representing the CueTrackFlag, if possible</returns>
private string FromFlags(CueTrackFlag flags)
{
string outputFlagString = string.Empty;
if (flags.HasFlag(CueTrackFlag.DCP))
outputFlagString += "DCP ";
if (flags.HasFlag(CueTrackFlag.FourCH))
outputFlagString += "4CH ";
if (flags.HasFlag(CueTrackFlag.PRE))
outputFlagString += "PRE ";
if (flags.HasFlag(CueTrackFlag.SCMS))
outputFlagString += "SCMS ";
if (flags.HasFlag(CueTrackFlag.DATA))
outputFlagString += "DATA ";
return outputFlagString.Trim();
}
}
}
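Taken together, the removed cuesheet classes (replaced per the "Migrate to Nuget package for cuesheets" commit) modelled a CDRWIN cuesheet as CueSheet, CueFile, CueTrack, and CueIndex. A minimal sketch of how those in-repo models were wired up before the migration, using only members shown above; file names are placeholders.

using System.Collections.Generic;
using MPF.CueSheets;

internal static class CueSheetSketch
{
    internal static void WriteMinimalSheet()
    {
        var sheet = new CueSheet
        {
            Files = new List<CueFile>
            {
                new CueFile
                {
                    FileName = "disc.bin", // placeholder
                    FileType = CueFileType.BINARY,
                    Tracks = new List<CueTrack>
                    {
                        new CueTrack
                        {
                            Number = 1,
                            DataType = CueTrackDataType.MODE1_2352,
                            Indices = new List<CueIndex>
                            {
                                new CueIndex { Index = 1, Minutes = 0, Seconds = 0, Frames = 0 },
                            },
                        },
                    },
                },
            },
        };

        // Produces the same FILE/TRACK/INDEX lines the parser above reads back
        sheet.Write("disc.cue"); // placeholder path
    }
}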

View File

@@ -1,31 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net48;net6.0</TargetFrameworks>
<RuntimeIdentifiers>win7-x64;win8-x64;win81-x64;win10-x64;linux-x64;osx-x64</RuntimeIdentifiers>
<Authors>Matt Nadareski;ReignStumble;Jakz</Authors>
<Copyright>Copyright (c)2019-2023</Copyright>
<RepositoryUrl>https://github.com/SabreTools/MPF</RepositoryUrl>
<Version>2.6.3</Version>
<AssemblyVersion>$(Version)</AssemblyVersion>
<FileVersion>$(Version)</FileVersion>
<IncludeSource>true</IncludeSource>
<IncludeSymbols>true</IncludeSymbols>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
<PropertyGroup>
<NrtRevisionFormat>$(Version)-{chash:8}</NrtRevisionFormat>
<NrtResolveSimpleAttributes>true</NrtResolveSimpleAttributes>
<NrtShowRevision>false</NrtShowRevision>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="System.Runtime.CompilerServices.Unsafe" Version="6.0.0" />
<PackageReference Include="Unclassified.NetRevisionTask" Version="0.4.3">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
</Project>

View File

@@ -1,139 +0,0 @@
using System;
using System.IO;
using System.Linq;
/// <remarks>
/// Information sourced from http://web.archive.org/web/20070221154246/http://www.goldenhawk.com/download/cdrwin.pdf
/// </remarks>
namespace MPF.CueSheets
{
/// <summary>
/// Represents POSTGAP information of a track
/// </summary>
public class PostGap
{
/// <summary>
/// Length of POSTGAP in minutes
/// </summary>
public int Minutes { get; set; }
/// <summary>
/// Length of POSTGAP in seconds
/// </summary>
/// <remarks>There are 60 seconds in a minute</remarks>
public int Seconds { get; set; }
/// <summary>
/// Length of POSTGAP in frames.
/// </summary>
/// <remarks>There are 75 frames per second</remarks>
public int Frames { get; set; }
/// <summary>
/// Create an empty POSTGAP
/// </summary>
public PostGap()
{
}
/// <summary>
/// Create a POSTGAP from a mm:ss:ff length
/// </summary>
/// <param name="length">String to get length information from</param>
/// <param name="throwOnError">True if errors throw an exception, false otherwise</param>
public PostGap(string length, bool throwOnError = false)
{
// Ignore empty lines
if (string.IsNullOrWhiteSpace(length))
{
if (throwOnError)
throw new ArgumentException("Length was null or whitespace");
return;
}
// Ignore lines that don't contain the correct information
if (length.Length != 8 || length.Count(c => c == ':') != 2)
{
if (throwOnError)
throw new FormatException($"Length was not in a recognized format: {length}");
return;
}
// Split the line
string[] splitLength = length.Split(':');
if (splitLength.Length != 3)
{
if (throwOnError)
throw new FormatException($"Length was not in a recognized format: {length}");
return;
}
// Parse the lengths
int[] lengthSegments = new int[3];
// Minutes
if (!int.TryParse(splitLength[0], out lengthSegments[0]))
{
if (throwOnError)
throw new FormatException($"Minutes segment was not a number: {splitLength[0]}");
return;
}
else if (lengthSegments[0] < 0)
{
if (throwOnError)
throw new IndexOutOfRangeException($"Minutes segment must be 0 or greater: {lengthSegments[0]}");
return;
}
// Seconds
if (!int.TryParse(splitLength[1], out lengthSegments[1]))
{
if (throwOnError)
throw new FormatException($"Seconds segment was not a number: {splitLength[1]}");
return;
}
else if (lengthSegments[1] < 0 || lengthSegments[1] > 60)
{
if (throwOnError)
throw new IndexOutOfRangeException($"Seconds segment must be between 0 and 60: {lengthSegments[1]}");
return;
}
// Frames
if (!int.TryParse(splitLength[2], out lengthSegments[2]))
{
if (throwOnError)
throw new FormatException($"Frames segment was not a number: {splitLength[2]}");
return;
}
else if (lengthSegments[2] < 0 || lengthSegments[2] > 75)
{
if (throwOnError)
throw new IndexOutOfRangeException($"Frames segment must be between 0 and 75: {lengthSegments[2]}");
return;
}
// Set the values
this.Minutes = lengthSegments[0];
this.Seconds = lengthSegments[1];
this.Frames = lengthSegments[2];
}
/// <summary>
/// Write the POSTGAP out to a stream
/// </summary>
/// <param name="sw">StreamWriter to write to</param>
public void Write(StreamWriter sw)
{
sw.WriteLine($" POSTGAP {this.Minutes:D2}:{this.Seconds:D2}:{this.Frames:D2}");
}
}
}
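For context on the removed class above: the constructor only accepts strict mm:ss:ff lengths (eight characters, two colons, 75 frames per second). A minimal usage sketch under that assumption; the demo class name and file path are illustrative only:

using System.IO;
using MPF.CueSheets;

internal static class PostGapDemo
{
    internal static void AppendToCue(string cuePath)
    {
        // "00:02:37" parses to Minutes = 0, Seconds = 2, Frames = 37
        var gap = new PostGap("00:02:37", throwOnError: true);

        // Appends an indented "POSTGAP 00:02:37" line to an existing cuesheet
        using (var sw = new StreamWriter(cuePath, append: true))
            gap.Write(sw);
    }
}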

View File

@@ -1,140 +0,0 @@
using System;
using System.IO;
using System.Linq;
/// <remarks>
/// Information sourced from http://web.archive.org/web/20070221154246/http://www.goldenhawk.com/download/cdrwin.pdf
/// </remarks>
namespace MPF.CueSheets
{
/// <summary>
/// Represents PREGAP information of a track
/// </summary>
public class PreGap
{
/// <summary>
/// Length of PREGAP in minutes
/// </summary>
public int Minutes { get; set; }
/// <summary>
/// Length of PREGAP in seconds
/// </summary>
/// <remarks>There are 60 seconds in a minute</remarks>
public int Seconds { get; set; }
/// <summary>
/// Length of PREGAP in frames.
/// </summary>
/// <remarks>There are 75 frames per second</remarks>
public int Frames { get; set; }
/// <summary>
/// Create an empty PREGAP
/// </summary>
public PreGap()
{
}
/// <summary>
/// Create a PREGAP from a mm:ss:ff length
/// </summary>
/// <param name="length">String to get length information from</param>
/// <param name="throwOnError">True if errors throw an exception, false otherwise</param>
public PreGap(string length, bool throwOnError = false)
{
// Ignore empty lines
if (string.IsNullOrWhiteSpace(length))
{
if (throwOnError)
throw new ArgumentException("Length was null or whitespace");
return;
}
// Ignore lines that don't contain the correct information
if (length.Length != 8 || length.Count(c => c == ':') != 2)
{
if (throwOnError)
throw new FormatException($"Length was not in a recognized format: {length}");
return;
}
// Split the line
string[] splitLength = length.Split(':');
if (splitLength.Length != 3)
{
if (throwOnError)
throw new FormatException($"Length was not in a recognized format: {length}");
return;
}
// Parse the lengths
int[] lengthSegments = new int[3];
// Minutes
if (!int.TryParse(splitLength[0], out lengthSegments[0]))
{
if (throwOnError)
throw new FormatException($"Minutes segment was not a number: {splitLength[0]}");
return;
}
else if (lengthSegments[0] < 0)
{
if (throwOnError)
throw new IndexOutOfRangeException($"Minutes segment must be 0 or greater: {lengthSegments[0]}");
return;
}
// Seconds
if (!int.TryParse(splitLength[1], out lengthSegments[1]))
{
if (throwOnError)
throw new FormatException($"Seconds segment was not a number: {splitLength[1]}");
return;
}
else if (lengthSegments[1] < 0 || lengthSegments[1] > 60)
{
if (throwOnError)
throw new IndexOutOfRangeException($"Seconds segment must be between 0 and 60: {lengthSegments[1]}");
return;
}
// Frames
if (!int.TryParse(splitLength[2], out lengthSegments[2]))
{
if (throwOnError)
throw new FormatException($"Frames segment was not a number: {splitLength[2]}");
return;
}
else if (lengthSegments[2] < 0 || lengthSegments[2] > 75)
{
if (throwOnError)
throw new IndexOutOfRangeException($"Frames segment must be between 0 and 75: {lengthSegments[2]}");
return;
}
// Set the values
this.Minutes = lengthSegments[0];
this.Seconds = lengthSegments[1];
this.Frames = lengthSegments[2];
}
/// <summary>
/// Write the PREGAP out to a stream
/// </summary>
/// <param name="sw">StreamWriter to write to</param>
public void Write(StreamWriter sw)
{
sw.WriteLine($" PREGAP {this.Minutes:D2}:{this.Seconds:D2}:{this.Frames:D2}");
}
}
}

View File

@@ -7,7 +7,7 @@ using BurnOutSharp;
using MPF.Core.Data;
using MPF.Core.Utilities;
using MPF.Modules;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Library
{
@@ -169,10 +169,6 @@ namespace MPF.Library
this.Parameters = new Modules.Aaru.Parameters(parameters) { ExecutablePath = Options.AaruPath };
break;
case InternalProgram.DD:
this.Parameters = new Modules.DD.Parameters(parameters) { ExecutablePath = Options.DDPath };
break;
case InternalProgram.DiscImageCreator:
this.Parameters = new Modules.DiscImageCreator.Parameters(parameters) { ExecutablePath = Options.DiscImageCreatorPath };
break;
@@ -226,10 +222,6 @@ namespace MPF.Library
Parameters = new Modules.Aaru.Parameters(System, Type, Drive.Letter, this.OutputPath, driveSpeed, Options);
break;
case InternalProgram.DD:
Parameters = new Modules.DD.Parameters(System, Type, Drive.Letter, this.OutputPath, driveSpeed, Options);
break;
case InternalProgram.DiscImageCreator:
Parameters = new Modules.DiscImageCreator.Parameters(System, Type, Drive.Letter, this.OutputPath, driveSpeed, Options);
break;

View File

@@ -1,6 +1,5 @@
using System;
using System.Collections.Generic;
using System.Drawing.Drawing2D;
using System.IO;
using System.IO.Compression;
using System.Linq;
@@ -14,8 +13,9 @@ using MPF.Core.Data;
using MPF.Core.Utilities;
using MPF.Modules;
using Newtonsoft.Json;
using RedumpLib.Data;
using RedumpLib.Web;
using SabreTools.Models.PIC;
using SabreTools.RedumpLib.Data;
using SabreTools.RedumpLib.Web;
using Formatting = Newtonsoft.Json.Formatting;
namespace MPF.Library
@@ -84,9 +84,17 @@ namespace MPF.Library
Serial = options.AddPlaceholders ? Template.RequiredIfExistsValue : string.Empty,
Barcode = options.AddPlaceholders ? Template.OptionalValue : string.Empty,
Contents = string.Empty,
#if NET48
ContentsSpecialFields = new Dictionary<SiteCode?, string>(),
#else
ContentsSpecialFields = new Dictionary<SiteCode, string>(),
#endif
Comments = string.Empty,
#if NET48
CommentsSpecialFields = new Dictionary<SiteCode?, string>(),
#else
CommentsSpecialFields = new Dictionary<SiteCode, string>(),
#endif
},
VersionAndEditions = new VersionAndEditionsSection()
{
@@ -101,7 +109,7 @@ namespace MPF.Library
// Get a list of matching IDs for each line in the DAT
if (!string.IsNullOrEmpty(info.TracksAndWriteOffsets.ClrMameProData) && options.HasRedumpLogin)
#if NET48 || NETSTANDARD2_1
#if NET48
FillFromRedump(options, info, resultProgress);
#else
_ = await FillFromRedump(options, info, resultProgress);
@@ -569,7 +577,7 @@ namespace MPF.Library
}
}
#endregion
#endregion
#region Information Output
@@ -616,7 +624,7 @@ namespace MPF.Library
foreach (string file in files)
{
string entryName = file.Substring(outputDirectory.Length).TrimStart(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar);
#if NET48 || NETSTANDARD2_1
#if NET48
zf.CreateEntryFromFile(file, entryName, CompressionLevel.Optimal);
#else
zf.CreateEntryFromFile(file, entryName, CompressionLevel.SmallestSize);
@@ -912,13 +920,13 @@ namespace MPF.Library
return $"{mediaType.LongName()}-128";
else if (layerbreak2 != default)
return $"{mediaType.LongName()}-100";
else if (layerbreak != default && picIdentifier == PICDiscInformationUnit.DiscTypeIdentifierROMUltra)
else if (layerbreak != default && picIdentifier == SabreTools.Models.PIC.Constants.DiscTypeIdentifierROMUltra)
return $"{mediaType.LongName()}-66";
else if (layerbreak != default && size > 53_687_063_712)
return $"{mediaType.LongName()}-66";
else if (layerbreak != default)
return $"{mediaType.LongName()}-50";
else if (picIdentifier == PICDiscInformationUnit.DiscTypeIdentifierROMUltra)
else if (picIdentifier == SabreTools.Models.PIC.Constants.DiscTypeIdentifierROMUltra)
return $"{mediaType.LongName()}-33";
else if (size > 26_843_531_856)
return $"{mediaType.LongName()}-33";
@@ -1626,13 +1634,13 @@ namespace MPF.Library
info.CommonDiscInfo.Media = DiscType.BD128;
else if (info.SizeAndChecksums.Layerbreak2 != default)
info.CommonDiscInfo.Media = DiscType.BD100;
else if (info.SizeAndChecksums.Layerbreak != default && info.SizeAndChecksums.PICIdentifier == PICDiscInformationUnit.DiscTypeIdentifierROMUltra)
else if (info.SizeAndChecksums.Layerbreak != default && info.SizeAndChecksums.PICIdentifier == SabreTools.Models.PIC.Constants.DiscTypeIdentifierROMUltra)
info.CommonDiscInfo.Media = DiscType.BD66;
else if (info.SizeAndChecksums.Layerbreak != default && info.SizeAndChecksums.Size > 50_050_629_632)
info.CommonDiscInfo.Media = DiscType.BD66;
else if (info.SizeAndChecksums.Layerbreak != default)
info.CommonDiscInfo.Media = DiscType.BD50;
else if (info.SizeAndChecksums.PICIdentifier == PICDiscInformationUnit.DiscTypeIdentifierROMUltra)
else if (info.SizeAndChecksums.PICIdentifier == SabreTools.Models.PIC.Constants.DiscTypeIdentifierROMUltra)
info.CommonDiscInfo.Media = DiscType.BD33;
else if (info.SizeAndChecksums.Size > 25_025_314_816)
info.CommonDiscInfo.Media = DiscType.BD33;
@@ -1889,7 +1897,7 @@ namespace MPF.Library
/// <param name="wc">RedumpWebClient for making the connection</param>
/// <param name="info">Existing SubmissionInfo object to fill</param>
/// <param name="id">Redump disc ID to retrieve</param>
#if NET48 || NETSTANDARD2_1
#if NET48
private static bool FillFromId(RedumpWebClient wc, SubmissionInfo info, int id)
{
string discData = wc.DownloadSingleSiteID(id);
@@ -1904,7 +1912,7 @@ namespace MPF.Library
#endif
// Title, Disc Number/Letter, Disc Title
var match = Constants.TitleRegex.Match(discData);
var match = SabreTools.RedumpLib.Data.Constants.TitleRegex.Match(discData);
if (match.Success)
{
string title = WebUtility.HtmlDecode(match.Groups[1].Value);
@@ -1914,7 +1922,7 @@ namespace MPF.Library
if (firstParenLocation >= 0)
{
info.CommonDiscInfo.Title = title.Substring(0, firstParenLocation);
var subMatches = Constants.DiscNumberLetterRegex.Matches(title);
var subMatches = SabreTools.RedumpLib.Data.Constants.DiscNumberLetterRegex.Matches(title);
foreach (Match subMatch in subMatches)
{
var subMatchValue = subMatch.Groups[1].Value;
@@ -1936,14 +1944,14 @@ namespace MPF.Library
}
// Foreign Title
match = Constants.ForeignTitleRegex.Match(discData);
match = SabreTools.RedumpLib.Data.Constants.ForeignTitleRegex.Match(discData);
if (match.Success)
info.CommonDiscInfo.ForeignTitleNonLatin = WebUtility.HtmlDecode(match.Groups[1].Value);
else
info.CommonDiscInfo.ForeignTitleNonLatin = null;
// Category
match = Constants.CategoryRegex.Match(discData);
match = SabreTools.RedumpLib.Data.Constants.CategoryRegex.Match(discData);
if (match.Success)
info.CommonDiscInfo.Category = Extensions.ToDiscCategory(match.Groups[1].Value);
else
@@ -1952,13 +1960,13 @@ namespace MPF.Library
// Region
if (info.CommonDiscInfo.Region == null)
{
match = Constants.RegionRegex.Match(discData);
match = SabreTools.RedumpLib.Data.Constants.RegionRegex.Match(discData);
if (match.Success)
info.CommonDiscInfo.Region = Extensions.ToRegion(match.Groups[1].Value);
}
// Languages
var matches = Constants.LanguagesRegex.Matches(discData);
var matches = SabreTools.RedumpLib.Data.Constants.LanguagesRegex.Matches(discData);
if (matches.Count > 0)
{
List<Language?> tempLanguages = new List<Language?>();
@@ -1977,7 +1985,7 @@ namespace MPF.Library
// Error count
if (string.IsNullOrEmpty(info.CommonDiscInfo.ErrorsCount))
{
match = Constants.ErrorCountRegex.Match(discData);
match = SabreTools.RedumpLib.Data.Constants.ErrorCountRegex.Match(discData);
if (match.Success)
info.CommonDiscInfo.ErrorsCount = match.Groups[1].Value;
}
@@ -1985,13 +1993,13 @@ namespace MPF.Library
// Version
if (info.VersionAndEditions.Version == null)
{
match = Constants.VersionRegex.Match(discData);
match = SabreTools.RedumpLib.Data.Constants.VersionRegex.Match(discData);
if (match.Success)
info.VersionAndEditions.Version = $"(VERIFY THIS) {WebUtility.HtmlDecode(match.Groups[1].Value)}";
}
// Dumpers
matches = Constants.DumpersRegex.Matches(discData);
matches = SabreTools.RedumpLib.Data.Constants.DumpersRegex.Matches(discData);
if (matches.Count > 0)
{
// Start with any currently listed dumpers
@@ -2011,7 +2019,7 @@ namespace MPF.Library
// TODO: Unify handling of fields that can include site codes (Comments/Contents)
// Comments
match = Constants.CommentsRegex.Match(discData);
match = SabreTools.RedumpLib.Data.Constants.CommentsRegex.Match(discData);
if (match.Success)
{
// Process the old comments block
@@ -2050,6 +2058,10 @@ namespace MPF.Library
bool foundTag = false;
foreach (SiteCode? siteCode in Enum.GetValues(typeof(SiteCode)))
{
// If we have a null site code, just skip
if (siteCode == null)
continue;
// If the line doesn't contain this tag, just skip
if (!commentLine.Contains(siteCode.ShortName()))
continue;
@@ -2096,12 +2108,12 @@ namespace MPF.Library
}
// If we don't already have this site code, add it to the dictionary
if (!info.CommonDiscInfo.CommentsSpecialFields.ContainsKey(siteCode))
info.CommonDiscInfo.CommentsSpecialFields[siteCode] = $"(VERIFY THIS) {commentLine.Replace(siteCode.ShortName(), string.Empty).Trim()}";
if (!info.CommonDiscInfo.CommentsSpecialFields.ContainsKey(siteCode.Value))
info.CommonDiscInfo.CommentsSpecialFields[siteCode.Value] = $"(VERIFY THIS) {commentLine.Replace(siteCode.ShortName(), string.Empty).Trim()}";
// Otherwise, append the value to the existing key
else
info.CommonDiscInfo.CommentsSpecialFields[siteCode] += $", {commentLine.Replace(siteCode.ShortName(), string.Empty).Trim()}";
info.CommonDiscInfo.CommentsSpecialFields[siteCode.Value] += $", {commentLine.Replace(siteCode.ShortName(), string.Empty).Trim()}";
break;
}
@@ -2111,10 +2123,10 @@ namespace MPF.Library
{
if (addToLast && lastSiteCode != null)
{
if (!string.IsNullOrWhiteSpace(info.CommonDiscInfo.CommentsSpecialFields[lastSiteCode]))
info.CommonDiscInfo.CommentsSpecialFields[lastSiteCode] += "\n";
if (!string.IsNullOrWhiteSpace(info.CommonDiscInfo.CommentsSpecialFields[lastSiteCode.Value]))
info.CommonDiscInfo.CommentsSpecialFields[lastSiteCode.Value] += "\n";
info.CommonDiscInfo.CommentsSpecialFields[lastSiteCode] += commentLine;
info.CommonDiscInfo.CommentsSpecialFields[lastSiteCode.Value] += commentLine;
}
else
{
@@ -2128,7 +2140,7 @@ namespace MPF.Library
}
// Contents
match = Constants.ContentsRegex.Match(discData);
match = SabreTools.RedumpLib.Data.Constants.ContentsRegex.Match(discData);
if (match.Success)
{
// Process the old contents block
@@ -2167,6 +2179,10 @@ namespace MPF.Library
bool foundTag = false;
foreach (SiteCode? siteCode in Enum.GetValues(typeof(SiteCode)))
{
// If we have a null site code, just skip
if (siteCode == null)
continue;
// If the line doesn't contain this tag, just skip
if (!contentLine.Contains(siteCode.ShortName()))
continue;
@@ -2175,8 +2191,8 @@ namespace MPF.Library
lastSiteCode = siteCode;
// If we don't already have this site code, add it to the dictionary
if (!info.CommonDiscInfo.ContentsSpecialFields.ContainsKey(siteCode))
info.CommonDiscInfo.ContentsSpecialFields[siteCode] = $"(VERIFY THIS) {contentLine.Replace(siteCode.ShortName(), string.Empty).Trim()}";
if (!info.CommonDiscInfo.ContentsSpecialFields.ContainsKey(siteCode.Value))
info.CommonDiscInfo.ContentsSpecialFields[siteCode.Value] = $"(VERIFY THIS) {contentLine.Replace(siteCode.ShortName(), string.Empty).Trim()}";
// A subset of tags can be multiline
addToLast = IsMultiLine(siteCode);
@@ -2191,10 +2207,10 @@ namespace MPF.Library
{
if (addToLast && lastSiteCode != null)
{
if (!string.IsNullOrWhiteSpace(info.CommonDiscInfo.ContentsSpecialFields[lastSiteCode]))
info.CommonDiscInfo.ContentsSpecialFields[lastSiteCode] += "\n";
if (!string.IsNullOrWhiteSpace(info.CommonDiscInfo.ContentsSpecialFields[lastSiteCode.Value]))
info.CommonDiscInfo.ContentsSpecialFields[lastSiteCode.Value] += "\n";
info.CommonDiscInfo.ContentsSpecialFields[lastSiteCode] += contentLine;
info.CommonDiscInfo.ContentsSpecialFields[lastSiteCode.Value] += contentLine;
}
else
{
@@ -2208,7 +2224,7 @@ namespace MPF.Library
}
// Added
match = Constants.AddedRegex.Match(discData);
match = SabreTools.RedumpLib.Data.Constants.AddedRegex.Match(discData);
if (match.Success)
{
if (DateTime.TryParse(match.Groups[1].Value, out DateTime added))
@@ -2218,7 +2234,7 @@ namespace MPF.Library
}
// Last Modified
match = Constants.LastModifiedRegex.Match(discData);
match = SabreTools.RedumpLib.Data.Constants.LastModifiedRegex.Match(discData);
if (match.Success)
{
if (DateTime.TryParse(match.Groups[1].Value, out DateTime lastModified))
@@ -2236,7 +2252,7 @@ namespace MPF.Library
/// <param name="options">Options object representing user-defined options</param>
/// <param name="info">Existing SubmissionInfo object to fill</param>
/// <param name="resultProgress">Optional result progress callback</param>
#if NET48 || NETSTANDARD2_1
#if NET48
private static bool FillFromRedump(Core.Data.Options options, SubmissionInfo info, IProgress<Result> resultProgress = null)
#else
private async static Task<bool> FillFromRedump(Core.Data.Options options, SubmissionInfo info, IProgress<Result> resultProgress = null)
@@ -2246,14 +2262,14 @@ namespace MPF.Library
info.DumpersAndStatus.Dumpers = new string[] { options.RedumpUsername };
info.PartiallyMatchedIDs = new List<int>();
#if NET48 || NETSTANDARD2_1
#if NET48
using (RedumpWebClient wc = new RedumpWebClient())
#else
using (RedumpHttpClient wc = new RedumpHttpClient())
#endif
{
// Login to Redump
#if NET48 || NETSTANDARD2_1
#if NET48
bool? loggedIn = wc.Login(options.RedumpUsername, options.RedumpPassword);
#else
bool? loggedIn = await wc.Login(options.RedumpUsername, options.RedumpPassword);
@@ -2298,7 +2314,7 @@ namespace MPF.Library
continue;
}
#if NET48 || NETSTANDARD2_1
#if NET48
(bool singleFound, List<int> foundIds) = ValidateSingleTrack(wc, info, hashData, resultProgress);
#else
(bool singleFound, List<int> foundIds) = await ValidateSingleTrack(wc, info, hashData, resultProgress);
@@ -2325,7 +2341,7 @@ namespace MPF.Library
// If we don't have any matches but we have a universal hash
if (!info.PartiallyMatchedIDs.Any() && info.CommonDiscInfo.CommentsSpecialFields.ContainsKey(SiteCode.UniversalHash))
{
#if NET48 || NETSTANDARD2_1
#if NET48
(bool singleFound, List<int> foundIds) = ValidateUniversalHash(wc, info, resultProgress);
#else
(bool singleFound, List<int> foundIds) = await ValidateUniversalHash(wc, info, resultProgress);
@@ -2365,7 +2381,7 @@ namespace MPF.Library
for (int i = 0; i < totalMatchedIDsCount; i++)
{
// Skip if the track count doesn't match
#if NET48 || NETSTANDARD2_1
#if NET48
if (!ValidateTrackCount(wc, fullyMatchedIDs[i], trackCount))
continue;
#else
@@ -2375,7 +2391,7 @@ namespace MPF.Library
// Fill in the fields from the existing ID
resultProgress?.Report(Result.Success($"Filling fields from existing ID {fullyMatchedIDs[i]}..."));
#if NET48 || NETSTANDARD2_1
#if NET48
FillFromId(wc, info, fullyMatchedIDs[i]);
#else
_ = await FillFromId(wc, info, fullyMatchedIDs[i]);
@@ -2440,7 +2456,7 @@ namespace MPF.Library
/// <param name="query">Query string to attempt to search for</param>
/// <param name="filterForwardSlashes">True to filter forward slashes, false otherwise</param>
/// <returns>All disc IDs for the given query, null on error</returns>
#if NET48 || NETSTANDARD2_1
#if NET48
private static List<int> ListSearchResults(RedumpWebClient wc, string query, bool filterForwardSlashes = true)
#else
private async static Task<List<int>> ListSearchResults(RedumpHttpClient wc, string query, bool filterForwardSlashes = true)
@@ -2466,10 +2482,10 @@ namespace MPF.Library
int pageNumber = 1;
while (true)
{
#if NET48 || NETSTANDARD2_1
List<int> pageIds = wc.CheckSingleSitePage(string.Format(Constants.QuickSearchUrl, query, pageNumber++));
#if NET48
List<int> pageIds = wc.CheckSingleSitePage(string.Format(SabreTools.RedumpLib.Data.Constants.QuickSearchUrl, query, pageNumber++));
#else
List<int> pageIds = await wc.CheckSingleSitePage(string.Format(Constants.QuickSearchUrl, query, pageNumber++));
List<int> pageIds = await wc.CheckSingleSitePage(string.Format(SabreTools.RedumpLib.Data.Constants.QuickSearchUrl, query, pageNumber++));
#endif
ids.AddRange(pageIds);
if (pageIds.Count <= 1)
@@ -2493,7 +2509,7 @@ namespace MPF.Library
/// <param name="hashData">DAT-formatted hash data to parse out</param>
/// <param name="resultProgress">Optional result progress callback</param>
/// <returns>True if the track was found, false otherwise; List of found values, if possible</returns>
#if NET48 || NETSTANDARD2_1
#if NET48
private static (bool, List<int>) ValidateSingleTrack(RedumpWebClient wc, SubmissionInfo info, string hashData, IProgress<Result> resultProgress = null)
#else
private async static Task<(bool, List<int>)> ValidateSingleTrack(RedumpHttpClient wc, SubmissionInfo info, string hashData, IProgress<Result> resultProgress = null)
@@ -2507,7 +2523,7 @@ namespace MPF.Library
}
// Get all matching IDs for the track
#if NET48 || NETSTANDARD2_1
#if NET48
List<int> newIds = ListSearchResults(wc, sha1);
#else
List<int> newIds = await ListSearchResults(wc, sha1);
@@ -2540,7 +2556,7 @@ namespace MPF.Library
/// <param name="info">Existing SubmissionInfo object to fill</param>
/// <param name="resultProgress">Optional result progress callback</param>
/// <returns>True if the track was found, false otherwise; List of found values, if possible</returns>
#if NET48 || NETSTANDARD2_1
#if NET48
private static (bool, List<int>) ValidateUniversalHash(RedumpWebClient wc, SubmissionInfo info, IProgress<Result> resultProgress = null)
#else
private async static Task<(bool, List<int>)> ValidateUniversalHash(RedumpHttpClient wc, SubmissionInfo info, IProgress<Result> resultProgress = null)
@@ -2558,7 +2574,7 @@ namespace MPF.Library
universalHash = $"{universalHash.Substring(0, universalHash.Length - 1)}/comments/only";
// Get all matching IDs for the hash
#if NET48 || NETSTANDARD2_1
#if NET48
List<int> newIds = ListSearchResults(wc, universalHash, filterForwardSlashes: false);
#else
List<int> newIds = await ListSearchResults(wc, universalHash, filterForwardSlashes: false);
@@ -2591,14 +2607,14 @@ namespace MPF.Library
/// <param name="id">Redump disc ID to retrieve</param>
/// <param name="localCount">Local count of tracks for the current disc</param>
/// <returns>True if the track count matches, false otherwise</returns>
#if NET48 || NETSTANDARD2_1
#if NET48
private static bool ValidateTrackCount(RedumpWebClient wc, int id, int localCount)
#else
private async static Task<bool> ValidateTrackCount(RedumpHttpClient wc, int id, int localCount)
#endif
{
// If we can't pull the remote data, we can't match
#if NET48 || NETSTANDARD2_1
#if NET48
string discData = wc.DownloadSingleSiteID(id);
#else
string discData = await wc.DownloadSingleSiteID(id);
@@ -2607,7 +2623,7 @@ namespace MPF.Library
return false;
// Discs with only 1 track don't have a track count listed
var match = Constants.TrackCountRegex.Match(discData);
var match = SabreTools.RedumpLib.Data.Constants.TrackCountRegex.Match(discData);
if (!match.Success && localCount == 1)
return true;
else if (!match.Success)
@@ -2697,7 +2713,11 @@ namespace MPF.Library
/// Order comment code tags according to Redump requirements
/// </summary>
/// <returns>Ordered list of KeyValuePairs representing the tags and values</returns>
#if NET48
private static List<KeyValuePair<SiteCode?, string>> OrderCommentTags(Dictionary<SiteCode?, string> tags)
#else
private static List<KeyValuePair<SiteCode?, string>> OrderCommentTags(Dictionary<SiteCode, string> tags)
#endif
{
var sorted = new List<KeyValuePair<SiteCode?, string>>();
@@ -2741,6 +2761,8 @@ namespace MPF.Library
if (tags.ContainsKey(SiteCode.BBFCRegistrationNumber))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.BBFCRegistrationNumber, tags[SiteCode.BBFCRegistrationNumber]));
if (tags.ContainsKey(SiteCode.CDProjektID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.CDProjektID, tags[SiteCode.CDProjektID]));
if (tags.ContainsKey(SiteCode.DiscHologramID))
sorted.Add(new KeyValuePair<SiteCode?, string>(SiteCode.DiscHologramID, tags[SiteCode.DiscHologramID]));
if (tags.ContainsKey(SiteCode.DNASDiscID))
@@ -2818,7 +2840,11 @@ namespace MPF.Library
/// Order content code tags according to Redump requirements
/// </summary>
/// <returns>Ordered list of KeyValuePairs representing the tags and values</returns>
#if NET48
private static List<KeyValuePair<SiteCode?, string>> OrderContentTags(Dictionary<SiteCode?, string> tags)
#else
private static List<KeyValuePair<SiteCode?, string>> OrderContentTags(Dictionary<SiteCode, string> tags)
#endif
{
var sorted = new List<KeyValuePair<SiteCode?, string>>();
@@ -2857,6 +2883,6 @@ namespace MPF.Library
return sorted;
}
#endregion
#endregion
}
}
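The recurring #if NET48 guards above exist because the special-fields dictionaries use a nullable SiteCode? key on net48 and a non-nullable SiteCode key on the newer targets, which forces the .Value indexing seen throughout the hunks. A compressed sketch of the pattern; the SiteCode enum is stubbed here and the real type lives in SabreTools.RedumpLib.Data:

using System.Collections.Generic;

internal enum SiteCode { InternalSerialName, VolumeLabel }

internal static class SpecialFieldsDemo
{
#if NET48
    internal static readonly Dictionary<SiteCode?, string> Fields = new Dictionary<SiteCode?, string>();
#else
    internal static readonly Dictionary<SiteCode, string> Fields = new Dictionary<SiteCode, string>();
#endif

    internal static void Set(SiteCode? siteCode, string value)
    {
        // Enum.GetValues is enumerated as SiteCode? in the code above, so guard the null case first
        if (siteCode == null)
            return;

#if NET48
        Fields[siteCode] = value;        // nullable key type accepts SiteCode? directly
#else
        Fields[siteCode.Value] = value;  // non-nullable key type needs .Value
#endif
    }
}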

View File

@@ -1,15 +1,13 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net48;net6.0</TargetFrameworks>
<TargetFrameworks>net48;net6.0;net7.0</TargetFrameworks>
<RuntimeIdentifiers>win7-x64;win8-x64;win81-x64;win10-x64;linux-x64;osx-x64</RuntimeIdentifiers>
<AssemblyName>MPF.Library</AssemblyName>
<Authors>Matt Nadareski;ReignStumble;Jakz</Authors>
<Copyright>Copyright (c)2019-2023</Copyright>
<RepositoryUrl>https://github.com/SabreTools/MPF</RepositoryUrl>
<Version>2.6.3</Version>
<AssemblyVersion>$(Version)</AssemblyVersion>
<FileVersion>$(Version)</FileVersion>
<Version>2.6.4</Version>
<IncludeSource>true</IncludeSource>
<IncludeSymbols>true</IncludeSymbols>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
@@ -24,7 +22,6 @@
<ItemGroup>
<ProjectReference Include="..\MPF.Core\MPF.Core.csproj" />
<ProjectReference Include="..\MPF.Modules\MPF.Modules.csproj" />
<ProjectReference Include="..\RedumpLib\RedumpLib.csproj" />
</ItemGroup>
<ItemGroup>
@@ -32,6 +29,7 @@
<IncludeAssets>runtime; compile; build; native; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.RedumpLib" Version="1.1.1" />
<PackageReference Include="System.IO.Compression" Version="4.3.0" />
<PackageReference Include="System.IO.Compression.ZipFile" Version="4.3.0" />
<PackageReference Include="System.Runtime.CompilerServices.Unsafe" Version="6.0.0" />

View File

@@ -1,4 +1,4 @@
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Modules.Aaru
{

View File

@@ -9,8 +9,8 @@ using System.Xml.Schema;
using System.Xml.Serialization;
using MPF.Core.Converters;
using MPF.Core.Data;
using MPF.CueSheets;
using RedumpLib.Data;
using SabreTools.Models.CueSheets;
using SabreTools.RedumpLib.Data;
using Schemas;
namespace MPF.Modules.Aaru
@@ -199,7 +199,7 @@ namespace MPF.Modules.Aaru
// TODO: Determine if there's an Aaru version anywhere
info.DumpingInfo.DumpingProgram = EnumConverter.LongName(this.InternalProgram);
info.DumpingInfo.DumpingDate = GetFileModifiedDate(basePath + ".cicm.xml")?.ToString("yyyy-MM-dd hh:mm:ss");
info.DumpingInfo.DumpingDate = GetFileModifiedDate(basePath + ".cicm.xml")?.ToString("yyyy-MM-dd HH:mm:ss");
// Deserialize the sidecar, if possible
var sidecar = GenerateSidecar(basePath + ".cicm.xml");
@@ -1828,7 +1828,7 @@ namespace MPF.Modules.Aaru
// Speed
byteValue = ProcessInt8Parameter(parts, null, FlagStrings.SpeedLong, ref i);
if (byteValue == null && byteValue != SByte.MinValue)
if (byteValue != null && byteValue != SByte.MinValue)
SpeedValue = byteValue;
#endregion
@@ -2441,10 +2441,10 @@ namespace MPF.Modules.Aaru
// Required variables
uint totalTracks = 0;
CueSheet cueSheet = new CueSheet
var cueFiles = new List<CueFile>();
var cueSheet = new CueSheet
{
Performer = string.Join(", ", cicmSidecar.Performer ?? new string[0]),
Files = new List<CueFile>(),
};
// Only care about OpticalDisc types
@@ -2482,13 +2482,13 @@ namespace MPF.Modules.Aaru
{
FileName = GenerateTrackName(basePath, (int)totalTracks, cueTrack.Number, opticalDisc.DiscType),
FileType = CueFileType.BINARY,
Tracks = new List<CueTrack>(),
};
// Add index data
var cueTracks = new List<CueTrack>();
if (track.Indexes != null && track.Indexes.Length > 0)
{
cueTrack.Indices = new List<CueIndex>();
var cueIndicies = new List<CueIndex>();
// Loop through each index
foreach (TrackIndexType trackIndex in track.Indexes)
@@ -2502,36 +2502,60 @@ namespace MPF.Modules.Aaru
// Pregap information
if (trackIndex.Value < 0)
cueTrack.PreGap = new PreGap(timeString);
{
string[] timeStringSplit = timeString.Split(':');
cueTrack.PreGap = new PreGap
{
Minutes = int.Parse(timeStringSplit[0]),
Seconds = int.Parse(timeStringSplit[1]),
Frames = int.Parse(timeStringSplit[2]),
};
}
// Individual indexes
else
cueTrack.Indices.Add(new CueIndex(trackIndex.index.ToString(), timeString));
{
string[] timeStringSplit = timeString.Split(':');
cueIndicies.Add(new CueIndex
{
Index = trackIndex.index,
Minutes = int.Parse(timeStringSplit[0]),
Seconds = int.Parse(timeStringSplit[1]),
Frames = int.Parse(timeStringSplit[2]),
});
}
}
cueTrack.Indices = cueIndicies.ToArray();
}
else
{
// Default if index data missing from sidecar
cueTrack.Indices = new List<CueIndex>()
cueTrack.Indices = new CueIndex[]
{
new CueIndex("01", "00:00:00"),
new CueIndex
{
Index = 1,
Minutes = 0,
Seconds = 0,
Frames = 0,
},
};
}
// Add the track to the file
cueFile.Tracks.Add(cueTrack);
cueTracks.Add(cueTrack);
// Add the file to the cuesheet
cueSheet.Files.Add(cueFile);
cueFiles.Add(cueFile);
}
}
// If we have a cuesheet to write out, do so
cueSheet.Files = cueFiles.ToArray();
if (cueSheet != null && cueSheet != default)
{
MemoryStream ms = new MemoryStream();
cueSheet.Write(ms);
ms.Position = 0;
var ms = new SabreTools.Serialization.Streams.CueSheet().Serialize(cueSheet);
using (var sr = new StreamReader(ms))
{
return sr.ReadToEnd();

View File

@@ -12,7 +12,8 @@ using System.Xml.Serialization;
using MPF.Core.Data;
using MPF.Core.Hashing;
using MPF.Core.Utilities;
using RedumpLib.Data;
using SabreTools.Models.PIC;
using SabreTools.RedumpLib.Data;
namespace MPF.Modules
{
@@ -1164,66 +1165,13 @@ namespace MPF.Modules
/// Gets disc information from a PIC file
/// </summary>
/// <param name="pic">Path to a PIC.bin file</param>
/// <returns>Filled PICDiscInformation on success, null on error</returns>
/// <returns>Filled DiscInformation on success, null on error</returns>
/// <remarks>This omits the emergency brake information, if it exists</remarks>
protected static PICDiscInformation GetDiscInformation(string pic)
protected static DiscInformation GetDiscInformation(string pic)
{
try
{
using (BinaryReader br = new BinaryReader(File.OpenRead(pic)))
{
var di = new PICDiscInformation();
// Read the initial disc information
di.DataStructureLength = br.ReadUInt16BigEndian();
di.Reserved0 = br.ReadByte();
di.Reserved1 = br.ReadByte();
// Create a list for the units
var diUnits = new List<PICDiscInformationUnit>();
// Loop and read all available units
for (int i = 0; i < 32; i++)
{
var unit = new PICDiscInformationUnit();
// We only accept Disc Information units, not Emergency Brake or other
unit.DiscInformationIdentifier = Encoding.ASCII.GetString(br.ReadBytes(2));
if (unit.DiscInformationIdentifier != "DI")
break;
unit.DiscInformationFormat = br.ReadByte();
unit.NumberOfUnitsInBlock = br.ReadByte();
unit.Reserved0 = br.ReadByte();
unit.SequenceNumber = br.ReadByte();
unit.BytesInUse = br.ReadByte();
unit.Reserved1 = br.ReadByte();
unit.DiscTypeIdentifier = Encoding.ASCII.GetString(br.ReadBytes(3));
unit.DiscSizeClassVersion = br.ReadByte();
switch (unit.DiscTypeIdentifier)
{
case PICDiscInformationUnit.DiscTypeIdentifierROM:
case PICDiscInformationUnit.DiscTypeIdentifierROMUltra:
unit.FormatDependentContents = br.ReadBytes(52);
break;
case PICDiscInformationUnit.DiscTypeIdentifierReWritable:
case PICDiscInformationUnit.DiscTypeIdentifierRecordable:
unit.FormatDependentContents = br.ReadBytes(100);
unit.DiscManufacturerID = br.ReadBytes(6);
unit.MediaTypeID = br.ReadBytes(3);
unit.TimeStamp = br.ReadUInt16();
unit.ProductRevisionNumber = br.ReadByte();
break;
}
diUnits.Add(unit);
}
// Assign the units and return
di.Units = diUnits.ToArray();
return di;
}
return new SabreTools.Serialization.Files.PIC().Deserialize(pic);
}
catch
{
@@ -1412,7 +1360,7 @@ namespace MPF.Modules
/// </summary>
/// <param name="di">Disc information containing unformatted data</param>
/// <returns>True if layerbreak info was set, false otherwise</returns>
protected static bool GetLayerbreaks(PICDiscInformation di, out long? layerbreak1, out long? layerbreak2, out long? layerbreak3)
protected static bool GetLayerbreaks(DiscInformation di, out long? layerbreak1, out long? layerbreak2, out long? layerbreak3)
{
// Set the default values
layerbreak1 = null; layerbreak2 = null; layerbreak3 = null;
@@ -1432,24 +1380,24 @@ namespace MPF.Modules
// Layerbreak 1 (2+ layers)
if (di.Units.Length >= 2)
{
long offset = ReadFromArrayBigEndian(di.Units[0].FormatDependentContents, 0x0C);
long value = ReadFromArrayBigEndian(di.Units[0].FormatDependentContents, 0x10);
long offset = ReadFromArrayBigEndian(di.Units[0].Body.FormatDependentContents, 0x0C);
long value = ReadFromArrayBigEndian(di.Units[0].Body.FormatDependentContents, 0x10);
layerbreak1 = value - offset + 2;
}
// Layerbreak 2 (3+ layers)
if (di.Units.Length >= 3)
{
long offset = ReadFromArrayBigEndian(di.Units[1].FormatDependentContents, 0x0C);
long value = ReadFromArrayBigEndian(di.Units[1].FormatDependentContents, 0x10);
long offset = ReadFromArrayBigEndian(di.Units[1].Body.FormatDependentContents, 0x0C);
long value = ReadFromArrayBigEndian(di.Units[1].Body.FormatDependentContents, 0x10);
layerbreak2 = layerbreak1 + value - offset + 2;
}
// Layerbreak 3 (4 layers)
if (di.Units.Length >= 4)
{
long offset = ReadFromArrayBigEndian(di.Units[2].FormatDependentContents, 0x0C);
long value = ReadFromArrayBigEndian(di.Units[2].FormatDependentContents, 0x10);
long offset = ReadFromArrayBigEndian(di.Units[2].Body.FormatDependentContents, 0x0C);
long value = ReadFromArrayBigEndian(di.Units[2].Body.FormatDependentContents, 0x10);
layerbreak3 = layerbreak2 + value - offset + 2;
}
@@ -1461,14 +1409,14 @@ namespace MPF.Modules
/// </summary>
/// <param name="di">Disc information containing the data</param>
/// <returns>String representing the PIC identifier, null on error</returns>
protected static string GetPICIdentifier(PICDiscInformation di)
protected static string GetPICIdentifier(DiscInformation di)
{
// If we don't have valid disc information, we can't do anything
if (di?.Units == null || di.Units.Length <= 1)
return null;
// We assume the identifier is consistent across all units
return di.Units[0].DiscTypeIdentifier;
return di.Units[0].Body.DiscTypeIdentifier;
}
/// <summary>
@@ -1849,10 +1797,33 @@ namespace MPF.Modules
case 'A': return Region.Asia;
case 'C': return Region.China;
case 'E': return Region.Europe;
case 'J': return Region.JapanKorea;
case 'K': return Region.SouthKorea;
case 'P': return Region.Japan;
case 'U': return Region.UnitedStatesOfAmerica;
case 'P':
// Region of S_P_ serials may be Japan, Asia, or SouthKorea
switch (serial[3])
{
case 'S':
// Check first two digits of S_PS serial
switch (serial.Substring(5, 2))
{
case "46": return Region.SouthKorea;
case "56": return Region.SouthKorea;
case "51": return Region.Asia;
case "55": return Region.Asia;
default: return Region.Japan;
}
case 'M':
// Check first three digits of S_PM serial
switch (serial.Substring(5, 3))
{
case "645": return Region.SouthKorea;
case "675": return Region.SouthKorea;
case "885": return Region.SouthKorea;
default: return Region.Japan; // Remaining S_PM serials may be Japan or Asia
}
default: return Region.Japan;
}
}
}
@@ -1864,10 +1835,18 @@ namespace MPF.Modules
else if (serial.StartsWith("PABX"))
return null;
// Region appears entirely random
else if (serial.StartsWith("PBPX"))
return null;
// Japan-only special serial
else if (serial.StartsWith("PCBX"))
return Region.Japan;
// Japan-only special serial
else if (serial.StartsWith("PCXC"))
return Region.Japan;
// Single disc known, Japan
else if (serial.StartsWith("PDBX"))
return Region.Japan;
@@ -1876,6 +1855,10 @@ namespace MPF.Modules
else if (serial.StartsWith("PEBX"))
return Region.Europe;
// Single disc known, USA
else if (serial.StartsWith("PUBX"))
return Region.UnitedStatesOfAmerica;
return null;
}
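To make the expanded S_P_ handling above concrete, here are a few illustrative (made-up) serials and the region the branch would return; indices follow the code, with the prefix character at serial[3] and the digits starting at serial[5]:

// "SCPS-46xxx" : serial[3] == 'S', Substring(5, 2) == "46"  -> Region.SouthKorea
// "SCPS-55xxx" : serial[3] == 'S', Substring(5, 2) == "55"  -> Region.Asia
// "SLPM-675xx" : serial[3] == 'M', Substring(5, 3) == "675" -> Region.SouthKorea
// "SLPM-62xxx" : serial[3] == 'M', no listed prefix         -> Region.Japan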

View File

@@ -5,7 +5,7 @@ using System.Linq;
using System.Text.RegularExpressions;
using MPF.Core.Converters;
using MPF.Core.Data;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Modules.CleanRip
{
@@ -64,7 +64,7 @@ namespace MPF.Modules.CleanRip
{
// TODO: Determine if there's a CleanRip version anywhere
info.DumpingInfo.DumpingProgram = EnumConverter.LongName(this.InternalProgram);
info.DumpingInfo.DumpingDate = GetFileModifiedDate(basePath + "-dumpinfo.txt")?.ToString("yyyy-MM-dd hh:mm:ss");
info.DumpingInfo.DumpingDate = GetFileModifiedDate(basePath + "-dumpinfo.txt")?.ToString("yyyy-MM-dd HH:mm:ss");
Datafile datafile = GenerateCleanripDatafile(basePath + ".iso", basePath + "-dumpinfo.txt");
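The hh to HH change above (repeated for each dumping module in this comparison) moves the dumping date to a 24-hour clock; a small sketch of the difference between the two .NET format specifiers:

using System;

internal static class TimestampDemo
{
    internal static void Show()
    {
        var stamp = new DateTime(2023, 9, 18, 15, 40, 1);
        Console.WriteLine(stamp.ToString("yyyy-MM-dd hh:mm:ss")); // 2023-09-18 03:40:01 (12-hour)
        Console.WriteLine(stamp.ToString("yyyy-MM-dd HH:mm:ss")); // 2023-09-18 15:40:01 (24-hour)
    }
}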

View File

@@ -1,32 +0,0 @@
namespace MPF.Modules.DD
{
/// <summary>
/// Top-level commands for DD
/// </summary>
public static class CommandStrings
{
public const string NONE = "";
public const string List = "--list";
}
/// <summary>
/// Dumping flags for DD
/// </summary>
public static class FlagStrings
{
// Boolean flags
public const string Progress = "--progress";
public const string Size = "--size";
// Int64 flags
public const string BlockSize = "bs";
public const string Count = "count";
public const string Seek = "seek";
public const string Skip = "skip";
// String flags
public const string Filter = "--filter";
public const string InputFile = "if";
public const string OutputFile = "of";
}
}

View File

@@ -1,22 +0,0 @@
using RedumpLib.Data;
namespace MPF.Modules.DD
{
public static class Converters
{
#region Cross-enumeration conversions
/// <summary>
/// Get the default extension for a given disc type
/// </summary>
/// <param name="type">MediaType value to check</param>
/// <returns>Valid extension (with leading '.'), null on error</returns>
public static string Extension(MediaType? type)
{
// DD has a single, unified output format by default
return ".bin";
}
#endregion
}
}

View File

@@ -1,417 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using MPF.Core.Converters;
using MPF.Core.Data;
using RedumpLib.Data;
namespace MPF.Modules.DD
{
/// <summary>
/// Represents a generic set of DD parameters
/// </summary>
public class Parameters : BaseParameters
{
#region Generic Dumping Information
/// <inheritdoc/>
public override string InputPath => InputFileValue?.TrimStart('\\', '?');
/// <inheritdoc/>
public override string OutputPath => OutputFileValue;
/// <inheritdoc/>
public override int? Speed
{
get { return 1; }
set { }
}
#endregion
#region Metadata
/// <inheritdoc/>
public override InternalProgram InternalProgram => InternalProgram.DD;
#endregion
#region Flag Values
public long? BlockSizeValue { get; set; }
public long? CountValue { get; set; }
// fixed, removable, disk, partition
public string FilterValue { get; set; }
public string InputFileValue { get; set; }
public string OutputFileValue { get; set; }
public long? SeekValue { get; set; }
public long? SkipValue { get; set; }
#endregion
/// <inheritdoc/>
public Parameters(string parameters) : base(parameters) { }
/// <inheritdoc/>
public Parameters(RedumpSystem? system, MediaType? type, char driveLetter, string filename, int? driveSpeed, Options options)
: base(system, type, driveLetter, filename, driveSpeed, options)
{
}
#region BaseParameters Implementations
/// <inheritdoc/>
public override (bool, List<string>) CheckAllOutputFilesExist(string basePath, bool preCheck)
{
// TODO: Figure out what sort of output files are expected... just `.bin`?
return (true, new List<string>());
}
/// <inheritdoc/>
public override void GenerateSubmissionInfo(SubmissionInfo info, Options options, string basePath, Drive drive, bool includeArtifacts)
{
// TODO: Fill in submission info specifics for DD
string outputDirectory = Path.GetDirectoryName(basePath);
// TODO: Determine if there's a DD version anywhere
info.DumpingInfo.DumpingProgram = EnumConverter.LongName(this.InternalProgram);
info.DumpingInfo.DumpingDate = DateTime.UtcNow.ToString("yyyy-MM-dd hh:mm:ss");
switch (this.Type)
{
// Determine type-specific differences
}
switch (this.System)
{
case RedumpSystem.KonamiPython2:
if (GetPlayStationExecutableInfo(drive?.Letter, out string pythonTwoSerial, out Region? pythonTwoRegion, out string pythonTwoDate))
{
// Ensure internal serial is pulled from local data
info.CommonDiscInfo.CommentsSpecialFields[SiteCode.InternalSerialName] = pythonTwoSerial ?? string.Empty;
info.CommonDiscInfo.Region = info.CommonDiscInfo.Region ?? pythonTwoRegion;
info.CommonDiscInfo.EXEDateBuildDate = pythonTwoDate;
}
info.VersionAndEditions.Version = GetPlayStation2Version(drive?.Letter) ?? string.Empty;
break;
case RedumpSystem.SonyPlayStation:
if (GetPlayStationExecutableInfo(drive?.Letter, out string playstationSerial, out Region? playstationRegion, out string playstationDate))
{
// Ensure internal serial is pulled from local data
info.CommonDiscInfo.CommentsSpecialFields[SiteCode.InternalSerialName] = playstationSerial ?? string.Empty;
info.CommonDiscInfo.Region = info.CommonDiscInfo.Region ?? playstationRegion;
info.CommonDiscInfo.EXEDateBuildDate = playstationDate;
}
break;
case RedumpSystem.SonyPlayStation2:
if (GetPlayStationExecutableInfo(drive?.Letter, out string playstationTwoSerial, out Region? playstationTwoRegion, out string playstationTwoDate))
{
// Ensure internal serial is pulled from local data
info.CommonDiscInfo.CommentsSpecialFields[SiteCode.InternalSerialName] = playstationTwoSerial ?? string.Empty;
info.CommonDiscInfo.Region = info.CommonDiscInfo.Region ?? playstationTwoRegion;
info.CommonDiscInfo.EXEDateBuildDate = playstationTwoDate;
}
info.VersionAndEditions.Version = GetPlayStation2Version(drive?.Letter) ?? string.Empty;
break;
case RedumpSystem.SonyPlayStation3:
info.VersionAndEditions.Version = GetPlayStation3Version(drive?.Letter) ?? string.Empty;
info.CommonDiscInfo.CommentsSpecialFields[SiteCode.InternalSerialName] = GetPlayStation3Serial(drive?.Letter) ?? string.Empty;
break;
case RedumpSystem.SonyPlayStation4:
info.VersionAndEditions.Version = GetPlayStation4Version(drive?.Letter) ?? string.Empty;
info.CommonDiscInfo.CommentsSpecialFields[SiteCode.InternalSerialName] = GetPlayStation4Serial(drive?.Letter) ?? string.Empty;
break;
case RedumpSystem.SonyPlayStation5:
info.VersionAndEditions.Version = GetPlayStation5Version(drive?.Letter) ?? string.Empty;
info.CommonDiscInfo.CommentsSpecialFields[SiteCode.InternalSerialName] = GetPlayStation5Serial(drive?.Letter) ?? string.Empty;
break;
}
}
/// <inheritdoc/>
public override string GenerateParameters()
{
List<string> parameters = new List<string>();
if (BaseCommand == null)
BaseCommand = CommandStrings.NONE;
if (!string.IsNullOrEmpty(BaseCommand))
parameters.Add(BaseCommand);
#region Boolean flags
// Progress
if (IsFlagSupported(FlagStrings.Progress))
{
if (this[FlagStrings.Progress] == true)
parameters.Add($"{FlagStrings.Progress}");
}
// Size
if (IsFlagSupported(FlagStrings.Size))
{
if (this[FlagStrings.Size] == true)
parameters.Add($"{FlagStrings.Size}");
}
#endregion
#region Int64 flags
// Block Size
if (IsFlagSupported(FlagStrings.BlockSize))
{
if (this[FlagStrings.BlockSize] == true && BlockSizeValue != null)
parameters.Add($"{FlagStrings.BlockSize}={BlockSizeValue}");
}
// Count
if (IsFlagSupported(FlagStrings.Count))
{
if (this[FlagStrings.Count] == true && CountValue != null)
parameters.Add($"{FlagStrings.Count}={CountValue}");
}
// Seek
if (IsFlagSupported(FlagStrings.Seek))
{
if (this[FlagStrings.Seek] == true && SeekValue != null)
parameters.Add($"{FlagStrings.Seek}={SeekValue}");
}
// Skip
if (IsFlagSupported(FlagStrings.Skip))
{
if (this[FlagStrings.Skip] == true && SkipValue != null)
parameters.Add($"{FlagStrings.Skip}={SkipValue}");
}
#endregion
#region String flags
// Filter
if (IsFlagSupported(FlagStrings.Filter))
{
if (this[FlagStrings.Filter] == true && FilterValue != null)
parameters.Add($"{FlagStrings.Filter}={FilterValue}");
}
// Input File
if (IsFlagSupported(FlagStrings.InputFile))
{
if (this[FlagStrings.InputFile] == true && InputFileValue != null)
parameters.Add($"{FlagStrings.InputFile}=\"{InputFileValue}\"");
else
return null;
}
// Output File
if (IsFlagSupported(FlagStrings.OutputFile))
{
if (this[FlagStrings.OutputFile] == true && OutputFileValue != null)
parameters.Add($"{FlagStrings.OutputFile}=\"{OutputFileValue}\"");
else
return null;
}
#endregion
return string.Join(" ", parameters);
}
/// <inheritdoc/>
public override Dictionary<string, List<string>> GetCommandSupport()
{
return new Dictionary<string, List<string>>()
{
[CommandStrings.NONE] = new List<string>()
{
FlagStrings.BlockSize,
FlagStrings.Count,
FlagStrings.Filter,
FlagStrings.InputFile,
FlagStrings.OutputFile,
FlagStrings.Progress,
FlagStrings.Seek,
FlagStrings.Size,
FlagStrings.Skip,
},
[CommandStrings.List] = new List<string>()
{
},
};
}
/// <inheritdoc/>
public override string GetDefaultExtension(MediaType? mediaType) => Converters.Extension(mediaType);
/// <inheritdoc/>
public override bool IsDumpingCommand()
{
switch (this.BaseCommand)
{
case CommandStrings.List:
return false;
default:
return true;
}
}
/// <inheritdoc/>
protected override void ResetValues()
{
BaseCommand = CommandStrings.NONE;
flags = new Dictionary<string, bool?>();
BlockSizeValue = null;
CountValue = null;
InputFileValue = null;
OutputFileValue = null;
SeekValue = null;
SkipValue = null;
}
/// <inheritdoc/>
protected override void SetDefaultParameters(char driveLetter, string filename, int? driveSpeed, Options options)
{
BaseCommand = CommandStrings.NONE;
this[FlagStrings.InputFile] = true;
InputFileValue = $"\\\\?\\{driveLetter}:";
this[FlagStrings.OutputFile] = true;
OutputFileValue = filename;
// TODO: Add more common block sizes
this[FlagStrings.BlockSize] = true;
switch (this.Type)
{
case MediaType.FloppyDisk:
BlockSizeValue = 1440 * 1024;
break;
default:
BlockSizeValue = 1024 * 1024 * 1024;
break;
}
this[FlagStrings.Progress] = true;
this[FlagStrings.Size] = true;
}
/// <inheritdoc/>
protected override bool ValidateAndSetParameters(string parameters)
{
BaseCommand = CommandStrings.NONE;
// The string has to be valid by itself first
if (string.IsNullOrWhiteSpace(parameters))
return false;
// Now split the string into parts for easier validation
// https://stackoverflow.com/questions/14655023/split-a-string-that-has-white-spaces-unless-they-are-enclosed-within-quotes
parameters = parameters.Trim();
List<string> parts = Regex.Matches(parameters, @"[\""].+?[\""]|[^ ]+")
.Cast<Match>()
.Select(m => m.Value)
.ToList();
// Determine what the commandline should look like given the first item
int start = 0;
if (parts[0] == CommandStrings.List)
{
BaseCommand = parts[0];
start = 1;
}
// Loop through all auxiliary flags, if necessary
for (int i = start; i < parts.Count; i++)
{
// Flag read-out values
long? longValue = null;
string stringValue = null;
// Keep a count of keys to determine if we should break out to filename handling or not
int keyCount = Keys.Count();
#region Boolean flags
// Progress
ProcessFlagParameter(parts, FlagStrings.Progress, ref i);
// Size
ProcessFlagParameter(parts, FlagStrings.Size, ref i);
#endregion
#region Int64 flags
// Block Size
longValue = ProcessInt64Parameter(parts, FlagStrings.BlockSize, ref i);
if (longValue != null)
BlockSizeValue = longValue;
// Count
longValue = ProcessInt64Parameter(parts, FlagStrings.Count, ref i);
if (longValue != null)
CountValue = longValue;
// Seek
longValue = ProcessInt64Parameter(parts, FlagStrings.Seek, ref i);
if (longValue != null)
SeekValue = longValue;
// Skip
longValue = ProcessInt64Parameter(parts, FlagStrings.Skip, ref i);
if (longValue != null)
SkipValue = longValue;
#endregion
#region String flags
// Filter (fixed, removable, disk, partition)
stringValue = ProcessStringParameter(parts, FlagStrings.Filter, ref i);
if (!string.IsNullOrEmpty(stringValue))
FilterValue = stringValue;
// Input File
stringValue = ProcessStringParameter(parts, FlagStrings.InputFile, ref i);
if (!string.IsNullOrEmpty(stringValue))
InputFileValue = stringValue;
// Output File
stringValue = ProcessStringParameter(parts, FlagStrings.OutputFile, ref i);
if (!string.IsNullOrEmpty(stringValue))
OutputFileValue = stringValue;
#endregion
}
return true;
}
#endregion
}
}
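For reference, with the defaults from SetDefaultParameters above (non-floppy media), GenerateParameters in this removed module assembles a command line roughly like the following; the drive letter and output filename are placeholders:

// Boolean flags first, then the Int64 flags, then the string flags, per GenerateParameters above.
// Block size is the non-floppy default of 1024 * 1024 * 1024 bytes.
//   --progress --size bs=1073741824 if="\\?\D:" of="dump.bin"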

View File

@@ -1,4 +1,4 @@
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Modules.DiscImageCreator
{

View File

@@ -2,13 +2,12 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using MPF.Core.Converters;
using MPF.Core.Data;
using MPF.Core.Utilities;
using MPF.CueSheets;
using RedumpLib.Data;
using SabreTools.Models.PIC;
using SabreTools.RedumpLib.Data;
namespace MPF.Modules.DiscImageCreator
{
@@ -391,7 +390,7 @@ namespace MPF.Modules.DiscImageCreator
// Get the dumping program and version
(string dicCmd, string dicVersion) = GetCommandFilePathAndVersion(basePath);
info.DumpingInfo.DumpingProgram = $"{EnumConverter.LongName(this.InternalProgram)} {dicVersion ?? "Unknown Version"}";
info.DumpingInfo.DumpingDate = GetFileModifiedDate(dicCmd)?.ToString("yyyy-MM-dd hh:mm:ss");
info.DumpingInfo.DumpingDate = GetFileModifiedDate(dicCmd)?.ToString("yyyy-MM-dd HH:mm:ss");
// Fill in the hardware data
if (GetHardwareInfo($"{basePath}_drive.txt", out string manufacturer, out string model, out string firmware))
@@ -552,11 +551,11 @@ namespace MPF.Modules.DiscImageCreator
XgdInfo xgd1Info = new XgdInfo(xgd1XMID);
if (xgd1Info?.Initialized == true)
{
info.CommonDiscInfo.CommentsSpecialFields[SiteCode.XMID] = xgd1Info.XMID;
info.CommonDiscInfo.CommentsSpecialFields[SiteCode.XMID] = xgd1Info.RawXMID;
info.CommonDiscInfo.Serial = xgd1Info.GetSerial() ?? string.Empty;
if (!options.EnableRedumpCompatibility)
info.VersionAndEditions.Version = xgd1Info.GetVersion() ?? string.Empty;
info.CommonDiscInfo.Region = xgd1Info.InternalRegion;
info.CommonDiscInfo.Region = XgdInfo.GetRegion(xgd1Info.XMID.RegionIdentifier);
}
// If we have the new, external DAT
@@ -595,11 +594,11 @@ namespace MPF.Modules.DiscImageCreator
XgdInfo xgd23Info = new XgdInfo(xgd23XeMID);
if (xgd23Info?.Initialized == true)
{
info.CommonDiscInfo.CommentsSpecialFields[SiteCode.XeMID] = xgd23Info.XMID;
info.CommonDiscInfo.CommentsSpecialFields[SiteCode.XeMID] = xgd23Info.RawXMID;
info.CommonDiscInfo.Serial = xgd23Info.GetSerial() ?? string.Empty;
if (!options.EnableRedumpCompatibility)
info.VersionAndEditions.Version = xgd23Info.GetVersion() ?? string.Empty;
info.CommonDiscInfo.Region = xgd23Info.InternalRegion;
info.CommonDiscInfo.Region = XgdInfo.GetRegion(xgd23Info.XeMID.RegionIdentifier);
}
// If we have the new, external DAT

View File

@@ -1,14 +1,12 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net48;net6.0</TargetFrameworks>
<TargetFrameworks>net48;net6.0;net7.0</TargetFrameworks>
<RuntimeIdentifiers>win7-x64;win8-x64;win81-x64;win10-x64;linux-x64;osx-x64</RuntimeIdentifiers>
<Authors>Matt Nadareski;ReignStumble;Jakz</Authors>
<Copyright>Copyright (c)2019-2023</Copyright>
<RepositoryUrl>https://github.com/SabreTools/MPF</RepositoryUrl>
<Version>2.6.3</Version>
<AssemblyVersion>$(Version)</AssemblyVersion>
<FileVersion>$(Version)</FileVersion>
<Version>2.6.4</Version>
<IncludeSource>true</IncludeSource>
<IncludeSymbols>true</IncludeSymbols>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
@@ -22,11 +20,12 @@
<ItemGroup>
<ProjectReference Include="..\MPF.Core\MPF.Core.csproj" />
<ProjectReference Include="..\MPF.CueSheets\MPF.CueSheets.csproj" />
<ProjectReference Include="..\RedumpLib\RedumpLib.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="SabreTools.Models" Version="1.1.2" />
<PackageReference Include="SabreTools.RedumpLib" Version="1.1.1" />
<PackageReference Include="SabreTools.Serialization" Version="1.1.2" />
<PackageReference Include="System.Memory" Version="4.5.5" />
<PackageReference Include="System.Runtime.CompilerServices.Unsafe" Version="6.0.0" />
<PackageReference Include="Unclassified.NetRevisionTask" Version="0.4.3">

View File

@@ -1,4 +1,4 @@
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Modules.Redumper
{

View File

@@ -6,7 +6,7 @@ using System.Text.RegularExpressions;
using MPF.Core.Converters;
using MPF.Core.Data;
using MPF.Core.Utilities;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Modules.Redumper
{
@@ -247,7 +247,7 @@ namespace MPF.Modules.Redumper
{
// Get the dumping program and version
info.DumpingInfo.DumpingProgram = $"{EnumConverter.LongName(this.InternalProgram)} {GetVersion($"{basePath}.log") ?? "Unknown Version"}";
info.DumpingInfo.DumpingDate = GetFileModifiedDate($"{basePath}.log")?.ToString("yyyy-MM-dd hh:mm:ss");
info.DumpingInfo.DumpingDate = GetFileModifiedDate($"{basePath}.log")?.ToString("yyyy-MM-dd HH:mm:ss");
// Fill in the hardware data
if (GetHardwareInfo($"{basePath}.log", out string manufacturer, out string model, out string firmware))

View File

@@ -4,7 +4,7 @@ using System.IO;
using System.Linq;
using MPF.Core.Converters;
using MPF.Core.Data;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Modules.UmdImageCreator
{
@@ -65,7 +65,7 @@ namespace MPF.Modules.UmdImageCreator
{
// TODO: Determine if there's a UMDImageCreator version anywhere
info.DumpingInfo.DumpingProgram = EnumConverter.LongName(this.InternalProgram);
info.DumpingInfo.DumpingDate = GetFileModifiedDate(basePath + "_disc.txt")?.ToString("yyyy-MM-dd hh:mm:ss");
info.DumpingInfo.DumpingDate = GetFileModifiedDate(basePath + "_disc.txt")?.ToString("yyyy-MM-dd HH:mm:ss");
// Extract info based generically on MediaType
switch (this.Type)

View File

@@ -22,13 +22,13 @@ namespace MPF.Test.Core.Data
[InlineData("AV00100W\0", "AV", "001", "00", 'W')]
public void XGD1ValidTests(string validString, string publisher, string gameId, string version, char regionIdentifier)
{
XgdInfo xgdInfo = new XgdInfo(validString, validate: true);
XgdInfo xgdInfo = new XgdInfo(validString);
Assert.True(xgdInfo.Initialized);
Assert.Equal(publisher, xgdInfo.PublisherIdentifier);
Assert.Equal(gameId, xgdInfo.GameID);
Assert.Equal(version, xgdInfo.SKU);
Assert.Equal(regionIdentifier, xgdInfo.RegionIdentifier);
Assert.Equal(publisher, xgdInfo.XMID.PublisherIdentifier);
Assert.Equal(gameId, xgdInfo.XMID.GameID);
Assert.Equal(version, xgdInfo.XMID.VersionNumber);
Assert.Equal(regionIdentifier, xgdInfo.XMID.RegionIdentifier);
}
[Theory]
@@ -40,7 +40,7 @@ namespace MPF.Test.Core.Data
[InlineData("AV00000Z\0")]
public void XGD1InvalidTests(string invalidString)
{
XgdInfo xgdInfo = new XgdInfo(invalidString, validate: true);
XgdInfo xgdInfo = new XgdInfo(invalidString);
Assert.False(xgdInfo.Initialized);
}
@@ -55,18 +55,18 @@ namespace MPF.Test.Core.Data
[InlineData("AV200100W01F11DEADBEEF\0", "AV", "001", "00", 'W', "01", 'F', "11", "DEADBEEF")]
public void XGD23ValidTests(string validString, string publisher, string gameId, string sku, char regionIdentifier, string baseVersion, char mediaSubtype, string discNumber, string cert)
{
XgdInfo xgdInfo = new XgdInfo(validString, validate: true);
XgdInfo xgdInfo = new XgdInfo(validString);
Assert.True(xgdInfo.Initialized);
Assert.Equal(publisher, xgdInfo.PublisherIdentifier);
Assert.Equal('2', xgdInfo.PlatformIdentifier);
Assert.Equal(gameId, xgdInfo.GameID);
Assert.Equal(sku, xgdInfo.SKU);
Assert.Equal(regionIdentifier, xgdInfo.RegionIdentifier);
Assert.Equal(baseVersion, xgdInfo.BaseVersion);
Assert.Equal(mediaSubtype, xgdInfo.MediaSubtypeIdentifier);
Assert.Equal(discNumber, xgdInfo.DiscNumberIdentifier);
Assert.Equal(cert, xgdInfo.CertificationSubmissionIdentifier);
Assert.Equal(publisher, xgdInfo.XeMID.PublisherIdentifier);
Assert.Equal('2', xgdInfo.XeMID.PlatformIdentifier);
Assert.Equal(gameId, xgdInfo.XeMID.GameID);
Assert.Equal(sku, xgdInfo.XeMID.SKU);
Assert.Equal(regionIdentifier, xgdInfo.XeMID.RegionIdentifier);
Assert.Equal(baseVersion, xgdInfo.XeMID.BaseVersion);
Assert.Equal(mediaSubtype, xgdInfo.XeMID.MediaSubtypeIdentifier);
Assert.Equal(discNumber, xgdInfo.XeMID.DiscNumberIdentifier);
Assert.Equal(cert, xgdInfo.XeMID.CertificationSubmissionIdentifier);
}
[Theory]
@@ -108,7 +108,7 @@ namespace MPF.Test.Core.Data
[InlineData("AV200000W00A0000000000\0")]
public void XGD23InvalidTests(string invalidString)
{
XgdInfo xgdInfo = new XgdInfo(invalidString, validate: true);
XgdInfo xgdInfo = new XgdInfo(invalidString);
Assert.False(xgdInfo.Initialized);
}
}

View File
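The test rewrite above tracks an XgdInfo reshuffle: the `validate` constructor flag is gone, and identifiers are now read through the nested `XMID`/`XeMID` objects instead of flattened properties. A hedged sketch of the shape these updated tests assume (member names are taken from the tests themselves; the namespace is an assumption):

```csharp
using MPF.Core.Data; // assumed location of XgdInfo

// Construct from the raw media ID string; Initialized reports whether parsing succeeded
var xgd1 = new XgdInfo("AV00100W\0");
if (xgd1.Initialized)
{
    // XGD1 identifiers now live on the nested XMID object
    string publisher = xgd1.XMID.PublisherIdentifier; // "AV"
    string gameId = xgd1.XMID.GameID;                 // "001"
}

// XGD2/3 strings expose their fields on XeMID instead
var xgd23 = new XgdInfo("AV200100W01F11DEADBEEF\0");
if (xgd23.Initialized)
{
    string baseVersion = xgd23.XeMID.BaseVersion;         // "01"
    string discNumber = xgd23.XeMID.DiscNumberIdentifier; // "11"
}
```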

@@ -2,7 +2,7 @@
using System.Collections.Generic;
using System.Linq;
using MPF.Core.Utilities;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace MPF.Test.Core.Utilities
@@ -36,6 +36,7 @@ namespace MPF.Test.Core.Utilities
RedumpSystem.HasbroVideoNowColor,
RedumpSystem.HasbroVideoNowJr,
RedumpSystem.HasbroVideoNowXP,
RedumpSystem.PlayStationGameSharkUpdates,
RedumpSystem.PhilipsCDi,
RedumpSystem.SuperAudioCD,
};

View File

@@ -1,7 +1,7 @@
using System.IO;
using MPF.Core.Data;
using MPF.Library;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace MPF.Test.Library

View File

@@ -1,7 +1,7 @@
using System.Collections.Generic;
using System.IO;
using MPF.Library;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace MPF.Test.Library
@@ -76,13 +76,21 @@ namespace MPF.Test.Library
CommonDiscInfo = new CommonDiscInfoSection()
{
Comments = "This is a comments line\n[T:ISBN] ISBN Value",
#if NET48
CommentsSpecialFields = new Dictionary<SiteCode?, string>()
#else
CommentsSpecialFields = new Dictionary<SiteCode, string>()
#endif
{
[SiteCode.VolumeLabel] = "VOLUME_LABEL",
},
Contents = "This is a contents line\n[T:GF] Game Footage",
#if NET48
ContentsSpecialFields = new Dictionary<SiteCode?, string>()
#else
ContentsSpecialFields = new Dictionary<SiteCode, string>()
#endif
{
[SiteCode.Patches] = "1.04 patch",
},
@@ -132,13 +140,21 @@ namespace MPF.Test.Library
CommonDiscInfo = new CommonDiscInfoSection()
{
Comments = null,
#if NET48
CommentsSpecialFields = new Dictionary<SiteCode?, string>()
#else
CommentsSpecialFields = new Dictionary<SiteCode, string>()
#endif
{
[SiteCode.VolumeLabel] = "VOLUME_LABEL",
},
Contents = null,
#if NET48
ContentsSpecialFields = new Dictionary<SiteCode?, string>()
#else
ContentsSpecialFields = new Dictionary<SiteCode, string>()
#endif
{
[SiteCode.Patches] = "1.04 patch",
},

View File
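The `#if NET48` blocks added above exist because the special-field dictionaries are keyed on the nullable `SiteCode?` under the net48 target but on the non-nullable `SiteCode` under net6.0/net7.0, so every construction site needs the guard. A hypothetical helper (not part of this diff) that would centralize the conditional:

```csharp
using System.Collections.Generic;
using SabreTools.RedumpLib.Data;

internal static class SpecialFields
{
    // One place for the target-dependent key type, so call sites avoid repeating the #if
#if NET48
    public static Dictionary<SiteCode?, string> Create() => new Dictionary<SiteCode?, string>();
#else
    public static Dictionary<SiteCode, string> Create() => new Dictionary<SiteCode, string>();
#endif
}
```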

@@ -178,43 +178,6 @@ namespace MPF.Test.Library
Assert.Equal("Anything Else Protection", sanitized);
}
[Theory]
[InlineData(0)]
[InlineData(1)]
[InlineData(2)]
[InlineData(3)]
[InlineData(4)]
[InlineData(5)]
[InlineData(6)]
[InlineData(7)]
[InlineData(8)]
public void SanitizeFoundProtectionsSafeDiscTest(int skip)
{
List<string> protections = new List<string>()
{
"SafeDisc 1.20.000",
"SafeDisc (drvmgt.dll) 1.2.0",
"SafeDisc (secdrv.sys) 1.2.0",
"SafeDisc (dplayerx.dll) 1.2.0",
"SafeDisc 3.20-4.xx (version removed)",
"SafeDisc 2",
"SafeDisc 1/Lite",
"SafeDisc Lite",
"SafeDisc 1-3",
"SafeDisc",
};
// Safeguard for the future
if (skip >= protections.Count)
throw new ArgumentException("Invalid skip value", nameof(skip));
// The list is in order of preference
protections = protections.Skip(skip).ToList();
string sanitized = Protection.SanitizeFoundProtections(protections);
Assert.Equal(protections[0], sanitized);
}
[Theory]
[InlineData(0)]
[InlineData(1)]

View File

@@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net48;net6.0-windows</TargetFrameworks>
<TargetFrameworks>net48;net6.0-windows;net7.0-windows</TargetFrameworks>
<IsPackable>false</IsPackable>
</PropertyGroup>
@@ -30,13 +30,13 @@
<ProjectReference Include="..\MPF\MPF.csproj" />
<ProjectReference Include="..\MPF.Library\MPF.Library.csproj" />
<ProjectReference Include="..\MPF.Modules\MPF.Modules.csproj" />
<ProjectReference Include="..\RedumpLib\RedumpLib.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.CodeCoverage" Version="17.6.3" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.6.3" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.RedumpLib" Version="1.1.1" />
<PackageReference Include="xunit" Version="2.5.0" />
<PackageReference Include="xunit.abstractions" Version="2.0.3" />
<PackageReference Include="xunit.analyzers" Version="1.2.0" />

View File

@@ -1,7 +1,7 @@
using System.Collections.Generic;
using MPF.Core.Data;
using MPF.Modules.DiscImageCreator;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace MPF.Test.Modules

View File

@@ -2,7 +2,7 @@
using System.Collections.Generic;
using System.Data;
using System.Linq;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace MPF.Test.RedumpLib

View File

@@ -2,7 +2,7 @@
using System.Collections.Generic;
using Newtonsoft.Json;
using psxt001z;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace MPF.Test.RedumpLib
@@ -82,12 +82,20 @@ namespace MPF.Test.RedumpLib
EXEDateBuildDate = "19xx-xx-xx",
ErrorsCount = "0",
Comments = "Comment data line 1\r\nComment data line 2",
#if NET48
CommentsSpecialFields = new Dictionary<SiteCode?, string>()
#else
CommentsSpecialFields = new Dictionary<SiteCode, string>()
#endif
{
[SiteCode.ISBN] = "ISBN",
},
Contents = "Special contents 1\r\nSpecial contents 2",
#if NET48
ContentsSpecialFields = new Dictionary<SiteCode?, string>()
#else
ContentsSpecialFields = new Dictionary<SiteCode, string>()
#endif
{
[SiteCode.PlayableDemos] = "Game Demo 1",
},
@@ -161,7 +169,7 @@ namespace MPF.Test.RedumpLib
DumpingInfo = new DumpingInfoSection()
{
DumpingProgram = "DiscImageCreator 20500101",
DumpingDate = DateTime.UtcNow.ToString("yyyy-MM-dd hh:mm:ss"),
DumpingDate = DateTime.UtcNow.ToString("yyyy-MM-dd HH:mm:ss"),
Manufacturer = "ATAPI",
Model = "Optical Drive",
Firmware = "1.23",

View File

@@ -1,6 +1,6 @@
using System.Linq;
using MPF.Core.Data;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace MPF.Test.Data

View File

@@ -3,9 +3,9 @@ using System.Collections.Generic;
using System.Linq;
using MPF.Core.Utilities;
using MPF.UI.Core.ComboBoxItems;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF
namespace MPF.UI.Core.ComboBoxItems
{
/// <summary>
/// Represents a single item in the System combo box

View File

@@ -3,7 +3,7 @@ using System.Collections.Generic;
using System.Linq;
using System.Windows.Media;
namespace MPF
namespace MPF.UI.Core
{
/// <summary>
/// Variables for UI elements

View File

@@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk.WindowsDesktop">
<PropertyGroup>
<TargetFrameworks>net48;net6.0-windows</TargetFrameworks>
<TargetFrameworks>net48;net6.0-windows;net7.0-windows</TargetFrameworks>
<RuntimeIdentifiers>win7-x64;win8-x64;win81-x64;win10-x64</RuntimeIdentifiers>
<UseWindowsForms>true</UseWindowsForms>
<UseWPF>true</UseWPF>
@@ -9,9 +9,7 @@
<AssemblyName>MPF.UI.Core</AssemblyName>
<Authors>Matt Nadareski;ReignStumble;Jakz</Authors>
<Copyright>Copyright (c)2019-2023</Copyright>
<Version>2.6.3</Version>
<AssemblyVersion>$(Version)</AssemblyVersion>
<FileVersion>$(Version)</FileVersion>
<Version>2.6.4</Version>
<IncludeSource>true</IncludeSource>
<IncludeSymbols>true</IncludeSymbols>
</PropertyGroup>
@@ -23,13 +21,6 @@
<UserSecretsId>27abb4ca-bf7a-431e-932f-49153303d5ff</UserSecretsId>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Unclassified.NetRevisionTask" Version="0.4.3">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>
<Resource Include="Images\ring-code-guide-1-layer.png" />
<Resource Include="Images\ring-code-guide-2-layer.png" />
@@ -37,13 +28,21 @@
<ItemGroup>
<ProjectReference Include="..\MPF.Core\MPF.Core.csproj" />
<ProjectReference Include="..\RedumpLib\RedumpLib.csproj" />
<ProjectReference Include="..\MPF.Library\MPF.Library.csproj" />
</ItemGroup>
<ItemGroup>
<Reference Include="PresentationFramework.Aero" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="SabreTools.RedumpLib" Version="1.1.1" />
<PackageReference Include="Unclassified.NetRevisionTask" Version="0.4.3">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>
<Folder Include="Images\" />
</ItemGroup>

View File

@@ -1,9 +1,8 @@
<UserControl x:Class="MPF.UserControls.LogOutput"
<UserControl x:Class="MPF.UI.Core.UserControls.LogOutput"
xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
xmlns:mc="http://schemas.openxmlformats.org/markup-compatibility/2006"
xmlns:d="http://schemas.microsoft.com/expression/blend/2008"
xmlns:local="clr-namespace:MPF.UserControls"
mc:Ignorable="d"
d:DesignHeight="450" d:DesignWidth="800">
<Grid>

View File

@@ -1,7 +1,7 @@
using System.Windows.Controls;
using MPF.UI.ViewModels;
using MPF.UI.Core.ViewModels;
namespace MPF.UserControls
namespace MPF.UI.Core.UserControls
{
public partial class LogOutput : UserControl
{

View File

@@ -5,7 +5,7 @@ using MPF.Core.Data;
using MPF.Core.Utilities;
using MPF.UI.Core.ComboBoxItems;
using MPF.UI.Core.Windows;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.UI.Core.ViewModels
{
@@ -395,6 +395,8 @@ namespace MPF.UI.Core.ViewModels
// Physical Identifiers
if (SubmissionInfo.CommonDiscInfo.CommentsSpecialFields.ContainsKey(SiteCode.BBFCRegistrationNumber))
Parent.BBFCRegistrationNumberTextBox.Text = SubmissionInfo.CommonDiscInfo.CommentsSpecialFields[SiteCode.BBFCRegistrationNumber];
if (SubmissionInfo.CommonDiscInfo.CommentsSpecialFields.ContainsKey(SiteCode.CDProjektID))
Parent.CDProjektIDTextBox.Text = SubmissionInfo.CommonDiscInfo.CommentsSpecialFields[SiteCode.CDProjektID];
if (SubmissionInfo.CommonDiscInfo.CommentsSpecialFields.ContainsKey(SiteCode.DiscHologramID))
Parent.DiscHologramIDTextBox.Text = SubmissionInfo.CommonDiscInfo.CommentsSpecialFields[SiteCode.DiscHologramID];
if (SubmissionInfo.CommonDiscInfo.CommentsSpecialFields.ContainsKey(SiteCode.DNASDiscID))
@@ -534,9 +536,17 @@ namespace MPF.UI.Core.ViewModels
// Initialize the dictionaries, if needed
if (SubmissionInfo.CommonDiscInfo.CommentsSpecialFields == null)
#if NET48
SubmissionInfo.CommonDiscInfo.CommentsSpecialFields = new Dictionary<SiteCode?, string>();
#else
SubmissionInfo.CommonDiscInfo.CommentsSpecialFields = new Dictionary<SiteCode, string>();
#endif
if (SubmissionInfo.CommonDiscInfo.ContentsSpecialFields == null)
#if NET48
SubmissionInfo.CommonDiscInfo.ContentsSpecialFields = new Dictionary<SiteCode?, string>();
#else
SubmissionInfo.CommonDiscInfo.ContentsSpecialFields = new Dictionary<SiteCode, string>();
#endif
#region Comment Fields
@@ -550,6 +560,7 @@ namespace MPF.UI.Core.ViewModels
// Physical Identifiers
SubmissionInfo.CommonDiscInfo.CommentsSpecialFields[SiteCode.BBFCRegistrationNumber] = Parent.BBFCRegistrationNumberTextBox.Text;
SubmissionInfo.CommonDiscInfo.CommentsSpecialFields[SiteCode.CDProjektID] = Parent.CDProjektIDTextBox.Text;
SubmissionInfo.CommonDiscInfo.CommentsSpecialFields[SiteCode.DiscHologramID] = Parent.DiscHologramIDTextBox.Text;
SubmissionInfo.CommonDiscInfo.CommentsSpecialFields[SiteCode.DNASDiscID] = Parent.DNASDiscIDTextBox.Text;
SubmissionInfo.CommonDiscInfo.CommentsSpecialFields[SiteCode.ISBN] = Parent.ISBNTextBox.Text;
@@ -798,7 +809,7 @@ namespace MPF.UI.Core.ViewModels
}
}
#endregion
#endregion
#region Event Handlers

View File

@@ -0,0 +1,293 @@
using System;
using System.IO;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Documents;
using System.Windows.Media;
using MPF.Core.Data;
using MPF.UI.Core.UserControls;
namespace MPF.UI.Core.ViewModels
{
public class LogViewModel
{
#region Fields
/// <summary>
/// Parent LogOutput object
/// </summary>
public LogOutput Parent { get; private set; }
#endregion
#region Private State Variables
/// <summary>
/// Paragraph backing the log
/// </summary>
private readonly Paragraph _paragraph;
/// <summary>
/// Cached value of the last line written
/// </summary>
private Run lastLine = null;
/// <summary>
/// Queue of items that need to be logged
/// </summary>
private readonly ProcessingQueue<LogLine> logQueue;
#endregion
/// <summary>
/// Constructor
/// </summary>
public LogViewModel(LogOutput parent)
{
Parent = parent;
// Add handlers
Parent.OutputViewer.SizeChanged += OutputViewerSizeChanged;
Parent.Output.TextChanged += OnTextChanged;
Parent.ClearButton.Click += OnClearButton;
Parent.SaveButton.Click += OnSaveButton;
// Update the internal state
var document = new FlowDocument()
{
Background = new SolidColorBrush(Color.FromArgb(0xFF, 0x20, 0x20, 0x20))
};
_paragraph = new Paragraph();
document.Blocks.Add(_paragraph);
Parent.Output.Document = document;
logQueue = new ProcessingQueue<LogLine>(ProcessLogLine);
}
#region Logging
/// <summary>
/// Log level for output
/// </summary>
public enum LogLevel
{
USER,
VERBOSE,
ERROR,
SECRET,
}
/// <summary>
/// Log line wrapper
/// </summary>
private struct LogLine
{
public readonly string Text;
public readonly LogLevel LogLevel;
public LogLine(string text, LogLevel logLevel)
{
this.Text = text;
this.LogLevel = logLevel;
}
/// <summary>
/// Get the foreground Brush for the current LogLevel
/// </summary>
/// <returns>Brush representing the color</returns>
public Brush GetForegroundColor()
{
switch (this.LogLevel)
{
case LogLevel.SECRET:
return Brushes.Blue;
case LogLevel.ERROR:
return Brushes.Red;
case LogLevel.VERBOSE:
return Brushes.Yellow;
case LogLevel.USER:
default:
return Brushes.White;
}
}
/// <summary>
/// Generate a Run object from the current LogLine
/// </summary>
/// <returns>Run object based on internal values</returns>
public Run GenerateRun()
{
return new Run { Text = this.Text, Foreground = GetForegroundColor() };
}
}
/// <summary>
/// Enqueue text to the log
/// </summary>
/// <param name="text">Text to write to the log</param>
public void Log(string text) => LogInternal(text, LogLevel.USER);
/// <summary>
/// Enqueue text with a newline to the log
/// </summary>
/// <param name="text">Text to write to the log</param>
public void LogLn(string text) => Log(text + "\n");
/// <summary>
/// Enqueue error text to the log
/// </summary>
/// <param name="text">Text to write to the log</param>
public void ErrorLog(string text) => LogInternal(text, LogLevel.ERROR);
/// <summary>
/// Enqueue error text with a newline to the log
/// </summary>
/// <param name="text">Text to write to the log</param>
public void ErrorLogLn(string text) => ErrorLog(text + "\n");
/// <summary>
/// Enqueue secret text to the log
/// </summary>
/// <param name="text">Text to write to the log</param>
public void SecretLog(string text) => LogInternal(text, LogLevel.SECRET);
/// <summary>
/// Enqueue secret text with a newline to the log
/// </summary>
/// <param name="text">Text to write to the log</param>
public void SecretLogLn(string text) => SecretLog(text + "\n");
/// <summary>
/// Enqueue verbose text to the log
/// </summary>
/// <param name="text">Text to write to the log</param>
public void VerboseLog(string text) => LogInternal(text, LogLevel.VERBOSE);
/// <summary>
/// Enqueue verbose text with a newline to the log
/// </summary>
/// <param name="text">Text to write to the log</param>
public void VerboseLogLn(string text) => VerboseLog(text + "\n");
/// <summary>
/// Reset the progress bar state
/// </summary>
public void ResetProgressBar()
{
Parent.Dispatcher.Invoke(() =>
{
Parent.ProgressBar.Value = 0;
Parent.ProgressLabel.Text = string.Empty;
});
}
/// <summary>
/// Enqueue text to the log with formatting
/// </summary>
/// <param name="text">Text to write to the log</param>
/// <param name="logLevel">LogLevel for the log</param>
private void LogInternal(string text, LogLevel logLevel)
{
// Null text gets ignored
if (text == null)
return;
// Enqueue the text
logQueue.Enqueue(new LogLine(text, logLevel));
}
/// <summary>
/// Process the log lines in the queue
/// </summary>
/// <param name="nextLogLine">LogLine item to process</param>
private void ProcessLogLine(LogLine nextLogLine)
{
// Null text gets ignored
string nextText = Parent.Dispatcher.Invoke(() => nextLogLine.Text);
if (nextText == null)
return;
try
{
if (nextText.StartsWith("\r"))
ReplaceLastLine(nextLogLine);
else
AppendToTextBox(nextLogLine);
}
catch (Exception ex)
{
// In the event that something fails horribly, we want to log
AppendToTextBox(new LogLine(ex.ToString(), LogLevel.ERROR));
}
}
/// <summary>
/// Append log line to the log text box
/// </summary>
/// <param name="logLine">LogLine value to append</param>
private void AppendToTextBox(LogLine logLine)
{
Parent.Dispatcher.Invoke(() =>
{
var run = logLine.GenerateRun();
_paragraph.Inlines.Add(run);
lastLine = run;
});
}
/// <summary>
/// Replace the last line written to the log text box
/// </summary>
/// <param name="logLine">LogLine value to append</param>
private void ReplaceLastLine(LogLine logLine)
{
Parent.Dispatcher.Invoke(() =>
{
lastLine.Text = logLine.Text;
lastLine.Foreground = logLine.GetForegroundColor();
});
}
#endregion
#region Helpers
/// <summary>
/// Scroll the current view to the bottom
/// </summary>
public void ScrollToBottom()
{
Parent.OutputViewer.ScrollToBottom();
}
#endregion
#region EventHandlers
private void OnClearButton(object sender, EventArgs e)
{
_paragraph.Inlines.Clear();
ResetProgressBar();
}
private void OnSaveButton(object sender, EventArgs e)
{
using (StreamWriter tw = new StreamWriter(File.OpenWrite("console.log")))
{
foreach (var inline in _paragraph.Inlines)
{
if (inline is Run run)
tw.Write(run.Text);
}
}
}
private void OnTextChanged(object sender, TextChangedEventArgs e) =>
ScrollToBottom();
private void OutputViewerSizeChanged(object sender, SizeChangedEventArgs e) =>
ScrollToBottom();
#endregion
}
}

View File
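The LogViewModel added above is the slimmed-down replacement for the Matcher-driven version removed near the end of this diff: it simply queues LogLine entries and renders them as colored Runs, with a leading carriage return signalling an overwrite of the previous line. A rough usage sketch, assuming a LogOutput control named `logOutput` that is already constructed on the UI thread and not yet wired to a view model (WPF plumbing omitted):

```csharp
using MPF.UI.Core.UserControls;
using MPF.UI.Core.ViewModels;

// logOutput: an existing LogOutput user control
var vm = new LogViewModel(logOutput);

vm.LogLn("Starting dump...");           // USER level, white
vm.VerboseLogLn("Drive: ATAPI 1.23");   // VERBOSE level, yellow
vm.ErrorLogLn("Read error at LBA 16");  // ERROR level, red

// A leading '\r' replaces the last Run instead of appending a new one
vm.Log("\rProgress: 42%");
```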

@@ -7,11 +7,11 @@ using System.Windows;
using System.Windows.Forms;
using MPF.Core.Data;
using MPF.UI.Core.ComboBoxItems;
using MPF.Windows;
using RedumpLib.Web;
using MPF.UI.Core.Windows;
using SabreTools.RedumpLib.Web;
using WPFCustomMessageBox;
namespace MPF.UI.ViewModels
namespace MPF.UI.Core.ViewModels
{
public class OptionsViewModel
{
@@ -51,15 +51,14 @@ namespace MPF.UI.ViewModels
/// <summary>
/// Constructor
/// </summary>
public OptionsViewModel(OptionsWindow parent)
public OptionsViewModel(OptionsWindow parent, Options baseOptions)
{
Parent = parent;
Options = App.Options.Clone() as Options;
Options = new Options(baseOptions);
// Add handlers
Parent.AaruPathButton.Click += BrowseForPathClick;
Parent.DiscImageCreatorPathButton.Click += BrowseForPathClick;
Parent.DDPathButton.Click += BrowseForPathClick;
Parent.DefaultOutputPathButton.Click += BrowseForPathClick;
Parent.AcceptButton.Click += OnAcceptClick;
@@ -105,7 +104,7 @@ namespace MPF.UI.ViewModels
/// </summary>
private static List<Element<InternalProgram>> PopulateInternalPrograms()
{
var internalPrograms = new List<InternalProgram> { InternalProgram.DiscImageCreator, InternalProgram.Aaru, InternalProgram.Redumper, InternalProgram.DD };
var internalPrograms = new List<InternalProgram> { InternalProgram.DiscImageCreator, InternalProgram.Aaru, InternalProgram.Redumper };
return internalPrograms.Select(ip => new Element<InternalProgram>(ip)).ToList();
}
@@ -188,13 +187,13 @@ namespace MPF.UI.ViewModels
/// <summary>
/// Test Redump login credentials
/// </summary>
#if NET48 || NETSTANDARD2_1
#if NET48
private bool? TestRedumpLogin()
#else
private async Task<bool?> TestRedumpLogin()
#endif
{
#if NET48 || NETSTANDARD2_1
#if NET48
(bool? success, string message) = RedumpWebClient.ValidateCredentials(Parent.RedumpUsernameTextBox.Text, Parent.RedumpPasswordBox.Password);
#else
(bool? success, string message) = await RedumpHttpClient.ValidateCredentials(Parent.RedumpUsernameTextBox.Text, Parent.RedumpPasswordBox.Password);
@@ -265,7 +264,7 @@ namespace MPF.UI.ViewModels
/// <summary>
/// Test Redump credentials for validity
/// </summary>
#if NET48 || NETSTANDARD2_1
#if NET48
private void OnRedumpTestClick(object sender, EventArgs e) =>
TestRedumpLogin();
#else

View File
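OptionsViewModel now receives the active Options explicitly and copies them with `new Options(baseOptions)` instead of cloning a global `App.Options`, so the dialog edits a private copy until the user accepts. The Options copy constructor itself is outside this diff; a hypothetical sketch of the pattern it relies on (only RedumperPath and DefaultOutputPath are taken from the diff, the rest is illustrative):

```csharp
public class Options
{
    public string RedumperPath { get; set; }
    public string DefaultOutputPath { get; set; }
    // ... remaining settings elided ...

    public Options() { }

    // Copy constructor: hand the dialog an independent copy so cancelling
    // leaves the caller's settings untouched
    public Options(Options other)
    {
        RedumperPath = other.RedumperPath;
        DefaultOutputPath = other.DefaultOutputPath;
        // ... copy remaining settings ...
    }
}
```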

@@ -194,6 +194,7 @@
HorizontalAlignment="Stretch" VerticalAlignment="Stretch">
<StackPanel Orientation="Vertical">
<controls:UserInput x:Name="BBFCRegistrationNumberTextBox" Label="BBFC Reg. No."/>
<controls:UserInput x:Name="CDProjektIDTextBox" Label="CD Projekt ID"/>
<controls:UserInput x:Name="DiscHologramIDTextBox" Label="Disc Hologram ID"/>
<controls:UserInput x:Name="DNASDiscIDTextBox" Label="DNAS Disc ID"/>
<controls:UserInput x:Name="ISBNTextBox" Label="ISBN"/>

View File

@@ -1,6 +1,6 @@
using MPF.Core.Data;
using MPF.UI.Core.ViewModels;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.UI.Core.Windows
{

View File

@@ -1,11 +1,11 @@
<coreWindows:WindowBase x:Class="MPF.Windows.MainWindow"
<coreWindows:WindowBase x:Class="MPF.UI.Core.Windows.MainWindow"
xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
xmlns:d="http://schemas.microsoft.com/expression/blend/2008"
xmlns:mc="http://schemas.openxmlformats.org/markup-compatibility/2006"
xmlns:controls="clr-namespace:MPF.UserControls"
xmlns:coreWindows="clr-namespace:MPF.UI.Core.Windows;assembly=MPF.UI.Core"
xmlns:viewModels="clr-namespace:MPF.UI.ViewModels"
xmlns:controls="clr-namespace:MPF.UI.Core.UserControls"
xmlns:coreWindows="clr-namespace:MPF.UI.Core.Windows"
xmlns:viewModels="clr-namespace:MPF.UI.Core.ViewModels"
mc:Ignorable="d"
Title="Media Preservation Frontend" Width="600" WindowStyle="None"
WindowStartupLocation="CenterScreen" ResizeMode="CanMinimize" SizeToContent="Height"

View File

@@ -1,8 +1,7 @@
using System;
using MPF.UI.Core.Windows;
using MPF.UI.ViewModels;
using MPF.UI.Core.ViewModels;
namespace MPF.Windows
namespace MPF.UI.Core.Windows
{
public partial class MainWindow : WindowBase
{
@@ -22,7 +21,7 @@ namespace MPF.Windows
protected override void OnContentRendered(EventArgs e)
{
base.OnContentRendered(e);
MainViewModel.Init();
MainViewModel.Init(this);
}
}
}

View File

@@ -1,10 +1,10 @@
<coreWindows:WindowBase x:Class="MPF.Windows.OptionsWindow"
<coreWindows:WindowBase x:Class="MPF.UI.Core.Windows.OptionsWindow"
xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
xmlns:d="http://schemas.microsoft.com/expression/blend/2008"
xmlns:mc="http://schemas.openxmlformats.org/markup-compatibility/2006"
xmlns:local="clr-namespace:MPF"
xmlns:coreWindows="clr-namespace:MPF.UI.Core.Windows;assembly=MPF.UI.Core"
xmlns:core="clr-namespace:MPF.UI.Core"
xmlns:coreWindows="clr-namespace:MPF.UI.Core.Windows"
mc:Ignorable="d"
Title="Options" Width="515.132" WindowStyle="None"
WindowStartupLocation="CenterOwner" ResizeMode="CanMinimize" SizeToContent="Height"
@@ -108,23 +108,18 @@
<Button x:Name="DiscImageCreatorPathButton" Grid.Row="1" Grid.Column="2" Height="22" Width="22" Content="..."
Style="{DynamicResource CustomButtonStyle}" />
<Label Grid.Row="2" Grid.Column="0" VerticalAlignment="Center" HorizontalAlignment="Right" Content="DD Path" />
<TextBox x:Name="DDPathTextBox" Grid.Row="2" Grid.Column="1" Height="22" HorizontalAlignment="Stretch" Text="{Binding Options.DDPath}" VerticalContentAlignment="Center" />
<Button x:Name="DDPathButton" Grid.Row="2" Grid.Column="2" Height="22" Width="22" Content="..."
<Label Grid.Row="2" Grid.Column="0" VerticalAlignment="Center" HorizontalAlignment="Right" Content="Redumper Path" />
<TextBox x:Name="RedumperPathTextBox" Grid.Row="2" Grid.Column="1" Height="22" HorizontalAlignment="Stretch" Text="{Binding Options.RedumperPath}" VerticalContentAlignment="Center" />
<Button x:Name="RedumperPathButton" Grid.Row="2" Grid.Column="2" Height="22" Width="22" Content="..."
Style="{DynamicResource CustomButtonStyle}" />
<Label Grid.Row="3" Grid.Column="0" VerticalAlignment="Center" HorizontalAlignment="Right" Content="Redumper Path" />
<TextBox x:Name="RedumperPathTextBox" Grid.Row="3" Grid.Column="1" Height="22" HorizontalAlignment="Stretch" Text="{Binding Options.RedumperPath}" VerticalContentAlignment="Center" />
<Button x:Name="RedumperPathButton" Grid.Row="3" Grid.Column="2" Height="22" Width="22" Content="..."
Style="{DynamicResource CustomButtonStyle}" />
<Label Grid.Row="4" Grid.Column="0" VerticalAlignment="Center" HorizontalAlignment="Right" Content="Default Dumping Program" />
<ComboBox x:Name="InternalProgramComboBox" Grid.Row="4" Grid.Column="1" Height="22" HorizontalAlignment="Stretch"
<Label Grid.Row="3" Grid.Column="0" VerticalAlignment="Center" HorizontalAlignment="Right" Content="Default Dumping Program" />
<ComboBox x:Name="InternalProgramComboBox" Grid.Row="3" Grid.Column="1" Height="22" HorizontalAlignment="Stretch"
ItemsSource="{Binding InternalPrograms}" Style="{DynamicResource CustomComboBoxStyle}" />
<Label Grid.Row="5" Grid.Column="0" VerticalAlignment="Center" HorizontalAlignment="Right" Content="Default Output Path" />
<TextBox x:Name="DefaultOutputPathTextBox" Grid.Row="5" Grid.Column="1" Height="22" HorizontalAlignment="Stretch" Text="{Binding Options.DefaultOutputPath}" VerticalContentAlignment="Center" />
<Button x:Name="DefaultOutputPathButton" Grid.Row="5" Grid.Column="2" Height="22" Width="22" Content="..."
<Label Grid.Row="4" Grid.Column="0" VerticalAlignment="Center" HorizontalAlignment="Right" Content="Default Output Path" />
<TextBox x:Name="DefaultOutputPathTextBox" Grid.Row="4" Grid.Column="1" Height="22" HorizontalAlignment="Stretch" Text="{Binding Options.DefaultOutputPath}" VerticalContentAlignment="Center" />
<Button x:Name="DefaultOutputPathButton" Grid.Row="4" Grid.Column="2" Height="22" Width="22" Content="..."
Style="{DynamicResource CustomButtonStyle}" />
</Grid>
</TabItem>
@@ -240,28 +235,28 @@
<Label Grid.Row="0" Grid.Column="0" Content="CD" />
<Slider x:Name="DumpSpeedCDSlider" Grid.Row="0" Grid.Column="1" Minimum="1" Maximum="72" IsSnapToTickEnabled="True" TickPlacement="BottomRight"
Ticks="{Binding Source={x:Static local:Constants.SpeedsForCDAsCollection}}"
Ticks="{Binding Source={x:Static core:Constants.SpeedsForCDAsCollection}}"
Value="{Binding Options.PreferredDumpSpeedCD}" />
<TextBox x:Name="DumpSpeedCDTextBox" Grid.Row="0" Grid.Column="2" Width="22" Height="22" TextAlignment="Center" IsReadOnly="True" IsReadOnlyCaretVisible="False" VerticalAlignment="Center"
Text="{Binding ElementName=DumpSpeedCDSlider, Path=Value, UpdateSourceTrigger=PropertyChanged}" Background="LightGray" Foreground="Gray"/>
<Label Grid.Row="1" Grid.Column="0" Content="DVD" />
<Slider x:Name="DumpSpeedDVDSlider" Grid.Row="1" Grid.Column="1" Minimum="1" Maximum="24" IsSnapToTickEnabled="True" TickPlacement="BottomRight"
Ticks="{Binding Source={x:Static local:Constants.SpeedsForDVDAsCollection}}"
Ticks="{Binding Source={x:Static core:Constants.SpeedsForDVDAsCollection}}"
Value="{Binding Options.PreferredDumpSpeedDVD}" />
<TextBox x:Name="DumpSpeedDVDTextBox" Grid.Row="1" Grid.Column="2" Width="22" Height="22" TextAlignment="Center" IsReadOnly="True" IsReadOnlyCaretVisible="False" VerticalAlignment="Center"
Text="{Binding ElementName=DumpSpeedDVDSlider, Path=Value, UpdateSourceTrigger=PropertyChanged}" Background="LightGray" Foreground="Gray"/>
<Label Grid.Row="2" Grid.Column="0" Content="HD-DVD" />
<Slider x:Name="DumpSpeedHDDVDSlider" Grid.Row="2" Grid.Column="1" Minimum="1" Maximum="24" IsSnapToTickEnabled="True" TickPlacement="BottomRight"
Ticks="{Binding Source={x:Static local:Constants.SpeedsForHDDVDAsCollection}}"
Ticks="{Binding Source={x:Static core:Constants.SpeedsForHDDVDAsCollection}}"
Value="{Binding Options.PreferredDumpSpeedHDDVD}" />
<TextBox x:Name="DumpSpeedHDDVDTextBox" Grid.Row="2" Grid.Column="2" Width="22" Height="22" TextAlignment="Center" IsReadOnly="True" IsReadOnlyCaretVisible="False" VerticalAlignment="Center"
Text="{Binding ElementName=DumpSpeedHDDVDSlider, Path=Value, UpdateSourceTrigger=PropertyChanged}" Background="LightGray" Foreground="Gray"/>
<Label Grid.Row="3" Grid.Column="0" Content="BD" />
<Slider x:Name="DumpSpeedBDSlider" Grid.Row="3" Grid.Column="1" Minimum="1" Maximum="16" IsSnapToTickEnabled="True" TickPlacement="BottomRight"
Ticks="{Binding Source={x:Static local:Constants.SpeedsForBDAsCollection}}"
Ticks="{Binding Source={x:Static core:Constants.SpeedsForBDAsCollection}}"
Value="{Binding Options.PreferredDumpSpeedBD}" />
<TextBox x:Name="DumpSpeedBDTextBox" Grid.Row="3" Grid.Column="2" Width="22" Height="22" TextAlignment="Center" IsReadOnly="True" IsReadOnlyCaretVisible="False" VerticalAlignment="Center"
Text="{Binding ElementName=DumpSpeedBDSlider, Path=Value, UpdateSourceTrigger=PropertyChanged}" Background="LightGray" Foreground="Gray"/>
@@ -400,16 +395,6 @@
IsChecked="{Binding Options.OpenLogWindowAtStartup}"
ToolTip="Open the log panel when the program launches" Margin="0,4"
/>
<CheckBox VerticalAlignment="Center" Content="Enable Log Formatting"
IsChecked="{Binding Options.EnableLogFormatting}"
ToolTip="Format log lines written to the log, including overwriting previous lines on match. Disable this if you have a weaker system as there is an increased processing load otherwise." Margin="0,4"
/>
<CheckBox VerticalAlignment="Center" Content="Enable Progress Processing"
IsChecked="{Binding Options.EnableProgressProcessing}" IsEnabled="{Binding Options.EnableLogFormatting}"
ToolTip="Process lines written to the log to update the progress bar. Disable this if you have a weaker system as there is an increased processing load otherwise." Margin="0,4"
/>
</UniformGrid>
</TabItem>

View File

@@ -1,7 +1,7 @@
using MPF.UI.Core.Windows;
using MPF.UI.ViewModels;
using MPF.Core.Data;
using MPF.UI.Core.ViewModels;
namespace MPF.Windows
namespace MPF.UI.Core.Windows
{
/// <summary>
/// Interaction logic for OptionsWindow.xaml
@@ -16,10 +16,10 @@ namespace MPF.Windows
/// <summary>
/// Constructor
/// </summary>
public OptionsWindow()
public OptionsWindow(Options options)
{
InitializeComponent();
DataContext = new OptionsViewModel(this);
DataContext = new OptionsViewModel(this, options);
#if NET6_0_OR_GREATER
this.SkipMediaTypeDetectionCheckBox.IsEnabled = false;

MPF.sln (13 changed lines)
View File

@@ -23,10 +23,6 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution
publish-win.bat = publish-win.bat
EndProjectSection
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "RedumpLib", "RedumpLib\RedumpLib.csproj", "{13574913-A426-4644-9955-F49AD0876E5F}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "MPF.CueSheets", "MPF.CueSheets\MPF.CueSheets.csproj", "{F2C12798-DF53-4D4C-A55B-F5A77F29D6B1}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "MPF.Core", "MPF.Core\MPF.Core.csproj", "{70B1265D-FE49-472A-A83D-0B462152D37A}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "MPF.Modules", "MPF.Modules\MPF.Modules.csproj", "{8A4254BD-552F-4238-B8EB-D59AACD768B9}"
@@ -57,14 +53,6 @@ Global
{8CFDE289-E171-4D49-A40D-5293265C1253}.Debug|Any CPU.Build.0 = Debug|Any CPU
{8CFDE289-E171-4D49-A40D-5293265C1253}.Release|Any CPU.ActiveCfg = Release|Any CPU
{8CFDE289-E171-4D49-A40D-5293265C1253}.Release|Any CPU.Build.0 = Release|Any CPU
{13574913-A426-4644-9955-F49AD0876E5F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{13574913-A426-4644-9955-F49AD0876E5F}.Debug|Any CPU.Build.0 = Debug|Any CPU
{13574913-A426-4644-9955-F49AD0876E5F}.Release|Any CPU.ActiveCfg = Release|Any CPU
{13574913-A426-4644-9955-F49AD0876E5F}.Release|Any CPU.Build.0 = Release|Any CPU
{F2C12798-DF53-4D4C-A55B-F5A77F29D6B1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{F2C12798-DF53-4D4C-A55B-F5A77F29D6B1}.Debug|Any CPU.Build.0 = Debug|Any CPU
{F2C12798-DF53-4D4C-A55B-F5A77F29D6B1}.Release|Any CPU.ActiveCfg = Release|Any CPU
{F2C12798-DF53-4D4C-A55B-F5A77F29D6B1}.Release|Any CPU.Build.0 = Release|Any CPU
{70B1265D-FE49-472A-A83D-0B462152D37A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{70B1265D-FE49-472A-A83D-0B462152D37A}.Debug|Any CPU.Build.0 = Debug|Any CPU
{70B1265D-FE49-472A-A83D-0B462152D37A}.Release|Any CPU.ActiveCfg = Release|Any CPU
@@ -86,7 +74,6 @@ Global
{7CC064D2-38AB-4A05-8519-28660DE4562A} = {4160167D-681D-480B-ABC6-06AC869E5769}
{51AB0928-13F9-44BF-A407-B6957A43A056} = {4160167D-681D-480B-ABC6-06AC869E5769}
{8CFDE289-E171-4D49-A40D-5293265C1253} = {4160167D-681D-480B-ABC6-06AC869E5769}
{F2C12798-DF53-4D4C-A55B-F5A77F29D6B1} = {4160167D-681D-480B-ABC6-06AC869E5769}
{70B1265D-FE49-472A-A83D-0B462152D37A} = {4160167D-681D-480B-ABC6-06AC869E5769}
{8A4254BD-552F-4238-B8EB-D59AACD768B9} = {4160167D-681D-480B-ABC6-06AC869E5769}
{EA3768DB-694A-4653-82E4-9FF71B8963F3} = {4160167D-681D-480B-ABC6-06AC869E5769}

View File

@@ -2,7 +2,7 @@
xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
xmlns:Themes="clr-namespace:Microsoft.Windows.Themes;assembly=PresentationFramework.Aero"
xmlns:windows="clr-namespace:MPF.Windows"
xmlns:windows="clr-namespace:MPF.UI.Core.Windows;assembly=MPF.UI.Core"
x:Class="MPF.App">
<Application.MainWindow>
<windows:MainWindow Visibility="Visible"/>

View File

@@ -1,8 +1,4 @@
using System.Windows;
using MPF.Core.Data;
using MPF.Core.Utilities;
using MPF.UI.ViewModels;
using MPF.Windows;
namespace MPF
{
@@ -16,46 +12,5 @@ namespace MPF
/// </remarks>
public partial class App : Application
{
/// <summary>
/// Static application instance for reference
/// </summary>
private static App _appInstance;
/// <summary>
/// Read-only access to the current main window
/// </summary>
public static MainWindow Instance => _appInstance.MainWindow as MainWindow;
/// <summary>
/// Read-only access to the current log window
/// </summary>
public static LogViewModel Logger => Instance.LogOutput.LogViewModel;
/// <summary>
/// Access to the current options
/// </summary>
public static Options Options
{
get => _options;
set
{
_options = value;
OptionsLoader.SaveToConfig(_options);
}
}
/// <summary>
/// Internal reference to Options
/// </summary>
private static Options _options;
/// <summary>
/// Constructor
/// </summary>
public App()
{
_appInstance = this;
_options = OptionsLoader.LoadFromConfig();
}
}
}

View File

@@ -1,4 +0,0 @@
using System.Runtime.CompilerServices;
// Anything marked as internal can be used by the test methods
[assembly: InternalsVisibleTo("MPF.Test")]

View File

@@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk.WindowsDesktop">
<PropertyGroup>
<TargetFrameworks>net48;net6.0-windows</TargetFrameworks>
<TargetFrameworks>net48;net6.0-windows;net7.0-windows</TargetFrameworks>
<RuntimeIdentifiers>win7-x64;win8-x64;win81-x64;win10-x64</RuntimeIdentifiers>
<OutputType>WinExe</OutputType>
<UseWindowsForms>true</UseWindowsForms>
@@ -13,11 +13,10 @@
<Authors>Matt Nadareski;ReignStumble;Jakz</Authors>
<Copyright>Copyright (c)2019-2023</Copyright>
<RepositoryUrl>https://github.com/SabreTools/MPF</RepositoryUrl>
<Version>2.6.3</Version>
<AssemblyVersion>$(Version)</AssemblyVersion>
<FileVersion>$(Version)</FileVersion>
<Version>2.6.4</Version>
<IncludeSource>true</IncludeSource>
<IncludeSymbols>true</IncludeSymbols>
<InternalsVisibleTo>MPF.Test</InternalsVisibleTo>
</PropertyGroup>
<PropertyGroup>
@@ -26,10 +25,25 @@
<NrtShowRevision>false</NrtShowRevision>
</PropertyGroup>
<ItemGroup>
<Resource Include="Images\Icon.ico" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\MPF.Core\MPF.Core.csproj" />
<ProjectReference Include="..\MPF.Library\MPF.Library.csproj" />
<ProjectReference Include="..\MPF.UI.Core\MPF.UI.Core.csproj" />
</ItemGroup>
<ItemGroup>
<Reference Include="PresentationFramework.Aero" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="BurnOutSharp" PrivateAssets="build; analyzers" ExcludeAssets="contentFiles" Version="2.8.0" GeneratePathProperty="true">
<IncludeAssets>runtime; compile; build; native; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="SabreTools.RedumpLib" Version="1.1.1" />
<PackageReference Include="System.Configuration.ConfigurationManager" Version="7.0.0" />
<PackageReference Include="Unclassified.NetRevisionTask" Version="0.4.3">
<PrivateAssets>all</PrivateAssets>
@@ -41,19 +55,4 @@
<Content Include="$(PkgBurnOutSharp)\content\**" PackagePath="contentFiles\any\any;content" CopyToOutputDirectory="Always" PackageCopyToOutput="true" />
</ItemGroup>
<ItemGroup>
<Resource Include="Images\Icon.ico" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\MPF.Core\MPF.Core.csproj" />
<ProjectReference Include="..\MPF.Library\MPF.Library.csproj" />
<ProjectReference Include="..\MPF.UI.Core\MPF.UI.Core.csproj" />
<ProjectReference Include="..\RedumpLib\RedumpLib.csproj" />
</ItemGroup>
<ItemGroup>
<Reference Include="PresentationFramework.Aero" />
</ItemGroup>
</Project>

View File

@@ -1,577 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Documents;
using System.Windows.Media;
using MPF.Core.Data;
using MPF.UserControls;
namespace MPF.UI.ViewModels
{
public class LogViewModel
{
#region Fields
/// <summary>
/// Parent OptionsWindow object
/// </summary>
public LogOutput Parent { get; private set; }
#endregion
#region Private State Variables
/// <summary>
/// Paragraph backing the log
/// </summary>
private readonly Paragraph _paragraph;
/// <summary>
/// Cached value of the last line written
/// </summary>
private Run lastLine = null;
/// <summary>
/// Queue of items that need to be logged
/// </summary>
private readonly ProcessingQueue<LogLine> logQueue;
/// <summary>
/// List of Matchers for progress tracking
/// </summary>
private readonly List<Matcher?> _matchers;
/// <summary>
/// Cached value of the last matcher used
/// </summary>
private Matcher? lastUsedMatcher = null;
/// <summary>
/// Regex pattern to find DiscImageCreator progress messages
/// </summary>
private const string DiscImageCreatorProgressPattern = @"\s*(\d+)\/\s*(\d+)$";
#endregion
/// <summary>
/// Constructor
/// </summary>
public LogViewModel(LogOutput parent)
{
Parent = parent;
// Add handlers
Parent.OutputViewer.SizeChanged += OutputViewerSizeChanged;
Parent.Output.TextChanged += OnTextChanged;
Parent.ClearButton.Click += OnClearButton;
Parent.SaveButton.Click += OnSaveButton;
// Update the internal state
var document = new FlowDocument()
{
Background = new SolidColorBrush(Color.FromArgb(0xFF, 0x20, 0x20, 0x20))
};
_paragraph = new Paragraph();
document.Blocks.Add(_paragraph);
Parent.Output.Document = document;
// TODO: Can we dynamically add matchers *only* during dumping?
_matchers = new List<Matcher?>();
AddAaruMatchers();
AddDiscImageCreatorMatchers();
logQueue = new ProcessingQueue<LogLine>(ProcessLogLine);
}
#region Matching
/// <summary>
/// Matching wrapper
/// </summary>
private struct Matcher
{
private readonly string prefix;
private readonly Regex regex;
private readonly int start;
private readonly string progressBarText;
private readonly Action<Match, string> lambda;
public Matcher(string prefix, string regex, string progressBarText, Action<Match, string> lambda)
{
this.prefix = prefix;
this.regex = new Regex(regex);
this.start = prefix.Length;
this.progressBarText = progressBarText;
this.lambda = lambda;
}
/// <summary>
/// Check if the text matches the prefix
/// </summary>
/// <param name="text">Text to check</param>
/// <returns>True if the line starts with the prefix, false otherwise</returns>
public bool Matches(string text) => text.StartsWith(prefix);
/// <summary>
/// Generate a Match and apply the lambda
/// </summary>
/// <param name="text">Text to match and apply from</param>
public void Apply(string text)
{
Match match = regex?.Match(text, start);
lambda?.Invoke(match, progressBarText);
}
}
/// <summary>
/// Add all Matchers for Aaru
/// </summary>
private void AddAaruMatchers()
{
// TODO: Determine matchers that can be added
}
/// <summary>
/// Add all Matchers for DiscImageCreator
/// </summary>
private void AddDiscImageCreatorMatchers()
{
#region Pre-dump Checking
_matchers.Add(new Matcher(
"Checking EXE",
DiscImageCreatorProgressPattern,
"Checking executables...",
StandardDiscImageCreatorProgress
));
_matchers.Add(new Matcher(
"Checking Pregap sync, msf, mode (LBA)",
@"\s*-(\d+)$",
"Checking Pregap sync, msf, mode",
(match, text) =>
{
Parent.ProgressBar.Value = 0;
Parent.ProgressLabel.Text = text;
}));
_matchers.Add(new Matcher(
"Checking SubQ adr (Track)",
DiscImageCreatorProgressPattern,
"Checking SubQ adr...",
StandardDiscImageCreatorProgress
));
_matchers.Add(new Matcher(
"Checking SubQ ctl (Track)",
DiscImageCreatorProgressPattern,
"Checking SubQ ctl...",
StandardDiscImageCreatorProgress
));
_matchers.Add(new Matcher(
"Checking SubRtoW (Track)",
DiscImageCreatorProgressPattern,
"Checking SubRtoW...",
StandardDiscImageCreatorProgress
));
_matchers.Add(new Matcher(
"Reading DirectoryRecord",
DiscImageCreatorProgressPattern,
"Reading directory records...",
StandardDiscImageCreatorProgress
));
_matchers.Add(new Matcher(
"Scanning sector for anti-mod string (LBA)",
DiscImageCreatorProgressPattern,
"Scanning sectors for anti-mod string...",
StandardDiscImageCreatorProgress
));
#endregion
#region Dumping
_matchers.Add(new Matcher(
@"Creating iso(LBA)",
DiscImageCreatorProgressPattern,
"Creating ISO...",
StandardDiscImageCreatorProgress
));
_matchers.Add(new Matcher(
@"Creating .scm (LBA)",
DiscImageCreatorProgressPattern,
"Creating scrambled image...",
StandardDiscImageCreatorProgress
));
#endregion
#region Post-Dump Processing
_matchers.Add(new Matcher(
"Checking sectors:",
DiscImageCreatorProgressPattern,
"Checking for errors...",
StandardDiscImageCreatorProgress
));
_matchers.Add(new Matcher(
"Creating bin (Track)",
DiscImageCreatorProgressPattern,
"Creating BIN(s)...",
StandardDiscImageCreatorProgress
));
_matchers.Add(new Matcher(
"Creating cue and ccd (Track)",
DiscImageCreatorProgressPattern,
"Creating CUE and CCD...",
StandardDiscImageCreatorProgress
));
_matchers.Add(new Matcher(
"Descrambling data sector of img:",
DiscImageCreatorProgressPattern,
"Descrambling image...",
StandardDiscImageCreatorProgress
));
_matchers.Add(new Matcher(
"Scanning sector (LBA)",
DiscImageCreatorProgressPattern,
"Scanning sectors for protection...",
StandardDiscImageCreatorProgress
));
#endregion
}
#endregion
#region Logging
/// <summary>
/// Log level for output
/// </summary>
public enum LogLevel
{
USER,
VERBOSE,
ERROR,
SECRET,
}
/// <summary>
/// Log line wrapper
/// </summary>
private struct LogLine
{
public readonly string Text;
public readonly LogLevel LogLevel;
public LogLine(string text, LogLevel logLevel)
{
this.Text = text;
this.LogLevel = logLevel;
}
/// <summary>
/// Get the foreground Brush for the current LogLevel
/// </summary>
/// <returns>Brush representing the color</returns>
public Brush GetForegroundColor()
{
switch (this.LogLevel)
{
case LogLevel.SECRET:
return Brushes.Blue;
case LogLevel.ERROR:
return Brushes.Red;
case LogLevel.VERBOSE:
return Brushes.Yellow;
case LogLevel.USER:
default:
return Brushes.White;
}
}
/// <summary>
/// Generate a Run object from the current LogLine
/// </summary>
/// <returns>Run object based on internal values</returns>
public Run GenerateRun()
{
return new Run { Text = this.Text, Foreground = GetForegroundColor() };
}
}
/// <summary>
/// Enqueue text to the log
/// </summary>
/// <param name="text">Text to write to the log</param>
public void Log(string text) => LogInternal(text);
/// <summary>
/// Enqueue text with a newline to the log
/// </summary>
/// <param name="text">Text to write to the log</param>
public void LogLn(string text) => Log(text + "\n");
/// <summary>
/// Enqueue error text to the log
/// </summary>
/// <param name="text">Text to write to the log</param>
public void ErrorLog(string text) => LogInternal(text, LogViewModel.LogLevel.ERROR);
/// <summary>
/// Enqueue error text with a newline to the log
/// </summary>
/// <param name="text">Text to write to the log</param>
public void ErrorLogLn(string text) => ErrorLog(text + "\n");
/// <summary>
/// Enqueue secret text to the log
/// </summary>
/// <param name="text">Text to write to the log</param>
public void SecretLog(string text) => LogInternal(text, LogViewModel.LogLevel.SECRET);
/// <summary>
/// Enqueue secret text with a newline to the log
/// </summary>
/// <param name="text">Text to write to the log</param>
public void SecretLogLn(string text) => SecretLog(text + "\n");
/// <summary>
/// Enqueue verbose text to the log
/// </summary>
/// <param name="text">Text to write to the log</param>
public void VerboseLog(string text) => LogInternal(text, LogViewModel.LogLevel.VERBOSE);
/// <summary>
/// Enqueue verbose text with a newline to the log
/// </summary>
/// <param name="text">Text to write to the log</param>
public void VerboseLogLn(string text) => VerboseLog(text + "\n");
/// <summary>
/// Reset the progress bar state
/// </summary>
public void ResetProgressBar()
{
Parent.Dispatcher.Invoke(() =>
{
Parent.ProgressBar.Value = 0;
Parent.ProgressLabel.Text = string.Empty;
});
}
/// <summary>
/// Enqueue text to the log with formatting
/// </summary>
/// <param name="text">Text to write to the log</param>
/// <param name="logLevel">LogLevel for the log, defaults to USER</param>
private void LogInternal(string text, LogLevel logLevel = LogLevel.USER)
{
// Null text gets ignored
if (text == null)
return;
// If we have verbose logs but not enabled, ignore
if (logLevel == LogLevel.VERBOSE && !App.Options.VerboseLogging)
return;
// Enqueue the text
logQueue.Enqueue(new LogLine(text, logLevel));
}
/// <summary>
/// Process the log lines in the queue
/// </summary>
/// <param name="nextLogLine">LogLine item to process</param>
private void ProcessLogLine(LogLine nextLogLine)
{
// Null text gets ignored
string nextText = Parent.Dispatcher.Invoke(() => nextLogLine.Text);
if (nextText == null)
return;
try
{
// If we're not processing log formatting, just append and continue
if (!App.Options.EnableLogFormatting)
{
if (nextText.StartsWith("\r"))
ReplaceLastLine(nextLogLine);
else
AppendToTextBox(nextLogLine);
return;
}
// Get last line
lastLine = lastLine ?? GetLastLine();
// Always append if there's no previous line
if (lastLine == null)
{
AppendToTextBox(nextLogLine);
lastUsedMatcher = _matchers.FirstOrDefault(m => m?.Matches(nextText) == true);
}
// Return always means overwrite
else if (nextText.StartsWith("\r"))
{
ReplaceLastLine(nextLogLine);
lastUsedMatcher = _matchers.FirstOrDefault(m => m?.Matches(nextText.TrimStart('\r')) == true);
}
// If we have a cached matcher and we match
else if (lastUsedMatcher?.Matches(nextText) == true)
{
ReplaceLastLine(nextLogLine);
}
else
{
// Get the first matching Matcher
var firstMatcher = _matchers.FirstOrDefault(m => m?.Matches(nextText) == true);
if (firstMatcher.HasValue)
{
string lastText = Parent.Dispatcher.Invoke(() => { return lastLine.Text; });
if (firstMatcher.Value.Matches(lastText))
ReplaceLastLine(nextLogLine);
else if (string.IsNullOrWhiteSpace(lastText))
ReplaceLastLine(nextLogLine);
else
AppendToTextBox(nextLogLine);
// Cache the last used Matcher
lastUsedMatcher = firstMatcher;
}
// Default case for all other text
else
{
AppendToTextBox(nextLogLine);
lastUsedMatcher = null;
}
}
// Update the bar if needed
if (App.Options.EnableProgressProcessing)
ProcessStringForProgressBar(nextText, lastUsedMatcher);
}
catch (Exception ex)
{
// In the event that something fails horribly, we want to log
AppendToTextBox(new LogLine(ex.ToString(), LogLevel.ERROR));
}
}
/// <summary>
/// Append log line to the log text box
/// </summary>
/// <param name="logLine">LogLine value to append</param>
private void AppendToTextBox(LogLine logLine)
{
Parent.Dispatcher.Invoke(() =>
{
var run = logLine.GenerateRun();
_paragraph.Inlines.Add(run);
lastLine = run;
});
}
/// <summary>
/// Get the last line written to the log text box
/// </summary>
private Run GetLastLine()
{
return Parent.Dispatcher.Invoke(() =>
{
if (!_paragraph.Inlines.Any())
return null;
return _paragraph.Inlines.LastInline as Run;
});
}
/// <summary>
/// Process text if it should update the progress bar
/// </summary>
/// <param name="text">Text to check and update with</param>
private void ProcessStringForProgressBar(string text, Matcher? matcher)
{
Parent.Dispatcher.Invoke(() => { matcher?.Apply(text); });
}
/// <summary>
/// Replace the last line written to the log text box
/// </summary>
/// <param name="logLine">LogLine value to append</param>
private void ReplaceLastLine(LogLine logLine)
{
Parent.Dispatcher.Invoke(() =>
{
lastLine.Text = logLine.Text;
lastLine.Foreground = logLine.GetForegroundColor();
});
}
#endregion
#region Helpers
/// <summary>
/// Scroll the current view to the bottom
/// </summary>
public void ScrollToBottom()
{
Parent.OutputViewer.ScrollToBottom();
}
#endregion
#region EventHandlers
private void OnClearButton(object sender, EventArgs e)
{
_paragraph.Inlines.Clear();
ResetProgressBar();
}
private void OnSaveButton(object sender, EventArgs e)
{
using (StreamWriter tw = new StreamWriter(File.OpenWrite("console.log")))
{
foreach (var inline in _paragraph.Inlines)
{
if (inline is Run run)
tw.Write(run.Text);
}
}
}
private void OnTextChanged(object sender, TextChangedEventArgs e) =>
ScrollToBottom();
private void OutputViewerSizeChanged(object sender, SizeChangedEventArgs e) =>
ScrollToBottom();
private void StandardDiscImageCreatorProgress(Match match, string text)
{
if (uint.TryParse(match.Groups[1].Value, out uint current) && uint.TryParse(match.Groups[2].Value, out uint total))
{
float percentProgress = (current / (float)total) * 100;
Parent.ProgressBar.Value = percentProgress;
Parent.ProgressLabel.Text = string.Format($"{text} ({percentProgress:N2}%)");
}
}
#endregion
}
}

View File
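For reference, the removed LogViewModel above tied progress updates to DiscImageCreator output via the `\s*(\d+)\/\s*(\d+)$` pattern and the StandardDiscImageCreatorProgress handler. A standalone sketch of that parsing, with an illustrative sample line:

```csharp
using System;
using System.Text.RegularExpressions;

class ProgressParseDemo
{
    // Same pattern the removed code used: "<current>/ <total>" at the end of a line
    private const string DiscImageCreatorProgressPattern = @"\s*(\d+)\/\s*(\d+)$";

    static void Main()
    {
        string line = "Creating iso(LBA)    1000/  333000"; // illustrative sample output
        Match match = Regex.Match(line, DiscImageCreatorProgressPattern);
        if (uint.TryParse(match.Groups[1].Value, out uint current) &&
            uint.TryParse(match.Groups[2].Value, out uint total))
        {
            float percent = current / (float)total * 100;
            Console.WriteLine($"Creating ISO... ({percent:N2}%)"); // drives the progress bar text
        }
    }
}
```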

@@ -1,6 +1,6 @@
# Media Preservation Frontend (MPF)
DiscImageCreator/Aaru UI in C#
Redumper/Aaru/DiscImageCreator UI in C#
[![Build status](https://ci.appveyor.com/api/projects/status/3ldav3v0c373jeqa?svg=true)](https://ci.appveyor.com/project/mnadareski/MPF/build/artifacts)
@@ -15,7 +15,7 @@ For those who like to test the newest features, download the latest AppVeyor WIP
## Media Preservation Frontend (MPF)
MPF is the main, UI-centric application of the MPF suite. This program allows users to use DiscImageCreator, Aaru, Redumper, or dd for Windows in a more user-friendly way. Each backend dumping program is supported as fully as possible to ensure that all information is captured on output. There are many customization options and quality-of-life settings that can be accessed through the Options menu.
MPF is the main, UI-centric application of the MPF suite. This program allows users to use Redumper, Aaru, DiscImageCreator, or dd for Windows in a more user-friendly way. Each backend dumping program is supported as fully as possible to ensure that all information is captured on output. There are many customization options and quality-of-life settings that can be accessed through the Options menu.
### System Requirements
@@ -61,7 +61,7 @@ Choose one of `[win7-x64|win8-x64|win81-x64|win10-x64]` depending on the machine
## Media Preservation Frontend Checker (MPF.Check)
MPF.Check is a commandline-only program that allows users to generate submission information from their personal rips. This program supports the outputs from DiscImageCreator, Aaru, Redumper, dd for Windows, Cleanrip, and UmdImageCreator. Running this program without any parameters will display the help text, including all supported parameters.
MPF.Check is a commandline-only program that allows users to generate submission information from their personal rips. This program supports the outputs from Redumper, Aaru, DiscImageCreator, dd for Windows, Cleanrip, and UmdImageCreator. Running this program without any parameters will display the help text, including all supported parameters.
### System Requirements
@@ -103,28 +103,38 @@ MPF uses some external libraries to assist with additional information gathering
## Contributors
Here are the talented people who have contributed to the project so far:
Here are the talented people who have contributed to the project so far in ways that GitHub doesn't like to track:
- **darksabre76** - Project Lead / Backend Design / UI Maintenance
- **ReignStumble** - Former Project Lead / UI Design
- **Jakz** - Primary Feature Contributor
- **NHellFire** - Feature Contributor
- **Shadów** - UI Support
For all others who have contributed in some way, please see [here](https://github.com/SabreTools/MPF/graphs/contributors).
## Notable Testers
These are the tireless individuals who have dedicated countless hours to help test the many features of MPF and have worked with the development team closely:
- **Dizzzy/user7** - Additional thanks for the original concept
- [**ajshell1**](https://github.com/ajshell1)
- [**Billy**](https://github.com/InternalLoss)
- [**David 'Foxhack' Silva**](https://github.com/FoxhackDN)
- [**ehw**](https://github.com/ehw)
- [**fuzzball**](https://github.com/fuzz6001)
- [**Gameboi64**](https://github.com/gameboi64)
- [**Intothisworld**](https://github.com/Intothisworld)
- [**John Veness**](https://github.com/JohnVeness)
- **Kludge**
- **ajshell1**
- **Whovian**
- **Gameboi64**
- **silasqwerty**
- [**Matt Sephton**](https://github.com/gingerbeardman)
- [**NightsoN Blaze**](https://github.com/nightson)
- [**NovaSAurora**](https://github.com/NovaSAurora)
- [**Seventy7**](https://github.com/7Seventy7) - Additional thanks for the original concept
- [**Silent**](https://github.com/CookiePLMonster)
- [**Terry Janas**](https://github.com/tjanas)
- [**TheRogueArchivist**](https://github.com/TheRogueArchivist)
- [**Whovian9369**](https://github.com/Whovian9369)
## Community Shout-Outs
Thanks to these communities for their use, testing, and feedback. I can't even hope to be able to thank everyone individually.
- **VGPC Discord** - Fast feedback and a lot of testing
- **Redump Community** - Near-daily use to assist with metadata gathering
- [**VGPC Discord**](https://discord.gg/AHTfxQV) - Fast feedback and a lot of testing
- [**Redump Community**](http://redump.org/) - Near-daily use to assist with metadata gathering

View File

@@ -1,48 +0,0 @@
using System;
using System.Linq;
namespace RedumpLib.Attributes
{
public static class AttributeHelper<T>
{
/// <summary>
/// Get the HumanReadableAttribute from a supported value
/// </summary>
/// <param name="value">Value to use</param>
/// <returns>HumanReadableAttribute attached to the value</returns>
public static HumanReadableAttribute GetAttribute(T value)
{
// Null value in, null value out
if (value == null)
return null;
// Current enumeration type
var enumType = typeof(T);
if (Nullable.GetUnderlyingType(enumType) != null)
enumType = Nullable.GetUnderlyingType(enumType);
// If the value returns a null on ToString, just return null
string valueStr = value.ToString();
if (string.IsNullOrWhiteSpace(valueStr))
return null;
// Get the member info array
var memberInfos = enumType?.GetMember(valueStr);
if (memberInfos == null)
return null;
// Get the enum value info from the array, if possible
var enumValueMemberInfo = memberInfos.FirstOrDefault(m => m.DeclaringType == enumType);
if (enumValueMemberInfo == null)
return null;
// Try to get the relevant attribute
var attributes = enumValueMemberInfo.GetCustomAttributes(typeof(HumanReadableAttribute), true);
if (attributes == null)
return null;
// Return the first attribute, if possible
return (HumanReadableAttribute)attributes.FirstOrDefault();
}
}
}

View File

@@ -1,25 +0,0 @@
using System;
namespace RedumpLib.Attributes
{
/// <summary>
/// Generic attribute for human readable values
/// </summary>
public class HumanReadableAttribute : Attribute
{
/// <summary>
/// Item is marked as obsolete or unusable
/// </summary>
public bool Available { get; set; } = true;
/// <summary>
/// Human-readable name of the item
/// </summary>
public string LongName { get; set; }
/// <summary>
/// Internally used name of the item
/// </summary>
public string ShortName { get; set; }
}
}

View File

@@ -1,26 +0,0 @@
namespace RedumpLib.Attributes
{
/// <summary>
/// Attribute specific to Language values
/// </summary>
/// <remarks>
/// Some languages have multiple proper names. Should all be supported?
/// </remarks>
public class LanguageAttribute : HumanReadableAttribute
{
/// <summary>
/// ISO 639-1 Code
/// </summary>
public string TwoLetterCode { get; set; } = null;
/// <summary>
/// ISO 639-2 Code (Standard or Bibliographic)
/// </summary>
public string ThreeLetterCode { get; set; } = null;
/// <summary>
/// ISO 639-2 Code (Terminology)
/// </summary>
public string ThreeLetterCodeAlt { get; set; } = null;
}
}

View File

@@ -1,55 +0,0 @@
using RedumpLib.Data;
namespace RedumpLib.Attributes
{
/// <summary>
/// Attribute specific to Redump System values
/// </summary>
public class SystemAttribute : HumanReadableAttribute
{
/// <summary>
/// Category for the system
/// </summary>
public SystemCategory Category { get; set; }
/// <summary>
/// System is restricted to dumpers
/// </summary>
public bool IsBanned { get; set; } = false;
/// <summary>
/// System has a CUE pack
/// </summary>
public bool HasCues { get; set; } = false;
/// <summary>
/// System has a DAT
/// </summary>
public bool HasDat { get; set; } = false;
/// <summary>
/// System has a decrypted keys pack
/// </summary>
public bool HasDkeys { get; set; } = false;
/// <summary>
/// System has a GDI pack
/// </summary>
public bool HasGdi { get; set; } = false;
/// <summary>
/// System has a keys pack
/// </summary>
public bool HasKeys { get; set; } = false;
/// <summary>
/// System has an LSD pack
/// </summary>
public bool HasLsd { get; set; } = false;
/// <summary>
/// System has an SBI pack
/// </summary>
public bool HasSbi { get; set; } = false;
}
}

View File

@@ -1,26 +0,0 @@
using System;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using RedumpLib.Data;
namespace RedumpLib.Converters
{
/// <summary>
/// Serialize DiscCategory enum values
/// </summary>
public class DiscCategoryConverter : JsonConverter<DiscCategory?>
{
public override bool CanRead { get { return false; } }
public override DiscCategory? ReadJson(JsonReader reader, Type objectType, DiscCategory? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
}
public override void WriteJson(JsonWriter writer, DiscCategory? value, JsonSerializer serializer)
{
JToken t = JToken.FromObject(value.LongName() ?? string.Empty);
t.WriteTo(writer);
}
}
}
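Every converter in this folder follows the same write-only shape: CanRead is false, ReadJson throws, and WriteJson emits a human-readable string instead of the raw enum value. A self-contained sketch of that pattern using an invented enum, so nothing below depends on the RedumpLib extension methods:

using System;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;

public enum Flavor { Vanilla, Chocolate }

public class FlavorConverter : JsonConverter<Flavor?>
{
    public override bool CanRead { get { return false; } }

    public override Flavor? ReadJson(JsonReader reader, Type objectType, Flavor? existingValue, bool hasExistingValue, JsonSerializer serializer)
        => throw new NotImplementedException();

    public override void WriteJson(JsonWriter writer, Flavor? value, JsonSerializer serializer)
    {
        // Same approach as DiscCategoryConverter: write a display string rather than the numeric value
        JToken.FromObject(value?.ToString() ?? string.Empty).WriteTo(writer);
    }
}

In practice the converters are attached with [JsonConverter(typeof(...))] attributes on the SubmissionInfo properties shown further down, so a plain JsonConvert.SerializeObject call picks them up automatically.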

View File

@@ -1,26 +0,0 @@
using System;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using RedumpLib.Data;
namespace RedumpLib.Converters
{
/// <summary>
/// Serialize DiscType enum values
/// </summary>
public class DiscTypeConverter : JsonConverter<DiscType?>
{
public override bool CanRead { get { return false; } }
public override DiscType? ReadJson(JsonReader reader, Type objectType, DiscType? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
}
public override void WriteJson(JsonWriter writer, DiscType? value, JsonSerializer serializer)
{
JToken t = JToken.FromObject(value.LongName() ?? string.Empty);
t.WriteTo(writer);
}
}
}

View File

@@ -1,32 +0,0 @@
using System;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using RedumpLib.Data;
namespace RedumpLib.Converters
{
/// <summary>
/// Serialize Language enum values
/// </summary>
public class LanguageConverter : JsonConverter<Language?[]>
{
public override bool CanRead { get { return false; } }
public override Language?[] ReadJson(JsonReader reader, Type objectType, Language?[] existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
}
public override void WriteJson(JsonWriter writer, Language?[] value, JsonSerializer serializer)
{
JArray array = new JArray();
foreach (var val in value)
{
JToken t = JToken.FromObject(val.ShortName() ?? string.Empty);
array.Add(t);
}
array.WriteTo(writer);
}
}
}

View File

@@ -1,32 +0,0 @@
using System;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using RedumpLib.Data;
namespace RedumpLib.Converters
{
/// <summary>
/// Serialize LanguageSelection enum values
/// </summary>
public class LanguageSelectionConverter : JsonConverter<LanguageSelection?[]>
{
public override bool CanRead { get { return false; } }
public override LanguageSelection?[] ReadJson(JsonReader reader, Type objectType, LanguageSelection?[] existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
}
public override void WriteJson(JsonWriter writer, LanguageSelection?[] value, JsonSerializer serializer)
{
JArray array = new JArray();
foreach (var val in value)
{
JToken t = JToken.FromObject(val.LongName() ?? string.Empty);
array.Add(t);
}
array.WriteTo(writer);
}
}
}

View File

@@ -1,26 +0,0 @@
using System;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using RedumpLib.Data;
namespace RedumpLib.Converters
{
/// <summary>
/// Serialize Region enum values
/// </summary>
public class RegionConverter : JsonConverter<Region?>
{
public override bool CanRead { get { return false; } }
public override Region? ReadJson(JsonReader reader, Type objectType, Region? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
}
public override void WriteJson(JsonWriter writer, Region? value, JsonSerializer serializer)
{
JToken t = JToken.FromObject(value.ShortName() ?? string.Empty);
t.WriteTo(writer);
}
}
}

View File

@@ -1,26 +0,0 @@
using System;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using RedumpLib.Data;
namespace RedumpLib.Converters
{
/// <summary>
/// Serialize RedumpSystem enum values
/// </summary>
public class SystemConverter : JsonConverter<RedumpSystem?>
{
public override bool CanRead { get { return false; } }
public override RedumpSystem? ReadJson(JsonReader reader, Type objectType, RedumpSystem? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
}
public override void WriteJson(JsonWriter writer, RedumpSystem? value, JsonSerializer serializer)
{
JToken t = JToken.FromObject(value.ShortName() ?? string.Empty);
t.WriteTo(writer);
}
}
}

View File

@@ -1,26 +0,0 @@
using System;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using RedumpLib.Data;
namespace RedumpLib.Converters
{
/// <summary>
/// Serialize YesNo enum values
/// </summary>
public class YesNoConverter : JsonConverter<YesNo?>
{
public override bool CanRead { get { return false; } }
public override YesNo? ReadJson(JsonReader reader, Type objectType, YesNo? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
}
public override void WriteJson(JsonWriter writer, YesNo? value, JsonSerializer serializer)
{
JToken t = JToken.FromObject(value.LongName() ?? string.Empty);
t.WriteTo(writer);
}
}
}

View File

@@ -1,306 +0,0 @@
using System.Text.RegularExpressions;
namespace RedumpLib.Data
{
public static class Constants
{
// TODO: Add RegexOptions.Compiled
#region Regular Expressions
/// <summary>
/// Regex matching the added field on a disc page
/// </summary>
public static Regex AddedRegex = new Regex(@"<tr><th>Added</th><td>(.*?)</td></tr>");
/// <summary>
/// Regex matching the barcode field on a disc page
/// </summary>
public static Regex BarcodeRegex = new Regex(@"<tr><th>Barcode</th></tr><tr><td>(.*?)</td></tr>");
/// <summary>
/// Regex matching the BCA field on a disc page
/// </summary>
public static Regex BcaRegex = new Regex(@"<h3>BCA .*?/></h3></td><td .*?></td></tr>"
+ "<tr><th>Row</th><th>Contents</th><th>ASCII</th></tr>"
+ "<tr><td>(?<row1number>.*?)</td><td>(?<row1contents>.*?)</td><td>(?<row1ascii>.*?)</td></tr>"
+ "<tr><td>(?<row2number>.*?)</td><td>(?<row2contents>.*?)</td><td>(?<row2ascii>.*?)</td></tr>"
+ "<tr><td>(?<row3number>.*?)</td><td>(?<row3contents>.*?)</td><td>(?<row3ascii>.*?)</td></tr>"
+ "<tr><td>(?<row4number>.*?)</td><td>(?<row4contents>.*?)</td><td>(?<row4ascii>.*?)</td></tr>", RegexOptions.Singleline);
/// <summary>
/// Regex matching the category field on a disc page
/// </summary>
public static Regex CategoryRegex = new Regex(@"<tr><th>Category</th><td>(.*?)</td></tr>");
/// <summary>
/// Regex matching the comments field on a disc page
/// </summary>
public static Regex CommentsRegex = new Regex(@"<tr><th>Comments</th></tr><tr><td>(.*?)</td></tr>", RegexOptions.Singleline);
/// <summary>
/// Regex matching the contents field on a disc page
/// </summary>
public static Regex ContentsRegex = new Regex(@"<tr><th>Contents</th></tr><tr .*?><td>(.*?)</td></tr>", RegexOptions.Singleline);
/// <summary>
/// Regex matching individual disc links on a results page
/// </summary>
public static Regex DiscRegex = new Regex(@"<a href=""/disc/(\d+)/"">");
/// <summary>
/// Regex matching the disc number or letter field on a disc page
/// </summary>
public static Regex DiscNumberLetterRegex = new Regex(@"\((.*?)\)");
/// <summary>
/// Regex matching the dumpers on a disc page
/// </summary>
public static Regex DumpersRegex = new Regex(@"<a href=""/discs/dumper/(.*?)/"">");
/// <summary>
/// Regex matching the edition field on a disc page
/// </summary>
public static Regex EditionRegex = new Regex(@"<tr><th>Edition</th><td>(.*?)</td></tr>");
/// <summary>
/// Regex matching the error count field on a disc page
/// </summary>
public static Regex ErrorCountRegex = new Regex(@"<tr><th>Errors count</th><td>(.*?)</td></tr>");
/// <summary>
/// Regex matching the foreign title field on a disc page
/// </summary>
public static Regex ForeignTitleRegex = new Regex(@"<h2>(.*?)</h2>");
/// <summary>
/// Regex matching the "full match" ID list from a WIP disc page
/// </summary>
public static Regex FullMatchRegex = new Regex(@"<td class=""static"">full match ids: (.*?)</td>");
/// <summary>
/// Regex matching the languages field on a disc page
/// </summary>
public static Regex LanguagesRegex = new Regex(@"<img src=""/images/languages/(.*?)\.png"" alt="".*?"" title="".*?"" />\s*");
/// <summary>
/// Regex matching the last modified field on a disc page
/// </summary>
public static Regex LastModifiedRegex = new Regex(@"<tr><th>Last modified</th><td>(.*?)</td></tr>");
/// <summary>
/// Regex matching the media field on a disc page
/// </summary>
public static Regex MediaRegex = new Regex(@"<tr><th>Media</th><td>(.*?)</td></tr>");
/// <summary>
/// Regex matching individual WIP disc links on a results page
/// </summary>
public static Regex NewDiscRegex = new Regex(@"<a (style=.*)?href=""/newdisc/(\d+)/"">");
/// <summary>
/// Regex matching the "partial match" ID list from a WIP disc page
/// </summary>
public static Regex PartialMatchRegex = new Regex(@"<td class=""static"">partial match ids: (.*?)</td>");
/// <summary>
/// Regex matching the PVD field on a disc page
/// </summary>
public static Regex PvdRegex = new Regex(@"<h3>Primary Volume Descriptor \(PVD\) <img .*?/></h3></td><td .*?></td></tr>"
+ @"<tr><th>Record / Entry</th><th>Contents</th><th>Date</th><th>Time</th><th>GMT</th></tr>"
+ @"<tr><td>Creation</td><td>(?<creationbytes>.*?)</td><td>(?<creationdate>.*?)</td><td>(?<creationtime>.*?)</td><td>(?<creationtimezone>.*?)</td></tr>"
+ @"<tr><td>Modification</td><td>(?<modificationbytes>.*?)</td><td>(?<modificationdate>.*?)</td><td>(?<modificationtime>.*?)</td><td>(?<modificationtimezone>.*?)</td></tr>"
+ @"<tr><td>Expiration</td><td>(?<expirationbytes>.*?)</td><td>(?<expirationdate>.*?)</td><td>(?<expirationtime>.*?)</td><td>(?<expirationtimezone>.*?)</td></tr>"
+ @"<tr><td>Effective</td><td>(?<effectivebytes>.*?)</td><td>(?<effectivedate>.*?)</td><td>(?<effectivetime>.*?)</td><td>(?<effectivetimezone>.*?)</td></tr>", RegexOptions.Singleline);
/// <summary>
/// Regex matching the region field on a disc page
/// </summary>
public static Regex RegionRegex = new Regex(@"<tr><th>Region</th><td><a href=""/discs/region/(.*?)/"">");
/// <summary>
/// Regex matching a double-layer disc ringcode information
/// </summary>
public static Regex RingCodeDoubleRegex = new Regex(@"", RegexOptions.Singleline); // Varies based on available fields, like Additional Mould
/// <summary>
/// Regex matching a single-layer disc ringcode information
/// </summary>
public static Regex RingCodeSingleRegex = new Regex(@"", RegexOptions.Singleline); // Varies based on available fields, like Additional Mould
/// <summary>
/// Regex matching the serial field on a disc page
/// </summary>
public static Regex SerialRegex = new Regex(@"<tr><th>Serial</th><td>(.*?)</td></tr>");
/// <summary>
/// Regex matching the system field on a disc page
/// </summary>
public static Regex SystemRegex = new Regex(@"<tr><th>System</th><td><a href=""/discs/system/(.*?)/"">");
/// <summary>
/// Regex matching the title field on a disc page
/// </summary>
public static Regex TitleRegex = new Regex(@"<h1>(.*?)</h1>");
/// <summary>
/// Regex matching the current nonce token for login
/// </summary>
public static Regex TokenRegex = new Regex(@"<input type=""hidden"" name=""csrf_token"" value=""(.*?)"" />");
/// <summary>
/// Regex matching a single track on a disc page
/// </summary>
public static Regex TrackRegex = new Regex(@"<tr><td>(?<number>.*?)</td><td>(?<type>.*?)</td><td>(?<pregap>.*?)</td><td>(?<length>.*?)</td><td>(?<sectors>.*?)</td><td>(?<size>.*?)</td><td>(?<crc32>.*?)</td><td>(?<md5>.*?)</td><td>(?<sha1>.*?)</td></tr>", RegexOptions.Singleline);
/// <summary>
/// Regex matching the track count on a disc page
/// </summary>
public static Regex TrackCountRegex = new Regex(@"<tr><th>Number of tracks</th><td>(.*?)</td></tr>");
/// <summary>
/// Regex matching the version field on a disc page
/// </summary>
public static Regex VersionRegex = new Regex(@"<tr><th>Version</th><td>(.*?)</td></tr>");
/// <summary>
/// Regex matching the write offset field on a disc page
/// </summary>
public static Regex WriteOffsetRegex = new Regex(@"<tr><th>Write offset</th><td>(.*?)</td></tr>");
#endregion
#region URLs
/// <summary>
/// Redump disc page URL template
/// </summary>
public const string DiscPageUrl = @"http://redump.org/disc/{0}/";
/// <summary>
/// Redump last modified search URL template
/// </summary>
public const string LastModifiedUrl = @"http://redump.org/discs/sort/modified/dir/desc?page={0}";
/// <summary>
/// Redump login page URL template
/// </summary>
public const string LoginUrl = "http://forum.redump.org/login/";
/// <summary>
/// Redump CUE pack URL template
/// </summary>
public const string PackCuesUrl = @"http://redump.org/cues/{0}/";
/// <summary>
/// Redump DAT pack URL template
/// </summary>
public const string PackDatfileUrl = @"http://redump.org/datfile/{0}/";
/// <summary>
/// Redump DKEYS pack URL template
/// </summary>
public const string PackDkeysUrl = @"http://redump.org/dkeys/{0}/";
/// <summary>
/// Redump GDI pack URL template
/// </summary>
public const string PackGdiUrl = @"http://redump.org/gdi/{0}/";
/// <summary>
/// Redump KEYS pack URL template
/// </summary>
public const string PackKeysUrl = @"http://redump.org/keys/{0}/";
/// <summary>
/// Redump LSD pack URL template
/// </summary>
public const string PackLsdUrl = @"http://redump.org/lsd/{0}/";
/// <summary>
/// Redump SBI pack URL template
/// </summary>
public const string PackSbiUrl = @"http://redump.org/sbi/{0}/";
/// <summary>
/// Redump quicksearch URL template
/// </summary>
public const string QuickSearchUrl = @"http://redump.org/discs/quicksearch/{0}/?page={1}";
/// <summary>
/// Redump user dumps URL template
/// </summary>
public const string UserDumpsUrl = @"http://redump.org/discs/dumper/{0}/?page={1}";
/// <summary>
/// Redump last modified user dumps URL template
/// </summary>
public const string UserDumpsLastModifiedUrl = @"http://redump.org/discs/sort/modified/dir/desc/dumper/{0}?page={1}";
/// <summary>
/// Redump WIP disc page URL template
/// </summary>
public const string WipDiscPageUrl = @"http://redump.org/newdisc/{0}/";
/// <summary>
/// Redump WIP dumps queue URL
/// </summary>
public const string WipDumpsUrl = @"http://redump.org/discs-wip/";
#endregion
#region URL Extensions
/// <summary>
/// Changes page subpath
/// </summary>
public const string ChangesExt = "changes/";
/// <summary>
/// Cuesheet download subpath
/// </summary>
public const string CueExt = "cue/";
/// <summary>
/// Edit page subpath
/// </summary>
public const string EditExt = "edit/";
/// <summary>
/// GDI download subpath
/// </summary>
public const string GdiExt = "gdi/";
/// <summary>
/// Key download subpath
/// </summary>
public const string KeyExt = "key/";
/// <summary>
/// LSD download subpath
/// </summary>
public const string LsdExt = "lsd/";
/// <summary>
/// MD5 download subpath
/// </summary>
public const string Md5Ext = "md5/";
/// <summary>
/// SBI download subpath
/// </summary>
public const string SbiExt = "sbi/";
/// <summary>
/// SFV download subpath
/// </summary>
public const string SfvExt = "sfv/";
/// <summary>
/// SHA1 download subpath
/// </summary>
public const string Sha1Ext = "sha1/";
#endregion
}
}
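As a rough sketch of how these constants are consumed, the regexes are run against downloaded page HTML and the URL templates are plain string.Format patterns; the HTML fragment below is invented for illustration:

using System;
using RedumpLib.Data;

public static class ConstantsExample
{
    public static void Main()
    {
        // Pull the title out of a simplified disc page fragment
        string html = "<h1>Example Title</h1>";
        var match = Constants.TitleRegex.Match(html);
        if (match.Success)
            Console.WriteLine(match.Groups[1].Value); // "Example Title"

        // Build a disc page URL from its template
        Console.WriteLine(string.Format(Constants.DiscPageUrl, 12345)); // http://redump.org/disc/12345/
    }
}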

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -1,570 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Newtonsoft.Json;
using RedumpLib.Converters;
namespace RedumpLib.Data
{
public class SubmissionInfo : ICloneable
{
/// <summary>
/// Version of the current schema
/// </summary>
[JsonProperty(PropertyName = "schema_version", DefaultValueHandling = DefaultValueHandling.Ignore)]
public int SchemaVersion { get; set; } = 3;
/// <summary>
/// Fully matched Redump ID
/// </summary>
[JsonIgnore]
public int? FullyMatchedID { get; set; }
/// <summary>
/// List of partially matched Redump IDs
/// </summary>
[JsonIgnore]
public List<int> PartiallyMatchedIDs { get; set; }
/// <summary>
/// DateTime of when the disc was added
/// </summary>
[JsonIgnore]
public DateTime? Added { get; set; }
/// <summary>
/// DateTime of when the disc was last modified
/// </summary>
[JsonIgnore]
public DateTime? LastModified { get; set; }
[JsonProperty(PropertyName = "common_disc_info", DefaultValueHandling = DefaultValueHandling.Ignore)]
public CommonDiscInfoSection CommonDiscInfo { get; set; } = new CommonDiscInfoSection();
[JsonProperty(PropertyName = "versions_and_editions", DefaultValueHandling = DefaultValueHandling.Ignore)]
public VersionAndEditionsSection VersionAndEditions { get; set; } = new VersionAndEditionsSection();
[JsonProperty(PropertyName = "edc", DefaultValueHandling = DefaultValueHandling.Ignore)]
public EDCSection EDC { get; set; } = new EDCSection();
[JsonProperty(PropertyName = "parent_clone_relationship", DefaultValueHandling = DefaultValueHandling.Ignore)]
public ParentCloneRelationshipSection ParentCloneRelationship { get; set; } = new ParentCloneRelationshipSection();
[JsonProperty(PropertyName = "extras", DefaultValueHandling = DefaultValueHandling.Ignore)]
public ExtrasSection Extras { get; set; } = new ExtrasSection();
[JsonProperty(PropertyName = "copy_protection", DefaultValueHandling = DefaultValueHandling.Ignore)]
public CopyProtectionSection CopyProtection { get; set; } = new CopyProtectionSection();
[JsonProperty(PropertyName = "dumpers_and_status", DefaultValueHandling = DefaultValueHandling.Ignore)]
public DumpersAndStatusSection DumpersAndStatus { get; set; } = new DumpersAndStatusSection();
[JsonProperty(PropertyName = "tracks_and_write_offsets", DefaultValueHandling = DefaultValueHandling.Ignore)]
public TracksAndWriteOffsetsSection TracksAndWriteOffsets { get; set; } = new TracksAndWriteOffsetsSection();
[JsonProperty(PropertyName = "size_and_checksums", DefaultValueHandling = DefaultValueHandling.Ignore)]
public SizeAndChecksumsSection SizeAndChecksums { get; set; } = new SizeAndChecksumsSection();
[JsonProperty(PropertyName = "dumping_info", DefaultValueHandling = DefaultValueHandling.Ignore)]
public DumpingInfoSection DumpingInfo { get; set; } = new DumpingInfoSection();
[JsonProperty(PropertyName = "artifacts", DefaultValueHandling = DefaultValueHandling.Ignore)]
public Dictionary<string, string> Artifacts { get; set; } = new Dictionary<string, string>();
public object Clone()
{
return new SubmissionInfo
{
SchemaVersion = this.SchemaVersion,
FullyMatchedID = this.FullyMatchedID,
PartiallyMatchedIDs = this.PartiallyMatchedIDs,
Added = this.Added,
LastModified = this.LastModified,
CommonDiscInfo = this.CommonDiscInfo?.Clone() as CommonDiscInfoSection,
VersionAndEditions = this.VersionAndEditions?.Clone() as VersionAndEditionsSection,
EDC = this.EDC?.Clone() as EDCSection,
ParentCloneRelationship = this.ParentCloneRelationship?.Clone() as ParentCloneRelationshipSection,
Extras = this.Extras?.Clone() as ExtrasSection,
CopyProtection = this.CopyProtection?.Clone() as CopyProtectionSection,
DumpersAndStatus = this.DumpersAndStatus?.Clone() as DumpersAndStatusSection,
TracksAndWriteOffsets = this.TracksAndWriteOffsets?.Clone() as TracksAndWriteOffsetsSection,
SizeAndChecksums = this.SizeAndChecksums?.Clone() as SizeAndChecksumsSection,
DumpingInfo = this.DumpingInfo?.Clone() as DumpingInfoSection,
Artifacts = this.Artifacts?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
};
}
}
/// <summary>
/// Common disc info section of New Disc Form
/// </summary>
public class CommonDiscInfoSection : ICloneable
{
// Name not defined by Redump
[JsonProperty(PropertyName = "d_system", Required = Required.AllowNull)]
[JsonConverter(typeof(SystemConverter))]
public RedumpSystem? System { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_media", Required = Required.AllowNull)]
[JsonConverter(typeof(DiscTypeConverter))]
public DiscType? Media { get; set; }
[JsonProperty(PropertyName = "d_title", Required = Required.AllowNull)]
public string Title { get; set; }
[JsonProperty(PropertyName = "d_title_foreign", DefaultValueHandling = DefaultValueHandling.Ignore)]
public string ForeignTitleNonLatin { get; set; }
[JsonProperty(PropertyName = "d_number", NullValueHandling = NullValueHandling.Ignore)]
public string DiscNumberLetter { get; set; }
[JsonProperty(PropertyName = "d_label", NullValueHandling = NullValueHandling.Ignore)]
public string DiscTitle { get; set; }
[JsonProperty(PropertyName = "d_category", Required = Required.AllowNull)]
[JsonConverter(typeof(DiscCategoryConverter))]
public DiscCategory? Category { get; set; }
[JsonProperty(PropertyName = "d_region", Required = Required.AllowNull)]
[JsonConverter(typeof(RegionConverter))]
public Region? Region { get; set; }
[JsonProperty(PropertyName = "d_languages", Required = Required.AllowNull)]
[JsonConverter(typeof(LanguageConverter))]
public Language?[] Languages { get; set; }
[JsonProperty(PropertyName = "d_languages_selection", NullValueHandling = NullValueHandling.Ignore, DefaultValueHandling = DefaultValueHandling.Ignore)]
[JsonConverter(typeof(LanguageSelectionConverter))]
public LanguageSelection?[] LanguageSelection { get; set; }
[JsonProperty(PropertyName = "d_serial", NullValueHandling = NullValueHandling.Ignore)]
public string Serial { get; set; }
[JsonProperty(PropertyName = "d_ring", NullValueHandling = NullValueHandling.Ignore)]
public string Ring { get; private set; }
[JsonProperty(PropertyName = "d_ring_0_id", NullValueHandling = NullValueHandling.Ignore)]
public string RingId { get; private set; }
[JsonProperty(PropertyName = "d_ring_0_ma1", Required = Required.AllowNull)]
public string Layer0MasteringRing { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma1_sid", NullValueHandling = NullValueHandling.Ignore)]
public string Layer0MasteringSID { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ts1", NullValueHandling = NullValueHandling.Ignore)]
public string Layer0ToolstampMasteringCode { get; set; }
[JsonProperty(PropertyName = "d_ring_0_mo1_sid", NullValueHandling = NullValueHandling.Ignore)]
public string Layer0MouldSID { get; set; }
[JsonProperty(PropertyName = "d_ring_0_mo1", NullValueHandling = NullValueHandling.Ignore)]
public string Layer0AdditionalMould { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma2", Required = Required.AllowNull)]
public string Layer1MasteringRing { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma2_sid", NullValueHandling = NullValueHandling.Ignore)]
public string Layer1MasteringSID { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ts2", NullValueHandling = NullValueHandling.Ignore)]
public string Layer1ToolstampMasteringCode { get; set; }
[JsonProperty(PropertyName = "d_ring_0_mo2_sid", NullValueHandling = NullValueHandling.Ignore)]
public string Layer1MouldSID { get; set; }
[JsonProperty(PropertyName = "d_ring_0_mo2", NullValueHandling = NullValueHandling.Ignore)]
public string Layer1AdditionalMould { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma3", Required = Required.AllowNull)]
public string Layer2MasteringRing { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma3_sid", NullValueHandling = NullValueHandling.Ignore)]
public string Layer2MasteringSID { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ts3", NullValueHandling = NullValueHandling.Ignore)]
public string Layer2ToolstampMasteringCode { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma4", Required = Required.AllowNull)]
public string Layer3MasteringRing { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma4_sid", NullValueHandling = NullValueHandling.Ignore)]
public string Layer3MasteringSID { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ts4", NullValueHandling = NullValueHandling.Ignore)]
public string Layer3ToolstampMasteringCode { get; set; }
[JsonProperty(PropertyName = "d_ring_0_offsets", NullValueHandling = NullValueHandling.Ignore)]
public string RingOffsetsHidden { get { return "1"; } }
[JsonProperty(PropertyName = "d_ring_0_0_id", NullValueHandling = NullValueHandling.Ignore)]
public string RingZeroId { get; private set; }
[JsonProperty(PropertyName = "d_ring_0_0_density", NullValueHandling = NullValueHandling.Ignore)]
public string RingZeroDensity { get; private set; }
[JsonProperty(PropertyName = "d_ring_0_0_value", NullValueHandling = NullValueHandling.Ignore)]
public string RingWriteOffset { get; set; }
[JsonProperty(PropertyName = "d_ring_count", NullValueHandling = NullValueHandling.Ignore)]
public string RingCount { get { return "1"; } }
[JsonProperty(PropertyName = "d_barcode", NullValueHandling = NullValueHandling.Ignore)]
public string Barcode { get; set; }
[JsonProperty(PropertyName = "d_date", NullValueHandling = NullValueHandling.Ignore)]
public string EXEDateBuildDate { get; set; }
[JsonProperty(PropertyName = "d_errors", NullValueHandling = NullValueHandling.Ignore)]
public string ErrorsCount { get; set; }
[JsonProperty(PropertyName = "d_comments", NullValueHandling = NullValueHandling.Ignore)]
public string Comments { get; set; }
[JsonIgnore]
public Dictionary<SiteCode?, string> CommentsSpecialFields { get; set; }
[JsonProperty(PropertyName = "d_contents", NullValueHandling = NullValueHandling.Ignore)]
public string Contents { get; set; }
[JsonIgnore]
public Dictionary<SiteCode?, string> ContentsSpecialFields { get; set; }
public object Clone()
{
return new CommonDiscInfoSection
{
System = this.System,
Media = this.Media,
Title = this.Title,
ForeignTitleNonLatin = this.ForeignTitleNonLatin,
DiscNumberLetter = this.DiscNumberLetter,
DiscTitle = this.DiscTitle,
Category = this.Category,
Region = this.Region,
Languages = this.Languages?.Clone() as Language?[],
LanguageSelection = this.LanguageSelection?.Clone() as LanguageSelection?[],
Serial = this.Serial,
Ring = this.Ring,
RingId = this.RingId,
Layer0MasteringRing = this.Layer0MasteringRing,
Layer0MasteringSID = this.Layer0MasteringSID,
Layer0ToolstampMasteringCode = this.Layer0ToolstampMasteringCode,
Layer0MouldSID = this.Layer0MouldSID,
Layer0AdditionalMould = this.Layer0AdditionalMould,
Layer1MasteringRing = this.Layer1MasteringRing,
Layer1MasteringSID = this.Layer1MasteringSID,
Layer1ToolstampMasteringCode = this.Layer1ToolstampMasteringCode,
Layer1MouldSID = this.Layer1MouldSID,
Layer1AdditionalMould = this.Layer1AdditionalMould,
Layer2MasteringRing = this.Layer2MasteringRing,
Layer2MasteringSID = this.Layer2MasteringSID,
Layer2ToolstampMasteringCode = this.Layer2ToolstampMasteringCode,
Layer3MasteringRing = this.Layer3MasteringRing,
Layer3MasteringSID = this.Layer3MasteringSID,
Layer3ToolstampMasteringCode = this.Layer3ToolstampMasteringCode,
RingZeroId = this.RingZeroId,
RingZeroDensity = this.RingZeroDensity,
RingWriteOffset = this.RingWriteOffset,
Barcode = this.Barcode,
EXEDateBuildDate = this.EXEDateBuildDate,
ErrorsCount = this.ErrorsCount,
Comments = this.Comments,
CommentsSpecialFields = this.CommentsSpecialFields?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
Contents = this.Contents,
ContentsSpecialFields = this.ContentsSpecialFields?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
};
}
}
/// <summary>
/// Version and editions section of New Disc form
/// </summary>
public class VersionAndEditionsSection : ICloneable
{
[JsonProperty(PropertyName = "d_version", NullValueHandling = NullValueHandling.Ignore)]
public string Version { get; set; }
[JsonProperty(PropertyName = "d_version_datfile", NullValueHandling = NullValueHandling.Ignore)]
public string VersionDatfile { get; set; }
[JsonProperty(PropertyName = "d_editions", NullValueHandling = NullValueHandling.Ignore)]
public string[] CommonEditions { get; set; }
[JsonProperty(PropertyName = "d_editions_text", NullValueHandling = NullValueHandling.Ignore)]
public string OtherEditions { get; set; }
public object Clone()
{
return new VersionAndEditionsSection
{
Version = this.Version,
VersionDatfile = this.VersionDatfile,
CommonEditions = this.CommonEditions,
OtherEditions = this.OtherEditions,
};
}
}
/// <summary>
/// EDC section of New Disc form (PSX only)
/// </summary>
public class EDCSection : ICloneable
{
[JsonProperty(PropertyName = "d_edc", NullValueHandling = NullValueHandling.Ignore)]
[JsonConverter(typeof(YesNoConverter))]
public YesNo? EDC { get; set; }
public object Clone()
{
return new EDCSection
{
EDC = this.EDC,
};
}
}
/// <summary>
/// Parent/Clone relationship section of New Disc form
/// </summary>
public class ParentCloneRelationshipSection : ICloneable
{
[JsonProperty(PropertyName = "d_parent_id", NullValueHandling = NullValueHandling.Ignore)]
public string ParentID { get; set; }
[JsonProperty(PropertyName = "d_is_regional_parent", NullValueHandling = NullValueHandling.Ignore)]
public bool RegionalParent { get; set; }
public object Clone()
{
return new ParentCloneRelationshipSection
{
ParentID = this.ParentID,
RegionalParent = this.RegionalParent,
};
}
}
/// <summary>
/// Extras section of New Disc form
/// </summary>
public class ExtrasSection : ICloneable
{
[JsonProperty(PropertyName = "d_pvd", NullValueHandling = NullValueHandling.Ignore)]
public string PVD { get; set; }
[JsonProperty(PropertyName = "d_d1_key", NullValueHandling = NullValueHandling.Ignore)]
public string DiscKey { get; set; }
[JsonProperty(PropertyName = "d_d2_key", NullValueHandling = NullValueHandling.Ignore)]
public string DiscID { get; set; }
[JsonProperty(PropertyName = "d_pic_data", NullValueHandling = NullValueHandling.Ignore)]
public string PIC { get; set; }
[JsonProperty(PropertyName = "d_header", NullValueHandling = NullValueHandling.Ignore)]
public string Header { get; set; }
[JsonProperty(PropertyName = "d_bca", NullValueHandling = NullValueHandling.Ignore)]
public string BCA { get; set; }
[JsonProperty(PropertyName = "d_ssranges", NullValueHandling = NullValueHandling.Ignore)]
public string SecuritySectorRanges { get; set; }
public object Clone()
{
return new ExtrasSection
{
PVD = this.PVD,
DiscKey = this.DiscKey,
DiscID = this.DiscID,
PIC = this.PIC,
Header = this.Header,
BCA = this.BCA,
SecuritySectorRanges = this.SecuritySectorRanges,
};
}
}
/// <summary>
/// Copy protection section of New Disc form
/// </summary>
public class CopyProtectionSection : ICloneable
{
[JsonProperty(PropertyName = "d_protection_a", NullValueHandling = NullValueHandling.Ignore)]
[JsonConverter(typeof(YesNoConverter))]
public YesNo? AntiModchip { get; set; }
[JsonProperty(PropertyName = "d_protection_1", NullValueHandling = NullValueHandling.Ignore)]
[JsonConverter(typeof(YesNoConverter))]
public YesNo? LibCrypt { get; set; }
[JsonProperty(PropertyName = "d_libcrypt", NullValueHandling = NullValueHandling.Ignore)]
public string LibCryptData { get; set; }
[JsonProperty(PropertyName = "d_protection", NullValueHandling = NullValueHandling.Ignore)]
public string Protection { get; set; }
[JsonIgnore]
public Dictionary<string, List<string>> FullProtections { get; set; }
[JsonProperty(PropertyName = "d_securom", NullValueHandling = NullValueHandling.Ignore)]
public string SecuROMData { get; set; }
public object Clone()
{
return new CopyProtectionSection
{
AntiModchip = this.AntiModchip,
LibCrypt = this.LibCrypt,
LibCryptData = this.LibCryptData,
Protection = this.Protection,
FullProtections = this.FullProtections?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
SecuROMData = this.SecuROMData,
};
}
}
/// <summary>
/// Dumpers and status section of New Disc form (Moderator only)
/// </summary>
public class DumpersAndStatusSection : ICloneable
{
[JsonProperty(PropertyName = "d_status", NullValueHandling = NullValueHandling.Ignore)]
public DumpStatus Status { get; set; }
[JsonProperty(PropertyName = "d_dumpers", NullValueHandling = NullValueHandling.Ignore)]
public string[] Dumpers { get; set; }
[JsonProperty(PropertyName = "d_dumpers_text", NullValueHandling = NullValueHandling.Ignore)]
public string OtherDumpers { get; set; }
public object Clone()
{
return new DumpersAndStatusSection
{
Status = this.Status,
Dumpers = this.Dumpers?.Clone() as string[],
OtherDumpers = this.OtherDumpers,
};
}
}
/// <summary>
/// Tracks and write offsets section of New Disc form (CD/GD-based)
/// </summary>
public class TracksAndWriteOffsetsSection : ICloneable
{
[JsonProperty(PropertyName = "d_tracks", NullValueHandling = NullValueHandling.Ignore)]
public string ClrMameProData { get; set; }
[JsonProperty(PropertyName = "d_cue", NullValueHandling = NullValueHandling.Ignore)]
public string Cuesheet { get; set; }
[JsonProperty(PropertyName = "d_offset", NullValueHandling = NullValueHandling.Ignore)]
public int[] CommonWriteOffsets { get; set; }
[JsonProperty(PropertyName = "d_offset_text", NullValueHandling = NullValueHandling.Ignore)]
public string OtherWriteOffsets { get; set; }
public object Clone()
{
return new TracksAndWriteOffsetsSection
{
ClrMameProData = this.ClrMameProData,
Cuesheet = this.Cuesheet,
CommonWriteOffsets = this.CommonWriteOffsets?.Clone() as int[],
OtherWriteOffsets = this.OtherWriteOffsets,
};
}
}
/// <summary>
/// Size &amp; checksums section of New Disc form (DVD/BD/UMD-based)
/// </summary>
public class SizeAndChecksumsSection : ICloneable
{
[JsonProperty(PropertyName = "d_layerbreak", NullValueHandling = NullValueHandling.Ignore)]
public long Layerbreak { get; set; }
[JsonProperty(PropertyName = "d_layerbreak_2", NullValueHandling = NullValueHandling.Ignore)]
public long Layerbreak2 { get; set; }
[JsonProperty(PropertyName = "d_layerbreak_3", NullValueHandling = NullValueHandling.Ignore)]
public long Layerbreak3 { get; set; }
[JsonProperty(PropertyName = "d_pic_identifier", NullValueHandling = NullValueHandling.Ignore)]
public string PICIdentifier { get; set; }
[JsonProperty(PropertyName = "d_size", NullValueHandling = NullValueHandling.Ignore)]
public long Size { get; set; }
[JsonProperty(PropertyName = "d_crc32", NullValueHandling = NullValueHandling.Ignore)]
public string CRC32 { get; set; }
[JsonProperty(PropertyName = "d_md5", NullValueHandling = NullValueHandling.Ignore)]
public string MD5 { get; set; }
[JsonProperty(PropertyName = "d_sha1", NullValueHandling = NullValueHandling.Ignore)]
public string SHA1 { get; set; }
public object Clone()
{
return new SizeAndChecksumsSection
{
Layerbreak = this.Layerbreak,
Layerbreak2 = this.Layerbreak2,
Layerbreak3 = this.Layerbreak3,
PICIdentifier = this.PICIdentifier,
Size = this.Size,
CRC32 = this.CRC32,
MD5 = this.MD5,
SHA1 = this.SHA1,
};
}
}
/// <summary>
/// Dumping info section for moderation
/// </summary>
public class DumpingInfoSection : ICloneable
{
// Name not defined by Redump
[JsonProperty(PropertyName = "d_dumping_program", Required = Required.AllowNull)]
public string DumpingProgram { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_dumping_date", Required = Required.AllowNull)]
public string DumpingDate { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_drive_manufacturer", Required = Required.AllowNull)]
public string Manufacturer { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_drive_model", Required = Required.AllowNull)]
public string Model { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_drive_firmware", Required = Required.AllowNull)]
public string Firmware { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_reported_disc_type", Required = Required.AllowNull)]
public string ReportedDiscType { get; set; }
public object Clone()
{
return new DumpingInfoSection
{
DumpingProgram = this.DumpingProgram,
DumpingDate = this.DumpingDate,
Manufacturer = this.Manufacturer,
Model = this.Model,
Firmware = this.Firmware,
ReportedDiscType = this.ReportedDiscType,
};
}
}
}
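A minimal serialization sketch, assuming only the sections above and Newtonsoft.Json; the values are placeholders rather than real submission data:

using System;
using Newtonsoft.Json;
using RedumpLib.Data;

public static class SubmissionInfoExample
{
    public static void Main()
    {
        var info = new SubmissionInfo();
        info.CommonDiscInfo.Title = "Example Disc";
        info.VersionAndEditions.Version = "1.0";

        // The [JsonConverter] attributes on the sections turn enum values into
        // Redump's human-readable strings during serialization
        string json = JsonConvert.SerializeObject(info, Formatting.Indented);
        Console.WriteLine(json);
    }
}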

View File

@@ -1,13 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net48;net6.0</TargetFrameworks>
<RuntimeIdentifiers>win7-x64;win8-x64;win81-x64;win10-x64;linux-x64;osx-x64</RuntimeIdentifiers>
<GenerateAssemblyInfo>false</GenerateAssemblyInfo>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
</ItemGroup>
</Project>

View File

@@ -1,808 +0,0 @@
#if NET6_0
using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using RedumpLib.Data;
namespace RedumpLib.Web
{
public class RedumpHttpClient : HttpClient
{
#region Properties
/// <summary>
/// Determines if user is logged into Redump
/// </summary>
public bool LoggedIn { get; private set; } = false;
/// <summary>
/// Determines if the user is a staff member
/// </summary>
public bool IsStaff { get; private set; } = false;
#endregion
/// <summary>
/// Constructor
/// </summary>
public RedumpHttpClient()
: base(new HttpClientHandler { UseCookies = true })
{
}
#region Credentials
/// <summary>
/// Validate supplied credentials
/// </summary>
public async static Task<(bool?, string)> ValidateCredentials(string username, string password)
{
// If options are invalid or we're missing something key, just return
if (string.IsNullOrWhiteSpace(username) || string.IsNullOrWhiteSpace(password))
return (false, null);
// Try logging in with the supplied credentials otherwise
using RedumpHttpClient httpClient = new();
bool? loggedIn = await httpClient.Login(username, password);
if (loggedIn == true)
return (true, "Redump username and password accepted!");
else if (loggedIn == false)
return (false, "Redump username and password denied!");
else
return (null, "An error occurred validating your credentials!");
}
/// <summary>
/// Login to Redump, if possible
/// </summary>
/// <param name="username">Redump username</param>
/// <param name="password">Redump password</param>
/// <returns>True if the user could be logged in, false otherwise, null on error</returns>
public async Task<bool?> Login(string username, string password)
{
// Credentials verification
if (!string.IsNullOrWhiteSpace(username) && !string.IsNullOrWhiteSpace(password))
{
Console.WriteLine("Credentials entered, will attempt Redump login...");
}
else if (!string.IsNullOrWhiteSpace(username) && string.IsNullOrWhiteSpace(password))
{
Console.WriteLine("Only a username was specified, will not attempt Redump login...");
return false;
}
else if (string.IsNullOrWhiteSpace(username))
{
Console.WriteLine("No credentials entered, will not attempt Redump login...");
return false;
}
// HTTP encode the password
password = WebUtility.UrlEncode(password);
// Attempt to login up to 3 times
for (int i = 0; i < 3; i++)
{
try
{
// Get the current token from the login page
var loginPage = await GetStringAsync(Constants.LoginUrl);
string token = Constants.TokenRegex.Match(loginPage).Groups[1].Value;
// Construct the login request
var postContent = new StringContent($"form_sent=1&redirect_url=&csrf_token={token}&req_username={username}&req_password={password}&save_pass=0", Encoding.UTF8);
postContent.Headers.ContentType = MediaTypeHeaderValue.Parse("application/x-www-form-urlencoded");
// Send the login request and get the result
var response = await PostAsync(Constants.LoginUrl, postContent);
string responseContent = await response?.Content?.ReadAsStringAsync();
if (string.IsNullOrWhiteSpace(responseContent))
{
Console.WriteLine($"An error occurred while trying to log in on attempt {i}: No response");
continue;
}
if (responseContent.Contains("Incorrect username and/or password."))
{
Console.WriteLine("Invalid credentials entered, continuing without logging in...");
return false;
}
// The user was able to be logged in
Console.WriteLine("Credentials accepted! Logged into Redump...");
LoggedIn = true;
// If the user is a moderator or staff, set accordingly
if (responseContent.Contains("http://forum.redump.org/forum/9/staff/"))
IsStaff = true;
return true;
}
catch (Exception ex)
{
Console.WriteLine($"An exception occurred while trying to log in on attempt {i}: {ex}");
}
}
Console.WriteLine("Could not login to Redump in 3 attempts, continuing without logging in...");
return false;
}
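// Hypothetical usage sketch (names and credentials are placeholders): the client keeps its
// session cookie after a successful login, so later authenticated calls reuse this instance.
//
//   using var client = new RedumpHttpClient();
//   bool? loggedIn = await client.Login("username", "password");
//   if (loggedIn == true)
//       Console.WriteLine(client.IsStaff ? "Staff account" : "Regular account");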
#endregion
#region Single Page Helpers
/// <summary>
/// Process a Redump site page as a list of possible IDs or disc page
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <returns>List of IDs from the page, empty on error</returns>
public async Task<List<int>> CheckSingleSitePage(string url)
{
List<int> ids = new();
// Try up to 3 times to retrieve the data
string dumpsPage = await DownloadString(url, retries: 3);
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
return ids;
// If we have a single disc page already
if (dumpsPage.Contains("<b>Download:</b>"))
{
var value = Regex.Match(dumpsPage, @"/disc/(\d+)/sfv/").Groups[1].Value;
if (int.TryParse(value, out int id))
ids.Add(id);
return ids;
}
// Otherwise, traverse each dump on the page
var matches = Constants.DiscRegex.Matches(dumpsPage);
foreach (Match match in matches)
{
try
{
if (int.TryParse(match.Groups[1].Value, out int value))
ids.Add(value);
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
continue;
}
}
return ids;
}
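// Hypothetical call: hand it a results URL (for example one built from
// Constants.QuickSearchUrl) and collect the disc IDs found on that page.
//
//   List<int> ids = await client.CheckSingleSitePage(string.Format(Constants.QuickSearchUrl, "somegame", 1));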
/// <summary>
/// Process a Redump site page as a list of possible IDs or disc page
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="failOnSingle">True to return on first error, false otherwise</param>
/// <returns>True if the page could be downloaded, false otherwise</returns>
public async Task<bool> CheckSingleSitePage(string url, string outDir, bool failOnSingle)
{
// Try up to 3 times to retrieve the data
string dumpsPage = await DownloadString(url, retries: 3);
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
return false;
// If we have a single disc page already
if (dumpsPage.Contains("<b>Download:</b>"))
{
var value = Regex.Match(dumpsPage, @"/disc/(\d+)/sfv/").Groups[1].Value;
if (int.TryParse(value, out int id))
{
bool downloaded = await DownloadSingleSiteID(id, outDir, false);
if (!downloaded && failOnSingle)
return false;
}
return false;
}
// Otherwise, traverse each dump on the page
var matches = Constants.DiscRegex.Matches(dumpsPage);
foreach (Match match in matches)
{
try
{
if (int.TryParse(match.Groups[1].Value, out int value))
{
bool downloaded = await DownloadSingleSiteID(value, outDir, false);
if (!downloaded && failOnSingle)
return false;
}
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
continue;
}
}
return true;
}
/// <summary>
/// Process a Redump WIP page as a list of possible IDs or disc page
/// </summary>
/// <param name="wc">RedumpWebClient to access the packs</param>
/// <returns>List of IDs from the page, empty on error</returns>
public async Task<List<int>> CheckSingleWIPPage(string url)
{
List<int> ids = new();
// Try up to 3 times to retrieve the data
string dumpsPage = await DownloadString(url, retries: 3);
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
return ids;
// Otherwise, traverse each dump on the page
var matches = Constants.NewDiscRegex.Matches(dumpsPage);
foreach (Match match in matches)
{
try
{
if (int.TryParse(match.Groups[2].Value, out int value))
ids.Add(value);
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
continue;
}
}
return ids;
}
/// <summary>
/// Process a Redump WIP page as a list of possible IDs or disc page
/// </summary>
/// <param name="wc">RedumpWebClient to access the packs</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="failOnSingle">True to return on first error, false otherwise</param>
/// <returns>True if the page could be downloaded, false otherwise</returns>
public async Task<bool> CheckSingleWIPPage(string url, string outDir, bool failOnSingle)
{
// Try up to 3 times to retrieve the data
string dumpsPage = await DownloadString(url, retries: 3);
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
return false;
// Otherwise, traverse each dump on the page
var matches = Constants.NewDiscRegex.Matches(dumpsPage);
foreach (Match match in matches)
{
try
{
if (int.TryParse(match.Groups[2].Value, out int value))
{
bool downloaded = await DownloadSingleWIPID(value, outDir, false);
if (!downloaded && failOnSingle)
return false;
}
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
continue;
}
}
return true;
}
#endregion
#region Download Helpers
/// <summary>
/// Download a single pack
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="system">System to download packs for</param>
/// <returns>Byte array containing the downloaded pack, null on error</returns>
public async Task<byte[]> DownloadSinglePack(string url, RedumpSystem? system)
{
try
{
return await GetByteArrayAsync(string.Format(url, system.ShortName()));
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return null;
}
}
/// <summary>
/// Download a single pack
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="system">System to download packs for</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="subfolder">Named subfolder for the pack, used optionally</param>
public async Task<bool> DownloadSinglePack(string url, RedumpSystem? system, string outDir, string subfolder)
{
try
{
// If no output directory is defined, use the current directory instead
if (string.IsNullOrWhiteSpace(outDir))
outDir = Environment.CurrentDirectory;
string tempfile = Path.Combine(outDir, "tmp" + Guid.NewGuid().ToString());
string packUri = string.Format(url, system.ShortName());
// Make the call to get the pack
string remoteFileName = await DownloadFile(packUri, tempfile);
MoveOrDelete(tempfile, remoteFileName, outDir, subfolder);
return true;
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return false;
}
}
/// <summary>
/// Download an individual site ID data, if possible
/// </summary>
/// <param name="id">Redump disc ID to retrieve</param>
/// <returns>String containing the page contents if successful, null on error</returns>
public async Task<string> DownloadSingleSiteID(int id)
{
string paddedId = id.ToString().PadLeft(5, '0');
Console.WriteLine($"Processing ID: {paddedId}");
try
{
// Try up to 3 times to retrieve the data
string discPageUri = string.Format(Constants.DiscPageUrl, +id);
string discPage = await DownloadString(discPageUri, retries: 3);
if (discPage == null || discPage.Contains($"Disc with ID \"{id}\" doesn't exist"))
{
Console.WriteLine($"ID {paddedId} could not be found!");
return null;
}
Console.WriteLine($"ID {paddedId} has been successfully downloaded");
return discPage;
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return null;
}
}
/// <summary>
/// Download an individual site ID data, if possible
/// </summary>
/// <param name="id">Redump disc ID to retrieve</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="rename">True to rename deleted entries, false otherwise</param>
/// <returns>True if all data was downloaded, false otherwise</returns>
public async Task<bool> DownloadSingleSiteID(int id, string outDir, bool rename)
{
// If no output directory is defined, use the current directory instead
if (string.IsNullOrWhiteSpace(outDir))
outDir = Environment.CurrentDirectory;
string paddedId = id.ToString().PadLeft(5, '0');
string paddedIdDir = Path.Combine(outDir, paddedId);
Console.WriteLine($"Processing ID: {paddedId}");
try
{
// Try up to 3 times to retrieve the data
string discPageUri = string.Format(Constants.DiscPageUrl, +id);
string discPage = await DownloadString(discPageUri, retries: 3);
if (discPage == null || discPage.Contains($"Disc with ID \"{id}\" doesn't exist"))
{
try
{
if (rename)
{
if (Directory.Exists(paddedIdDir) && rename)
Directory.Move(paddedIdDir, paddedIdDir + "-deleted");
else
Directory.CreateDirectory(paddedIdDir + "-deleted");
}
}
catch { }
Console.WriteLine($"ID {paddedId} could not be found!");
return false;
}
// Check if the page has been updated since the last time it was downloaded, if possible
if (File.Exists(Path.Combine(paddedIdDir, "disc.html")))
{
// Read in the cached file
var oldDiscPage = File.ReadAllText(Path.Combine(paddedIdDir, "disc.html"));
// Check for the last modified date in both pages
var oldResult = Constants.LastModifiedRegex.Match(oldDiscPage);
var newResult = Constants.LastModifiedRegex.Match(discPage);
// If both pages contain the same modified date, skip it
if (oldResult.Success && newResult.Success && oldResult.Groups[1].Value == newResult.Groups[1].Value)
{
Console.WriteLine($"ID {paddedId} has not been changed since last download");
return false;
}
// If neither page contains a modified date, skip it
else if (!oldResult.Success && !newResult.Success)
{
Console.WriteLine($"ID {paddedId} has not been changed since last download");
return false;
}
}
// Create ID subdirectory
Directory.CreateDirectory(paddedIdDir);
// View Edit History
if (discPage.Contains($"<a href=\"/disc/{id}/changes/\""))
_ = await DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.ChangesExt, Path.Combine(paddedIdDir, "changes.html"));
// CUE
if (discPage.Contains($"<a href=\"/disc/{id}/cue/\""))
_ = await DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.CueExt, Path.Combine(paddedIdDir, paddedId + ".cue"));
// Edit disc
if (discPage.Contains($"<a href=\"/disc/{id}/edit/\""))
_ = await DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.EditExt, Path.Combine(paddedIdDir, "edit.html"));
// GDI
if (discPage.Contains($"<a href=\"/disc/{id}/gdi/\""))
_ = await DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.GdiExt, Path.Combine(paddedIdDir, paddedId + ".gdi"));
// KEYS
if (discPage.Contains($"<a href=\"/disc/{id}/key/\""))
_ = await DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.KeyExt, Path.Combine(paddedIdDir, paddedId + ".key"));
// LSD
if (discPage.Contains($"<a href=\"/disc/{id}/lsd/\""))
_ = await DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.LsdExt, Path.Combine(paddedIdDir, paddedId + ".lsd"));
// MD5
if (discPage.Contains($"<a href=\"/disc/{id}/md5/\""))
_ = await DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.Md5Ext, Path.Combine(paddedIdDir, paddedId + ".md5"));
// Review WIP entry
if (Constants.NewDiscRegex.IsMatch(discPage))
{
var match = Constants.NewDiscRegex.Match(discPage);
_ = await DownloadFile(string.Format(Constants.WipDiscPageUrl, match.Groups[2].Value), Path.Combine(paddedIdDir, "newdisc.html"));
}
// SBI
if (discPage.Contains($"<a href=\"/disc/{id}/sbi/\""))
_ = await DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.SbiExt, Path.Combine(paddedIdDir, paddedId + ".sbi"));
// SFV
if (discPage.Contains($"<a href=\"/disc/{id}/sfv/\""))
await DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.SfvExt, Path.Combine(paddedIdDir, paddedId + ".sfv"));
// SHA1
if (discPage.Contains($"<a href=\"/disc/{id}/sha1/\""))
_ = await DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.Sha1Ext, Path.Combine(paddedIdDir, paddedId + ".sha1"));
// HTML (Last in case of errors)
using (var discStreamWriter = File.CreateText(Path.Combine(paddedIdDir, "disc.html")))
{
discStreamWriter.Write(discPage);
}
Console.WriteLine($"ID {paddedId} has been successfully downloaded");
return true;
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return false;
}
}
/// <summary>
/// Download an individual WIP ID data, if possible
/// </summary>
/// <param name="id">Redump WIP disc ID to retrieve</param>
/// <returns>String containing the page contents if successful, null on error</returns>
public async Task<string> DownloadSingleWIPID(int id)
{
string paddedId = id.ToString().PadLeft(5, '0');
Console.WriteLine($"Processing ID: {paddedId}");
try
{
// Try up to 3 times to retrieve the data
string discPageUri = string.Format(Constants.WipDiscPageUrl, +id);
string discPage = await DownloadString(discPageUri, retries: 3);
if (discPage == null || discPage.Contains($"WIP disc with ID \"{id}\" doesn't exist"))
{
Console.WriteLine($"ID {paddedId} could not be found!");
return null;
}
Console.WriteLine($"ID {paddedId} has been successfully downloaded");
return discPage;
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return null;
}
}
/// <summary>
/// Download an individual WIP ID data, if possible
/// </summary>
/// <param name="id">Redump WIP disc ID to retrieve</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="rename">True to rename deleted entries, false otherwise</param>
/// <returns>True if all data was downloaded, false otherwise</returns>
public async Task<bool> DownloadSingleWIPID(int id, string outDir, bool rename)
{
// If no output directory is defined, use the current directory instead
if (string.IsNullOrWhiteSpace(outDir))
outDir = Environment.CurrentDirectory;
string paddedId = id.ToString().PadLeft(5, '0');
string paddedIdDir = Path.Combine(outDir, paddedId);
Console.WriteLine($"Processing ID: {paddedId}");
try
{
// Try up to 3 times to retrieve the data
string discPageUri = string.Format(Constants.WipDiscPageUrl, +id);
string discPage = await DownloadString(discPageUri, retries: 3);
if (discPage == null || discPage.Contains($"WIP disc with ID \"{id}\" doesn't exist"))
{
try
{
if (rename)
{
if (Directory.Exists(paddedIdDir) && rename)
Directory.Move(paddedIdDir, paddedIdDir + "-deleted");
else
Directory.CreateDirectory(paddedIdDir + "-deleted");
}
}
catch { }
Console.WriteLine($"ID {paddedId} could not be found!");
return false;
}
// Check if the page has been updated since the last time it was downloaded, if possible
if (File.Exists(Path.Combine(paddedIdDir, "disc.html")))
{
// Read in the cached file
var oldDiscPage = File.ReadAllText(Path.Combine(paddedIdDir, "disc.html"));
// Check for the full match ID in both pages
var oldResult = Constants.FullMatchRegex.Match(oldDiscPage);
var newResult = Constants.FullMatchRegex.Match(discPage);
// If both pages contain the same ID, skip it
if (oldResult.Success && newResult.Success && oldResult.Groups[1].Value == newResult.Groups[1].Value)
{
Console.WriteLine($"ID {paddedId} has not been changed since last download");
return false;
}
// If neither page contains an ID, skip it
else if (!oldResult.Success && !newResult.Success)
{
Console.WriteLine($"ID {paddedId} has not been changed since last download");
return false;
}
}
// Create ID subdirectory
Directory.CreateDirectory(paddedIdDir);
// HTML
using (var discStreamWriter = File.CreateText(Path.Combine(paddedIdDir, "disc.html")))
{
discStreamWriter.Write(discPage);
}
Console.WriteLine($"ID {paddedId} has been successfully downloaded");
return true;
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return false;
}
}
#endregion
#region Helpers
/// <summary>
/// Download a set of packs
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="system">Systems to download packs for</param>
/// <param name="title">Name of the pack that is downloading</param>
public async Task<Dictionary<RedumpSystem, byte[]>> DownloadPacks(string url, RedumpSystem?[] systems, string title)
{
var packsDictionary = new Dictionary<RedumpSystem, byte[]>();
Console.WriteLine($"Downloading {title}");
foreach (var system in systems)
{
// If the system is invalid, we can't do anything
if (system == null || !system.IsAvailable())
continue;
// If we aren't logged in, skip banned systems
if (!LoggedIn && system.IsBanned())
continue;
// If the system is unknown, we can't do anything
string longName = system.LongName();
if (string.IsNullOrWhiteSpace(longName))
continue;
Console.Write($"\r{longName}{new string(' ', Console.BufferWidth - longName.Length - 1)}");
byte[] pack = await DownloadSinglePack(url, system);
if (pack != null)
packsDictionary.Add(system.Value, pack);
}
Console.Write($"\rComplete!{new string(' ', Console.BufferWidth - 10)}");
Console.WriteLine();
return packsDictionary;
}
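/// <summary>
/// Hypothetical follow-up sketch added for illustration (not part of the original source):
/// writes the packs returned by the in-memory overload above to disk, one file per system.
/// The method name and the ".zip" naming scheme are assumptions, not project conventions.
/// </summary>
public async Task SavePacksToDisk(string url, RedumpSystem?[] systems, string title, string outDir)
{
    // Reuse the dictionary-returning overload above
    var packs = await DownloadPacks(url, systems, title);

    Directory.CreateDirectory(outDir);
    foreach (var kvp in packs)
    {
        // ShortName() mirrors the identifier used when the pack was requested
        RedumpSystem? system = kvp.Key;
        File.WriteAllBytes(Path.Combine(outDir, $"{system.ShortName()}.zip"), kvp.Value);
    }
}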
/// <summary>
/// Download a set of packs
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="systems">Systems to download packs for</param>
/// <param name="title">Name of the pack being downloaded</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="subfolder">Named subfolder for the pack, used optionally</param>
public async Task<bool> DownloadPacks(string url, RedumpSystem?[] systems, string title, string outDir, string subfolder)
{
Console.WriteLine($"Downloading {title}");
foreach (var system in systems)
{
// If the system is invalid, we can't do anything
if (system == null || !system.IsAvailable())
continue;
// If we aren't logged in, skip banned systems
if (!LoggedIn && system.IsBanned())
continue;
// If the system is unknown, we can't do anything
string longName = system.LongName();
if (string.IsNullOrWhiteSpace(longName))
continue;
Console.Write($"\r{longName}{new string(' ', Console.BufferWidth - longName.Length - 1)}");
await DownloadSinglePack(url, system, outDir, subfolder);
}
Console.Write($"\rComplete!{new string(' ', Console.BufferWidth - 10)}");
Console.WriteLine();
return true;
}
/// <summary>
/// Download from a URI to a local file
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <param name="fileName">Filename to write to</param>
/// <returns>The remote filename from the URI, null on error</returns>
private async Task<string> DownloadFile(string uri, string fileName)
{
// Make the call to get the file
var response = await GetAsync(uri);
if (response?.Content?.Headers == null || !response.IsSuccessStatusCode)
{
Console.WriteLine($"Could not download {uri}");
return null;
}
// Copy the data to a local temp file
using (var responseStream = await response.Content.ReadAsStreamAsync())
using (var tempFileStream = File.OpenWrite(fileName))
{
responseStream.CopyTo(tempFileStream);
}
return response.Content.Headers.ContentDisposition?.FileName?.Replace("\"", "");
}
/// <summary>
/// Download from a URI to a string
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <param name="retries">Number of times to retry on error</param>
/// <returns>String from the URI, null on error</returns>
private async Task<string> DownloadString(string uri, int retries = 3)
{
// Only retry a positive number of times
if (retries <= 0)
return null;
for (int i = 0; i < retries; i++)
{
try
{
return await GetStringAsync(uri);
}
catch { }
}
return null;
}
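/// <summary>
/// Hypothetical variant sketch added for illustration (not part of the original source):
/// the same retry loop as DownloadString above, but with a short pause between attempts.
/// The one-second delay is an assumption, not observed project behavior.
/// </summary>
private async Task<string> DownloadStringWithDelay(string uri, int retries = 3)
{
    // Only retry a positive number of times
    if (retries <= 0)
        return null;

    for (int i = 0; i < retries; i++)
    {
        try
        {
            return await GetStringAsync(uri);
        }
        catch
        {
            // Pause before every attempt except the last
            if (i < retries - 1)
                await Task.Delay(TimeSpan.FromSeconds(1));
        }
    }

    return null;
}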
/// <summary>
/// Move a tempfile to a new name unless it already exists, in which case delete the tempfile
/// </summary>
/// <param name="tempfile">Path to existing temporary file</param>
/// <param name="newfile">Path to new output file</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="subfolder">Optional subfolder to append to the path</param>
private static void MoveOrDelete(string tempfile, string newfile, string outDir, string subfolder)
{
// If we don't have a file to move to, just delete the temp file
if (string.IsNullOrWhiteSpace(newfile))
{
File.Delete(tempfile);
return;
}
// If we have a subfolder, create it and update the newfile name
if (!string.IsNullOrWhiteSpace(subfolder))
{
if (!Directory.Exists(Path.Combine(outDir, subfolder)))
Directory.CreateDirectory(Path.Combine(outDir, subfolder));
newfile = Path.Combine(subfolder, newfile);
}
// If the file already exists, don't overwrite it
if (File.Exists(Path.Combine(outDir, newfile)))
File.Delete(tempfile);
else
File.Move(tempfile, Path.Combine(outDir, newfile));
}
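/// <summary>
/// Hypothetical composition sketch added for illustration (not part of the original source):
/// shows how DownloadFile and MoveOrDelete above fit together, mirroring the pack download
/// flow. The method name is an assumption.
/// </summary>
private async Task DownloadToNamedFile(string uri, string outDir, string subfolder)
{
    // Download into a uniquely named temp file alongside the final destination
    string tempfile = Path.Combine(outDir, "tmp" + Guid.NewGuid().ToString());
    string remoteName = await DownloadFile(uri, tempfile);

    // Rename to the server-supplied filename, or clean up the temp file if none was given
    MoveOrDelete(tempfile, remoteName, outDir, subfolder);
}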
#endregion
}
}
#endif

@@ -1,837 +0,0 @@
#if NET48 || NETSTANDARD2_1
using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Text;
using System.Text.RegularExpressions;
using RedumpLib.Data;
namespace RedumpLib.Web
{
// https://stackoverflow.com/questions/1777221/using-cookiecontainer-with-webclient-class
public class RedumpWebClient : WebClient
{
private readonly CookieContainer m_container = new CookieContainer();
/// <summary>
/// Determines if the user is logged into Redump
/// </summary>
public bool LoggedIn { get; private set; } = false;
/// <summary>
/// Determines if the user is a staff member
/// </summary>
public bool IsStaff { get; private set; } = false;
/// <summary>
/// Get the last downloaded filename, if possible
/// </summary>
/// <returns>The filename from the Content-Disposition header, or null if not available</returns>
public string GetLastFilename()
{
// If the response headers are null or empty
if (ResponseHeaders == null || ResponseHeaders.Count == 0)
return null;
// If we don't have the response header we care about
string headerValue = ResponseHeaders.Get("Content-Disposition");
if (string.IsNullOrWhiteSpace(headerValue))
return null;
// Extract the filename from the value
#if NETSTANDARD2_1
return headerValue[(headerValue.IndexOf("filename=") + 9)..].Replace("\"", "");
#else
return headerValue.Substring(headerValue.IndexOf("filename=") + 9).Replace("\"", "");
#endif
}
/// <inheritdoc/>
protected override WebRequest GetWebRequest(Uri address)
{
WebRequest request = base.GetWebRequest(address);
if (request is HttpWebRequest webRequest)
webRequest.CookieContainer = m_container;
return request;
}
/// <summary>
/// Validate supplied credentials
/// </summary>
public static (bool?, string) ValidateCredentials(string username, string password)
{
// If the username or password is missing, just return
if (string.IsNullOrWhiteSpace(username) || string.IsNullOrWhiteSpace(password))
return (false, null);
// Try logging in with the supplied credentials otherwise
#if NETSTANDARD2_1
using RedumpWebClient wc = new RedumpWebClient();
#else
using (RedumpWebClient wc = new RedumpWebClient())
{
#endif
bool? loggedIn = wc.Login(username, password);
if (loggedIn == true)
return (true, "Redump username and password accepted!");
else if (loggedIn == false)
return (false, "Redump username and password denied!");
else
return (null, "An error occurred validating your credentials!");
#if NET48
}
#endif
}
/// <summary>
/// Login to Redump, if possible
/// </summary>
/// <param name="username">Redump username</param>
/// <param name="password">Redump password</param>
/// <returns>True if the user could be logged in, false otherwise, null on error</returns>
public bool? Login(string username, string password)
{
// Credentials verification
if (!string.IsNullOrWhiteSpace(username) && !string.IsNullOrWhiteSpace(password))
{
Console.WriteLine("Credentials entered, will attempt Redump login...");
}
else if (!string.IsNullOrWhiteSpace(username) && string.IsNullOrWhiteSpace(password))
{
Console.WriteLine("Only a username was specified, will not attempt Redump login...");
return false;
}
else if (string.IsNullOrWhiteSpace(username))
{
Console.WriteLine("No credentials entered, will not attempt Redump login...");
return false;
}
// URL encode the password
password = WebUtility.UrlEncode(password);
// Attempt to login up to 3 times
for (int i = 0; i < 3; i++)
{
try
{
// Get the current token from the login page
var loginPage = DownloadString(Constants.LoginUrl);
string token = Constants.TokenRegex.Match(loginPage).Groups[1].Value;
// Construct the login request
Headers[HttpRequestHeader.ContentType] = "application/x-www-form-urlencoded";
Encoding = Encoding.UTF8;
var response = UploadString(Constants.LoginUrl, $"form_sent=1&redirect_url=&csrf_token={token}&req_username={username}&req_password={password}&save_pass=0");
if (response.Contains("Incorrect username and/or password."))
{
Console.WriteLine("Invalid credentials entered, continuing without logging in...");
return false;
}
// The user was able to be logged in
Console.WriteLine("Credentials accepted! Logged into Redump...");
LoggedIn = true;
// If the user is a moderator or staff, set accordingly
if (response.Contains("http://forum.redump.org/forum/9/staff/"))
IsStaff = true;
return true;
}
catch (Exception ex)
{
Console.WriteLine($"An exception occurred while trying to log in on attempt {i}: {ex}");
}
}
Console.WriteLine("Could not log in to Redump in 3 attempts, continuing without logging in...");
return false;
}
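/// <summary>
/// Hypothetical usage sketch added for illustration (not part of the original source):
/// shows how a caller might drive the legacy synchronous client above. The credentials
/// are placeholders.
/// </summary>
public static void LegacyLoginExample()
{
    // One-off validation without keeping a client around
    (bool? valid, string message) = ValidateCredentials("username", "password");
    Console.WriteLine($"Validation result: {valid}, message: {message}");

    // Or log in on a longer-lived client before downloading restricted data
    using (var wc = new RedumpWebClient())
    {
        if (wc.Login("username", "password") == true)
            Console.WriteLine("Logged in; restricted downloads are now available");
    }
}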
#region Single Page Helpers
/// <summary>
/// Process a Redump site page as a list of possible IDs or a single disc page
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <returns>List of IDs from the page, empty on error</returns>
public List<int> CheckSingleSitePage(string url)
{
List<int> ids = new List<int>();
string dumpsPage = string.Empty;
// Try up to 3 times to retrieve the data
for (int i = 0; i < 3; i++)
{
try
{
dumpsPage = DownloadString(url);
break;
}
catch { }
}
// If we have no dumps left
if (dumpsPage.Contains("No discs found."))
return ids;
// If we have a single disc page already
if (dumpsPage.Contains("<b>Download:</b>"))
{
var value = Regex.Match(dumpsPage, @"/disc/(\d+)/sfv/").Groups[1].Value;
if (int.TryParse(value, out int id))
ids.Add(id);
return ids;
}
// Otherwise, traverse each dump on the page
var matches = Constants.DiscRegex.Matches(dumpsPage);
foreach (Match match in matches)
{
try
{
if (int.TryParse(match.Groups[1].Value, out int value))
ids.Add(value);
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
continue;
}
}
return ids;
}
/// <summary>
/// Process a Redump site page as a list of possible IDs or a single disc page
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="failOnSingle">True to return on first error, false otherwise</param>
/// <returns>True if the page could be downloaded, false otherwise</returns>
public bool CheckSingleSitePage(string url, string outDir, bool failOnSingle)
{
string dumpsPage = string.Empty;
// Try up to 3 times to retrieve the data
for (int i = 0; i < 3; i++)
{
try
{
dumpsPage = DownloadString(url);
break;
}
catch { }
}
// If we have no dumps left
if (dumpsPage.Contains("No discs found."))
return false;
// If we have a single disc page already
if (dumpsPage.Contains("<b>Download:</b>"))
{
var value = Regex.Match(dumpsPage, @"/disc/(\d+)/sfv/").Groups[1].Value;
if (int.TryParse(value, out int id))
{
bool downloaded = DownloadSingleSiteID(id, outDir, false);
if (!downloaded && failOnSingle)
return false;
}
return false;
}
// Otherwise, traverse each dump on the page
var matches = Constants.DiscRegex.Matches(dumpsPage);
foreach (Match match in matches)
{
try
{
if (int.TryParse(match.Groups[1].Value, out int value))
{
bool downloaded = DownloadSingleSiteID(value, outDir, false);
if (!downloaded && failOnSingle)
return false;
}
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
continue;
}
}
return true;
}
/// <summary>
/// Process a Redump WIP page as a list of possible IDs or a single disc page
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <returns>List of IDs from the page, empty on error</returns>
public List<int> CheckSingleWIPPage(string url)
{
List<int> ids = new List<int>();
string dumpsPage = string.Empty;
// Try up to 3 times to retrieve the data
for (int i = 0; i < 3; i++)
{
try
{
dumpsPage = DownloadString(url);
break;
}
catch { }
}
// If we have no dumps left
if (dumpsPage.Contains("No discs found."))
return ids;
// Otherwise, traverse each dump on the page
var matches = Constants.NewDiscRegex.Matches(dumpsPage);
foreach (Match match in matches)
{
try
{
if (int.TryParse(match.Groups[2].Value, out int value))
ids.Add(value);
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
continue;
}
}
return ids;
}
/// <summary>
/// Process a Redump WIP page as a list of possible IDs or a single disc page
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="failOnSingle">True to return on first error, false otherwise</param>
/// <returns>True if the page could be downloaded, false otherwise</returns>
public bool CheckSingleWIPPage(string url, string outDir, bool failOnSingle)
{
string dumpsPage = string.Empty;
// Try up to 3 times to retrieve the data
for (int i = 0; i < 3; i++)
{
try
{
dumpsPage = DownloadString(url);
break;
}
catch { }
}
// If we have no dumps left
if (dumpsPage.Contains("No discs found."))
return false;
// Otherwise, traverse each dump on the page
var matches = Constants.NewDiscRegex.Matches(dumpsPage);
foreach (Match match in matches)
{
try
{
if (int.TryParse(match.Groups[2].Value, out int value))
{
bool downloaded = DownloadSingleWIPID(value, outDir, false);
if (!downloaded && failOnSingle)
return false;
}
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
continue;
}
}
return true;
}
#endregion
#region Download Helpers
/// <summary>
/// Download a single pack
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="system">System to download packs for</param>
/// <returns>Byte array containing the downloaded pack, null on error</returns>
public byte[] DownloadSinglePack(string url, RedumpSystem? system)
{
try
{
return DownloadData(string.Format(url, system.ShortName()));
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return null;
}
}
/// <summary>
/// Download a single pack
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="system">System to download packs for</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="subfolder">Named subfolder for the pack, used optionally</param>
public void DownloadSinglePack(string url, RedumpSystem? system, string outDir, string subfolder)
{
try
{
// If no output directory is defined, use the current directory instead
if (string.IsNullOrWhiteSpace(outDir))
outDir = Environment.CurrentDirectory;
string tempfile = Path.Combine(outDir, "tmp" + Guid.NewGuid().ToString());
DownloadFile(string.Format(url, system.ShortName()), tempfile);
MoveOrDelete(tempfile, GetLastFilename(), outDir, subfolder);
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
}
}
/// <summary>
/// Download the data for an individual site ID, if possible
/// </summary>
/// <param name="id">Redump disc ID to retrieve</param>
/// <returns>String containing the page contents if successful, null on error</returns>
public string DownloadSingleSiteID(int id)
{
string paddedId = id.ToString().PadLeft(5, '0');
Console.WriteLine($"Processing ID: {paddedId}");
try
{
string discPage = string.Empty;
// Try up to 3 times to retrieve the data
for (int i = 0; i < 3; i++)
{
try
{
discPage = DownloadString(string.Format(Constants.DiscPageUrl, +id));
break;
}
catch { }
}
if (discPage.Contains($"Disc with ID \"{id}\" doesn't exist"))
{
Console.WriteLine($"ID {paddedId} could not be found!");
return null;
}
Console.WriteLine($"ID {paddedId} has been successfully downloaded");
return discPage;
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return null;
}
}
/// <summary>
/// Download the data for an individual site ID, if possible
/// </summary>
/// <param name="id">Redump disc ID to retrieve</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="rename">True to rename deleted entries, false otherwise</param>
/// <returns>True if all data was downloaded, false otherwise</returns>
public bool DownloadSingleSiteID(int id, string outDir, bool rename)
{
// If no output directory is defined, use the current directory instead
if (string.IsNullOrWhiteSpace(outDir))
outDir = Environment.CurrentDirectory;
string paddedId = id.ToString().PadLeft(5, '0');
string paddedIdDir = Path.Combine(outDir, paddedId);
Console.WriteLine($"Processing ID: {paddedId}");
try
{
string discPage = string.Empty;
// Try up to 3 times to retrieve the data
for (int i = 0; i < 3; i++)
{
try
{
discPage = DownloadString(string.Format(Constants.DiscPageUrl, +id));
break;
}
catch { }
}
if (discPage.Contains($"Disc with ID \"{id}\" doesn't exist"))
{
try
{
if (rename)
{
if (Directory.Exists(paddedIdDir) && rename)
Directory.Move(paddedIdDir, paddedIdDir + "-deleted");
else
Directory.CreateDirectory(paddedIdDir + "-deleted");
}
}
catch { }
Console.WriteLine($"ID {paddedId} could not be found!");
return false;
}
// Check if the page has been updated since the last time it was downloaded, if possible
if (File.Exists(Path.Combine(paddedIdDir, "disc.html")))
{
// Read in the cached file
var oldDiscPage = File.ReadAllText(Path.Combine(paddedIdDir, "disc.html"));
// Check for the last modified date in both pages
var oldResult = Constants.LastModifiedRegex.Match(oldDiscPage);
var newResult = Constants.LastModifiedRegex.Match(discPage);
// If both pages contain the same modified date, skip it
if (oldResult.Success && newResult.Success && oldResult.Groups[1].Value == newResult.Groups[1].Value)
{
Console.WriteLine($"ID {paddedId} has not been changed since last download");
return false;
}
// If neither page contains a modified date, skip it
else if (!oldResult.Success && !newResult.Success)
{
Console.WriteLine($"ID {paddedId} has not been changed since last download");
return false;
}
}
// Create ID subdirectory
Directory.CreateDirectory(paddedIdDir);
// View Edit History
if (discPage.Contains($"<a href=\"/disc/{id}/changes/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.ChangesExt, Path.Combine(paddedIdDir, "changes.html"));
// CUE
if (discPage.Contains($"<a href=\"/disc/{id}/cue/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.CueExt, Path.Combine(paddedIdDir, paddedId + ".cue"));
// Edit disc
if (discPage.Contains($"<a href=\"/disc/{id}/edit/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.EditExt, Path.Combine(paddedIdDir, "edit.html"));
// GDI
if (discPage.Contains($"<a href=\"/disc/{id}/gdi/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.GdiExt, Path.Combine(paddedIdDir, paddedId + ".gdi"));
// KEYS
if (discPage.Contains($"<a href=\"/disc/{id}/key/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.KeyExt, Path.Combine(paddedIdDir, paddedId + ".key"));
// LSD
if (discPage.Contains($"<a href=\"/disc/{id}/lsd/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.LsdExt, Path.Combine(paddedIdDir, paddedId + ".lsd"));
// MD5
if (discPage.Contains($"<a href=\"/disc/{id}/md5/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.Md5Ext, Path.Combine(paddedIdDir, paddedId + ".md5"));
// Review WIP entry
if (Constants.NewDiscRegex.IsMatch(discPage))
{
var match = Constants.NewDiscRegex.Match(discPage);
DownloadFile(string.Format(Constants.WipDiscPageUrl, match.Groups[2].Value), Path.Combine(paddedIdDir, "newdisc.html"));
}
// SBI
if (discPage.Contains($"<a href=\"/disc/{id}/sbi/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.SbiExt, Path.Combine(paddedIdDir, paddedId + ".sbi"));
// SFV
if (discPage.Contains($"<a href=\"/disc/{id}/sfv/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.SfvExt, Path.Combine(paddedIdDir, paddedId + ".sfv"));
// SHA1
if (discPage.Contains($"<a href=\"/disc/{id}/sha1/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.Sha1Ext, Path.Combine(paddedIdDir, paddedId + ".sha1"));
// HTML (Last in case of errors)
using (var discStreamWriter = File.CreateText(Path.Combine(paddedIdDir, "disc.html")))
{
discStreamWriter.Write(discPage);
}
Console.WriteLine($"ID {paddedId} has been successfully downloaded");
return true;
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return false;
}
}
/// <summary>
/// Download the data for an individual WIP ID, if possible
/// </summary>
/// <param name="id">Redump WIP disc ID to retrieve</param>
/// <returns>String containing the page contents if successful, null on error</returns>
public string DownloadSingleWIPID(int id)
{
string paddedId = id.ToString().PadLeft(5, '0');
Console.WriteLine($"Processing ID: {paddedId}");
try
{
string discPage = string.Empty;
// Try up to 3 times to retrieve the data
for (int i = 0; i < 3; i++)
{
try
{
discPage = DownloadString(string.Format(Constants.WipDiscPageUrl, +id));
break;
}
catch { }
}
if (discPage.Contains($"WIP disc with ID \"{id}\" doesn't exist"))
{
Console.WriteLine($"ID {paddedId} could not be found!");
return null;
}
Console.WriteLine($"ID {paddedId} has been successfully downloaded");
return discPage;
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return null;
}
}
/// <summary>
/// Download the data for an individual WIP ID, if possible
/// </summary>
/// <param name="id">Redump WIP disc ID to retrieve</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="rename">True to rename deleted entries, false otherwise</param>
/// <returns>True if all data was downloaded, false otherwise</returns>
public bool DownloadSingleWIPID(int id, string outDir, bool rename)
{
// If no output directory is defined, use the current directory instead
if (string.IsNullOrWhiteSpace(outDir))
outDir = Environment.CurrentDirectory;
string paddedId = id.ToString().PadLeft(5, '0');
string paddedIdDir = Path.Combine(outDir, paddedId);
Console.WriteLine($"Processing ID: {paddedId}");
try
{
string discPage = string.Empty;
// Try up to 3 times to retrieve the data
for (int i = 0; i < 3; i++)
{
try
{
discPage = DownloadString(string.Format(Constants.WipDiscPageUrl, +id));
break;
}
catch { }
}
if (discPage.Contains($"WIP disc with ID \"{id}\" doesn't exist"))
{
try
{
if (rename)
{
if (Directory.Exists(paddedIdDir) && rename)
Directory.Move(paddedIdDir, paddedIdDir + "-deleted");
else
Directory.CreateDirectory(paddedIdDir + "-deleted");
}
}
catch { }
Console.WriteLine($"ID {paddedId} could not be found!");
return false;
}
// Check if the page has been updated since the last time it was downloaded, if possible
if (File.Exists(Path.Combine(paddedIdDir, "disc.html")))
{
// Read in the cached file
var oldDiscPage = File.ReadAllText(Path.Combine(paddedIdDir, "disc.html"));
// Check for the full match ID in both pages
var oldResult = Constants.FullMatchRegex.Match(oldDiscPage);
var newResult = Constants.FullMatchRegex.Match(discPage);
// If both pages contain the same ID, skip it
if (oldResult.Success && newResult.Success && oldResult.Groups[1].Value == newResult.Groups[1].Value)
{
Console.WriteLine($"ID {paddedId} has not been changed since last download");
return false;
}
// If neither page contains an ID, skip it
else if (!oldResult.Success && !newResult.Success)
{
Console.WriteLine($"ID {paddedId} has not been changed since last download");
return false;
}
}
// Create ID subdirectory
Directory.CreateDirectory(paddedIdDir);
// HTML
using (var discStreamWriter = File.CreateText(Path.Combine(paddedIdDir, "disc.html")))
{
discStreamWriter.Write(discPage);
}
Console.WriteLine($"ID {paddedId} has been successfully downloaded");
return true;
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return false;
}
}
#endregion
#region Helpers
/// <summary>
/// Download a set of packs
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="systems">Systems to download packs for</param>
/// <param name="title">Name of the pack being downloaded</param>
public Dictionary<RedumpSystem, byte[]> DownloadPacks(string url, RedumpSystem?[] systems, string title)
{
var packsDictionary = new Dictionary<RedumpSystem, byte[]>();
Console.WriteLine($"Downloading {title}");
foreach (var system in systems)
{
// If the system is invalid, we can't do anything
if (system == null || !system.IsAvailable())
continue;
// If we aren't logged in, skip banned systems
if (!LoggedIn && system.IsBanned())
continue;
// If the system is unknown, we can't do anything
string longName = system.LongName();
if (string.IsNullOrWhiteSpace(longName))
continue;
Console.Write($"\r{longName}{new string(' ', Console.BufferWidth - longName.Length - 1)}");
byte[] pack = DownloadSinglePack(url, system);
if (pack != null)
packsDictionary.Add(system.Value, pack);
}
Console.Write($"\rComplete!{new string(' ', Console.BufferWidth - 10)}");
Console.WriteLine();
return packsDictionary;
}
/// <summary>
/// Download a set of packs
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="systems">Systems to download packs for</param>
/// <param name="title">Name of the pack being downloaded</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="subfolder">Named subfolder for the pack, used optionally</param>
public void DownloadPacks(string url, RedumpSystem?[] systems, string title, string outDir, string subfolder)
{
Console.WriteLine($"Downloading {title}");
foreach (var system in systems)
{
// If the system is invalid, we can't do anything
if (system == null || !system.IsAvailable())
continue;
// If we aren't logged in, skip banned systems
if (!LoggedIn && system.IsBanned())
continue;
// If the system is unknown, we can't do anything
string longName = system.LongName();
if (string.IsNullOrWhiteSpace(longName))
continue;
Console.Write($"\r{longName}{new string(' ', Console.BufferWidth - longName.Length - 1)}");
DownloadSinglePack(url, system, outDir, subfolder);
}
Console.Write($"\rComplete!{new string(' ', Console.BufferWidth - 10)}");
Console.WriteLine();
}
/// <summary>
/// Move a tempfile to a new name unless it already exists, in which case delete the tempfile
/// </summary>
/// <param name="tempfile">Path to existing temporary file</param>
/// <param name="newfile">Path to new output file</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="subfolder">Optional subfolder to append to the path</param>
private static void MoveOrDelete(string tempfile, string newfile, string outDir, string subfolder)
{
if (!string.IsNullOrWhiteSpace(newfile))
{
if (!string.IsNullOrWhiteSpace(subfolder))
{
if (!Directory.Exists(Path.Combine(outDir, subfolder)))
Directory.CreateDirectory(Path.Combine(outDir, subfolder));
newfile = Path.Combine(subfolder, newfile);
}
if (File.Exists(Path.Combine(outDir, newfile)))
File.Delete(tempfile);
else
File.Move(tempfile, Path.Combine(outDir, newfile));
}
else
File.Delete(tempfile);
}
#endregion
}
}
#endif

@@ -1,5 +1,5 @@
# version format
-version: 2.6.2-{build}
+version: 2.6.4-{build}
# pull request template
pull_requests:
@@ -51,14 +51,6 @@ after_build:
- 7z x aaru-5.3.2_windows_x64.zip -oMPF\bin\Debug\net6.0-windows\win81-x64\publish\Programs\Aaru *
- 7z x aaru-5.3.2_windows_x64.zip -oMPF\bin\Debug\net6.0-windows\win10-x64\publish\Programs\Aaru *
# dd for Windows
- ps: appveyor DownloadFile http://www.chrysocome.net/downloads/8ab730cd2a29e76ddd89be1f99357942/dd-0.6beta3.zip
- 7z e dd-0.6beta3.zip -oMPF\bin\Debug\net48\publish\Programs\DD *
- 7z e dd-0.6beta3.zip -oMPF\bin\Debug\net6.0-windows\win7-x64\publish\Programs\DD *
- 7z e dd-0.6beta3.zip -oMPF\bin\Debug\net6.0-windows\win8-x64\publish\Programs\DD *
- 7z e dd-0.6beta3.zip -oMPF\bin\Debug\net6.0-windows\win81-x64\publish\Programs\DD *
- 7z e dd-0.6beta3.zip -oMPF\bin\Debug\net6.0-windows\win10-x64\publish\Programs\DD *
# DiscImageCreator
- ps: appveyor DownloadFile https://github.com/saramibreak/DiscImageCreator/files/11660558/DiscImageCreator_20230606.zip
- 7z e DiscImageCreator_20230606.zip -oMPF\bin\Debug\net48\publish\Programs\Creator Release_ANSI\*