Migrate to Nuget package for Redump

This commit is contained in:
Matt Nadareski
2023-09-05 00:08:09 -04:00
parent c2ba58148a
commit 816c94de58
60 changed files with 151 additions and 8904 deletions

View File

@@ -8,6 +8,7 @@
- Fix dumping path in DD
- Fix PlayStation serial code region parsing (Deterous)
- Retrofit README
- Migrate to NuGet package for Redump
### 2.6.3 (2023-08-15)
@@ -835,7 +836,7 @@
- edccchk now run on all CD-Roms
- Discs unsupported by Windows are now recognized
- Extra \ when accepting default save has been removed.
### 1.02b (2018-05-18)
- Added missing DLL
@@ -848,4 +849,4 @@
### 1.01d (2018-05-18)
-Combine IBM PC-CD options, misc fixes.
-Combine IBM PC-CD options, misc fixes.

View File

@@ -27,10 +27,16 @@
<None Include="App.config" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\MPF.Core\MPF.Core.csproj" />
<ProjectReference Include="..\MPF.Library\MPF.Library.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="BurnOutSharp" PrivateAssets="build; analyzers" ExcludeAssets="contentFiles" Version="2.8.0" GeneratePathProperty="true">
<IncludeAssets>runtime; compile; build; native; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="SabreTools.RedumpLib" Version="1.1.1" />
<PackageReference Include="Unclassified.NetRevisionTask" Version="0.4.3">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
@@ -41,10 +47,4 @@
<Content Include="$(PkgBurnOutSharp)\content\**" PackagePath="contentFiles\any\any;content" CopyToOutputDirectory="Always" PackageCopyToOutput="true" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\MPF.Core\MPF.Core.csproj" />
<ProjectReference Include="..\MPF.Library\MPF.Library.csproj" />
<ProjectReference Include="..\RedumpLib\RedumpLib.csproj" />
</ItemGroup>
</Project>

View File

@@ -5,8 +5,8 @@ using MPF.Core.Converters;
using MPF.Core.Data;
using MPF.Core.Utilities;
using MPF.Library;
using RedumpLib.Data;
using RedumpLib.Web;
using SabreTools.RedumpLib.Data;
using SabreTools.RedumpLib.Web;
namespace MPF.Check
{

View File

@@ -6,7 +6,7 @@ using System.Reflection;
using IMAPI2;
#endif
using MPF.Core.Data;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Core.Converters
{
@@ -98,7 +98,7 @@ namespace MPF.Core.Converters
if (!LongNameMethods.TryGetValue(sourceType, out MethodInfo method))
{
method = typeof(RedumpLib.Data.Extensions).GetMethod("LongName", new[] { typeof(Nullable<>).MakeGenericType(sourceType) });
method = typeof(SabreTools.RedumpLib.Data.Extensions).GetMethod("LongName", new[] { typeof(Nullable<>).MakeGenericType(sourceType) });
if (method == null)
method = typeof(EnumConverter).GetMethod("LongName", new[] { typeof(Nullable<>).MakeGenericType(sourceType) });

View File

@@ -1,6 +1,6 @@
using System.Collections.Generic;
using System.Linq;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Core.Data
{

View File

@@ -6,7 +6,7 @@ using Microsoft.Management.Infrastructure;
using Microsoft.Management.Infrastructure.Generic;
using MPF.Core.Converters;
using MPF.Core.Utilities;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
#if NETFRAMEWORK
using IMAPI2;
#endif

View File

@@ -2,7 +2,7 @@
using System.Collections;
using System.Collections.Generic;
using MPF.Core.Converters;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Core.Data
{

View File

@@ -1,4 +1,4 @@
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Core.Data
{

View File

@@ -41,13 +41,10 @@
</COMReference>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\RedumpLib\RedumpLib.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Management.Infrastructure" Version="2.0.0" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.RedumpLib" Version="1.1.1" />
<PackageReference Include="System.Configuration.ConfigurationManager" Version="7.0.0" />
<PackageReference Include="System.Runtime.CompilerServices.Unsafe" Version="6.0.0" />
</ItemGroup>

View File

@@ -2,7 +2,7 @@
using System.Collections.Generic;
using MPF.Core.Converters;
using MPF.Core.Data;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Core.Utilities
{

View File

@@ -2,7 +2,7 @@
using System.Reflection;
using MPF.Core.Data;
using Newtonsoft.Json.Linq;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Core.Utilities
{

View File

@@ -7,7 +7,7 @@ using BurnOutSharp;
using MPF.Core.Data;
using MPF.Core.Utilities;
using MPF.Modules;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Library
{

View File

@@ -14,8 +14,8 @@ using MPF.Core.Data;
using MPF.Core.Utilities;
using MPF.Modules;
using Newtonsoft.Json;
using RedumpLib.Data;
using RedumpLib.Web;
using SabreTools.RedumpLib.Data;
using SabreTools.RedumpLib.Web;
using Formatting = Newtonsoft.Json.Formatting;
namespace MPF.Library
@@ -84,9 +84,17 @@ namespace MPF.Library
Serial = options.AddPlaceholders ? Template.RequiredIfExistsValue : string.Empty,
Barcode = options.AddPlaceholders ? Template.OptionalValue : string.Empty,
Contents = string.Empty,
#if NET48
ContentsSpecialFields = new Dictionary<SiteCode?, string>(),
#else
ContentsSpecialFields = new Dictionary<SiteCode, string>(),
#endif
Comments = string.Empty,
#if NET48
CommentsSpecialFields = new Dictionary<SiteCode?, string>(),
#else
CommentsSpecialFields = new Dictionary<SiteCode, string>(),
#endif
},
VersionAndEditions = new VersionAndEditionsSection()
{
@@ -569,7 +577,7 @@ namespace MPF.Library
}
}
#endregion
#endregion
#region Information Output
@@ -2050,6 +2058,10 @@ namespace MPF.Library
bool foundTag = false;
foreach (SiteCode? siteCode in Enum.GetValues(typeof(SiteCode)))
{
// If we have a null site code, just skip
if (siteCode == null)
continue;
// If the line doesn't contain this tag, just skip
if (!commentLine.Contains(siteCode.ShortName()))
continue;
@@ -2096,12 +2108,12 @@ namespace MPF.Library
}
// If we don't already have this site code, add it to the dictionary
if (!info.CommonDiscInfo.CommentsSpecialFields.ContainsKey(siteCode))
info.CommonDiscInfo.CommentsSpecialFields[siteCode] = $"(VERIFY THIS) {commentLine.Replace(siteCode.ShortName(), string.Empty).Trim()}";
if (!info.CommonDiscInfo.CommentsSpecialFields.ContainsKey(siteCode.Value))
info.CommonDiscInfo.CommentsSpecialFields[siteCode.Value] = $"(VERIFY THIS) {commentLine.Replace(siteCode.ShortName(), string.Empty).Trim()}";
// Otherwise, append the value to the existing key
else
info.CommonDiscInfo.CommentsSpecialFields[siteCode] += $", {commentLine.Replace(siteCode.ShortName(), string.Empty).Trim()}";
info.CommonDiscInfo.CommentsSpecialFields[siteCode.Value] += $", {commentLine.Replace(siteCode.ShortName(), string.Empty).Trim()}";
break;
}
@@ -2111,10 +2123,10 @@ namespace MPF.Library
{
if (addToLast && lastSiteCode != null)
{
if (!string.IsNullOrWhiteSpace(info.CommonDiscInfo.CommentsSpecialFields[lastSiteCode]))
info.CommonDiscInfo.CommentsSpecialFields[lastSiteCode] += "\n";
if (!string.IsNullOrWhiteSpace(info.CommonDiscInfo.CommentsSpecialFields[lastSiteCode.Value]))
info.CommonDiscInfo.CommentsSpecialFields[lastSiteCode.Value] += "\n";
info.CommonDiscInfo.CommentsSpecialFields[lastSiteCode] += commentLine;
info.CommonDiscInfo.CommentsSpecialFields[lastSiteCode.Value] += commentLine;
}
else
{
@@ -2167,6 +2179,10 @@ namespace MPF.Library
bool foundTag = false;
foreach (SiteCode? siteCode in Enum.GetValues(typeof(SiteCode)))
{
// If we have a null site code, just skip
if (siteCode == null)
continue;
// If the line doesn't contain this tag, just skip
if (!contentLine.Contains(siteCode.ShortName()))
continue;
@@ -2175,8 +2191,8 @@ namespace MPF.Library
lastSiteCode = siteCode;
// If we don't already have this site code, add it to the dictionary
if (!info.CommonDiscInfo.ContentsSpecialFields.ContainsKey(siteCode))
info.CommonDiscInfo.ContentsSpecialFields[siteCode] = $"(VERIFY THIS) {contentLine.Replace(siteCode.ShortName(), string.Empty).Trim()}";
if (!info.CommonDiscInfo.ContentsSpecialFields.ContainsKey(siteCode.Value))
info.CommonDiscInfo.ContentsSpecialFields[siteCode.Value] = $"(VERIFY THIS) {contentLine.Replace(siteCode.ShortName(), string.Empty).Trim()}";
// A subset of tags can be multiline
addToLast = IsMultiLine(siteCode);
@@ -2191,10 +2207,10 @@ namespace MPF.Library
{
if (addToLast && lastSiteCode != null)
{
if (!string.IsNullOrWhiteSpace(info.CommonDiscInfo.ContentsSpecialFields[lastSiteCode]))
info.CommonDiscInfo.ContentsSpecialFields[lastSiteCode] += "\n";
if (!string.IsNullOrWhiteSpace(info.CommonDiscInfo.ContentsSpecialFields[lastSiteCode.Value]))
info.CommonDiscInfo.ContentsSpecialFields[lastSiteCode.Value] += "\n";
info.CommonDiscInfo.ContentsSpecialFields[lastSiteCode] += contentLine;
info.CommonDiscInfo.ContentsSpecialFields[lastSiteCode.Value] += contentLine;
}
else
{
@@ -2697,7 +2713,11 @@ namespace MPF.Library
/// Order comment code tags according to Redump requirements
/// </summary>
/// <returns>Ordered list of KeyValuePairs representing the tags and values</returns>
#if NET48
private static List<KeyValuePair<SiteCode?, string>> OrderCommentTags(Dictionary<SiteCode?, string> tags)
#else
private static List<KeyValuePair<SiteCode?, string>> OrderCommentTags(Dictionary<SiteCode, string> tags)
#endif
{
var sorted = new List<KeyValuePair<SiteCode?, string>>();
@@ -2820,7 +2840,11 @@ namespace MPF.Library
/// Order content code tags according to Redump requirements
/// </summary>
/// <returns>Ordered list of KeyValuePairs representing the tags and values</returns>
#if NET48
private static List<KeyValuePair<SiteCode?, string>> OrderContentTags(Dictionary<SiteCode?, string> tags)
#else
private static List<KeyValuePair<SiteCode?, string>> OrderContentTags(Dictionary<SiteCode, string> tags)
#endif
{
var sorted = new List<KeyValuePair<SiteCode?, string>>();
@@ -2859,6 +2883,6 @@ namespace MPF.Library
return sorted;
}
#endregion
#endregion
}
}

View File

@@ -24,7 +24,6 @@
<ItemGroup>
<ProjectReference Include="..\MPF.Core\MPF.Core.csproj" />
<ProjectReference Include="..\MPF.Modules\MPF.Modules.csproj" />
<ProjectReference Include="..\RedumpLib\RedumpLib.csproj" />
</ItemGroup>
<ItemGroup>
@@ -32,6 +31,7 @@
<IncludeAssets>runtime; compile; build; native; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.RedumpLib" Version="1.1.1" />
<PackageReference Include="System.IO.Compression" Version="4.3.0" />
<PackageReference Include="System.IO.Compression.ZipFile" Version="4.3.0" />
<PackageReference Include="System.Runtime.CompilerServices.Unsafe" Version="6.0.0" />

View File

@@ -1,4 +1,4 @@
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Modules.Aaru
{

View File

@@ -10,7 +10,7 @@ using System.Xml.Serialization;
using MPF.Core.Converters;
using MPF.Core.Data;
using MPF.CueSheets;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
using Schemas;
namespace MPF.Modules.Aaru

View File

@@ -12,7 +12,7 @@ using System.Xml.Serialization;
using MPF.Core.Data;
using MPF.Core.Hashing;
using MPF.Core.Utilities;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Modules
{

View File

@@ -5,7 +5,7 @@ using System.Linq;
using System.Text.RegularExpressions;
using MPF.Core.Converters;
using MPF.Core.Data;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Modules.CleanRip
{

View File

@@ -1,4 +1,4 @@
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Modules.DD
{

View File

@@ -5,7 +5,7 @@ using System.Linq;
using System.Text.RegularExpressions;
using MPF.Core.Converters;
using MPF.Core.Data;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Modules.DD
{

View File

@@ -1,4 +1,4 @@
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Modules.DiscImageCreator
{

View File

@@ -8,7 +8,7 @@ using MPF.Core.Converters;
using MPF.Core.Data;
using MPF.Core.Utilities;
using MPF.CueSheets;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Modules.DiscImageCreator
{

View File

@@ -23,10 +23,10 @@
<ItemGroup>
<ProjectReference Include="..\MPF.Core\MPF.Core.csproj" />
<ProjectReference Include="..\MPF.CueSheets\MPF.CueSheets.csproj" />
<ProjectReference Include="..\RedumpLib\RedumpLib.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="SabreTools.RedumpLib" Version="1.1.1" />
<PackageReference Include="System.Memory" Version="4.5.5" />
<PackageReference Include="System.Runtime.CompilerServices.Unsafe" Version="6.0.0" />
<PackageReference Include="Unclassified.NetRevisionTask" Version="0.4.3">

View File

@@ -1,4 +1,4 @@
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Modules.Redumper
{

View File

@@ -6,7 +6,7 @@ using System.Text.RegularExpressions;
using MPF.Core.Converters;
using MPF.Core.Data;
using MPF.Core.Utilities;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Modules.Redumper
{

View File

@@ -4,7 +4,7 @@ using System.IO;
using System.Linq;
using MPF.Core.Converters;
using MPF.Core.Data;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.Modules.UmdImageCreator
{

View File

@@ -2,7 +2,7 @@
using System.Collections.Generic;
using System.Linq;
using MPF.Core.Utilities;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace MPF.Test.Core.Utilities

View File

@@ -1,7 +1,7 @@
using System.IO;
using MPF.Core.Data;
using MPF.Library;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace MPF.Test.Library

View File

@@ -1,7 +1,7 @@
using System.Collections.Generic;
using System.IO;
using MPF.Library;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace MPF.Test.Library
@@ -76,13 +76,21 @@ namespace MPF.Test.Library
CommonDiscInfo = new CommonDiscInfoSection()
{
Comments = "This is a comments line\n[T:ISBN] ISBN Value",
#if NET48
CommentsSpecialFields = new Dictionary<SiteCode?, string>()
#else
CommentsSpecialFields = new Dictionary<SiteCode, string>()
#endif
{
[SiteCode.VolumeLabel] = "VOLUME_LABEL",
},
Contents = "This is a contents line\n[T:GF] Game Footage",
#if NET48
ContentsSpecialFields = new Dictionary<SiteCode?, string>()
#else
ContentsSpecialFields = new Dictionary<SiteCode, string>()
#endif
{
[SiteCode.Patches] = "1.04 patch",
},
@@ -132,13 +140,21 @@ namespace MPF.Test.Library
CommonDiscInfo = new CommonDiscInfoSection()
{
Comments = null,
#if NET48
CommentsSpecialFields = new Dictionary<SiteCode?, string>()
#else
CommentsSpecialFields = new Dictionary<SiteCode, string>()
#endif
{
[SiteCode.VolumeLabel] = "VOLUME_LABEL",
},
Contents = null,
#if NET48
ContentsSpecialFields = new Dictionary<SiteCode?, string>()
#else
ContentsSpecialFields = new Dictionary<SiteCode, string>()
#endif
{
[SiteCode.Patches] = "1.04 patch",
},

View File

@@ -30,13 +30,13 @@
<ProjectReference Include="..\MPF\MPF.csproj" />
<ProjectReference Include="..\MPF.Library\MPF.Library.csproj" />
<ProjectReference Include="..\MPF.Modules\MPF.Modules.csproj" />
<ProjectReference Include="..\RedumpLib\RedumpLib.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.CodeCoverage" Version="17.6.3" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.6.3" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.RedumpLib" Version="1.1.1" />
<PackageReference Include="xunit" Version="2.5.0" />
<PackageReference Include="xunit.abstractions" Version="2.0.3" />
<PackageReference Include="xunit.analyzers" Version="1.2.0" />

View File

@@ -1,7 +1,7 @@
using System.Collections.Generic;
using MPF.Core.Data;
using MPF.Modules.DiscImageCreator;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace MPF.Test.Modules

View File

@@ -2,7 +2,7 @@
using System.Collections.Generic;
using System.Data;
using System.Linq;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace MPF.Test.RedumpLib

View File

@@ -2,7 +2,7 @@
using System.Collections.Generic;
using Newtonsoft.Json;
using psxt001z;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace MPF.Test.RedumpLib
@@ -82,12 +82,20 @@ namespace MPF.Test.RedumpLib
EXEDateBuildDate = "19xx-xx-xx",
ErrorsCount = "0",
Comments = "Comment data line 1\r\nComment data line 2",
#if NET48
CommentsSpecialFields = new Dictionary<SiteCode?, string>()
#else
CommentsSpecialFields = new Dictionary<SiteCode, string>()
#endif
{
[SiteCode.ISBN] = "ISBN",
},
Contents = "Special contents 1\r\nSpecial contents 2",
#if NET48
ContentsSpecialFields = new Dictionary<SiteCode?, string>()
#else
ContentsSpecialFields = new Dictionary<SiteCode, string>()
#endif
{
[SiteCode.PlayableDemos] = "Game Demo 1",
},

View File

@@ -1,6 +1,6 @@
using System.Linq;
using MPF.Core.Data;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace MPF.Test.Data

View File

@@ -23,13 +23,6 @@
<UserSecretsId>27abb4ca-bf7a-431e-932f-49153303d5ff</UserSecretsId>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Unclassified.NetRevisionTask" Version="0.4.3">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>
<Resource Include="Images\ring-code-guide-1-layer.png" />
<Resource Include="Images\ring-code-guide-2-layer.png" />
@@ -37,13 +30,20 @@
<ItemGroup>
<ProjectReference Include="..\MPF.Core\MPF.Core.csproj" />
<ProjectReference Include="..\RedumpLib\RedumpLib.csproj" />
</ItemGroup>
<ItemGroup>
<Reference Include="PresentationFramework.Aero" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="SabreTools.RedumpLib" Version="1.1.1" />
<PackageReference Include="Unclassified.NetRevisionTask" Version="0.4.3">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>
<Folder Include="Images\" />
</ItemGroup>

View File

@@ -5,7 +5,7 @@ using MPF.Core.Data;
using MPF.Core.Utilities;
using MPF.UI.Core.ComboBoxItems;
using MPF.UI.Core.Windows;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.UI.Core.ViewModels
{
@@ -536,9 +536,17 @@ namespace MPF.UI.Core.ViewModels
// Initialize the dictionaries, if needed
if (SubmissionInfo.CommonDiscInfo.CommentsSpecialFields == null)
#if NET48
SubmissionInfo.CommonDiscInfo.CommentsSpecialFields = new Dictionary<SiteCode?, string>();
#else
SubmissionInfo.CommonDiscInfo.CommentsSpecialFields = new Dictionary<SiteCode, string>();
#endif
if (SubmissionInfo.CommonDiscInfo.ContentsSpecialFields == null)
#if NET48
SubmissionInfo.CommonDiscInfo.ContentsSpecialFields = new Dictionary<SiteCode?, string>();
#else
SubmissionInfo.CommonDiscInfo.ContentsSpecialFields = new Dictionary<SiteCode, string>();
#endif
#region Comment Fields
@@ -801,7 +809,7 @@ namespace MPF.UI.Core.ViewModels
}
}
#endregion
#endregion
#region Event Handlers

View File

@@ -1,6 +1,6 @@
using MPF.Core.Data;
using MPF.UI.Core.ViewModels;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF.UI.Core.Windows
{

View File

@@ -23,8 +23,6 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution
publish-win.bat = publish-win.bat
EndProjectSection
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "RedumpLib", "RedumpLib\RedumpLib.csproj", "{13574913-A426-4644-9955-F49AD0876E5F}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "MPF.CueSheets", "MPF.CueSheets\MPF.CueSheets.csproj", "{F2C12798-DF53-4D4C-A55B-F5A77F29D6B1}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "MPF.Core", "MPF.Core\MPF.Core.csproj", "{70B1265D-FE49-472A-A83D-0B462152D37A}"
@@ -57,10 +55,6 @@ Global
{8CFDE289-E171-4D49-A40D-5293265C1253}.Debug|Any CPU.Build.0 = Debug|Any CPU
{8CFDE289-E171-4D49-A40D-5293265C1253}.Release|Any CPU.ActiveCfg = Release|Any CPU
{8CFDE289-E171-4D49-A40D-5293265C1253}.Release|Any CPU.Build.0 = Release|Any CPU
{13574913-A426-4644-9955-F49AD0876E5F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{13574913-A426-4644-9955-F49AD0876E5F}.Debug|Any CPU.Build.0 = Debug|Any CPU
{13574913-A426-4644-9955-F49AD0876E5F}.Release|Any CPU.ActiveCfg = Release|Any CPU
{13574913-A426-4644-9955-F49AD0876E5F}.Release|Any CPU.Build.0 = Release|Any CPU
{F2C12798-DF53-4D4C-A55B-F5A77F29D6B1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{F2C12798-DF53-4D4C-A55B-F5A77F29D6B1}.Debug|Any CPU.Build.0 = Debug|Any CPU
{F2C12798-DF53-4D4C-A55B-F5A77F29D6B1}.Release|Any CPU.ActiveCfg = Release|Any CPU

View File

@@ -3,7 +3,7 @@ using System.Collections.Generic;
using System.Linq;
using MPF.Core.Utilities;
using MPF.UI.Core.ComboBoxItems;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
namespace MPF
{

View File

@@ -26,10 +26,25 @@
<NrtShowRevision>false</NrtShowRevision>
</PropertyGroup>
<ItemGroup>
<Resource Include="Images\Icon.ico" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\MPF.Core\MPF.Core.csproj" />
<ProjectReference Include="..\MPF.Library\MPF.Library.csproj" />
<ProjectReference Include="..\MPF.UI.Core\MPF.UI.Core.csproj" />
</ItemGroup>
<ItemGroup>
<Reference Include="PresentationFramework.Aero" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="BurnOutSharp" PrivateAssets="build; analyzers" ExcludeAssets="contentFiles" Version="2.8.0" GeneratePathProperty="true">
<IncludeAssets>runtime; compile; build; native; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="SabreTools.RedumpLib" Version="1.1.1" />
<PackageReference Include="System.Configuration.ConfigurationManager" Version="7.0.0" />
<PackageReference Include="Unclassified.NetRevisionTask" Version="0.4.3">
<PrivateAssets>all</PrivateAssets>
@@ -41,19 +56,4 @@
<Content Include="$(PkgBurnOutSharp)\content\**" PackagePath="contentFiles\any\any;content" CopyToOutputDirectory="Always" PackageCopyToOutput="true" />
</ItemGroup>
<ItemGroup>
<Resource Include="Images\Icon.ico" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\MPF.Core\MPF.Core.csproj" />
<ProjectReference Include="..\MPF.Library\MPF.Library.csproj" />
<ProjectReference Include="..\MPF.UI.Core\MPF.UI.Core.csproj" />
<ProjectReference Include="..\RedumpLib\RedumpLib.csproj" />
</ItemGroup>
<ItemGroup>
<Reference Include="PresentationFramework.Aero" />
</ItemGroup>
</Project>

View File

@@ -12,7 +12,7 @@ using MPF.Library;
using MPF.UI.Core.ComboBoxItems;
using MPF.Windows;
using MPF.UI.Core.Windows;
using RedumpLib.Data;
using SabreTools.RedumpLib.Data;
using WPFCustomMessageBox;
using WinForms = System.Windows.Forms;
using MPF.UI.Core;
@@ -355,12 +355,20 @@ namespace MPF.UI.ViewModels
EXEDateBuildDate = "19xx-xx-xx",
ErrorsCount = "0",
Comments = "Comment data line 1\r\nComment data line 2",
#if NET48
CommentsSpecialFields = new Dictionary<SiteCode?, string>()
#else
CommentsSpecialFields = new Dictionary<SiteCode, string>()
#endif
{
[SiteCode.ISBN] = "ISBN",
},
Contents = "Special contents 1\r\nSpecial contents 2",
#if NET48
ContentsSpecialFields = new Dictionary<SiteCode?, string>()
#else
ContentsSpecialFields = new Dictionary<SiteCode, string>()
#endif
{
[SiteCode.PlayableDemos] = "Game Demo 1",
},
@@ -543,7 +551,7 @@ namespace MPF.UI.ViewModels
}
}
#endregion
#endregion
#region UI Functionality

View File

@@ -8,7 +8,7 @@ using System.Windows.Forms;
using MPF.Core.Data;
using MPF.UI.Core.ComboBoxItems;
using MPF.Windows;
using RedumpLib.Web;
using SabreTools.RedumpLib.Web;
using WPFCustomMessageBox;
namespace MPF.UI.ViewModels

View File

@@ -1,48 +0,0 @@
using System;
using System.Linq;

namespace RedumpLib.Attributes
{
    public static class AttributeHelper<T>
    {
        /// <summary>
        /// Get the HumanReadableAttribute from a supported value
        /// </summary>
        /// <param name="value">Value to use</param>
        /// <returns>HumanReadableAttribute attached to the value, or null if none could be found</returns>
        public static HumanReadableAttribute GetAttribute(T value)
        {
            // Nothing to look up for a null value
            if (value == null)
                return null;

            // Resolve the enumeration type, unwrapping Nullable<T> when present
            var enumType = Nullable.GetUnderlyingType(typeof(T)) ?? typeof(T);

            // A value that stringifies to nothing has no member to inspect
            string memberName = value.ToString();
            if (string.IsNullOrWhiteSpace(memberName))
                return null;

            // Find the member that is declared on the enumeration type itself
            var enumValueMemberInfo = enumType?.GetMember(memberName)
                ?.FirstOrDefault(m => m.DeclaringType == enumType);
            if (enumValueMemberInfo == null)
                return null;

            // Return the first attached attribute, if any exists
            return enumValueMemberInfo
                .GetCustomAttributes(typeof(HumanReadableAttribute), true)
                ?.Cast<HumanReadableAttribute>()
                .FirstOrDefault();
        }
    }
}

View File

@@ -1,25 +0,0 @@
using System;

namespace RedumpLib.Attributes
{
    /// <summary>
    /// Generic attribute for human readable values
    /// </summary>
    public class HumanReadableAttribute : Attribute
    {
        /// <summary>
        /// Indicates whether the item is usable; set to false when the
        /// item is obsolete or otherwise unavailable (defaults to true)
        /// </summary>
        public bool Available { get; set; } = true;

        /// <summary>
        /// Human-readable name of the item
        /// </summary>
        public string LongName { get; set; }

        /// <summary>
        /// Internally used name of the item
        /// </summary>
        public string ShortName { get; set; }
    }
}

View File

@@ -1,26 +0,0 @@
namespace RedumpLib.Attributes
{
    /// <summary>
    /// Attribute specific to Language values
    /// </summary>
    /// <remarks>
    /// Some languages have multiple proper names. Should all be supported?
    /// </remarks>
    public class LanguageAttribute : HumanReadableAttribute
    {
        /// <summary>
        /// ISO 639-1 two-letter code, if one exists for the language
        /// </summary>
        public string TwoLetterCode { get; set; } = null;

        /// <summary>
        /// ISO 639-2 three-letter code (standard or bibliographic variant)
        /// </summary>
        public string ThreeLetterCode { get; set; } = null;

        /// <summary>
        /// ISO 639-2 three-letter code (terminology variant), if it differs
        /// </summary>
        public string ThreeLetterCodeAlt { get; set; } = null;
    }
}

View File

@@ -1,55 +0,0 @@
using RedumpLib.Data;

namespace RedumpLib.Attributes
{
    /// <summary>
    /// Attribute specific to Redump System values
    /// </summary>
    public class SystemAttribute : HumanReadableAttribute
    {
        /// <summary>
        /// Category for the system
        /// </summary>
        public SystemCategory Category { get; set; }

        /// <summary>
        /// System is restricted to dumpers
        /// </summary>
        public bool IsBanned { get; set; } = false;

        /// <summary>
        /// System has a CUE pack
        /// </summary>
        public bool HasCues { get; set; } = false;

        /// <summary>
        /// System has a DAT
        /// </summary>
        public bool HasDat { get; set; } = false;

        /// <summary>
        /// System has a decrypted keys pack
        /// </summary>
        public bool HasDkeys { get; set; } = false;

        /// <summary>
        /// System has a GDI pack
        /// </summary>
        public bool HasGdi { get; set; } = false;

        /// <summary>
        /// System has a keys pack
        /// </summary>
        public bool HasKeys { get; set; } = false;

        /// <summary>
        /// System has an LSD pack
        /// </summary>
        public bool HasLsd { get; set; } = false;

        /// <summary>
        /// System has an SBI pack
        /// </summary>
        public bool HasSbi { get; set; } = false;
    }
}

View File

@@ -1,26 +0,0 @@
using System;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using RedumpLib.Data;

namespace RedumpLib.Converters
{
    /// <summary>
    /// Serialize DiscCategory enum values
    /// </summary>
    public class DiscCategoryConverter : JsonConverter<DiscCategory?>
    {
        /// <summary>
        /// Reading is not supported by this converter
        /// </summary>
        public override bool CanRead => false;

        /// <inheritdoc/>
        /// <exception cref="NotImplementedException">Always thrown; this converter is write-only</exception>
        public override DiscCategory? ReadJson(JsonReader reader, Type objectType, DiscCategory? existingValue, bool hasExistingValue, JsonSerializer serializer)
            => throw new NotImplementedException();

        /// <inheritdoc/>
        public override void WriteJson(JsonWriter writer, DiscCategory? value, JsonSerializer serializer)
        {
            // Emit the long-form name, falling back to the empty string
            JToken.FromObject(value.LongName() ?? string.Empty).WriteTo(writer);
        }
    }
}

View File

@@ -1,26 +0,0 @@
using System;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using RedumpLib.Data;

namespace RedumpLib.Converters
{
    /// <summary>
    /// Serialize DiscType enum values
    /// </summary>
    public class DiscTypeConverter : JsonConverter<DiscType?>
    {
        /// <summary>
        /// Reading is not supported by this converter
        /// </summary>
        public override bool CanRead => false;

        /// <inheritdoc/>
        /// <exception cref="NotImplementedException">Always thrown; this converter is write-only</exception>
        public override DiscType? ReadJson(JsonReader reader, Type objectType, DiscType? existingValue, bool hasExistingValue, JsonSerializer serializer)
            => throw new NotImplementedException();

        /// <inheritdoc/>
        public override void WriteJson(JsonWriter writer, DiscType? value, JsonSerializer serializer)
        {
            // Emit the long-form name, falling back to the empty string
            JToken.FromObject(value.LongName() ?? string.Empty).WriteTo(writer);
        }
    }
}

View File

@@ -1,32 +0,0 @@
using System;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using RedumpLib.Data;

namespace RedumpLib.Converters
{
    /// <summary>
    /// Serialize Language enum values
    /// </summary>
    public class LanguageConverter : JsonConverter<Language?[]>
    {
        /// <summary>
        /// Reading is not supported by this converter
        /// </summary>
        public override bool CanRead => false;

        /// <inheritdoc/>
        /// <exception cref="NotImplementedException">Always thrown; this converter is write-only</exception>
        public override Language?[] ReadJson(JsonReader reader, Type objectType, Language?[] existingValue, bool hasExistingValue, JsonSerializer serializer)
            => throw new NotImplementedException();

        /// <inheritdoc/>
        public override void WriteJson(JsonWriter writer, Language?[] value, JsonSerializer serializer)
        {
            // Build an array of short names, one entry per language
            var output = new JArray();
            foreach (var language in value)
                output.Add(JToken.FromObject(language.ShortName() ?? string.Empty));

            output.WriteTo(writer);
        }
    }
}

View File

@@ -1,32 +0,0 @@
using System;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using RedumpLib.Data;

namespace RedumpLib.Converters
{
    /// <summary>
    /// Serialize LanguageSelection enum values
    /// </summary>
    public class LanguageSelectionConverter : JsonConverter<LanguageSelection?[]>
    {
        /// <summary>
        /// Reading is not supported by this converter
        /// </summary>
        public override bool CanRead => false;

        /// <inheritdoc/>
        /// <exception cref="NotImplementedException">Always thrown; this converter is write-only</exception>
        public override LanguageSelection?[] ReadJson(JsonReader reader, Type objectType, LanguageSelection?[] existingValue, bool hasExistingValue, JsonSerializer serializer)
            => throw new NotImplementedException();

        /// <inheritdoc/>
        public override void WriteJson(JsonWriter writer, LanguageSelection?[] value, JsonSerializer serializer)
        {
            // Build an array of long names, one entry per selection
            var output = new JArray();
            foreach (var selection in value)
                output.Add(JToken.FromObject(selection.LongName() ?? string.Empty));

            output.WriteTo(writer);
        }
    }
}

View File

@@ -1,26 +0,0 @@
using System;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using RedumpLib.Data;

namespace RedumpLib.Converters
{
    /// <summary>
    /// Serialize Region enum values
    /// </summary>
    public class RegionConverter : JsonConverter<Region?>
    {
        /// <summary>
        /// Reading is not supported by this converter
        /// </summary>
        public override bool CanRead => false;

        /// <inheritdoc/>
        /// <exception cref="NotImplementedException">Always thrown; this converter is write-only</exception>
        public override Region? ReadJson(JsonReader reader, Type objectType, Region? existingValue, bool hasExistingValue, JsonSerializer serializer)
            => throw new NotImplementedException();

        /// <inheritdoc/>
        public override void WriteJson(JsonWriter writer, Region? value, JsonSerializer serializer)
        {
            // Emit the short-form name, falling back to the empty string
            JToken.FromObject(value.ShortName() ?? string.Empty).WriteTo(writer);
        }
    }
}

View File

@@ -1,26 +0,0 @@
using System;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using RedumpLib.Data;

namespace RedumpLib.Converters
{
    /// <summary>
    /// Serialize RedumpSystem enum values
    /// </summary>
    public class SystemConverter : JsonConverter<RedumpSystem?>
    {
        /// <summary>
        /// Reading is not supported by this converter
        /// </summary>
        public override bool CanRead => false;

        /// <inheritdoc/>
        /// <exception cref="NotImplementedException">Always thrown; this converter is write-only</exception>
        public override RedumpSystem? ReadJson(JsonReader reader, Type objectType, RedumpSystem? existingValue, bool hasExistingValue, JsonSerializer serializer)
            => throw new NotImplementedException();

        /// <inheritdoc/>
        public override void WriteJson(JsonWriter writer, RedumpSystem? value, JsonSerializer serializer)
        {
            // Emit the short-form name, falling back to the empty string
            JToken.FromObject(value.ShortName() ?? string.Empty).WriteTo(writer);
        }
    }
}

View File

@@ -1,26 +0,0 @@
using System;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using RedumpLib.Data;
namespace RedumpLib.Converters
{
/// <summary>
/// Serialize YesNo enum values
/// </summary>
public class YesNoConverter : JsonConverter<YesNo?>
{
    /// <summary>Deserialization is disabled for this converter.</summary>
    public override bool CanRead => false;

    /// <summary>Reading is not supported; always throws.</summary>
    /// <exception cref="NotImplementedException">Always thrown.</exception>
    public override YesNo? ReadJson(JsonReader reader, Type objectType, YesNo? existingValue, bool hasExistingValue, JsonSerializer serializer)
        => throw new NotImplementedException();

    /// <summary>Write the value's long name (empty string when unavailable) as a JSON value.</summary>
    public override void WriteJson(JsonWriter writer, YesNo? value, JsonSerializer serializer)
        => JToken.FromObject(value.LongName() ?? string.Empty).WriteTo(writer);
}
}

View File

@@ -1,306 +0,0 @@
using System.Text.RegularExpressions;
namespace RedumpLib.Data
{
/// <summary>
/// Regular expressions and URL templates used for scraping and querying
/// the Redump site.
/// </summary>
public static class Constants
{
    // All patterns use RegexOptions.Compiled: they live for the process
    // lifetime and are evaluated against many pages.
    #region Regular Expressions

    /// <summary>
    /// Regex matching the added field on a disc page
    /// </summary>
    public static Regex AddedRegex = new Regex(@"<tr><th>Added</th><td>(.*?)</td></tr>", RegexOptions.Compiled);

    /// <summary>
    /// Regex matching the barcode field on a disc page
    /// </summary>
    public static Regex BarcodeRegex = new Regex(@"<tr><th>Barcode</th></tr><tr><td>(.*?)</td></tr>", RegexOptions.Compiled);

    /// <summary>
    /// Regex matching the BCA field on a disc page
    /// </summary>
    public static Regex BcaRegex = new Regex(@"<h3>BCA .*?/></h3></td><td .*?></td></tr>"
        + "<tr><th>Row</th><th>Contents</th><th>ASCII</th></tr>"
        + "<tr><td>(?<row1number>.*?)</td><td>(?<row1contents>.*?)</td><td>(?<row1ascii>.*?)</td></tr>"
        + "<tr><td>(?<row2number>.*?)</td><td>(?<row2contents>.*?)</td><td>(?<row2ascii>.*?)</td></tr>"
        + "<tr><td>(?<row3number>.*?)</td><td>(?<row3contents>.*?)</td><td>(?<row3ascii>.*?)</td></tr>"
        + "<tr><td>(?<row4number>.*?)</td><td>(?<row4contents>.*?)</td><td>(?<row4ascii>.*?)</td></tr>", RegexOptions.Singleline | RegexOptions.Compiled);

    /// <summary>
    /// Regex matching the category field on a disc page
    /// </summary>
    public static Regex CategoryRegex = new Regex(@"<tr><th>Category</th><td>(.*?)</td></tr>", RegexOptions.Compiled);

    /// <summary>
    /// Regex matching the comments field on a disc page
    /// </summary>
    public static Regex CommentsRegex = new Regex(@"<tr><th>Comments</th></tr><tr><td>(.*?)</td></tr>", RegexOptions.Singleline | RegexOptions.Compiled);

    /// <summary>
    /// Regex matching the contents field on a disc page
    /// </summary>
    public static Regex ContentsRegex = new Regex(@"<tr><th>Contents</th></tr><tr .*?><td>(.*?)</td></tr>", RegexOptions.Singleline | RegexOptions.Compiled);

    /// <summary>
    /// Regex matching individual disc links on a results page
    /// </summary>
    public static Regex DiscRegex = new Regex(@"<a href=""/disc/(\d+)/"">", RegexOptions.Compiled);

    /// <summary>
    /// Regex matching the disc number or letter field on a disc page
    /// </summary>
    public static Regex DiscNumberLetterRegex = new Regex(@"\((.*?)\)", RegexOptions.Compiled);

    /// <summary>
    /// Regex matching the dumpers on a disc page
    /// </summary>
    public static Regex DumpersRegex = new Regex(@"<a href=""/discs/dumper/(.*?)/"">", RegexOptions.Compiled);

    /// <summary>
    /// Regex matching the edition field on a disc page
    /// </summary>
    public static Regex EditionRegex = new Regex(@"<tr><th>Edition</th><td>(.*?)</td></tr>", RegexOptions.Compiled);

    /// <summary>
    /// Regex matching the error count field on a disc page
    /// </summary>
    public static Regex ErrorCountRegex = new Regex(@"<tr><th>Errors count</th><td>(.*?)</td></tr>", RegexOptions.Compiled);

    /// <summary>
    /// Regex matching the foreign title field on a disc page
    /// </summary>
    public static Regex ForeignTitleRegex = new Regex(@"<h2>(.*?)</h2>", RegexOptions.Compiled);

    /// <summary>
    /// Regex matching the "full match" ID list from a WIP disc page
    /// </summary>
    public static Regex FullMatchRegex = new Regex(@"<td class=""static"">full match ids: (.*?)</td>", RegexOptions.Compiled);

    /// <summary>
    /// Regex matching the languages field on a disc page
    /// </summary>
    public static Regex LanguagesRegex = new Regex(@"<img src=""/images/languages/(.*?)\.png"" alt="".*?"" title="".*?"" />\s*", RegexOptions.Compiled);

    /// <summary>
    /// Regex matching the last modified field on a disc page
    /// </summary>
    public static Regex LastModifiedRegex = new Regex(@"<tr><th>Last modified</th><td>(.*?)</td></tr>", RegexOptions.Compiled);

    /// <summary>
    /// Regex matching the media field on a disc page
    /// </summary>
    public static Regex MediaRegex = new Regex(@"<tr><th>Media</th><td>(.*?)</td></tr>", RegexOptions.Compiled);

    /// <summary>
    /// Regex matching individual WIP disc links on a results page
    /// </summary>
    public static Regex NewDiscRegex = new Regex(@"<a (style=.*)?href=""/newdisc/(\d+)/"">", RegexOptions.Compiled);

    /// <summary>
    /// Regex matching the "partial match" ID list from a WIP disc page
    /// </summary>
    public static Regex PartialMatchRegex = new Regex(@"<td class=""static"">partial match ids: (.*?)</td>", RegexOptions.Compiled);

    /// <summary>
    /// Regex matching the PVD field on a disc page
    /// </summary>
    // BUGFIX: "(PVD)" was previously unescaped, making it a capture group that
    // matched "Descriptor PVD " instead of the literal "Descriptor (PVD) " text.
    public static Regex PvdRegex = new Regex(@"<h3>Primary Volume Descriptor \(PVD\) <img .*?/></h3></td><td .*?></td></tr>"
        + @"<tr><th>Record / Entry</th><th>Contents</th><th>Date</th><th>Time</th><th>GMT</th></tr>"
        + @"<tr><td>Creation</td><td>(?<creationbytes>.*?)</td><td>(?<creationdate>.*?)</td><td>(?<creationtime>.*?)</td><td>(?<creationtimezone>.*?)</td></tr>"
        + @"<tr><td>Modification</td><td>(?<modificationbytes>.*?)</td><td>(?<modificationdate>.*?)</td><td>(?<modificationtime>.*?)</td><td>(?<modificationtimezone>.*?)</td></tr>"
        + @"<tr><td>Expiration</td><td>(?<expirationbytes>.*?)</td><td>(?<expirationdate>.*?)</td><td>(?<expirationtime>.*?)</td><td>(?<expirationtimezone>.*?)</td></tr>"
        + @"<tr><td>Effective</td><td>(?<effectivebytes>.*?)</td><td>(?<effectivedate>.*?)</td><td>(?<effectivetime>.*?)</td><td>(?<effectivetimezone>.*?)</td></tr>", RegexOptions.Singleline | RegexOptions.Compiled);

    /// <summary>
    /// Regex matching the region field on a disc page
    /// </summary>
    public static Regex RegionRegex = new Regex(@"<tr><th>Region</th><td><a href=""/discs/region/(.*?)/"">", RegexOptions.Compiled);

    /// <summary>
    /// Regex matching a double-layer disc ringcode information
    /// </summary>
    public static Regex RingCodeDoubleRegex = new Regex(@"", RegexOptions.Singleline | RegexOptions.Compiled); // Varies based on available fields, like Additional Mould

    /// <summary>
    /// Regex matching a single-layer disc ringcode information
    /// </summary>
    public static Regex RingCodeSingleRegex = new Regex(@"", RegexOptions.Singleline | RegexOptions.Compiled); // Varies based on available fields, like Additional Mould

    /// <summary>
    /// Regex matching the serial field on a disc page
    /// </summary>
    public static Regex SerialRegex = new Regex(@"<tr><th>Serial</th><td>(.*?)</td></tr>", RegexOptions.Compiled);

    /// <summary>
    /// Regex matching the system field on a disc page
    /// </summary>
    public static Regex SystemRegex = new Regex(@"<tr><th>System</th><td><a href=""/discs/system/(.*?)/"">", RegexOptions.Compiled);

    /// <summary>
    /// Regex matching the title field on a disc page
    /// </summary>
    public static Regex TitleRegex = new Regex(@"<h1>(.*?)</h1>", RegexOptions.Compiled);

    /// <summary>
    /// Regex matching the current nonce token for login
    /// </summary>
    public static Regex TokenRegex = new Regex(@"<input type=""hidden"" name=""csrf_token"" value=""(.*?)"" />", RegexOptions.Compiled);

    /// <summary>
    /// Regex matching a single track on a disc page
    /// </summary>
    public static Regex TrackRegex = new Regex(@"<tr><td>(?<number>.*?)</td><td>(?<type>.*?)</td><td>(?<pregap>.*?)</td><td>(?<length>.*?)</td><td>(?<sectors>.*?)</td><td>(?<size>.*?)</td><td>(?<crc32>.*?)</td><td>(?<md5>.*?)</td><td>(?<sha1>.*?)</td></tr>", RegexOptions.Singleline | RegexOptions.Compiled);

    /// <summary>
    /// Regex matching the track count on a disc page
    /// </summary>
    public static Regex TrackCountRegex = new Regex(@"<tr><th>Number of tracks</th><td>(.*?)</td></tr>", RegexOptions.Compiled);

    /// <summary>
    /// Regex matching the version field on a disc page
    /// </summary>
    public static Regex VersionRegex = new Regex(@"<tr><th>Version</th><td>(.*?)</td></tr>", RegexOptions.Compiled);

    /// <summary>
    /// Regex matching the write offset field on a disc page
    /// </summary>
    public static Regex WriteOffsetRegex = new Regex(@"<tr><th>Write offset</th><td>(.*?)</td></tr>", RegexOptions.Compiled);

    #endregion

    #region URLs

    /// <summary>
    /// Redump disc page URL template
    /// </summary>
    public const string DiscPageUrl = @"http://redump.org/disc/{0}/";

    /// <summary>
    /// Redump last modified search URL template
    /// </summary>
    public const string LastModifiedUrl = @"http://redump.org/discs/sort/modified/dir/desc?page={0}";

    /// <summary>
    /// Redump login page URL template
    /// </summary>
    public const string LoginUrl = "http://forum.redump.org/login/";

    /// <summary>
    /// Redump CUE pack URL template
    /// </summary>
    public const string PackCuesUrl = @"http://redump.org/cues/{0}/";

    /// <summary>
    /// Redump DAT pack URL template
    /// </summary>
    public const string PackDatfileUrl = @"http://redump.org/datfile/{0}/";

    /// <summary>
    /// Redump DKEYS pack URL template
    /// </summary>
    public const string PackDkeysUrl = @"http://redump.org/dkeys/{0}/";

    /// <summary>
    /// Redump GDI pack URL template
    /// </summary>
    public const string PackGdiUrl = @"http://redump.org/gdi/{0}/";

    /// <summary>
    /// Redump KEYS pack URL template
    /// </summary>
    public const string PackKeysUrl = @"http://redump.org/keys/{0}/";

    /// <summary>
    /// Redump LSD pack URL template
    /// </summary>
    public const string PackLsdUrl = @"http://redump.org/lsd/{0}/";

    /// <summary>
    /// Redump SBI pack URL template
    /// </summary>
    public const string PackSbiUrl = @"http://redump.org/sbi/{0}/";

    /// <summary>
    /// Redump quicksearch URL template
    /// </summary>
    public const string QuickSearchUrl = @"http://redump.org/discs/quicksearch/{0}/?page={1}";

    /// <summary>
    /// Redump user dumps URL template
    /// </summary>
    public const string UserDumpsUrl = @"http://redump.org/discs/dumper/{0}/?page={1}";

    /// <summary>
    /// Redump last modified user dumps URL template
    /// </summary>
    public const string UserDumpsLastModifiedUrl = @"http://redump.org/discs/sort/modified/dir/desc/dumper/{0}?page={1}";

    /// <summary>
    /// Redump WIP disc page URL template
    /// </summary>
    public const string WipDiscPageUrl = @"http://redump.org/newdisc/{0}/";

    /// <summary>
    /// Redump WIP dumps queue URL
    /// </summary>
    public const string WipDumpsUrl = @"http://redump.org/discs-wip/";

    #endregion

    #region URL Extensions

    /// <summary>
    /// Changes page subpath
    /// </summary>
    public const string ChangesExt = "changes/";

    /// <summary>
    /// Cuesheet download subpath
    /// </summary>
    public const string CueExt = "cue/";

    /// <summary>
    /// Edit page subpath
    /// </summary>
    public const string EditExt = "edit/";

    /// <summary>
    /// GDI download subpath
    /// </summary>
    public const string GdiExt = "gdi/";

    /// <summary>
    /// Key download subpath
    /// </summary>
    public const string KeyExt = "key/";

    /// <summary>
    /// LSD download subpath
    /// </summary>
    public const string LsdExt = "lsd/";

    /// <summary>
    /// MD5 download subpath
    /// </summary>
    public const string Md5Ext = "md5/";

    /// <summary>
    /// SBI download subpath
    /// </summary>
    public const string SbiExt = "sbi/";

    /// <summary>
    /// SFV download subpath
    /// </summary>
    public const string SfvExt = "sfv/";

    /// <summary>
    /// SHA1 download subpath
    /// </summary>
    public const string Sha1Ext = "sha1/";

    #endregion
}
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -1,570 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Newtonsoft.Json;
using RedumpLib.Converters;
namespace RedumpLib.Data
{
public class SubmissionInfo : ICloneable
{
    /// <summary>
    /// Version of the current schema
    /// </summary>
    [JsonProperty(PropertyName = "schema_version", DefaultValueHandling = DefaultValueHandling.Ignore)]
    public int SchemaVersion { get; set; } = 3;

    /// <summary>
    /// Fully matched Redump ID
    /// </summary>
    [JsonIgnore]
    public int? FullyMatchedID { get; set; }

    /// <summary>
    /// List of partially matched Redump IDs
    /// </summary>
    [JsonIgnore]
    public List<int> PartiallyMatchedIDs { get; set; }

    /// <summary>
    /// DateTime of when the disc was added
    /// </summary>
    [JsonIgnore]
    public DateTime? Added { get; set; }

    /// <summary>
    /// DateTime of when the disc was last modified
    /// </summary>
    [JsonIgnore]
    public DateTime? LastModified { get; set; }

    /// <summary>
    /// Common disc info section of the New Disc form
    /// </summary>
    [JsonProperty(PropertyName = "common_disc_info", DefaultValueHandling = DefaultValueHandling.Ignore)]
    public CommonDiscInfoSection CommonDiscInfo { get; set; } = new CommonDiscInfoSection();

    /// <summary>
    /// Version and editions section of the New Disc form
    /// </summary>
    [JsonProperty(PropertyName = "versions_and_editions", DefaultValueHandling = DefaultValueHandling.Ignore)]
    public VersionAndEditionsSection VersionAndEditions { get; set; } = new VersionAndEditionsSection();

    /// <summary>
    /// EDC section of the New Disc form (PSX only)
    /// </summary>
    [JsonProperty(PropertyName = "edc", DefaultValueHandling = DefaultValueHandling.Ignore)]
    public EDCSection EDC { get; set; } = new EDCSection();

    /// <summary>
    /// Parent/Clone relationship section of the New Disc form
    /// </summary>
    [JsonProperty(PropertyName = "parent_clone_relationship", DefaultValueHandling = DefaultValueHandling.Ignore)]
    public ParentCloneRelationshipSection ParentCloneRelationship { get; set; } = new ParentCloneRelationshipSection();

    /// <summary>
    /// Extras section of the New Disc form
    /// </summary>
    [JsonProperty(PropertyName = "extras", DefaultValueHandling = DefaultValueHandling.Ignore)]
    public ExtrasSection Extras { get; set; } = new ExtrasSection();

    /// <summary>
    /// Copy protection section of the New Disc form
    /// </summary>
    [JsonProperty(PropertyName = "copy_protection", DefaultValueHandling = DefaultValueHandling.Ignore)]
    public CopyProtectionSection CopyProtection { get; set; } = new CopyProtectionSection();

    /// <summary>
    /// Dumpers and status section of the New Disc form
    /// </summary>
    [JsonProperty(PropertyName = "dumpers_and_status", DefaultValueHandling = DefaultValueHandling.Ignore)]
    public DumpersAndStatusSection DumpersAndStatus { get; set; } = new DumpersAndStatusSection();

    /// <summary>
    /// Tracks and write offsets section of the New Disc form
    /// </summary>
    [JsonProperty(PropertyName = "tracks_and_write_offsets", DefaultValueHandling = DefaultValueHandling.Ignore)]
    public TracksAndWriteOffsetsSection TracksAndWriteOffsets { get; set; } = new TracksAndWriteOffsetsSection();

    /// <summary>
    /// Size and checksums section of the New Disc form
    /// </summary>
    [JsonProperty(PropertyName = "size_and_checksums", DefaultValueHandling = DefaultValueHandling.Ignore)]
    public SizeAndChecksumsSection SizeAndChecksums { get; set; } = new SizeAndChecksumsSection();

    /// <summary>
    /// Dumping info section for moderation
    /// </summary>
    [JsonProperty(PropertyName = "dumping_info", DefaultValueHandling = DefaultValueHandling.Ignore)]
    public DumpingInfoSection DumpingInfo { get; set; } = new DumpingInfoSection();

    /// <summary>
    /// Artifact strings attached to the submission, keyed by identifier
    /// </summary>
    [JsonProperty(PropertyName = "artifacts", DefaultValueHandling = DefaultValueHandling.Ignore)]
    public Dictionary<string, string> Artifacts { get; set; } = new Dictionary<string, string>();

    /// <summary>
    /// Create a copy of this submission. Section objects and the Artifacts
    /// dictionary are copied; note that PartiallyMatchedIDs is copied by
    /// reference (the clone shares the same list instance).
    /// </summary>
    public object Clone()
    {
        return new SubmissionInfo
        {
            SchemaVersion = this.SchemaVersion,
            FullyMatchedID = this.FullyMatchedID,
            PartiallyMatchedIDs = this.PartiallyMatchedIDs,
            Added = this.Added,
            LastModified = this.LastModified,
            CommonDiscInfo = this.CommonDiscInfo?.Clone() as CommonDiscInfoSection,
            VersionAndEditions = this.VersionAndEditions?.Clone() as VersionAndEditionsSection,
            EDC = this.EDC?.Clone() as EDCSection,
            ParentCloneRelationship = this.ParentCloneRelationship?.Clone() as ParentCloneRelationshipSection,
            Extras = this.Extras?.Clone() as ExtrasSection,
            CopyProtection = this.CopyProtection?.Clone() as CopyProtectionSection,
            DumpersAndStatus = this.DumpersAndStatus?.Clone() as DumpersAndStatusSection,
            TracksAndWriteOffsets = this.TracksAndWriteOffsets?.Clone() as TracksAndWriteOffsetsSection,
            SizeAndChecksums = this.SizeAndChecksums?.Clone() as SizeAndChecksumsSection,
            DumpingInfo = this.DumpingInfo?.Clone() as DumpingInfoSection,
            Artifacts = this.Artifacts?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
        };
    }
}
/// <summary>
/// Common disc info section of New Disc Form
/// </summary>
public class CommonDiscInfoSection : ICloneable
{
    // -- System, media, and title identification --

    // Name not defined by Redump
    [JsonProperty(PropertyName = "d_system", Required = Required.AllowNull)]
    [JsonConverter(typeof(SystemConverter))]
    public RedumpSystem? System { get; set; }

    // Name not defined by Redump
    [JsonProperty(PropertyName = "d_media", Required = Required.AllowNull)]
    [JsonConverter(typeof(DiscTypeConverter))]
    public DiscType? Media { get; set; }

    [JsonProperty(PropertyName = "d_title", Required = Required.AllowNull)]
    public string Title { get; set; }

    [JsonProperty(PropertyName = "d_title_foreign", DefaultValueHandling = DefaultValueHandling.Ignore)]
    public string ForeignTitleNonLatin { get; set; }

    [JsonProperty(PropertyName = "d_number", NullValueHandling = NullValueHandling.Ignore)]
    public string DiscNumberLetter { get; set; }

    [JsonProperty(PropertyName = "d_label", NullValueHandling = NullValueHandling.Ignore)]
    public string DiscTitle { get; set; }

    [JsonProperty(PropertyName = "d_category", Required = Required.AllowNull)]
    [JsonConverter(typeof(DiscCategoryConverter))]
    public DiscCategory? Category { get; set; }

    [JsonProperty(PropertyName = "d_region", Required = Required.AllowNull)]
    [JsonConverter(typeof(RegionConverter))]
    public Region? Region { get; set; }

    [JsonProperty(PropertyName = "d_languages", Required = Required.AllowNull)]
    [JsonConverter(typeof(LanguageConverter))]
    public Language?[] Languages { get; set; }

    [JsonProperty(PropertyName = "d_languages_selection", NullValueHandling = NullValueHandling.Ignore, DefaultValueHandling = DefaultValueHandling.Ignore)]
    [JsonConverter(typeof(LanguageSelectionConverter))]
    public LanguageSelection?[] LanguageSelection { get; set; }

    [JsonProperty(PropertyName = "d_serial", NullValueHandling = NullValueHandling.Ignore)]
    public string Serial { get; set; }

    // -- Ring / mastering code fields (layers 0-3) --
    // Ring and RingId have private setters: they are populated only by
    // deserialization or by Clone below.

    [JsonProperty(PropertyName = "d_ring", NullValueHandling = NullValueHandling.Ignore)]
    public string Ring { get; private set; }

    [JsonProperty(PropertyName = "d_ring_0_id", NullValueHandling = NullValueHandling.Ignore)]
    public string RingId { get; private set; }

    [JsonProperty(PropertyName = "d_ring_0_ma1", Required = Required.AllowNull)]
    public string Layer0MasteringRing { get; set; }

    [JsonProperty(PropertyName = "d_ring_0_ma1_sid", NullValueHandling = NullValueHandling.Ignore)]
    public string Layer0MasteringSID { get; set; }

    [JsonProperty(PropertyName = "d_ring_0_ts1", NullValueHandling = NullValueHandling.Ignore)]
    public string Layer0ToolstampMasteringCode { get; set; }

    [JsonProperty(PropertyName = "d_ring_0_mo1_sid", NullValueHandling = NullValueHandling.Ignore)]
    public string Layer0MouldSID { get; set; }

    [JsonProperty(PropertyName = "d_ring_0_mo1", NullValueHandling = NullValueHandling.Ignore)]
    public string Layer0AdditionalMould { get; set; }

    [JsonProperty(PropertyName = "d_ring_0_ma2", Required = Required.AllowNull)]
    public string Layer1MasteringRing { get; set; }

    [JsonProperty(PropertyName = "d_ring_0_ma2_sid", NullValueHandling = NullValueHandling.Ignore)]
    public string Layer1MasteringSID { get; set; }

    [JsonProperty(PropertyName = "d_ring_0_ts2", NullValueHandling = NullValueHandling.Ignore)]
    public string Layer1ToolstampMasteringCode { get; set; }

    [JsonProperty(PropertyName = "d_ring_0_mo2_sid", NullValueHandling = NullValueHandling.Ignore)]
    public string Layer1MouldSID { get; set; }

    [JsonProperty(PropertyName = "d_ring_0_mo2", NullValueHandling = NullValueHandling.Ignore)]
    public string Layer1AdditionalMould { get; set; }

    // Layers 2 and 3 have no mould fields, only mastering ring/SID/toolstamp.

    [JsonProperty(PropertyName = "d_ring_0_ma3", Required = Required.AllowNull)]
    public string Layer2MasteringRing { get; set; }

    [JsonProperty(PropertyName = "d_ring_0_ma3_sid", NullValueHandling = NullValueHandling.Ignore)]
    public string Layer2MasteringSID { get; set; }

    [JsonProperty(PropertyName = "d_ring_0_ts3", NullValueHandling = NullValueHandling.Ignore)]
    public string Layer2ToolstampMasteringCode { get; set; }

    [JsonProperty(PropertyName = "d_ring_0_ma4", Required = Required.AllowNull)]
    public string Layer3MasteringRing { get; set; }

    [JsonProperty(PropertyName = "d_ring_0_ma4_sid", NullValueHandling = NullValueHandling.Ignore)]
    public string Layer3MasteringSID { get; set; }

    [JsonProperty(PropertyName = "d_ring_0_ts4", NullValueHandling = NullValueHandling.Ignore)]
    public string Layer3ToolstampMasteringCode { get; set; }

    // Hidden form fields that always serialize as "1"
    [JsonProperty(PropertyName = "d_ring_0_offsets", NullValueHandling = NullValueHandling.Ignore)]
    public string RingOffsetsHidden { get { return "1"; } }

    [JsonProperty(PropertyName = "d_ring_0_0_id", NullValueHandling = NullValueHandling.Ignore)]
    public string RingZeroId { get; private set; }

    [JsonProperty(PropertyName = "d_ring_0_0_density", NullValueHandling = NullValueHandling.Ignore)]
    public string RingZeroDensity { get; private set; }

    [JsonProperty(PropertyName = "d_ring_0_0_value", NullValueHandling = NullValueHandling.Ignore)]
    public string RingWriteOffset { get; set; }

    // Hidden form field that always serializes as "1"
    [JsonProperty(PropertyName = "d_ring_count", NullValueHandling = NullValueHandling.Ignore)]
    public string RingCount { get { return "1"; } }

    // -- Miscellaneous disc data --

    [JsonProperty(PropertyName = "d_barcode", NullValueHandling = NullValueHandling.Ignore)]
    public string Barcode { get; set; }

    [JsonProperty(PropertyName = "d_date", NullValueHandling = NullValueHandling.Ignore)]
    public string EXEDateBuildDate { get; set; }

    [JsonProperty(PropertyName = "d_errors", NullValueHandling = NullValueHandling.Ignore)]
    public string ErrorsCount { get; set; }

    [JsonProperty(PropertyName = "d_comments", NullValueHandling = NullValueHandling.Ignore)]
    public string Comments { get; set; }

    // Site-code keyed comment fields; never serialized directly
    [JsonIgnore]
    public Dictionary<SiteCode?, string> CommentsSpecialFields { get; set; }

    [JsonProperty(PropertyName = "d_contents", NullValueHandling = NullValueHandling.Ignore)]
    public string Contents { get; set; }

    // Site-code keyed contents fields; never serialized directly
    [JsonIgnore]
    public Dictionary<SiteCode?, string> ContentsSpecialFields { get; set; }

    /// <summary>
    /// Create a copy of this section. Arrays and the special-field
    /// dictionaries are copied to new instances; strings are shared
    /// (immutable), so this is effectively a deep copy.
    /// </summary>
    public object Clone()
    {
        return new CommonDiscInfoSection
        {
            System = this.System,
            Media = this.Media,
            Title = this.Title,
            ForeignTitleNonLatin = this.ForeignTitleNonLatin,
            DiscNumberLetter = this.DiscNumberLetter,
            DiscTitle = this.DiscTitle,
            Category = this.Category,
            Region = this.Region,
            Languages = this.Languages?.Clone() as Language?[],
            LanguageSelection = this.LanguageSelection?.Clone() as LanguageSelection?[],
            Serial = this.Serial,
            Ring = this.Ring,
            RingId = this.RingId,
            Layer0MasteringRing = this.Layer0MasteringRing,
            Layer0MasteringSID = this.Layer0MasteringSID,
            Layer0ToolstampMasteringCode = this.Layer0ToolstampMasteringCode,
            Layer0MouldSID = this.Layer0MouldSID,
            Layer0AdditionalMould = this.Layer0AdditionalMould,
            Layer1MasteringRing = this.Layer1MasteringRing,
            Layer1MasteringSID = this.Layer1MasteringSID,
            Layer1ToolstampMasteringCode = this.Layer1ToolstampMasteringCode,
            Layer1MouldSID = this.Layer1MouldSID,
            Layer1AdditionalMould = this.Layer1AdditionalMould,
            Layer2MasteringRing = this.Layer2MasteringRing,
            Layer2MasteringSID = this.Layer2MasteringSID,
            Layer2ToolstampMasteringCode = this.Layer2ToolstampMasteringCode,
            Layer3MasteringRing = this.Layer3MasteringRing,
            Layer3MasteringSID = this.Layer3MasteringSID,
            Layer3ToolstampMasteringCode = this.Layer3ToolstampMasteringCode,
            RingZeroId = this.RingZeroId,
            RingZeroDensity = this.RingZeroDensity,
            RingWriteOffset = this.RingWriteOffset,
            Barcode = this.Barcode,
            EXEDateBuildDate = this.EXEDateBuildDate,
            ErrorsCount = this.ErrorsCount,
            Comments = this.Comments,
            CommentsSpecialFields = this.CommentsSpecialFields?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
            Contents = this.Contents,
            ContentsSpecialFields = this.ContentsSpecialFields?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
        };
    }
}
/// <summary>
/// Version and editions section of New Disc form
/// </summary>
public class VersionAndEditionsSection : ICloneable
{
    [JsonProperty(PropertyName = "d_version", NullValueHandling = NullValueHandling.Ignore)]
    public string Version { get; set; }

    [JsonProperty(PropertyName = "d_version_datfile", NullValueHandling = NullValueHandling.Ignore)]
    public string VersionDatfile { get; set; }

    [JsonProperty(PropertyName = "d_editions", NullValueHandling = NullValueHandling.Ignore)]
    public string[] CommonEditions { get; set; }

    [JsonProperty(PropertyName = "d_editions_text", NullValueHandling = NullValueHandling.Ignore)]
    public string OtherEditions { get; set; }

    /// <summary>
    /// Create a copy of this section. The CommonEditions array is copied by
    /// reference, matching the original behavior.
    /// </summary>
    public object Clone() => new VersionAndEditionsSection
    {
        Version = this.Version,
        VersionDatfile = this.VersionDatfile,
        CommonEditions = this.CommonEditions,
        OtherEditions = this.OtherEditions,
    };
}
/// <summary>
/// EDC section of New Disc form (PSX only)
/// </summary>
public class EDCSection : ICloneable
{
    /// <summary>EDC flag from the d_edc form field.</summary>
    [JsonProperty(PropertyName = "d_edc", NullValueHandling = NullValueHandling.Ignore)]
    [JsonConverter(typeof(YesNoConverter))]
    public YesNo? EDC { get; set; }

    /// <summary>Create a copy of this section.</summary>
    public object Clone() => new EDCSection
    {
        EDC = this.EDC,
    };
}
/// <summary>
/// Parent/Clone relationship section of New Disc form
/// </summary>
public class ParentCloneRelationshipSection : ICloneable
{
    [JsonProperty(PropertyName = "d_parent_id", NullValueHandling = NullValueHandling.Ignore)]
    public string ParentID { get; set; }

    [JsonProperty(PropertyName = "d_is_regional_parent", NullValueHandling = NullValueHandling.Ignore)]
    public bool RegionalParent { get; set; }

    /// <summary>Create a copy of this section.</summary>
    public object Clone() => new ParentCloneRelationshipSection
    {
        ParentID = this.ParentID,
        RegionalParent = this.RegionalParent,
    };
}
/// <summary>
/// Extras section of New Disc form
/// </summary>
public class ExtrasSection : ICloneable
{
    [JsonProperty(PropertyName = "d_pvd", NullValueHandling = NullValueHandling.Ignore)]
    public string PVD { get; set; }

    [JsonProperty(PropertyName = "d_d1_key", NullValueHandling = NullValueHandling.Ignore)]
    public string DiscKey { get; set; }

    [JsonProperty(PropertyName = "d_d2_key", NullValueHandling = NullValueHandling.Ignore)]
    public string DiscID { get; set; }

    [JsonProperty(PropertyName = "d_pic_data", NullValueHandling = NullValueHandling.Ignore)]
    public string PIC { get; set; }

    [JsonProperty(PropertyName = "d_header", NullValueHandling = NullValueHandling.Ignore)]
    public string Header { get; set; }

    [JsonProperty(PropertyName = "d_bca", NullValueHandling = NullValueHandling.Ignore)]
    public string BCA { get; set; }

    [JsonProperty(PropertyName = "d_ssranges", NullValueHandling = NullValueHandling.Ignore)]
    public string SecuritySectorRanges { get; set; }

    /// <summary>Create a copy of this section.</summary>
    public object Clone() => new ExtrasSection
    {
        PVD = this.PVD,
        DiscKey = this.DiscKey,
        DiscID = this.DiscID,
        PIC = this.PIC,
        Header = this.Header,
        BCA = this.BCA,
        SecuritySectorRanges = this.SecuritySectorRanges,
    };
}
/// <summary>
/// Copy protection section of New Disc form
/// </summary>
public class CopyProtectionSection : ICloneable
{
    [JsonProperty(PropertyName = "d_protection_a", NullValueHandling = NullValueHandling.Ignore)]
    [JsonConverter(typeof(YesNoConverter))]
    public YesNo? AntiModchip { get; set; }

    [JsonProperty(PropertyName = "d_protection_1", NullValueHandling = NullValueHandling.Ignore)]
    [JsonConverter(typeof(YesNoConverter))]
    public YesNo? LibCrypt { get; set; }

    [JsonProperty(PropertyName = "d_libcrypt", NullValueHandling = NullValueHandling.Ignore)]
    public string LibCryptData { get; set; }

    [JsonProperty(PropertyName = "d_protection", NullValueHandling = NullValueHandling.Ignore)]
    public string Protection { get; set; }

    // Detailed protection scan results; never serialized directly
    [JsonIgnore]
    public Dictionary<string, List<string>> FullProtections { get; set; }

    [JsonProperty(PropertyName = "d_securom", NullValueHandling = NullValueHandling.Ignore)]
    public string SecuROMData { get; set; }

    /// <summary>
    /// Create a copy of this section. The FullProtections dictionary is
    /// rebuilt, but its List values are copied by reference, matching the
    /// original behavior.
    /// </summary>
    public object Clone() => new CopyProtectionSection
    {
        AntiModchip = this.AntiModchip,
        LibCrypt = this.LibCrypt,
        LibCryptData = this.LibCryptData,
        Protection = this.Protection,
        FullProtections = this.FullProtections?.ToDictionary(entry => entry.Key, entry => entry.Value),
        SecuROMData = this.SecuROMData,
    };
}
/// <summary>
/// Dumpers and status section of New Disc form (Moderator only)
/// </summary>
public class DumpersAndStatusSection : ICloneable
{
    [JsonProperty(PropertyName = "d_status", NullValueHandling = NullValueHandling.Ignore)]
    public DumpStatus Status { get; set; }

    [JsonProperty(PropertyName = "d_dumpers", NullValueHandling = NullValueHandling.Ignore)]
    public string[] Dumpers { get; set; }

    [JsonProperty(PropertyName = "d_dumpers_text", NullValueHandling = NullValueHandling.Ignore)]
    public string OtherDumpers { get; set; }

    /// <summary>Create a copy of this section, including a copy of the Dumpers array.</summary>
    public object Clone() => new DumpersAndStatusSection
    {
        Status = this.Status,
        Dumpers = this.Dumpers?.Clone() as string[],
        OtherDumpers = this.OtherDumpers,
    };
}
/// <summary>
/// Tracks and write offsets section of New Disc form (CD/GD-based)
/// </summary>
public class TracksAndWriteOffsetsSection : ICloneable
{
    [JsonProperty(PropertyName = "d_tracks", NullValueHandling = NullValueHandling.Ignore)]
    public string ClrMameProData { get; set; }

    [JsonProperty(PropertyName = "d_cue", NullValueHandling = NullValueHandling.Ignore)]
    public string Cuesheet { get; set; }

    [JsonProperty(PropertyName = "d_offset", NullValueHandling = NullValueHandling.Ignore)]
    public int[] CommonWriteOffsets { get; set; }

    [JsonProperty(PropertyName = "d_offset_text", NullValueHandling = NullValueHandling.Ignore)]
    public string OtherWriteOffsets { get; set; }

    /// <summary>Create a copy of this section, including a copy of the offsets array.</summary>
    public object Clone() => new TracksAndWriteOffsetsSection
    {
        ClrMameProData = this.ClrMameProData,
        Cuesheet = this.Cuesheet,
        CommonWriteOffsets = this.CommonWriteOffsets?.Clone() as int[],
        OtherWriteOffsets = this.OtherWriteOffsets,
    };
}
/// <summary>
/// Size &amp; checksums section of New Disc form (DVD/BD/UMD-based)
/// </summary>
public class SizeAndChecksumsSection : ICloneable
{
    [JsonProperty(PropertyName = "d_layerbreak", NullValueHandling = NullValueHandling.Ignore)]
    public long Layerbreak { get; set; }

    [JsonProperty(PropertyName = "d_layerbreak_2", NullValueHandling = NullValueHandling.Ignore)]
    public long Layerbreak2 { get; set; }

    [JsonProperty(PropertyName = "d_layerbreak_3", NullValueHandling = NullValueHandling.Ignore)]
    public long Layerbreak3 { get; set; }

    [JsonProperty(PropertyName = "d_pic_identifier", NullValueHandling = NullValueHandling.Ignore)]
    public string PICIdentifier { get; set; }

    [JsonProperty(PropertyName = "d_size", NullValueHandling = NullValueHandling.Ignore)]
    public long Size { get; set; }

    [JsonProperty(PropertyName = "d_crc32", NullValueHandling = NullValueHandling.Ignore)]
    public string CRC32 { get; set; }

    [JsonProperty(PropertyName = "d_md5", NullValueHandling = NullValueHandling.Ignore)]
    public string MD5 { get; set; }

    [JsonProperty(PropertyName = "d_sha1", NullValueHandling = NullValueHandling.Ignore)]
    public string SHA1 { get; set; }

    /// <summary>Create a copy of this section.</summary>
    public object Clone() => new SizeAndChecksumsSection
    {
        Layerbreak = this.Layerbreak,
        Layerbreak2 = this.Layerbreak2,
        Layerbreak3 = this.Layerbreak3,
        PICIdentifier = this.PICIdentifier,
        Size = this.Size,
        CRC32 = this.CRC32,
        MD5 = this.MD5,
        SHA1 = this.SHA1,
    };
}
/// <summary>
/// Dumping info section for moderation
/// </summary>
public class DumpingInfoSection : ICloneable
{
    // Name not defined by Redump
    [JsonProperty(PropertyName = "d_dumping_program", Required = Required.AllowNull)]
    public string DumpingProgram { get; set; }

    // Name not defined by Redump
    [JsonProperty(PropertyName = "d_dumping_date", Required = Required.AllowNull)]
    public string DumpingDate { get; set; }

    // Name not defined by Redump
    [JsonProperty(PropertyName = "d_drive_manufacturer", Required = Required.AllowNull)]
    public string Manufacturer { get; set; }

    // Name not defined by Redump
    [JsonProperty(PropertyName = "d_drive_model", Required = Required.AllowNull)]
    public string Model { get; set; }

    // Name not defined by Redump
    [JsonProperty(PropertyName = "d_drive_firmware", Required = Required.AllowNull)]
    public string Firmware { get; set; }

    // Name not defined by Redump
    [JsonProperty(PropertyName = "d_reported_disc_type", Required = Required.AllowNull)]
    public string ReportedDiscType { get; set; }

    /// <summary>Create a copy of this section.</summary>
    public object Clone() => new DumpingInfoSection
    {
        DumpingProgram = this.DumpingProgram,
        DumpingDate = this.DumpingDate,
        Manufacturer = this.Manufacturer,
        Model = this.Model,
        Firmware = this.Firmware,
        ReportedDiscType = this.ReportedDiscType,
    };
}
}

View File

@@ -1,13 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <!-- Build for both .NET Framework 4.8 and .NET 6 -->
    <TargetFrameworks>net48;net6.0</TargetFrameworks>
    <!-- Publishable runtime targets for Windows, Linux, and macOS -->
    <RuntimeIdentifiers>win7-x64;win8-x64;win81-x64;win10-x64;linux-x64;osx-x64</RuntimeIdentifiers>
    <!-- Do not auto-generate AssemblyInfo attributes -->
    <GenerateAssemblyInfo>false</GenerateAssemblyInfo>
  </PropertyGroup>

  <ItemGroup>
    <!-- JSON (de)serialization of submission info and converters -->
    <PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
  </ItemGroup>

</Project>

View File

@@ -1,808 +0,0 @@
#if NET6_0
using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using RedumpLib.Data;
namespace RedumpLib.Web
{
/// <summary>
/// HttpClient wrapper for interacting with the Redump site (login, page scraping,
/// and per-disc / per-pack downloads). Cookies are enabled so the login session persists.
/// </summary>
public class RedumpHttpClient : HttpClient
{
    #region Properties

    /// <summary>
    /// Determines if user is logged into Redump
    /// </summary>
    public bool LoggedIn { get; private set; } = false;

    /// <summary>
    /// Determines if the user is a staff member
    /// </summary>
    public bool IsStaff { get; private set; } = false;

    #endregion

    /// <summary>
    /// Constructor; enables cookie handling so the Redump login session is kept
    /// </summary>
    public RedumpHttpClient()
        : base(new HttpClientHandler { UseCookies = true })
    {
    }

    #region Credentials

    /// <summary>
    /// Validate supplied credentials
    /// </summary>
    /// <param name="username">Redump username</param>
    /// <param name="password">Redump password</param>
    /// <returns>Status flag (true accepted, false denied, null error) and a display message</returns>
    public async static Task<(bool?, string)> ValidateCredentials(string username, string password)
    {
        // If options are invalid or we're missing something key, just return
        if (string.IsNullOrWhiteSpace(username) || string.IsNullOrWhiteSpace(password))
            return (false, null);

        // Try logging in with the supplied credentials otherwise
        using RedumpHttpClient httpClient = new();
        bool? loggedIn = await httpClient.Login(username, password);
        if (loggedIn == true)
            return (true, "Redump username and password accepted!");
        else if (loggedIn == false)
            return (false, "Redump username and password denied!");
        else
            return (null, "An error occurred validating your credentials!");
    }

    /// <summary>
    /// Login to Redump, if possible
    /// </summary>
    /// <param name="username">Redump username</param>
    /// <param name="password">Redump password</param>
    /// <returns>True if the user could be logged in, false otherwise, null on error</returns>
    public async Task<bool?> Login(string username, string password)
    {
        // Credentials verification
        if (!string.IsNullOrWhiteSpace(username) && !string.IsNullOrWhiteSpace(password))
        {
            Console.WriteLine("Credentials entered, will attempt Redump login...");
        }
        else if (!string.IsNullOrWhiteSpace(username) && string.IsNullOrWhiteSpace(password))
        {
            Console.WriteLine("Only a username was specified, will not attempt Redump login...");
            return false;
        }
        else if (string.IsNullOrWhiteSpace(username))
        {
            Console.WriteLine("No credentials entered, will not attempt Redump login...");
            return false;
        }

        // HTTP encode the password
        password = WebUtility.UrlEncode(password);

        // Attempt to login up to 3 times
        for (int i = 0; i < 3; i++)
        {
            try
            {
                // Get the current token from the login page
                var loginPage = await GetStringAsync(Constants.LoginUrl);
                string token = Constants.TokenRegex.Match(loginPage).Groups[1].Value;

                // Construct the login request
                var postContent = new StringContent($"form_sent=1&redirect_url=&csrf_token={token}&req_username={username}&req_password={password}&save_pass=0", Encoding.UTF8);
                postContent.Headers.ContentType = MediaTypeHeaderValue.Parse("application/x-www-form-urlencoded");

                // Send the login request and get the result
                var response = await PostAsync(Constants.LoginUrl, postContent);

                // BUGFIX: `await response?.Content?.ReadAsStringAsync()` would throw a
                // NullReferenceException (awaiting a null Task) if the response or its
                // content were null; read defensively instead
                string responseContent = null;
                if (response?.Content != null)
                    responseContent = await response.Content.ReadAsStringAsync();

                if (string.IsNullOrWhiteSpace(responseContent))
                {
                    Console.WriteLine($"An error occurred while trying to log in on attempt {i}: No response");
                    continue;
                }

                if (responseContent.Contains("Incorrect username and/or password."))
                {
                    Console.WriteLine("Invalid credentials entered, continuing without logging in...");
                    return false;
                }

                // The user was able to be logged in
                Console.WriteLine("Credentials accepted! Logged into Redump...");
                LoggedIn = true;

                // If the user is a moderator or staff, set accordingly
                if (responseContent.Contains("http://forum.redump.org/forum/9/staff/"))
                    IsStaff = true;

                return true;
            }
            catch (Exception ex)
            {
                Console.WriteLine($"An exception occurred while trying to log in on attempt {i}: {ex}");
            }
        }

        Console.WriteLine("Could not login to Redump in 3 attempts, continuing without logging in...");
        return false;
    }

    #endregion

    #region Single Page Helpers

    /// <summary>
    /// Process a Redump site page as a list of possible IDs or disc page
    /// </summary>
    /// <param name="url">Base URL to download using</param>
    /// <returns>List of IDs from the page, empty on error</returns>
    public async Task<List<int>> CheckSingleSitePage(string url)
    {
        List<int> ids = new();

        // Try up to 3 times to retrieve the data
        string dumpsPage = await DownloadString(url, retries: 3);

        // If we have no dumps left
        if (dumpsPage == null || dumpsPage.Contains("No discs found."))
            return ids;

        // If we have a single disc page already
        if (dumpsPage.Contains("<b>Download:</b>"))
        {
            var value = Regex.Match(dumpsPage, @"/disc/(\d+)/sfv/").Groups[1].Value;
            if (int.TryParse(value, out int id))
                ids.Add(id);

            return ids;
        }

        // Otherwise, traverse each dump on the page
        var matches = Constants.DiscRegex.Matches(dumpsPage);
        foreach (Match match in matches)
        {
            try
            {
                if (int.TryParse(match.Groups[1].Value, out int value))
                    ids.Add(value);
            }
            catch (Exception ex)
            {
                Console.WriteLine($"An exception has occurred: {ex}");
                continue;
            }
        }

        return ids;
    }

    /// <summary>
    /// Process a Redump site page as a list of possible IDs or disc page
    /// </summary>
    /// <param name="url">Base URL to download using</param>
    /// <param name="outDir">Output directory to save data to</param>
    /// <param name="failOnSingle">True to return on first error, false otherwise</param>
    /// <returns>True if the page could be downloaded, false otherwise</returns>
    public async Task<bool> CheckSingleSitePage(string url, string outDir, bool failOnSingle)
    {
        // Try up to 3 times to retrieve the data
        string dumpsPage = await DownloadString(url, retries: 3);

        // If we have no dumps left
        if (dumpsPage == null || dumpsPage.Contains("No discs found."))
            return false;

        // If we have a single disc page already
        if (dumpsPage.Contains("<b>Download:</b>"))
        {
            var value = Regex.Match(dumpsPage, @"/disc/(\d+)/sfv/").Groups[1].Value;
            if (int.TryParse(value, out int id))
            {
                bool downloaded = await DownloadSingleSiteID(id, outDir, false);
                if (!downloaded && failOnSingle)
                    return false;
            }

            // A single-disc page means there are no further pages to walk
            return false;
        }

        // Otherwise, traverse each dump on the page
        var matches = Constants.DiscRegex.Matches(dumpsPage);
        foreach (Match match in matches)
        {
            try
            {
                if (int.TryParse(match.Groups[1].Value, out int value))
                {
                    bool downloaded = await DownloadSingleSiteID(value, outDir, false);
                    if (!downloaded && failOnSingle)
                        return false;
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine($"An exception has occurred: {ex}");
                continue;
            }
        }

        return true;
    }

    /// <summary>
    /// Process a Redump WIP page as a list of possible IDs or disc page
    /// </summary>
    /// <param name="url">Base URL to download using</param>
    /// <returns>List of IDs from the page, empty on error</returns>
    public async Task<List<int>> CheckSingleWIPPage(string url)
    {
        List<int> ids = new();

        // Try up to 3 times to retrieve the data
        string dumpsPage = await DownloadString(url, retries: 3);

        // If we have no dumps left
        if (dumpsPage == null || dumpsPage.Contains("No discs found."))
            return ids;

        // Otherwise, traverse each dump on the page
        var matches = Constants.NewDiscRegex.Matches(dumpsPage);
        foreach (Match match in matches)
        {
            try
            {
                if (int.TryParse(match.Groups[2].Value, out int value))
                    ids.Add(value);
            }
            catch (Exception ex)
            {
                Console.WriteLine($"An exception has occurred: {ex}");
                continue;
            }
        }

        return ids;
    }

    /// <summary>
    /// Process a Redump WIP page as a list of possible IDs or disc page
    /// </summary>
    /// <param name="url">Base URL to download using</param>
    /// <param name="outDir">Output directory to save data to</param>
    /// <param name="failOnSingle">True to return on first error, false otherwise</param>
    /// <returns>True if the page could be downloaded, false otherwise</returns>
    public async Task<bool> CheckSingleWIPPage(string url, string outDir, bool failOnSingle)
    {
        // Try up to 3 times to retrieve the data
        string dumpsPage = await DownloadString(url, retries: 3);

        // If we have no dumps left
        if (dumpsPage == null || dumpsPage.Contains("No discs found."))
            return false;

        // Otherwise, traverse each dump on the page
        var matches = Constants.NewDiscRegex.Matches(dumpsPage);
        foreach (Match match in matches)
        {
            try
            {
                if (int.TryParse(match.Groups[2].Value, out int value))
                {
                    bool downloaded = await DownloadSingleWIPID(value, outDir, false);
                    if (!downloaded && failOnSingle)
                        return false;
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine($"An exception has occurred: {ex}");
                continue;
            }
        }

        return true;
    }

    #endregion

    #region Download Helpers

    /// <summary>
    /// Download a single pack
    /// </summary>
    /// <param name="url">Base URL to download using</param>
    /// <param name="system">System to download packs for</param>
    /// <returns>Byte array containing the downloaded pack, null on error</returns>
    public async Task<byte[]> DownloadSinglePack(string url, RedumpSystem? system)
    {
        try
        {
            return await GetByteArrayAsync(string.Format(url, system.ShortName()));
        }
        catch (Exception ex)
        {
            Console.WriteLine($"An exception has occurred: {ex}");
            return null;
        }
    }

    /// <summary>
    /// Download a single pack
    /// </summary>
    /// <param name="url">Base URL to download using</param>
    /// <param name="system">System to download packs for</param>
    /// <param name="outDir">Output directory to save data to</param>
    /// <param name="subfolder">Named subfolder for the pack, used optionally</param>
    /// <returns>True if the pack was downloaded, false on error</returns>
    public async Task<bool> DownloadSinglePack(string url, RedumpSystem? system, string outDir, string subfolder)
    {
        try
        {
            // If no output directory is defined, use the current directory instead
            if (string.IsNullOrWhiteSpace(outDir))
                outDir = Environment.CurrentDirectory;

            string tempfile = Path.Combine(outDir, "tmp" + Guid.NewGuid().ToString());
            string packUri = string.Format(url, system.ShortName());

            // Make the call to get the pack
            string remoteFileName = await DownloadFile(packUri, tempfile);
            MoveOrDelete(tempfile, remoteFileName, outDir, subfolder);
            return true;
        }
        catch (Exception ex)
        {
            Console.WriteLine($"An exception has occurred: {ex}");
            return false;
        }
    }

    /// <summary>
    /// Download an individual site ID data, if possible
    /// </summary>
    /// <param name="id">Redump disc ID to retrieve</param>
    /// <returns>String containing the page contents if successful, null on error</returns>
    public async Task<string> DownloadSingleSiteID(int id)
    {
        string paddedId = id.ToString().PadLeft(5, '0');
        Console.WriteLine($"Processing ID: {paddedId}");
        try
        {
            // Try up to 3 times to retrieve the data
            string discPageUri = string.Format(Constants.DiscPageUrl, id);
            string discPage = await DownloadString(discPageUri, retries: 3);
            if (discPage == null || discPage.Contains($"Disc with ID \"{id}\" doesn't exist"))
            {
                Console.WriteLine($"ID {paddedId} could not be found!");
                return null;
            }

            Console.WriteLine($"ID {paddedId} has been successfully downloaded");
            return discPage;
        }
        catch (Exception ex)
        {
            Console.WriteLine($"An exception has occurred: {ex}");
            return null;
        }
    }

    /// <summary>
    /// Download an individual site ID data, if possible
    /// </summary>
    /// <param name="id">Redump disc ID to retrieve</param>
    /// <param name="outDir">Output directory to save data to</param>
    /// <param name="rename">True to rename deleted entries, false otherwise</param>
    /// <returns>True if all data was downloaded, false otherwise</returns>
    public async Task<bool> DownloadSingleSiteID(int id, string outDir, bool rename)
    {
        // If no output directory is defined, use the current directory instead
        if (string.IsNullOrWhiteSpace(outDir))
            outDir = Environment.CurrentDirectory;

        string paddedId = id.ToString().PadLeft(5, '0');
        string paddedIdDir = Path.Combine(outDir, paddedId);
        Console.WriteLine($"Processing ID: {paddedId}");
        try
        {
            // Try up to 3 times to retrieve the data
            string discPageUri = string.Format(Constants.DiscPageUrl, id);
            string discPage = await DownloadString(discPageUri, retries: 3);
            if (discPage == null || discPage.Contains($"Disc with ID \"{id}\" doesn't exist"))
            {
                // Best-effort: mark the local copy of a deleted disc
                try
                {
                    if (rename)
                    {
                        // (redundant inner `&& rename` check removed)
                        if (Directory.Exists(paddedIdDir))
                            Directory.Move(paddedIdDir, paddedIdDir + "-deleted");
                        else
                            Directory.CreateDirectory(paddedIdDir + "-deleted");
                    }
                }
                catch { }

                Console.WriteLine($"ID {paddedId} could not be found!");
                return false;
            }

            // Check if the page has been updated since the last time it was downloaded, if possible
            if (File.Exists(Path.Combine(paddedIdDir, "disc.html")))
            {
                // Read in the cached file
                var oldDiscPage = File.ReadAllText(Path.Combine(paddedIdDir, "disc.html"));

                // Check for the last modified date in both pages
                var oldResult = Constants.LastModifiedRegex.Match(oldDiscPage);
                var newResult = Constants.LastModifiedRegex.Match(discPage);

                // If both pages contain the same modified date, skip it
                if (oldResult.Success && newResult.Success && oldResult.Groups[1].Value == newResult.Groups[1].Value)
                {
                    Console.WriteLine($"ID {paddedId} has not been changed since last download");
                    return false;
                }

                // If neither page contains a modified date, skip it
                else if (!oldResult.Success && !newResult.Success)
                {
                    Console.WriteLine($"ID {paddedId} has not been changed since last download");
                    return false;
                }
            }

            // Create ID subdirectory
            Directory.CreateDirectory(paddedIdDir);

            // View Edit History
            if (discPage.Contains($"<a href=\"/disc/{id}/changes/\""))
                _ = await DownloadFile(string.Format(Constants.DiscPageUrl, id) + Constants.ChangesExt, Path.Combine(paddedIdDir, "changes.html"));

            // CUE
            if (discPage.Contains($"<a href=\"/disc/{id}/cue/\""))
                _ = await DownloadFile(string.Format(Constants.DiscPageUrl, id) + Constants.CueExt, Path.Combine(paddedIdDir, paddedId + ".cue"));

            // Edit disc
            if (discPage.Contains($"<a href=\"/disc/{id}/edit/\""))
                _ = await DownloadFile(string.Format(Constants.DiscPageUrl, id) + Constants.EditExt, Path.Combine(paddedIdDir, "edit.html"));

            // GDI
            if (discPage.Contains($"<a href=\"/disc/{id}/gdi/\""))
                _ = await DownloadFile(string.Format(Constants.DiscPageUrl, id) + Constants.GdiExt, Path.Combine(paddedIdDir, paddedId + ".gdi"));

            // KEYS
            if (discPage.Contains($"<a href=\"/disc/{id}/key/\""))
                _ = await DownloadFile(string.Format(Constants.DiscPageUrl, id) + Constants.KeyExt, Path.Combine(paddedIdDir, paddedId + ".key"));

            // LSD
            if (discPage.Contains($"<a href=\"/disc/{id}/lsd/\""))
                _ = await DownloadFile(string.Format(Constants.DiscPageUrl, id) + Constants.LsdExt, Path.Combine(paddedIdDir, paddedId + ".lsd"));

            // MD5
            if (discPage.Contains($"<a href=\"/disc/{id}/md5/\""))
                _ = await DownloadFile(string.Format(Constants.DiscPageUrl, id) + Constants.Md5Ext, Path.Combine(paddedIdDir, paddedId + ".md5"));

            // Review WIP entry
            if (Constants.NewDiscRegex.IsMatch(discPage))
            {
                var match = Constants.NewDiscRegex.Match(discPage);
                _ = await DownloadFile(string.Format(Constants.WipDiscPageUrl, match.Groups[2].Value), Path.Combine(paddedIdDir, "newdisc.html"));
            }

            // SBI
            if (discPage.Contains($"<a href=\"/disc/{id}/sbi/\""))
                _ = await DownloadFile(string.Format(Constants.DiscPageUrl, id) + Constants.SbiExt, Path.Combine(paddedIdDir, paddedId + ".sbi"));

            // SFV (discard added for consistency with the sibling downloads)
            if (discPage.Contains($"<a href=\"/disc/{id}/sfv/\""))
                _ = await DownloadFile(string.Format(Constants.DiscPageUrl, id) + Constants.SfvExt, Path.Combine(paddedIdDir, paddedId + ".sfv"));

            // SHA1
            if (discPage.Contains($"<a href=\"/disc/{id}/sha1/\""))
                _ = await DownloadFile(string.Format(Constants.DiscPageUrl, id) + Constants.Sha1Ext, Path.Combine(paddedIdDir, paddedId + ".sha1"));

            // HTML (Last in case of errors)
            using (var discStreamWriter = File.CreateText(Path.Combine(paddedIdDir, "disc.html")))
            {
                discStreamWriter.Write(discPage);
            }

            Console.WriteLine($"ID {paddedId} has been successfully downloaded");
            return true;
        }
        catch (Exception ex)
        {
            Console.WriteLine($"An exception has occurred: {ex}");
            return false;
        }
    }

    /// <summary>
    /// Download an individual WIP ID data, if possible
    /// </summary>
    /// <param name="id">Redump WIP disc ID to retrieve</param>
    /// <returns>String containing the page contents if successful, null on error</returns>
    public async Task<string> DownloadSingleWIPID(int id)
    {
        string paddedId = id.ToString().PadLeft(5, '0');
        Console.WriteLine($"Processing ID: {paddedId}");
        try
        {
            // Try up to 3 times to retrieve the data
            string discPageUri = string.Format(Constants.WipDiscPageUrl, id);
            string discPage = await DownloadString(discPageUri, retries: 3);
            if (discPage == null || discPage.Contains($"WIP disc with ID \"{id}\" doesn't exist"))
            {
                Console.WriteLine($"ID {paddedId} could not be found!");
                return null;
            }

            Console.WriteLine($"ID {paddedId} has been successfully downloaded");
            return discPage;
        }
        catch (Exception ex)
        {
            Console.WriteLine($"An exception has occurred: {ex}");
            return null;
        }
    }

    /// <summary>
    /// Download an individual WIP ID data, if possible
    /// </summary>
    /// <param name="id">Redump WIP disc ID to retrieve</param>
    /// <param name="outDir">Output directory to save data to</param>
    /// <param name="rename">True to rename deleted entries, false otherwise</param>
    /// <returns>True if all data was downloaded, false otherwise</returns>
    public async Task<bool> DownloadSingleWIPID(int id, string outDir, bool rename)
    {
        // If no output directory is defined, use the current directory instead
        if (string.IsNullOrWhiteSpace(outDir))
            outDir = Environment.CurrentDirectory;

        string paddedId = id.ToString().PadLeft(5, '0');
        string paddedIdDir = Path.Combine(outDir, paddedId);
        Console.WriteLine($"Processing ID: {paddedId}");
        try
        {
            // Try up to 3 times to retrieve the data
            string discPageUri = string.Format(Constants.WipDiscPageUrl, id);
            string discPage = await DownloadString(discPageUri, retries: 3);
            if (discPage == null || discPage.Contains($"WIP disc with ID \"{id}\" doesn't exist"))
            {
                // Best-effort: mark the local copy of a deleted disc
                try
                {
                    if (rename)
                    {
                        // (redundant inner `&& rename` check removed)
                        if (Directory.Exists(paddedIdDir))
                            Directory.Move(paddedIdDir, paddedIdDir + "-deleted");
                        else
                            Directory.CreateDirectory(paddedIdDir + "-deleted");
                    }
                }
                catch { }

                Console.WriteLine($"ID {paddedId} could not be found!");
                return false;
            }

            // Check if the page has been updated since the last time it was downloaded, if possible
            if (File.Exists(Path.Combine(paddedIdDir, "disc.html")))
            {
                // Read in the cached file
                var oldDiscPage = File.ReadAllText(Path.Combine(paddedIdDir, "disc.html"));

                // Check for the full match ID in both pages
                var oldResult = Constants.FullMatchRegex.Match(oldDiscPage);
                var newResult = Constants.FullMatchRegex.Match(discPage);

                // If both pages contain the same ID, skip it
                if (oldResult.Success && newResult.Success && oldResult.Groups[1].Value == newResult.Groups[1].Value)
                {
                    Console.WriteLine($"ID {paddedId} has not been changed since last download");
                    return false;
                }

                // If neither page contains an ID, skip it
                else if (!oldResult.Success && !newResult.Success)
                {
                    Console.WriteLine($"ID {paddedId} has not been changed since last download");
                    return false;
                }
            }

            // Create ID subdirectory
            Directory.CreateDirectory(paddedIdDir);

            // HTML
            using (var discStreamWriter = File.CreateText(Path.Combine(paddedIdDir, "disc.html")))
            {
                discStreamWriter.Write(discPage);
            }

            Console.WriteLine($"ID {paddedId} has been successfully downloaded");
            return true;
        }
        catch (Exception ex)
        {
            Console.WriteLine($"An exception has occurred: {ex}");
            return false;
        }
    }

    #endregion

    #region Helpers

    /// <summary>
    /// Download a set of packs
    /// </summary>
    /// <param name="url">Base URL to download using</param>
    /// <param name="systems">Systems to download packs for</param>
    /// <param name="title">Name of the pack that is downloading</param>
    /// <returns>Mapping from system to downloaded pack contents</returns>
    public async Task<Dictionary<RedumpSystem, byte[]>> DownloadPacks(string url, RedumpSystem?[] systems, string title)
    {
        var packsDictionary = new Dictionary<RedumpSystem, byte[]>();

        Console.WriteLine($"Downloading {title}");
        foreach (var system in systems)
        {
            // If the system is invalid, we can't do anything
            if (system == null || !system.IsAvailable())
                continue;

            // If we didn't have credentials
            if (!LoggedIn && system.IsBanned())
                continue;

            // If the system is unknown, we can't do anything
            string longName = system.LongName();
            if (string.IsNullOrWhiteSpace(longName))
                continue;

            // BUGFIX: clamp the pad width so long names / narrow buffers can't
            // produce a negative count for new string(char, count)
            int padding = Math.Max(0, Console.BufferWidth - longName.Length - 1);
            Console.Write($"\r{longName}{new string(' ', padding)}");
            byte[] pack = await DownloadSinglePack(url, system);
            if (pack != null)
                packsDictionary.Add(system.Value, pack);
        }

        Console.Write($"\rComplete!{new string(' ', Math.Max(0, Console.BufferWidth - 10))}");
        Console.WriteLine();
        return packsDictionary;
    }

    /// <summary>
    /// Download a set of packs
    /// </summary>
    /// <param name="url">Base URL to download using</param>
    /// <param name="systems">Systems to download packs for</param>
    /// <param name="title">Name of the pack that is downloading</param>
    /// <param name="outDir">Output directory to save data to</param>
    /// <param name="subfolder">Named subfolder for the pack, used optionally</param>
    /// <returns>True after all valid systems have been attempted</returns>
    public async Task<bool> DownloadPacks(string url, RedumpSystem?[] systems, string title, string outDir, string subfolder)
    {
        Console.WriteLine($"Downloading {title}");
        foreach (var system in systems)
        {
            // If the system is invalid, we can't do anything
            if (system == null || !system.IsAvailable())
                continue;

            // If we didn't have credentials
            if (!LoggedIn && system.IsBanned())
                continue;

            // If the system is unknown, we can't do anything
            string longName = system.LongName();
            if (string.IsNullOrWhiteSpace(longName))
                continue;

            // BUGFIX: clamp the pad width so long names / narrow buffers can't
            // produce a negative count for new string(char, count)
            int padding = Math.Max(0, Console.BufferWidth - longName.Length - 1);
            Console.Write($"\r{longName}{new string(' ', padding)}");
            await DownloadSinglePack(url, system, outDir, subfolder);
        }

        Console.Write($"\rComplete!{new string(' ', Math.Max(0, Console.BufferWidth - 10))}");
        Console.WriteLine();
        return true;
    }

    /// <summary>
    /// Download from a URI to a local file
    /// </summary>
    /// <param name="uri">Remote URI to retrieve</param>
    /// <param name="fileName">Filename to write to</param>
    /// <returns>The remote filename from the URI, null on error</returns>
    private async Task<string> DownloadFile(string uri, string fileName)
    {
        // Make the call to get the file
        var response = await GetAsync(uri);
        if (response?.Content?.Headers == null || !response.IsSuccessStatusCode)
        {
            Console.WriteLine($"Could not download {uri}");
            return null;
        }

        // Copy the data to a local file
        // BUGFIX: File.Create truncates an existing file; File.OpenWrite would leave
        // stale trailing bytes when overwriting a longer previous download
        using (var responseStream = await response.Content.ReadAsStreamAsync())
        using (var tempFileStream = File.Create(fileName))
        {
            responseStream.CopyTo(tempFileStream);
        }

        return response.Content.Headers.ContentDisposition?.FileName?.Replace("\"", "");
    }

    /// <summary>
    /// Download from a URI to a string
    /// </summary>
    /// <param name="uri">Remote URI to retrieve</param>
    /// <param name="retries">Number of times to retry on error</param>
    /// <returns>String from the URI, null on error</returns>
    private async Task<string> DownloadString(string uri, int retries = 3)
    {
        // Only retry a positive number of times
        if (retries <= 0)
            return null;

        for (int i = 0; i < retries; i++)
        {
            try
            {
                return await GetStringAsync(uri);
            }
            catch { }
        }

        return null;
    }

    /// <summary>
    /// Move a tempfile to a new name unless it already exists, in which case, delete the tempfile
    /// </summary>
    /// <param name="tempfile">Path to existing temporary file</param>
    /// <param name="newfile">Path to new output file</param>
    /// <param name="outDir">Output directory to save data to</param>
    /// <param name="subfolder">Optional subfolder to append to the path</param>
    private static void MoveOrDelete(string tempfile, string newfile, string outDir, string subfolder)
    {
        // If we don't have a file to move to, just delete the temp file
        if (string.IsNullOrWhiteSpace(newfile))
        {
            File.Delete(tempfile);
            return;
        }

        // If we have a subfolder, create it and update the newfile name
        if (!string.IsNullOrWhiteSpace(subfolder))
        {
            if (!Directory.Exists(Path.Combine(outDir, subfolder)))
                Directory.CreateDirectory(Path.Combine(outDir, subfolder));

            newfile = Path.Combine(subfolder, newfile);
        }

        // If the file already exists, don't overwrite it
        if (File.Exists(Path.Combine(outDir, newfile)))
            File.Delete(tempfile);
        else
            File.Move(tempfile, Path.Combine(outDir, newfile));
    }

    #endregion
}
}
#endif

View File

@@ -1,837 +0,0 @@
#if NET48 || NETSTANDARD2_1
using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Text;
using System.Text.RegularExpressions;
using RedumpLib.Data;
namespace RedumpLib.Web
{
// https://stackoverflow.com/questions/1777221/using-cookiecontainer-with-webclient-class
public class RedumpWebClient : WebClient
{
private readonly CookieContainer m_container = new CookieContainer();
/// <summary>
/// Determines if user is logged into Redump
/// </summary>
public bool LoggedIn { get; private set; } = false;
/// <summary>
/// Determines if the user is a staff member
/// </summary>
public bool IsStaff { get; private set; } = false;
/// <summary>
/// Get the last downloaded filename, if possible
/// </summary>
/// <returns>Filename parsed from the Content-Disposition response header, null if unavailable</returns>
public string GetLastFilename()
{
// If the response headers are null or empty
if (ResponseHeaders == null || ResponseHeaders.Count == 0)
return null;
// If we don't have the response header we care about
string headerValue = ResponseHeaders.Get("Content-Disposition");
if (string.IsNullOrWhiteSpace(headerValue))
return null;
// Extract the filename from the value; 9 == "filename=".Length, quotes stripped
#if NETSTANDARD2_1
return headerValue[(headerValue.IndexOf("filename=") + 9)..].Replace("\"", "");
#else
return headerValue.Substring(headerValue.IndexOf("filename=") + 9).Replace("\"", "");
#endif
}
/// <inheritdoc/>
protected override WebRequest GetWebRequest(Uri address)
{
    // Attach the shared cookie container so the Redump login session persists
    var baseRequest = base.GetWebRequest(address);
    if (baseRequest is HttpWebRequest httpRequest)
        httpRequest.CookieContainer = m_container;
    return baseRequest;
}
/// <summary>
/// Validate supplied credentials
/// </summary>
/// <param name="username">Redump username</param>
/// <param name="password">Redump password</param>
/// <returns>Status flag (true accepted, false denied, null error) and a display message</returns>
public static (bool?, string) ValidateCredentials(string username, string password)
{
// If options are invalid or we're missing something key, just return
if (string.IsNullOrWhiteSpace(username) || string.IsNullOrWhiteSpace(password))
return (false, null);
// Try logging in with the supplied credentials otherwise
// NOTE: the brace structure below is split across preprocessor branches —
// net48 uses a using block (closed in the #if NET48 section), netstandard2.1
// uses a using declaration with no extra brace
#if NETSTANDARD2_1
using RedumpWebClient wc = new RedumpWebClient();
#else
using (RedumpWebClient wc = new RedumpWebClient())
{
#endif
bool? loggedIn = wc.Login(username, password);
if (loggedIn == true)
return (true, "Redump username and password accepted!");
else if (loggedIn == false)
return (false, "Redump username and password denied!");
else
return (null, "An error occurred validating your credentials!");
#if NET48
}
#endif
}
/// <summary>
/// Login to Redump, if possible
/// </summary>
/// <param name="username">Redump username</param>
/// <param name="password">Redump password</param>
/// <returns>True if the user could be logged in, false otherwise, null on error</returns>
public bool? Login(string username, string password)
{
// Credentials verification
if (!string.IsNullOrWhiteSpace(username) && !string.IsNullOrWhiteSpace(password))
{
Console.WriteLine("Credentials entered, will attempt Redump login...");
}
else if (!string.IsNullOrWhiteSpace(username) && string.IsNullOrWhiteSpace(password))
{
Console.WriteLine("Only a username was specified, will not attempt Redump login...");
return false;
}
else if (string.IsNullOrWhiteSpace(username))
{
Console.WriteLine("No credentials entered, will not attempt Redump login...");
return false;
}
// HTTP encode the password
password = WebUtility.UrlEncode(password);
// Attempt to login up to 3 times
// NOTE(review): attempt numbers in the log messages below are zero-based
for (int i = 0; i < 3; i++)
{
try
{
// Get the current token from the login page
var loginPage = DownloadString(Constants.LoginUrl);
string token = Constants.TokenRegex.Match(loginPage).Groups[1].Value;
// Construct the login request as a form-urlencoded POST
Headers[HttpRequestHeader.ContentType] = "application/x-www-form-urlencoded";
Encoding = Encoding.UTF8;
var response = UploadString(Constants.LoginUrl, $"form_sent=1&redirect_url=&csrf_token={token}&req_username={username}&req_password={password}&save_pass=0");
if (response.Contains("Incorrect username and/or password."))
{
Console.WriteLine("Invalid credentials entered, continuing without logging in...");
return false;
}
// The user was able to be logged in
Console.WriteLine("Credentials accepted! Logged into Redump...");
LoggedIn = true;
// If the user is a moderator or staff, set accordingly
if (response.Contains("http://forum.redump.org/forum/9/staff/"))
IsStaff = true;
return true;
}
catch (Exception ex)
{
Console.WriteLine($"An exception occurred while trying to log in on attempt {i}: {ex}");
}
}
Console.WriteLine("Could not login to Redump in 3 attempts, continuing without logging in...");
return false;
}
#region Single Page Helpers
/// <summary>
/// Process a Redump site page as a list of possible IDs or disc page
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <returns>List of IDs from the page, empty on error</returns>
public List<int> CheckSingleSitePage(string url)
{
List<int> ids = new List<int>();
string dumpsPage = string.Empty;
// Try up to 3 times to retrieve the data
// NOTE(review): if all three attempts throw, dumpsPage stays empty and the
// regex loop below simply finds no matches — confirm intended
for (int i = 0; i < 3; i++)
{
try
{
dumpsPage = DownloadString(url);
break;
}
catch { }
}
// If we have no dumps left
if (dumpsPage.Contains("No discs found."))
return ids;
// If we have a single disc page already
if (dumpsPage.Contains("<b>Download:</b>"))
{
var value = Regex.Match(dumpsPage, @"/disc/(\d+)/sfv/").Groups[1].Value;
if (int.TryParse(value, out int id))
ids.Add(id);
return ids;
}
// Otherwise, traverse each dump on the page
var matches = Constants.DiscRegex.Matches(dumpsPage);
foreach (Match match in matches)
{
try
{
if (int.TryParse(match.Groups[1].Value, out int value))
ids.Add(value);
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
continue;
}
}
return ids;
}
/// <summary>
/// Process a Redump site page as a list of possible IDs or disc page
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="failOnSingle">True to return on first error, false otherwise</param>
/// <returns>True if the page could be downloaded, false otherwise</returns>
public bool CheckSingleSitePage(string url, string outDir, bool failOnSingle)
{
string dumpsPage = string.Empty;
// Try up to 3 times to retrieve the data
// NOTE(review): if all three attempts throw, dumpsPage stays empty and this
// method falls through to return true — confirm intended
for (int i = 0; i < 3; i++)
{
try
{
dumpsPage = DownloadString(url);
break;
}
catch { }
}
// If we have no dumps left
if (dumpsPage.Contains("No discs found."))
return false;
// If we have a single disc page already
if (dumpsPage.Contains("<b>Download:</b>"))
{
var value = Regex.Match(dumpsPage, @"/disc/(\d+)/sfv/").Groups[1].Value;
if (int.TryParse(value, out int id))
{
bool downloaded = DownloadSingleSiteID(id, outDir, false);
if (!downloaded && failOnSingle)
return false;
}
// A single-disc page means there are no further pages to walk
return false;
}
// Otherwise, traverse each dump on the page
var matches = Constants.DiscRegex.Matches(dumpsPage);
foreach (Match match in matches)
{
try
{
if (int.TryParse(match.Groups[1].Value, out int value))
{
bool downloaded = DownloadSingleSiteID(value, outDir, false);
if (!downloaded && failOnSingle)
return false;
}
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
continue;
}
}
return true;
}
/// <summary>
/// Process a Redump WIP page as a list of possible IDs or disc page
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <returns>List of IDs from the page, empty on error</returns>
public List<int> CheckSingleWIPPage(string url)
{
List<int> ids = new List<int>();
string dumpsPage = string.Empty;
// Try up to 3 times to retrieve the data
for (int i = 0; i < 3; i++)
{
try
{
dumpsPage = DownloadString(url);
break;
}
catch { }
}
// If we have no dumps left
if (dumpsPage.Contains("No discs found."))
return ids;
// Otherwise, traverse each dump on the page; group 2 holds the WIP disc ID
var matches = Constants.NewDiscRegex.Matches(dumpsPage);
foreach (Match match in matches)
{
try
{
if (int.TryParse(match.Groups[2].Value, out int value))
ids.Add(value);
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
continue;
}
}
return ids;
}
/// <summary>
/// Process a Redump WIP page as a list of possible IDs or disc page
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="failOnSingle">True to return on first error, false otherwise</param>
/// <returns>True if the page could be downloaded, false otherwise</returns>
public bool CheckSingleWIPPage(string url, string outDir, bool failOnSingle)
{
string dumpsPage = string.Empty;
// Try up to 3 times to retrieve the data
for (int i = 0; i < 3; i++)
{
try
{
dumpsPage = DownloadString(url);
break;
}
catch { }
}
// If we have no dumps left
if (dumpsPage.Contains("No discs found."))
return false;
// Otherwise, traverse each dump on the page; group 2 holds the WIP disc ID
var matches = Constants.NewDiscRegex.Matches(dumpsPage);
foreach (Match match in matches)
{
try
{
if (int.TryParse(match.Groups[2].Value, out int value))
{
bool downloaded = DownloadSingleWIPID(value, outDir, false);
if (!downloaded && failOnSingle)
return false;
}
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
continue;
}
}
return true;
}
#endregion
#region Download Helpers
/// <summary>
/// Download a single pack
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="system">System to download packs for</param>
/// <returns>Byte array containing the downloaded pack, null on error</returns>
public byte[] DownloadSinglePack(string url, RedumpSystem? system)
{
    try
    {
        // Substitute the system short name into the pack URL template
        string packUrl = string.Format(url, system.ShortName());
        return DownloadData(packUrl);
    }
    catch (Exception ex)
    {
        Console.WriteLine($"An exception has occurred: {ex}");
        return null;
    }
}
/// <summary>
/// Download a single pack
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="system">System to download packs for</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="subfolder">Named subfolder for the pack, used optionally</param>
public void DownloadSinglePack(string url, RedumpSystem? system, string outDir, string subfolder)
{
    try
    {
        // Fall back to the current directory when no output directory was given
        if (string.IsNullOrWhiteSpace(outDir))
            outDir = Environment.CurrentDirectory;

        // Download to a uniquely-named tempfile, then move it into place
        string tempPath = Path.Combine(outDir, "tmp" + Guid.NewGuid().ToString());
        string packUrl = string.Format(url, system.ShortName());
        DownloadFile(packUrl, tempPath);
        MoveOrDelete(tempPath, GetLastFilename(), outDir, subfolder);
    }
    catch (Exception ex)
    {
        Console.WriteLine($"An exception has occurred: {ex}");
    }
}
/// <summary>
/// Download an individual site ID data, if possible
/// </summary>
/// <param name="id">Redump disc ID to retrieve</param>
/// <returns>String containing the page contents if successful, null on error</returns>
public string DownloadSingleSiteID(int id)
{
    string paddedId = id.ToString().PadLeft(5, '0');
    Console.WriteLine($"Processing ID: {paddedId}");
    try
    {
        string discPage = string.Empty;

        // Retry the page retrieval up to three times before giving up
        for (int attempt = 0; attempt < 3; attempt++)
        {
            try
            {
                discPage = DownloadString(string.Format(Constants.DiscPageUrl, id));
                break;
            }
            catch { }
        }

        // The site reports missing/deleted discs inline in the page body
        if (discPage.Contains($"Disc with ID \"{id}\" doesn't exist"))
        {
            Console.WriteLine($"ID {paddedId} could not be found!");
            return null;
        }

        Console.WriteLine($"ID {paddedId} has been successfully downloaded");
        return discPage;
    }
    catch (Exception ex)
    {
        Console.WriteLine($"An exception has occurred: {ex}");
        return null;
    }
}
/// <summary>
/// Download an individual site ID data, if possible
/// </summary>
/// <param name="id">Redump disc ID to retrieve</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="rename">True to rename deleted entries, false otherwise</param>
/// <returns>True if all data was downloaded, false otherwise</returns>
public bool DownloadSingleSiteID(int id, string outDir, bool rename)
{
    // If no output directory is defined, use the current directory instead
    if (string.IsNullOrWhiteSpace(outDir))
        outDir = Environment.CurrentDirectory;

    string paddedId = id.ToString().PadLeft(5, '0');
    string paddedIdDir = Path.Combine(outDir, paddedId);
    Console.WriteLine($"Processing ID: {paddedId}");
    try
    {
        // Format the base disc page URL once for all downloads below
        string discPageUrl = string.Format(Constants.DiscPageUrl, id);

        string discPage = string.Empty;

        // Try up to 3 times to retrieve the data
        for (int i = 0; i < 3; i++)
        {
            try
            {
                discPage = DownloadString(discPageUrl);
                break;
            }
            catch { }
        }

        // The site reports a missing/deleted disc inline in the page body
        if (discPage.Contains($"Disc with ID \"{id}\" doesn't exist"))
        {
            // Best-effort: mark the local copy as deleted when requested
            try
            {
                if (rename)
                {
                    if (Directory.Exists(paddedIdDir))
                        Directory.Move(paddedIdDir, paddedIdDir + "-deleted");
                    else
                        Directory.CreateDirectory(paddedIdDir + "-deleted");
                }
            }
            catch { }

            Console.WriteLine($"ID {paddedId} could not be found!");
            return false;
        }

        // Check if the page has been updated since the last time it was downloaded, if possible
        if (File.Exists(Path.Combine(paddedIdDir, "disc.html")))
        {
            // Read in the cached file
            var oldDiscPage = File.ReadAllText(Path.Combine(paddedIdDir, "disc.html"));

            // Check for the last modified date in both pages
            var oldResult = Constants.LastModifiedRegex.Match(oldDiscPage);
            var newResult = Constants.LastModifiedRegex.Match(discPage);

            // If both pages contain the same modified date, skip it
            if (oldResult.Success && newResult.Success && oldResult.Groups[1].Value == newResult.Groups[1].Value)
            {
                Console.WriteLine($"ID {paddedId} has not been changed since last download");
                return false;
            }

            // If neither page contains a modified date, skip it
            else if (!oldResult.Success && !newResult.Success)
            {
                Console.WriteLine($"ID {paddedId} has not been changed since last download");
                return false;
            }
        }

        // Create ID subdirectory
        Directory.CreateDirectory(paddedIdDir);

        // Each auxiliary file is only downloaded if the page links to it

        // View Edit History
        if (discPage.Contains($"<a href=\"/disc/{id}/changes/\""))
            DownloadFile(discPageUrl + Constants.ChangesExt, Path.Combine(paddedIdDir, "changes.html"));

        // CUE
        if (discPage.Contains($"<a href=\"/disc/{id}/cue/\""))
            DownloadFile(discPageUrl + Constants.CueExt, Path.Combine(paddedIdDir, paddedId + ".cue"));

        // Edit disc
        if (discPage.Contains($"<a href=\"/disc/{id}/edit/\""))
            DownloadFile(discPageUrl + Constants.EditExt, Path.Combine(paddedIdDir, "edit.html"));

        // GDI
        if (discPage.Contains($"<a href=\"/disc/{id}/gdi/\""))
            DownloadFile(discPageUrl + Constants.GdiExt, Path.Combine(paddedIdDir, paddedId + ".gdi"));

        // KEYS
        if (discPage.Contains($"<a href=\"/disc/{id}/key/\""))
            DownloadFile(discPageUrl + Constants.KeyExt, Path.Combine(paddedIdDir, paddedId + ".key"));

        // LSD
        if (discPage.Contains($"<a href=\"/disc/{id}/lsd/\""))
            DownloadFile(discPageUrl + Constants.LsdExt, Path.Combine(paddedIdDir, paddedId + ".lsd"));

        // MD5
        if (discPage.Contains($"<a href=\"/disc/{id}/md5/\""))
            DownloadFile(discPageUrl + Constants.Md5Ext, Path.Combine(paddedIdDir, paddedId + ".md5"));

        // Review WIP entry
        if (Constants.NewDiscRegex.IsMatch(discPage))
        {
            var match = Constants.NewDiscRegex.Match(discPage);
            DownloadFile(string.Format(Constants.WipDiscPageUrl, match.Groups[2].Value), Path.Combine(paddedIdDir, "newdisc.html"));
        }

        // SBI
        if (discPage.Contains($"<a href=\"/disc/{id}/sbi/\""))
            DownloadFile(discPageUrl + Constants.SbiExt, Path.Combine(paddedIdDir, paddedId + ".sbi"));

        // SFV
        if (discPage.Contains($"<a href=\"/disc/{id}/sfv/\""))
            DownloadFile(discPageUrl + Constants.SfvExt, Path.Combine(paddedIdDir, paddedId + ".sfv"));

        // SHA1
        if (discPage.Contains($"<a href=\"/disc/{id}/sha1/\""))
            DownloadFile(discPageUrl + Constants.Sha1Ext, Path.Combine(paddedIdDir, paddedId + ".sha1"));

        // HTML (Last in case of errors)
        using (var discStreamWriter = File.CreateText(Path.Combine(paddedIdDir, "disc.html")))
        {
            discStreamWriter.Write(discPage);
        }

        Console.WriteLine($"ID {paddedId} has been successfully downloaded");
        return true;
    }
    catch (Exception ex)
    {
        Console.WriteLine($"An exception has occurred: {ex}");
        return false;
    }
}
/// <summary>
/// Download an individual WIP ID data, if possible
/// </summary>
/// <param name="id">Redump WIP disc ID to retrieve</param>
/// <returns>String containing the page contents if successful, null on error</returns>
public string DownloadSingleWIPID(int id)
{
    string paddedId = id.ToString().PadLeft(5, '0');
    Console.WriteLine($"Processing ID: {paddedId}");
    try
    {
        string discPage = string.Empty;

        // Retry the page retrieval up to three times before giving up
        for (int attempt = 0; attempt < 3; attempt++)
        {
            try
            {
                discPage = DownloadString(string.Format(Constants.WipDiscPageUrl, id));
                break;
            }
            catch { }
        }

        // The site reports missing/deleted WIP discs inline in the page body
        if (discPage.Contains($"WIP disc with ID \"{id}\" doesn't exist"))
        {
            Console.WriteLine($"ID {paddedId} could not be found!");
            return null;
        }

        Console.WriteLine($"ID {paddedId} has been successfully downloaded");
        return discPage;
    }
    catch (Exception ex)
    {
        Console.WriteLine($"An exception has occurred: {ex}");
        return null;
    }
}
/// <summary>
/// Download an individual WIP ID data, if possible
/// </summary>
/// <param name="id">Redump WIP disc ID to retrieve</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="rename">True to rename deleted entries, false otherwise</param>
/// <returns>True if all data was downloaded, false otherwise</returns>
public bool DownloadSingleWIPID(int id, string outDir, bool rename)
{
    // If no output directory is defined, use the current directory instead
    if (string.IsNullOrWhiteSpace(outDir))
        outDir = Environment.CurrentDirectory;

    string paddedId = id.ToString().PadLeft(5, '0');
    string paddedIdDir = Path.Combine(outDir, paddedId);
    Console.WriteLine($"Processing ID: {paddedId}");
    try
    {
        string discPage = string.Empty;

        // Try up to 3 times to retrieve the data
        for (int i = 0; i < 3; i++)
        {
            try
            {
                discPage = DownloadString(string.Format(Constants.WipDiscPageUrl, id));
                break;
            }
            catch { }
        }

        // The site reports a missing/deleted WIP disc inline in the page body
        if (discPage.Contains($"WIP disc with ID \"{id}\" doesn't exist"))
        {
            // Best-effort: mark the local copy as deleted when requested
            try
            {
                if (rename)
                {
                    if (Directory.Exists(paddedIdDir))
                        Directory.Move(paddedIdDir, paddedIdDir + "-deleted");
                    else
                        Directory.CreateDirectory(paddedIdDir + "-deleted");
                }
            }
            catch { }

            Console.WriteLine($"ID {paddedId} could not be found!");
            return false;
        }

        // Check if the page has been updated since the last time it was downloaded, if possible
        if (File.Exists(Path.Combine(paddedIdDir, "disc.html")))
        {
            // Read in the cached file
            var oldDiscPage = File.ReadAllText(Path.Combine(paddedIdDir, "disc.html"));

            // Check for the full match ID in both pages
            var oldResult = Constants.FullMatchRegex.Match(oldDiscPage);
            var newResult = Constants.FullMatchRegex.Match(discPage);

            // If both pages contain the same ID, skip it
            if (oldResult.Success && newResult.Success && oldResult.Groups[1].Value == newResult.Groups[1].Value)
            {
                Console.WriteLine($"ID {paddedId} has not been changed since last download");
                return false;
            }

            // If neither page contains an ID, skip it
            else if (!oldResult.Success && !newResult.Success)
            {
                Console.WriteLine($"ID {paddedId} has not been changed since last download");
                return false;
            }
        }

        // Create ID subdirectory
        Directory.CreateDirectory(paddedIdDir);

        // HTML
        using (var discStreamWriter = File.CreateText(Path.Combine(paddedIdDir, "disc.html")))
        {
            discStreamWriter.Write(discPage);
        }

        Console.WriteLine($"ID {paddedId} has been successfully downloaded");
        return true;
    }
    catch (Exception ex)
    {
        Console.WriteLine($"An exception has occurred: {ex}");
        return false;
    }
}
#endregion
#region Helpers
/// <summary>
/// Download a set of packs
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="systems">Systems to download packs for</param>
/// <param name="title">Name of the pack that is downloading</param>
/// <returns>Dictionary mapping each downloaded system to its pack contents</returns>
public Dictionary<RedumpSystem, byte[]> DownloadPacks(string url, RedumpSystem?[] systems, string title)
{
    var packsDictionary = new Dictionary<RedumpSystem, byte[]>();

    Console.WriteLine($"Downloading {title}");
    foreach (var system in systems)
    {
        // If the system is invalid, we can't do anything
        if (system == null || !system.IsAvailable())
            continue;

        // If we didn't have credentials
        if (!LoggedIn && system.IsBanned())
            continue;

        // If the system is unknown, we can't do anything
        string longName = system.LongName();
        if (string.IsNullOrWhiteSpace(longName))
            continue;

        // Overwrite the current console line with the system name
        Console.Write($"\r{longName}{new string(' ', Console.BufferWidth - longName.Length - 1)}");
        byte[] pack = DownloadSinglePack(url, system);
        if (pack != null)
            packsDictionary.Add(system.Value, pack);
    }

    Console.Write($"\rComplete!{new string(' ', Console.BufferWidth - 10)}");
    Console.WriteLine();
    return packsDictionary;
}
/// <summary>
/// Download a set of packs
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="systems">Systems to download packs for</param>
/// <param name="title">Name of the pack that is downloading</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="subfolder">Named subfolder for the pack, used optionally</param>
public void DownloadPacks(string url, RedumpSystem?[] systems, string title, string outDir, string subfolder)
{
    Console.WriteLine($"Downloading {title}");
    foreach (var system in systems)
    {
        // If the system is invalid, we can't do anything
        if (system == null || !system.IsAvailable())
            continue;

        // If we didn't have credentials
        if (!LoggedIn && system.IsBanned())
            continue;

        // If the system is unknown, we can't do anything
        string longName = system.LongName();
        if (string.IsNullOrWhiteSpace(longName))
            continue;

        // Overwrite the current console line with the system name
        Console.Write($"\r{longName}{new string(' ', Console.BufferWidth - longName.Length - 1)}");
        DownloadSinglePack(url, system, outDir, subfolder);
    }

    Console.Write($"\rComplete!{new string(' ', Console.BufferWidth - 10)}");
    Console.WriteLine();
}
/// <summary>
/// Move a tempfile to a new name unless it already exists, in which case, delete the tempfile
/// </summary>
/// <param name="tempfile">Path to existing temporary file</param>
/// <param name="newfile">Path to new output file</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="subfolder">Optional subfolder to append to the path</param>
private static void MoveOrDelete(string tempfile, string newfile, string outDir, string subfolder)
{
    // Without a target name there is nothing to keep, so discard the tempfile
    if (string.IsNullOrWhiteSpace(newfile))
    {
        File.Delete(tempfile);
        return;
    }

    if (!string.IsNullOrWhiteSpace(subfolder))
    {
        // CreateDirectory is a no-op when the directory already exists
        Directory.CreateDirectory(Path.Combine(outDir, subfolder));
        newfile = Path.Combine(subfolder, newfile);
    }

    // Keep the existing file if present; otherwise move the tempfile into place
    string destination = Path.Combine(outDir, newfile);
    if (File.Exists(destination))
        File.Delete(tempfile);
    else
        File.Move(tempfile, destination);
}
#endregion
}
}
#endif