#if NET6_0
using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using RedumpLib.Data;
namespace RedumpLib.Web
{
public class RedumpHttpClient : HttpClient
{
#region Properties
/// <summary>
/// Determines if user is logged into Redump
/// </summary>
public bool LoggedIn { get; private set; } = false;
/// <summary>
/// Determines if the user is a staff member
/// </summary>
public bool IsStaff { get; private set; } = false;
#endregion
/// <summary>
/// Constructor
/// </summary>
public RedumpHttpClient()
: base(new HttpClientHandler { UseCookies = true })
{
}
#region Credentials
/// <summary>
/// Validate supplied credentials
/// </summary>
/// <param name="username">Redump username</param>
/// <param name="password">Redump password</param>
/// <returns>True if the credentials were accepted, false if denied, null on error, along with a status message</returns>
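/// <example>
/// A minimal usage sketch; the credentials shown are placeholders:
/// <code>
/// var (valid, message) = await RedumpHttpClient.ValidateCredentials("myUser", "myPassword");
/// Console.WriteLine(message);
/// </code>
/// </example>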
public async static Task<(bool?, string)> ValidateCredentials(string username, string password)
{
// If options are invalid or we're missing something key, just return
if (string.IsNullOrWhiteSpace(username) || string.IsNullOrWhiteSpace(password))
return (false, null);
// Try logging in with the supplied credentials otherwise
using RedumpHttpClient httpClient = new();
bool? loggedIn = await httpClient.Login(username, password);
if (loggedIn == true)
return (true, "Redump username and password accepted!");
else if (loggedIn == false)
return (false, "Redump username and password denied!");
else
return (null, "An error occurred validating your credentials!");
}
/// <summary>
/// Login to Redump, if possible
/// </summary>
/// <param name="username">Redump username</param>
/// <param name="password">Redump password</param>
/// <returns>True if the user could be logged in, false otherwise, null on error</returns>
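/// <example>
/// A minimal usage sketch; the credentials shown are placeholders:
/// <code>
/// using var client = new RedumpHttpClient();
/// bool? loggedIn = await client.Login("myUser", "myPassword");
/// if (loggedIn == true)
///     Console.WriteLine("Authenticated Redump requests are now possible");
/// </code>
/// </example>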
public async Task<bool?> Login(string username, string password)
{
// Credentials verification
if (!string.IsNullOrWhiteSpace(username) && !string.IsNullOrWhiteSpace(password))
{
Console.WriteLine("Credentials entered, will attempt Redump login...");
}
else if (!string.IsNullOrWhiteSpace(username) && string.IsNullOrWhiteSpace(password))
{
Console.WriteLine("Only a username was specified, will not attempt Redump login...");
return false;
}
else if (string.IsNullOrWhiteSpace(username))
{
Console.WriteLine("No credentials entered, will not attempt Redump login...");
return false;
}
// HTTP encode the password
password = WebUtility.UrlEncode(password);
// Attempt to login up to 3 times
for (int i = 0; i < 3; i++)
{
try
{
// Get the current token from the login page
var loginPage = await GetStringAsync(Constants.LoginUrl);
string token = Constants.TokenRegex.Match(loginPage).Groups[1].Value;
// Construct the login request
var postContent = new StringContent($"form_sent=1&redirect_url=&csrf_token={token}&req_username={username}&req_password={password}&save_pass=0", Encoding.UTF8);
postContent.Headers.ContentType = MediaTypeHeaderValue.Parse("application/x-www-form-urlencoded");
// Send the login request and get the result
var response = await PostAsync(Constants.LoginUrl, postContent);
string responseContent = response?.Content != null ? await response.Content.ReadAsStringAsync() : null;
if (string.IsNullOrWhiteSpace(responseContent))
{
Console.WriteLine($"An error occurred while trying to log in on attempt {i}: No response");
continue;
}
if (responseContent.Contains("Incorrect username and/or password."))
{
Console.WriteLine("Invalid credentials entered, continuing without logging in...");
return false;
}
// The user was able to be logged in
Console.WriteLine("Credentials accepted! Logged into Redump...");
LoggedIn = true;
// If the user is a moderator or staff, set accordingly
if (responseContent.Contains("http://forum.redump.org/forum/9/staff/"))
IsStaff = true;
return true;
}
catch (Exception ex)
{
Console.WriteLine($"An exception occurred while trying to log in on attempt {i}: {ex}");
}
}
Console.WriteLine("Could not login to Redump in 3 attempts, continuing without logging in...");
return false;
}
#endregion
#region Single Page Helpers
/// <summary>
/// Process a Redump site page as a list of possible IDs or disc page
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <returns>List of IDs from the page, empty on error</returns>
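/// <example>
/// A minimal usage sketch; the search URL is illustrative only:
/// <code>
/// using var client = new RedumpHttpClient();
/// var ids = await client.CheckSingleSitePage("http://redump.org/discs/quicksearch/example/");
/// Console.WriteLine($"Found {ids.Count} disc ID(s)");
/// </code>
/// </example>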
public async Task<List<int>> CheckSingleSitePage(string url)
{
List<int> ids = new();
// Try up to 3 times to retrieve the data
string dumpsPage = await DownloadString(url, retries: 3);
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
return ids;
// If we have a single disc page already
if (dumpsPage.Contains("Download:"))
{
var value = Regex.Match(dumpsPage, @"/disc/(\d+)/sfv/").Groups[1].Value;
if (int.TryParse(value, out int id))
ids.Add(id);
return ids;
}
// Otherwise, traverse each dump on the page
var matches = Constants.DiscRegex.Matches(dumpsPage);
foreach (Match match in matches)
{
try
{
if (int.TryParse(match.Groups[1].Value, out int value))
ids.Add(value);
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
continue;
}
}
return ids;
}
/// <summary>
/// Process a Redump site page as a list of possible IDs or disc page
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="failOnSingle">True to return on first error, false otherwise</param>
/// <returns>True if the page could be downloaded, false otherwise</returns>
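/// <example>
/// A minimal usage sketch; the URL and output directory are illustrative only:
/// <code>
/// using var client = new RedumpHttpClient();
/// bool ok = await client.CheckSingleSitePage("http://redump.org/discs/quicksearch/example/", "redump-pages", failOnSingle: false);
/// </code>
/// </example>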
public async Task<bool> CheckSingleSitePage(string url, string outDir, bool failOnSingle)
{
// Try up to 3 times to retrieve the data
string dumpsPage = await DownloadString(url, retries: 3);
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
return false;
// If we have a single disc page already
if (dumpsPage.Contains("Download:"))
{
var value = Regex.Match(dumpsPage, @"/disc/(\d+)/sfv/").Groups[1].Value;
if (int.TryParse(value, out int id))
{
bool downloaded = await DownloadSingleSiteID(id, outDir, false);
if (!downloaded && failOnSingle)
return false;
}
return false;
}
// Otherwise, traverse each dump on the page
var matches = Constants.DiscRegex.Matches(dumpsPage);
foreach (Match match in matches)
{
try
{
if (int.TryParse(match.Groups[1].Value, out int value))
{
bool downloaded = await DownloadSingleSiteID(value, outDir, false);
if (!downloaded && failOnSingle)
return false;
}
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
continue;
}
}
return true;
}
/// <summary>
/// Process a Redump WIP page as a list of possible IDs or disc page
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <returns>List of IDs from the page, empty on error</returns>
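/// <example>
/// A minimal usage sketch; the WIP listing URL is illustrative only:
/// <code>
/// using var client = new RedumpHttpClient();
/// var wipIds = await client.CheckSingleWIPPage("http://redump.org/discs-wip/");
/// </code>
/// </example>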
public async Task<List<int>> CheckSingleWIPPage(string url)
{
List<int> ids = new();
// Try up to 3 times to retrieve the data
string dumpsPage = await DownloadString(url, retries: 3);
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
return ids;
// Otherwise, traverse each dump on the page
var matches = Constants.NewDiscRegex.Matches(dumpsPage);
foreach (Match match in matches)
{
try
{
if (int.TryParse(match.Groups[2].Value, out int value))
ids.Add(value);
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
continue;
}
}
return ids;
}
/// <summary>
/// Process a Redump WIP page as a list of possible IDs or disc page
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="failOnSingle">True to return on first error, false otherwise</param>
/// <returns>True if the page could be downloaded, false otherwise</returns>
public async Task<bool> CheckSingleWIPPage(string url, string outDir, bool failOnSingle)
{
// Try up to 3 times to retrieve the data
string dumpsPage = await DownloadString(url, retries: 3);
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
return false;
// Otherwise, traverse each dump on the page
var matches = Constants.NewDiscRegex.Matches(dumpsPage);
foreach (Match match in matches)
{
try
{
if (int.TryParse(match.Groups[2].Value, out int value))
{
bool downloaded = await DownloadSingleWIPID(value, outDir, false);
if (!downloaded && failOnSingle)
return false;
}
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
continue;
}
}
return true;
}
#endregion
#region Download Helpers
/// <summary>
/// Download a single pack
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="system">System to download packs for</param>
/// <returns>Byte array containing the downloaded pack, null on error</returns>
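/// <example>
/// A minimal usage sketch; the pack URL format string and system value are illustrative (the real URLs live in Constants):
/// <code>
/// using var client = new RedumpHttpClient();
/// var pack = await client.DownloadSinglePack("http://redump.org/datfile/{0}/", RedumpSystem.SonyPlayStation);
/// </code>
/// </example>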
public async Task<byte[]> DownloadSinglePack(string url, RedumpSystem? system)
{
try
{
return await GetByteArrayAsync(string.Format(url, system.ShortName()));
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return null;
}
}
/// <summary>
/// Download a single pack
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="system">System to download packs for</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="subfolder">Named subfolder for the pack, used optionally</param>
/// <returns>True if the pack was downloaded, false otherwise</returns>
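/// <example>
/// A minimal usage sketch; URL, system, and paths are illustrative only:
/// <code>
/// using var client = new RedumpHttpClient();
/// bool ok = await client.DownloadSinglePack("http://redump.org/datfile/{0}/", RedumpSystem.SonyPlayStation, "packs", "datfiles");
/// </code>
/// </example>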
public async Task<bool> DownloadSinglePack(string url, RedumpSystem? system, string outDir, string subfolder)
{
try
{
// If no output directory is defined, use the current directory instead
if (string.IsNullOrWhiteSpace(outDir))
outDir = Environment.CurrentDirectory;
string tempfile = Path.Combine(outDir, "tmp" + Guid.NewGuid().ToString());
string packUri = string.Format(url, system.ShortName());
// Make the call to get the pack
string remoteFileName = await DownloadFile(packUri, tempfile);
MoveOrDelete(tempfile, remoteFileName, outDir, subfolder);
return true;
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return false;
}
}
/// <summary>
/// Download an individual site ID data, if possible
/// </summary>
/// <param name="id">Redump disc ID to retrieve</param>
/// <returns>String containing the page contents if successful, null on error</returns>
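/// <example>
/// A minimal usage sketch; disc ID 1 is used purely as an example value:
/// <code>
/// using var client = new RedumpHttpClient();
/// string discPage = await client.DownloadSingleSiteID(1);
/// if (discPage != null)
///     Console.WriteLine("Disc page retrieved");
/// </code>
/// </example>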
public async Task<string> DownloadSingleSiteID(int id)
{
string paddedId = id.ToString().PadLeft(5, '0');
Console.WriteLine($"Processing ID: {paddedId}");
try
{
// Try up to 3 times to retrieve the data
string discPageUri = string.Format(Constants.DiscPageUrl, id);
string discPage = await DownloadString(discPageUri, retries: 3);
if (discPage == null || discPage.Contains($"Disc with ID \"{id}\" doesn't exist"))
{
Console.WriteLine($"ID {paddedId} could not be found!");
return null;
}
Console.WriteLine($"ID {paddedId} has been successfully downloaded");
return discPage;
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return null;
}
}
/// <summary>
/// Download an individual site ID data, if possible
/// </summary>
/// <param name="id">Redump disc ID to retrieve</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="rename">True to rename deleted entries, false otherwise</param>
/// <returns>True if all data was downloaded, false otherwise</returns>
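/// <example>
/// A minimal usage sketch; the ID and output directory are illustrative only:
/// <code>
/// using var client = new RedumpHttpClient();
/// bool downloaded = await client.DownloadSingleSiteID(1, "redump-site", rename: true);
/// </code>
/// </example>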
public async Task<bool> DownloadSingleSiteID(int id, string outDir, bool rename)
{
// If no output directory is defined, use the current directory instead
if (string.IsNullOrWhiteSpace(outDir))
outDir = Environment.CurrentDirectory;
string paddedId = id.ToString().PadLeft(5, '0');
string paddedIdDir = Path.Combine(outDir, paddedId);
Console.WriteLine($"Processing ID: {paddedId}");
try
{
// Try up to 3 times to retrieve the data
string discPageUri = string.Format(Constants.DiscPageUrl, id);
string discPage = await DownloadString(discPageUri, retries: 3);
if (discPage == null || discPage.Contains($"Disc with ID \"{id}\" doesn't exist"))
{
try
{
if (rename)
{
if (Directory.Exists(paddedIdDir))
Directory.Move(paddedIdDir, paddedIdDir + "-deleted");
else
Directory.CreateDirectory(paddedIdDir + "-deleted");
}
}
catch { }
Console.WriteLine($"ID {paddedId} could not be found!");
return false;
}
// Check if the page has been updated since the last time it was downloaded, if possible
if (File.Exists(Path.Combine(paddedIdDir, "disc.html")))
{
// Read in the cached file
var oldDiscPage = File.ReadAllText(Path.Combine(paddedIdDir, "disc.html"));
// Check for the last modified date in both pages
var oldResult = Constants.LastModifiedRegex.Match(oldDiscPage);
var newResult = Constants.LastModifiedRegex.Match(discPage);
// If both pages contain the same modified date, skip it
if (oldResult.Success && newResult.Success && oldResult.Groups[1].Value == newResult.Groups[1].Value)
{
Console.WriteLine($"ID {paddedId} has not been changed since last download");
return false;
}
// If neither page contains a modified date, skip it
else if (!oldResult.Success && !newResult.Success)
{
Console.WriteLine($"ID {paddedId} has not been changed since last download");
return false;
}
}
// Create ID subdirectory
Directory.CreateDirectory(paddedIdDir);
// View Edit History and the related per-disc files would be retrieved here;
// this sketch only saves the disc page HTML itself.
using (var discStreamWriter = File.CreateText(Path.Combine(paddedIdDir, "disc.html")))
{
discStreamWriter.Write(discPage);
}
Console.WriteLine($"ID {paddedId} has been successfully downloaded");
return true;
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return false;
}
}
/// <summary>
/// Download an individual WIP ID data, if possible
/// </summary>
/// <param name="id">Redump WIP disc ID to retrieve</param>
/// <returns>String containing the page contents if successful, null on error</returns>
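/// <example>
/// A minimal usage sketch; the WIP ID is illustrative, and a prior Login call may be required to view WIP content:
/// <code>
/// using var client = new RedumpHttpClient();
/// await client.Login("myUser", "myPassword");
/// string wipPage = await client.DownloadSingleWIPID(1);
/// </code>
/// </example>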
public async Task<string> DownloadSingleWIPID(int id)
{
string paddedId = id.ToString().PadLeft(5, '0');
Console.WriteLine($"Processing ID: {paddedId}");
try
{
// Try up to 3 times to retrieve the data
string discPageUri = string.Format(Constants.WipDiscPageUrl, id);
string discPage = await DownloadString(discPageUri, retries: 3);
if (discPage == null || discPage.Contains($"WIP disc with ID \"{id}\" doesn't exist"))
{
Console.WriteLine($"ID {paddedId} could not be found!");
return null;
}
Console.WriteLine($"ID {paddedId} has been successfully downloaded");
return discPage;
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return null;
}
}
/// <summary>
/// Download an individual WIP ID data, if possible
/// </summary>
/// <param name="id">Redump WIP disc ID to retrieve</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="rename">True to rename deleted entries, false otherwise</param>
/// <returns>True if all data was downloaded, false otherwise</returns>
public async Task<bool> DownloadSingleWIPID(int id, string outDir, bool rename)
{
// If no output directory is defined, use the current directory instead
if (string.IsNullOrWhiteSpace(outDir))
outDir = Environment.CurrentDirectory;
string paddedId = id.ToString().PadLeft(5, '0');
string paddedIdDir = Path.Combine(outDir, paddedId);
Console.WriteLine($"Processing ID: {paddedId}");
try
{
// Try up to 3 times to retrieve the data
string discPageUri = string.Format(Constants.WipDiscPageUrl, id);
string discPage = await DownloadString(discPageUri, retries: 3);
if (discPage == null || discPage.Contains($"WIP disc with ID \"{id}\" doesn't exist"))
{
try
{
if (rename)
{
if (Directory.Exists(paddedIdDir))
Directory.Move(paddedIdDir, paddedIdDir + "-deleted");
else
Directory.CreateDirectory(paddedIdDir + "-deleted");
}
}
catch { }
Console.WriteLine($"ID {paddedId} could not be found!");
return false;
}
// Check if the page has been updated since the last time it was downloaded, if possible
if (File.Exists(Path.Combine(paddedIdDir, "disc.html")))
{
// Read in the cached file
var oldDiscPage = File.ReadAllText(Path.Combine(paddedIdDir, "disc.html"));
// Check for the full match ID in both pages
var oldResult = Constants.FullMatchRegex.Match(oldDiscPage);
var newResult = Constants.FullMatchRegex.Match(discPage);
// If both pages contain the same ID, skip it
if (oldResult.Success && newResult.Success && oldResult.Groups[1].Value == newResult.Groups[1].Value)
{
Console.WriteLine($"ID {paddedId} has not been changed since last download");
return false;
}
// If neither page contains an ID, skip it
else if (!oldResult.Success && !newResult.Success)
{
Console.WriteLine($"ID {paddedId} has not been changed since last download");
return false;
}
}
// Create ID subdirectory
Directory.CreateDirectory(paddedIdDir);
// HTML
using (var discStreamWriter = File.CreateText(Path.Combine(paddedIdDir, "disc.html")))
{
discStreamWriter.Write(discPage);
}
Console.WriteLine($"ID {paddedId} has been successfully downloaded");
return true;
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return false;
}
}
#endregion
#region Helpers
/// <summary>
/// Download a set of packs
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="systems">Systems to download packs for</param>
/// <param name="title">Name of the pack that is downloading</param>
/// <returns>Dictionary mapping each system to its downloaded pack data</returns>
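/// <example>
/// A minimal usage sketch; the URL format string and system values are illustrative only:
/// <code>
/// using var client = new RedumpHttpClient();
/// var systems = new RedumpSystem?[] { RedumpSystem.SonyPlayStation, RedumpSystem.SegaDreamcast };
/// var packs = await client.DownloadPacks("http://redump.org/datfile/{0}/", systems, "standard DATs");
/// </code>
/// </example>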
public async Task<Dictionary<RedumpSystem, byte[]>> DownloadPacks(string url, RedumpSystem?[] systems, string title)
{
var packsDictionary = new Dictionary<RedumpSystem, byte[]>();
Console.WriteLine($"Downloading {title}");
foreach (var system in systems)
{
// If the system is invalid, we can't do anything
if (system == null || !system.IsAvailable())
continue;
// If we didn't have credentials
if (!LoggedIn && system.IsBanned())
continue;
// If the system is unknown, we can't do anything
string longName = system.LongName();
if (string.IsNullOrWhiteSpace(longName))
continue;
Console.Write($"\r{longName}{new string(' ', Console.BufferWidth - longName.Length - 1)}");
byte[] pack = await DownloadSinglePack(url, system);
if (pack != null)
packsDictionary.Add(system.Value, pack);
}
Console.Write($"\rComplete!{new string(' ', Console.BufferWidth - 10)}");
Console.WriteLine();
return packsDictionary;
}
/// <summary>
/// Download a set of packs
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="systems">Systems to download packs for</param>
/// <param name="title">Name of the pack that is downloading</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="subfolder">Named subfolder for the pack, used optionally</param>
/// <returns>True when all requested packs have been processed</returns>
public async Task<bool> DownloadPacks(string url, RedumpSystem?[] systems, string title, string outDir, string subfolder)
{
Console.WriteLine($"Downloading {title}");
foreach (var system in systems)
{
// If the system is invalid, we can't do anything
if (system == null || !system.IsAvailable())
continue;
// If we didn't have credentials
if (!LoggedIn && system.IsBanned())
continue;
// If the system is unknown, we can't do anything
string longName = system.LongName();
if (string.IsNullOrWhiteSpace(longName))
continue;
Console.Write($"\r{longName}{new string(' ', Console.BufferWidth - longName.Length - 1)}");
await DownloadSinglePack(url, system, outDir, subfolder);
}
Console.Write($"\rComplete!{new string(' ', Console.BufferWidth - 10)}");
Console.WriteLine();
return true;
}
/// <summary>
/// Download from a URI to a local file
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <param name="fileName">Filename to write to</param>
/// <returns>The remote filename from the URI, null on error</returns>
private async Task<string> DownloadFile(string uri, string fileName)
{
// Make the call to get the file
var response = await GetAsync(uri);
if (response?.Content?.Headers == null || !response.IsSuccessStatusCode)
{
Console.WriteLine($"Could not download {uri}");
return null;
}
// Copy the data to a local temp file
using (var responseStream = await response.Content.ReadAsStreamAsync())
using (var tempFileStream = File.OpenWrite(fileName))
{
await responseStream.CopyToAsync(tempFileStream);
}
return response.Content.Headers.ContentDisposition?.FileName?.Replace("\"", "");
}
/// <summary>
/// Download from a URI to a string
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <param name="retries">Number of times to retry on error</param>
/// <returns>String from the URI, null on error</returns>
private async Task<string> DownloadString(string uri, int retries = 3)
{
// Only retry a positive number of times
if (retries <= 0)
return null;
for (int i = 0; i < retries; i++)
{
try
{
return await GetStringAsync(uri);
}
catch { }
}
return null;
}
/// <summary>
/// Move a tempfile to a new name unless it already exists, in which case, delete the tempfile
/// </summary>
/// <param name="tempfile">Path to existing temporary file</param>
/// <param name="newfile">Path to new output file</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="subfolder">Optional subfolder to append to the path</param>
private static void MoveOrDelete(string tempfile, string newfile, string outDir, string subfolder)
{
// If we don't have a file to move to, just delete the temp file
if (string.IsNullOrWhiteSpace(newfile))
{
File.Delete(tempfile);
return;
}
// If we have a subfolder, create it and update the newfile name
if (!string.IsNullOrWhiteSpace(subfolder))
{
if (!Directory.Exists(Path.Combine(outDir, subfolder)))
Directory.CreateDirectory(Path.Combine(outDir, subfolder));
newfile = Path.Combine(subfolder, newfile);
}
// If the file already exists, don't overwrite it
if (File.Exists(Path.Combine(outDir, newfile)))
File.Delete(tempfile);
else
File.Move(tempfile, Path.Combine(outDir, newfile));
}
#endregion
}
}
#endif