2024-11-12 21:12:06 -05:00
using System ;
2020-12-09 21:52:38 -08:00
using System.Collections.Generic ;
using System.IO ;
using System.Linq ;
2024-03-05 03:04:47 -05:00
#if NET40_OR_GREATER | | NETCOREAPP
2025-01-14 15:32:14 -05:00
using System.Net ;
2020-12-09 21:52:38 -08:00
using System.Threading.Tasks ;
2024-03-05 03:04:47 -05:00
#endif
2024-03-11 15:23:10 -04:00
using SabreTools.Core.Tools ;
2020-12-10 23:24:09 -08:00
using SabreTools.DatFiles ;
2020-12-09 21:52:38 -08:00
using SabreTools.DatItems ;
2021-02-02 10:23:43 -08:00
using SabreTools.DatItems.Formats ;
2024-04-24 13:45:38 -04:00
using SabreTools.IO.Extensions ;
2024-10-24 00:36:44 -04:00
using SabreTools.IO.Logging ;
2024-10-19 11:43:11 -04:00
using SabreTools.Matching.Compare ;
2020-12-09 21:52:38 -08:00
2020-12-10 23:24:09 -08:00
namespace SabreTools.DatTools
2020-12-09 21:52:38 -08:00
{
2020-12-21 11:38:56 -08:00
/// <summary>
/// Helper methods for splitting DatFiles
/// </summary>
/// <remarks>TODO: Implement Level split</remarks>
2020-12-10 13:58:08 -08:00
public class Splitter
2020-12-09 21:52:38 -08:00
{
2021-02-17 17:02:54 -08:00
        #region Logging

        /// <summary>
        /// Shared static logging object for the splitting helpers
        /// </summary>
        /// <remarks>NOTE(review): not referenced anywhere in the visible portion of this file — confirm it is used before removing</remarks>
        private static readonly Logger _staticLogger = new();

        #endregion
2020-12-09 21:52:38 -08:00
/// <summary>
/// Split a DAT by input extensions
/// </summary>
2020-12-10 11:38:30 -08:00
/// <param name="datFile">Current DatFile object to split</param>
2020-12-09 21:52:38 -08:00
/// <param name="extA">List of extensions to split on (first DAT)</param>
/// <param name="extB">List of extensions to split on (second DAT)</param>
/// <returns>Extension Set A and Extension Set B DatFiles</returns>
2024-02-28 19:19:50 -05:00
public static ( DatFile ? extADat , DatFile ? extBDat ) SplitByExtension ( DatFile datFile , List < string > extA , List < string > extB )
2020-12-09 21:52:38 -08:00
{
// If roms is empty, return false
2025-01-12 23:15:30 -05:00
if ( datFile . DatStatistics . TotalCount = = 0 )
2020-12-09 21:52:38 -08:00
return ( null , null ) ;
2023-04-19 16:39:58 -04:00
InternalStopwatch watch = new ( $"Splitting DAT by extension" ) ;
2021-02-02 14:09:49 -08:00
2020-12-09 21:52:38 -08:00
// Make sure all of the extensions don't have a dot at the beginning
2024-11-12 21:12:06 -05:00
var newExtA = extA . ConvertAll ( s = > s . TrimStart ( '.' ) . ToLowerInvariant ( ) ) . ToArray ( ) ;
2020-12-09 21:52:38 -08:00
string newExtAString = string . Join ( "," , newExtA ) ;
2024-11-12 21:12:06 -05:00
var newExtB = extB . ConvertAll ( s = > s . TrimStart ( '.' ) . ToLowerInvariant ( ) ) . ToArray ( ) ;
2020-12-09 21:52:38 -08:00
string newExtBString = string . Join ( "," , newExtB ) ;
// Set all of the appropriate outputs for each of the subsets
2025-01-08 17:11:52 -05:00
DatFile extADat = DatFileTool . CreateDatFile ( datFile . Header . CloneStandard ( ) ) ;
2024-03-11 15:46:44 -04:00
extADat . Header . SetFieldValue < string? > ( DatHeader . FileNameKey , extADat . Header . GetStringFieldValue ( DatHeader . FileNameKey ) + $" ({newExtAString})" ) ;
extADat . Header . SetFieldValue < string? > ( Models . Metadata . Header . NameKey , extADat . Header . GetStringFieldValue ( Models . Metadata . Header . NameKey ) + $" ({newExtAString})" ) ;
extADat . Header . SetFieldValue < string? > ( Models . Metadata . Header . DescriptionKey , extADat . Header . GetStringFieldValue ( Models . Metadata . Header . DescriptionKey ) + $" ({newExtAString})" ) ;
2020-12-09 21:52:38 -08:00
2025-01-08 17:11:52 -05:00
DatFile extBDat = DatFileTool . CreateDatFile ( datFile . Header . CloneStandard ( ) ) ;
2024-03-11 15:46:44 -04:00
extBDat . Header . SetFieldValue < string? > ( DatHeader . FileNameKey , extBDat . Header . GetStringFieldValue ( DatHeader . FileNameKey ) + $" ({newExtBString})" ) ;
extBDat . Header . SetFieldValue < string? > ( Models . Metadata . Header . NameKey , extBDat . Header . GetStringFieldValue ( Models . Metadata . Header . NameKey ) + $" ({newExtBString})" ) ;
extBDat . Header . SetFieldValue < string? > ( Models . Metadata . Header . DescriptionKey , extBDat . Header . GetStringFieldValue ( Models . Metadata . Header . DescriptionKey ) + $" ({newExtBString})" ) ;
2020-12-09 21:52:38 -08:00
// Now separate the roms accordingly
2024-02-28 22:54:56 -05:00
#if NET452_OR_GREATER | | NETCOREAPP
2025-01-14 15:32:14 -05:00
Parallel . ForEach ( datFile . Items . SortedKeys , Core . Globals . ParallelOptions , key = >
2024-02-28 22:54:56 -05:00
#elif NET40_OR_GREATER
2025-01-14 15:32:14 -05:00
Parallel . ForEach ( datFile . Items . SortedKeys , key = >
2024-02-28 22:54:56 -05:00
#else
2025-01-14 15:32:14 -05:00
foreach ( var key in datFile . Items . SortedKeys )
2024-02-28 22:54:56 -05:00
#endif
2020-12-09 21:52:38 -08:00
{
2025-01-12 23:15:30 -05:00
var items = datFile . GetItemsForBucket ( key ) ;
2024-02-28 19:19:50 -05:00
if ( items = = null )
2024-02-29 00:14:16 -05:00
#if NET40_OR_GREATER | | NETCOREAPP
2024-02-28 19:19:50 -05:00
return ;
2024-02-29 00:14:16 -05:00
#else
continue ;
#endif
2024-02-28 19:19:50 -05:00
2020-12-09 21:52:38 -08:00
foreach ( DatItem item in items )
{
2024-11-12 21:12:06 -05:00
if ( Array . IndexOf ( newExtA , ( item . GetName ( ) ? ? string . Empty ) . GetNormalizedExtension ( ) ) > - 1 )
2020-12-09 21:52:38 -08:00
{
2025-01-14 15:32:14 -05:00
extADat . AddItem ( item , statsOnly : false ) ;
2020-12-09 21:52:38 -08:00
}
2024-11-12 21:12:06 -05:00
if ( Array . IndexOf ( newExtB , ( item . GetName ( ) ? ? string . Empty ) . GetNormalizedExtension ( ) ) > - 1 )
2020-12-09 21:52:38 -08:00
{
2025-01-14 15:32:14 -05:00
extBDat . AddItem ( item , statsOnly : false ) ;
2020-12-09 21:52:38 -08:00
}
else
{
2025-01-14 15:32:14 -05:00
extADat . AddItem ( item , statsOnly : false ) ;
extBDat . AddItem ( item , statsOnly : false ) ;
2020-12-09 21:52:38 -08:00
}
}
2024-02-28 21:59:13 -05:00
#if NET40_OR_GREATER | | NETCOREAPP
2020-12-09 21:52:38 -08:00
} ) ;
2024-02-28 21:59:13 -05:00
#else
}
#endif
2020-12-09 21:52:38 -08:00
// Then return both DatFiles
2021-02-02 14:09:49 -08:00
watch . Stop ( ) ;
2020-12-09 21:52:38 -08:00
return ( extADat , extBDat ) ;
}
2024-03-19 23:35:29 -04:00
        /// <summary>
        /// Split a DAT by input extensions (database-backed item store variant)
        /// </summary>
        /// <param name="datFile">Current DatFile object to split</param>
        /// <param name="extA">List of extensions to split on (first DAT)</param>
        /// <param name="extB">List of extensions to split on (second DAT)</param>
        /// <returns>Extension Set A and Extension Set B DatFiles, or (null, null) if the input has no items</returns>
        public static (DatFile? extADat, DatFile? extBDat) SplitByExtensionDB(DatFile datFile, List<string> extA, List<string> extB)
        {
            // If roms is empty, return false
            if (datFile.ItemsDB.DatStatistics.TotalCount == 0)
                return (null, null);

            InternalStopwatch watch = new($"Splitting DAT by extension");

            // Make sure all of the extensions don't have a dot at the beginning
            // and are lowercased for case-insensitive comparison
            var newExtA = extA.ConvertAll(s => s.TrimStart('.').ToLowerInvariant()).ToArray();
            string newExtAString = string.Join(",", newExtA);

            var newExtB = extB.ConvertAll(s => s.TrimStart('.').ToLowerInvariant()).ToArray();
            string newExtBString = string.Join(",", newExtB);

            // Set all of the appropriate outputs for each of the subsets,
            // suffixing name/description with the extension list for each side
            DatFile extADat = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
            extADat.Header.SetFieldValue<string?>(DatHeader.FileNameKey, extADat.Header.GetStringFieldValue(DatHeader.FileNameKey) + $" ({newExtAString})");
            extADat.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, extADat.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + $" ({newExtAString})");
            extADat.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, extADat.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + $" ({newExtAString})");

            DatFile extBDat = DatFileTool.CreateDatFile(datFile.Header.CloneStandard());
            extBDat.Header.SetFieldValue<string?>(DatHeader.FileNameKey, extBDat.Header.GetStringFieldValue(DatHeader.FileNameKey) + $" ({newExtBString})");
            extBDat.Header.SetFieldValue<string?>(Models.Metadata.Header.NameKey, extBDat.Header.GetStringFieldValue(Models.Metadata.Header.NameKey) + $" ({newExtBString})");
            extBDat.Header.SetFieldValue<string?>(Models.Metadata.Header.DescriptionKey, extBDat.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey) + $" ({newExtBString})");

            // Get all current items, machines, and mappings
            var datItems = datFile.ItemsDB.GetItems();
            var machines = datFile.GetMachinesDB();
            var sources = datFile.ItemsDB.GetSources();

            // Create mappings from old index to new index
            var machineRemapping = new Dictionary<long, long>();
            var sourceRemapping = new Dictionary<long, long>();

            // Loop through and add all sources
            // NOTE(review): the remapping tables are built from extADat's returned indices
            // only, while extBDat receives the same sources/machines in the same order —
            // this assumes both dats assign identical indices; confirm AddSourceDB/AddMachineDB
            // are deterministic in that way
            foreach (var source in sources)
            {
                long newSourceIndex = extADat.AddSourceDB(source.Value);
                _ = extBDat.AddSourceDB(source.Value);
                sourceRemapping[source.Key] = newSourceIndex;
            }

            // Loop through and add all machines
            foreach (var machine in machines)
            {
                long newMachineIndex = extADat.AddMachineDB(machine.Value);
                _ = extBDat.AddMachineDB(machine.Value);
                machineRemapping[machine.Key] = newMachineIndex;
            }

            // Loop through and add the items
#if NET452_OR_GREATER || NETCOREAPP
            Parallel.ForEach(datItems, Core.Globals.ParallelOptions, item =>
#elif NET40_OR_GREATER
            Parallel.ForEach(datItems, item =>
#else
            foreach (var item in datItems)
#endif
            {
                // Get the machine and source index for this item
                long machineIndex = datFile.ItemsDB.GetMachineForItem(item.Key).Key;
                long sourceIndex = datFile.ItemsDB.GetSourceForItem(item.Key).Key;

                // Set A items go to A only, set B items to B only, unmatched items to both
                if (newExtA.Contains((item.Value.GetName() ?? string.Empty).GetNormalizedExtension()))
                {
                    extADat.AddItemDB(item.Value, machineRemapping[machineIndex], sourceRemapping[sourceIndex], statsOnly: false);
                }
                else if (newExtB.Contains((item.Value.GetName() ?? string.Empty).GetNormalizedExtension()))
                {
                    extBDat.AddItemDB(item.Value, machineRemapping[machineIndex], sourceRemapping[sourceIndex], statsOnly: false);
                }
                else
                {
                    extADat.AddItemDB(item.Value, machineRemapping[machineIndex], sourceRemapping[sourceIndex], statsOnly: false);
                    extBDat.AddItemDB(item.Value, machineRemapping[machineIndex], sourceRemapping[sourceIndex], statsOnly: false);
                }
#if NET40_OR_GREATER || NETCOREAPP
            });
#else
            }
#endif

            // Then return both DatFiles
            watch.Stop();
            return (extADat, extBDat);
        }
2020-12-09 21:52:38 -08:00
/// <summary>
/// Split a DAT by best available hashes
/// </summary>
2020-12-10 11:38:30 -08:00
/// <param name="datFile">Current DatFile object to split</param>
2020-12-09 21:52:38 -08:00
/// <returns>Dictionary of Field to DatFile mappings</returns>
2024-03-05 23:41:00 -05:00
public static Dictionary < string , DatFile > SplitByHash ( DatFile datFile )
2020-12-09 21:52:38 -08:00
{
// Create each of the respective output DATs
2024-03-20 10:49:58 -04:00
var watch = new InternalStopwatch ( $"Splitting DAT by best available hashes" ) ;
// Create mapping of keys to suffixes
var mappings = new Dictionary < string , string >
{
[Models.Metadata.Rom.StatusKey] = " (Nodump)" ,
[Models.Metadata.Rom.SHA512Key] = " (SHA-512)" ,
[Models.Metadata.Rom.SHA384Key] = " (SHA-384)" ,
[Models.Metadata.Rom.SHA256Key] = " (SHA-256)" ,
[Models.Metadata.Rom.SHA1Key] = " (SHA-1)" ,
[Models.Metadata.Rom.MD5Key] = " (MD5)" ,
2025-01-09 05:26:36 -05:00
[Models.Metadata.Rom.MD4Key] = " (MD4)" ,
[Models.Metadata.Rom.MD2Key] = " (MD2)" ,
2024-03-20 10:49:58 -04:00
[Models.Metadata.Rom.CRCKey] = " (CRC)" ,
["null"] = " (Other)" ,
} ;
2020-12-09 21:52:38 -08:00
// Create the set of field-to-dat mappings
2024-03-05 23:41:00 -05:00
Dictionary < string , DatFile > fieldDats = [ ] ;
2024-03-20 10:49:58 -04:00
foreach ( var kvp in mappings )
{
2025-01-08 17:11:52 -05:00
fieldDats [ kvp . Key ] = DatFileTool . CreateDatFile ( datFile . Header . CloneStandard ( ) ) ;
2024-03-20 10:49:58 -04:00
fieldDats [ kvp . Key ] . Header . SetFieldValue < string? > ( DatHeader . FileNameKey , fieldDats [ kvp . Key ] . Header . GetStringFieldValue ( DatHeader . FileNameKey ) + kvp . Value ) ;
fieldDats [ kvp . Key ] . Header . SetFieldValue < string? > ( Models . Metadata . Header . NameKey , fieldDats [ kvp . Key ] . Header . GetStringFieldValue ( Models . Metadata . Header . NameKey ) + kvp . Value ) ;
fieldDats [ kvp . Key ] . Header . SetFieldValue < string? > ( Models . Metadata . Header . DescriptionKey , fieldDats [ kvp . Key ] . Header . GetStringFieldValue ( Models . Metadata . Header . DescriptionKey ) + kvp . Value ) ;
}
2020-12-09 21:52:38 -08:00
// Now populate each of the DAT objects in turn
2024-02-28 22:54:56 -05:00
#if NET452_OR_GREATER | | NETCOREAPP
2025-01-14 15:32:14 -05:00
Parallel . ForEach ( datFile . Items . SortedKeys , Core . Globals . ParallelOptions , key = >
2024-02-28 22:54:56 -05:00
#elif NET40_OR_GREATER
2025-01-14 15:32:14 -05:00
Parallel . ForEach ( datFile . Items . SortedKeys , key = >
2024-02-28 22:54:56 -05:00
#else
2025-01-14 15:32:14 -05:00
foreach ( var key in datFile . Items . SortedKeys )
2024-02-28 22:54:56 -05:00
#endif
2020-12-09 21:52:38 -08:00
{
2025-01-12 23:15:30 -05:00
var items = datFile . GetItemsForBucket ( key ) ;
2024-02-28 19:19:50 -05:00
if ( items = = null )
2024-02-29 00:14:16 -05:00
#if NET40_OR_GREATER | | NETCOREAPP
2024-02-28 19:19:50 -05:00
return ;
2024-02-29 00:14:16 -05:00
#else
continue ;
#endif
2020-12-09 21:52:38 -08:00
foreach ( DatItem item in items )
{
// If the file is not a Disk, Media, or Rom, continue
2024-03-10 16:49:07 -04:00
switch ( item )
2020-12-09 21:52:38 -08:00
{
2024-03-10 16:49:07 -04:00
case Disk disk :
2024-03-11 16:26:28 -04:00
if ( disk . GetStringFieldValue ( Models . Metadata . Disk . StatusKey ) . AsEnumValue < ItemStatus > ( ) = = ItemStatus . Nodump )
2025-01-14 15:32:14 -05:00
fieldDats [ Models . Metadata . Disk . StatusKey ] . AddItem ( item , statsOnly : false ) ;
2024-03-11 15:46:44 -04:00
else if ( ! string . IsNullOrEmpty ( disk . GetStringFieldValue ( Models . Metadata . Disk . SHA1Key ) ) )
2025-01-14 15:32:14 -05:00
fieldDats [ Models . Metadata . Disk . SHA1Key ] . AddItem ( item , statsOnly : false ) ;
2024-03-11 15:46:44 -04:00
else if ( ! string . IsNullOrEmpty ( disk . GetStringFieldValue ( Models . Metadata . Disk . MD5Key ) ) )
2025-01-14 15:32:14 -05:00
fieldDats [ Models . Metadata . Disk . MD5Key ] . AddItem ( item , statsOnly : false ) ;
2024-03-11 15:46:44 -04:00
else if ( ! string . IsNullOrEmpty ( disk . GetStringFieldValue ( Models . Metadata . Disk . MD5Key ) ) )
2025-01-14 15:32:14 -05:00
fieldDats [ Models . Metadata . Disk . MD5Key ] . AddItem ( item , statsOnly : false ) ;
2024-03-10 16:49:07 -04:00
else
2025-01-14 15:32:14 -05:00
fieldDats [ "null" ] . AddItem ( item , statsOnly : false ) ;
2024-03-10 16:49:07 -04:00
break ;
case Media media :
2024-03-11 15:46:44 -04:00
if ( ! string . IsNullOrEmpty ( media . GetStringFieldValue ( Models . Metadata . Media . SHA256Key ) ) )
2025-01-14 15:32:14 -05:00
fieldDats [ Models . Metadata . Media . SHA256Key ] . AddItem ( item , statsOnly : false ) ;
2024-03-11 15:46:44 -04:00
else if ( ! string . IsNullOrEmpty ( media . GetStringFieldValue ( Models . Metadata . Media . SHA1Key ) ) )
2025-01-14 15:32:14 -05:00
fieldDats [ Models . Metadata . Media . SHA1Key ] . AddItem ( item , statsOnly : false ) ;
2024-03-11 15:46:44 -04:00
else if ( ! string . IsNullOrEmpty ( media . GetStringFieldValue ( Models . Metadata . Media . MD5Key ) ) )
2025-01-14 15:32:14 -05:00
fieldDats [ Models . Metadata . Media . MD5Key ] . AddItem ( item , statsOnly : false ) ;
2024-03-10 16:49:07 -04:00
else
2025-01-14 15:32:14 -05:00
fieldDats [ "null" ] . AddItem ( item , statsOnly : false ) ;
2024-03-10 16:49:07 -04:00
break ;
case Rom rom :
2024-03-11 16:26:28 -04:00
if ( rom . GetStringFieldValue ( Models . Metadata . Rom . StatusKey ) . AsEnumValue < ItemStatus > ( ) = = ItemStatus . Nodump )
2025-01-14 15:32:14 -05:00
fieldDats [ Models . Metadata . Rom . StatusKey ] . AddItem ( item , statsOnly : false ) ;
2024-03-11 15:46:44 -04:00
else if ( ! string . IsNullOrEmpty ( rom . GetStringFieldValue ( Models . Metadata . Rom . SHA512Key ) ) )
2025-01-14 15:32:14 -05:00
fieldDats [ Models . Metadata . Rom . SHA512Key ] . AddItem ( item , statsOnly : false ) ;
2024-03-11 15:46:44 -04:00
else if ( ! string . IsNullOrEmpty ( rom . GetStringFieldValue ( Models . Metadata . Rom . SHA384Key ) ) )
2025-01-14 15:32:14 -05:00
fieldDats [ Models . Metadata . Rom . SHA384Key ] . AddItem ( item , statsOnly : false ) ;
2024-03-11 15:46:44 -04:00
else if ( ! string . IsNullOrEmpty ( rom . GetStringFieldValue ( Models . Metadata . Rom . SHA256Key ) ) )
2025-01-14 15:32:14 -05:00
fieldDats [ Models . Metadata . Rom . SHA256Key ] . AddItem ( item , statsOnly : false ) ;
2024-03-11 15:46:44 -04:00
else if ( ! string . IsNullOrEmpty ( rom . GetStringFieldValue ( Models . Metadata . Rom . SHA1Key ) ) )
2025-01-14 15:32:14 -05:00
fieldDats [ Models . Metadata . Rom . SHA1Key ] . AddItem ( item , statsOnly : false ) ;
2024-03-11 15:46:44 -04:00
else if ( ! string . IsNullOrEmpty ( rom . GetStringFieldValue ( Models . Metadata . Rom . MD5Key ) ) )
2025-01-14 15:32:14 -05:00
fieldDats [ Models . Metadata . Rom . MD5Key ] . AddItem ( item , statsOnly : false ) ;
2025-01-09 05:26:36 -05:00
else if ( ! string . IsNullOrEmpty ( rom . GetStringFieldValue ( Models . Metadata . Rom . MD4Key ) ) )
2025-01-14 15:32:14 -05:00
fieldDats [ Models . Metadata . Rom . MD4Key ] . AddItem ( item , statsOnly : false ) ;
2025-01-09 05:26:36 -05:00
else if ( ! string . IsNullOrEmpty ( rom . GetStringFieldValue ( Models . Metadata . Rom . MD2Key ) ) )
2025-01-14 15:32:14 -05:00
fieldDats [ Models . Metadata . Rom . MD2Key ] . AddItem ( item , statsOnly : false ) ;
2024-03-11 15:46:44 -04:00
else if ( ! string . IsNullOrEmpty ( rom . GetStringFieldValue ( Models . Metadata . Rom . CRCKey ) ) )
2025-01-14 15:32:14 -05:00
fieldDats [ Models . Metadata . Rom . CRCKey ] . AddItem ( item , statsOnly : false ) ;
2024-03-10 16:49:07 -04:00
else
2025-01-14 15:32:14 -05:00
fieldDats [ "null" ] . AddItem ( item , statsOnly : false ) ;
2024-03-10 16:49:07 -04:00
break ;
default :
continue ;
2020-12-09 21:52:38 -08:00
}
}
2024-02-28 21:59:13 -05:00
#if NET40_OR_GREATER | | NETCOREAPP
2020-12-09 21:52:38 -08:00
} ) ;
2024-02-28 21:59:13 -05:00
#else
}
#endif
2020-12-09 21:52:38 -08:00
2021-02-02 14:09:49 -08:00
watch . Stop ( ) ;
2020-12-09 21:52:38 -08:00
return fieldDats ;
}
2024-03-19 23:35:29 -04:00
/// <summary>
/// Split a DAT by best available hashes
/// </summary>
/// <param name="datFile">Current DatFile object to split</param>
/// <returns>Dictionary of Field to DatFile mappings</returns>
public static Dictionary < string , DatFile > SplitByHashDB ( DatFile datFile )
{
// Create each of the respective output DATs
var watch = new InternalStopwatch ( $"Splitting DAT by best available hashes" ) ;
2024-03-20 10:49:58 -04:00
// Create mapping of keys to suffixes
var mappings = new Dictionary < string , string >
{
[Models.Metadata.Rom.StatusKey] = " (Nodump)" ,
[Models.Metadata.Rom.SHA512Key] = " (SHA-512)" ,
[Models.Metadata.Rom.SHA384Key] = " (SHA-384)" ,
[Models.Metadata.Rom.SHA256Key] = " (SHA-256)" ,
[Models.Metadata.Rom.SHA1Key] = " (SHA-1)" ,
[Models.Metadata.Rom.MD5Key] = " (MD5)" ,
2025-01-09 05:26:36 -05:00
[Models.Metadata.Rom.MD4Key] = " (MD4)" ,
[Models.Metadata.Rom.MD2Key] = " (MD2)" ,
2024-03-20 10:49:58 -04:00
[Models.Metadata.Rom.CRCKey] = " (CRC)" ,
["null"] = " (Other)" ,
} ;
2024-03-19 23:35:29 -04:00
// Create the set of field-to-dat mappings
Dictionary < string , DatFile > fieldDats = [ ] ;
2024-03-20 10:49:58 -04:00
foreach ( var kvp in mappings )
{
2025-01-08 17:11:52 -05:00
fieldDats [ kvp . Key ] = DatFileTool . CreateDatFile ( datFile . Header . CloneStandard ( ) ) ;
2024-03-20 10:49:58 -04:00
fieldDats [ kvp . Key ] . Header . SetFieldValue < string? > ( DatHeader . FileNameKey , fieldDats [ kvp . Key ] . Header . GetStringFieldValue ( DatHeader . FileNameKey ) + kvp . Value ) ;
fieldDats [ kvp . Key ] . Header . SetFieldValue < string? > ( Models . Metadata . Header . NameKey , fieldDats [ kvp . Key ] . Header . GetStringFieldValue ( Models . Metadata . Header . NameKey ) + kvp . Value ) ;
fieldDats [ kvp . Key ] . Header . SetFieldValue < string? > ( Models . Metadata . Header . DescriptionKey , fieldDats [ kvp . Key ] . Header . GetStringFieldValue ( Models . Metadata . Header . DescriptionKey ) + kvp . Value ) ;
}
2024-03-19 23:35:29 -04:00
2024-03-19 23:58:22 -04:00
// Get all current items, machines, and mappings
2024-12-06 23:16:09 -05:00
var datItems = datFile . ItemsDB . GetItems ( ) ;
2025-01-13 10:01:27 -05:00
var machines = datFile . GetMachinesDB ( ) ;
2024-12-06 23:16:09 -05:00
var sources = datFile . ItemsDB . GetSources ( ) ;
2024-03-19 23:58:22 -04:00
2024-03-20 01:29:59 -04:00
// Create mappings from old index to new index
2024-03-19 23:58:22 -04:00
var machineRemapping = new Dictionary < long , long > ( ) ;
2024-03-20 01:29:59 -04:00
var sourceRemapping = new Dictionary < long , long > ( ) ;
// Loop through and add all sources
foreach ( var source in sources )
{
2025-01-12 22:10:48 -05:00
long newSourceIndex = fieldDats [ Models . Metadata . Rom . StatusKey ] . AddSourceDB ( source . Value ) ;
2024-03-20 10:49:58 -04:00
sourceRemapping [ source . Key ] = newSourceIndex ;
2025-01-12 22:10:48 -05:00
_ = fieldDats [ Models . Metadata . Rom . SHA512Key ] . AddSourceDB ( source . Value ) ;
_ = fieldDats [ Models . Metadata . Rom . SHA384Key ] . AddSourceDB ( source . Value ) ;
_ = fieldDats [ Models . Metadata . Rom . SHA256Key ] . AddSourceDB ( source . Value ) ;
_ = fieldDats [ Models . Metadata . Rom . SHA1Key ] . AddSourceDB ( source . Value ) ;
_ = fieldDats [ Models . Metadata . Rom . MD5Key ] . AddSourceDB ( source . Value ) ;
_ = fieldDats [ Models . Metadata . Rom . MD4Key ] . AddSourceDB ( source . Value ) ;
_ = fieldDats [ Models . Metadata . Rom . MD2Key ] . AddSourceDB ( source . Value ) ;
_ = fieldDats [ Models . Metadata . Rom . CRCKey ] . AddSourceDB ( source . Value ) ;
_ = fieldDats [ "null" ] . AddSourceDB ( source . Value ) ;
2024-03-20 01:29:59 -04:00
}
2024-03-19 23:58:22 -04:00
// Loop through and add all machines
foreach ( var machine in machines )
{
2025-01-12 22:10:48 -05:00
long newMachineIndex = fieldDats [ Models . Metadata . Rom . StatusKey ] . AddMachineDB ( machine . Value ) ;
_ = fieldDats [ Models . Metadata . Rom . SHA512Key ] . AddMachineDB ( machine . Value ) ;
_ = fieldDats [ Models . Metadata . Rom . SHA384Key ] . AddMachineDB ( machine . Value ) ;
_ = fieldDats [ Models . Metadata . Rom . SHA256Key ] . AddMachineDB ( machine . Value ) ;
_ = fieldDats [ Models . Metadata . Rom . SHA1Key ] . AddMachineDB ( machine . Value ) ;
_ = fieldDats [ Models . Metadata . Rom . MD5Key ] . AddMachineDB ( machine . Value ) ;
_ = fieldDats [ Models . Metadata . Rom . MD4Key ] . AddMachineDB ( machine . Value ) ;
_ = fieldDats [ Models . Metadata . Rom . MD2Key ] . AddMachineDB ( machine . Value ) ;
_ = fieldDats [ Models . Metadata . Rom . CRCKey ] . AddMachineDB ( machine . Value ) ;
_ = fieldDats [ "null" ] . AddMachineDB ( machine . Value ) ;
2024-03-19 23:58:22 -04:00
machineRemapping [ machine . Key ] = newMachineIndex ;
}
// Loop through and add the items
2024-03-19 23:35:29 -04:00
#if NET452_OR_GREATER | | NETCOREAPP
2024-10-24 05:58:03 -04:00
Parallel . ForEach ( datItems , Core . Globals . ParallelOptions , item = >
2024-03-19 23:35:29 -04:00
#elif NET40_OR_GREATER
2024-03-19 23:58:22 -04:00
Parallel . ForEach ( datItems , item = >
2024-03-19 23:35:29 -04:00
#else
2024-03-19 23:58:22 -04:00
foreach ( var item in datItems )
2024-03-19 23:35:29 -04:00
#endif
{
2024-03-20 01:29:59 -04:00
// Get the machine and source index for this item
2025-01-14 20:48:07 -05:00
long machineIndex = datFile . ItemsDB . GetMachineForItem ( item . Key ) . Key ;
long sourceIndex = datFile . ItemsDB . GetSourceForItem ( item . Key ) . Key ;
2024-03-19 23:58:22 -04:00
// Only process Disk, Media, and Rom
switch ( item . Value )
{
case Disk disk :
if ( disk . GetStringFieldValue ( Models . Metadata . Disk . StatusKey ) . AsEnumValue < ItemStatus > ( ) = = ItemStatus . Nodump )
2025-01-12 22:10:48 -05:00
fieldDats [ Models . Metadata . Disk . StatusKey ] . AddItemDB ( item . Value , machineRemapping [ machineIndex ] , sourceRemapping [ sourceIndex ] , statsOnly : false ) ;
2024-03-19 23:58:22 -04:00
else if ( ! string . IsNullOrEmpty ( disk . GetStringFieldValue ( Models . Metadata . Disk . SHA1Key ) ) )
2025-01-12 22:10:48 -05:00
fieldDats [ Models . Metadata . Disk . SHA1Key ] . AddItemDB ( item . Value , machineRemapping [ machineIndex ] , sourceRemapping [ sourceIndex ] , statsOnly : false ) ;
2024-03-19 23:58:22 -04:00
else if ( ! string . IsNullOrEmpty ( disk . GetStringFieldValue ( Models . Metadata . Disk . MD5Key ) ) )
2025-01-12 22:10:48 -05:00
fieldDats [ Models . Metadata . Disk . MD5Key ] . AddItemDB ( item . Value , machineRemapping [ machineIndex ] , sourceRemapping [ sourceIndex ] , statsOnly : false ) ;
2024-03-19 23:58:22 -04:00
else if ( ! string . IsNullOrEmpty ( disk . GetStringFieldValue ( Models . Metadata . Disk . MD5Key ) ) )
2025-01-12 22:10:48 -05:00
fieldDats [ Models . Metadata . Disk . MD5Key ] . AddItemDB ( item . Value , machineRemapping [ machineIndex ] , sourceRemapping [ sourceIndex ] , statsOnly : false ) ;
2024-03-19 23:58:22 -04:00
else
2025-01-12 22:10:48 -05:00
fieldDats [ "null" ] . AddItemDB ( item . Value , machineRemapping [ machineIndex ] , sourceRemapping [ sourceIndex ] , statsOnly : false ) ;
2024-03-19 23:58:22 -04:00
break ;
case Media media :
if ( ! string . IsNullOrEmpty ( media . GetStringFieldValue ( Models . Metadata . Media . SHA256Key ) ) )
2025-01-12 22:10:48 -05:00
fieldDats [ Models . Metadata . Media . SHA256Key ] . AddItemDB ( item . Value , machineRemapping [ machineIndex ] , sourceRemapping [ sourceIndex ] , statsOnly : false ) ;
2024-03-19 23:58:22 -04:00
else if ( ! string . IsNullOrEmpty ( media . GetStringFieldValue ( Models . Metadata . Media . SHA1Key ) ) )
2025-01-12 22:10:48 -05:00
fieldDats [ Models . Metadata . Media . SHA1Key ] . AddItemDB ( item . Value , machineRemapping [ machineIndex ] , sourceRemapping [ sourceIndex ] , statsOnly : false ) ;
2024-03-19 23:58:22 -04:00
else if ( ! string . IsNullOrEmpty ( media . GetStringFieldValue ( Models . Metadata . Media . MD5Key ) ) )
2025-01-12 22:10:48 -05:00
fieldDats [ Models . Metadata . Media . MD5Key ] . AddItemDB ( item . Value , machineRemapping [ machineIndex ] , sourceRemapping [ sourceIndex ] , statsOnly : false ) ;
2024-03-19 23:58:22 -04:00
else
2025-01-12 22:10:48 -05:00
fieldDats [ "null" ] . AddItemDB ( item . Value , machineRemapping [ machineIndex ] , sourceRemapping [ sourceIndex ] , statsOnly : false ) ;
2024-03-19 23:58:22 -04:00
break ;
case Rom rom :
if ( rom . GetStringFieldValue ( Models . Metadata . Rom . StatusKey ) . AsEnumValue < ItemStatus > ( ) = = ItemStatus . Nodump )
2025-01-12 22:10:48 -05:00
fieldDats [ Models . Metadata . Rom . StatusKey ] . AddItemDB ( item . Value , machineRemapping [ machineIndex ] , sourceRemapping [ sourceIndex ] , statsOnly : false ) ;
2024-03-19 23:58:22 -04:00
else if ( ! string . IsNullOrEmpty ( rom . GetStringFieldValue ( Models . Metadata . Rom . SHA512Key ) ) )
2025-01-12 22:10:48 -05:00
fieldDats [ Models . Metadata . Rom . SHA512Key ] . AddItemDB ( item . Value , machineRemapping [ machineIndex ] , sourceRemapping [ sourceIndex ] , statsOnly : false ) ;
2024-03-19 23:58:22 -04:00
else if ( ! string . IsNullOrEmpty ( rom . GetStringFieldValue ( Models . Metadata . Rom . SHA384Key ) ) )
2025-01-12 22:10:48 -05:00
fieldDats [ Models . Metadata . Rom . SHA384Key ] . AddItemDB ( item . Value , machineRemapping [ machineIndex ] , sourceRemapping [ sourceIndex ] , statsOnly : false ) ;
2024-03-19 23:58:22 -04:00
else if ( ! string . IsNullOrEmpty ( rom . GetStringFieldValue ( Models . Metadata . Rom . SHA256Key ) ) )
2025-01-12 22:10:48 -05:00
fieldDats [ Models . Metadata . Rom . SHA256Key ] . AddItemDB ( item . Value , machineRemapping [ machineIndex ] , sourceRemapping [ sourceIndex ] , statsOnly : false ) ;
2024-03-19 23:58:22 -04:00
else if ( ! string . IsNullOrEmpty ( rom . GetStringFieldValue ( Models . Metadata . Rom . SHA1Key ) ) )
2025-01-12 22:10:48 -05:00
fieldDats [ Models . Metadata . Rom . SHA1Key ] . AddItemDB ( item . Value , machineRemapping [ machineIndex ] , sourceRemapping [ sourceIndex ] , statsOnly : false ) ;
2024-03-19 23:58:22 -04:00
else if ( ! string . IsNullOrEmpty ( rom . GetStringFieldValue ( Models . Metadata . Rom . MD5Key ) ) )
2025-01-12 22:10:48 -05:00
fieldDats [ Models . Metadata . Rom . MD5Key ] . AddItemDB ( item . Value , machineRemapping [ machineIndex ] , sourceRemapping [ sourceIndex ] , statsOnly : false ) ;
2025-01-09 05:26:36 -05:00
else if ( ! string . IsNullOrEmpty ( rom . GetStringFieldValue ( Models . Metadata . Rom . MD4Key ) ) )
2025-01-12 22:10:48 -05:00
fieldDats [ Models . Metadata . Rom . MD4Key ] . AddItemDB ( item . Value , machineRemapping [ machineIndex ] , sourceRemapping [ sourceIndex ] , statsOnly : false ) ;
2025-01-09 05:26:36 -05:00
else if ( ! string . IsNullOrEmpty ( rom . GetStringFieldValue ( Models . Metadata . Rom . MD2Key ) ) )
2025-01-12 22:10:48 -05:00
fieldDats [ Models . Metadata . Rom . MD2Key ] . AddItemDB ( item . Value , machineRemapping [ machineIndex ] , sourceRemapping [ sourceIndex ] , statsOnly : false ) ;
2024-03-19 23:58:22 -04:00
else if ( ! string . IsNullOrEmpty ( rom . GetStringFieldValue ( Models . Metadata . Rom . CRCKey ) ) )
2025-01-12 22:10:48 -05:00
fieldDats [ Models . Metadata . Rom . CRCKey ] . AddItemDB ( item . Value , machineRemapping [ machineIndex ] , sourceRemapping [ sourceIndex ] , statsOnly : false ) ;
2024-03-19 23:58:22 -04:00
else
2025-01-12 22:10:48 -05:00
fieldDats [ "null" ] . AddItemDB ( item . Value , machineRemapping [ machineIndex ] , sourceRemapping [ sourceIndex ] , statsOnly : false ) ;
2024-03-19 23:58:22 -04:00
break ;
default :
2024-03-19 23:35:29 -04:00
#if NET40_OR_GREATER | | NETCOREAPP
2024-03-19 23:58:22 -04:00
return ;
2024-03-19 23:35:29 -04:00
#else
2024-03-19 23:58:22 -04:00
continue ;
2024-03-19 23:35:29 -04:00
#endif
}
#if NET40_OR_GREATER | | NETCOREAPP
} ) ;
#else
}
#endif
watch . Stop ( ) ;
return fieldDats ;
}
2020-12-09 21:52:38 -08:00
/// <summary>
/// Split a SuperDAT by lowest available directory level
/// </summary>
2020-12-10 11:38:30 -08:00
/// <param name="datFile">Current DatFile object to split</param>
2020-12-09 21:52:38 -08:00
/// <param name="outDir">Name of the directory to write the DATs out to</param>
/// <param name="shortname">True if short names should be used, false otherwise</param>
/// <param name="basedat">True if original filenames should be used as the base for output filename, false otherwise</param>
/// <returns>True if split succeeded, false otherwise</returns>
2020-12-10 11:58:46 -08:00
public static bool SplitByLevel ( DatFile datFile , string outDir , bool shortname , bool basedat )
2020-12-09 21:52:38 -08:00
{
2023-04-19 16:39:58 -04:00
InternalStopwatch watch = new ( $"Splitting DAT by level" ) ;
2021-02-02 14:09:49 -08:00
2020-12-09 21:52:38 -08:00
// First, bucket by games so that we can do the right thing
2025-01-14 20:21:54 -05:00
datFile . BucketBy ( ItemKey . Machine , lower : false , norename : true ) ;
2020-12-09 21:52:38 -08:00
// Create a temporary DAT to add things to
2025-01-08 17:11:52 -05:00
DatFile tempDat = DatFileTool . CreateDatFile ( datFile . Header ) ;
2024-03-10 04:10:37 -04:00
tempDat . Header . SetFieldValue < string? > ( Models . Metadata . Header . NameKey , null ) ;
2020-12-09 21:52:38 -08:00
// Sort the input keys
2025-01-14 15:32:14 -05:00
List < string > keys = [ . . datFile . Items . SortedKeys ] ;
2020-12-09 21:52:38 -08:00
keys . Sort ( SplitByLevelSort ) ;
// Then, we loop over the games
2024-02-28 22:54:56 -05:00
#if NET452_OR_GREATER | | NETCOREAPP
2024-10-24 05:58:03 -04:00
Parallel . ForEach ( keys , Core . Globals . ParallelOptions , key = >
2024-02-28 22:54:56 -05:00
#elif NET40_OR_GREATER
Parallel . ForEach ( keys , key = >
#else
foreach ( var key in keys )
#endif
2020-12-09 21:52:38 -08:00
{
// Here, the key is the name of the game to be used for comparison
2024-03-11 15:46:44 -04:00
if ( tempDat . Header . GetStringFieldValue ( Models . Metadata . Header . NameKey ) ! = null & & tempDat . Header . GetStringFieldValue ( Models . Metadata . Header . NameKey ) ! = Path . GetDirectoryName ( key ) )
2020-12-09 21:52:38 -08:00
{
// Reset the DAT for the next items
2025-01-08 17:11:52 -05:00
tempDat = DatFileTool . CreateDatFile ( datFile . Header ) ;
2024-03-10 04:10:37 -04:00
tempDat . Header . SetFieldValue < string? > ( Models . Metadata . Header . NameKey , null ) ;
2020-12-09 21:52:38 -08:00
}
// Clean the input list and set all games to be pathless
2025-01-12 23:15:30 -05:00
List < DatItem > ? items = datFile . GetItemsForBucket ( key ) ;
2024-02-28 19:19:50 -05:00
if ( items = = null )
2024-02-29 00:14:16 -05:00
#if NET40_OR_GREATER | | NETCOREAPP
2024-02-28 19:19:50 -05:00
return ;
2024-02-29 00:14:16 -05:00
#else
continue ;
#endif
2024-03-11 15:46:44 -04:00
items . ForEach ( item = > item . GetFieldValue < Machine > ( DatItem . MachineKey ) ! . SetFieldValue < string? > ( Models . Metadata . Machine . NameKey , Path . GetFileName ( item . GetFieldValue < Machine > ( DatItem . MachineKey ) ! . GetStringFieldValue ( Models . Metadata . Machine . NameKey ) ) ) ) ;
items . ForEach ( item = > item . GetFieldValue < Machine > ( DatItem . MachineKey ) ! . SetFieldValue < string? > ( Models . Metadata . Machine . DescriptionKey , Path . GetFileName ( item . GetFieldValue < Machine > ( DatItem . MachineKey ) ! . GetStringFieldValue ( Models . Metadata . Machine . DescriptionKey ) ) ) ) ;
2020-12-09 21:52:38 -08:00
// Now add the game to the output DAT
2025-01-14 15:32:14 -05:00
items . ForEach ( item = > tempDat . AddItem ( item , statsOnly : false ) ) ;
2020-12-09 21:52:38 -08:00
// Then set the DAT name to be the parent directory name
2024-03-10 04:10:37 -04:00
tempDat . Header . SetFieldValue < string? > ( Models . Metadata . Header . NameKey , Path . GetDirectoryName ( key ) ) ;
2024-02-28 21:59:13 -05:00
#if NET40_OR_GREATER | | NETCOREAPP
2020-12-09 21:52:38 -08:00
} ) ;
2024-02-28 21:59:13 -05:00
#else
}
#endif
2020-12-09 21:52:38 -08:00
2021-02-02 14:09:49 -08:00
watch . Stop ( ) ;
2020-12-09 21:52:38 -08:00
return true ;
}
/// <summary>
/// Helper function for SplitByLevel to sort the input game names
/// </summary>
/// <param name="a">First string to compare</param>
/// <param name="b">Second string to compare</param>
/// <returns>-1 for a coming before b, 0 for a == b, 1 for a coming after b</returns>
2020-12-10 11:58:46 -08:00
private static int SplitByLevelSort ( string a , string b )
2020-12-09 21:52:38 -08:00
{
2023-04-19 16:39:58 -04:00
NaturalComparer nc = new ( ) ;
2020-12-09 21:52:38 -08:00
int adeep = a . Count ( c = > c = = '/' | | c = = '\\' ) ;
int bdeep = b . Count ( c = > c = = '/' | | c = = '\\' ) ;
if ( adeep = = bdeep )
return nc . Compare ( a , b ) ;
return adeep - bdeep ;
}
/// <summary>
/// Helper function for SplitByLevel to clean and write out a DAT
/// </summary>
2020-12-10 11:38:30 -08:00
/// <param name="datFile">Current DatFile object to split</param>
2020-12-09 21:52:38 -08:00
/// <param name="newDatFile">DAT to clean and write out</param>
/// <param name="outDir">Directory to write out to</param>
/// <param name="shortname">True if short naming scheme should be used, false otherwise</param>
/// <param name="restore">True if original filenames should be used as the base for output filename, false otherwise</param>
2020-12-10 11:58:46 -08:00
private static void SplitByLevelHelper ( DatFile datFile , DatFile newDatFile , string outDir , bool shortname , bool restore )
2020-12-09 21:52:38 -08:00
{
// Get the name from the DAT to use separately
2024-03-11 15:46:44 -04:00
string? name = newDatFile . Header . GetStringFieldValue ( Models . Metadata . Header . NameKey ) ;
2024-02-28 19:19:50 -05:00
string? expName = name ? . Replace ( "/" , " - " ) ? . Replace ( "\\" , " - " ) ;
2020-12-09 21:52:38 -08:00
// Now set the new output values
2024-02-29 00:14:16 -05:00
#if NET20 | | NET35
2024-03-10 21:41:49 -04:00
newDatFile . Header . SetFieldValue < string? > ( DatHeader . FileNameKey , string . IsNullOrEmpty ( name )
2024-03-11 15:46:44 -04:00
? datFile . Header . GetStringFieldValue ( DatHeader . FileNameKey )
2024-02-29 00:14:16 -05:00
: ( shortname
? Path . GetFileName ( name )
: expName
2024-03-10 21:41:49 -04:00
) ) ;
2024-02-29 00:14:16 -05:00
#else
2024-03-10 21:41:49 -04:00
newDatFile . Header . SetFieldValue < string? > ( DatHeader . FileNameKey , WebUtility . HtmlDecode ( string . IsNullOrEmpty ( name )
2024-03-11 15:46:44 -04:00
? datFile . Header . GetStringFieldValue ( DatHeader . FileNameKey )
2020-12-09 21:52:38 -08:00
: ( shortname
? Path . GetFileName ( name )
: expName
)
2024-03-10 21:41:49 -04:00
) ) ;
2024-02-29 00:14:16 -05:00
#endif
2024-03-10 21:41:49 -04:00
newDatFile . Header . SetFieldValue < string? > ( DatHeader . FileNameKey , restore
2024-03-11 15:46:44 -04:00
? $"{datFile.Header.GetStringFieldValue(DatHeader.FileNameKey)} ({newDatFile.Header.GetStringFieldValue(DatHeader.FileNameKey)})"
: newDatFile . Header . GetStringFieldValue ( DatHeader . FileNameKey ) ) ;
newDatFile . Header . SetFieldValue < string? > ( Models . Metadata . Header . NameKey , $"{datFile.Header.GetStringFieldValue(Models.Metadata.Header.NameKey)} ({expName})" ) ;
newDatFile . Header . SetFieldValue < string? > ( Models . Metadata . Header . NameKey , string . IsNullOrEmpty ( datFile . Header . GetStringFieldValue ( Models . Metadata . Header . DescriptionKey ) )
? newDatFile . Header . GetStringFieldValue ( Models . Metadata . Header . NameKey )
: $"{datFile.Header.GetStringFieldValue(Models.Metadata.Header.DescriptionKey)} ({expName})" ) ;
2024-03-10 04:10:37 -04:00
newDatFile . Header . SetFieldValue < string? > ( Models . Metadata . Header . TypeKey , null ) ;
2020-12-09 21:52:38 -08:00
// Write out the temporary DAT to the proper directory
2020-12-10 14:03:07 -08:00
Writer . Write ( newDatFile , outDir ) ;
2020-12-09 21:52:38 -08:00
}
/// <summary>
/// Split a DAT by size of Rom
/// </summary>
2020-12-10 11:38:30 -08:00
/// <param name="datFile">Current DatFile object to split</param>
2020-12-09 21:52:38 -08:00
/// <param name="radix">Long value representing the split point</param>
/// <returns>Less Than and Greater Than DatFiles</returns>
2020-12-10 11:58:46 -08:00
public static ( DatFile lessThan , DatFile greaterThan ) SplitBySize ( DatFile datFile , long radix )
2020-12-09 21:52:38 -08:00
{
// Create each of the respective output DATs
2023-04-19 16:39:58 -04:00
InternalStopwatch watch = new ( $"Splitting DAT by size" ) ;
2020-12-09 21:52:38 -08:00
2025-01-08 17:11:52 -05:00
DatFile lessThan = DatFileTool . CreateDatFile ( datFile . Header . CloneStandard ( ) ) ;
2024-03-11 15:46:44 -04:00
lessThan . Header . SetFieldValue < string? > ( DatHeader . FileNameKey , lessThan . Header . GetStringFieldValue ( DatHeader . FileNameKey ) + $" (less than {radix})" ) ;
lessThan . Header . SetFieldValue < string? > ( Models . Metadata . Header . NameKey , lessThan . Header . GetStringFieldValue ( Models . Metadata . Header . NameKey ) + $" (less than {radix})" ) ;
lessThan . Header . SetFieldValue < string? > ( Models . Metadata . Header . DescriptionKey , lessThan . Header . GetStringFieldValue ( Models . Metadata . Header . DescriptionKey ) + $" (less than {radix})" ) ;
2020-12-09 21:52:38 -08:00
2025-01-08 17:11:52 -05:00
DatFile greaterThan = DatFileTool . CreateDatFile ( datFile . Header . CloneStandard ( ) ) ;
2024-03-11 15:46:44 -04:00
greaterThan . Header . SetFieldValue < string? > ( DatHeader . FileNameKey , greaterThan . Header . GetStringFieldValue ( DatHeader . FileNameKey ) + $" (equal-greater than {radix})" ) ;
greaterThan . Header . SetFieldValue < string? > ( Models . Metadata . Header . NameKey , greaterThan . Header . GetStringFieldValue ( Models . Metadata . Header . NameKey ) + $" (equal-greater than {radix})" ) ;
greaterThan . Header . SetFieldValue < string? > ( Models . Metadata . Header . DescriptionKey , greaterThan . Header . GetStringFieldValue ( Models . Metadata . Header . DescriptionKey ) + $" (equal-greater than {radix})" ) ;
2020-12-09 21:52:38 -08:00
// Now populate each of the DAT objects in turn
2024-02-28 22:54:56 -05:00
#if NET452_OR_GREATER | | NETCOREAPP
2025-01-14 15:32:14 -05:00
Parallel . ForEach ( datFile . Items . SortedKeys , Core . Globals . ParallelOptions , key = >
2024-02-28 22:54:56 -05:00
#elif NET40_OR_GREATER
2025-01-14 15:32:14 -05:00
Parallel . ForEach ( datFile . Items . SortedKeys , key = >
2024-02-28 22:54:56 -05:00
#else
2025-01-14 15:32:14 -05:00
foreach ( var key in datFile . Items . SortedKeys )
2024-02-28 22:54:56 -05:00
#endif
2020-12-09 21:52:38 -08:00
{
2025-01-12 23:15:30 -05:00
List < DatItem > ? items = datFile . GetItemsForBucket ( key ) ;
2024-02-28 19:19:50 -05:00
if ( items = = null )
2024-02-29 00:14:16 -05:00
#if NET40_OR_GREATER | | NETCOREAPP
2024-02-28 19:19:50 -05:00
return ;
2024-02-29 00:14:16 -05:00
#else
continue ;
#endif
2020-12-09 21:52:38 -08:00
foreach ( DatItem item in items )
{
// If the file is not a Rom, it automatically goes in the "lesser" dat
2024-03-10 16:49:07 -04:00
if ( item is not Rom rom )
2025-01-14 15:32:14 -05:00
lessThan . AddItem ( item , statsOnly : false ) ;
2020-12-09 21:52:38 -08:00
// If the file is a Rom and has no size, put it in the "lesser" dat
2024-03-11 15:46:44 -04:00
else if ( rom . GetInt64FieldValue ( Models . Metadata . Rom . SizeKey ) = = null )
2025-01-14 15:32:14 -05:00
lessThan . AddItem ( item , statsOnly : false ) ;
2020-12-09 21:52:38 -08:00
// If the file is a Rom and less than the radix, put it in the "lesser" dat
2024-03-11 15:46:44 -04:00
else if ( rom . GetInt64FieldValue ( Models . Metadata . Rom . SizeKey ) < radix )
2025-01-14 15:32:14 -05:00
lessThan . AddItem ( item , statsOnly : false ) ;
2020-12-09 21:52:38 -08:00
// If the file is a Rom and greater than or equal to the radix, put it in the "greater" dat
2024-03-11 15:46:44 -04:00
else if ( rom . GetInt64FieldValue ( Models . Metadata . Rom . SizeKey ) > = radix )
2025-01-14 15:32:14 -05:00
greaterThan . AddItem ( item , statsOnly : false ) ;
2020-12-09 21:52:38 -08:00
}
2024-02-28 21:59:13 -05:00
#if NET40_OR_GREATER | | NETCOREAPP
2020-12-09 21:52:38 -08:00
} ) ;
2024-02-28 21:59:13 -05:00
#else
}
#endif
2020-12-09 21:52:38 -08:00
// Then return both DatFiles
2021-02-02 14:09:49 -08:00
watch . Stop ( ) ;
2020-12-09 21:52:38 -08:00
return ( lessThan , greaterThan ) ;
}
2024-03-19 23:35:29 -04:00
/// <summary>
/// Split a DAT by size of Rom
/// </summary>
/// <param name="datFile">Current DatFile object to split</param>
/// <param name="radix">Long value representing the split point</param>
/// <returns>Less Than and Greater Than DatFiles</returns>
public static ( DatFile lessThan , DatFile greaterThan ) SplitBySizeDB ( DatFile datFile , long radix )
{
// Create each of the respective output DATs
var watch = new InternalStopwatch ( $"Splitting DAT by size" ) ;
2025-01-08 17:11:52 -05:00
DatFile lessThan = DatFileTool . CreateDatFile ( datFile . Header . CloneStandard ( ) ) ;
2024-03-19 23:35:29 -04:00
lessThan . Header . SetFieldValue < string? > ( DatHeader . FileNameKey , lessThan . Header . GetStringFieldValue ( DatHeader . FileNameKey ) + $" (less than {radix})" ) ;
lessThan . Header . SetFieldValue < string? > ( Models . Metadata . Header . NameKey , lessThan . Header . GetStringFieldValue ( Models . Metadata . Header . NameKey ) + $" (less than {radix})" ) ;
lessThan . Header . SetFieldValue < string? > ( Models . Metadata . Header . DescriptionKey , lessThan . Header . GetStringFieldValue ( Models . Metadata . Header . DescriptionKey ) + $" (less than {radix})" ) ;
2025-01-08 17:11:52 -05:00
DatFile greaterThan = DatFileTool . CreateDatFile ( datFile . Header . CloneStandard ( ) ) ;
2024-03-19 23:35:29 -04:00
greaterThan . Header . SetFieldValue < string? > ( DatHeader . FileNameKey , greaterThan . Header . GetStringFieldValue ( DatHeader . FileNameKey ) + $" (equal-greater than {radix})" ) ;
greaterThan . Header . SetFieldValue < string? > ( Models . Metadata . Header . NameKey , greaterThan . Header . GetStringFieldValue ( Models . Metadata . Header . NameKey ) + $" (equal-greater than {radix})" ) ;
greaterThan . Header . SetFieldValue < string? > ( Models . Metadata . Header . DescriptionKey , greaterThan . Header . GetStringFieldValue ( Models . Metadata . Header . DescriptionKey ) + $" (equal-greater than {radix})" ) ;
2024-03-19 23:58:22 -04:00
// Get all current items, machines, and mappings
2024-12-06 23:16:09 -05:00
var datItems = datFile . ItemsDB . GetItems ( ) ;
2025-01-13 10:01:27 -05:00
var machines = datFile . GetMachinesDB ( ) ;
2024-12-06 23:16:09 -05:00
var sources = datFile . ItemsDB . GetSources ( ) ;
2024-03-19 23:58:22 -04:00
2024-03-20 01:29:59 -04:00
// Create mappings from old index to new index
2024-03-19 23:58:22 -04:00
var machineRemapping = new Dictionary < long , long > ( ) ;
2024-03-20 01:29:59 -04:00
var sourceRemapping = new Dictionary < long , long > ( ) ;
// Loop through and add all sources
foreach ( var source in sources )
{
2025-01-12 22:10:48 -05:00
long newSourceIndex = lessThan . AddSourceDB ( source . Value ) ;
_ = greaterThan . AddSourceDB ( source . Value ) ;
2024-03-20 01:29:59 -04:00
sourceRemapping [ source . Key ] = newSourceIndex ;
}
2024-03-19 23:58:22 -04:00
// Loop through and add all machines
foreach ( var machine in machines )
{
2025-01-12 22:10:48 -05:00
long newMachineIndex = lessThan . AddMachineDB ( machine . Value ) ;
_ = greaterThan . AddMachineDB ( machine . Value ) ;
2024-03-19 23:58:22 -04:00
machineRemapping [ machine . Key ] = newMachineIndex ;
}
// Loop through and add the items
2024-03-19 23:35:29 -04:00
#if NET452_OR_GREATER | | NETCOREAPP
2024-10-24 05:58:03 -04:00
Parallel . ForEach ( datItems , Core . Globals . ParallelOptions , item = >
2024-03-19 23:35:29 -04:00
#elif NET40_OR_GREATER
2024-03-19 23:58:22 -04:00
Parallel . ForEach ( datItems , item = >
2024-03-19 23:35:29 -04:00
#else
2024-03-19 23:58:22 -04:00
foreach ( var item in datItems )
2024-03-19 23:35:29 -04:00
#endif
{
2024-03-20 01:29:59 -04:00
// Get the machine and source index for this item
2025-01-14 20:48:07 -05:00
long machineIndex = datFile . ItemsDB . GetMachineForItem ( item . Key ) . Key ;
long sourceIndex = datFile . ItemsDB . GetSourceForItem ( item . Key ) . Key ;
2024-03-19 23:35:29 -04:00
2024-03-19 23:58:22 -04:00
// If the file is not a Rom, it automatically goes in the "lesser" dat
if ( item . Value is not Rom rom )
2025-01-12 22:10:48 -05:00
lessThan . AddItemDB ( item . Value , machineRemapping [ machineIndex ] , sourceRemapping [ sourceIndex ] , statsOnly : false ) ;
2024-03-19 23:35:29 -04:00
2024-03-19 23:58:22 -04:00
// If the file is a Rom and has no size, put it in the "lesser" dat
else if ( rom . GetInt64FieldValue ( Models . Metadata . Rom . SizeKey ) = = null )
2025-01-12 22:10:48 -05:00
lessThan . AddItemDB ( item . Value , machineRemapping [ machineIndex ] , sourceRemapping [ sourceIndex ] , statsOnly : false ) ;
2024-03-19 23:35:29 -04:00
2024-03-19 23:58:22 -04:00
// If the file is a Rom and less than the radix, put it in the "lesser" dat
else if ( rom . GetInt64FieldValue ( Models . Metadata . Rom . SizeKey ) < radix )
2025-01-12 22:10:48 -05:00
lessThan . AddItemDB ( item . Value , machineRemapping [ machineIndex ] , sourceRemapping [ sourceIndex ] , statsOnly : false ) ;
2024-03-19 23:35:29 -04:00
2024-03-19 23:58:22 -04:00
// If the file is a Rom and greater than or equal to the radix, put it in the "greater" dat
else if ( rom . GetInt64FieldValue ( Models . Metadata . Rom . SizeKey ) > = radix )
2025-01-12 22:10:48 -05:00
greaterThan . AddItemDB ( item . Value , machineRemapping [ machineIndex ] , sourceRemapping [ sourceIndex ] , statsOnly : false ) ;
2024-03-19 23:35:29 -04:00
#if NET40_OR_GREATER | | NETCOREAPP
} ) ;
#else
}
#endif
// Then return both DatFiles
watch . Stop ( ) ;
return ( lessThan , greaterThan ) ;
}
2021-02-17 16:47:32 -08:00
/// <summary>
/// Split a DAT by size of Rom
/// </summary>
/// <param name="datFile">Current DatFile object to split</param>
/// <param name="chunkSize">Long value representing the total size to split at</param>
/// <returns>Less Than and Greater Than DatFiles</returns>
public static List < DatFile > SplitByTotalSize ( DatFile datFile , long chunkSize )
{
// If the size is invalid, just return
if ( chunkSize < = 0 )
2024-02-28 19:19:50 -05:00
return [ ] ;
2021-02-17 16:47:32 -08:00
// Create each of the respective output DATs
2023-04-19 16:39:58 -04:00
InternalStopwatch watch = new ( $"Splitting DAT by total size" ) ;
2021-02-17 16:47:32 -08:00
// Sort the DatFile by machine name
2025-01-14 20:21:54 -05:00
datFile . BucketBy ( ItemKey . Machine ) ;
2021-02-17 16:47:32 -08:00
// Get the keys in a known order for easier sorting
var keys = datFile . Items . SortedKeys ;
// Get the output list
2024-02-28 19:19:50 -05:00
List < DatFile > datFiles = [ ] ;
2021-02-17 16:47:32 -08:00
// Initialize everything
long currentSize = 0 ;
long currentIndex = 0 ;
2025-01-08 17:11:52 -05:00
DatFile currentDat = DatFileTool . CreateDatFile ( datFile . Header . CloneStandard ( ) ) ;
2024-03-11 15:46:44 -04:00
currentDat . Header . SetFieldValue < string? > ( DatHeader . FileNameKey , currentDat . Header . GetStringFieldValue ( DatHeader . FileNameKey ) + $"_{currentIndex}" ) ;
currentDat . Header . SetFieldValue < string? > ( Models . Metadata . Header . NameKey , currentDat . Header . GetStringFieldValue ( Models . Metadata . Header . NameKey ) + $"_{currentIndex}" ) ;
currentDat . Header . SetFieldValue < string? > ( Models . Metadata . Header . DescriptionKey , currentDat . Header . GetStringFieldValue ( Models . Metadata . Header . DescriptionKey ) + $"_{currentIndex}" ) ;
2021-02-17 16:47:32 -08:00
// Loop through each machine
foreach ( string machine in keys )
{
// Get the current machine
2025-01-13 09:37:59 -05:00
var items = datFile . GetItemsForBucket ( machine ) ;
2024-10-19 21:41:08 -04:00
if ( items = = null | | items . Count = = 0 )
2021-02-17 17:02:54 -08:00
{
2025-01-08 16:59:44 -05:00
_staticLogger . Error ( $"{machine} contains no items and will be skipped" ) ;
2021-02-17 16:47:32 -08:00
continue ;
2021-02-17 17:02:54 -08:00
}
2021-02-17 16:47:32 -08:00
// Get the total size of the current machine
long machineSize = 0 ;
foreach ( var item in items )
{
if ( item is Rom rom )
2021-02-17 17:02:54 -08:00
{
2021-02-17 17:13:39 -08:00
// TODO: Should there be more than just a log if a single item is larger than the chunksize?
2024-03-11 15:46:44 -04:00
machineSize + = rom . GetInt64FieldValue ( Models . Metadata . Rom . SizeKey ) ? ? 0 ;
if ( ( rom . GetInt64FieldValue ( Models . Metadata . Rom . SizeKey ) ? ? 0 ) > chunkSize )
2025-01-08 16:59:44 -05:00
_staticLogger . Error ( $"{rom.GetName() ?? string.Empty} in {machine} is larger than {chunkSize}" ) ;
2021-02-17 17:02:54 -08:00
}
}
// If the current machine size is greater than the chunk size by itself, we want to log and skip
// TODO: Should this eventually try to split the machine here?
if ( machineSize > chunkSize )
{
2025-01-08 16:59:44 -05:00
_staticLogger . Error ( $"{machine} is larger than {chunkSize} and will be skipped" ) ;
2021-02-17 17:02:54 -08:00
continue ;
2021-02-17 16:47:32 -08:00
}
// If the current machine size makes the current DatFile too big, split
2021-02-17 17:02:54 -08:00
else if ( currentSize + machineSize > chunkSize )
2021-02-17 16:47:32 -08:00
{
datFiles . Add ( currentDat ) ;
currentSize = 0 ;
currentIndex + + ;
2025-01-08 17:11:52 -05:00
currentDat = DatFileTool . CreateDatFile ( datFile . Header . CloneStandard ( ) ) ;
2024-03-11 15:46:44 -04:00
currentDat . Header . SetFieldValue < string? > ( DatHeader . FileNameKey , currentDat . Header . GetStringFieldValue ( DatHeader . FileNameKey ) + $"_{currentIndex}" ) ;
currentDat . Header . SetFieldValue < string? > ( Models . Metadata . Header . NameKey , currentDat . Header . GetStringFieldValue ( Models . Metadata . Header . NameKey ) + $"_{currentIndex}" ) ;
currentDat . Header . SetFieldValue < string? > ( Models . Metadata . Header . DescriptionKey , currentDat . Header . GetStringFieldValue ( Models . Metadata . Header . DescriptionKey ) + $"_{currentIndex}" ) ;
2021-02-17 16:47:32 -08:00
}
// Add the current machine to the current DatFile
2025-01-14 15:32:14 -05:00
items . ForEach ( item = > currentDat . AddItem ( item , statsOnly : false ) ) ;
2021-02-17 16:47:32 -08:00
currentSize + = machineSize ;
}
// Add the final DatFile to the list
datFiles . Add ( currentDat ) ;
// Then return the list
watch . Stop ( ) ;
return datFiles ;
}
2020-12-09 21:52:38 -08:00
/// <summary>
/// Split a DAT by type of DatItem
/// </summary>
2020-12-10 11:38:30 -08:00
/// <param name="datFile">Current DatFile object to split</param>
2020-12-09 21:52:38 -08:00
/// <returns>Dictionary of ItemType to DatFile mappings</returns>
2020-12-10 11:58:46 -08:00
public static Dictionary < ItemType , DatFile > SplitByType ( DatFile datFile )
2020-12-09 21:52:38 -08:00
{
// Create each of the respective output DATs
2023-04-19 16:39:58 -04:00
InternalStopwatch watch = new ( $"Splitting DAT by item type" ) ;
2020-12-09 21:52:38 -08:00
// Create the set of type-to-dat mappings
2024-02-28 19:19:50 -05:00
Dictionary < ItemType , DatFile > typeDats = [ ] ;
2020-12-09 21:52:38 -08:00
// We only care about a subset of types
2024-02-28 19:19:50 -05:00
List < ItemType > outputTypes =
[
2020-12-09 21:52:38 -08:00
ItemType . Disk ,
ItemType . Media ,
ItemType . Rom ,
ItemType . Sample ,
2024-02-28 19:19:50 -05:00
] ;
2020-12-09 21:52:38 -08:00
// Setup all of the DatFiles
foreach ( ItemType itemType in outputTypes )
{
2025-01-08 17:11:52 -05:00
typeDats [ itemType ] = DatFileTool . CreateDatFile ( datFile . Header . CloneStandard ( ) ) ;
2024-03-11 15:46:44 -04:00
typeDats [ itemType ] . Header . SetFieldValue < string? > ( DatHeader . FileNameKey , typeDats [ itemType ] . Header . GetStringFieldValue ( DatHeader . FileNameKey ) + $" ({itemType})" ) ;
typeDats [ itemType ] . Header . SetFieldValue < string? > ( Models . Metadata . Header . NameKey , typeDats [ itemType ] . Header . GetStringFieldValue ( Models . Metadata . Header . NameKey ) + $" ({itemType})" ) ;
typeDats [ itemType ] . Header . SetFieldValue < string? > ( Models . Metadata . Header . DescriptionKey , typeDats [ itemType ] . Header . GetStringFieldValue ( Models . Metadata . Header . DescriptionKey ) + $" ({itemType})" ) ;
2020-12-09 21:52:38 -08:00
}
// Now populate each of the DAT objects in turn
2024-02-28 22:54:56 -05:00
#if NET452_OR_GREATER | | NETCOREAPP
2024-10-24 05:58:03 -04:00
Parallel . ForEach ( outputTypes , Core . Globals . ParallelOptions , itemType = >
2024-02-28 22:54:56 -05:00
#elif NET40_OR_GREATER
Parallel . ForEach ( outputTypes , itemType = >
#else
foreach ( var itemType in outputTypes )
#endif
2020-12-09 21:52:38 -08:00
{
2020-12-10 11:38:30 -08:00
FillWithItemType ( datFile , typeDats [ itemType ] , itemType ) ;
2024-03-19 23:35:29 -04:00
FillWithItemTypeDB ( datFile , typeDats [ itemType ] , itemType ) ;
2024-02-28 21:59:13 -05:00
#if NET40_OR_GREATER | | NETCOREAPP
2020-12-09 21:52:38 -08:00
} ) ;
2024-02-28 21:59:13 -05:00
#else
}
#endif
2020-12-09 21:52:38 -08:00
2021-02-02 14:09:49 -08:00
watch . Stop ( ) ;
2020-12-09 21:52:38 -08:00
return typeDats ;
}
2020-12-10 11:38:30 -08:00
/// <summary>
/// Fill a DatFile with all items with a particular ItemType
/// </summary>
/// <param name="datFile">Current DatFile object to split</param>
/// <param name="indexDat">DatFile to add found items to</param>
/// <param name="itemType">ItemType to retrieve items for</param>
/// <returns>DatFile containing all items with the ItemType/returns>
2020-12-10 11:58:46 -08:00
private static void FillWithItemType ( DatFile datFile , DatFile indexDat , ItemType itemType )
2020-12-10 11:38:30 -08:00
{
// Loop through and add the items for this index to the output
2024-02-28 22:54:56 -05:00
#if NET452_OR_GREATER | | NETCOREAPP
2025-01-14 15:32:14 -05:00
Parallel . ForEach ( datFile . Items . SortedKeys , Core . Globals . ParallelOptions , key = >
2024-02-28 22:54:56 -05:00
#elif NET40_OR_GREATER
2025-01-14 15:32:14 -05:00
Parallel . ForEach ( datFile . Items . SortedKeys , key = >
2024-02-28 22:54:56 -05:00
#else
2025-01-14 15:32:14 -05:00
foreach ( var key in datFile . Items . SortedKeys )
2024-02-28 22:54:56 -05:00
#endif
2020-12-10 11:38:30 -08:00
{
2025-01-12 23:15:30 -05:00
List < DatItem > items = DatFileTool . Merge ( datFile . GetItemsForBucket ( key ) ) ;
2020-12-10 11:38:30 -08:00
// If the rom list is empty or null, just skip it
if ( items = = null | | items . Count = = 0 )
2024-03-05 02:52:53 -05:00
#if NET40_OR_GREATER | | NETCOREAPP
2020-12-10 11:38:30 -08:00
return ;
2024-03-05 02:52:53 -05:00
#else
continue ;
#endif
2020-12-10 11:38:30 -08:00
foreach ( DatItem item in items )
{
2024-03-11 16:26:28 -04:00
if ( item . GetStringFieldValue ( Models . Metadata . DatItem . TypeKey ) . AsEnumValue < ItemType > ( ) = = itemType )
2025-01-14 15:32:14 -05:00
indexDat . AddItem ( item , statsOnly : false ) ;
2020-12-10 11:38:30 -08:00
}
2024-02-28 21:59:13 -05:00
#if NET40_OR_GREATER | | NETCOREAPP
2020-12-10 11:38:30 -08:00
} ) ;
2024-02-28 21:59:13 -05:00
#else
}
2024-03-19 23:35:29 -04:00
#endif
}
        /// <summary>
        /// Fill a DatFile with all items with a particular ItemType, using the
        /// DB-backed item store (ItemsDB)
        /// </summary>
        /// <param name="datFile">Current DatFile object to split</param>
        /// <param name="indexDat">DatFile to add found items to</param>
        /// <param name="itemType">ItemType to retrieve items for</param>
        private static void FillWithItemTypeDB(DatFile datFile, DatFile indexDat, ItemType itemType)
        {
            // Get all current items, machines, and mappings
            var datItems = datFile.ItemsDB.GetItems();
            var machines = datFile.GetMachinesDB();
            var sources = datFile.ItemsDB.GetSources();

            // Create mappings from old index to new index; items added to
            // indexDat must reference indexDat's own machine/source indices,
            // not the source DAT's
            var machineRemapping = new Dictionary<long, long>();
            var sourceRemapping = new Dictionary<long, long>();

            // Loop through and add all sources
            foreach (var source in sources)
            {
                long newSourceIndex = indexDat.AddSourceDB(source.Value);
                sourceRemapping[source.Key] = newSourceIndex;
            }

            // Loop through and add all machines
            foreach (var machine in machines)
            {
                long newMachineIndex = indexDat.AddMachineDB(machine.Value);
                machineRemapping[machine.Key] = newMachineIndex;
            }

            // Loop through and add the items
            // (parallel on frameworks that support it, sequential otherwise)
#if NET452_OR_GREATER || NETCOREAPP
            Parallel.ForEach(datItems, Core.Globals.ParallelOptions, item =>
#elif NET40_OR_GREATER
            Parallel.ForEach(datItems, item =>
#else
            foreach (var item in datItems)
#endif
            {
                // Get the machine and source index for this item
                long machineIndex = datFile.ItemsDB.GetMachineForItem(item.Key).Key;
                long sourceIndex = datFile.ItemsDB.GetSourceForItem(item.Key).Key;

                // Only forward items of the requested type, re-pointed at
                // indexDat's copies of the machine and source
                if (item.Value.GetStringFieldValue(Models.Metadata.DatItem.TypeKey).AsEnumValue<ItemType>() == itemType)
                    indexDat.AddItemDB(item.Value, machineRemapping[machineIndex], sourceRemapping[sourceIndex], statsOnly: false);
#if NET40_OR_GREATER || NETCOREAPP
            });
#else
            }
#endif
        }
2020-12-09 21:52:38 -08:00
}
}