Mirror of https://github.com/claunia/SabreTools.git (synced 2025-12-16 19:14:27 +00:00)
Make the Update methods cleaner
@@ -61,14 +61,12 @@ in -old DAT file. Ignores those entries in -old that are not in -new.";
DatFile datfile = DatFile.Create();
datfile.Header.Name = name;
datfile.Header.Description = description;
datfile.Parse(olddat);

// Create the inputs
List<string> dats = new List<string> { newdat };
List<string> basedats = new List<string> { olddat };

// Now run the diff on the inputs
datfile.PopulateUserData(basedats, new ExtraIni(), new Filter());
datfile.DiffAgainst(dats, outdat, false, new ExtraIni(), new Filter(), false);
// Diff against the new datfile
DatFile intDat = DatFile.CreateAndParse(newdat);
datfile.DiffAgainst(intDat, false);
intDat.Write(outdat);
}
}
}

@@ -53,15 +53,12 @@ namespace RombaSharp.Features
}

// Create the encapsulating datfile
DatFile datfile = DatFile.Create();
DatFile datfile = DatFile.CreateAndParse(olddat);

// Create the inputs
List<string> dats = new List<string> { newdat };
List<string> basedats = new List<string> { olddat };

// Now run the diff on the inputs
datfile.PopulateUserData(basedats, new ExtraIni(), new Filter());
datfile.DiffAgainst(dats, outdat, false, new ExtraIni(), new Filter(), false);
// Diff against the new datfile
DatFile intDat = DatFile.CreateAndParse(newdat);
datfile.DiffAgainst(intDat, false);
intDat.Write(outdat);
}
}
}

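Both RombaSharp hunks above converge on the same simplified call sequence: parse each DAT directly and hand the new one to the single-DatFile DiffAgainst overload introduced later in this commit. A minimal sketch of that sequence, assuming olddat, newdat, and outdat are the feature's existing path strings:

    // Parse the base (-old) and candidate (-new) DATs directly
    DatFile datfile = DatFile.CreateAndParse(olddat);
    DatFile intDat = DatFile.CreateAndParse(newdat);

    // false = compare by hashes rather than by game names
    datfile.DiffAgainst(intDat, false);

    // intDat now holds only the entries not present in the base; write it out
    intDat.Write(outdat);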
@@ -230,161 +230,77 @@ namespace SabreTools.Library.DatFiles
/// <summary>
/// Replace item values from the base set represented by the current DAT
/// </summary>
/// <param name="inputs">Names of the input files</param>
/// <param name="outDir">Optional param for output directory</param>
/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
/// <param name="extras">ExtraIni object to apply to the DatFile</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
/// <param name="updateFields">List of Fields representing what should be updated [only for base replacement]</param>
/// <param name="intDat">DatFile to replace the values in</param>
/// <param name="updateFields">List of Fields representing what should be updated</param>
/// <param name="onlySame">True if descriptions should only be replaced if the game name is the same, false otherwise</param>
public void BaseReplace(
List<string> inputs,
string outDir,
bool inplace,
ExtraIni extras,
Filter filter,
List<Field> updateFields,
bool onlySame)
public void BaseReplace(DatFile intDat, List<Field> updateFields, bool onlySame)
{
List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
BaseReplace(paths, outDir, inplace, extras, filter, updateFields, onlySame);
}
Globals.Logger.User($"Replacing items in '{intDat.Header.FileName}' from the base DAT");

/// <summary>
/// Replace item values from the base set represented by the current DAT
/// </summary>
/// <param name="inputs">Names of the input files</param>
/// <param name="outDir">Optional param for output directory</param>
/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
/// <param name="extras">ExtraIni object to apply to the DatFile</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
/// <param name="updateFields">List of Fields representing what should be updated [only for base replacement]</param>
/// <param name="onlySame">True if descriptions should only be replaced if the game name is the same, false otherwise</param>
public void BaseReplace(
List<ParentablePath> inputs,
string outDir,
bool inplace,
ExtraIni extras,
Filter filter,
List<Field> updateFields,
bool onlySame)
{
// We want to try to replace each item in each input DAT from the base
foreach (ParentablePath path in inputs)
// If we are matching based on DatItem fields of any sort
if (updateFields.Intersect(DatItem.DatItemFields).Any())
{
Globals.Logger.User($"Replacing items in '{path.CurrentPath}' from the base DAT");
// For comparison's sake, we want to use CRC as the base bucketing
Items.BucketBy(Field.DatItem_CRC, DedupeType.Full);
intDat.Items.BucketBy(Field.DatItem_CRC, DedupeType.None);

// First we parse in the DAT internally
DatFile intDat = Create(Header.CloneFiltering());
intDat.Parse(path, 1, keep: true);
intDat.ApplyExtras(extras);
intDat.ApplyFilter(filter, false /* useTags */);

// If we are matching based on DatItem fields of any sort
if (updateFields.Intersect(DatItem.DatItemFields).Any())
// Then we do a hashwise comparison against the base DAT
Parallel.ForEach(intDat.Items.Keys, Globals.ParallelOptions, key =>
{
// For comparison's sake, we want to use CRC as the base bucketing
Items.BucketBy(Field.DatItem_CRC, DedupeType.Full);
intDat.Items.BucketBy(Field.DatItem_CRC, DedupeType.None);

// Then we do a hashwise comparison against the base DAT
Parallel.ForEach(intDat.Items.Keys, Globals.ParallelOptions, key =>
List<DatItem> datItems = intDat.Items[key];
List<DatItem> newDatItems = new List<DatItem>();
foreach (DatItem datItem in datItems)
{
List<DatItem> datItems = intDat.Items[key];
List<DatItem> newDatItems = new List<DatItem>();
foreach (DatItem datItem in datItems)
{
List<DatItem> dupes = Items.GetDuplicates(datItem, sorted: true);
DatItem newDatItem = datItem.Clone() as DatItem;
List<DatItem> dupes = Items.GetDuplicates(datItem, sorted: true);
DatItem newDatItem = datItem.Clone() as DatItem;

// Replace fields from the first duplicate, if we have one
if (dupes.Count > 0)
newDatItem.ReplaceFields(dupes.First(), updateFields);
// Replace fields from the first duplicate, if we have one
if (dupes.Count > 0)
newDatItem.ReplaceFields(dupes.First(), updateFields);

newDatItems.Add(newDatItem);
}
newDatItems.Add(newDatItem);
}

// Now add the new list to the key
intDat.Items.Remove(key);
intDat.Items.AddRange(key, newDatItems);
});
}
// Now add the new list to the key
intDat.Items.Remove(key);
intDat.Items.AddRange(key, newDatItems);
});
}

// If we are matching based on Machine fields of any sort
if (updateFields.Intersect(DatItem.MachineFields).Any())
// If we are matching based on Machine fields of any sort
if (updateFields.Intersect(DatItem.MachineFields).Any())
{
// For comparison's sake, we want to use Machine Name as the base bucketing
Items.BucketBy(Field.Machine_Name, DedupeType.Full);
intDat.Items.BucketBy(Field.Machine_Name, DedupeType.None);

// Then we do a namewise comparison against the base DAT
Parallel.ForEach(intDat.Items.Keys, Globals.ParallelOptions, key =>
{
// For comparison's sake, we want to use Machine Name as the base bucketing
Items.BucketBy(Field.Machine_Name, DedupeType.Full);
intDat.Items.BucketBy(Field.Machine_Name, DedupeType.None);

// Then we do a namewise comparison against the base DAT
Parallel.ForEach(intDat.Items.Keys, Globals.ParallelOptions, key =>
List<DatItem> datItems = intDat.Items[key];
List<DatItem> newDatItems = new List<DatItem>();
foreach (DatItem datItem in datItems)
{
List<DatItem> datItems = intDat.Items[key];
List<DatItem> newDatItems = new List<DatItem>();
foreach (DatItem datItem in datItems)
{
DatItem newDatItem = datItem.Clone() as DatItem;
if (Items.ContainsKey(key) && Items[key].Count() > 0)
newDatItem.Machine.ReplaceFields(Items[key][0].Machine, updateFields, onlySame);
DatItem newDatItem = datItem.Clone() as DatItem;
if (Items.ContainsKey(key) && Items[key].Count() > 0)
newDatItem.Machine.ReplaceFields(Items[key][0].Machine, updateFields, onlySame);

newDatItems.Add(newDatItem);
}
newDatItems.Add(newDatItem);
}

// Now add the new list to the key
intDat.Items.Remove(key);
intDat.Items.AddRange(key, newDatItems);
});
}

// Determine the output path for the DAT
string interOutDir = path.GetOutputPath(outDir, inplace);

// Once we're done, try writing out
intDat.Write(interOutDir, overwrite: inplace);

// Due to possible memory requirements, we force a garbage collection
GC.Collect();
// Now add the new list to the key
intDat.Items.Remove(key);
intDat.Items.AddRange(key, newDatItems);
});
}
}

/// <summary>
/// Output diffs against a base set represented by the current DAT
/// </summary>
/// <param name="inputs">Names of the input files</param>
/// <param name="outDir">Optional param for output directory</param>
/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
/// <param name="extras">ExtraIni object to apply to the DatFile</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
/// <param name="intDat">DatFile to replace the values in</param>
/// <param name="useGames">True to diff using games, false to use hashes</param>
public void DiffAgainst(
List<string> inputs,
string outDir,
bool inplace,
ExtraIni extras,
Filter filter,
bool useGames)
{
List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
DiffAgainst(paths, outDir, inplace, extras, filter, useGames);
}

/// <summary>
/// Output diffs against a base set represented by the current DAT
/// </summary>
/// <param name="inputs">Names of the input files</param>
/// <param name="outDir">Optional param for output directory</param>
/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
/// <param name="extras">ExtraIni object to apply to the DatFile</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
/// <param name="useGames">True to diff using games, false to use hashes</param>
public void DiffAgainst(
List<ParentablePath> inputs,
string outDir,
bool inplace,
ExtraIni extras,
Filter filter,
bool useGames)
public void DiffAgainst(DatFile intDat, bool useGames)
{
// For comparison's sake, we want to use a base ordering
if (useGames)
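The new BaseReplace(DatFile intDat, List<Field> updateFields, bool onlySame) no longer parses, filters, or writes anything itself; the per-input work moves to the caller. A sketch of the calling side, modeled on the SabreTools.Features.Update hunks near the end of this commit (baseDat, inputPath, and the option variables are illustrative stand-ins):

    // baseDat is the DatFile already populated from the base DATs via PopulateUserData
    DatFile repDat = DatFile.Create(baseDat.Header.CloneFiltering());
    repDat.Parse(inputPath, indexId: 1, keep: true);
    repDat.ApplyExtras(extras);
    repDat.ApplyFilter(filter, false);

    // Replace DatItem/Machine fields in repDat from the base set
    baseDat.BaseReplace(repDat, updateFields, onlySame);

    // Output handling is now the caller's job as well
    string interOutDir = inputPath.GetOutputPath(outDir, inplace);
    repDat.Write(interOutDir, overwrite: inplace);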
@@ -392,81 +308,62 @@ namespace SabreTools.Library.DatFiles
else
Items.BucketBy(Field.DatItem_CRC, DedupeType.None);

// Now we want to compare each input DAT against the base
foreach (ParentablePath path in inputs)
Globals.Logger.User($"Comparing '{intDat.Header.FileName}' to base DAT");

// For comparison's sake, we want to a the base bucketing
if (useGames)
intDat.Items.BucketBy(Field.Machine_Name, DedupeType.None);
else
intDat.Items.BucketBy(Field.DatItem_CRC, DedupeType.Full);

// Then we compare against the base DAT
List<string> keys = intDat.Items.Keys.ToList();
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
{
Globals.Logger.User($"Comparing '{path.CurrentPath}' to base DAT");

// First we parse in the DAT internally
DatFile intDat = Create(Header.CloneFiltering());
intDat.Parse(path, 1, keep: true);
intDat.ApplyExtras(extras);
intDat.ApplyFilter(filter, false /* useTags */);

// For comparison's sake, we want to a the base bucketing
// Game Against uses game names
if (useGames)
intDat.Items.BucketBy(Field.Machine_Name, DedupeType.None);
else
intDat.Items.BucketBy(Field.DatItem_CRC, DedupeType.Full);

// Then we compare against the base DAT
List<string> keys = intDat.Items.Keys.ToList();
Parallel.ForEach(keys, Globals.ParallelOptions, key =>
{
// Game Against uses game names
if (useGames)
// If the base DAT doesn't contain the key, keep it
if (!Items.ContainsKey(key))
return;

// If the number of items is different, then keep it
if (Items[key].Count != intDat.Items[key].Count)
return;

// Otherwise, compare by name and hash the remaining files
bool exactMatch = true;
foreach (DatItem item in intDat.Items[key])
{
// If the base DAT doesn't contain the key, keep it
if (!Items.ContainsKey(key))
return;

// If the number of items is different, then keep it
if (Items[key].Count != intDat.Items[key].Count)
return;

// Otherwise, compare by name and hash the remaining files
bool exactMatch = true;
foreach (DatItem item in intDat.Items[key])
// TODO: Make this granular to name as well
if (!Items[key].Contains(item))
{
// TODO: Make this granular to name as well
if (!Items[key].Contains(item))
{
exactMatch = false;
break;
}
exactMatch = false;
break;
}

// If we have an exact match, remove the game
if (exactMatch)
intDat.Items.Remove(key);
}

// Standard Against uses hashes
else
{
List<DatItem> datItems = intDat.Items[key];
List<DatItem> keepDatItems = new List<DatItem>();
foreach (DatItem datItem in datItems)
{
if (!Items.HasDuplicates(datItem, true))
keepDatItems.Add(datItem);
}

// Now add the new list to the key
// If we have an exact match, remove the game
if (exactMatch)
intDat.Items.Remove(key);
intDat.Items.AddRange(key, keepDatItems);
}

// Standard Against uses hashes
else
{
List<DatItem> datItems = intDat.Items[key];
List<DatItem> keepDatItems = new List<DatItem>();
foreach (DatItem datItem in datItems)
{
if (!Items.HasDuplicates(datItem, true))
keepDatItems.Add(datItem);
}
});

// Determine the output path for the DAT
string interOutDir = path.GetOutputPath(outDir, inplace);

// Once we're done, try writing out
intDat.Write(interOutDir, overwrite: inplace);

// Due to possible memory requirements, we force a garbage collection
GC.Collect();
}
// Now add the new list to the key
intDat.Items.Remove(key);
intDat.Items.AddRange(key, keepDatItems);
}
});
}

/// <summary>
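The useGames flag keeps its old meaning in the new DiffAgainst(DatFile intDat, bool useGames): with games, both sets are bucketed by Machine_Name and any game that matches the base exactly is removed from intDat; with hashes, intDat is bucketed by CRC and only items with no duplicate in the base are kept. A short usage sketch (the path variables are illustrative):

    DatFile baseDat = DatFile.CreateAndParse(basePath);
    DatFile intDat = DatFile.CreateAndParse(newPath);

    // Hash-wise diff: intDat ends up holding only items the base does not contain
    baseDat.DiffAgainst(intDat, false);

    // Game-wise diff would instead be: baseDat.DiffAgainst(intDat, true);
    intDat.Write(outDir);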
@@ -803,55 +700,6 @@ namespace SabreTools.Library.DatFiles
watch.Stop();
}

/// <summary>
/// Output user defined merge
/// </summary>
/// <param name="inputs">List of inputs to write out from</param>
/// <param name="outDir">Output directory to write the DATs to</param>
public void MergeNoDiff(List<string> inputs, string outDir)
{
List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
MergeNoDiff(paths, outDir);
}

/// <summary>
/// Output user defined merge
/// </summary>
/// <param name="inputs">List of inputs to write out from</param>
/// <param name="outDir">Output directory to write the DATs to</param>
public void MergeNoDiff(List<ParentablePath> inputs, string outDir)
{
// If we're in SuperDAT mode, prefix all games with their respective DATs
if (Header.Type == "SuperDAT")
{
Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = Items[key].ToList();
List<DatItem> newItems = new List<DatItem>();
foreach (DatItem item in items)
{
DatItem newItem = item;
string filename = inputs[newItem.Source.Index].CurrentPath;
string rootpath = inputs[newItem.Source.Index].ParentPath;

rootpath += (string.IsNullOrWhiteSpace(rootpath) ? string.Empty : Path.DirectorySeparatorChar.ToString());
filename = filename.Remove(0, rootpath.Length);
newItem.Machine.Name = Path.GetDirectoryName(filename) + Path.DirectorySeparatorChar
+ Path.GetFileNameWithoutExtension(filename) + Path.DirectorySeparatorChar
+ newItem.Machine.Name;

newItems.Add(newItem);
}

Items.Remove(key);
Items.AddRange(key, newItems);
});
}

// Try to output the file
Write(outDir, overwrite: false);
}

/// <summary>
/// Populate the user DatData object from the input files
/// </summary>
@@ -903,60 +751,6 @@ namespace SabreTools.Library.DatFiles
return datFiles.Select(d => d.Header).ToList();
}

/// <summary>
/// Convert, update, and filter a DAT file or set of files
/// </summary>
/// <param name="inputs">Names of the input files and/or folders</param>
/// <param name="outDir">Optional param for output directory</param>
/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
/// <param name="extras">ExtraIni object to apply to the DatFile</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
public void Update(
List<string> inputs,
string outDir,
bool inplace,
ExtraIni extras,
Filter filter)
{
List<ParentablePath> paths = inputs.Select(i => new ParentablePath(i)).ToList();
Update(paths, outDir, inplace, extras, filter);
}

/// <summary>
/// Convert, update, and filter a DAT file or set of files
/// </summary>
/// <param name="inputs">Names of the input files and/or folders</param>
/// <param name="outDir">Optional param for output directory</param>
/// <param name="inplace">True if the output files should overwrite their inputs, false otherwise</param>
/// <param name="extras">ExtraIni object to apply to the DatFile</param>
/// <param name="filter">Filter object to be passed to the DatItem level</param>
public void Update(
List<ParentablePath> inputs,
string outDir,
bool inplace,
ExtraIni extras,
Filter filter)
{
// Iterate over the files
foreach (ParentablePath file in inputs)
{
DatFile innerDatdata = Create(Header);
Globals.Logger.User($"Processing '{Path.GetFileName(file.CurrentPath)}'");
innerDatdata.Parse(file, keep: true,
keepext: innerDatdata.Header.DatFormat.HasFlag(DatFormat.TSV)
|| innerDatdata.Header.DatFormat.HasFlag(DatFormat.CSV)
|| innerDatdata.Header.DatFormat.HasFlag(DatFormat.SSV));
innerDatdata.ApplyExtras(extras);
innerDatdata.ApplyFilter(filter, false /* useTags */);

// Get the correct output path
string realOutDir = file.GetOutputPath(outDir, inplace);

// Try to output the file, overwriting only if it's not in the current directory
innerDatdata.Write(realOutDir, overwrite: inplace);
}
}

#endregion

#region Filtering
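The removed library-side Update overloads are not gone, just relocated: SabreTools.Features.Update now performs the same per-input parse, filter, and write loop itself (see the Features hunks below). A condensed sketch of that loop, with Header, Extras, Filter, OutputDir, and inplace standing in for the feature's members and options:

    foreach (ParentablePath inputPath in inputPaths)
    {
        DatFile datFile = DatFile.Create(Header);
        datFile.Parse(inputPath, keep: true,
            keepext: datFile.Header.DatFormat.HasFlag(DatFormat.TSV)
                || datFile.Header.DatFormat.HasFlag(DatFormat.CSV)
                || datFile.Header.DatFormat.HasFlag(DatFormat.SSV));
        datFile.ApplyExtras(Extras);
        datFile.ApplyFilter(Filter, false /* useTags */);

        // Write next to the input (inplace) or into the chosen output directory
        string realOutDir = inputPath.GetOutputPath(OutputDir, inplace);
        datFile.Write(realOutDir, overwrite: inplace);
    }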
@@ -1148,6 +942,38 @@ namespace SabreTools.Library.DatFiles
return true;
}

/// <summary>
/// Apply SuperDAT naming logic to a merged DatFile
/// </summary>
/// <param name="inputs">List of inputs to use for renaming</param>
public void ApplySuperDAT(List<ParentablePath> inputs)
{
Parallel.ForEach(Items.Keys, Globals.ParallelOptions, key =>
{
List<DatItem> items = Items[key].ToList();
List<DatItem> newItems = new List<DatItem>();
foreach (DatItem item in items)
{
DatItem newItem = item;
string filename = inputs[newItem.Source.Index].CurrentPath;
string rootpath = inputs[newItem.Source.Index].ParentPath;

if (!string.IsNullOrWhiteSpace(rootpath))
rootpath += Path.DirectorySeparatorChar.ToString();

filename = filename.Remove(0, rootpath.Length);
newItem.Machine.Name = Path.GetDirectoryName(filename) + Path.DirectorySeparatorChar
+ Path.GetFileNameWithoutExtension(filename) + Path.DirectorySeparatorChar
+ newItem.Machine.Name;

newItems.Add(newItem);
}

Items.Remove(key);
Items.AddRange(key, newItems);
});
}

/// <summary>
/// Use game descriptions as names in the DAT, updating cloneof/romof/sampleof
/// </summary>

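ApplySuperDAT lifts the SuperDAT renaming that previously lived inside MergeNoDiff into its own method, so the merge path in the feature can call it explicitly before writing. A sketch matching the final Features hunk in this commit (userInputDat, inputPaths, and OutputDir as used there):

    // Merge path: the inputs were already folded into userInputDat by PopulateUserData
    if (string.Equals(userInputDat.Header.Type, "SuperDAT", StringComparison.OrdinalIgnoreCase))
        userInputDat.ApplySuperDAT(inputPaths);

    userInputDat.Write(OutputDir);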
@@ -1,6 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;

using SabreTools.Library.Data;
using SabreTools.Library.DatFiles;
using SabreTools.Library.DatItems;
using SabreTools.Library.Help;
@@ -146,19 +148,32 @@ namespace SabreTools.Features
updateFields = new List<Field>() { Field.DatItem_Name };

// Ensure we only have files in the inputs
List<ParentablePath> inputFileNames = DirectoryExtensions.GetFilesOnly(Inputs, appendparent: true);
List<ParentablePath> baseFileNames = DirectoryExtensions.GetFilesOnly(GetList(features, BaseDatListValue));
List<ParentablePath> inputPaths = DirectoryExtensions.GetFilesOnly(Inputs, appendparent: true);
List<ParentablePath> basePaths = DirectoryExtensions.GetFilesOnly(GetList(features, BaseDatListValue));

// If we're in standard update mode, run through all of the inputs
if (updateMode == UpdateMode.None)
{
DatFile datFile = DatFile.Create(Header);
datFile.Update(
inputFileNames,
OutputDir,
GetBoolean(features, InplaceValue),
Extras,
Filter);
// Loop through each input and update
foreach (ParentablePath inputPath in inputPaths)
{
// Create a new base DatFile
DatFile datFile = DatFile.Create(Header);
Globals.Logger.User($"Processing '{Path.GetFileName(inputPath.CurrentPath)}'");
datFile.Parse(inputPath, keep: true,
keepext: datFile.Header.DatFormat.HasFlag(DatFormat.TSV)
|| datFile.Header.DatFormat.HasFlag(DatFormat.CSV)
|| datFile.Header.DatFormat.HasFlag(DatFormat.SSV));
datFile.ApplyExtras(Extras);
datFile.ApplyFilter(Filter, false /* useTags */);

// Get the correct output path
string realOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));

// Try to output the file, overwriting only if it's not in the current directory
datFile.Write(realOutDir, overwrite: GetBoolean(features, InplaceValue));
}

return;
}

@@ -166,12 +181,12 @@ namespace SabreTools.Features
if (updateMode.HasFlag(UpdateMode.DiffReverseCascade))
{
updateMode |= UpdateMode.DiffCascade;
inputFileNames.Reverse();
inputPaths.Reverse();
}
if (updateMode.HasFlag(UpdateMode.ReverseBaseReplace))
{
updateMode |= UpdateMode.BaseReplace;
baseFileNames.Reverse();
basePaths.Reverse();
}

// Create a DAT to capture inputs
@@ -180,27 +195,27 @@ namespace SabreTools.Features
// Populate using the correct set
List<DatHeader> datHeaders;
if (updateMode.HasFlag(UpdateMode.DiffAgainst) || updateMode.HasFlag(UpdateMode.BaseReplace))
datHeaders = userInputDat.PopulateUserData(baseFileNames, Extras, Filter);
datHeaders = userInputDat.PopulateUserData(basePaths, Extras, Filter);
else
datHeaders = userInputDat.PopulateUserData(inputFileNames, Extras, Filter);
datHeaders = userInputDat.PopulateUserData(inputPaths, Extras, Filter);

// Output only DatItems that are duplicated across inputs
if (updateMode.HasFlag(UpdateMode.DiffDupesOnly))
userInputDat.DiffDuplicates(inputFileNames, OutputDir);
userInputDat.DiffDuplicates(inputPaths, OutputDir);

// Output only DatItems that are not duplicated across inputs
if (updateMode.HasFlag(UpdateMode.DiffNoDupesOnly))
userInputDat.DiffNoDuplicates(inputFileNames, OutputDir);
userInputDat.DiffNoDuplicates(inputPaths, OutputDir);

// Output only DatItems that are unique to each input
if (updateMode.HasFlag(UpdateMode.DiffIndividualsOnly))
userInputDat.DiffIndividuals(inputFileNames, OutputDir);
userInputDat.DiffIndividuals(inputPaths, OutputDir);

// Output cascaded diffs
if (updateMode.HasFlag(UpdateMode.DiffCascade))
{
userInputDat.DiffCascade(
inputFileNames,
inputPaths,
datHeaders,
OutputDir,
GetBoolean(features, InplaceValue),
@@ -210,32 +225,55 @@ namespace SabreTools.Features
// Output differences against a base DAT
if (updateMode.HasFlag(UpdateMode.DiffAgainst))
{
userInputDat.DiffAgainst(
inputFileNames,
OutputDir,
GetBoolean(features, InplaceValue),
Extras,
Filter,
GetBoolean(Features, ByGameValue));
// Loop through each input and diff against the base
foreach (ParentablePath inputPath in inputPaths)
{
// Parse, extras, and filter the path to a new DatFile
DatFile repDat = DatFile.Create(userInputDat.Header.CloneFiltering());
repDat.Parse(inputPath, indexId: 1, keep: true);
repDat.ApplyExtras(Extras);
repDat.ApplyFilter(Filter, false);

// Now replace the fields from the base DatFile
userInputDat.DiffAgainst(repDat, GetBoolean(Features, ByGameValue));

// Finally output the diffed DatFile
string interOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
repDat.Write(interOutDir, overwrite: GetBoolean(features, InplaceValue));
}
}

// Output DATs after replacing fields from a base DAT
// Output DATs after replacing fields from a base DatFile
if (updateMode.HasFlag(UpdateMode.BaseReplace))
{
userInputDat.BaseReplace(
inputFileNames,
OutputDir,
GetBoolean(features, InplaceValue),
Extras,
Filter,
updateFields,
GetBoolean(features, OnlySameValue));
// Loop through each input and apply the base DatFile
foreach (ParentablePath inputPath in inputPaths)
{
// Parse, extras, and filter the path to a new DatFile
DatFile repDat = DatFile.Create(userInputDat.Header.CloneFiltering());
repDat.Parse(inputPath, indexId: 1, keep: true);
repDat.ApplyExtras(Extras);
repDat.ApplyFilter(Filter, false);

// Now replace the fields from the base DatFile
userInputDat.BaseReplace(repDat, updateFields, GetBoolean(features, OnlySameValue));

// Finally output the replaced DatFile
string interOutDir = inputPath.GetOutputPath(OutputDir, GetBoolean(features, InplaceValue));
repDat.Write(interOutDir, overwrite: GetBoolean(features, InplaceValue));
}
}

// Merge all input files and write
// This has to be last due to the SuperDAT handling
if (updateMode.HasFlag(UpdateMode.Merge))
userInputDat.MergeNoDiff(inputFileNames, OutputDir);
{
// If we're in SuperDAT mode, prefix all games with their respective DATs
if (string.Equals(userInputDat.Header.Type, "SuperDAT", StringComparison.OrdinalIgnoreCase))
userInputDat.ApplySuperDAT(inputPaths);

userInputDat.Write(OutputDir);
}
}
}
}