From 9e4c640e21f4211607ee656fee2d79baa68fa1ca Mon Sep 17 00:00:00 2001
From: PikminGuts92
Date: Mon, 2 Sep 2024 15:12:37 -0400
Subject: [PATCH] Support inflating fme/dcs scenes

---
 Src/Apps/ArkHelper/Apps/Ark2DirApp.cs        |  34 ++++++
 Src/Apps/ArkHelper/Options/Ark2DirOptions.cs |   2 +-
 Src/Core/Mackiloha/Chunk/Chunk.cs            | 108 ++++++++++---------
 3 files changed, 95 insertions(+), 49 deletions(-)

diff --git a/Src/Apps/ArkHelper/Apps/Ark2DirApp.cs b/Src/Apps/ArkHelper/Apps/Ark2DirApp.cs
index de1645f..a56cf50 100644
--- a/Src/Apps/ArkHelper/Apps/Ark2DirApp.cs
+++ b/Src/Apps/ArkHelper/Apps/Ark2DirApp.cs
@@ -5,6 +5,7 @@
 using Mackiloha.App;
 using Mackiloha.App.Extensions;
 using Mackiloha.Ark;
+using Mackiloha.Chunk;
 using Mackiloha.CSV;
 using Mackiloha.IO;
 using Mackiloha.Milo;
@@ -18,11 +19,28 @@ public class Ark2DirApp
 {
     protected readonly ILogManager LogManager;
     protected readonly IScriptHelper ScriptHelper;
+    protected readonly HashSet<string> ChunkExtensions;

     public Ark2DirApp(ILogManager logManager, IScriptHelper scriptHelper)
     {
         LogManager = logManager;
         ScriptHelper = scriptHelper;
+
+        string[] platforms = ["durango", "xbox"];
+        string[] formats = [
+            "char",
+            "cliptype",
+            "dir",
+            "entity",
+            "layer",
+            "scene",
+            "song",
+            "uiscreen",
+        ];
+
+        ChunkExtensions = new HashSet<string>(
+            platforms.SelectMany(p => formats.Select(f => $"{f}_{p}")),
+            StringComparer.InvariantCultureIgnoreCase);
     }

     private string CombinePath(string basePath, string path)
@@ -142,11 +160,17 @@ public void Parse(Ark2DirOptions op)
                 && miloRegex.IsMatch(x.FullPath))
             .ToList();

+        var chunksToInflate = ark.Entries
+            .Where(x => op.InflateMilos
+                && ChunkExtensions.Contains(x.Extension))
+            .ToList();
+
         var entriesToExtract = ark.Entries
             .Where(x => op.ExtractAll)
             .Except(scriptsToConvert)
             .Except(texturesToConvert)
             .Except(milosToInflate)
+            .Except(chunksToInflate)
             .ToList();

         foreach (var arkEntry in entriesToExtract)
@@ -230,6 +254,16 @@ var p when p.EndsWith("_xbox", StringComparison.InvariantCultureIgnoreCase) => P
             Log.Information("Wrote \"{ExtractedMiloPath}\"", extPath);
         }

+        foreach (var chunkEntry in chunksToInflate)
+        {
+            var filePath = ExtractEntry(ark, chunkEntry, CombinePath(op.OutputPath, chunkEntry.FullPath));
+
+            // Inflate chunk
+            Chunk.DecompressChunkFile(filePath, filePath);
+
+            Log.Information("Wrote \"{FilePath}\"", filePath);
+        }
+
         foreach (var csvEntry in csvsToConvert)
         {
             var csvStream = ark.GetArkEntryFileStream(csvEntry);
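Note: ChunkExtensions above is the platform x format cross-product, matched
case-insensitively against each ark entry's extension. A standalone sketch of
what the set ends up containing (illustrative only, not part of this patch):

    using System;
    using System.Collections.Generic;
    using System.Linq;

    // Same cross-product as the constructor: format + "_" + platform,
    // e.g. "char_durango", "char_xbox", ..., "uiscreen_xbox".
    string[] platforms = ["durango", "xbox"];
    string[] formats = ["char", "cliptype", "dir", "entity", "layer", "scene", "song", "uiscreen"];

    var exts = new HashSet<string>(
        platforms.SelectMany(p => formats.Select(f => $"{f}_{p}")),
        StringComparer.InvariantCultureIgnoreCase);

    Console.WriteLine(exts.Count);                  // 16
    Console.WriteLine(exts.Contains("SCENE_XBOX")); // True; comparer ignores case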
diff --git a/Src/Apps/ArkHelper/Options/Ark2DirOptions.cs b/Src/Apps/ArkHelper/Options/Ark2DirOptions.cs
index af316aa..728a101 100644
--- a/Src/Apps/ArkHelper/Options/Ark2DirOptions.cs
+++ b/Src/Apps/ArkHelper/Options/Ark2DirOptions.cs
@@ -19,7 +19,7 @@ public class Ark2DirOptions : BaseOptions
     [Option('t', "convertTextures", HelpText = "Convert textures to .png", Hidden = true)]
     public bool ConvertTextures { get; set; }

-    [Option('m', "inflateMilos", HelpText = "Inflate milo archives (decompress)")]
+    [Option('m', "inflateMilos", HelpText = "Inflate milo/scene archives (decompress)")]
     public bool InflateMilos { get; set; }

     [Option('x', "extractMilos", HelpText = "Extract milo archives (can't be used with decompress)", Hidden = true)]

diff --git a/Src/Core/Mackiloha/Chunk/Chunk.cs b/Src/Core/Mackiloha/Chunk/Chunk.cs
index eac19bc..07b75a7 100644
--- a/Src/Core/Mackiloha/Chunk/Chunk.cs
+++ b/Src/Core/Mackiloha/Chunk/Chunk.cs
@@ -1,60 +1,48 @@
-/*
-All bytes are in big endian order.
-
-It looks like milo files were replaced with this. Max Block Size = 0x10000 (2^16)
-
-BYTES(4) - "CHNK"
-INT32 - Uknown - Always 255?
-INT32 - Block Count
-INT32 - Largest Block (Uncompressed)
-INT16 - Always 1
-INT16 - Always 2
-BlockDetails[Block Count]
-
-* ----Block Details----
-* =====================
-* INT32 - Size
-* INT32 - Decompressed Size
-* Bool? - If "01 00 00 00", then it's compressed.
-* INT32 - Offset
-
-Begin ZLib'd blocks!
-*/
-
 namespace Mackiloha.Chunk;

-// Successor to Milo container (Used in FME/RBVR)
+// Successor to Milo container (Used in FME/DCS/RBVR)
 public class Chunk
 {
     private const uint CHNK_MAGIC = 0x43484E4B; // "CHNK"
+    private const uint IS_COMPRESSED = 0x01_00_00_00;

     public Chunk()
     {
         Entries = new List<ChunkEntry>();
     }

-    public void WriteToFile(string outPath, bool noHeader = false)
+    public void WriteToFile(string outPath, bool writeHeader = true)
     {
-        using (FileStream fs = File.OpenWrite(outPath))
-        {
-            WriteToStream(fs, noHeader);
-        }
+        using var fs = File.OpenWrite(outPath);
+        WriteToStream(fs, writeHeader);
     }

-    public void WriteToStream(Stream stream, bool noHeader)
+    public void WriteToStream(Stream stream, bool writeHeader = true)
     {
         AwesomeWriter aw = new AwesomeWriter(stream, true);

-        if (!noHeader)
+        if (writeHeader)
         {
+            int endianFlag = 0xFF;
+            short extraShort = 2;
+
+            if (IsDurango)
+            {
+                endianFlag = 0x1FF;
+                extraShort = 5;
+            }
+
             aw.Write(CHNK_MAGIC);
-            aw.Write((int)255);
+            aw.Write((int)endianFlag);
+
+            aw.BigEndian = !IsDurango;
+
             aw.Write(Entries.Count);
             aw.Write(Entries.Max(x => x.Data.Length));
             aw.Write((short)1);
-            aw.Write((short)2);
+            aw.Write((short)extraShort);

-            int currentIdx = 20 + (Entries.Count << 2);
+            int currentIdx = 20 + (Entries.Count * 16);

             // Writes block details
             foreach (ChunkEntry entry in Entries)
@@ -62,8 +50,16 @@ public void WriteToStream(Stream stream, bool noHeader)
                 aw.Write(entry.Data.Length);
                 aw.Write(entry.Data.Length);

-                aw.Write((int)(entry.Compressed ? 1 : 0));
-                aw.Write(currentIdx);
+                if (IsDurango)
+                {
+                    aw.Write(currentIdx);
+                    aw.Write((int)(entry.Compressed ? IS_COMPRESSED : 0));
+                }
+                else
+                {
+                    aw.Write((int)(entry.Compressed ? IS_COMPRESSED : 0));
+                    aw.Write(currentIdx);
+                }

                 currentIdx += entry.Data.Length;
             }
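Note: the currentIdx change above follows from the layout the deleted header
comment described: a 20-byte header (magic, flag, block count, largest block,
two INT16s) followed by one 16-byte block-detail entry per block (size,
decompressed size, compressed flag, offset), so the first data block lands at
20 + count * 16. The old Entries.Count << 2 advanced only 4 bytes per entry.
A back-of-envelope restatement (sketch only, helper name hypothetical):

    // 20-byte header: magic(4) + flag(4) + block count(4) + largest block(4) + 2 + 2,
    // then a 16-byte entry per block; data begins right after the entry table.
    static int FirstBlockOffset(int blockCount) => 20 + (blockCount * 16);

    Console.WriteLine(FirstBlockOffset(2)); // 52 = 20 + 2 * 16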
@@ -73,13 +69,15 @@ public void WriteToStream(Stream stream, bool noHeader)
         Entries.ForEach(x => aw.Write(x.Data));
     }

-    public static void DecompressChunkFile(string inPath, string outPath, bool noHeader)
+    public static void DecompressChunkFile(string inPath, string outPath, bool writeHeader = true)
     {
-        using (FileStream fs = File.OpenRead(inPath))
+        Chunk chunk;
+        using (var fs = File.OpenRead(inPath))
         {
-            Chunk chunk = ReadFromStream(fs);
-            chunk.WriteToFile(outPath, noHeader);
+            chunk = ReadFromStream(fs);
         }
+
+        chunk.WriteToFile(outPath, writeHeader);
     }

     private static Chunk ReadFromStream(Stream stream)
@@ -89,9 +87,15 @@ private static Chunk ReadFromStream(Stream stream)
         if (ar.ReadUInt32() != CHNK_MAGIC) return chunk;

-        ar.BaseStream.Position += 4; // Always 255?
+        var flag = ar.ReadUInt32();
+        if ((flag & 0x100) != 0)
+        {
+            chunk.IsDurango = true;
+            ar.BigEndian = false;
+        }
+
         int blockCount = ar.ReadInt32();
-        ar.BaseStream.Position += 8; // Skips 1, 2 (16-bits)
+        ar.BaseStream.Position += 8; // Skips 1, 2/5 (16-bits)

         int[] blockSize = new int[blockCount];
         bool[] compressed = new bool[blockCount]; // Uncompressed by default
@@ -102,10 +106,17 @@ private static Chunk ReadFromStream(Stream stream)
             blockSize[i] = ar.ReadInt32();
             ar.BaseStream.Position += 4; // Decompressed size (Not needed)

-            // Sets as compressed if it meets the requirement
-            compressed[i] = ar.ReadInt32() == 0x1000000;
-
-            ar.BaseStream.Position += 4; // Offset (Not needed)
+            // Fields are swapped depending on platform
+            if (chunk.IsDurango)
+            {
+                ar.BaseStream.Position += 4; // Offset (Not needed)
+                compressed[i] = ar.ReadInt32() == IS_COMPRESSED;
+            }
+            else
+            {
+                compressed[i] = ar.ReadInt32() == IS_COMPRESSED;
+                ar.BaseStream.Position += 4; // Offset (Not needed)
+            }
         }

         for (int i = 0; i < blockCount; i++)
@@ -133,7 +144,8 @@ private static Chunk ReadFromStream(Stream stream)
         return chunk;
     }

-    public List<ChunkEntry> Entries;
+    public bool IsDurango { get; set; }
+    public List<ChunkEntry> Entries { get; set; }
 }

 public class ChunkEntry
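Note: the reader above treats bit 0x100 of the post-magic flag as the Durango
marker (header flag 0x1FF instead of 0xFF) and switches AwesomeReader to
little-endian, mirroring the writer. A minimal usage sketch of the new inflate
path, the same call the Ark2DirApp loop makes (paths hypothetical):

    using Mackiloha.Chunk;

    // Reads the chunk, inflating any zlib-compressed blocks, and rewrites it
    // in place; the platform is detected from the header flag, so one call
    // covers both xbox and durango chunks.
    Chunk.DecompressChunkFile("out/world1.scene_durango", "out/world1.scene_durango");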