Support inflating fme/dcs scenes
PikminGuts92 committed Sep 2, 2024
1 parent 344827d commit 9e4c640
Showing 3 changed files with 95 additions and 49 deletions.
34 changes: 34 additions & 0 deletions Src/Apps/ArkHelper/Apps/Ark2DirApp.cs
@@ -5,6 +5,7 @@
using Mackiloha.App;
using Mackiloha.App.Extensions;
using Mackiloha.Ark;
+using Mackiloha.Chunk;
using Mackiloha.CSV;
using Mackiloha.IO;
using Mackiloha.Milo;
@@ -18,11 +19,28 @@ public class Ark2DirApp
{
protected readonly ILogManager LogManager;
protected readonly IScriptHelper ScriptHelper;
+protected readonly HashSet<string> ChunkExtensions;

public Ark2DirApp(ILogManager logManager, IScriptHelper scriptHelper)
{
LogManager = logManager;
ScriptHelper = scriptHelper;

+string[] platforms = ["durango", "xbox"];
+string[] formats = [
+"char",
+"cliptype",
+"dir",
+"entity",
+"layer",
+"scene",
+"song",
+"uiscreen",
+];
+
+ChunkExtensions = new HashSet<string>(
+platforms.SelectMany(p => formats.Select(f => $"{f}_{p}")),
+StringComparer.InvariantCultureIgnoreCase);
}

private string CombinePath(string basePath, string path)
@@ -142,11 +160,17 @@ public void Parse(Ark2DirOptions op)
&& miloRegex.IsMatch(x.FullPath))
.ToList();

+var chunksToInflate = ark.Entries
+.Where(x => op.InflateMilos
+&& ChunkExtensions.Contains(x.Extension))
+.ToList();

var entriesToExtract = ark.Entries
.Where(x => op.ExtractAll)
.Except(scriptsToConvert)
.Except(texturesToConvert)
.Except(milosToInflate)
+.Except(chunksToInflate)
.ToList();

foreach (var arkEntry in entriesToExtract)
@@ -230,6 +254,16 @@ var p when p.EndsWith("_xbox", StringComparison.InvariantCultureIgnoreCase) => P
Log.Information("Wrote \"{ExtractedMiloPath}\"", extPath);
}

+foreach (var chunkEntry in chunksToInflate)
+{
+var filePath = ExtractEntry(ark, chunkEntry, CombinePath(op.OutputPath, chunkEntry.FullPath));
+
+// Inflate chunk
+Chunk.DecompressChunkFile(filePath, filePath);
+
+Log.Information("Wrote \"{FilePath}\"", filePath);
+}

foreach (var csvEntry in csvsToConvert)
{
var csvStream = ark.GetArkEntryFileStream(csvEntry);
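The constructor builds a 16-entry lookup (8 formats × 2 platforms), so ark entries such as a hypothetical "city.scene_durango" or "menu.uiscreen_xbox" get routed to chunk inflation instead of plain extraction. A minimal standalone sketch of that routing (the entry names are invented; collection expressions require C# 12, as in the diff itself):

using System;
using System.Collections.Generic;
using System.Linq;

string[] platforms = ["durango", "xbox"];
string[] formats = ["char", "cliptype", "dir", "entity", "layer", "scene", "song", "uiscreen"];

// Same construction as the Ark2DirApp constructor above
var chunkExtensions = new HashSet<string>(
    platforms.SelectMany(p => formats.Select(f => $"{f}_{p}")),
    StringComparer.InvariantCultureIgnoreCase);

Console.WriteLine(chunkExtensions.Count);                     // 16
Console.WriteLine(chunkExtensions.Contains("SCENE_DURANGO")); // True (case-insensitive)
Console.WriteLine(chunkExtensions.Contains("scene_ps3"));     // False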
2 changes: 1 addition & 1 deletion Src/Apps/ArkHelper/Options/Ark2DirOptions.cs
@@ -19,7 +19,7 @@ public class Ark2DirOptions : BaseOptions
[Option('t', "convertTextures", HelpText = "Convert textures to .png", Hidden = true)]
public bool ConvertTextures { get; set; }

-[Option('m', "inflateMilos", HelpText = "Inflate milo archives (decompress)")]
+[Option('m', "inflateMilos", HelpText = "Inflate milo/scene archives (decompress)")]
public bool InflateMilos { get; set; }

[Option('x', "extractMilos", HelpText = "Extract milo archives (can't be used with decompress)", Hidden = true)]
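The -m switch itself is unchanged; only its help text now mentions scenes. For reference, a minimal sketch of how CommandLineParser binds the switch to the property (DemoOptions is a hypothetical stand-in, since BaseOptions and its required values aren't shown in this diff):

using System;
using CommandLine;

class DemoOptions
{
    [Option('m', "inflateMilos", HelpText = "Inflate milo/scene archives (decompress)")]
    public bool InflateMilos { get; set; }
}

class Demo
{
    static void Main() =>
        Parser.Default.ParseArguments<DemoOptions>(new[] { "-m" })
            .WithParsed(op => Console.WriteLine(op.InflateMilos)); // True
}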
108 changes: 60 additions & 48 deletions Src/Core/Mackiloha/Chunk/Chunk.cs
@@ -1,69 +1,65 @@
-/*
-All bytes are in big endian order.
+namespace Mackiloha.Chunk;

-It looks like milo files were replaced with this. Max Block Size = 0x10000 (2^16)
-BYTES(4) - "CHNK"
-INT32 - Uknown - Always 255?
-INT32 - Block Count
-INT32 - Largest Block (Uncompressed)
-INT16 - Always 1
-INT16 - Always 2
-BlockDetails[Block Count]
-* ----Block Details----
-* =====================
-* INT32 - Size
-* INT32 - Decompressed Size
-* Bool? - If "01 00 00 00", then it's compressed.
-* INT32 - Offset
-Begin ZLib'd blocks!
-*/
-
-namespace Mackiloha.Chunk;
-
-// Successor to Milo container (Used in FME/RBVR)
+// Successor to Milo container (Used in FME/DCS/RBVR)
public class Chunk
{
private const uint CHNK_MAGIC = 0x43484E4B; // "CHNK"
+private const uint IS_COMPRESSED = 0x01_00_00_00;

public Chunk()
{
Entries = new List<ChunkEntry>();
}

-public void WriteToFile(string outPath, bool noHeader = false)
+public void WriteToFile(string outPath, bool writeHeader = true)
{
-using (FileStream fs = File.OpenWrite(outPath))
-{
-WriteToStream(fs, noHeader);
-}
+using var fs = File.OpenWrite(outPath);
+WriteToStream(fs, writeHeader);
}

-public void WriteToStream(Stream stream, bool noHeader)
+public void WriteToStream(Stream stream, bool writeHeader = true)
{
AwesomeWriter aw = new AwesomeWriter(stream, true);

-if (!noHeader)
+if (writeHeader)
{
+int endianFlag = 0xFF;
+short extraShort = 2;
+
+if (IsDurango)
+{
+endianFlag = 0x1FF;
+extraShort = 5;
+}

aw.Write(CHNK_MAGIC);
-aw.Write((int)255);
+aw.Write((int)endianFlag);

+aw.BigEndian = !IsDurango;

aw.Write(Entries.Count);
aw.Write(Entries.Max(x => x.Data.Length));
aw.Write((short)1);
-aw.Write((short)2);
+aw.Write((short)extraShort);

-int currentIdx = 20 + (Entries.Count << 2);
+int currentIdx = 20 + (Entries.Count * 16);

// Writes block details
foreach (ChunkEntry entry in Entries)
{
aw.Write(entry.Data.Length);
aw.Write(entry.Data.Length);

-aw.Write((int)(entry.Compressed ? 1 : 0));
-aw.Write(currentIdx);
+if (IsDurango)
+{
+aw.Write(currentIdx);
+aw.Write((int)(entry.Compressed ? IS_COMPRESSED : 0));
+}
+else
+{
+aw.Write((int)(entry.Compressed ? IS_COMPRESSED : 0));
+aw.Write(currentIdx);
+}

currentIdx += entry.Data.Length;
}
@@ -73,13 +69,15 @@ public void WriteToStream(Stream stream, bool noHeader)
Entries.ForEach(x => aw.Write(x.Data));
}
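Tracing the rewritten header path for both platforms (a worked example; the bytes follow from the code above, since the "CHNK" magic and endian flag are written while the writer is still big-endian, and only afterwards does it flip for Durango):

// Fixed header: 4 ("CHNK") + 4 (flag) + 4 (count) + 4 (largest block) + 2 + 2 = 20 bytes,
// then one 16-byte detail record per block — hence the corrected
// currentIdx = 20 + count * 16 (the old `count << 2` assumed 4 bytes per record).
//
// Two-block example:
// Xbox:    43 48 4E 4B | 00 00 00 FF | count/max big-endian    | 00 01 | 00 02
// Durango: 43 48 4E 4B | 00 00 01 FF | count/max little-endian | 01 00 | 05 00
static int FirstBlockOffset(int blockCount) => 20 + blockCount * 16; // 2 blocks -> 52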

-public static void DecompressChunkFile(string inPath, string outPath, bool noHeader)
+public static void DecompressChunkFile(string inPath, string outPath, bool writeHeader = true)
{
-using (FileStream fs = File.OpenRead(inPath))
+Chunk chunk;
+using (var fs = File.OpenRead(inPath))
{
-Chunk chunk = ReadFromStream(fs);
-chunk.WriteToFile(outPath, noHeader);
+chunk = ReadFromStream(fs);
}

+chunk.WriteToFile(outPath, writeHeader);
}

private static Chunk ReadFromStream(Stream stream)
@@ -89,9 +87,15 @@ private static Chunk ReadFromStream(Stream stream)

if (ar.ReadUInt32() != CHNK_MAGIC) return chunk;

-ar.BaseStream.Position += 4; // Always 255?
+var flag = ar.ReadUInt32();
+if ((flag & 0x100) != 0)
+{
+chunk.IsDurango = true;
+ar.BigEndian = false;
+}

int blockCount = ar.ReadInt32();
-ar.BaseStream.Position += 8; // Skips 1, 2 (16-bits)
+ar.BaseStream.Position += 8; // Skips 1, 2/5 (16-bits)

int[] blockSize = new int[blockCount];
bool[] compressed = new bool[blockCount]; // Uncompressed by default
@@ -102,10 +106,17 @@ private static Chunk ReadFromStream(Stream stream)
blockSize[i] = ar.ReadInt32();
ar.BaseStream.Position += 4; // Decompressed size (Not needed)

-// Sets as compressed if it meets the requirement
-compressed[i] = ar.ReadInt32() == 0x1000000;
-
-ar.BaseStream.Position += 4; // Offset (Not needed)
+// Fields are swapped depending on platform
+if (chunk.IsDurango)
+{
+ar.BaseStream.Position += 4; // Offset (Not needed)
+compressed[i] = ar.ReadInt32() == IS_COMPRESSED;
+}
+else
+{
+compressed[i] = ar.ReadInt32() == IS_COMPRESSED;
+ar.BaseStream.Position += 4; // Offset (Not needed)
+}
}

for (int i = 0; i < blockCount; i++)
Expand Down Expand Up @@ -133,7 +144,8 @@ private static Chunk ReadFromStream(Stream stream)
return chunk;
}

-public List<ChunkEntry> Entries;
+public bool IsDurango { get; set; }
+public List<ChunkEntry> Entries { get; set; }
}

public class ChunkEntry
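The collapsed tail of ReadFromStream inflates each compressed block before storing it in Entries. The diff doesn't show which zlib inflater the project uses; a minimal sketch of the per-block step using System.IO.Compression.ZLibStream (an assumption — requires .NET 6+):

using System.IO;
using System.IO.Compression;

// Hypothetical helper: inflate one block as read above.
static byte[] InflateBlock(byte[] raw, bool isCompressed)
{
    if (!isCompressed) return raw; // uncompressed blocks pass through as-is

    using var input = new MemoryStream(raw);
    using var zlib = new ZLibStream(input, CompressionMode.Decompress);
    using var output = new MemoryStream();
    zlib.CopyTo(output);
    return output.ToArray();
}

Ark2DirApp then round-trips files in place — Chunk.DecompressChunkFile(filePath, filePath) rewrites each extracted chunk with every block stored uncompressed, which is what "inflate" means here.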
