The DAT file reader had several bugs inherited from the old C++ reference code, which targeted an older format version. Verified and fixed against real client_portal.dat and client_cell_1.dat files:

- Fix header offset: the BTree root pointer is at 0x160, not 0x148 (which is the file size field)
- Fix BTree entry size: 24 bytes (flags + id + offset + size + timestamp), not 12
- Fix sector-chain node reading: BTree nodes span multiple sectors via linked-list headers, so node data must be assembled across sector boundaries
- Fix DatStreamImpl.Read() BSTR handling: use Buffer.BlockCopy to match the C++ SysAllocStringByteLen behavior instead of Marshal.PtrToStringAnsi
- Fix DatStreamImpl.ReadBinary() pointer lifetime: use an inline fixed block to keep the destination buffer pinned during Marshal.Copy (a sketch of this pattern follows below)
- Document the LoadFilters() dependency on parameterized COM properties in IDecalCore.Configuration, which require IDispatch to invoke correctly

Add smoke test project (13/13 tests pass against real DAT files).

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
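For reference, a minimal sketch of the pinning pattern behind the ReadBinary() fix. This is illustrative only: it assumes the stream has already assembled the entry's bytes into a managed array, and the class, field, and variable names (DatStreamSketch, _data, _position) are hypothetical, not the actual Decal implementation. It requires compiling with unsafe blocks enabled.

    using System;
    using System.Runtime.InteropServices;

    // Hypothetical stand-in for DatStreamImpl's buffered state.
    class DatStreamSketch
    {
        private readonly byte[] _data;  // entry contents, already assembled from the sector chain
        private int _position;          // current read cursor (what Tell would report)

        public DatStreamSketch(byte[] data) { _data = data; }

        public unsafe void ReadBinary(int size, ref byte destination)
        {
            // Pin the caller's destination buffer for the whole copy so the GC
            // cannot move it while Marshal.Copy writes through the raw pointer.
            fixed (byte* dst = &destination)
            {
                Marshal.Copy(_data, _position, (IntPtr)dst, size);
            }
            _position += size;
        }
    }

Callers pass a reference to the first element of a byte[], e.g. sketch.ReadBinary(64, ref buffer[0]), which mirrors how the smoke test below calls DatStreamImpl.ReadBinary().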
using System;
using System.IO;
using System.Runtime.InteropServices;
using Decal.Interop.Dat;

namespace Decal.DecalDat.Tests
{
    class Program
    {
        // File IDs from FileService.Startup() — decimal values converted to hex
        static readonly (uint id, string name)[] PortalFiles = new[]
        {
            (0x0E00000Eu, "SpellTable"),        // 234881038
            (0x0E00000Fu, "ComponentTable"),    // 234881039
            (0x0E000003u, "VitalFormulaTable"), // 234881027
            (0x0E000004u, "SkillTable"),        // 234881028
            (0x25000006u, "AttributeTable"),    // 620756998
            (0x25000007u, "VitalTable"),        // 620756999
        };

        static int _pass, _fail;

        static void Main(string[] args)
        {
            string acPath = args.Length > 0
                ? args[0]
                : @"C:\Turbine\Asheron's Call";

            string portalPath = Path.Combine(acPath, "client_portal.dat");
            string cellPath = Path.Combine(acPath, "client_cell_1.dat");

            Console.WriteLine("=== DecalDat Smoke Test ===");
            Console.WriteLine($"Platform: {(IntPtr.Size == 4 ? "x86" : "x64")}");
            Console.WriteLine();

            TestDatFile("portal", portalPath, 1024);
            TestDatFile("cell", cellPath, 256);
            TestPortalKnownFiles(portalPath);
            TestStreamReadRestart(portalPath);
            TestStreamReadBinary(portalPath);

            Console.WriteLine();
            Console.WriteLine($"=== Results: {_pass} passed, {_fail} failed ===");
        }

        static void TestDatFile(string label, string path, int sectorSize)
        {
            Console.WriteLine($"[TEST] Open {label}.dat ({path})");
            if (!File.Exists(path))
            {
                Fail($" File not found: {path}");
                return;
            }

            try
            {
                using (var dat = new DatFile(path, sectorSize))
                {
                    Pass($" Opened successfully (sector size {sectorSize})");
                }
            }
            catch (Exception ex)
            {
                Fail($" Failed to open: {ex.Message}");
            }
        }

        static void TestPortalKnownFiles(string portalPath)
        {
            if (!File.Exists(portalPath)) return;

            Console.WriteLine();
            Console.WriteLine("[TEST] Read known portal.dat file IDs");

            try
            {
                using (var dat = new DatFile(portalPath, 1024))
                {
                    foreach (var (id, name) in PortalFiles)
                    {
                        try
                        {
                            var entry = dat.GetFile(id);
                            if (entry.Size > 0)
                                Pass($" 0x{id:X8} ({name}): {entry.Size:N0} bytes");
                            else
                                Fail($" 0x{id:X8} ({name}): size is 0");
                        }
                        catch (FileNotFoundException)
                        {
                            Fail($" 0x{id:X8} ({name}): NOT FOUND");
                        }
                        catch (Exception ex)
                        {
                            Fail($" 0x{id:X8} ({name}): ERROR - {ex.GetType().Name}: {ex.Message}");
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                Fail($" Failed to open DAT: {ex.Message}");
            }
        }

        static void TestStreamReadRestart(string portalPath)
        {
            if (!File.Exists(portalPath)) return;

            Console.WriteLine();
            Console.WriteLine("[TEST] DatStream: Read, Restart, Read again (consistency)");

            using (var dat = new DatFile(portalPath, 1024))
            {
                var entry = dat.GetFile(0x0E00000E); // SpellTable
                var stream = new DatStreamImpl();
                stream.Load(entry);

                int size = stream.Size;
                if (size <= 0) { Fail("Size is 0"); return; }

                // Read first 64 bytes via ReadBinary
                int readSize = Math.Min(64, size);
                byte[] first = new byte[readSize];
                stream.ReadBinary(readSize, ref first[0]);

                // Restart and read again
                stream.Restart();
                byte[] second = new byte[readSize];
                stream.ReadBinary(readSize, ref second[0]);

                bool match = true;
                for (int i = 0; i < readSize; i++)
                    if (first[i] != second[i]) { match = false; break; }

                if (match)
                    Pass($"Read {readSize} bytes, restarted, read again — identical");
                else
                    Fail("Data mismatch after Restart!");

                stream.Restart();
                if (stream.Tell == 0)
                    Pass("Tell resets to 0 after Restart");
                else
                    Fail($"Tell is {stream.Tell} after Restart (expected 0)");
            }
        }

        static void TestStreamReadBinary(string portalPath)
        {
            if (!File.Exists(portalPath)) return;

            Console.WriteLine();
            Console.WriteLine("[TEST] DatStream: ReadBinary consistency + Tell tracking");

            using (var dat = new DatFile(portalPath, 1024))
            {
                var entry = dat.GetFile(0x0E000004); // SkillTable
                var stream = new DatStreamImpl();
                stream.Load(entry);

                int size = stream.Size;
                int readSize = Math.Min(256, size);

                // Read in two chunks
                int chunk1 = readSize / 2;
                int chunk2 = readSize - chunk1;

                byte[] viaChunks = new byte[readSize];
                stream.ReadBinary(chunk1, ref viaChunks[0]);

                if (stream.Tell == chunk1)
                    Pass($"Tell is {chunk1} after reading {chunk1} bytes");
                else
                    Fail($"Tell is {stream.Tell} (expected {chunk1})");

                // Read second chunk into offset position
                byte[] temp = new byte[chunk2];
                stream.ReadBinary(chunk2, ref temp[0]);
                Array.Copy(temp, 0, viaChunks, chunk1, chunk2);

                // Restart and read all at once
                stream.Restart();
                byte[] viaFull = new byte[readSize];
                stream.ReadBinary(readSize, ref viaFull[0]);

                bool match = true;
                for (int i = 0; i < readSize; i++)
                    if (viaChunks[i] != viaFull[i]) { match = false; break; }

                if (match)
                    Pass($"Chunked read ({chunk1}+{chunk2}) matches full read ({readSize})");
                else
                    Fail("Chunked vs full read mismatch!");

                if (stream.Tell == readSize)
                    Pass($"Tell is {readSize} after reading {readSize} bytes");
                else
                    Fail($"Tell is {stream.Tell} (expected {readSize})");
            }
        }

        static void Pass(string msg)
        {
            _pass++;
            Console.ForegroundColor = ConsoleColor.Green;
            Console.WriteLine($" PASS {msg}");
            Console.ResetColor();
        }

        static void Fail(string msg)
        {
            _fail++;
            Console.ForegroundColor = ConsoleColor.Red;
            Console.WriteLine($" FAIL {msg}");
            Console.ResetColor();
        }
    }
}