acdream/tests/AcDream.Core.Tests/Rendering/Wb/EntityClassificationCacheTests.cs
Erik 95ebbf3004 fix(render #53): key cache by (entityId, landblockHint) to defeat ID collision
User confirmed via A/B test (ACDREAM_DISABLE_TIER1_CACHE=1) that the
visual bug — buildings rendering up in the air outside Holtburg — is in
the cache wiring, not elsewhere. The matrix math (restPose * entityWorld
== model) was provably correct, so the bug had to be cache key collision.

Stabs were namespaced in commit 71d0edc, but scenery (0x80LLBB00 +
localIndex) and interior (0x40LLBB00 + localCounter) still have the
same 256-overflow risk. Dense LBs outside Holtburg (forest, urban) push
localIndex past 255, wrapping into the lbY byte and creating cross-LB
collisions.

Fix: change the cache key from uint entityId to (uint, uint) tuple of
(EntityId, LandblockHint). The cache is now correct-by-construction
regardless of any hydration path's Id-generation strategy. Defensive
against future regressions in any ID namespace.

InvalidateEntity becomes a sweep (was O(1)), but it's called rarely
(only on live-entity despawn). InvalidateLandblock was already a sweep.

Updated 14 existing cache tests + 1 dispatcher integration test to thread
landblockHint through TryGet / DebugCrossCheck calls.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-05-10 23:02:14 +02:00

285 lines
10 KiB
C#
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

using System.Collections.Generic;
using System.Numerics;
using AcDream.App.Rendering.Wb;
using AcDream.Core.Meshing;
using Xunit;
namespace AcDream.Core.Tests.Rendering.Wb;
/// <summary>
/// Unit tests for <c>EntityClassificationCache</c>, which is keyed by
/// (entityId, landblockHint) to defeat cross-landblock ID collisions
/// (see fix for render issue #53). Covers lookup, population/override,
/// per-entity and per-landblock invalidation, and the DEBUG-only
/// <c>DebugCrossCheck</c> assertion path.
/// </summary>
public class EntityClassificationCacheTests
{
    [Fact]
    public void TryGet_EmptyCache_ReturnsFalse()
    {
        var cache = new EntityClassificationCache();

        bool found = cache.TryGet(entityId: 42, landblockHint: 0u, out var entry);

        Assert.False(found);
        Assert.Null(entry);
    }

    [Fact]
    public void Populate_ThenTryGet_ReturnsBatchesInOrder()
    {
        var cache = new EntityClassificationCache();
        var batches = new[]
        {
            MakeCachedBatch(ibo: 1, firstIndex: 0, indexCount: 6, texHandle: 0xAA),
            MakeCachedBatch(ibo: 1, firstIndex: 6, indexCount: 6, texHandle: 0xBB),
        };

        cache.Populate(entityId: 100, landblockHint: 0xA9B40000u, batches);

        Assert.True(cache.TryGet(100, 0xA9B40000u, out var entry));
        Assert.NotNull(entry);
        Assert.Equal(100u, entry!.EntityId);
        Assert.Equal(0xA9B40000u, entry.LandblockHint);
        Assert.Equal(batches, entry.Batches);
    }

    [Fact]
    public void Populate_OverridesExistingEntry()
    {
        var cache = new EntityClassificationCache();
        cache.Populate(100, 0u, new[] { MakeCachedBatch(1, 0, 6, 0xAA) });

        // Second populate under the same key must fully replace the first.
        cache.Populate(100, 0u, new[] { MakeCachedBatch(2, 0, 12, 0xCC) });

        Assert.True(cache.TryGet(100, 0u, out var entry));
        Assert.NotNull(entry);
        Assert.Single(entry!.Batches);
        Assert.Equal(0xCCu, entry.Batches[0].BindlessTextureHandle);
    }

    [Fact]
    public void Count_TracksLiveEntries()
    {
        var cache = new EntityClassificationCache();
        Assert.Equal(0, cache.Count);

        cache.Populate(1, 0u, new[] { MakeCachedBatch(1, 0, 6, 0xAA) });
        Assert.Equal(1, cache.Count);

        cache.Populate(2, 0u, new[] { MakeCachedBatch(2, 0, 6, 0xAA) });
        Assert.Equal(2, cache.Count);

        // Re-populate same id — should not double-count.
        cache.Populate(1, 0u, new[] { MakeCachedBatch(3, 0, 6, 0xBB) });
        Assert.Equal(2, cache.Count);
    }

    [Fact]
    public void Populate_WithEmptyBatches_StoresEmptyEntry()
    {
        var cache = new EntityClassificationCache();

        // An empty batch list is a valid cache state (entity with nothing to draw),
        // distinct from a cache miss.
        cache.Populate(entityId: 7, landblockHint: 0u, System.Array.Empty<CachedBatch>());

        Assert.True(cache.TryGet(7, 0u, out var entry));
        Assert.NotNull(entry);
        Assert.Empty(entry!.Batches);
    }

    [Fact]
    public void Populate_SetupMultiPart_StoresFlatBatchPerSubPart()
    {
        // Synthetic Setup with 3 subParts × 2 batches each = 6 flat entries.
        // This pins the spec §3 Q4 decision: pre-flatten Setup multi-parts at
        // populate time so the per-frame hot path is branchless.
        var cache = new EntityClassificationCache();
        var batches = new CachedBatch[6];
        for (int subPart = 0; subPart < 3; subPart++)
            for (int b = 0; b < 2; b++)
            {
                batches[subPart * 2 + b] = MakeCachedBatch(
                    ibo: (uint)(subPart + 1),
                    firstIndex: (uint)(b * 6),
                    indexCount: 6,
                    texHandle: (ulong)(0x100 + subPart * 2 + b));
            }

        cache.Populate(99, 0u, batches);

        Assert.True(cache.TryGet(99, 0u, out var entry));
        Assert.NotNull(entry);
        Assert.Equal(6, entry!.Batches.Length);
        Assert.Equal(0x100u, entry.Batches[0].BindlessTextureHandle);
        Assert.Equal(0x105u, entry.Batches[5].BindlessTextureHandle);
    }

    [Fact]
    public void InvalidateEntity_RemovesEntry()
    {
        var cache = new EntityClassificationCache();
        cache.Populate(100, 0u, new[] { MakeCachedBatch(1, 0, 6, 0xAA) });
        Assert.True(cache.TryGet(100, 0u, out _));

        cache.InvalidateEntity(100);

        Assert.False(cache.TryGet(100, 0u, out var entry));
        Assert.Null(entry);
        Assert.Equal(0, cache.Count);
    }

    [Fact]
    public void InvalidateEntity_OnMissingId_NoThrow()
    {
        var cache = new EntityClassificationCache();

        var ex = Record.Exception(() => cache.InvalidateEntity(99999));

        Assert.Null(ex);
        Assert.Equal(0, cache.Count);
    }

    [Fact]
    public void InvalidateLandblock_RemovesAllMatchingEntries()
    {
        var cache = new EntityClassificationCache();
        cache.Populate(1, 0xA9B40000u, new[] { MakeCachedBatch(1, 0, 6, 0xAA) });
        cache.Populate(2, 0xA9B40000u, new[] { MakeCachedBatch(2, 0, 6, 0xBB) });
        cache.Populate(3, 0xA9B40000u, new[] { MakeCachedBatch(3, 0, 6, 0xCC) });
        Assert.Equal(3, cache.Count);

        cache.InvalidateLandblock(0xA9B40000u);

        Assert.Equal(0, cache.Count);
        Assert.False(cache.TryGet(1, 0xA9B40000u, out _));
        Assert.False(cache.TryGet(2, 0xA9B40000u, out _));
        Assert.False(cache.TryGet(3, 0xA9B40000u, out _));
    }

    [Fact]
    public void InvalidateLandblock_LeavesNonMatchingEntries()
    {
        var cache = new EntityClassificationCache();
        cache.Populate(1, 0xA9B40000u, new[] { MakeCachedBatch(1, 0, 6, 0xAA) });
        cache.Populate(2, 0xA9B50000u, new[] { MakeCachedBatch(2, 0, 6, 0xBB) });
        cache.Populate(3, 0xA9B40000u, new[] { MakeCachedBatch(3, 0, 6, 0xCC) });

        cache.InvalidateLandblock(0xA9B40000u);

        // Only the neighbouring-landblock entry survives the sweep.
        Assert.Equal(1, cache.Count);
        Assert.False(cache.TryGet(1, 0xA9B40000u, out _));
        Assert.True(cache.TryGet(2, 0xA9B50000u, out var keep));
        Assert.NotNull(keep);
        Assert.Equal(0xA9B50000u, keep!.LandblockHint);
        Assert.False(cache.TryGet(3, 0xA9B40000u, out _));
    }

    [Fact]
    public void InvalidateLandblock_OnMissingLb_NoThrow()
    {
        var cache = new EntityClassificationCache();
        cache.Populate(1, 0xA9B40000u, new[] { MakeCachedBatch(1, 0, 6, 0xAA) });

        var ex = Record.Exception(() => cache.InvalidateLandblock(0xDEADBEEFu));

        Assert.Null(ex);
        Assert.Equal(1, cache.Count);
    }

    [Fact]
    public void DespawnRespawn_UnderReusedId_RepopulatesFresh()
    {
        // Pins the audit's ObjDescEvent contract (audit section 1):
        // ObjDescEvent is despawn + respawn (with a NEW local entity.Id),
        // never an in-place mutation. Even when an id IS reused
        // (theoretical — _liveEntityIdCounter is monotonic in practice),
        // the cache must serve fresh data after invalidation.
        var cache = new EntityClassificationCache();
        var batchesV1 = new[] { MakeCachedBatch(1, 0, 6, 0xAA) };
        var batchesV2 = new[] { MakeCachedBatch(2, 6, 12, 0xCC) };

        cache.Populate(100, 0xA9B40000u, batchesV1);
        cache.InvalidateEntity(100);
        cache.Populate(100, 0xA9B40000u, batchesV2);

        Assert.True(cache.TryGet(100, 0xA9B40000u, out var entry));
        Assert.NotNull(entry);
        Assert.Equal(batchesV2, entry!.Batches);
        Assert.Equal(0xCCu, entry.Batches[0].BindlessTextureHandle);
    }

#if DEBUG
    [Fact]
    public void DebugCrossCheck_BatchCountMismatch_FiresAssert()
    {
        var cache = new EntityClassificationCache();
        cache.Populate(100, 0u, new[]
        {
            MakeCachedBatch(1, 0, 6, 0xAA),
            MakeCachedBatch(1, 6, 6, 0xBB),
        });
        // Synthetic "live" with fewer batches → should fire Debug.Assert.
        var liveBatches = new[] { MakeCachedBatch(1, 0, 6, 0xAA) };

        var asserts = RunWithAssertCapture(
            () => cache.DebugCrossCheck(100, 0u, liveBatches));

        Assert.NotEmpty(asserts);
        string joined = string.Join(" ", asserts);
        Assert.Contains("batch count mismatch", joined);
    }

    [Fact]
    public void DebugCrossCheck_RestPoseMatch_NoAssert()
    {
        var cache = new EntityClassificationCache();
        var batches = new[] { MakeCachedBatch(1, 0, 6, 0xAA) };
        cache.Populate(100, 0u, batches);

        var asserts = RunWithAssertCapture(
            () => cache.DebugCrossCheck(100, 0u, batches));

        Assert.Empty(asserts);
    }

    /// <summary>
    /// Runs <paramref name="action"/> with the global <c>Trace.Listeners</c>
    /// collection temporarily replaced by a single <see cref="CaptureListener"/>,
    /// restoring the original listeners afterwards (even on throw). Returns every
    /// message the action routed through Trace — including <c>Debug.Assert</c>
    /// failures, which arrive via <c>TraceListener.Fail</c>.
    /// Extracted so both DebugCrossCheck tests share one swap/restore path.
    /// </summary>
    private static List<string> RunWithAssertCapture(System.Action action)
    {
        var originalListeners = new System.Diagnostics.TraceListener[System.Diagnostics.Trace.Listeners.Count];
        System.Diagnostics.Trace.Listeners.CopyTo(originalListeners, 0);
        System.Diagnostics.Trace.Listeners.Clear();

        var captured = new List<string>();
        System.Diagnostics.Trace.Listeners.Add(new CaptureListener(captured));
        try
        {
            action();
        }
        finally
        {
            // Restore the exact listener set we displaced so other tests in the
            // process see an unmodified Trace pipeline.
            System.Diagnostics.Trace.Listeners.Clear();
            foreach (var l in originalListeners) System.Diagnostics.Trace.Listeners.Add(l);
        }
        return captured;
    }

    /// <summary>
    /// TraceListener that records every Write/WriteLine/Fail message into a
    /// caller-owned list, letting tests observe Debug.Assert output.
    /// </summary>
    private sealed class CaptureListener : System.Diagnostics.TraceListener
    {
        private readonly List<string> _captured;

        public CaptureListener(List<string> captured) { _captured = captured; }

        public override void Write(string? message) { if (message != null) _captured.Add(message); }

        public override void WriteLine(string? message) { if (message != null) _captured.Add(message); }

        // Debug.Assert failures land here; keep message and detail together so
        // substring assertions see both halves.
        public override void Fail(string? message, string? detailMessage)
        {
            _captured.Add($"{message}: {detailMessage}");
        }

        public override void Fail(string? message) { if (message != null) _captured.Add(message); }
    }
#endif

    /// <summary>
    /// Builds a minimal <c>CachedBatch</c> whose <c>GroupKey</c> is fully
    /// determined by the four distinguishing fields tests care about; the
    /// rest-pose matrix is identity and translucency is opaque.
    /// </summary>
    private static CachedBatch MakeCachedBatch(
        uint ibo, uint firstIndex, int indexCount, ulong texHandle)
    {
        var key = new GroupKey(
            Ibo: ibo,
            FirstIndex: firstIndex,
            BaseVertex: 0,
            IndexCount: indexCount,
            BindlessTextureHandle: texHandle,
            TextureLayer: 0,
            Translucency: TranslucencyKind.Opaque);
        return new CachedBatch(key, texHandle, Matrix4x4.Identity);
    }
}