acdream/tests/AcDream.Core.Tests/Rendering/Wb/EntityClassificationCacheTests.cs
Erik f16604b60b feat(render #53): DEBUG cross-check guards against the prior Tier 1 bug class
Adds EntityClassificationCache.DebugCrossCheck(entityId, liveBatches) that
asserts cached state matches a live re-classification. Wires a simpler
predicate assert into WbDrawDispatcher's cache-hit branch (asserts
isAnimated == false on cache hit). Tests #13a and #13b cover the
batch-count mismatch and clean-match cases via a custom TraceListener
that captures Debug.Assert calls.

Zero cost in Release. In DEBUG, the assert fires immediately if a future
regression mutates static-entity state outside the audit's known write
sites — the same failure mode that bit the prior Tier 1 attempt.

Phase 4 complete. Cache + invalidation + safety net all in place.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-05-10 19:43:24 +02:00

285 lines
10 KiB
C#
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

using System.Collections.Generic;
using System.Numerics;
using AcDream.App.Rendering.Wb;
using AcDream.Core.Meshing;
using Xunit;
namespace AcDream.Core.Tests.Rendering.Wb;
/// <summary>
/// Unit tests for <c>EntityClassificationCache</c>: lookup, populate/override,
/// count tracking, per-entity and per-landblock invalidation, and — in DEBUG
/// builds only — the <c>DebugCrossCheck</c> safety net that fires a
/// <c>Debug.Assert</c> when cached state diverges from a live re-classification.
/// </summary>
public class EntityClassificationCacheTests
{
    [Fact]
    public void TryGet_EmptyCache_ReturnsFalse()
    {
        var cache = new EntityClassificationCache();

        bool found = cache.TryGet(entityId: 42, out var entry);

        Assert.False(found);
        Assert.Null(entry);
    }

    [Fact]
    public void Populate_ThenTryGet_ReturnsBatchesInOrder()
    {
        var cache = new EntityClassificationCache();
        var batches = new[]
        {
            MakeCachedBatch(ibo: 1, firstIndex: 0, indexCount: 6, texHandle: 0xAA),
            MakeCachedBatch(ibo: 1, firstIndex: 6, indexCount: 6, texHandle: 0xBB),
        };

        cache.Populate(entityId: 100, landblockHint: 0xA9B40000u, batches);

        Assert.True(cache.TryGet(100, out var entry));
        Assert.NotNull(entry);
        Assert.Equal(100u, entry!.EntityId);
        Assert.Equal(0xA9B40000u, entry.LandblockHint);
        // Order must be preserved exactly as populated.
        Assert.Equal(batches, entry.Batches);
    }

    [Fact]
    public void Populate_OverridesExistingEntry()
    {
        var cache = new EntityClassificationCache();
        cache.Populate(100, 0u, new[] { MakeCachedBatch(1, 0, 6, 0xAA) });

        cache.Populate(100, 0u, new[] { MakeCachedBatch(2, 0, 12, 0xCC) });

        Assert.True(cache.TryGet(100, out var entry));
        Assert.NotNull(entry);
        Assert.Single(entry!.Batches);
        Assert.Equal(0xCCu, entry.Batches[0].BindlessTextureHandle);
    }

    [Fact]
    public void Count_TracksLiveEntries()
    {
        var cache = new EntityClassificationCache();
        Assert.Equal(0, cache.Count);

        cache.Populate(1, 0u, new[] { MakeCachedBatch(1, 0, 6, 0xAA) });
        Assert.Equal(1, cache.Count);

        cache.Populate(2, 0u, new[] { MakeCachedBatch(2, 0, 6, 0xAA) });
        Assert.Equal(2, cache.Count);

        // Re-populate same id — should not double-count.
        cache.Populate(1, 0u, new[] { MakeCachedBatch(3, 0, 6, 0xBB) });
        Assert.Equal(2, cache.Count);
    }

    [Fact]
    public void Populate_WithEmptyBatches_StoresEmptyEntry()
    {
        var cache = new EntityClassificationCache();

        cache.Populate(entityId: 7, landblockHint: 0u, System.Array.Empty<CachedBatch>());

        // An entity with zero batches is still a valid cache entry, not a miss.
        Assert.True(cache.TryGet(7, out var entry));
        Assert.NotNull(entry);
        Assert.Empty(entry!.Batches);
    }

    [Fact]
    public void Populate_SetupMultiPart_StoresFlatBatchPerSubPart()
    {
        // Synthetic Setup with 3 subParts × 2 batches each = 6 flat entries.
        // This pins the spec §3 Q4 decision: pre-flatten Setup multi-parts at
        // populate time so the per-frame hot path is branchless.
        var cache = new EntityClassificationCache();
        var batches = new CachedBatch[6];
        for (int subPart = 0; subPart < 3; subPart++)
        {
            for (int b = 0; b < 2; b++)
            {
                batches[subPart * 2 + b] = MakeCachedBatch(
                    ibo: (uint)(subPart + 1),
                    firstIndex: (uint)(b * 6),
                    indexCount: 6,
                    texHandle: (ulong)(0x100 + subPart * 2 + b));
            }
        }

        cache.Populate(99, 0u, batches);

        Assert.True(cache.TryGet(99, out var entry));
        Assert.NotNull(entry);
        Assert.Equal(6, entry!.Batches.Length);
        Assert.Equal(0x100u, entry.Batches[0].BindlessTextureHandle);
        Assert.Equal(0x105u, entry.Batches[5].BindlessTextureHandle);
    }

    [Fact]
    public void InvalidateEntity_RemovesEntry()
    {
        var cache = new EntityClassificationCache();
        cache.Populate(100, 0u, new[] { MakeCachedBatch(1, 0, 6, 0xAA) });
        Assert.True(cache.TryGet(100, out _));

        cache.InvalidateEntity(100);

        Assert.False(cache.TryGet(100, out var entry));
        Assert.Null(entry);
        Assert.Equal(0, cache.Count);
    }

    [Fact]
    public void InvalidateEntity_OnMissingId_NoThrow()
    {
        var cache = new EntityClassificationCache();

        var ex = Record.Exception(() => cache.InvalidateEntity(99999));

        Assert.Null(ex);
        Assert.Equal(0, cache.Count);
    }

    [Fact]
    public void InvalidateLandblock_RemovesAllMatchingEntries()
    {
        var cache = new EntityClassificationCache();
        cache.Populate(1, 0xA9B40000u, new[] { MakeCachedBatch(1, 0, 6, 0xAA) });
        cache.Populate(2, 0xA9B40000u, new[] { MakeCachedBatch(2, 0, 6, 0xBB) });
        cache.Populate(3, 0xA9B40000u, new[] { MakeCachedBatch(3, 0, 6, 0xCC) });
        Assert.Equal(3, cache.Count);

        cache.InvalidateLandblock(0xA9B40000u);

        Assert.Equal(0, cache.Count);
        Assert.False(cache.TryGet(1, out _));
        Assert.False(cache.TryGet(2, out _));
        Assert.False(cache.TryGet(3, out _));
    }

    [Fact]
    public void InvalidateLandblock_LeavesNonMatchingEntries()
    {
        var cache = new EntityClassificationCache();
        cache.Populate(1, 0xA9B40000u, new[] { MakeCachedBatch(1, 0, 6, 0xAA) });
        cache.Populate(2, 0xA9B50000u, new[] { MakeCachedBatch(2, 0, 6, 0xBB) });
        cache.Populate(3, 0xA9B40000u, new[] { MakeCachedBatch(3, 0, 6, 0xCC) });

        cache.InvalidateLandblock(0xA9B40000u);

        // Only entity 2 lives in a different landblock and must survive.
        Assert.Equal(1, cache.Count);
        Assert.False(cache.TryGet(1, out _));
        Assert.True(cache.TryGet(2, out var keep));
        Assert.NotNull(keep);
        Assert.Equal(0xA9B50000u, keep!.LandblockHint);
        Assert.False(cache.TryGet(3, out _));
    }

    [Fact]
    public void InvalidateLandblock_OnMissingLb_NoThrow()
    {
        var cache = new EntityClassificationCache();
        cache.Populate(1, 0xA9B40000u, new[] { MakeCachedBatch(1, 0, 6, 0xAA) });

        var ex = Record.Exception(() => cache.InvalidateLandblock(0xDEADBEEFu));

        Assert.Null(ex);
        Assert.Equal(1, cache.Count);
    }

    [Fact]
    public void DespawnRespawn_UnderReusedId_RepopulatesFresh()
    {
        // Pins the audit's ObjDescEvent contract (audit section 1):
        // ObjDescEvent is despawn + respawn (with a NEW local entity.Id),
        // never an in-place mutation. Even when an id IS reused
        // (theoretical — _liveEntityIdCounter is monotonic in practice),
        // the cache must serve fresh data after invalidation.
        var cache = new EntityClassificationCache();
        var batchesV1 = new[] { MakeCachedBatch(1, 0, 6, 0xAA) };
        var batchesV2 = new[] { MakeCachedBatch(2, 6, 12, 0xCC) };

        cache.Populate(100, 0xA9B40000u, batchesV1);
        cache.InvalidateEntity(100);
        cache.Populate(100, 0xA9B40000u, batchesV2);

        Assert.True(cache.TryGet(100, out var entry));
        Assert.NotNull(entry);
        Assert.Equal(batchesV2, entry!.Batches);
        Assert.Equal(0xCCu, entry.Batches[0].BindlessTextureHandle);
    }

#if DEBUG
    [Fact]
    public void DebugCrossCheck_BatchCountMismatch_FiresAssert()
    {
        var cache = new EntityClassificationCache();
        cache.Populate(100, 0u, new[]
        {
            MakeCachedBatch(1, 0, 6, 0xAA),
            MakeCachedBatch(1, 6, 6, 0xBB),
        });
        // Synthetic "live" with fewer batches → should fire Debug.Assert.
        var liveBatches = new[] { MakeCachedBatch(1, 0, 6, 0xAA) };

        var asserts = CaptureDebugAsserts(() => cache.DebugCrossCheck(100, liveBatches));

        Assert.NotEmpty(asserts);
        string joined = string.Join(" ", asserts);
        Assert.Contains("batch count mismatch", joined);
    }

    [Fact]
    public void DebugCrossCheck_RestPoseMatch_NoAssert()
    {
        var cache = new EntityClassificationCache();
        var batches = new[] { MakeCachedBatch(1, 0, 6, 0xAA) };
        cache.Populate(100, 0u, batches);

        // Cross-checking against the very batches we populated must be clean.
        var asserts = CaptureDebugAsserts(() => cache.DebugCrossCheck(100, batches));

        Assert.Empty(asserts);
    }

    /// <summary>
    /// Runs <paramref name="action"/> with the shared Trace listener list
    /// temporarily replaced by a single <see cref="CaptureListener"/>, and
    /// returns every message the listener captured (Debug.Assert routes
    /// through Trace listeners on .NET Core, so assert failures land here).
    /// The original listeners are restored even if the action throws —
    /// this keeps the process-wide listener list intact for other tests.
    /// </summary>
    private static List<string> CaptureDebugAsserts(System.Action action)
    {
        var originalListeners = new System.Diagnostics.TraceListener[System.Diagnostics.Trace.Listeners.Count];
        System.Diagnostics.Trace.Listeners.CopyTo(originalListeners, 0);
        System.Diagnostics.Trace.Listeners.Clear();
        var asserts = new List<string>();
        System.Diagnostics.Trace.Listeners.Add(new CaptureListener(asserts));
        try
        {
            action();
        }
        finally
        {
            System.Diagnostics.Trace.Listeners.Clear();
            foreach (var l in originalListeners) System.Diagnostics.Trace.Listeners.Add(l);
        }
        return asserts;
    }

    /// <summary>
    /// TraceListener that records every Write/WriteLine/Fail message into a
    /// caller-owned list, so tests can assert on Debug.Assert output without
    /// the default listener's dialog/abort behavior.
    /// </summary>
    private sealed class CaptureListener : System.Diagnostics.TraceListener
    {
        private readonly List<string> _captured;

        public CaptureListener(List<string> captured) { _captured = captured; }

        public override void Write(string? message) { if (message != null) _captured.Add(message); }

        public override void WriteLine(string? message) { if (message != null) _captured.Add(message); }

        public override void Fail(string? message, string? detailMessage)
        {
            _captured.Add($"{message}: {detailMessage}");
        }

        public override void Fail(string? message) { if (message != null) _captured.Add(message); }
    }
#endif

    /// <summary>
    /// Builds a <c>CachedBatch</c> with an opaque, layer-0 <c>GroupKey</c>,
    /// identity transform, and the given draw-call coordinates. The bindless
    /// texture handle doubles as a per-batch fingerprint in assertions.
    /// </summary>
    private static CachedBatch MakeCachedBatch(
        uint ibo, uint firstIndex, int indexCount, ulong texHandle)
    {
        var key = new GroupKey(
            Ibo: ibo,
            FirstIndex: firstIndex,
            BaseVertex: 0,
            IndexCount: indexCount,
            BindlessTextureHandle: texHandle,
            TextureLayer: 0,
            Translucency: TranslucencyKind.Opaque);
        return new CachedBatch(key, texHandle, Matrix4x4.Identity);
    }
}