feat(app): Phase A.1 — LandblockStreamer (background worker + channels)
Background thread pulls load/unload jobs from an inbox channel, invokes a caller-supplied Func<uint, LoadedLandblock?> (production wraps LandblockLoader.Load, tests inject a fake), and posts results to an outbox channel the render thread drains. Graceful shutdown via CancellationToken; failed loads reported rather than retried. 4 new tests, all green. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
9d1c2c45e5
commit
0904372af6
2 changed files with 248 additions and 0 deletions
101
tests/AcDream.Core.Tests/Streaming/LandblockStreamerTests.cs
Normal file
101
tests/AcDream.Core.Tests/Streaming/LandblockStreamerTests.cs
Normal file
|
|
@ -0,0 +1,101 @@
|
|||
using System.Threading.Tasks;
|
||||
using AcDream.App.Streaming;
|
||||
using AcDream.Core.World;
|
||||
using DatReaderWriter.DBObjs;
|
||||
using Xunit;
|
||||
|
||||
namespace AcDream.Core.Tests.Streaming;
|
||||
|
||||
/// <summary>
/// Tests for <see cref="LandblockStreamer"/>: a background worker that pulls
/// load/unload jobs from an inbox channel, runs a caller-supplied loader
/// delegate, and posts <see cref="LandblockStreamResult"/> records to an
/// outbox the render thread drains via <c>DrainCompletions</c>.
/// </summary>
public class LandblockStreamerTests
{
    /// <summary>
    /// Polls the streamer's outbox until the worker produces a completion,
    /// or gives up after roughly 2 seconds (200 iterations x 10 ms).
    /// Returns the first drained result, or null on timeout.
    /// </summary>
    private static async Task<LandblockStreamResult?> DrainFirstResultAsync(LandblockStreamer streamer)
    {
        for (int i = 0; i < 200; i++)
        {
            var drained = streamer.DrainCompletions(maxBatchSize: 4);
            if (drained.Count > 0)
            {
                return drained[0];
            }

            await Task.Delay(10);
        }

        return null;
    }

    [Fact]
    public async Task Load_FollowedByDrain_ReturnsLoadedRecord()
    {
        // Arrange: a fake loader that recognizes exactly one landblock id.
        var stubLandblock = new LoadedLandblock(
            0xA9B4FFFEu,
            new LandBlock(),
            System.Array.Empty<WorldEntity>());

        using var streamer = new LandblockStreamer(
            loadLandblock: id => id == 0xA9B4FFFEu ? stubLandblock : null);

        // Act
        streamer.Start();
        streamer.EnqueueLoad(0xA9B4FFFEu);
        var result = await DrainFirstResultAsync(streamer);

        // Assert: the worker surfaced a Loaded result carrying our stub.
        Assert.NotNull(result);
        var loaded = Assert.IsType<LandblockStreamResult.Loaded>(result);
        Assert.Equal(0xA9B4FFFEu, loaded.LandblockId);
        Assert.Same(stubLandblock, loaded.Landblock);
    }

    [Fact]
    public async Task Load_WhenLoaderReturnsNull_ReportsFailed()
    {
        // Arrange: a loader that finds nothing — the streamer should report
        // a failure rather than retry (per the worker's contract).
        using var streamer = new LandblockStreamer(
            loadLandblock: _ => null);

        // Act
        streamer.Start();
        streamer.EnqueueLoad(0x12340000u);
        var result = await DrainFirstResultAsync(streamer);

        // Assert
        Assert.NotNull(result);
        Assert.IsType<LandblockStreamResult.Failed>(result);
    }

    [Fact]
    public async Task Load_WhenLoaderThrows_ReportsFailedWithMessage()
    {
        // Arrange: a loader that throws — the exception must be captured
        // into a Failed result instead of killing the worker thread.
        using var streamer = new LandblockStreamer(
            loadLandblock: _ => throw new System.InvalidOperationException("boom"));

        // Act
        streamer.Start();
        streamer.EnqueueLoad(0x55550000u);
        var result = await DrainFirstResultAsync(streamer);

        // Assert: the failure carries the original exception message.
        Assert.NotNull(result);
        var failed = Assert.IsType<LandblockStreamResult.Failed>(result);
        Assert.Contains("boom", failed.Error);
    }

    [Fact]
    public async Task Unload_ProducesUnloadedResult()
    {
        // Arrange: the loader is irrelevant for unload jobs.
        using var streamer = new LandblockStreamer(loadLandblock: _ => null);

        // Act
        streamer.Start();
        streamer.EnqueueUnload(0xABCD0000u);
        var result = await DrainFirstResultAsync(streamer);

        // Assert
        Assert.NotNull(result);
        var unloaded = Assert.IsType<LandblockStreamResult.Unloaded>(result);
        Assert.Equal(0xABCD0000u, unloaded.LandblockId);
    }
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue