Add suitbuilder backend improvements and SSE streaming fix

- Add dedicated streaming proxy endpoint for real-time suitbuilder SSE updates
- Implement stable sorting with character_name and name tiebreakers for deterministic results
- Refactor locked items to locked slots supporting set_id and spell constraints
- Add Mag-SuitBuilder style branch pruning tracking variables
- Enhance search with phase updates and detailed progress logging
- Update design document with SSE streaming proxy fix details

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
erik 2026-02-05 19:14:07 +00:00
parent 8e70f88de1
commit e0265e261c
4 changed files with 655 additions and 222 deletions


@@ -222,3 +222,31 @@ No changes needed - ItemPreFilter used at line 969.
| Task 5: Add Armor Level Scoring | ✅ Complete | Added armor_score = total_armor // 100 |
| Task 6: Add Item Pre-Filtering | ✅ Already Working | No changes needed |
| Task 7: AccessorySearcher | Not Started | Future |
| Task 8: Fix SSE Streaming Proxy | ✅ Complete | Added dedicated streaming endpoint in main.py |

---

## Bug Fixes Applied

### SSE Streaming Proxy Fix (2026-01-30)

**Problem:** The generic inventory proxy at `/inv/{path:path}` used `httpx.request()`, which buffers the entire response before returning. For SSE streams like the suitbuilder search, this caused:

- No progress updates reaching the frontend
- "Gateway Time-out" after buffering the full 5-minute search

**Solution:** Added a dedicated streaming proxy endpoint `/inv/suitbuilder/search` in `main.py`:
```python
@app.post("/inv/suitbuilder/search")
async def proxy_suitbuilder_search(request: Request):
    async def stream_response():
        async with httpx.AsyncClient(...) as client:
            async with client.stream(...) as response:
                async for chunk in response.aiter_bytes():
                    yield chunk
    return StreamingResponse(
        stream_response(),
        media_type="text/event-stream",
        headers={"X-Accel-Buffering": "no"}  # Disable nginx buffering
    )
```
**Result:** Suits now stream to frontend in real-time with scores visible as they're found.
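
For reference (not part of this commit's diff), a minimal Python sketch of consuming the new endpoint. Only the route and the `text/event-stream` media type come from the code above; the gateway base URL and the shape of each `data:` payload are assumptions:

```python
# Minimal SSE consumer sketch; base URL and payload fields are assumed, not from the diff.
import asyncio
import httpx

async def watch_search(criteria: dict) -> None:
    timeout = httpx.Timeout(300.0, connect=10.0)  # mirror the proxy's long search timeout
    async with httpx.AsyncClient(timeout=timeout) as client:
        async with client.stream(
            "POST", "http://localhost:8000/inv/suitbuilder/search", json=criteria
        ) as response:
            async for line in response.aiter_lines():
                if line.startswith("data:"):
                    print(line[len("data:"):].strip())  # e.g. a suit candidate with its score

if __name__ == "__main__":
    asyncio.run(watch_search({"character_name": "example"}))
```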


@@ -2702,7 +2702,7 @@ async def search_items(
    # Handle NULLS for optional fields
    nulls_clause = "NULLS LAST" if sort_direction == "ASC" else "NULLS FIRST"
-   query_parts.append(f"ORDER BY {sort_field} {sort_direction} {nulls_clause}")
+   query_parts.append(f"ORDER BY {sort_field} {sort_direction} {nulls_clause}, character_name, db_item_id")

    # Add pagination
    offset = (page - 1) * limit
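
As an illustration of the tiebreaker change (the example `sort_field`/`sort_direction` values are assumed; only the `character_name, db_item_id` suffix comes from the diff), the rendered clause might look like:

```python
# Illustrative only: sort_field/sort_direction are assumed example values.
sort_field, sort_direction = "armor_level", "DESC"
nulls_clause = "NULLS LAST" if sort_direction == "ASC" else "NULLS FIRST"
print(f"ORDER BY {sort_field} {sort_direction} {nulls_clause}, character_name, db_item_id")
# -> ORDER BY armor_level DESC NULLS FIRST, character_name, db_item_id
# Ties on the primary sort key now resolve in a fixed order, so page boundaries
# stay stable between requests.
```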

File diff suppressed because it is too large
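
The suppressed diff presumably contains the suitbuilder changes summarized in the commit message (locked slots, branch pruning, phase updates). Purely as a hypothetical sketch of a locked slot supporting `set_id` and spell constraints, with class and field names assumed rather than taken from the suppressed file:

```python
# Hypothetical illustration; names/fields are assumptions, not from the suppressed diff.
from dataclasses import dataclass, field
from typing import Optional

@dataclass
class LockedSlot:
    slot: str                                   # e.g. "head", "chest"
    item_id: Optional[int] = None               # pin a specific item into the slot
    set_id: Optional[int] = None                # or require any piece from a given set
    required_spells: list[str] = field(default_factory=list)  # spells the item must carry

    def accepts(self, item) -> bool:
        """True if a candidate item satisfies every constraint on this slot."""
        if self.item_id is not None and getattr(item, "id", None) != self.item_id:
            return False
        if self.set_id is not None and getattr(item, "set_id", None) != self.set_id:
            return False
        spells = set(getattr(item, "spells", []) or [])
        return all(s in spells for s in self.required_spells)
```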

main.py (39 changed lines)

@@ -18,7 +18,7 @@ import socket
import struct
from fastapi import FastAPI, Header, HTTPException, Query, WebSocket, WebSocketDisconnect, Request
-from fastapi.responses import JSONResponse, Response
+from fastapi.responses import JSONResponse, Response, StreamingResponse
from fastapi.routing import APIRoute
from fastapi.staticfiles import StaticFiles
from fastapi.encoders import jsonable_encoder
@@ -2268,6 +2268,43 @@ async def test_inventory_route():
    """Test route to verify inventory proxy is working"""
    return {"message": "Inventory proxy route is working"}

@app.post("/inv/suitbuilder/search")
async def proxy_suitbuilder_search(request: Request):
    """Stream suitbuilder search results - SSE requires streaming proxy."""
    inventory_service_url = os.getenv('INVENTORY_SERVICE_URL', 'http://inventory-service:8000')
    logger.info(f"Streaming proxy to suitbuilder search")

    # Read body BEFORE creating generator (request context needed)
    body = await request.body()

    async def stream_response():
        try:
            # Use streaming request with long timeout for searches
            async with httpx.AsyncClient(timeout=httpx.Timeout(300.0, connect=10.0)) as client:
                async with client.stream(
                    method="POST",
                    url=f"{inventory_service_url}/suitbuilder/search",
                    content=body,
                    headers={"Content-Type": "application/json"}
                ) as response:
                    async for chunk in response.aiter_bytes():
                        yield chunk
        except httpx.ReadTimeout:
            yield b"event: error\ndata: {\"message\": \"Search timeout\"}\n\n"
        except Exception as e:
            logger.error(f"Streaming proxy error: {e}")
            yield f"event: error\ndata: {{\"message\": \"Proxy error: {str(e)}\"}}\n\n".encode()

    return StreamingResponse(
        stream_response(),
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "X-Accel-Buffering": "no"  # Disable nginx buffering
        }
    )

@app.api_route("/inv/{path:path}", methods=["GET", "POST"])
async def proxy_inventory_service(path: str, request: Request):
    """Proxy all inventory service requests"""