Added portals, quest tracking, Discord monitor, etc.
parent 72de9b0f7f
commit dffd295091

312 changed files with 4130 additions and 7 deletions
main.py | 166
@@ -37,9 +37,11 @@ from db_async import (
     spawn_events,
     rare_events,
     character_inventories,
+    portal_discoveries,
     server_health_checks,
     server_status,
-    init_db_async
+    init_db_async,
+    cleanup_old_portals
 )
 import asyncio
 
@@ -99,6 +101,10 @@ _server_status_cache = {
     "last_restart": None
 }
+
+# Quest status cache - stores last received quest data per player
+# Structure: {character_name: {quest_name: countdown_value}}
+_quest_status_cache: Dict[str, Dict[str, str]] = {}
 
 # AC Hash32 checksum algorithm (based on ThwargLauncher)
 def calculate_hash32(data: bytes) -> int:
     """Calculate AC Hash32 checksum as used in ThwargLauncher."""
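For reference, after a couple of quest messages the new cache holds nested dictionaries keyed by character and then quest name. The character name and countdown strings below are hypothetical, and the Dict annotation assumes from typing import Dict is already present in main.py:

# Illustrative cache contents only -- the character name and countdown values are made up.
_quest_status_cache = {
    "ExampleChar": {
        "Stipend Collection Timer": "19:59:07",
        "Insatiable Eater Jaw": "5d 23:12:44",
    }
}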
@@ -249,13 +255,15 @@ async def get_player_count_from_treestats(server_name: str) -> int:
         return 0
 
 async def monitor_server_health():
-    """Background task to monitor server health every 30 seconds."""
+    """Background task to monitor server health every 30 seconds and cleanup old portals hourly."""
     server_name = "Coldeve"
     server_address = "play.coldeve.ac"
     server_port = 9000
     check_interval = 30 # seconds
     player_count_interval = 300 # 5 minutes (like ThwargLauncher's 10 minutes, but more frequent)
+    portal_cleanup_interval = 3600 # 1 hour
     last_player_count_check = 0
+    last_portal_cleanup = 0
     current_player_count = None
 
     # Initialize server status in database
@@ -373,6 +381,16 @@ async def monitor_server_health():
 
             logger.debug(f"Server health check: {status}, latency={latency_ms}ms, players={current_player_count}")
 
+            # Portal cleanup (run every hour)
+            current_time = time.time()
+            if current_time - last_portal_cleanup >= portal_cleanup_interval:
+                try:
+                    deleted_count = await cleanup_old_portals()
+                    logger.info(f"Portal cleanup: removed {deleted_count} old portal discoveries")
+                    last_portal_cleanup = current_time
+                except Exception as cleanup_error:
+                    logger.error(f"Portal cleanup error: {cleanup_error}", exc_info=True)
+
         except Exception as e:
             logger.error(f"Server health monitoring error: {e}", exc_info=True)
 
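cleanup_old_portals is imported from db_async but its body is not part of this diff. A minimal sketch, assuming it uses the same database object and portal_discoveries table as the rest of the commit and defaults to a 24-hour retention window (both assumptions), could look like this:

# Hypothetical db_async.cleanup_old_portals -- not shown in this commit.
from datetime import datetime, timedelta, timezone

async def cleanup_old_portals(max_age_hours: int = 24) -> int:
    """Delete portal discoveries older than max_age_hours; return how many rows were removed."""
    cutoff_time = datetime.now(timezone.utc) - timedelta(hours=max_age_hours)
    # Count the stale rows first so the caller's "removed {deleted_count}" log line has a value to report.
    row = await database.fetch_one(
        "SELECT COUNT(*) AS stale FROM portal_discoveries WHERE timestamp < :cutoff_time",
        {"cutoff_time": cutoff_time},
    )
    await database.execute(
        "DELETE FROM portal_discoveries WHERE timestamp < :cutoff_time",
        {"cutoff_time": cutoff_time},
    )
    return row["stale"] if row else 0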
@@ -1045,6 +1063,63 @@ async def get_server_health():
         logger.error(f"Failed to get server health data: {e}", exc_info=True)
         raise HTTPException(status_code=500, detail="Internal server error")
 
+@app.get("/quest-status")
+async def get_quest_status():
+    """Return current cached quest status for all players."""
+    try:
+        # Return the quest cache with structured data
+        return {
+            "quest_data": _quest_status_cache,
+            "tracked_quests": [
+                "Stipend Collection Timer",
+                "Blank Augmentation Gem Pickup Timer",
+                "Insatiable Eater Jaw"
+            ],
+            "player_count": len(_quest_status_cache)
+        }
+    except Exception as e:
+        logger.error(f"Failed to get quest status data: {e}", exc_info=True)
+        raise HTTPException(status_code=500, detail="Internal server error")
+
+@app.get("/portals")
+async def get_portals():
+    """Return unique portal discoveries from the last 24 hours."""
+    try:
+        # Query unique portals from last 24 hours, keeping the most recent discovery of each
+        cutoff_time = datetime.now(timezone.utc) - timedelta(hours=24)
+
+        query = """
+            SELECT DISTINCT ON (portal_name)
+                character_name, portal_name, timestamp, ns, ew, z
+            FROM portal_discoveries
+            WHERE timestamp >= :cutoff_time
+            ORDER BY portal_name, timestamp DESC
+        """
+
+        rows = await database.fetch_all(query, {"cutoff_time": cutoff_time})
+
+        portals = []
+        for row in rows:
+            portal = {
+                "character_name": row["character_name"],
+                "portal_name": row["portal_name"],
+                "timestamp": row["timestamp"].isoformat(),
+                "ns": row["ns"],
+                "ew": row["ew"],
+                "z": row["z"]
+            }
+            portals.append(portal)
+
+        return {
+            "portals": portals,
+            "portal_count": len(portals),
+            "cutoff_time": cutoff_time.isoformat()
+        }
+
+    except Exception as e:
+        logger.error(f"Failed to get portals data: {e}", exc_info=True)
+        raise HTTPException(status_code=500, detail="Internal server error")
+
 @app.get("/live", response_model=dict)
 @app.get("/live/", response_model=dict)
 async def get_live_players():
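Both new endpoints are read-only GETs and easy to smoke-test; the base URL below is an assumption for illustration. Note that the SELECT DISTINCT ON (portal_name) query relies on PostgreSQL-specific syntax.

# Quick manual check of the new endpoints (base URL is hypothetical).
import requests

base = "http://localhost:8000"
print(requests.get(f"{base}/quest-status").json())  # {"quest_data": {...}, "tracked_quests": [...], "player_count": ...}
print(requests.get(f"{base}/portals").json())       # {"portals": [...], "portal_count": ..., "cutoff_time": "..."}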
@@ -1842,6 +1917,93 @@ async def ws_receive_snapshots(
                 except Exception as e:
                     logger.error(f"Failed to process vitals for {data.get('character_name', 'unknown')}: {e}", exc_info=True)
                 continue
+            # --- Quest message: update cache and broadcast (no database storage) ---
+            if msg_type == "quest":
+                character_name = data.get("character_name")
+                quest_name = data.get("quest_name")
+                countdown = data.get("countdown")
+
+                if character_name and quest_name and countdown is not None:
+                    # Only track specific quest types
+                    allowed_quests = {
+                        "Stipend Collection Timer",
+                        "Blank Augmentation Gem Pickup Timer",
+                        "Insatiable Eater Jaw"
+                    }
+
+                    if quest_name in allowed_quests:
+                        # Update quest cache
+                        if character_name not in _quest_status_cache:
+                            _quest_status_cache[character_name] = {}
+                        _quest_status_cache[character_name][quest_name] = countdown
+
+                        # Broadcast to browser clients for real-time updates
+                        await _broadcast_to_browser_clients(data)
+                        logger.debug(f"Updated quest status for {character_name}: {quest_name} = {countdown}")
+                    else:
+                        logger.debug(f"Ignoring non-tracked quest: {quest_name}")
+                else:
+                    logger.warning(f"Invalid quest message format from {websocket.client}: missing required fields")
+                continue
+            # --- Portal message: store in database and broadcast ---
+            if msg_type == "portal":
+                character_name = data.get("character_name")
+                portal_name = data.get("portal_name")
+                ns = data.get("ns")
+                ew = data.get("ew")
+                z = data.get("z")
+                timestamp_str = data.get("timestamp")
+
+                if all([character_name, portal_name, ns, ew, z, timestamp_str]):
+                    try:
+                        # Parse timestamp
+                        timestamp = datetime.fromisoformat(timestamp_str.replace('Z', '+00:00'))
+
+                        # Convert coordinates to floats for database storage
+                        ns = float(ns)
+                        ew = float(ew)
+                        z = float(z)
+
+                        # Check if this portal was recently discovered (within last hour) to avoid duplicates
+                        recent_check = await database.fetch_one(
+                            """
+                            SELECT id FROM portal_discoveries
+                            WHERE character_name = :character_name
+                            AND portal_name = :portal_name
+                            AND timestamp > :cutoff_time
+                            LIMIT 1
+                            """,
+                            {
+                                "character_name": character_name,
+                                "portal_name": portal_name,
+                                "cutoff_time": timestamp - timedelta(hours=1)
+                            }
+                        )
+
+                        if not recent_check:
+                            # Store portal discovery in database
+                            await database.execute(
+                                portal_discoveries.insert().values(
+                                    character_name=character_name,
+                                    portal_name=portal_name,
+                                    timestamp=timestamp,
+                                    ns=ns,
+                                    ew=ew,
+                                    z=z
+                                )
+                            )
+                            logger.info(f"Recorded portal discovery: {portal_name} by {character_name} at {ns}, {ew}")
+                        else:
+                            logger.debug(f"Skipping duplicate portal discovery: {portal_name} by {character_name} (already discovered recently)")
+
+                        # Broadcast to browser clients for map updates
+                        await _broadcast_to_browser_clients(data)
+
+                    except Exception as e:
+                        logger.error(f"Failed to process portal discovery for {character_name}: {e}", exc_info=True)
+                else:
+                    logger.warning(f"Invalid portal message format from {websocket.client}: missing required fields")
+                continue
             # Unknown message types are ignored
             if msg_type:
                 logger.warning(f"Unknown message type '{msg_type}' from {websocket.client}")
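The WebSocket handler above reads flat JSON objects; the field names mirror the data.get(...) calls in the diff, while the concrete values (and the key that feeds msg_type) are assumptions for illustration:

# Hypothetical client payloads matching the fields the handler reads.
quest_msg = {
    "type": "quest",                      # assumed name of the field behind msg_type
    "character_name": "ExampleChar",
    "quest_name": "Stipend Collection Timer",
    "countdown": "19:59:07",
}

portal_msg = {
    "type": "portal",
    "character_name": "ExampleChar",
    "portal_name": "Portal to Example Town",  # invented name for illustration
    "ns": "12.3",                             # handler converts ns/ew/z with float(); note all([...]) also rejects 0 values
    "ew": "-45.6",
    "z": "0.005",
    "timestamp": "2024-01-01T00:00:00Z",      # 'Z' suffix is normalized to '+00:00' before fromisoformat()
}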