new comments

This commit is contained in:
erik 2025-05-24 18:33:03 +00:00
parent b2f649a489
commit 09404da121
13 changed files with 430 additions and 70 deletions

View file

@@ -1,14 +1,23 @@
# Dockerfile for Dereth Tracker application
# Base image: lightweight Python runtime
FROM python:3.12-slim
# Set application working directory
WORKDIR /app
# Upgrade pip and install required Python packages without caching
RUN python -m pip install --upgrade pip && \
    pip install --no-cache-dir \
        fastapi \
        uvicorn \
        pydantic \
        websockets \
        "databases[postgresql]" \
        sqlalchemy \
        alembic \
        psycopg2-binary
# Copy application source code and migration scripts into container
COPY static/ /app/static/
COPY main.py /app/main.py
COPY db.py /app/db.py
@@ -16,17 +25,23 @@ COPY db_async.py /app/db_async.py
COPY alembic.ini /app/alembic.ini
COPY alembic/ /app/alembic/
COPY Dockerfile /Dockerfile
# Expose the application port to host
EXPOSE 8765
# Default environment variables for application configuration.
# SHARED_SECRET authenticates plugin connections; override it in production.
ENV DATABASE_URL=postgresql://postgres:password@db:5432/dereth \
    DB_MAX_SIZE_MB=2048 \
    DB_RETENTION_DAYS=7 \
    DB_MAX_SQL_LENGTH=1000000000 \
    DB_MAX_SQL_VARIABLES=32766 \
    DB_WAL_AUTOCHECKPOINT_PAGES=1000 \
    SHARED_SECRET=your_shared_secret
# Launch the FastAPI app using Uvicorn; --reload auto-restarts on code changes
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8765", "--reload", "--workers", "1"]

View file

@@ -1,2 +1,4 @@
# Reformat Python code using Black formatter
.PHONY: reformat
reformat:
	black *.py

View file

@@ -1,7 +1,8 @@
; Alembic configuration file for database migrations
[alembic]
; Path to migration scripts directory
script_location = alembic
; Default SQLAlchemy URL for migrations (use DATABASE_URL env var to override)
sqlalchemy.url = postgresql://postgres:password@localhost:5432/dereth
[loggers]

View file

@@ -1,18 +1,22 @@
"""Alembic environment configuration for database migrations.

Configures offline and online migration contexts using SQLAlchemy
and the target metadata defined in db_async.metadata.
"""
import os
from logging.config import fileConfig
from sqlalchemy import engine_from_config, pool
from alembic import context

# Alembic Config object provides access to values in the .ini file
config = context.config
# Override sqlalchemy.url with DATABASE_URL environment variable if provided
database_url = os.getenv('DATABASE_URL', config.get_main_option('sqlalchemy.url'))
config.set_main_option('sqlalchemy.url', database_url)
# Set up Python logging according to config file
if config.config_file_name is not None:
    fileConfig(config.config_file_name)
@@ -23,7 +27,7 @@ target_metadata = metadata
def run_migrations_offline():
    """Run migrations in 'offline' mode using literal SQL script generation."""
    url = config.get_main_option('sqlalchemy.url')
    context.configure(
        url=url,
@@ -37,7 +41,7 @@ def run_migrations_offline():
def run_migrations_online():
    """Run migrations in 'online' mode against a live database connection."""
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix='sqlalchemy.',

View file

@@ -1,3 +1,7 @@
<%#
Alembic migration script template generated by 'alembic revision'.
Edit the upgrade() and downgrade() functions to apply schema changes.
%>
"""
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}

db.py
View file

@@ -1,12 +1,20 @@
"""SQLite3 helper module for local telemetry storage.

Provides functions to initialize the local database schema and save
telemetry snapshots into the history and live_state tables.
Enforces WAL mode, size limits, and auto-vacuum for efficient storage.
"""
import os
import sqlite3
from typing import Dict
from datetime import datetime, timedelta

# Local SQLite database file name (used when running without TimescaleDB)
DB_FILE = "dereth.db"
# Maximum allowed database size (in MB). Defaults to 2048 (2GB). Override via env DB_MAX_SIZE_MB.
MAX_DB_SIZE_MB = int(os.getenv("DB_MAX_SIZE_MB", "2048"))
# Retention window for telemetry history in days (currently not auto-enforced).
# Override via env DB_RETENTION_DAYS for future cleanup scripts.
MAX_RETENTION_DAYS = int(os.getenv("DB_RETENTION_DAYS", "7"))
# SQLite runtime limits customization
DB_MAX_SQL_LENGTH = int(os.getenv("DB_MAX_SQL_LENGTH", "1000000000"))
@@ -16,8 +24,15 @@ DB_WAL_AUTOCHECKPOINT_PAGES = int(os.getenv("DB_WAL_AUTOCHECKPOINT_PAGES", "1000
def init_db() -> None:
    """
    Initialize local SQLite database schema for telemetry logging.

    - Applies SQLite PRAGMA settings for performance and file size management
    - Ensures WAL journaling and auto-vacuum for concurrency and compaction
    - Creates telemetry_log for full history and live_state for latest snapshot per character
    """
    # Open connection with extended timeout for schema operations
    conn = sqlite3.connect(DB_FILE, timeout=30)
    # Bump SQLite runtime limits
    conn.setlimit(sqlite3.SQLITE_LIMIT_LENGTH, DB_MAX_SQL_LENGTH)
@@ -25,16 +40,20 @@ def init_db() -> None:
    conn.setlimit(sqlite3.SQLITE_LIMIT_VARIABLE_NUMBER, DB_MAX_SQL_VARIABLES)
    c = conn.cursor()
    # Enable full auto-vacuum so deletions shrink the database file
    c.execute("PRAGMA auto_vacuum=FULL;")
    conn.commit()
    # Rebuild database to apply auto_vacuum changes
    c.execute("VACUUM;")
    conn.commit()
    # Configure write-ahead logging for concurrency and performance
    c.execute("PRAGMA journal_mode=WAL")
    c.execute("PRAGMA synchronous=NORMAL")
    # Auto-checkpoint after specified WAL frames to limit WAL file size
    c.execute(f"PRAGMA wal_autocheckpoint={DB_WAL_AUTOCHECKPOINT_PAGES}")
    # Create history log table for all telemetry snapshots
    c.execute(
        """
        CREATE TABLE IF NOT EXISTS telemetry_log (
@@ -57,7 +76,7 @@ def init_db() -> None:
        """
    )
    # Create live_state table for upserts of the most recent snapshot per character
    c.execute(
        """
        CREATE TABLE IF NOT EXISTS live_state (
@@ -84,20 +103,27 @@ def init_db() -> None:
def save_snapshot(data: Dict) -> None:
    """
    Save a telemetry snapshot into the local SQLite database.

    Inserts a full record into telemetry_log (history) and upserts into live_state
    for quick lookup of the most recent data per character.
    Respects WAL mode and checkpoint settings on each connection.
    """
    # Open new connection with extended timeout for inserting data
    conn = sqlite3.connect(DB_FILE, timeout=30)
    # Bump SQLite runtime limits on this connection
    conn.setlimit(sqlite3.SQLITE_LIMIT_LENGTH, DB_MAX_SQL_LENGTH)
    conn.setlimit(sqlite3.SQLITE_LIMIT_SQL_LENGTH, DB_MAX_SQL_LENGTH)
    conn.setlimit(sqlite3.SQLITE_LIMIT_VARIABLE_NUMBER, DB_MAX_SQL_VARIABLES)
    c = conn.cursor()
    # Ensure WAL mode and checkpointing settings on this connection
    c.execute("PRAGMA journal_mode=WAL")
    c.execute("PRAGMA synchronous=NORMAL")
    c.execute(f"PRAGMA wal_autocheckpoint={DB_WAL_AUTOCHECKPOINT_PAGES}")
    # Insert the snapshot into the telemetry_log (history) table
    c.execute(
        """
        INSERT INTO telemetry_log (
@@ -125,7 +151,7 @@ def save_snapshot(data: Dict) -> None:
        ),
    )
    # Upsert (insert or update) the latest snapshot into the live_state table
    c.execute(
        """
        INSERT INTO live_state (
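
The hunk ends mid-statement here. For reference, a minimal sketch of the SQLite upsert shape such a statement typically takes; the column list is abbreviated and assumed from the schema above, not the committed SQL:

    # Hedged sketch, not the committed code: upsert keyed on character_name
    c.execute(
        """
        INSERT INTO live_state (character_name, timestamp, ew, ns, z, kills)
        VALUES (?, ?, ?, ?, ?, ?)
        ON CONFLICT(character_name) DO UPDATE SET
            timestamp = excluded.timestamp,
            ew = excluded.ew,
            ns = excluded.ns,
            z = excluded.z,
            kills = excluded.kills
        """,
        (data["character_name"], data["timestamp"], data["ew"],
         data["ns"], data["z"], data["kills"]),
    )
    conn.commit()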

View file

@@ -1,3 +1,8 @@
"""Asynchronous database layer for telemetry service using PostgreSQL/TimescaleDB.

Defines table schemas via SQLAlchemy Core and provides an
initialization function to set up the TimescaleDB hypertable.
"""
import os
import sqlalchemy
from databases import Database
@@ -8,10 +13,13 @@ DATABASE_URL = os.getenv("DATABASE_URL", "postgresql://postgres:password@localho
# Async database client
database = Database(DATABASE_URL)
# SQLAlchemy metadata container for table definitions
metadata = MetaData()
# --- Table Definitions ---
# Time-series table storing raw telemetry snapshots from plugins (converted to a hypertable)
telemetry_events = Table(
    "telemetry_events",
    metadata,
    Column("id", Integer, primary_key=True),
@@ -36,17 +44,18 @@ telemetry_events = Table(
    Column("latency_ms", Float, nullable=True),
)
# Table for persistent total kills per character
char_stats = Table(
    # One cumulative row per character, maintained via upsert
    "char_stats",
    metadata,
    Column("character_name", String, primary_key=True),
    Column("total_kills", Integer, nullable=False, default=0),
)
# Table for persistent total rare counts per character
rare_stats = Table(
    "rare_stats",
    metadata,
    Column("character_name", String, primary_key=True),
@@ -54,14 +63,16 @@ rare_stats = Table(
)
rare_stats_sessions = Table(
    # Stores per-session rare counts; composite PK (character_name, session_id)
    "rare_stats_sessions",
    metadata,
    Column("character_name", String, primary_key=True),
    Column("session_id", String, primary_key=True),
    Column("session_rares", Integer, nullable=False, default=0),
)
# Table for recording spawn events (mob creates) for heatmap analysis
spawn_events = Table(
    "spawn_events",
    metadata,
    Column("id", Integer, primary_key=True),
@@ -72,8 +83,9 @@ spawn_events = Table(
    Column("ns", Float, nullable=False),
    Column("z", Float, nullable=False),
)
# Table for recording individual rare spawn events for analysis and heatmaps
rare_events = Table(
    "rare_events",
    metadata,
    Column("id", Integer, primary_key=True),
@@ -86,23 +98,32 @@ rare_events = Table(
)
async def init_db_async():
    """Initialize PostgreSQL/TimescaleDB schema and hypertable.

    Creates all defined tables and ensures the TimescaleDB extension is
    installed. Converts the telemetry_events table into a hypertable for
    efficient time-series storage.
    """
    # Create tables in Postgres from the metadata definitions
    engine = sqlalchemy.create_engine(DATABASE_URL)
    metadata.create_all(engine)
    # Enable TimescaleDB extension and convert telemetry_events to hypertable.
    # Use a transactional context to ensure DDL statements are committed.
    with engine.begin() as conn:
        # Install or confirm the TimescaleDB extension that provides hypertables
        try:
            conn.execute(text("CREATE EXTENSION IF NOT EXISTS timescaledb"))
        except Exception as e:
            print(f"Warning: failed to create extension timescaledb: {e}")
        # Update TimescaleDB extension if an older version exists
        try:
            conn.execute(text("ALTER EXTENSION timescaledb UPDATE"))
        except Exception as e:
            print(f"Warning: failed to update timescaledb extension: {e}")
        # Create hypertable for telemetry_events partitioned by timestamp;
        # skip default indexes to avoid collisions
        try:
            conn.execute(text(
                "SELECT create_hypertable('telemetry_events', 'timestamp', \

View file

@@ -1,6 +1,8 @@
# Docker Compose configuration for Dereth Tracker microservices
version: "3.8"
services:
  # Application service: Dereth Tracker API and WebSockets server
  dereth-tracker:
    build: .
    ports:
@@ -30,6 +32,7 @@ services:
        max-size: "10m"
        max-file: "3"
  # TimescaleDB service for telemetry data storage
  db:
    image: timescale/timescaledb:latest-pg14
    container_name: dereth-db
@@ -46,6 +49,7 @@ services:
      options:
        max-size: "10m"
        max-file: "3"
  # Grafana service for visualization and dashboards
  grafana:
    image: grafana/grafana:latest
    container_name: dereth-grafana
@@ -58,6 +62,9 @@ services:
      GF_SECURITY_ADMIN_PASSWORD: "${GF_SECURITY_ADMIN_PASSWORD}"
      # Allow embedding Grafana dashboards in iframes
      GF_SECURITY_ALLOW_EMBEDDING: "true"
      # Enable anonymous access so embedded panels work without login
      GF_AUTH_ANONYMOUS_ENABLED: "true"
      GF_AUTH_ANONYMOUS_ORG_ROLE: "Viewer"
      GF_USERS_ALLOW_SIGN_UP: "false"
      # Serve Grafana under /grafana path
      GF_SERVER_ROOT_URL: "https://overlord.snakedesert.se/grafana"

View file

@@ -1,19 +1,46 @@
"""
generate_data.py - Standalone script to simulate plugin telemetry data.

This script connects to the plugin WebSocket at /ws/position and sends
fabricated TelemetrySnapshot payloads at regular intervals. Useful for:
- Functional testing of the telemetry ingestion pipeline
- Demonstrating real-time map updates without a live game client
"""
import asyncio     # Async event loop and sleep support
import websockets  # WebSocket client for Python
import json        # JSON serialization of payloads
from datetime import datetime, timedelta, timezone
from main import TelemetrySnapshot  # Pydantic model matching the plugin protocol

async def main() -> None:
    """
    Continuously emit synthetic telemetry snapshots at fixed intervals.

    Updates in-game coordinates (ew, ns) gradually and increments
    an 'online_time' counter to mimic real gameplay progression.
    Each iteration:
    1. Build a TelemetrySnapshot with the current state
    2. Serialize it to JSON and set the 'type' field
    3. Send it over the WebSocket
    4. Sleep for 'wait' seconds
    """
    # Interval between snapshots (seconds)
    wait = 10
    # Simulated total online time in seconds (starting at 24h)
    online_time = 24 * 3600
    # Starting coordinates (E/W and N/S)
    ew = 0.0
    ns = 0.0
    # WebSocket endpoint for plugin telemetry (include secret for auth)
    uri = "ws://localhost:8000/ws/position?secret=your_shared_secret"
    # Establish the authenticated WebSocket connection to the server
    async with websockets.connect(uri) as websocket:
        print(f"Connected to {uri}")
        # Loop indefinitely, sending telemetry at each interval
        while True:
            # Construct a new TelemetrySnapshot model instance
            snapshot = TelemetrySnapshot(
                character_name="Test name",
                char_tag="test_tag",
@@ -30,13 +57,18 @@ async def main() -> None:
                prismatic_taper_count=0,
                vt_state="test state",
            )
            # Prepare payload dictionary:
            # - Convert the Pydantic model to a dict
            # - Remove any extraneous fields (e.g., 'rares_found')
            # - Insert the message 'type' for server routing
            payload = snapshot.model_dump()
            payload.pop("rares_found", None)
            payload["type"] = "telemetry"
            # Transmit the JSON payload (datetime serialized via default=str)
            await websocket.send(json.dumps(payload, default=str))
            print(f"Sent snapshot: EW={ew:.2f}, NS={ns:.2f}")
            # Wait before the next update, then increment the simulated state
            await asyncio.sleep(wait)
            ew += 0.1
            ns += 0.1
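
The diff does not show the script's entry point; presumably a standard guard runs the coroutine, along these lines:

    # Assumed entry point; not shown in this diff
    if __name__ == "__main__":
        asyncio.run(main())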

main.py
View file

@@ -1,3 +1,10 @@
"""
main.py - FastAPI-based telemetry server for Dereth Tracker.

This service ingests real-time position and event data from plugin clients via WebSockets,
stores telemetry and statistics in a TimescaleDB backend, and exposes HTTP and WebSocket
endpoints for browser clients to retrieve live and historical data, trails, and per-character stats.
"""
from datetime import datetime, timedelta, timezone
import json
import os
@@ -27,14 +34,21 @@ import asyncio
# ------------------------------------------------------------------
app = FastAPI()
# In-memory store mapping character_name to the most recent telemetry snapshot
live_snapshots: Dict[str, dict] = {}
# Shared secret used to authenticate plugin WebSocket connections (override for production)
SHARED_SECRET = "your_shared_secret"
# LOG_FILE = "telemetry_log.jsonl"
# ------------------------------------------------------------------
ACTIVE_WINDOW = timedelta(seconds=30)  # Time window defining “online” players (last 30 seconds)

# Data models for plugin events:
# - TelemetrySnapshot: periodic telemetry data from a player client
# - SpawnEvent: information about a mob spawn event
# - RareEvent: details of a rare mob event
class TelemetrySnapshot(BaseModel):
@@ -63,6 +77,10 @@ class TelemetrySnapshot(BaseModel):
class SpawnEvent(BaseModel):
    """
    Model for a spawn event emitted by plugin clients when a mob appears.
    Records character context, mob type, timestamp, and spawn location.
    """
    character_name: str
    mob: str
    timestamp: datetime
@@ -71,6 +89,10 @@ class SpawnEvent(BaseModel):
    z: float = 0.0

class RareEvent(BaseModel):
    """
    Model for a rare mob event when a player encounters or discovers a rare entity.
    Includes character, event name, timestamp, and location coordinates.
    """
    character_name: str
    name: str
    timestamp: datetime
@@ -81,7 +103,11 @@
@app.on_event("startup")
async def on_startup():
    """Event handler triggered when the application starts up.

    Attempts to connect to the database with retry logic to accommodate
    potential startup delays (e.g., waiting for Postgres to be ready).
    """
    max_attempts = 5
    for attempt in range(1, max_attempts + 1):
        try:
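            # (Retry body elided by this hunk.) A hedged sketch of the pattern the
            # docstring describes; the fixed delay is an assumption:
            #
            #     await database.connect()
            #     break
            # except Exception as exc:
            #     if attempt == max_attempts:
            #         raise
            #     print(f"DB not ready ({attempt}/{max_attempts}): {exc}")
            #     await asyncio.sleep(2)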
@@ -98,7 +124,10 @@ async def on_startup():
@app.on_event("shutdown")
async def on_shutdown():
    """Event handler triggered when the application is shutting down.

    Ensures the database connection is closed cleanly.
    """
    await database.disconnect()
@@ -114,7 +143,9 @@ def debug():
async def get_live_players():
    """Return recent live telemetry per character (last 30 seconds)."""
    cutoff = datetime.now(timezone.utc) - ACTIVE_WINDOW
    # Build SQL to select the most recent telemetry entry per character:
    # - Use DISTINCT ON (character_name) to get the latest row for each player
    # - Join rare_stats for cumulative counts and rare_stats_sessions for session-specific counts
    sql = """
        SELECT sub.*,
               COALESCE(rs.total_rares, 0) AS total_rares,
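
The rest of the query falls outside this hunk; a hedged sketch of the DISTINCT ON shape the comment above describes (column choices and join keys are assumptions, not the committed SQL):

    # Hedged sketch, not the committed query
    sql = """
        SELECT sub.*,
               COALESCE(rs.total_rares, 0) AS total_rares,
               COALESCE(ss.session_rares, 0) AS session_rares
        FROM (
            SELECT DISTINCT ON (character_name) *
            FROM telemetry_events
            WHERE timestamp >= :cutoff
            ORDER BY character_name, timestamp DESC
        ) AS sub
        LEFT JOIN rare_stats rs ON rs.character_name = sub.character_name
        LEFT JOIN rare_stats_sessions ss
            ON ss.character_name = sub.character_name
           AND ss.session_id = sub.session_id
    """
    rows = await database.fetch_all(sql, values={"cutoff": cutoff})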
@@ -144,18 +175,21 @@ async def get_history(
    to_ts: str | None = Query(None, alias="to"),
):
    """Returns a time-ordered list of telemetry snapshots."""
    # Base SQL query: fetch timestamp, character_name, kills, kills_per_hour (as kph)
    sql = (
        "SELECT timestamp, character_name, kills, kills_per_hour AS kph "
        "FROM telemetry_events"
    )
    values: dict = {}
    conditions: list[str] = []
    # Apply filters if time bounds are provided via 'from' and 'to' query parameters
    if from_ts:
        conditions.append("timestamp >= :from_ts")
        values["from_ts"] = from_ts
    if to_ts:
        conditions.append("timestamp <= :to_ts")
        values["to_ts"] = to_ts
    # Concatenate WHERE clauses dynamically based on the provided filters
    if conditions:
        sql += " WHERE " + " AND ".join(conditions)
    sql += " ORDER BY timestamp"
@@ -181,6 +215,7 @@ async def get_trails(
):
    """Return position snapshots (timestamp, character_name, ew, ns, z) for the past `seconds`."""
    cutoff = datetime.utcnow().replace(tzinfo=timezone.utc) - timedelta(seconds=seconds)
    # Query position snapshots for all characters since the cutoff time
    sql = """
        SELECT timestamp, character_name, ew, ns, z
        FROM telemetry_events
@@ -202,12 +237,18 @@ async def get_trails(
    return JSONResponse(content=jsonable_encoder({"trails": trails}))
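
The grouping step between the query and this return is elided; presumably the rows are bucketed per character roughly like so (a hedged sketch, not the committed code):

    # Hedged sketch: group ordered position rows into one trail per character
    rows = await database.fetch_all(sql, values={"cutoff": cutoff})
    trails: dict[str, list[dict]] = {}
    for row in rows:
        r = dict(row)
        trails.setdefault(r["character_name"], []).append(
            {"timestamp": r["timestamp"], "ew": r["ew"], "ns": r["ns"], "z": r["z"]}
        )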
# -------------------- WebSocket endpoints -----------------------
# WebSocket connection tracking
# Set of browser WebSocket clients subscribed to live updates
browser_conns: set[WebSocket] = set()
# Mapping of plugin clients by character_name to their WebSocket for command forwarding
plugin_conns: Dict[str, WebSocket] = {}

async def _broadcast_to_browser_clients(snapshot: dict):
    """Broadcast a telemetry or chat message to all connected browser clients.

    Converts any non-serializable types (e.g., datetime) before sending.
    """
    # Convert the snapshot payload to JSON-friendly types
    data = jsonable_encoder(snapshot)
    for ws in list(browser_conns):
        try:
@@ -221,7 +262,17 @@ async def ws_receive_snapshots(
    secret: str | None = Query(None),
    x_plugin_secret: str | None = Header(None)
):
    """WebSocket endpoint for plugin clients to send telemetry and events.

    Validates a shared secret for authentication, then listens for messages of
    various types (register, spawn, telemetry, rare, chat) and handles each:
    - register: record plugin WebSocket for command forwarding
    - spawn: persist spawn event
    - telemetry: store snapshot, update stats, broadcast to browsers
    - rare: update total and session rare counts, persist event
    - chat: broadcast chat messages to browsers
    """
    # Authenticate the plugin connection using the shared secret
    key = secret or x_plugin_secret
    if key != SHARED_SECRET:
        # Reject without completing the WebSocket handshake
@@ -246,13 +297,13 @@ async def ws_receive_snapshots(
        except json.JSONDecodeError:
            continue
        msg_type = data.get("type")
        # --- Registration: associate character_name with this plugin socket ---
        if msg_type == "register":
            name = data.get("character_name") or data.get("player_name")
            if isinstance(name, str):
                plugin_conns[name] = websocket
            continue
        # --- Spawn event: persist to spawn_events table ---
        if msg_type == "spawn":
            payload = data.copy()
            payload.pop("type", None)
@@ -264,7 +315,7 @@ async def ws_receive_snapshots(
                spawn_events.insert().values(**spawn.dict())
            )
            continue
        # --- Telemetry message: persist snapshot and update kill stats ---
        if msg_type == "telemetry":
            # Parse the telemetry snapshot and update in-memory state
            payload = data.copy()
@@ -291,10 +342,10 @@ async def ws_receive_snapshots(
            )
            await database.execute(stmt)
            ws_receive_snapshots._last_kills[key] = snap.kills
            # Broadcast the updated snapshot to all browser clients
            await _broadcast_to_browser_clients(snap.dict())
            continue
        # --- Rare event: update total and session counters and persist ---
        if msg_type == "rare":
            name = data.get("character_name")
            if isinstance(name, str):
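                # (Counter updates elided by this hunk.) A hedged sketch of the
                # PostgreSQL upserts the section comment describes; names are
                # assumptions, not the committed code:
                #
                #     await database.execute(
                #         "INSERT INTO rare_stats (character_name, total_rares) "
                #         "VALUES (:name, 1) "
                #         "ON CONFLICT (character_name) "
                #         "DO UPDATE SET total_rares = rare_stats.total_rares + 1",
                #         {"name": name},
                #     )
                #
                # and similarly for rare_stats_sessions, keyed on the composite
                # (character_name, session_id).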
@@ -330,7 +381,7 @@ async def ws_receive_snapshots(
            except Exception:
                pass
            continue
        # --- Chat message: forward chat payload to browser clients ---
        if msg_type == "chat":
            await _broadcast_to_browser_clients(data)
            continue
@@ -342,12 +393,18 @@ async def ws_receive_snapshots(
            del plugin_conns[n]
        print(f"[WS] Cleaned up plugin connections for {websocket.client}")

# In-memory cache of last-seen kill counts per (session_id, character_name),
# used to compute deltas for updating persistent kill statistics efficiently
ws_receive_snapshots._last_kills = {}
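
For context, a hedged reconstruction of how this cache typically feeds the char_stats update inside the telemetry branch (consistent with the surrounding code, but not the committed statement):

    # Hedged sketch, not the committed code
    key = (snap.session_id, snap.character_name)
    last = ws_receive_snapshots._last_kills.get(key)
    delta = snap.kills - last if last is not None else 0
    if delta > 0:
        await database.execute(
            "INSERT INTO char_stats (character_name, total_kills) "
            "VALUES (:name, :delta) "
            "ON CONFLICT (character_name) "
            "DO UPDATE SET total_kills = char_stats.total_kills + :delta",
            {"name": snap.character_name, "delta": delta},
        )
    ws_receive_snapshots._last_kills[key] = snap.kills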
@app.websocket("/ws/live")
async def ws_live_updates(websocket: WebSocket):
    """WebSocket endpoint for browser clients to receive live updates and send commands.

    Manages the set of connected browser clients; listens for incoming command messages
    and forwards them to the appropriate plugin client WebSocket.
    """
    # Add the new browser client to the set
    await websocket.accept()
    browser_conns.add(websocket)
    try:
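        # (Body elided by this hunk.) A hedged sketch of the forwarding loop the
        # docstring describes; message shape and names are assumptions:
        #
        #     while True:
        #         raw = await websocket.receive_text()
        #         msg = json.loads(raw)
        #         target = msg.get("character_name")
        #         plugin_ws = plugin_conns.get(target)
        #         if plugin_ws is not None:
        #             await plugin_ws.send_text(raw)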
@@ -385,14 +442,21 @@ async def ws_live_updates(websocket: WebSocket):
## (static mount moved to end of file, below API routes)
# List registered routes for convenience
print("🔍 Registered HTTP API routes:")
for route in app.routes:
    if isinstance(route, APIRoute):
        # Log the path and allowed methods for each API route
        print(f"{route.path} -> {route.methods}")
# Add stats endpoint for per-character metrics
@app.get("/stats/{character_name}")
async def get_stats(character_name: str):
    """
    HTTP GET endpoint to retrieve per-character metrics:
    - latest_snapshot: most recent telemetry entry for the character
    - total_kills: accumulated kills from char_stats
    - total_rares: accumulated rares from rare_stats
    Returns 404 if the character has no recorded telemetry.
    """
    # Latest snapshot
    sql_snap = (
        "SELECT * FROM telemetry_events "
@@ -421,4 +485,5 @@ async def get_stats(character_name: str):
# -------------------- static frontend ---------------------------
# Serve the single-page application (catch-all for frontend routes) at the root path
app.mount("/", StaticFiles(directory="static", html=True), name="static")

View file

@@ -1,22 +1,29 @@
<!--
  Dereth Tracker Single-Page Application
  Displays live player locations, trails, and statistics on a map.
-->
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="utf-8">
  <title>Dereth Tracker</title>
  <!-- Link to main stylesheet -->
  <link rel="stylesheet" href="style.css">
</head>
<body>
  <!-- Sidebar for active players list and filters -->
  <aside id="sidebar">
    <!-- Container for sort and filter controls -->
    <div id="sortButtons" class="sort-buttons"></div>
    <h2>Active Players</h2>
    <!-- Text input to filter active players by name -->
    <input type="text" id="playerFilter" class="player-filter" placeholder="Filter players..." />
    <ul id="playerList"></ul>
  </aside>
  <!-- Main map container showing terrain and player data -->
  <div id="mapContainer">
    <div id="mapGroup">
      <img id="map" src="dereth.png" alt="Dereth map">
@@ -26,6 +33,7 @@
      <div id="tooltip" class="tooltip"></div>
    </div>
  <!-- Main JavaScript file for WebSocket communication and UI logic -->
  <script src="script.js" defer></script>
</body>
</html>

View file

@@ -1,3 +1,27 @@
/**
 * script.js - Frontend controller for Dereth Tracker SPA
 *
 * Responsibilities:
 * - Establish WebSocket connections to receive live telemetry and chat data
 * - Fetch and render live player lists, trails, and map dots
 * - Handle user interactions: filtering, sorting, selecting players
 * - Manage dynamic UI components: chat windows, stats panels, tooltips
 * - Provide smooth pan/zoom of the map overlay using CSS transforms
 *
 * Structure:
 * 1. DOM references and constant definitions
 * 2. Color palette and assignment logic
 * 3. Sorting and filtering setup
 * 4. Utility functions (coordinate mapping, color hashing)
 * 5. UI window creation (stats, chat)
 * 6. Rendering functions for list and map
 * 7. Event listeners for map interactions and WebSocket messages
 */
/* ---------- DOM references --------------------------------------- */
const wrap = document.getElementById('mapContainer');
const group = document.getElementById('mapGroup');
@@ -7,6 +31,15 @@ const trailsContainer = document.getElementById('trails');
const list = document.getElementById('playerList');
const btnContainer = document.getElementById('sortButtons');
const tooltip = document.getElementById('tooltip');
// Filter input for player names (starts-with filter)
let currentFilter = '';
const filterInput = document.getElementById('playerFilter');
if (filterInput) {
  filterInput.addEventListener('input', e => {
    currentFilter = e.target.value.toLowerCase().trim();
    renderList();
  });
}
// WebSocket for chat and commands
let socket;
@@ -15,6 +48,18 @@ const chatWindows = {};
// Keep track of open stats windows: character_name -> DOM element
const statsWindows = {};
/**
 * ---------- Application Constants -----------------------------
 * Defines key parameters for map rendering, data polling, and UI limits.
 *
 * MAX_Z: Maximum altitude difference considered (filters out outliers by Z)
 * FOCUS_ZOOM: Zoom level when focusing on a selected character
 * POLL_MS: Millisecond interval to fetch live player data and trails
 * MAP_BOUNDS: World-coordinate bounds of the game map (used for projection)
 * API_BASE: Prefix for AJAX endpoints (set when behind a proxy)
 * MAX_CHAT_LINES: Max number of lines per chat window to cap memory usage
 * CHAT_COLOR_MAP: Color mapping for in-game chat channels by channel code
 */
/* ---------- constants ------------------------------------------- */
const MAX_Z = 10;
const FOCUS_ZOOM = 3; // zoom level when you click a name
@@ -65,6 +110,50 @@ const CHAT_COLOR_MAP = {
  31: '#FFFF00' // AdminTell
};
/**
* ---------- Player Color Assignment ----------------------------
* Uses a predefined accessible color palette for player dots to ensure
* high contrast and colorblind-friendly display. Once the palette
* is exhausted, falls back to a deterministic hash-to-hue function.
*/
/* ---------- player/dot color assignment ------------------------- */
// A base palette of distinct, color-blind-friendly colors
const PALETTE = [
  '#1f77b4', '#ff7f0e', '#2ca02c', '#d62728', '#9467bd',
  '#8c564b', '#e377c2', '#7f7f7f', '#bcbd22', '#17becf'
];
// Map from character name to assigned color
const colorMap = {};
// Next index to pick from PALETTE
let nextPaletteIndex = 0;
/**
* Assigns or returns a consistent color for a given name.
* Uses a fixed palette first, then falls back to hue hashing.
*/
function getColorFor(name) {
  if (colorMap[name]) {
    return colorMap[name];
  }
  let color;
  if (nextPaletteIndex < PALETTE.length) {
    color = PALETTE[nextPaletteIndex++];
  } else {
    // Fallback: hash the name to an HSL hue
    color = hue(name);
  }
  colorMap[name] = color;
  return color;
}
/*
 * ---------- Sort Configuration -------------------------------
 * Defines available sort criteria for the active player list:
 * - name: alphabetical ascending
 * - kph: kills per hour descending
 * - kills: total kills descending
 * - rares: rare events found during the current session, descending
 * Each option includes a label for UI display and a comparator function.
 */
/* ---------- sort configuration ---------------------------------- */
const sortOptions = [
  {
@@ -188,6 +277,53 @@ function showStatsWindow(name) {
    iframe.allowFullscreen = true;
    content.appendChild(iframe);
  });
  // Enable dragging of the stats window via its header
  if (!window.__chatZ) window.__chatZ = 10000;
  let drag = false;
  let startX = 0, startY = 0, startLeft = 0, startTop = 0;
  header.style.cursor = 'move';
  const bringToFront = () => {
    window.__chatZ += 1;
    win.style.zIndex = window.__chatZ;
  };
  header.addEventListener('mousedown', e => {
    if (e.target.closest('button')) return;
    e.preventDefault();
    drag = true;
    bringToFront();
    startX = e.clientX; startY = e.clientY;
    startLeft = win.offsetLeft; startTop = win.offsetTop;
    document.body.classList.add('noselect');
  });
  window.addEventListener('mousemove', e => {
    if (!drag) return;
    const dx = e.clientX - startX;
    const dy = e.clientY - startY;
    win.style.left = `${startLeft + dx}px`;
    win.style.top = `${startTop + dy}px`;
  });
  window.addEventListener('mouseup', () => {
    drag = false;
    document.body.classList.remove('noselect');
  });
  // Touch support for dragging
  header.addEventListener('touchstart', e => {
    if (e.touches.length !== 1 || e.target.closest('button')) return;
    drag = true;
    bringToFront();
    const t = e.touches[0];
    startX = t.clientX; startY = t.clientY;
    startLeft = win.offsetLeft; startTop = win.offsetTop;
  });
  window.addEventListener('touchmove', e => {
    if (!drag || e.touches.length !== 1) return;
    const t = e.touches[0];
    const dx = t.clientX - startX;
    const dy = t.clientY - startY;
    win.style.left = `${startLeft + dx}px`;
    win.style.top = `${startTop + dy}px`;
  });
  window.addEventListener('touchend', () => { drag = false; });
}
const applyTransform = () =>
@@ -265,8 +401,16 @@ img.onload = () => {
};
/* ---------- rendering sorted list & dots ------------------------ */
/**
 * Filter and sort the currentPlayers, then render them.
 */
function renderList() {
  // Filter players by name prefix
  const filtered = currentPlayers.filter(p =>
    p.character_name.toLowerCase().startsWith(currentFilter)
  );
  // Sort the filtered list
  const sorted = filtered.slice().sort(currentSort.comparator);
  render(sorted);
}
@@ -282,7 +426,7 @@ function render(players) {
    dot.className = 'dot';
    dot.style.left = `${x}px`;
    dot.style.top = `${y}px`;
    dot.style.background = getColorFor(p.character_name);
@@ -299,7 +443,7 @@ function render(players) {
    dots.appendChild(dot);
    // sidebar
    const li = document.createElement('li');
    const color = getColorFor(p.character_name);
    li.style.borderLeftColor = color;
    li.className = 'player-item';
    li.innerHTML = `
@@ -364,7 +508,8 @@ function renderTrails(trailData) {
  }).join(' ');
  const poly = document.createElementNS('http://www.w3.org/2000/svg', 'polyline');
  poly.setAttribute('points', points);
  // Use the same color as the player dot for consistency
  poly.setAttribute('stroke', getColorFor(name));
  poly.setAttribute('fill', 'none');
  poly.setAttribute('class', 'trail-path');
  trailsContainer.appendChild(poly);
@@ -383,8 +528,13 @@ function selectPlayer(p, x, y) {
  renderList(); // keep sorted + highlight
}
/*
 * ---------- Chat & Command WebSocket Handlers ------------------
 * Maintains a persistent WebSocket connection to the /ws/live endpoint
 * for receiving chat messages and sending user commands to plugin clients.
 * Reconnects automatically on close and logs errors.
 */
// Initialize WebSocket for chat and command streams
function initWebSocket() {
  const protocol = location.protocol === 'https:' ? 'wss:' : 'ws:';
  const wsUrl = `${protocol}//${location.host}${API_BASE}/ws/live`;

View file

@@ -1,3 +1,10 @@
/*
 * style.css - Core styles for Dereth Tracker Single-Page Application
 *
 * Defines CSS variables for theming, layout rules for the sidebar and map,
 * interactive element styling (buttons, inputs), and responsive considerations.
 */
/* CSS Custom Properties for theme colors and sizing */
:root {
  --sidebar-width: 280px;
  --bg-main: #111;
@@ -7,6 +14,10 @@
  --text: #eee;
  --accent: #88f;
}
/* Placeholder text in chat input should be white */
.chat-input::placeholder {
  color: #fff;
@@ -29,13 +40,14 @@ body {
  color: var(--text);
}
/* ---------- sort buttons --------------------------------------- */
.sort-buttons {
  /* Container for sorting controls; flex layout distributes buttons equally */
  display: flex;
  gap: 4px;
  margin: 12px 16px 8px;
}
.sort-buttons .btn {
  /* Base styling for each sort button: color, padding, border */
  flex: 1;
  padding: 6px 8px;
  background: #222;
@@ -48,6 +60,7 @@ body {
  font-size: 0.9rem;
}
.sort-buttons .btn.active {
  /* Active sort button highlighted with the accent color */
  background: var(--accent);
  color: #111;
  border-color: var(--accent);
@@ -73,6 +86,18 @@ body {
  margin: 0;
  padding: 0;
}
/* Filter input in sidebar for the player list */
.player-filter {
  width: 100%;
  padding: 6px 8px;
  margin-bottom: 12px;
  background: var(--card);
  color: var(--text);
  border: 1px solid #555;
  border-radius: 4px;
  font-size: 0.9rem;
  box-sizing: border-box;
}
#playerList li {
  margin: 4px 0;
  padding: 6px 8px;