Johan review
parent d9b3b403da
commit 7845570819
3 changed files with 53 additions and 31 deletions
```diff
@@ -12,8 +12,14 @@ Root directory:
 - **README.md**: High-level documentation and usage instructions.
 - **EVENT_FORMATS.json**: Example JSON payloads for all event types (`telemetry`, `spawn`, `chat`, `rare`).
 - **db.py**: Legacy SQLite-based storage (telemetry_log & live_state tables, WAL mode, auto-vacuum).
 - **db_async.py**: Async database definitions for PostgreSQL/TimescaleDB:
-  - Table schemas (SQLAlchemy Core): `telemetry_events`, `char_stats`, `rare_stats`, `rare_stats_sessions`, `spawn_events`.
+  - Table schemas (SQLAlchemy Core):
+    - `telemetry_events`
+    - `char_stats`
+    - `rare_stats`
+    - `rare_stats_sessions`
+    - `spawn_events`
+    - `rare_events`
   - `init_db_async()`: Creates tables, enables TimescaleDB extension, and configures a hypertable on `telemetry_events`.
 - **main.py**: The FastAPI application:
   - HTTP endpoints: `/debug`, `/live`, `/history`, `/trails`.
```
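The hunk above lists main.py's endpoints without showing their code. For orientation, here is a minimal sketch of an app exposing those four routes; the handler bodies and response shapes are assumptions, not taken from the repository:

```python
# Hypothetical sketch: only the route paths come from the README.
from fastapi import FastAPI

app = FastAPI()

@app.get("/debug")
async def debug():
    # Assumed: internal state for troubleshooting.
    return {"status": "ok"}

@app.get("/live")
async def live():
    # Assumed: latest known state per character.
    return {"players": []}

@app.get("/history")
async def history(character_name: str, limit: int = 100):
    # Assumed: recent telemetry_events rows for one character.
    return {"character_name": character_name, "events": []}

@app.get("/trails")
async def trails():
    # Assumed: movement trails derived from telemetry.
    return {"trails": []}
```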
Dockerfile (10 changes)
```diff
@@ -35,13 +35,7 @@ ENV DATABASE_URL=postgresql://postgres:password@db:5432/dereth \
     DB_MAX_SQL_LENGTH=1000000000 \
     DB_MAX_SQL_VARIABLES=32766 \
     DB_WAL_AUTOCHECKPOINT_PAGES=1000 \
-    SHARED_SECRET=your_shared_secret # Secret for plugin authentication
+    SHARED_SECRET=your_shared_secret

 ## Launch the FastAPI app using Uvicorn
-CMD [
-    "uvicorn", "main:app",
-    "--host", "0.0.0.0",
-    "--port", "8765",
-    "--reload", # auto-restart on code changes
-    "--workers", "1"
-]
+CMD ["uvicorn","main:app","--host","0.0.0.0","--port","8765","--reload","--workers","1"]
```
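Both removals fix real Dockerfile pitfalls: `#` only starts a comment at the beginning of a line, so the trailing text on the ENV continuation would have ended up in, or broken, the SHARED_SECRET assignment; and the exec form of CMD must be a valid JSON array, which cannot contain comments, with Docker silently falling back to shell form when the array fails to parse. A quick illustration of the JSON point (plain Python, not from the repo):

```python
# Demonstration: exec-form CMD must be valid JSON, and JSON has no comments.
import json

single_line = '["uvicorn","main:app","--host","0.0.0.0","--port","8765","--reload","--workers","1"]'
print(json.loads(single_line))  # parses fine, so Docker uses exec form

multi_line_with_comment = """[
    "uvicorn", "main:app",
    "--reload", # auto-restart on code changes
    "--workers", "1"
]"""
try:
    json.loads(multi_line_with_comment)
except json.JSONDecodeError as e:
    # Docker would not see a valid array here and would fall back to shell form
    print(f"invalid JSON: {e}")
```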
db_async.py (50 changes)
```diff
@@ -7,6 +7,7 @@ import os
 import sqlalchemy
 from databases import Database
 from sqlalchemy import MetaData, Table, Column, Integer, String, Float, DateTime, text
+from sqlalchemy import Index

 # Environment: Postgres/TimescaleDB connection URL
 DATABASE_URL = os.getenv("DATABASE_URL", "postgresql://postgres:password@localhost:5432/dereth")
```
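db_async.py combines SQLAlchemy Core table definitions with the `databases` package for async access. A minimal connection sketch under that assumption; the query is illustrative and not from the diff:

```python
# Sketch: async access via the `databases` package imported above.
import asyncio
from databases import Database

DATABASE_URL = "postgresql://postgres:password@localhost:5432/dereth"

async def main():
    database = Database(DATABASE_URL)
    await database.connect()
    # Illustrative query; assumes the telemetry_events table exists.
    row = await database.fetch_one("SELECT count(*) AS n FROM telemetry_events")
    print(row["n"])
    await database.disconnect()

asyncio.run(main())
```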
```diff
@@ -43,6 +44,12 @@ telemetry_events = Table(
     Column("mem_handles", Integer, nullable=True),
     Column("latency_ms", Float, nullable=True),
 )
+# Composite index to accelerate Grafana queries filtering by character_name then ordering by timestamp
+Index(
+    'ix_telemetry_events_char_ts',
+    telemetry_events.c.character_name,
+    telemetry_events.c.timestamp
+)

 # Table for persistent total kills per character
 char_stats = Table(
```
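Because the new `Index` is attached to the table's `MetaData`, `metadata.create_all(engine)` emits it on fresh databases; the `CREATE INDEX IF NOT EXISTS` added to `init_db_async()` below covers databases created before this commit. A sketch of the query shape the composite index targets (character name and limit are placeholders):

```python
# Sketch of the Grafana-style query pattern served by the composite index.
from sqlalchemy import select
from db_async import telemetry_events  # table object defined above

query = (
    select(telemetry_events)
    .where(telemetry_events.c.character_name == "SomeCharacter")  # placeholder
    .order_by(telemetry_events.c.timestamp.desc())
    .limit(500)
)
# With (character_name, timestamp) indexed, Postgres can answer the filter
# and the ordering from the index instead of sorting a full scan.
```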
```diff
@@ -108,26 +115,41 @@ async def init_db_async():
     engine = sqlalchemy.create_engine(DATABASE_URL)
     # Reflects metadata definitions into actual database tables via SQLAlchemy
     metadata.create_all(engine)
-    # Enable TimescaleDB extension and convert telemetry_events to hypertable
-    # Use a transactional context to ensure DDL statements are committed
-    with engine.begin() as conn:
-        # Enable or update TimescaleDB extension
-        try:
-            conn.execute(text("CREATE EXTENSION IF NOT EXISTS timescaledb"))
-        except Exception as e:
-            print(f"Warning: failed to create extension timescaledb: {e}")
-        # Update TimescaleDB extension if an older version exists
-        try:
-            conn.execute(text("ALTER EXTENSION timescaledb UPDATE"))
-        except Exception as e:
-            print(f"Warning: failed to update timescaledb extension: {e}")
-        # Create hypertable for telemetry_events, skip default indexes to avoid collisions
-        # Transform telemetry_events into a hypertable partitioned by timestamp
-        try:
-            conn.execute(text(
-                "SELECT create_hypertable('telemetry_events', 'timestamp', \
-                if_not_exists => true, create_default_indexes => false)"
-            ))
-        except Exception as e:
-            print(f"Warning: failed to create hypertable telemetry_events: {e}")
+    # Ensure TimescaleDB extension is installed and telemetry_events is a hypertable
+    # Run DDL in autocommit mode so errors don't abort subsequent statements
+    try:
+        with engine.connect().execution_options(isolation_level="AUTOCOMMIT") as conn:
+            # Install extension if missing
+            try:
+                conn.execute(text("CREATE EXTENSION IF NOT EXISTS timescaledb"))
+            except Exception as e:
+                print(f"Warning: failed to create extension timescaledb: {e}")
+            # Convert to hypertable, migrating existing data and skipping default index creation
+            try:
+                conn.execute(text(
+                    "SELECT create_hypertable('telemetry_events', 'timestamp', "
+                    "if_not_exists => true, migrate_data => true, create_default_indexes => false)"
+                ))
+            except Exception as e:
+                print(f"Warning: failed to create hypertable telemetry_events: {e}")
+    except Exception as e:
+        print(f"Warning: timescale extension/hypertable setup failed: {e}")
+    # Ensure composite index exists for efficient time-series queries by character
+    try:
+        with engine.connect() as conn:
+            conn.execute(text(
+                "CREATE INDEX IF NOT EXISTS ix_telemetry_events_char_ts "
+                "ON telemetry_events (character_name, timestamp)"
+            ))
+    except Exception as e:
+        print(f"Warning: failed to create composite index ix_telemetry_events_char_ts: {e}")
+    # Disable parallel workers at the system level to avoid OOMs from large parallel scans
+    try:
+        # Apply settings outside transaction for ALTER SYSTEM
+        conn2 = engine.connect().execution_options(isolation_level="AUTOCOMMIT")
+        conn2.execute(text("ALTER SYSTEM SET max_parallel_workers_per_gather = 0"))
+        conn2.execute(text("ALTER SYSTEM SET max_parallel_workers = 0"))
+        conn2.execute(text("SELECT pg_reload_conf()"))
+        conn2.close()
+    except Exception as e:
+        print(f"Warning: failed to disable parallel workers: {e}")
```
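Every step above only prints a warning on failure, so a broken setup is easy to miss. A hedged post-setup check could confirm all three effects; the catalog views used here are standard Postgres and TimescaleDB 2.x, but none of this code is from the repo:

```python
# Sketch: verify that hypertable, index, and parallel-worker settings took effect.
import sqlalchemy
from sqlalchemy import text

engine = sqlalchemy.create_engine("postgresql://postgres:password@localhost:5432/dereth")
with engine.connect() as conn:
    # 1. Is telemetry_events a hypertable? (TimescaleDB 2.x information view)
    ht = conn.execute(text(
        "SELECT hypertable_name FROM timescaledb_information.hypertables "
        "WHERE hypertable_name = 'telemetry_events'"
    )).fetchall()
    print("hypertable:", bool(ht))

    # 2. Does the composite index exist?
    ix = conn.execute(text(
        "SELECT indexname FROM pg_indexes "
        "WHERE indexname = 'ix_telemetry_events_char_ts'"
    )).fetchall()
    print("composite index:", bool(ix))

    # 3. Did the ALTER SYSTEM settings apply after pg_reload_conf()?
    for setting in ("max_parallel_workers_per_gather", "max_parallel_workers"):
        value = conn.execute(text(f"SHOW {setting}")).scalar()
        print(setting, "=", value)
```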
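One natural place to call the reworked `init_db_async()` is a FastAPI startup hook in main.py. Whether the repo already wires it this way is not visible in the diff, so the snippet below is an assumption:

```python
# Assumed wiring, not shown in the diff: run init_db_async() when the app boots.
from fastapi import FastAPI
from db_async import init_db_async

app = FastAPI()

@app.on_event("startup")
async def on_startup():
    # Creates tables, the hypertable, the composite index, and applies the
    # parallel-worker settings before the first request is served.
    await init_db_async()
```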