# trading-daemon/dashboard/server.py
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse
import requests
import os
from typing import Optional, Dict, Any

app = FastAPI(title="Trading Dashboard API")
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"],
)
# Serve static files
app.mount("/static", StaticFiles(directory="dashboard/public"), name="static")
@app.get("/")
async def read_index():
    return FileResponse('dashboard/public/index.html')
# QuestDB configuration
DB_USER = os.getenv("DB_USER", "admin")
DB_PASSWORD = os.getenv("DB_PASSWORD", "quest")
DB_AUTH = (DB_USER, DB_PASSWORD) if DB_USER and DB_PASSWORD else None
DB_HOST = os.getenv("DB_HOST", "questdb")
DB_URL = f"http://{DB_HOST}:9000"
# Helper functions
def query_questdb(query: str, timeout: int = 10) -> Optional[Dict[str, Any]]:
    """Central helper for running a SQL query against QuestDB's HTTP /exec endpoint."""
    try:
        response = requests.get(f"{DB_URL}/exec", params={'query': query}, auth=DB_AUTH, timeout=timeout)
        if response.status_code == 200:
            return response.json()
        else:
            raise HTTPException(status_code=response.status_code, detail=f"QuestDB error: {response.text}")
    except requests.exceptions.Timeout:
        raise HTTPException(status_code=504, detail="QuestDB query timeout")
    except requests.exceptions.RequestException as e:
        raise HTTPException(status_code=500, detail=f"QuestDB connection error: {str(e)}")
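
# Illustrative usage (sketch, not executed at import time): QuestDB's /exec response
# is JSON that typically carries 'columns' and 'dataset' keys, e.g.
#   result = query_questdb("select count(*) from trades")
#   rows = result['dataset'] if result else []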
def format_questdb_response(data: Dict[str, Any]) -> Dict[str, Any]:
    """Normalize QuestDB responses into a consistent shape."""
    if not data:
        return {'columns': [], 'dataset': []}
    return data

# API endpoints
@app.get("/api/trades")
async def get_trades(isin: Optional[str] = None, days: int = 7):
    """
    Returns an aggregated analysis of all trades (not individual trades).
    Uses precomputed data from analytics_exchange_daily.
    """
    if isin:
        # For a specific ISIN: read from the trades table
        query = f"""
            select
                date_trunc('day', timestamp) as date,
                count(*) as trade_count,
                sum(price * quantity) as volume,
                avg(price) as avg_price
            from trades
            where isin = '{isin}'
            and timestamp > dateadd('d', -{days}, now())
            group by date
            order by date desc
        """
    else:
        # Aggregated data from analytics_exchange_daily
        query = f"""
            select
                timestamp as date,
                exchange,
                trade_count,
                volume
            from analytics_exchange_daily
            where timestamp >= dateadd('d', -{days}, now())
            order by date desc, exchange asc
        """
    data = query_questdb(query)
    return format_questdb_response(data)
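
# Example requests (assuming the default uvicorn setup on port 8000):
#   GET /api/trades?days=7                      -> per-exchange daily aggregates
#   GET /api/trades?isin=DE0007164600&days=30   -> daily aggregates for one ISIN (sample ISIN)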
@app.get("/api/metadata")
async def get_metadata():
    """Returns all metadata rows."""
    query = "select * from metadata"
    data = query_questdb(query)
    return format_questdb_response(data)
@app.get("/api/summary")
async def get_summary():
    """
    Returns a summary. Uses analytics_daily_summary for total_trades (all trades).
    """
    # Fetch the grand total of trades from analytics_daily_summary
    query = """
        select
            sum(total_trades) as total_trades,
            sum(total_volume) as total_volume
        from analytics_daily_summary
    """
    data = query_questdb(query)
    if data and data.get('dataset') and data['dataset']:
        total_trades = data['dataset'][0][0] if data['dataset'][0][0] else 0
        total_volume = data['dataset'][0][1] if data['dataset'][0][1] else 0.0
        # Format for compatibility with the per-continent response shape
        return {
            'columns': [
                {'name': 'continent'},
                {'name': 'trade_count'},
                {'name': 'total_volume'}
            ],
            'dataset': [['All', total_trades, total_volume]]
        }
    # Fallback: original query against the raw trades table
    query = """
        select
            coalesce(m.continent, 'Unknown') as continent,
            count(*) as trade_count,
            sum(t.price * t.quantity) as total_volume
        from trades t
        left join metadata m on t.isin = m.isin
        group by continent
    """
    data = query_questdb(query)
    return format_questdb_response(data)
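
# Illustrative response shape (values are made up; columns mirror QuestDB's format):
#   {'columns': [{'name': 'continent'}, {'name': 'trade_count'}, {'name': 'total_volume'}],
#    'dataset': [['All', 12345, 67890.0]]}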
@app.get("/api/statistics/total-trades")
async def get_total_trades():
    """Returns the total number of trades (from analytics_daily_summary)."""
    query = "select sum(total_trades) as total from analytics_daily_summary"
    data = query_questdb(query)
    if data and data.get('dataset') and data['dataset']:
        total = data['dataset'][0][0] if data['dataset'][0][0] else 0
        return {'total_trades': total}
    return {'total_trades': 0}
@app.get("/api/statistics/moving-average")
async def get_moving_average(days: int = 7, exchange: Optional[str] = None):
    """
    Returns moving-average data for trade counts and volume per exchange.
    Supported periods: 7, 30, 42, 69, 180, 365 days.
    """
    if days not in [7, 30, 42, 69, 180, 365]:
        raise HTTPException(status_code=400, detail="Invalid days parameter. Must be one of: 7, 30, 42, 69, 180, 365")
    query = f"""
        select
            timestamp as date,
            exchange,
            trade_count,
            volume,
            ma{days}_count as ma_count,
            ma{days}_volume as ma_volume
        from analytics_exchange_daily
        where timestamp >= dateadd('d', -{days}, now())
    """
    if exchange:
        query += f" and exchange = '{exchange}'"
    query += " order by date asc, exchange asc"
    data = query_questdb(query, timeout=5)
    return format_questdb_response(data)
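
# Example request: GET /api/statistics/moving-average?days=30&exchange=XETRA
# (the exchange value is illustrative; valid values depend on what the ingest
# pipeline writes into analytics_exchange_daily)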
@app.get("/api/statistics/volume-changes")
async def get_volume_changes(days: int = 7):
    """
    Returns changes in volume and trade count per exchange.
    Supported periods: 7, 30, 42, 69, 180, 365 days.
    """
    if days not in [7, 30, 42, 69, 180, 365]:
        raise HTTPException(status_code=400, detail="Invalid days parameter. Must be one of: 7, 30, 42, 69, 180, 365")
    query = f"""
        select
            timestamp as date,
            exchange,
            trade_count,
            volume,
            count_change_pct,
            volume_change_pct,
            trend
        from analytics_volume_changes
        where period_days = {days}
        and timestamp >= dateadd('d', -{days}, now())
        order by date desc, exchange asc
    """
    data = query_questdb(query, timeout=5)
    return format_questdb_response(data)
@app.get("/api/statistics/stock-trends")
async def get_stock_trends(days: int = 7, limit: int = 20):
    """
    Returns a trend analysis for frequently traded stocks.
    Supported periods: 7, 30, 42, 69, 180, 365 days.
    """
    if days not in [7, 30, 42, 69, 180, 365]:
        raise HTTPException(status_code=400, detail="Invalid days parameter. Must be one of: 7, 30, 42, 69, 180, 365")
    query = f"""
        select
            timestamp as date,
            isin,
            trade_count,
            volume,
            count_change_pct,
            volume_change_pct
        from analytics_stock_trends
        where period_days = {days}
        and timestamp >= dateadd('d', -{days}, now())
        order by volume desc
        limit {limit}
    """
    data = query_questdb(query, timeout=5)
    return format_questdb_response(data)
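
# Example request: GET /api/statistics/stock-trends?days=7&limit=10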
@app.get("/api/analytics")
async def get_analytics(
    metric: str = "volume",
    group_by: str = "day",
    sub_group_by: Optional[str] = None,
    date_from: Optional[str] = None,
    date_to: Optional[str] = None,
    isins: Optional[str] = None,
    continents: Optional[str] = None
):
    """Analytics endpoint for the report builder."""
    composite_keys = ["exchange_continent", "exchange_sector"]
    needs_metadata = any([
        group_by in ["name", "continent", "sector"] + composite_keys,
        sub_group_by in ["name", "continent", "sector"] + composite_keys,
        continents is not None
    ])
    t_prefix = "t." if needs_metadata else ""
    m_prefix = "m." if needs_metadata else ""
    metrics_map = {
        "volume": f"sum({t_prefix}price * {t_prefix}quantity)",
        "count": "count(*)",
        "avg_price": f"avg({t_prefix}price)"
    }
    groups_map = {
        "day": f"date_trunc('day', {t_prefix}timestamp)",
        "month": f"date_trunc('month', {t_prefix}timestamp)",
        "exchange": f"{t_prefix}exchange",
        "isin": f"{t_prefix}isin",
        "name": f"coalesce({m_prefix}name, {t_prefix}isin)" if needs_metadata else "isin",
        "continent": f"coalesce({m_prefix}continent, 'Unknown')" if needs_metadata else "'Unknown'",
        "sector": f"coalesce({m_prefix}sector, 'Unknown')" if needs_metadata else "'Unknown'",
        "exchange_continent": f"concat({t_prefix}exchange, ' - ', coalesce({m_prefix}continent, 'Unknown'))" if needs_metadata else "'Unknown'",
        "exchange_sector": f"concat({t_prefix}exchange, ' - ', coalesce({m_prefix}sector, 'Unknown'))" if needs_metadata else "'Unknown'"
    }
    selected_metric = metrics_map.get(metric, metrics_map["volume"])
    selected_group = groups_map.get(group_by, groups_map["day"])
    query = f"select {selected_group} as label"
    if sub_group_by and sub_group_by in groups_map:
        query += f", {groups_map[sub_group_by]} as sub_label"
    if metric == 'all':
        query += f", count(*) as value_count, sum({t_prefix}price * {t_prefix}quantity) as value_volume from trades"
    else:
        query += f", {selected_metric} as value from trades"
    if needs_metadata:
        query += " t left join metadata m on t.isin = m.isin"
    query += " where 1=1"
    if date_from:
        query += f" and {t_prefix}timestamp >= '{date_from}'"
    if date_to:
        query += f" and {t_prefix}timestamp <= '{date_to}'"
    if isins:
        isins_list = ",".join([f"'{i.strip()}'" for i in isins.split(",")])
        query += f" and {t_prefix}isin in ({isins_list})"
    if continents and needs_metadata:
        cont_list = ",".join([f"'{c.strip()}'" for c in continents.split(",")])
        query += f" and {m_prefix}continent in ({cont_list})"
    query += f" group by {selected_group}"
    if sub_group_by and sub_group_by in groups_map:
        query += f", {groups_map[sub_group_by]}"
    query += " order by label asc"
    data = query_questdb(query)
    return format_questdb_response(data)
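
# Sketch of an assembled query for metric=volume, group_by=continent, continents=Europe:
#   select coalesce(m.continent, 'Unknown') as label, sum(t.price * t.quantity) as value
#   from trades t left join metadata m on t.isin = m.isin where 1=1
#   and m.continent in ('Europe') group by coalesce(m.continent, 'Unknown') order by label asc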
@app.get("/api/metadata/search")
async def search_metadata(q: str):
    """Case-insensitive search by ISIN or name."""
    query = f"select isin, name from metadata where isin ilike '%{q}%' or name ilike '%{q}%' limit 10"
    data = query_questdb(query)
    return format_questdb_response(data)
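
# Example request: GET /api/metadata/search?q=sap
# (ilike makes the match case-insensitive on both isin and name)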
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)