Files
trading-daemon/dashboard/server.py
Melchior Reimers 64ffd9aa32
All checks were successful
Deployment / deploy-docker (push) Successful in 9s
updated
2026-01-25 17:46:47 +01:00

321 lines
12 KiB
Python

from fastapi import FastAPI, HTTPException, Depends
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse
import requests
import os
import pandas as pd
# FastAPI application serving the trading dashboard REST API.
app = FastAPI(title="Trading Dashboard API")
# Permissive CORS: the dashboard frontend may be served from a different origin.
# NOTE(review): allow_origins=["*"] is wide open — confirm this API is not internet-facing.
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_methods=["*"],
allow_headers=["*"],
)
# Serve static files (JS/CSS/assets) for the dashboard frontend under /static.
app.mount("/static", StaticFiles(directory="dashboard/public"), name="static")


@app.get("/")
async def read_index():
    """Serve the dashboard's single-page frontend entry point."""
    return FileResponse('dashboard/public/index.html')
# QuestDB connection settings, overridable via environment variables.
DB_USER = os.getenv("DB_USER", "admin")
DB_PASSWORD = os.getenv("DB_PASSWORD", "quest")
# Basic-auth tuple for requests; None disables auth if either credential is empty.
DB_AUTH = (DB_USER, DB_PASSWORD) if DB_USER and DB_PASSWORD else None
DB_HOST = os.getenv("DB_HOST", "questdb")
@app.get("/api/trades")
async def get_trades(isin: str = None, days: int = 7, limit: int = 1000):
    """
    Return trades from the last `days` days, newest first.

    Limited to `limit` rows by default (1000) for performance; the dashboard
    overview only needs the most recent trades.

    Args:
        isin: Optional ISIN to filter by.
        days: Look-back window in days (validated as int by FastAPI).
        limit: Maximum number of rows to return (validated as int by FastAPI).

    Raises:
        HTTPException: with QuestDB's status on query failure, 500 otherwise.
    """
    # days/limit are ints (FastAPI coerces/validates), so interpolation is safe.
    query = f"select * from trades where timestamp > dateadd('d', -{days}, now())"
    if isin:
        # Escape single quotes to prevent SQL injection via the isin parameter.
        safe_isin = isin.replace("'", "''")
        query += f" and isin = '{safe_isin}'"
    query += f" order by timestamp desc limit {limit}"
    try:
        response = requests.get(f"http://{DB_HOST}:9000/exec", params={'query': query}, auth=DB_AUTH)
        if response.status_code == 200:
            return response.json()
        throw_http_error(response)
    except HTTPException:
        # Preserve the QuestDB error status raised by throw_http_error;
        # previously it was swallowed by the broad handler and remapped to 500.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@app.get("/api/metadata")
async def get_metadata():
    """
    Return the full instrument metadata table (isin, name, continent, sector, ...).

    Raises:
        HTTPException: with QuestDB's status on query failure, 500 otherwise.
    """
    query = "select * from metadata"
    try:
        response = requests.get(f"http://{DB_HOST}:9000/exec", params={'query': query}, auth=DB_AUTH)
        if response.status_code == 200:
            return response.json()
        throw_http_error(response)
    except HTTPException:
        # Preserve the QuestDB error status raised by throw_http_error;
        # previously it was swallowed by the broad handler and remapped to 500.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@app.get("/api/summary")
async def get_summary(days: int = 7):
    """
    Return a trade-count / volume summary for the last `days` days.

    Tries the pre-aggregated analytics_exchange_daily table first (fast path);
    on any failure or empty result, falls back to aggregating the raw trades
    joined with metadata, grouped by continent.

    Raises:
        HTTPException: with QuestDB's status on fallback-query failure, 500 otherwise.
    """
    # Fast path: aggregate the pre-computed per-exchange daily rollups for the
    # last N days — cheaper than a JOIN over all raw trades.
    try:
        query = f"""
        select
        'All' as continent,
        sum(trade_count) as trade_count,
        sum(volume) as total_volume
        from analytics_exchange_daily
        where timestamp >= dateadd('d', -{days}, now())
        """
        response = requests.get(f"http://{DB_HOST}:9000/exec", params={'query': query}, auth=DB_AUTH, timeout=5)
        if response.status_code == 200:
            data = response.json()
            # Only use the fast path if it actually returned rows.
            if data.get('dataset') and len(data['dataset']) > 0:
                # Reshape into the column/dataset format the frontend expects.
                result = {
                    'columns': [
                        {'name': 'continent'},
                        {'name': 'trade_count'},
                        {'name': 'total_volume'}
                    ],
                    'dataset': [[row[0], row[1], row[2]] for row in data['dataset']]
                }
                return result
    except Exception:
        # Best-effort fast path: fall through to the original query on any error.
        pass
    # Fallback: original aggregation over raw trades (days is an int, safe to embed).
    query = f"""
    select
    coalesce(m.continent, 'Unknown') as continent,
    count(*) as trade_count,
    sum(t.price * t.quantity) as total_volume
    from trades t
    left join metadata m on t.isin = m.isin
    where t.timestamp > dateadd('d', -{days}, now())
    group by continent
    """
    try:
        response = requests.get(f"http://{DB_HOST}:9000/exec", params={'query': query}, auth=DB_AUTH, timeout=10)
        if response.status_code == 200:
            return response.json()
        throw_http_error(response)
    except HTTPException:
        # Preserve the QuestDB error status raised by throw_http_error;
        # previously it was swallowed by the broad handler and remapped to 500.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@app.get("/api/analytics")
async def get_analytics(
    metric: str = "volume",
    group_by: str = "day",
    sub_group_by: str = None,
    date_from: str = None,
    date_to: str = None,
    isins: str = None,
    continents: str = None
):
    """
    Flexible analytics query over the trades table.

    Args:
        metric: "volume", "count", "avg_price", or "all" (count + volume together).
        group_by: primary grouping key (day/month/exchange/isin/name/continent/
            sector/exchange_continent/exchange_sector); unknown values fall back to "day".
        sub_group_by: optional secondary grouping key (same set as group_by).
        date_from / date_to: optional inclusive timestamp bounds.
        isins: optional comma-separated ISIN list to filter on.
        continents: optional comma-separated continent list to filter on.

    Raises:
        HTTPException: with QuestDB's status on query failure, 500 otherwise.
    """
    def _esc(value):
        # Escape single quotes to prevent SQL injection in string literals.
        return value.replace("'", "''")

    # Grouping/filtering on metadata columns requires joining the metadata table.
    composite_keys = ["exchange_continent", "exchange_sector"]
    needs_metadata = any([
        group_by in ["name", "continent", "sector"] + composite_keys,
        sub_group_by in ["name", "continent", "sector"] + composite_keys,
        continents is not None
    ])
    # Table-alias prefixes are only valid when the join is present.
    t_prefix = "t." if needs_metadata else ""
    m_prefix = "m." if needs_metadata else ""
    metrics_map = {
        "volume": f"sum({t_prefix}price * {t_prefix}quantity)",
        "count": "count(*)",
        "avg_price": f"avg({t_prefix}price)"
    }
    groups_map = {
        "day": f"date_trunc('day', {t_prefix}timestamp)",
        "month": f"date_trunc('month', {t_prefix}timestamp)",
        "exchange": f"{t_prefix}exchange",
        "isin": f"{t_prefix}isin",
        "name": f"coalesce({m_prefix}name, {t_prefix}isin)" if needs_metadata else "isin",
        "continent": f"coalesce({m_prefix}continent, 'Unknown')" if needs_metadata else "'Unknown'",
        "sector": f"coalesce({m_prefix}sector, 'Unknown')" if needs_metadata else "'Unknown'",
        "exchange_continent": f"concat({t_prefix}exchange, ' - ', coalesce({m_prefix}continent, 'Unknown'))" if needs_metadata else "'Unknown'",
        "exchange_sector": f"concat({t_prefix}exchange, ' - ', coalesce({m_prefix}sector, 'Unknown'))" if needs_metadata else "'Unknown'"
    }
    selected_metric = metrics_map.get(metric, metrics_map["volume"])
    selected_group = groups_map.get(group_by, groups_map["day"])
    query = f"select {selected_group} as label"
    if sub_group_by and sub_group_by in groups_map:
        query += f", {groups_map[sub_group_by]} as sub_label"
    if metric == 'all':
        # "all" returns both metrics in one pass.
        query += f", count(*) as value_count, sum({t_prefix}price * {t_prefix}quantity) as value_volume from trades"
    else:
        query += f", {selected_metric} as value from trades"
    if needs_metadata:
        query += " t left join metadata m on t.isin = m.isin"
    # "where 1=1" lets every filter below append uniformly with "and".
    query += " where 1=1"
    if date_from:
        query += f" and {t_prefix}timestamp >= '{_esc(date_from)}'"
    if date_to:
        query += f" and {t_prefix}timestamp <= '{_esc(date_to)}'"
    if isins:
        isins_list = ",".join([f"'{_esc(i.strip())}'" for i in isins.split(",")])
        query += f" and {t_prefix}isin in ({isins_list})"
    if continents and needs_metadata:
        cont_list = ",".join([f"'{_esc(c.strip())}'" for c in continents.split(",")])
        query += f" and {m_prefix}continent in ({cont_list})"
    query += f" group by {selected_group}"
    if sub_group_by and sub_group_by in groups_map:
        query += f", {groups_map[sub_group_by]}"
    query += " order by label asc"
    try:
        response = requests.get(f"http://{DB_HOST}:9000/exec", params={'query': query}, auth=DB_AUTH)
        if response.status_code == 200:
            return response.json()
        throw_http_error(response)
    except HTTPException:
        # Preserve the QuestDB error status raised by throw_http_error;
        # previously it was swallowed by the broad handler and remapped to 500.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@app.get("/api/metadata/search")
async def search_metadata(q: str):
    """
    Case-insensitive substring search over metadata by ISIN or name (max 10 hits).

    Args:
        q: Search term matched with ilike against isin and name.

    Raises:
        HTTPException: with QuestDB's status on query failure, 500 otherwise.
    """
    # Escape single quotes to prevent SQL injection via the search term.
    safe_q = q.replace("'", "''")
    query = f"select isin, name from metadata where isin ilike '%{safe_q}%' or name ilike '%{safe_q}%' limit 10"
    try:
        response = requests.get(f"http://{DB_HOST}:9000/exec", params={'query': query}, auth=DB_AUTH)
        if response.status_code == 200:
            return response.json()
        throw_http_error(response)
    except HTTPException:
        # Preserve the QuestDB error status raised by throw_http_error;
        # previously it was swallowed by the broad handler and remapped to 500.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@app.get("/api/statistics/moving-average")
async def get_moving_average(days: int = 7, exchange: str = None):
    """
    Return moving-average trade counts and volumes per exchange.

    Supported windows: 7, 30, 42, 69, 180, 365 days. Reads the pre-computed
    analytics_exchange_daily table (columns ma{N}_count / ma{N}_volume) for
    fast response times.

    Args:
        days: Moving-average window; must be one of the supported values.
        exchange: Optional exchange name filter.

    Raises:
        HTTPException: 400 on invalid days, QuestDB's status on query failure,
            500 otherwise.
    """
    # Whitelist validation also guarantees the f-string interpolation below is safe.
    if days not in [7, 30, 42, 69, 180, 365]:
        raise HTTPException(status_code=400, detail="Invalid days parameter. Must be one of: 7, 30, 42, 69, 180, 365")
    query = f"""
    select
    timestamp as date,
    exchange,
    trade_count,
    volume,
    ma{days}_count as ma_count,
    ma{days}_volume as ma_volume
    from analytics_exchange_daily
    where timestamp >= dateadd('d', -{days}, now())
    """
    if exchange:
        # Escape single quotes to prevent SQL injection via the exchange filter.
        safe_exchange = exchange.replace("'", "''")
        query += f" and exchange = '{safe_exchange}'"
    query += " order by date asc, exchange asc"
    try:
        response = requests.get(f"http://{DB_HOST}:9000/exec", params={'query': query}, auth=DB_AUTH, timeout=5)
        if response.status_code == 200:
            return response.json()
        throw_http_error(response)
    except HTTPException:
        # Preserve both the 400 above and QuestDB error statuses; previously
        # they were swallowed by the broad handler and remapped to 500.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@app.get("/api/statistics/volume-changes")
async def get_volume_changes(days: int = 7):
    """
    Return volume and trade-count changes per exchange.

    Supported windows: 7, 30, 42, 69, 180, 365 days. Reads the pre-computed
    analytics_volume_changes table for fast response times.

    Args:
        days: Comparison period; must be one of the supported values.

    Raises:
        HTTPException: 400 on invalid days, QuestDB's status on query failure,
            500 otherwise.
    """
    # Whitelist validation also guarantees the f-string interpolation below is safe.
    if days not in [7, 30, 42, 69, 180, 365]:
        raise HTTPException(status_code=400, detail="Invalid days parameter. Must be one of: 7, 30, 42, 69, 180, 365")
    query = f"""
    select
    timestamp as date,
    exchange,
    trade_count,
    volume,
    count_change_pct,
    volume_change_pct,
    trend
    from analytics_volume_changes
    where period_days = {days}
    order by date desc, exchange asc
    """
    try:
        response = requests.get(f"http://{DB_HOST}:9000/exec", params={'query': query}, auth=DB_AUTH, timeout=5)
        if response.status_code == 200:
            return response.json()
        throw_http_error(response)
    except HTTPException:
        # Preserve both the 400 above and QuestDB error statuses; previously
        # they were swallowed by the broad handler and remapped to 500.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@app.get("/api/statistics/stock-trends")
async def get_stock_trends(days: int = 7, limit: int = 20):
    """
    Return trend analysis for the most-traded stocks by volume.

    Supported windows: 7, 30, 42, 69, 180, 365 days. Reads the pre-computed
    analytics_stock_trends table for fast response times.

    Args:
        days: Analysis period; must be one of the supported values.
        limit: Maximum number of stocks returned (validated as int by FastAPI).

    Raises:
        HTTPException: 400 on invalid days, QuestDB's status on query failure,
            500 otherwise.
    """
    # Whitelist validation also guarantees the f-string interpolation below is safe;
    # limit is an int (FastAPI coerces/validates).
    if days not in [7, 30, 42, 69, 180, 365]:
        raise HTTPException(status_code=400, detail="Invalid days parameter. Must be one of: 7, 30, 42, 69, 180, 365")
    query = f"""
    select
    timestamp as date,
    isin,
    trade_count,
    volume,
    count_change_pct,
    volume_change_pct
    from analytics_stock_trends
    where period_days = {days}
    order by volume desc
    limit {limit}
    """
    try:
        response = requests.get(f"http://{DB_HOST}:9000/exec", params={'query': query}, auth=DB_AUTH, timeout=5)
        if response.status_code == 200:
            return response.json()
        throw_http_error(response)
    except HTTPException:
        # Preserve both the 400 above and QuestDB error statuses; previously
        # they were swallowed by the broad handler and remapped to 500.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
def throw_http_error(res):
    """Raise an HTTPException mirroring a failed QuestDB HTTP response.

    The raised exception carries the upstream status code and includes the
    response body in its detail message.
    """
    message = f"QuestDB error: {res.text}"
    raise HTTPException(status_code=res.status_code, detail=message)
if __name__ == "__main__":
    # Development entry point: run the API directly with uvicorn on all
    # interfaces, port 8000. In production the container presumably starts
    # uvicorn externally — TODO confirm against the deployment config.
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)