@@ -1,10 +1,10 @@
-from fastapi import FastAPI, HTTPException, Depends
+from fastapi import FastAPI, HTTPException
 from fastapi.middleware.cors import CORSMiddleware
 from fastapi.staticfiles import StaticFiles
 from fastapi.responses import FileResponse
 import requests
 import os
-import pandas as pd
+from typing import Optional, Dict, Any
 
 app = FastAPI(title="Trading Dashboard API")
 
@@ -22,97 +22,210 @@ app.mount("/static", StaticFiles(directory="dashboard/public"), name="static")
 async def read_index():
     return FileResponse('dashboard/public/index.html')
 
 # QuestDB configuration
 DB_USER = os.getenv("DB_USER", "admin")
 DB_PASSWORD = os.getenv("DB_PASSWORD", "quest")
 DB_AUTH = (DB_USER, DB_PASSWORD) if DB_USER and DB_PASSWORD else None
 DB_HOST = os.getenv("DB_HOST", "questdb")
+DB_URL = f"http://{DB_HOST}:9000"
 
-@app.get("/api/trades")
-async def get_trades(isin: str = None, days: int = 7, limit: int = 1000):
-    """
-    Returns trades. Limited to 1000 by default for performance.
-    Only the most recent trades are needed for the dashboard overview.
-    """
-    query = f"select * from trades where timestamp > dateadd('d', -{days}, now())"
-    if isin:
-        query += f" and isin = '{isin}'"
-    query += f" order by timestamp desc limit {limit}"
 
+# Helper functions
+def query_questdb(query: str, timeout: int = 10) -> Optional[Dict[str, Any]]:
+    """Central QuestDB query function"""
     try:
-        response = requests.get(f"http://{DB_HOST}:9000/exec", params={'query': query}, auth=DB_AUTH)
+        response = requests.get(f"{DB_URL}/exec", params={'query': query}, auth=DB_AUTH, timeout=timeout)
         if response.status_code == 200:
             return response.json()
-        throw_http_error(response)
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
+        else:
+            raise HTTPException(status_code=response.status_code, detail=f"QuestDB error: {response.text}")
+    except requests.exceptions.Timeout:
+        raise HTTPException(status_code=504, detail="QuestDB query timeout")
+    except requests.exceptions.RequestException as e:
+        raise HTTPException(status_code=500, detail=f"QuestDB connection error: {str(e)}")
 
+def format_questdb_response(data: Dict[str, Any]) -> Dict[str, Any]:
+    """Uniform formatting of QuestDB responses"""
+    if not data:
+        return {'columns': [], 'dataset': []}
+    return data
 
+# API endpoints
+@app.get("/api/trades")
+async def get_trades(isin: str = None, days: int = 7):
+    """
+    Returns an aggregated analysis of all trades (not individual trades).
+    Uses precomputed data from analytics_exchange_daily.
+    """
+    if isin:
+        # For a specific ISIN: fetch from the trades table
+        query = f"""
+            select
+                date_trunc('day', timestamp) as date,
+                count(*) as trade_count,
+                sum(price * quantity) as volume,
+                avg(price) as avg_price
+            from trades
+            where isin = '{isin}'
+            and timestamp > dateadd('d', -{days}, now())
+            group by date
+            order by date desc
+        """
+    else:
+        # Aggregated data from analytics_exchange_daily
+        query = f"""
+            select
+                timestamp as date,
+                exchange,
+                trade_count,
+                volume
+            from analytics_exchange_daily
+            where timestamp >= dateadd('d', -{days}, now())
+            order by date desc, exchange asc
+        """
+
+    data = query_questdb(query)
+    return format_questdb_response(data)
 
 @app.get("/api/metadata")
 async def get_metadata():
     """Returns all metadata"""
     query = "select * from metadata"
-    try:
-        response = requests.get(f"http://{DB_HOST}:9000/exec", params={'query': query}, auth=DB_AUTH)
-        if response.status_code == 200:
-            return response.json()
-        throw_http_error(response)
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
+    data = query_questdb(query)
+    return format_questdb_response(data)
 
 @app.get("/api/summary")
-async def get_summary(days: int = 7):
+async def get_summary():
     """
-    Returns a summary. Optimized for fast queries.
-    If precomputed data is available, it is used.
+    Returns a summary. Uses analytics_daily_summary for total_trades (all trades).
     """
+    # Get the total number of all trades from analytics_daily_summary
+    query = """
+        select
+            sum(total_trades) as total_trades,
+            sum(total_volume) as total_volume
+        from analytics_daily_summary
+    """
-    # Try to aggregate from analytics_exchange_daily first (faster)
-    # If that does not work, fall back to the original query
-    try:
-        # Aggregate from analytics_exchange_daily for the last N days
-        # This is faster than a JOIN query over all trades
-        query = f"""
-            select
-                'All' as continent,
-                sum(trade_count) as trade_count,
-                sum(volume) as total_volume
-            from analytics_exchange_daily
-            where timestamp >= dateadd('d', -{days}, now())
-        """
-        response = requests.get(f"http://{DB_HOST}:9000/exec", params={'query': query}, auth=DB_AUTH, timeout=5)
-        if response.status_code == 200:
-            data = response.json()
-            # If data is available, use it
-            if data.get('dataset') and len(data['dataset']) > 0:
-                # Format for compatibility with the frontend
-                result = {
-                    'columns': [
-                        {'name': 'continent'},
-                        {'name': 'trade_count'},
-                        {'name': 'total_volume'}
-                    ],
-                    'dataset': [[row[0], row[1], row[2]] for row in data['dataset']]
-                }
-                return result
-    except Exception:
-        # Fall back to the original query
-        pass
 
-    # Fallback: original query with a limit for performance
-    query = f"""
+    data = query_questdb(query)
+    if data and data.get('dataset') and data['dataset']:
+        total_trades = data['dataset'][0][0] if data['dataset'][0][0] else 0
+        total_volume = data['dataset'][0][1] if data['dataset'][0][1] else 0.0
+
+        # Format for compatibility
+        return {
+            'columns': [
+                {'name': 'continent'},
+                {'name': 'trade_count'},
+                {'name': 'total_volume'}
+            ],
+            'dataset': [['All', total_trades, total_volume]]
+        }
+
+    # Fallback: original query
+    query = """
         select
             coalesce(m.continent, 'Unknown') as continent,
             count(*) as trade_count,
             sum(t.price * t.quantity) as total_volume
         from trades t
         left join metadata m on t.isin = m.isin
         where t.timestamp > dateadd('d', -{days}, now())
         group by continent
     """
-    try:
-        response = requests.get(f"http://{DB_HOST}:9000/exec", params={'query': query}, auth=DB_AUTH, timeout=10)
-        if response.status_code == 200:
-            return response.json()
-        throw_http_error(response)
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
+    data = query_questdb(query)
+    return format_questdb_response(data)
 
+@app.get("/api/statistics/total-trades")
+async def get_total_trades():
+    """Returns the total number of all trades (from analytics_daily_summary)"""
+    query = "select sum(total_trades) as total from analytics_daily_summary"
+    data = query_questdb(query)
+    if data and data.get('dataset') and data['dataset']:
+        total = data['dataset'][0][0] if data['dataset'][0][0] else 0
+        return {'total_trades': total}
+    return {'total_trades': 0}
+
+@app.get("/api/statistics/moving-average")
+async def get_moving_average(days: int = 7, exchange: str = None):
+    """
+    Returns moving average data for trade counts and volume per exchange.
+    Supported periods: 7, 30, 42, 69, 180, 365 days
+    """
+    if days not in [7, 30, 42, 69, 180, 365]:
+        raise HTTPException(status_code=400, detail="Invalid days parameter. Must be one of: 7, 30, 42, 69, 180, 365")
+
+    query = f"""
+        select
+            timestamp as date,
+            exchange,
+            trade_count,
+            volume,
+            ma{days}_count as ma_count,
+            ma{days}_volume as ma_volume
+        from analytics_exchange_daily
+        where timestamp >= dateadd('d', -{days}, now())
+    """
+
+    if exchange:
+        query += f" and exchange = '{exchange}'"
+
+    query += " order by date asc, exchange asc"
+
+    data = query_questdb(query, timeout=5)
+    return format_questdb_response(data)
+
+@app.get("/api/statistics/volume-changes")
+async def get_volume_changes(days: int = 7):
+    """
+    Returns changes in volume and count per exchange.
+    Supported periods: 7, 30, 42, 69, 180, 365 days
+    """
+    if days not in [7, 30, 42, 69, 180, 365]:
+        raise HTTPException(status_code=400, detail="Invalid days parameter. Must be one of: 7, 30, 42, 69, 180, 365")
+
+    query = f"""
+        select
+            timestamp as date,
+            exchange,
+            trade_count,
+            volume,
+            count_change_pct,
+            volume_change_pct,
+            trend
+        from analytics_volume_changes
+        where period_days = {days}
+        and timestamp >= dateadd('d', -{days}, now())
+        order by date desc, exchange asc
+    """
+
+    data = query_questdb(query, timeout=5)
+    return format_questdb_response(data)
+
+@app.get("/api/statistics/stock-trends")
+async def get_stock_trends(days: int = 7, limit: int = 20):
+    """
+    Returns trend analysis for frequently traded stocks.
+    Supported periods: 7, 30, 42, 69, 180, 365 days
+    """
+    if days not in [7, 30, 42, 69, 180, 365]:
+        raise HTTPException(status_code=400, detail="Invalid days parameter. Must be one of: 7, 30, 42, 69, 180, 365")
+
+    query = f"""
+        select
+            timestamp as date,
+            isin,
+            trade_count,
+            volume,
+            count_change_pct,
+            volume_change_pct
+        from analytics_stock_trends
+        where period_days = {days}
+        and timestamp >= dateadd('d', -{days}, now())
+        order by volume desc
+        limit {limit}
+    """
+
+    data = query_questdb(query, timeout=5)
+    return format_questdb_response(data)
+
 @app.get("/api/analytics")
 async def get_analytics(
@@ -124,8 +237,7 @@ async def get_analytics(
     isins: str = None,
     continents: str = None
 ):
-    # Determine if we need to join metadata
-    # Determine if we need to join metadata
+    """Analytics endpoint for the report builder"""
     composite_keys = ["exchange_continent", "exchange_sector"]
     needs_metadata = any([
         group_by in ["name", "continent", "sector"] + composite_keys,
@@ -133,7 +245,6 @@ async def get_analytics(
         continents is not None
     ])
 
-    # Use prefixes only if joining
     t_prefix = "t." if needs_metadata else ""
     m_prefix = "m." if needs_metadata else ""
 
@@ -191,129 +302,15 @@ async def get_analytics(
 
     query += " order by label asc"
 
-    try:
-        response = requests.get(f"http://{DB_HOST}:9000/exec", params={'query': query}, auth=DB_AUTH)
-        if response.status_code == 200:
-            return response.json()
-        throw_http_error(response)
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
+    data = query_questdb(query)
+    return format_questdb_response(data)
 
 @app.get("/api/metadata/search")
 async def search_metadata(q: str):
-    # Case-insensitive search for ISIN or Name
+    """Case-insensitive search for ISIN or Name"""
     query = f"select isin, name from metadata where isin ilike '%{q}%' or name ilike '%{q}%' limit 10"
-    try:
-        response = requests.get(f"http://{DB_HOST}:9000/exec", params={'query': query}, auth=DB_AUTH)
-        if response.status_code == 200:
-            return response.json()
-        throw_http_error(response)
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
-
-@app.get("/api/statistics/moving-average")
-async def get_moving_average(days: int = 7, exchange: str = None):
-    """
-    Returns moving average data for trade counts and volume per exchange.
-    Supported periods: 7, 30, 42, 69, 180, 365 days
-    Uses precomputed data from analytics_exchange_daily for fast response times.
-    """
-    if days not in [7, 30, 42, 69, 180, 365]:
-        raise HTTPException(status_code=400, detail="Invalid days parameter. Must be one of: 7, 30, 42, 69, 180, 365")
-
-    # Fetch data from the precomputed analytics_exchange_daily table
-    query = f"""
-        select
-            timestamp as date,
-            exchange,
-            trade_count,
-            volume,
-            ma{days}_count as ma_count,
-            ma{days}_volume as ma_volume
-        from analytics_exchange_daily
-        where timestamp >= dateadd('d', -{days}, now())
-    """
-
-    if exchange:
-        query += f" and exchange = '{exchange}'"
-
-    query += " order by date asc, exchange asc"
-
-    try:
-        response = requests.get(f"http://{DB_HOST}:9000/exec", params={'query': query}, auth=DB_AUTH, timeout=5)
-        if response.status_code == 200:
-            return response.json()
-        throw_http_error(response)
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
-
-@app.get("/api/statistics/volume-changes")
-async def get_volume_changes(days: int = 7):
-    """
-    Returns changes in volume and count per exchange.
-    Supported periods: 7, 30, 42, 69, 180, 365 days
-    Uses precomputed data from analytics_volume_changes for fast response times.
-    """
-    if days not in [7, 30, 42, 69, 180, 365]:
-        raise HTTPException(status_code=400, detail="Invalid days parameter. Must be one of: 7, 30, 42, 69, 180, 365")
-
-    query = f"""
-        select
-            timestamp as date,
-            exchange,
-            trade_count,
-            volume,
-            count_change_pct,
-            volume_change_pct,
-            trend
-        from analytics_volume_changes
-        where period_days = {days}
-        order by date desc, exchange asc
-    """
-
-    try:
-        response = requests.get(f"http://{DB_HOST}:9000/exec", params={'query': query}, auth=DB_AUTH, timeout=5)
-        if response.status_code == 200:
-            return response.json()
-        throw_http_error(response)
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
-
-@app.get("/api/statistics/stock-trends")
-async def get_stock_trends(days: int = 7, limit: int = 20):
-    """
-    Returns trend analysis for frequently traded stocks.
-    Supported periods: 7, 30, 42, 69, 180, 365 days
-    Uses precomputed data from analytics_stock_trends for fast response times.
-    """
-    if days not in [7, 30, 42, 69, 180, 365]:
-        raise HTTPException(status_code=400, detail="Invalid days parameter. Must be one of: 7, 30, 42, 69, 180, 365")
-
-    # Fetch top stocks by volume for the period
-    query = f"""
-        select
-            timestamp as date,
-            isin,
-            trade_count,
-            volume,
-            count_change_pct,
-            volume_change_pct
-        from analytics_stock_trends
-        where period_days = {days}
-        order by volume desc
-        limit {limit}
-    """
-
-    try:
-        response = requests.get(f"http://{DB_HOST}:9000/exec", params={'query': query}, auth=DB_AUTH, timeout=5)
-        if response.status_code == 200:
-            return response.json()
-        throw_http_error(response)
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
-
-def throw_http_error(res):
-    raise HTTPException(status_code=res.status_code, detail=f"QuestDB error: {res.text}")
+    data = query_questdb(query)
+    return format_questdb_response(data)
 
 if __name__ == "__main__":
     import uvicorn
 