@@ -84,6 +84,23 @@ async def get_trades(isin: str = None, days: int = 7):
     """
     data = query_questdb(query)
+
+    # Fallback: if analytics_exchange_daily is empty, calculate directly from trades
+    if not data or not data.get('dataset') or len(data.get('dataset', [])) == 0:
+        logger.info(f"analytics_exchange_daily is empty, calculating from trades table")
+        query = f"""
+            select
+                date_trunc('day', timestamp) as date,
+                exchange,
+                count(*) as trade_count,
+                sum(price * quantity) as volume
+            from trades
+            where timestamp >= dateadd('d', -{days}, now())
+            group by date, exchange
+            order by date desc, exchange asc
+        """
+        data = query_questdb(query)
+
     return format_questdb_response(data)
 
 
 @app.get("/api/metadata")
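The same empty-result fallback recurs in the handlers below. As a hedged sketch of how it could be consolidated (the helper name is hypothetical and not part of this commit; it assumes query_questdb and logger behave as shown in the diff), each endpoint would pass both its pre-aggregated query and its trades-based query:

def query_with_fallback(primary_query, fallback_query, timeout=None, source="analytics table"):
    # Hypothetical helper: run the pre-aggregated query first.
    kwargs = {"timeout": timeout} if timeout is not None else {}
    data = query_questdb(primary_query, **kwargs)
    # Empty dataset -> re-run against the raw trades table, mirroring the checks in the diff.
    if not data or not data.get('dataset'):
        logger.info(f"{source} is empty, falling back to trades")
        data = query_questdb(fallback_query, **kwargs)
    return data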
@@ -118,6 +135,27 @@ async def get_summary(days: int = None):
     """
     data = query_questdb(query)
+
+    # Fallback: if analytics_daily_summary is empty, calculate directly from trades
+    if not data or not data.get('dataset') or not data['dataset']:
+        logger.info(f"analytics_daily_summary is empty, calculating from trades table")
+        if days:
+            query = f"""
+                select
+                    count(*) as total_trades,
+                    sum(price * quantity) as total_volume
+                from trades
+                where timestamp >= dateadd('d', -{days}, now())
+            """
+        else:
+            query = """
+                select
+                    count(*) as total_trades,
+                    sum(price * quantity) as total_volume
+                from trades
+            """
+        data = query_questdb(query)
+
     if data and data.get('dataset') and data['dataset']:
         total_trades = data['dataset'][0][0] if data['dataset'][0][0] else 0
         total_volume = data['dataset'][0][1] if data['dataset'][0][1] else 0.0
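The positional indexing into data['dataset'][0] above assumes the shape of QuestDB's /exec JSON response (a columns list plus a dataset of row arrays). A minimal sketch of that assumption, with made-up values for illustration only:

# Illustrative payload only; the numbers are invented.
example = {
    "columns": [
        {"name": "total_trades", "type": "LONG"},
        {"name": "total_volume", "type": "DOUBLE"},
    ],
    "dataset": [[12345, 9876543.21]],
}
row = example["dataset"][0]
total_trades = row[0] if row[0] else 0      # same null-guards as the handler
total_volume = row[1] if row[1] else 0.0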
@@ -337,6 +375,27 @@ async def get_moving_average(days: int = 7, exchange: str = None):
     query += " order by date asc, exchange asc"
     data = query_questdb(query, timeout=5)
+
+    # Fallback: if analytics_exchange_daily is empty, calculate directly from trades
+    if not data or not data.get('dataset') or len(data.get('dataset', [])) == 0:
+        logger.info(f"analytics_exchange_daily is empty, calculating moving average from trades table")
+        # Calculate the moving average directly from trades (simplified, without a real MA calculation)
+        query = f"""
+            select
+                date_trunc('day', timestamp) as date,
+                exchange,
+                count(*) as trade_count,
+                sum(price * quantity) as volume,
+                count(*) as ma_count,
+                sum(price * quantity) as ma_volume
+            from trades
+            where timestamp >= dateadd('d', -{days}, now())
+        """
+        if exchange:
+            query += f" and exchange = '{exchange}'"
+        query += " group by date, exchange order by date asc, exchange asc"
+        data = query_questdb(query, timeout=10)
+
     return format_questdb_response(data)
 
 
 @app.get("/api/statistics/volume-changes")
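As the comment notes, this fallback only re-aggregates per day and copies the daily totals into ma_count/ma_volume rather than computing a real moving average. If a true rolling average were wanted, one option (a sketch assuming pandas is available and an illustrative 7-day window, neither of which is part of this commit) would be to post-process the fallback rows in Python:

import pandas as pd

def add_moving_average(rows, window=7):
    # rows: [date, exchange, trade_count, volume, ma_count, ma_volume] as produced by the fallback query
    df = pd.DataFrame(rows, columns=["date", "exchange", "trade_count", "volume", "ma_count", "ma_volume"])
    df = df.sort_values(["exchange", "date"])
    # Replace the copied totals with per-exchange rolling means over the window.
    df["ma_count"] = (
        df.groupby("exchange")["trade_count"]
          .transform(lambda s: s.rolling(window, min_periods=1).mean())
    )
    df["ma_volume"] = (
        df.groupby("exchange")["volume"]
          .transform(lambda s: s.rolling(window, min_periods=1).mean())
    )
    return df.values.tolist()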
@@ -613,17 +613,17 @@ class AnalyticsWorker:
         # Fetch already calculated data
         existing_dates = self.get_existing_dates('analytics_custom')
 
-        # Generate all days from the first trade until yesterday
-        yesterday = datetime.date.today() - datetime.timedelta(days=1)
+        # Generate all days from the first trade until today
+        today = datetime.date.today()
         all_dates = []
         current = first_date
-        while current <= yesterday:
+        while current <= today:
             all_dates.append(current)
             current += datetime.timedelta(days=1)
 
         # Find missing days
         missing_dates = [d for d in all_dates if d not in existing_dates]
-        logger.info(f"Found {len(missing_dates)} missing dates to calculate (from {len(all_dates)} total dates)")
+        logger.info(f"Found {len(missing_dates)} missing dates to calculate (from {len(all_dates)} total dates, first: {first_date}, last: {today})")
         return sorted(missing_dates)
 
     def process_missing_dates(self):
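The behavioural change in this hunk is that the generated date range now includes today instead of stopping at yesterday. A minimal standalone sketch of the new loop (the function name is hypothetical, for illustration only):

import datetime

def build_date_range(first_date: datetime.date) -> list:
    # Walk day by day from the first trade date up to and including today
    # (the old code stopped at yesterday).
    today = datetime.date.today()
    all_dates = []
    current = first_date
    while current <= today:
        all_dates.append(current)
        current += datetime.timedelta(days=1)
    return all_dates

# e.g. build_date_range(datetime.date.today()) returns a one-element list containing today.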