@@ -114,168 +114,46 @@ async def get_analytics(
        "exchange_sector": f"concat({t_prefix}exchange, ' - ', coalesce({m_prefix}sector, 'Unknown'))" if needs_metadata else "'Unknown'"
    }

    # Choose the source table by granularity and filters: day/month aggregation
    # without ISIN-specific filtering can use the pre-aggregated analytics_daily
    # table; analytics_daily carries no per-ISIN rows, so any ISIN filter or
    # grouping has to fall back to the raw trades table.
    selected_metric = metrics_map.get(metric, metrics_map["volume"])
    selected_group = groups_map.get(group_by, groups_map["day"])

    use_analytics_table = False

    # Check whether the pre-aggregated table can serve this request
    if not isins and sub_group_by != "isin" and group_by != "isin" and group_by != "name":
        use_analytics_table = True

    table_name = "analytics_daily" if use_analytics_table else "trades"

    # analytics_daily columns: timestamp, exchange, sector, continent, volume,
    # trade_count, avg_price. The generic query builder is mapped onto this
    # schema for the main views (exchange/continent breakdown); everything
    # else keeps using the raw trades table.
    if use_analytics_table:
        # Simplified query against the pre-aggregated table.
        # Note: timestamps are day-aligned in analytics_daily.

        # Metric mapping for the analytics table ('all' is handled separately below)
        metrics_map_opt = {
            "volume": "sum(volume)",
            "count": "sum(trade_count)",
            "avg_price": "avg(avg_price)",  # unweighted daily mean; close enough for display
        }
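        # A trade-count-weighted alternative (a sketch, not part of this commit):
        # avg(avg_price) weights every day equally; weighting by trade_count
        # would reproduce the exact mean trade price:
        #   "avg_price": "sum(avg_price * trade_count) / sum(trade_count)"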

        if metric == 'all':
            metric_expr = "sum(trade_count) as value_count, sum(volume) as value_volume"
        else:
            metric_expr = f"{metrics_map_opt.get(metric, 'sum(volume)')} as value"

        # Group mapping for analytics_daily (timestamp, exchange, sector, continent)
        groups_map_opt = {
            "day": "timestamp",
            "month": "date_trunc('month', timestamp)",
            "exchange": "exchange",
            "continent": "continent",
            "sector": "sector",
            "exchange_continent": "concat(exchange, ' - ', continent)",
            "exchange_sector": "concat(exchange, ' - ', sector)"
        }

        sel_group_expr = groups_map_opt.get(group_by, "timestamp")

        query = f"select {sel_group_expr} as label"

        if sub_group_by and sub_group_by in groups_map_opt:
            query += f", {groups_map_opt[sub_group_by]} as sub_label"

        query += f", {metric_expr} from analytics_daily where 1=1"

        if date_from: query += f" and timestamp >= '{date_from}'"
        if date_to: query += f" and timestamp <= '{date_to}'"

        # Filters
        if continents:
            cont_list = ",".join([f"'{c.strip()}'" for c in continents.split(",")])
            query += f" and continent in ({cont_list})"

        query += f" group by {sel_group_expr}"
        if sub_group_by and sub_group_by in groups_map_opt:
            query += f", {groups_map_opt[sub_group_by]}"

        query += " order by label asc"
    else:
        # Fall back to the raw trades query (existing logic, wrapped in this branch)
        selected_metric = metrics_map.get(metric, metrics_map["volume"])
        selected_group = groups_map.get(group_by, groups_map["day"])

        query = f"select {selected_group} as label"

        if sub_group_by and sub_group_by in groups_map:
            query += f", {groups_map[sub_group_by]} as sub_label"

        if metric == 'all':
            query += f", count(*) as value_count, sum({t_prefix}price * {t_prefix}quantity) as value_volume from trades"
        else:
            query += f", {selected_metric} as value from trades"

        if needs_metadata:
            query += " t left join metadata m on t.isin = m.isin"

        query += " where 1=1"

        if date_from:
            query += f" and {t_prefix}timestamp >= '{date_from}'"
        if date_to:
            query += f" and {t_prefix}timestamp <= '{date_to}'"

        if isins:
            isins_list = ",".join([f"'{i.strip()}'" for i in isins.split(",")])
            query += f" and {t_prefix}isin in ({isins_list})"

        if continents and needs_metadata:
            cont_list = ",".join([f"'{c.strip()}'" for c in continents.split(",")])
            query += f" and {m_prefix}continent in ({cont_list})"

        query += f" group by {selected_group}"
        if sub_group_by and sub_group_by in groups_map:
            query += f", {groups_map[sub_group_by]}"

        query += " order by label asc"
print(f"Executing Query: {query}")
|
||||
try:
|
||||
response = requests.get(f"http://{DB_HOST}:9000/exec", params={'query': query}, auth=DB_AUTH)
|
||||
if response.status_code == 200:
|
||||
return response.json()
|
||||
|
||||
print(f"DEBUG: Query Failed: {response.text}")
|
||||
|
||||

        if use_analytics_table:
            print("DEBUG: Analytics query failed, falling back to RAW trades query...")

            selected_metric = metrics_map.get(metric, metrics_map["volume"])
            selected_group = groups_map.get(group_by, groups_map["day"])

            raw_query = f"select {selected_group} as label"

            if sub_group_by and sub_group_by in groups_map:
                raw_query += f", {groups_map[sub_group_by]} as sub_label"

            if metric == 'all':
                raw_query += f", count(*) as value_count, sum({t_prefix}price * {t_prefix}quantity) as value_volume from trades"
            else:
                raw_query += f", {selected_metric} as value from trades"

            if needs_metadata:
                raw_query += " t left join metadata m on t.isin = m.isin"

            raw_query += " where 1=1"

            if date_from: raw_query += f" and {t_prefix}timestamp >= '{date_from}'"
            if date_to: raw_query += f" and {t_prefix}timestamp <= '{date_to}'"
            if isins:
                isins_list = ",".join([f"'{i.strip()}'" for i in isins.split(",")])
                raw_query += f" and {t_prefix}isin in ({isins_list})"
            if continents and needs_metadata:
                cont_list = ",".join([f"'{c.strip()}'" for c in continents.split(",")])
                raw_query += f" and {m_prefix}continent in ({cont_list})"

            raw_query += f" group by {selected_group}"
            if sub_group_by and sub_group_by in groups_map:
                raw_query += f", {groups_map[sub_group_by]}"

            raw_query += " order by label asc"

            print(f"Executing Fallback Query: {raw_query}")
            fb_response = requests.get(f"http://{DB_HOST}:9000/exec", params={'query': raw_query}, auth=DB_AUTH)
            if fb_response.status_code == 200:
                return fb_response.json()

        throw_http_error(response)
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
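
# A minimal hardening sketch (assumption: QuestDB accepts standard SQL quote
# doubling). The filters above interpolate request values such as isins and
# continents directly into the SQL string; a hypothetical helper like this,
# applied to each value, would close the obvious injection path:
def sql_str(value: str) -> str:
    # Double any embedded single quotes, then wrap: O'Hare -> 'O''Hare'
    escaped = str(value).replace("'", "''")
    return f"'{escaped}'"
# e.g. ",".join(sql_str(i.strip()) for i in isins.split(","))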

@@ -292,6 +170,104 @@ async def search_metadata(q: str):
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@app.get("/api/statistics/moving-average")
|
||||
async def get_moving_average(days: int = 7, exchange: str = None):
|
||||
"""
|
||||
Gibt Moving Average Daten für Tradezahlen und Volumen je Exchange zurück.
|
||||
Unterstützte Zeiträume: 7, 30, 42, 69, 180, 365 Tage
|
||||
"""
|
||||
if days not in [7, 30, 42, 69, 180, 365]:
|
||||
raise HTTPException(status_code=400, detail="Invalid days parameter. Must be one of: 7, 30, 42, 69, 180, 365")
|
||||

    # Fetch the precomputed moving averages from analytics_exchange_daily; the
    # whitelist check above keeps the ma{days} column interpolation safe.
    query = f"""
        select
            timestamp as date,
            exchange,
            trade_count,
            volume,
            ma{days}_count as ma_count,
            ma{days}_volume as ma_volume
        from analytics_exchange_daily
        where timestamp >= dateadd('d', -{days}, now())
    """

    if exchange:
        query += f" and exchange = '{exchange}'"

    query += " order by date asc, exchange asc"

    try:
        response = requests.get(f"http://{DB_HOST}:9000/exec", params={'query': query}, auth=DB_AUTH)
        if response.status_code == 200:
            return response.json()
        throw_http_error(response)
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
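
# Illustrative client call (base URL and exchange value are assumptions,
# not taken from this diff):
#   import requests
#   requests.get("http://localhost:8000/api/statistics/moving-average",
#                params={"days": 30, "exchange": "XETRA"}).json()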

@app.get("/api/statistics/volume-changes")
async def get_volume_changes(days: int = 7):
    """
    Returns volume and trade-count changes per exchange.
    Supported periods: 7, 30, 42, 69, 180, 365 days
    """
    if days not in [7, 30, 42, 69, 180, 365]:
        raise HTTPException(status_code=400, detail="Invalid days parameter. Must be one of: 7, 30, 42, 69, 180, 365")
query = f"""
|
||||
select
|
||||
timestamp as date,
|
||||
exchange,
|
||||
trade_count,
|
||||
volume,
|
||||
count_change_pct,
|
||||
volume_change_pct,
|
||||
trend
|
||||
from analytics_volume_changes
|
||||
where period_days = {days}
|
||||
order by date desc, exchange asc
|
||||
"""
|
||||
|
||||

    try:
        response = requests.get(f"http://{DB_HOST}:9000/exec", params={'query': query}, auth=DB_AUTH)
        if response.status_code == 200:
            return response.json()
        throw_http_error(response)
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
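
# The QuestDB /exec response is passed through to the client unchanged; per
# the QuestDB REST docs it has roughly this shape (values illustrative):
#   {"query": "...", "columns": [{"name": "date", "type": "TIMESTAMP"}, ...],
#    "dataset": [[...], ...]}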

@app.get("/api/statistics/stock-trends")
async def get_stock_trends(days: int = 7, limit: int = 20):
    """
    Returns trend analysis for frequently traded stocks.
    Supported periods: 7, 30, 42, 69, 180, 365 days
    """
    if days not in [7, 30, 42, 69, 180, 365]:
        raise HTTPException(status_code=400, detail="Invalid days parameter. Must be one of: 7, 30, 42, 69, 180, 365")

    # Fetch the top stocks by volume for the period
    query = f"""
        select
            timestamp as date,
            isin,
            trade_count,
            volume,
            count_change_pct,
            volume_change_pct
        from analytics_stock_trends
        where period_days = {days}
        order by volume desc
        limit {limit}
    """

    try:
        response = requests.get(f"http://{DB_HOST}:9000/exec", params={'query': query}, auth=DB_AUTH)
        if response.status_code == 200:
            return response.json()
        throw_http_error(response)
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
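
# Illustrative client call (base URL assumed as above):
#   requests.get("http://localhost:8000/api/statistics/stock-trends",
#                params={"days": 7, "limit": 10}).json()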

def throw_http_error(res):
    raise HTTPException(status_code=res.status_code, detail=f"QuestDB error: {res.text}")
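
# Note: throw_http_error() raises inside the callers' try blocks; the
# `except HTTPException: raise` clauses above re-raise it so the QuestDB
# status code reaches the client instead of being wrapped into a 500.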