diff --git a/src/analytics/__pycache__/worker.cpython-313.pyc b/src/analytics/__pycache__/worker.cpython-313.pyc
index 9fc3e69..563a8ba 100644
Binary files a/src/analytics/__pycache__/worker.cpython-313.pyc and b/src/analytics/__pycache__/worker.cpython-313.pyc differ
diff --git a/src/analytics/worker.py b/src/analytics/worker.py
index d1688f5..d825d99 100644
--- a/src/analytics/worker.py
+++ b/src/analytics/worker.py
@@ -561,12 +561,38 @@ class AnalyticsWorker:
         """Verarbeitet alle Analytics für einen bestimmten Tag"""
         logger.info(f"Processing analytics for {date}")
 
-        # Custom Analytics (wichtigste Berechnung für Performance)
-        logger.info(f"Calculating custom analytics for {date}...")
-        custom_data = self.calculate_custom_analytics_daily(date)
-        if custom_data:
-            self.save_analytics_data('analytics_custom', custom_data)
-            logger.info(f"Saved {len(custom_data)} custom analytics rows for {date}")
+        # Prüfe ob es Trades für diesen Tag gibt
+        date_str = date.strftime('%Y-%m-%d')
+        check_query = f"select count(*) from trades where date_trunc('day', timestamp) = '{date_str}'"
+        check_data = self.query_questdb(check_query)
+        trade_count = 0
+        if check_data and check_data.get('dataset') and check_data['dataset'][0][0]:
+            trade_count = check_data['dataset'][0][0]
+
+        if trade_count == 0:
+            logger.info(f"No trades found for {date}, creating empty analytics entry")
+            # Erstelle einen leeren Eintrag, damit der Tag als "verarbeitet" gilt
+            empty_entry = [{
+                'date': date,
+                'y_axis': 'volume',
+                'group_by': 'exchange',
+                'exchange_filter': 'all',
+                'x_value': datetime.datetime.combine(date, datetime.time.min).replace(tzinfo=datetime.timezone.utc),
+                'group_value': '',
+                'y_value': 0
+            }]
+            self.save_analytics_data('analytics_custom', empty_entry)
+            logger.info(f"Saved empty analytics entry for {date}")
+        else:
+            # Custom Analytics (wichtigste Berechnung für Performance)
+            logger.info(f"Calculating custom analytics for {date} ({trade_count} trades)...")
+            custom_data = self.calculate_custom_analytics_daily(date)
+            if custom_data:
+                self.save_analytics_data('analytics_custom', custom_data)
+                logger.info(f"Saved {len(custom_data)} custom analytics rows for {date}")
+            else:
+                # Auch wenn keine Daten zurückkommen, erstelle leeren Eintrag
+                logger.warning(f"No custom analytics data returned for {date} despite {trade_count} trades")
 
         logger.info(f"Completed processing for {date}")
 
@@ -621,9 +647,29 @@ class AnalyticsWorker:
             all_dates.append(current)
             current += datetime.timedelta(days=1)
 
+        # Debug: Prüfe das letzte Trade-Datum
+        query_last = "select max(date_trunc('day', timestamp)) as last_date from trades"
+        data_last = self.query_questdb(query_last)
+        last_trade_date = None
+        if data_last and data_last.get('dataset') and data_last['dataset'][0][0]:
+            last_date_value = data_last['dataset'][0][0]
+            if isinstance(last_date_value, str):
+                last_trade_date = datetime.datetime.fromisoformat(last_date_value.replace('Z', '+00:00')).date()
+            else:
+                last_trade_date = datetime.datetime.fromtimestamp(last_date_value / 1000000, tz=datetime.timezone.utc).date()
+
         # Finde fehlende Tage
         missing_dates = [d for d in all_dates if d not in existing_dates]
-        logger.info(f"Found {len(missing_dates)} missing dates to calculate (from {len(all_dates)} total dates, first: {first_date}, last: {today})")
+
+        logger.info(f"Date range: first trade={first_date}, last trade={last_trade_date}, today={today}")
+        logger.info(f"Existing dates in analytics_custom: {len(existing_dates)} dates")
+        if existing_dates:
+            existing_sorted = sorted(existing_dates)
+            logger.info(f" First existing: {existing_sorted[0]}, Last existing: {existing_sorted[-1]}")
+        logger.info(f"Found {len(missing_dates)} missing dates to calculate (from {len(all_dates)} total dates)")
+        if missing_dates:
+            logger.info(f" First missing: {missing_dates[0]}, Last missing: {missing_dates[-1]}")
+
         return sorted(missing_dates)
 
     def process_missing_dates(self):