All checks were successful
Deployment / deploy-docker (push) Successful in 16s

Melchior Reimers
2026-01-25 18:38:37 +01:00
parent 03d080b0d9
commit 66c472be05
2 changed files with 88 additions and 12 deletions

View File

@@ -203,14 +203,24 @@
     updateUrlParams();
 }
 
-async function loadStatistics() {
+async function loadStatistics(forceReload = false) {
     const days = parseInt(document.getElementById('statisticsPeriod')?.value || '7');
 
-    // Load the charts (without calling fetchData, to avoid flicker)
-    await Promise.all([
-        loadMovingAverage(days),
-        loadVolumeChanges(days),
-        loadStockTrends(days)
-    ]);
+    // On first load or on explicit request: load the data
+    if (forceReload || !store.trades || store.trades.length === 0) {
+        await fetchData(true); // skipCharts = true so the charts are not loaded twice
+    }
+
+    // Load the charts
+    try {
+        await Promise.all([
+            loadMovingAverage(days),
+            loadVolumeChanges(days),
+            loadStockTrends(days)
+        ]);
+    } catch (err) {
+        console.error('Error loading statistics:', err);
+    }
 
     updateUrlParams();
 }
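The rewritten loader leans on `fetchData` accepting a `skipCharts` flag, so statistics can refresh the underlying data without rendering the charts twice. A minimal sketch of the contract this diff assumes (the `/api/trades` endpoint and the `renderTables`/`renderCharts` helpers are placeholders, not shown in this commit):

```javascript
// Hypothetical sketch of the fetchData(skipCharts) contract assumed above.
// store, renderTables and renderCharts stand in for the app's real helpers.
async function fetchData(skipCharts = false) {
    const response = await fetch('/api/trades'); // endpoint name is an assumption
    store.trades = await response.json();
    renderTables(store.trades);
    if (!skipCharts) {
        renderCharts(store.trades); // skipped when loadStatistics drives the charts itself
    }
}
```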
@@ -225,7 +235,12 @@
         return;
     }
 
-    const ctx = document.getElementById('movingAverageChart').getContext('2d');
+    const canvas = document.getElementById('movingAverageChart');
+    if (!canvas) {
+        console.error('Canvas element movingAverageChart not found');
+        return;
+    }
+    const ctx = canvas.getContext('2d');
     if (charts.movingAverage) charts.movingAverage.destroy();
 
     const dateIdx = columns.findIndex(c => c.name === 'date' || c.name === 'timestamp');
@@ -340,7 +355,12 @@
         return;
     }
 
-    const ctx = document.getElementById('volumeChangesChart').getContext('2d');
+    const canvas = document.getElementById('volumeChangesChart');
+    if (!canvas) {
+        console.error('Canvas element volumeChangesChart not found');
+        return;
+    }
+    const ctx = canvas.getContext('2d');
     if (charts.volumeChanges) charts.volumeChanges.destroy();
 
     const exchangeIdx = columns.findIndex(c => c.name === 'exchange');
@@ -425,7 +445,12 @@
         return;
     }
 
-    const ctx = document.getElementById('stockTrendsChart').getContext('2d');
+    const canvas = document.getElementById('stockTrendsChart');
+    if (!canvas) {
+        console.error('Canvas element stockTrendsChart not found');
+        return;
+    }
+    const ctx = canvas.getContext('2d');
     const tableContainer = document.getElementById('stockTrendsTable');
     if (charts.stockTrends) charts.stockTrends.destroy();
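The same canvas null-guard now appears in all three chart loaders. If the pattern keeps growing, it could be factored into a small helper; a sketch (the `getChartContext` name is hypothetical, not part of this commit):

```javascript
// Hypothetical helper bundling the repeated lookup-and-guard logic.
function getChartContext(canvasId) {
    const canvas = document.getElementById(canvasId);
    if (!canvas) {
        console.error(`Canvas element ${canvasId} not found`);
        return null;
    }
    return canvas.getContext('2d');
}

// Usage inside a loader:
// const ctx = getChartContext('movingAverageChart');
// if (!ctx) return;
```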
@@ -747,7 +772,8 @@
         fetchData(true); // skipCharts = true
     }, 30000);
 
-    setTimeout(() => loadStatistics(), 1000);
+    // Load statistics after a short delay (forceReload = true on first load)
+    setTimeout(() => loadStatistics(true), 1000);
 };
 </script>
 </body>

View File

@@ -26,6 +26,31 @@ class AnalyticsWorker:
         self.last_processed_timestamp = None
         self.db_url = DB_URL
 
+    def query_questdb(self, query: str, timeout: int = 30) -> Optional[Dict]:
+        """Central QuestDB query function"""
+        try:
+            response = requests.get(f"{self.db_url}/exec", params={'query': query}, auth=DB_AUTH, timeout=timeout)
+            if response.status_code == 200:
+                data = response.json()
+                # Check for errors in the JSON response (e.g. "table does not exist")
+                if isinstance(data, dict) and 'error' in data:
+                    # For "table does not exist" errors, return None (treated as an empty table)
+                    if 'does not exist' in str(data.get('error', '')):
+                        return None
+                    logger.debug(f"QuestDB query error: {data.get('error')}")
+                    return None
+                return data
+            else:
+                # On 400/404 errors (e.g. the table does not exist), return None
+                if response.status_code in [400, 404]:
+                    logger.debug(f"QuestDB query failed (table may not exist): {response.status_code}")
+                    return None
+                logger.error(f"QuestDB query failed: {response.status_code} - {response.text}")
+                return None
+        except Exception as e:
+            logger.debug(f"Error querying QuestDB: {e}")
+            return None
+
     def wait_for_questdb(self, max_retries: int = 30, retry_delay: int = 2):
         """Waits until QuestDB is available"""
         logger.info("Waiting for QuestDB to be available...")
@@ -545,6 +570,31 @@ class AnalyticsWorker:
 
         logger.info(f"Completed processing for {date}")
 
+    def get_existing_dates(self, table_name: str) -> set:
+        """Fetches all dates already computed in an analytics table"""
+        query = f"select distinct date_trunc('day', timestamp) as date from {table_name}"
+        data = self.query_questdb(query)
+        if not data:
+            # Table does not exist yet - that is fine on first start
+            return set()
+        # Check for errors in the response (e.g. "table does not exist")
+        if isinstance(data, dict) and 'error' in data:
+            logger.debug(f"Table {table_name} does not exist yet: {data.get('error')}")
+            return set()
+        if not data.get('dataset'):
+            return set()
+        dates = set()
+        for row in data['dataset']:
+            if row and row[0]:
+                if isinstance(row[0], str):
+                    dates.add(datetime.datetime.fromisoformat(row[0].replace('Z', '+00:00')).date())
+                elif isinstance(row[0], (int, float)):
+                    dates.add(datetime.datetime.fromtimestamp(row[0] / 1000000, tz=datetime.timezone.utc).date())
+        return dates
+
     def get_missing_dates(self) -> List[datetime.date]:
         """Determines the missing days that still need to be computed"""
         # Get the date of the first trade
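Since `get_existing_dates` returns a plain `set` of `datetime.date` values, finding the backlog reduces to a set difference over the candidate range. A standalone sketch of that idea, consistent with the method above (`missing_between` is a hypothetical name, not part of this commit):

```python
import datetime

def missing_between(first_date: datetime.date, existing: set) -> list:
    """All days from first_date through yesterday that are not in `existing`."""
    yesterday = datetime.date.today() - datetime.timedelta(days=1)
    span = (yesterday - first_date).days
    candidates = (first_date + datetime.timedelta(days=i) for i in range(span + 1))
    return sorted(day for day in candidates if day not in existing)
```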
@@ -561,7 +611,7 @@
         first_date = datetime.datetime.fromtimestamp(first_date_value / 1000000, tz=datetime.timezone.utc).date()
 
         # Get the dates that have already been computed
-        existing_dates = self.get_existing_dates('analytics_daily_summary')
+        existing_dates = self.get_existing_dates('analytics_custom')
 
         # Generate all days from the first trade until yesterday
         yesterday = datetime.date.today() - datetime.timedelta(days=1)
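Both conversion paths here assume QuestDB's microsecond-resolution timestamps, so dividing by 1,000,000 yields Unix seconds. A quick sanity check (the literal timestamp is an arbitrary example):

```python
import datetime

# QuestDB timestamps are microseconds since the Unix epoch.
ts_micros = 1737830317000000
dt = datetime.datetime.fromtimestamp(ts_micros / 1000000, tz=datetime.timezone.utc)
print(dt.isoformat())  # 2025-01-25T18:38:37+00:00
```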