updated

Melchior Reimers
2026-01-25 17:36:29 +01:00
parent 4f4d734643
commit 33f5c90fce
8 changed files with 996 additions and 355 deletions


@@ -6,7 +6,6 @@ import requests
 from src.exchanges.eix import EIXExchange
 from src.exchanges.ls import LSExchange
 from src.database.questdb_client import DatabaseClient
-from src.analytics.worker import AnalyticsWorker
 logging.basicConfig(
     level=logging.INFO,
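
The basicConfig call is cut off by the hunk boundary. As a hedged sketch, a typical completion might look like this; the format string and logger name are assumptions, not shown in the diff:

import logging

logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s %(levelname)s %(name)s: %(message)s",
)
# 'daemon' is a hypothetical name; the diff only shows 'logger' being used later.
logger = logging.getLogger("daemon")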
@@ -28,6 +27,8 @@ def get_last_trade_timestamp(db_url, exchange_name):
     data = response.json()
     if data['dataset']:
         # QuestDB returns timestamp in micros since epoch by default in some views, or ISO
+        # Let's assume the timestamp is in the dataset
+        # ILP timestamps are stored as designated timestamps.
         ts_value = data['dataset'][0][0]  # Adjust index based on column order
         if isinstance(ts_value, str):
             return datetime.datetime.fromisoformat(ts_value.replace('Z', '+00:00'))
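
For context, a minimal end-to-end sketch of this lookup against QuestDB's HTTP /exec endpoint; the table and column names are illustrative assumptions, and the integer branch covers the micros-since-epoch case the comment mentions:

import datetime
import requests

def get_last_trade_timestamp_sketch(db_url, exchange_name):
    # Hypothetical table/column names, for illustration only.
    query = (
        "SELECT timestamp FROM trades "
        f"WHERE exchange = '{exchange_name}' "
        "ORDER BY timestamp DESC LIMIT 1"
    )
    response = requests.get(f"{db_url}/exec", params={"query": query}, timeout=10)
    data = response.json()
    if not data.get("dataset"):
        return None
    ts_value = data["dataset"][0][0]
    if isinstance(ts_value, str):
        # ISO-8601 string, e.g. "2026-01-25T16:36:29.000000Z"
        return datetime.datetime.fromisoformat(ts_value.replace("Z", "+00:00"))
    # Otherwise treat the value as integer microseconds since the Unix epoch.
    return datetime.datetime.fromtimestamp(ts_value / 1_000_000, tz=datetime.timezone.utc)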
@@ -44,8 +45,14 @@ def run_task(historical=False):
     eix = EIXExchange()
     ls = LSExchange()
+    # Pass last_ts to the fetcher to allow smart filtering.
+    # daemon.py runs daily, so we want to fetch everything since the DB state.
+    # Caveat: eix.py's fetch_latest_trades needs a 'since_date' argument, which
+    # we can't pass in the tuple here because last_ts is calculated inside the
+    # loop. The loop below therefore handles the args dynamically.
     exchanges_to_process = [
-        (eix, {'limit': None if historical else 5}),
+        (eix, {'limit': None if historical else 5}),  # Default limit 5 for safety if not historical
         (ls, {'include_yesterday': historical})
     ]
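
The loop that consumes exchanges_to_process is not shown in this hunk. A hedged sketch of the dynamic argument handling the comment describes (the since_date injection and the shared fetch_latest_trades method are assumptions built on the comments above):

for exchange, kwargs in exchanges_to_process:
    last_ts = get_last_trade_timestamp(DB_URL, exchange.name)  # DB_URL: assumed config value
    # Only EIX accepts 'since_date', per the comment above; inject it when the DB has data.
    if isinstance(exchange, EIXExchange) and last_ts is not None:
        kwargs = {**kwargs, 'since_date': last_ts.date()}
    trades = exchange.fetch_latest_trades(**kwargs)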
@@ -84,14 +91,6 @@ def run_task(historical=False):
         except Exception as e:
             logger.error(f"Error processing exchange {exchange.name}: {e}")
-def run_analytics(db_url="questdb", db_port=9000):
-    try:
-        worker = AnalyticsWorker(db_host=db_url, db_port=db_port, auth=DB_AUTH)
-        worker.initialize_tables()
-        worker.run_aggregation()
-    except Exception as e:
-        logger.error(f"Analytics aggregation failed: {e}")
 def main():
     logger.info("Trading Daemon started.")
@@ -112,12 +111,10 @@ def main():
     if is_empty:
         logger.info("Database is empty or table doesn't exist. Triggering initial historical fetch...")
         run_task(historical=True)
-        run_analytics()
     else:
         logger.info("Found existing data in database. Triggering catch-up sync...")
         # Run a normal task to fetch any missing data since the last run
         run_task(historical=False)
-        run_analytics()
     logger.info("Catch-up sync completed. Waiting for scheduled run at 23:00.")
     while True:
@@ -125,7 +122,6 @@ def main():
         # Daily at 23:00
         if now.hour == 23 and now.minute == 0:
             run_task(historical=False)
-            run_analytics()
         # Wait 61s to prevent multiple executions within the same minute
         time.sleep(61)
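
Taken together, the scheduler is a simple minute-polling loop. A self-contained sketch of the pattern, assuming the 61-second sleep sits at loop level as shown in the hunk:

import datetime
import time

def scheduler_loop():
    while True:
        now = datetime.datetime.now()
        if now.hour == 23 and now.minute == 0:
            run_task(historical=False)  # defined earlier in daemon.py
        # Sleeping 61s guarantees the same 23:00 minute can never trigger twice.
        time.sleep(61)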