270 lines
9.4 KiB
Python
270 lines
9.4 KiB
Python
|
|
import gzip
import io
import json
from datetime import date, datetime, timedelta, timezone
from typing import List, Optional

import requests
from bs4 import BeautifulSoup

from .base import BaseExchange, Trade
|
||
|
|
|
||
|
|
# Browser-like request headers used for access to the Deutsche Boerse file
# server (NOTE(review): presumably plain scripted User-Agents are blocked —
# confirm before simplifying).
HEADERS = {
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'
}
|
||
|
|
|
||
|
|
|
||
|
|
class DeutscheBoerseBase(BaseExchange):
    """Base class for Deutsche Boerse venues (Xetra, Frankfurt, Quotrix).

    The venues publish post-trade transparency data as gzipped JSON files
    behind a plain HTTP directory listing.  Subclasses only supply the
    venue-specific directory URL and a display name.
    """

    @property
    def base_url(self) -> str:
        """Directory URL listing the venue's post-trade files. Override in subclasses."""
        raise NotImplementedError

    @property
    def name(self) -> str:
        """Short venue identifier used in Trade records. Override in subclasses."""
        raise NotImplementedError

    def _get_file_list(self) -> List[str]:
        """Fetch the directory page and return all post-trade file names.

        Returns an empty list on any network or parse error — a temporary
        outage should not crash the caller.
        """
        try:
            response = requests.get(self.base_url, headers=HEADERS, timeout=30)
            response.raise_for_status()

            soup = BeautifulSoup(response.text, 'html.parser')

            # The directory page lists each file as an anchor; keep only the
            # gzipped post-trade JSON files.
            return [
                href
                for href in (link.get('href', '') for link in soup.find_all('a'))
                if 'posttrade' in href and href.endswith('.json.gz')
            ]
        except Exception as e:
            print(f"Error fetching file list from {self.base_url}: {e}")
            return []

    def _filter_files_for_date(self, files: List[str], target_date: date) -> List[str]:
        """Return the files that belong to *target_date*.

        File names embed a UTC timestamp:
        ``*posttrade-YYYY-MM-DDTHH:MM:SS*.json.gz``

        Trading runs until 22:00 German time (20:00/21:00 UTC), so files
        published shortly after midnight UTC (< 03:00) are attributed to the
        previous trading day as well.
        """
        target_str = target_date.strftime('%Y-%m-%d')
        next_day_str = (target_date + timedelta(days=1)).strftime('%Y-%m-%d')

        filtered = []
        for file in files:
            if target_str in file:
                filtered.append(file)
            elif next_day_str in file:
                # Early next-day file?  Extract the timestamp from the name,
                # e.g. "posttrade-2026-01-26T21:30:00.json.gz".
                try:
                    parts = file.split('posttrade-')
                    if len(parts) > 1:
                        ts_part = parts[1].split('.json.gz')[0]
                        file_dt = datetime.fromisoformat(ts_part)
                        # Early-morning hours still belong to the previous day.
                        if file_dt.hour < 3:
                            filtered.append(file)
                except ValueError:
                    # Unparseable timestamp — skip the file rather than fail.
                    pass

        return filtered

    def _download_and_parse_file(self, file_url: str) -> List[Trade]:
        """Download one gzipped JSON file and parse it into Trade objects.

        *file_url* may be absolute or relative to ``base_url``.  Individual
        records that fail to parse are skipped; a download error yields an
        empty list.
        """
        trades: List[Trade] = []

        try:
            # Build the absolute URL if we only got a directory-relative name.
            if not file_url.startswith('http'):
                full_url = f"{self.base_url.rstrip('/')}/{file_url.lstrip('/')}"
            else:
                full_url = file_url

            response = requests.get(full_url, headers=HEADERS, timeout=60)
            response.raise_for_status()

            # Decompress in memory and parse the JSON payload.
            with gzip.GzipFile(fileobj=io.BytesIO(response.content)) as f:
                json_data = json.load(f)

            # Deutsche Boerse RTS1/RTS2 records: TrdDt, TrdTm, ISIN, Pric, Qty, ...
            for record in json_data:
                try:
                    trade = self._parse_trade_record(record)
                    if trade:
                        trades.append(trade)
                except Exception as e:
                    print(f"Error parsing trade record: {e}")
                    continue
        except Exception as e:
            print(f"Error downloading/parsing {file_url}: {e}")

        return trades

    @staticmethod
    def _extract_price(record: dict) -> Optional[float]:
        """Pull the price from the nested RTS 'Pric' structure (several layouts occur)."""
        pric = record.get('Pric')
        if isinstance(pric, dict):
            inner = pric.get('Pric')
            if isinstance(inner, dict):
                if 'MntryVal' in inner:
                    return float(inner['MntryVal'].get('Amt', 0))
                if 'Amt' in inner:
                    return float(inner['Amt'])
            elif 'MntryVal' in pric:
                return float(pric['MntryVal'].get('Amt', 0))
        elif isinstance(pric, (int, float)):
            return float(pric)
        return None

    @staticmethod
    def _extract_quantity(record: dict) -> Optional[float]:
        """Pull the traded quantity from 'Qty' (dict with 'Unit'/'Qty', or a plain number)."""
        qty = record.get('Qty')
        if isinstance(qty, dict):
            return float(qty.get('Unit', qty.get('Qty', 0)))
        if isinstance(qty, (int, float)):
            return float(qty)
        return None

    @staticmethod
    def _extract_timestamp(record: dict) -> Optional[datetime]:
        """Combine 'TrdDt' and 'TrdTm' into a timezone-aware UTC datetime, or None."""
        trd_dt = record.get('TrdDt', '')
        trd_tm = record.get('TrdTm', '00:00:00')
        if not trd_dt:
            return None

        ts_str = f"{trd_dt}T{trd_tm}"
        # datetime.fromisoformat (pre-3.11) rejects more than 6 fractional
        # digits, so truncate an over-long microseconds part.
        if '.' in ts_str:
            head, frac = ts_str.split('.', 1)
            if len(frac) > 6:
                ts_str = f"{head}.{frac[:6]}"

        timestamp = datetime.fromisoformat(ts_str)
        # Deutsche Boerse delivers UTC; attach the zone if it is missing.
        if timestamp.tzinfo is None:
            timestamp = timestamp.replace(tzinfo=timezone.utc)
        return timestamp

    def _parse_trade_record(self, record: dict) -> Optional[Trade]:
        """Parse a single trade record from the RTS1/RTS2 JSON.

        Relevant fields:
          - TrdDt / TrdTm: trade date (YYYY-MM-DD) and time (HH:MM:SS[.ffffff])
          - ISIN or FinInstrmId.Id: instrument identifier
          - Pric...: price (several nested layouts)
          - Qty.Unit: quantity

        Returns None for incomplete or implausible records (missing ISIN or
        date, non-positive price/quantity).
        """
        try:
            isin = record.get('ISIN') or record.get('FinInstrmId', {}).get('Id', '')
            if not isin:
                return None

            price = self._extract_price(record)
            if price is None or price <= 0:
                return None

            quantity = self._extract_quantity(record)
            if quantity is None or quantity <= 0:
                return None

            timestamp = self._extract_timestamp(record)
            if timestamp is None:
                return None

            return Trade(
                exchange=self.name,
                symbol=isin,  # no separate ticker available; symbol is the ISIN
                isin=isin,
                price=price,
                quantity=quantity,
                timestamp=timestamp
            )
        except Exception as e:
            print(f"Error parsing record: {e}")
            return None

    def fetch_latest_trades(self, include_yesterday: bool = True, since_date: Optional[datetime] = None) -> List[Trade]:
        """Fetch all trades of the previous UTC day (or since *since_date*).

        Args:
            include_yesterday: kept for interface compatibility; currently unused.
            since_date: optional datetime (or date) overriding the default
                target day (yesterday, UTC).

        Returns:
            All trades parsed from the matching post-trade files.
        """
        # Determine the target day.
        if since_date:
            # Accept both datetime and date objects.
            target_date = since_date.date() if hasattr(since_date, 'date') else since_date
        else:
            # Default: the previous UTC day.
            target_date = (datetime.now(timezone.utc) - timedelta(days=1)).date()

        print(f"[{self.name}] Fetching trades for date: {target_date}")

        files = self._get_file_list()
        print(f"[{self.name}] Found {len(files)} total files")

        target_files = self._filter_files_for_date(files, target_date)
        print(f"[{self.name}] {len(target_files)} files match target date")

        # Download and parse every matching file.
        all_trades: List[Trade] = []
        for file in target_files:
            trades = self._download_and_parse_file(file)
            all_trades.extend(trades)
            print(f"[{self.name}] Parsed {len(trades)} trades from {file}")

        print(f"[{self.name}] Total trades fetched: {len(all_trades)}")
        return all_trades
|
||
|
|
|
||
|
|
|
||
|
|
class XetraExchange(DeutscheBoerseBase):
    """Xetra, Deutsche Boerse's electronic trading venue (MIC: DETR)."""

    @property
    def name(self) -> str:
        # Venue identifier stamped onto every Trade record.
        return "XETRA"

    @property
    def base_url(self) -> str:
        # Public post-trade file directory for Xetra.
        return "https://mfs.deutsche-boerse.com/DETR-posttrade"
|
||
|
|
|
||
|
|
|
||
|
|
class FrankfurtExchange(DeutscheBoerseBase):
    """Boerse Frankfurt floor trading venue (MIC: DFRA)."""

    @property
    def name(self) -> str:
        # Venue identifier stamped onto every Trade record.
        return "FRA"

    @property
    def base_url(self) -> str:
        # Public post-trade file directory for Boerse Frankfurt.
        return "https://mfs.deutsche-boerse.com/DFRA-posttrade"
|
||
|
|
|
||
|
|
|
||
|
|
class QuotrixExchange(DeutscheBoerseBase):
    """Quotrix (Duesseldorf/Tradegate) trading venue (MIC: DGAT)."""

    @property
    def name(self) -> str:
        # Venue identifier stamped onto every Trade record.
        return "QUOTRIX"

    @property
    def base_url(self) -> str:
        # Public post-trade file directory for Quotrix.
        return "https://mfs.deutsche-boerse.com/DGAT-posttrade"
|