Add Börsenag exchanges (DUSA, DUSB, DUSC, DUSD, HAMA, HAMB, HANA, HANB)
- New boersenag.py with support for the Düsseldorf, Hamburg, and Hannover exchanges
- Proper .gitignore to exclude the virtual environment and temporary files
This commit is contained in:
48
.gitignore
vendored
Normal file
48
.gitignore
vendored
Normal file
@@ -0,0 +1,48 @@
|
||||
# Python
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
*.so
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
|
||||
# Virtual environments
|
||||
venv/
|
||||
ENV/
|
||||
env/
|
||||
.venv/
|
||||
|
||||
# IDE
|
||||
.idea/
|
||||
.vscode/
|
||||
*.swp
|
||||
*.swo
|
||||
|
||||
# Environment files
|
||||
.env
|
||||
.env.local
|
||||
|
||||
# OS
|
||||
.DS_Store
|
||||
Thumbs.db
|
||||
|
||||
# Logs
|
||||
*.log
|
||||
|
||||
# Test data
*.gz

# Always keep requirements.txt, even if a broad ignore pattern is added later
!requirements.txt
||||
354
src/exchanges/boersenag.py
Normal file
354
src/exchanges/boersenag.py
Normal file
@@ -0,0 +1,354 @@
|
||||
"""
|
||||
Börsenag Exchange Fetcher
|
||||
Unterstützt: DUSA, DUSB, DUSC, DUSD, HAMA, HAMB, HANA, HANB
|
||||
|
||||
Datenquelle: https://www.boersenag.de/mifid-ii-delayed-data/
|
||||
URL-Format: https://cld42.boersenag.de/m13data/data/Mifir13DelayedData_{MIC}_{SEQUENCE}_{TIMESTAMP}.csv
|
||||
"""
|
||||
|
||||
import requests
|
||||
import time
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import List, Optional
|
||||
from .base import BaseExchange, Trade
|
||||
import re
|
||||
|
||||
# Rate-Limiting Konfiguration
|
||||
RATE_LIMIT_DELAY = 0.3 # Sekunden zwischen Requests
|
||||
|
||||
# Browser User-Agent für Zugriff
|
||||
HEADERS = {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
|
||||
'Accept': 'text/csv, text/plain, */*',
|
||||
'Accept-Language': 'de-DE,de;q=0.9,en;q=0.8',
|
||||
'Referer': 'https://www.boersenag.de/',
|
||||
}
|
||||
|
||||
# Exchange configuration: MIC code -> metadata for every supported venue.
# Built from a compact (MIC, full name) table; 'name' and 'mic' always
# equal the MIC code for these venues.
BOERSENAG_EXCHANGES = {
    mic: {'name': mic, 'full_name': full_name, 'mic': mic}
    for mic, full_name in [
        ('DUSA', 'Börse Düsseldorf Regulierter Markt'),
        ('DUSB', 'Börse Düsseldorf Freiverkehr'),
        ('DUSC', 'Börse Düsseldorf Quotrix Regulierter Markt'),
        ('DUSD', 'Börse Düsseldorf Quotrix Freiverkehr'),
        ('HAMA', 'Börse Hamburg Regulierter Markt'),
        ('HAMB', 'Börse Hamburg Freiverkehr'),
        ('HANA', 'Börse Hannover Regulierter Markt'),
        ('HANB', 'Börse Hannover Freiverkehr'),
    ]
}

BASE_URL = "https://cld42.boersenag.de/m13data/data"
|
||||
|
||||
|
||||
class BoersenagBase(BaseExchange):
    """
    Base class for the Börsenag venues (DUSA, DUSB, DUSC, DUSD, HAMA, HAMB,
    HANA, HANB).

    CSV format (semicolon-separated):
        MIC; ISIN; displayName; time; price; size; supplement
    - time: "28.01.2026 15:48:42" (German local time, CET/CEST)
    - price: "46,18" (German decimal notation)
    - size: traded quantity (may be 0 for quotations without a trade)
    - supplement: "bez " = bezahlt (a real trade), "G " = Geld (bid),
      "B " = Brief (ask)
    """

    @property
    def mic(self) -> str:
        """MIC code of the venue; concrete subclasses must override this."""
        raise NotImplementedError

    @property
    def name(self) -> str:
        """Exchange name; for these venues it equals the MIC code."""
        return self.mic

    def _generate_file_urls(self, target_date: "datetime.date") -> List[str]:
        """
        Generate candidate file URLs for *target_date*.

        File name format: Mifir13DelayedData_{MIC}_{SEQUENCE}_{TIMESTAMP}.csv
        The exact sequence number and publication time are not known in
        advance, so we enumerate plausible combinations: several observed
        sequence numbers at every 15-minute mark of the day.  The hour-0 /
        minute-0 iteration already yields the all-zero timestamp form
        ({date}0000000000), so no extra "simple form" pass is needed.
        """
        urls = []

        # Date portion of the timestamp: YYYYMMDD
        date_str = target_date.strftime('%Y%m%d')

        # Observed sequence number is '000000DF'; the others are fallbacks in
        # case the counter varies between publications.
        sequences = ['000000DF', '00000000', '000000DD', '000000DE']

        for hour in range(0, 24):
            for minute in [0, 15, 30, 45]:
                timestamp = f"{date_str}{hour:02d}{minute:02d}000000"
                for seq in sequences:
                    urls.append(
                        f"{BASE_URL}/Mifir13DelayedData_{self.mic}_{seq}_{timestamp}.csv"
                    )

        return urls

    def _parse_german_datetime(self, dt_str: str) -> Optional[datetime]:
        """
        Parse the German timestamp format "DD.MM.YYYY HH:MM:SS" and return a
        timezone-aware datetime converted to UTC.

        The feed reports German local wall-clock time (MEZ/MESZ).  We localize
        via zoneinfo when the tz database is available and fall back to a
        fixed UTC+1 offset (ignoring DST) otherwise.  Returns None for
        unparseable input.
        """
        try:
            # Example: "28.01.2026 15:48:42"
            parsed = datetime.strptime(dt_str.strip(), '%d.%m.%Y %H:%M:%S')
        except ValueError:
            return None

        try:
            from zoneinfo import ZoneInfo  # stdlib, Python 3.9+
            local_tz = ZoneInfo('Europe/Berlin')
        except Exception:
            # zoneinfo / tzdata unavailable: approximate with fixed UTC+1.
            local_tz = timezone(timedelta(hours=1))

        return parsed.replace(tzinfo=local_tz).astimezone(timezone.utc)

    def _parse_german_number(self, num_str: str) -> Optional[float]:
        """
        Parse a German-formatted number: "1.234,56" -> 1234.56.

        NOTE: assumes the feed always uses German notation — a plain dotted
        decimal such as "46.18" would be misread as 4618.  Returns None for
        unparseable input.
        """
        try:
            # Drop thousands separators (dots), turn the decimal comma into a dot.
            cleaned = num_str.strip().replace('.', '').replace(',', '.')
            return float(cleaned)
        except ValueError:
            return None

    def _download_and_parse_file(self, url: str) -> List[Trade]:
        """
        Download one CSV file and parse its trades.

        Returns an empty list when the file does not exist (404), is empty,
        or contains only the header row.  Network errors are logged and
        swallowed so the caller can simply try the next candidate URL.
        """
        trades = []

        try:
            response = requests.get(url, headers=HEADERS, timeout=30)

            # Most candidate URLs do not exist; 404 is expected noise.
            if response.status_code == 404:
                return []

            response.raise_for_status()

            content = response.text
            if not content.strip():
                return []

            # splitlines() also copes with \r\n line endings.
            lines = content.strip().splitlines()
            if len(lines) < 2:  # header only, no data rows
                return []

            # First line is the header:
            # MIC; ISIN; displayName; time; price; size; supplement
            for line in lines[1:]:
                if not line.strip():
                    continue

                trade = self._parse_csv_line(line)
                if trade:
                    trades.append(trade)

        except requests.exceptions.HTTPError as e:
            if e.response.status_code != 404:
                print(f"[{self.name}] HTTP error: {e}")
        except Exception as e:
            print(f"[{self.name}] Error downloading {url}: {e}")

        return trades

    def _parse_csv_line(self, line: str) -> Optional[Trade]:
        """
        Parse a single semicolon-separated CSV row into a Trade.

        Returns None for malformed rows, pure quotations (bid/ask without a
        size), and rows without a valid ISIN, timestamp, or positive price.
        """
        try:
            # Columns: MIC; ISIN; displayName; time; price; size; supplement
            parts = line.split(';')
            if len(parts) < 7:
                return None

            isin = parts[1].strip()
            time_str = parts[3].strip()
            price_str = parts[4].strip()
            size_str = parts[5].strip()
            supplement = parts[6].strip().strip('"').strip()

            # ISINs are always exactly 12 characters.
            if not isin or len(isin) != 12:
                return None

            timestamp = self._parse_german_datetime(time_str)
            if not timestamp:
                return None

            price = self._parse_german_number(price_str)
            if price is None or price <= 0:
                return None

            try:
                size = float(size_str)
            except ValueError:
                size = 0

            # Keep only real trades: size > 0 or supplement "bez" (bezahlt).
            # "G" (Geld/bid) and "B" (Brief/ask) are quotes, not trades.
            is_trade = size > 0 or 'bez' in supplement.lower()
            if not is_trade:
                return None

            # size == 0 but marked "bez": quantity unknown, record it as 1.
            if size <= 0:
                size = 1

            return Trade(
                exchange=self.name,
                symbol=isin,
                isin=isin,
                price=price,
                quantity=size,
                timestamp=timestamp
            )

        except Exception:
            # Malformed rows are skipped; the feed parsing is best-effort.
            return None

    def _get_last_trading_day(self, from_date: "datetime.date") -> "datetime.date":
        """
        Return the most recent weekday on or before *from_date*.

        Weekends map back to the preceding Friday; public holidays are NOT
        handled.
        """
        day = from_date
        if day.weekday() == 5:    # Saturday -> Friday
            day = day - timedelta(days=1)
        elif day.weekday() == 6:  # Sunday -> Friday
            day = day - timedelta(days=2)
        return day

    def fetch_latest_trades(self, include_yesterday: bool = True,
                            since_date: Optional[datetime] = None) -> List[Trade]:
        """
        Fetch all trades for the last trading day.

        Args:
            include_yesterday: kept for interface compatibility; the target
                date is always derived from *since_date* or "yesterday".
            since_date: optional datetime (or date) to fetch instead of the
                default "yesterday in UTC".

        Returns:
            List of parsed Trade objects; empty when nothing was found.
        """
        all_trades = []

        # Determine the target date.
        if since_date:
            target_date = since_date.date() if hasattr(since_date, 'date') else since_date
        else:
            target_date = (datetime.now(timezone.utc) - timedelta(days=1)).date()

        # Map weekends back to the last trading day.
        original_date = target_date
        target_date = self._get_last_trading_day(target_date)

        if target_date != original_date:
            print(f"[{self.name}] Skipping weekend: {original_date} -> {target_date}")

        print(f"[{self.name}] Fetching trades for date: {target_date}")

        # Try the candidate URLs in order.
        urls = self._generate_file_urls(target_date)

        successful = 0
        total_urls = len(urls)

        for i, url in enumerate(urls):
            trades = self._download_and_parse_file(url)
            if trades:
                all_trades.extend(trades)
                successful += 1
                print(f"[{self.name}] Found {len(trades)} trades from: {url.split('/')[-1]}")
                # One file covers the day; no need to try the remaining URLs.
                break

            # Rate limiting between attempts.
            if i < total_urls - 1:
                time.sleep(RATE_LIMIT_DELAY)

            # Give up after ~20 consecutive misses.
            if i > 20 and successful == 0:
                break

        print(f"[{self.name}] Total trades fetched: {len(all_trades)}")
        return all_trades
|
||||
|
||||
|
||||
# Concrete exchange classes: one per venue, each pinning its MIC code.
class DUSAExchange(BoersenagBase):
    """Börse Düsseldorf, regulated market (MIC: DUSA)."""

    @property
    def mic(self) -> str:
        return 'DUSA'


class DUSBExchange(BoersenagBase):
    """Börse Düsseldorf, open market / Freiverkehr (MIC: DUSB)."""

    @property
    def mic(self) -> str:
        return 'DUSB'


class DUSCExchange(BoersenagBase):
    """Börse Düsseldorf Quotrix, regulated market (MIC: DUSC)."""

    @property
    def mic(self) -> str:
        return 'DUSC'


class DUSDExchange(BoersenagBase):
    """Börse Düsseldorf Quotrix, open market / Freiverkehr (MIC: DUSD)."""

    @property
    def mic(self) -> str:
        return 'DUSD'


class HAMAExchange(BoersenagBase):
    """Börse Hamburg, regulated market (MIC: HAMA)."""

    @property
    def mic(self) -> str:
        return 'HAMA'


class HAMBExchange(BoersenagBase):
    """Börse Hamburg, open market / Freiverkehr (MIC: HAMB)."""

    @property
    def mic(self) -> str:
        return 'HAMB'


class HANAExchange(BoersenagBase):
    """Börse Hannover, regulated market (MIC: HANA)."""

    @property
    def mic(self) -> str:
        return 'HANA'


class HANBExchange(BoersenagBase):
    """Börse Hannover, open market / Freiverkehr (MIC: HANB)."""

    @property
    def mic(self) -> str:
        return 'HANB'
|
||||
Reference in New Issue
Block a user