Now downloads historical EIX data
All checks were successful
Deployment / deploy-docker (push) Successful in 14s
All checks were successful
Deployment / deploy-docker (push) Successful in 14s
This commit is contained in:
@@ -12,7 +12,7 @@ class EIXExchange(BaseExchange):
|
||||
def name(self) -> str:
    """Return the short identifier for this exchange ("EIX")."""
    return "EIX"
|
||||
|
||||
def fetch_latest_trades(self, limit: int = 1) -> List[Trade]:
|
||||
def fetch_latest_trades(self, limit: int = 1, since_date: datetime = None) -> List[Trade]:
|
||||
# EIX stores its file list in a separate API endpoint
|
||||
url = "https://european-investor-exchange.com/api/official-trades"
|
||||
try:
|
||||
@@ -23,12 +23,58 @@ class EIXExchange(BaseExchange):
|
||||
print(f"Error fetching EIX file list: {e}")
|
||||
return []
|
||||
|
||||
trades = []
|
||||
count = 0
|
||||
# Filter files based on date in filename if since_date provided
|
||||
# Format: "kursblatt/2025/Kursblatt.2025-07-14.1752526803105.csv"
|
||||
filtered_files = []
|
||||
for item in files_list:
|
||||
file_key = item.get('fileName')
|
||||
if not file_key:
|
||||
continue
|
||||
|
||||
if since_date:
|
||||
try:
|
||||
# Extract date from filename: Kursblatt.YYYY-MM-DD
|
||||
parts = file_key.split('/')[-1].split('.')
|
||||
# parts example: ['Kursblatt', '2025-07-14', '1752526803105', 'csv']
|
||||
if len(parts) >= 2:
|
||||
date_str = parts[1]
|
||||
file_date = datetime.strptime(date_str, "%Y-%m-%d").replace(tzinfo=datetime.timezone.utc)
|
||||
|
||||
# Check if file date is newer than since_date (compare dates only)
|
||||
if file_date.date() > since_date.date():
|
||||
filtered_files.append(item)
|
||||
continue
|
||||
# If same day, we might need to check it too, but EIX seems to be daily files
|
||||
if file_date.date() == since_date.date():
|
||||
filtered_files.append(item)
|
||||
continue
|
||||
except Exception:
|
||||
# If the date cannot be parsed from the filename, include the file
# anyway: downloading an extra file is safer than silently skipping data.
|
||||
filtered_files.append(item)
|
||||
else:
|
||||
filtered_files.append(item)
|
||||
|
||||
# Selection policy: with the default limit=1 we only want the newest file;
# when syncing history (since_date set) we process every new file instead.
|
||||
|
||||
# Logic: If since_date is set, we ignore limit (or use it as safety cap) and process ALL new files
|
||||
if since_date:
|
||||
files_to_process = filtered_files
|
||||
# Sort by date ? The API list seems chronological.
|
||||
else:
|
||||
# Default behavior: take the last N files (API returns oldest first usually?)
|
||||
# Let's assume list is chronological.
|
||||
if limit:
|
||||
files_to_process = files_list[-limit:]
|
||||
else:
|
||||
files_to_process = files_list
|
||||
|
||||
trades = []
|
||||
count = 0
|
||||
for item in files_to_process:
|
||||
file_key = item.get('fileName')
|
||||
|
||||
# Download the CSV
|
||||
csv_url = f"https://european-investor-exchange.com/api/trade-file-contents?key={file_key}"
|
||||
@@ -37,7 +83,8 @@ class EIXExchange(BaseExchange):
|
||||
if csv_response.status_code == 200:
|
||||
trades.extend(self._parse_csv(csv_response.text))
|
||||
count += 1
|
||||
if limit and count >= limit:
|
||||
# Only enforce limit if since_date is NOT set
|
||||
if not since_date and limit and count >= limit:
|
||||
break
|
||||
except Exception as e:
|
||||
print(f"Error downloading EIX CSV {file_key}: {e}")
|
||||
|
||||
Reference in New Issue
Block a user