feat: add automatic historical fetch on startup if DB is empty

Author: Melchior Reimers
Date: 2026-01-23 17:44:06 +01:00
parent aa278db9dd
commit 47a8ceab32
3 changed files with 55 additions and 27 deletions
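Only the two exchange adapters are shown in the diffs below; the startup logic named in the commit title lives in the third changed file, which is not part of this view. As a rough sketch of what that path presumably does — db.count_trades(), insert_trades(), the exchanges list and the backfill values are all assumptions, not the actual code:

# Sketch only: illustrates the "fetch history if the DB is empty" idea from the
# commit title. Helper names and backfill depths are assumptions.
def backfill_if_empty(db, exchanges) -> None:
    if db.count_trades() > 0:
        return  # DB already has data, nothing to backfill
    for exchange in exchanges:
        if exchange.name == "EIX":
            # New limit parameter: pull several historical CSV files, not just the latest.
            trades = exchange.fetch_latest_trades(limit=30)
        elif exchange.name == "LS":
            # New include_yesterday flag: also hit the yesterday endpoint.
            trades = exchange.fetch_latest_trades(include_yesterday=True)
        else:
            trades = exchange.fetch_latest_trades()
        db.insert_trades(trades)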


@@ -12,7 +12,7 @@ class EIXExchange(BaseExchange):
     def name(self) -> str:
         return "EIX"
-    def fetch_latest_trades(self) -> List[Trade]:
+    def fetch_latest_trades(self, limit: int = 1) -> List[Trade]:
         url = "https://european-investor-exchange.com/en/trade-list"
         response = requests.get(url)
         response.raise_for_status()
@@ -23,13 +23,11 @@ class EIXExchange(BaseExchange):
             return []
         data = json.loads(next_data_script.string)
-        # The structure according to subagent: data['props']['pageProps']['rowsData']
         rows_data = data.get('props', {}).get('pageProps', {}).get('rowsData', [])
         trades = []
+        count = 0
         for row in rows_data:
-            # We only want the most recent ones. For simplicity, let's pick the first one which is likely the latest.
-            # In a real daemon, we might want to track which ones we already processed.
             file_key = row.get('key')
             if not file_key:
                 continue
@@ -39,8 +37,9 @@ class EIXExchange(BaseExchange):
             csv_response = requests.get(csv_url)
             if csv_response.status_code == 200:
                 trades.extend(self._parse_csv(csv_response.text))
-            # Break after one file for demonstration or handle multiple
-            break
+            count += 1
+            if limit and count >= limit:
+                break
         return trades
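The default limit=1 keeps the old one-file-per-poll behaviour, while a larger limit lets the startup backfill walk further back through the published CSV files. A usage sketch — constructing the adapter directly and the value 30 are assumptions for illustration:

eix = EIXExchange()

# Regular polling loop: default limit=1 downloads only the newest trade-list CSV.
latest = eix.fetch_latest_trades()

# Startup backfill: walk the 30 most recent CSV files.
history = eix.fetch_latest_trades(limit=30)

# limit=0 is falsy, so the `if limit and count >= limit` guard never fires
# and every listed file is downloaded.
everything = eix.fetch_latest_trades(limit=0)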


@@ -8,20 +8,22 @@ class LSExchange(BaseExchange):
     def name(self) -> str:
         return "LS"
-    def fetch_latest_trades(self) -> List[Trade]:
-        # Today's trades endpoint
-        url = "https://www.ls-x.de/_rpc/json/.lstc/instrument/list/lstctradestoday"
+    def fetch_latest_trades(self, include_yesterday: bool = False) -> List[Trade]:
+        endpoints = ["https://www.ls-x.de/_rpc/json/.lstc/instrument/list/lstctradestoday"]
+        if include_yesterday:
+            endpoints.append("https://www.ls-x.de/_rpc/json/.lstc/instrument/list/lstctradesyesterday")
         # We might need headers to mimic a browser or handle disclaimer
         headers = {
             'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
             'Accept': 'application/json',
             'Referer': 'https://www.ls-tc.de/'
         }
-        try:
-            response = requests.get(url, headers=headers)
-            response.raise_for_status()
+        all_trades = []
+        for url in endpoints:
+            try:
+                response = requests.get(url, headers=headers)
+                response.raise_for_status()
                 import csv
                 import io
@@ -42,7 +44,7 @@ class LSExchange(BaseExchange):
                         ts_str = time_str.replace('Z', '+00:00')
                         timestamp = datetime.fromisoformat(ts_str)
-                        trades.append(Trade(
+                        all_trades.append(Trade(
                             exchange=self.name,
                             symbol=symbol,
                             isin=isin,
@@ -52,7 +54,6 @@ class LSExchange(BaseExchange):
                         ))
                     except Exception:
                         continue
-            return trades
-        except Exception as e:
-            print(f"Error fetching LS data: {e}")
-            return []
+            except Exception as e:
+                print(f"Error fetching LS data from {url}: {e}")
+        return all_trades
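Because each endpoint now gets its own try/except inside the loop, a failure on the yesterday endpoint only prints an error while the trades already collected from the today endpoint are still returned. Usage sketch — constructing the adapter directly is an assumption about how it is wired up:

ls = LSExchange()

# Regular polling: today's trades only, same behaviour as before.
today = ls.fetch_latest_trades()

# Startup backfill: additionally query the lstctradesyesterday endpoint.
recent = ls.fetch_latest_trades(include_yesterday=True)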