first commit

This commit is contained in:
Melchior Reimers
2026-01-23 16:30:35 +01:00
commit ccfee645e8
10 changed files with 333 additions and 0 deletions

50
README.md Normal file
View File

@@ -0,0 +1,50 @@
# Trading Data Daemon
Ein modularer Daemon zum Herunterladen und Speichern von Handelsdaten von verschiedenen Börsen in einer Time-Series-Datenbank.
## Unterstützte Exchanges
- **European Investor Exchange (EIX)**: Lädt tägliche Kursblatt-CSVs herunter.
- **Lang & Schwarz (LS)**: Fragt die heutigen Trades über deren JSON/CSV-RPC ab.
## Architektur
- `src/exchanges/base.py`: Basisklasse für neue Börsen (einfach erweiterbar).
- `src/database/questdb_client.py`: Speichert Daten in QuestDB via Influx Line Protocol (ILP).
- `daemon.py`: Der Orchestrator, der die Daten abruft und speichert.
## Installation und Setup
### 1. QuestDB (Timeseries DB) starten
Am einfachsten via Docker Compose:
```bash
docker-compose up -d
```
QuestDB ist dann unter `http://localhost:9000` erreichbar.
### 2. Python Abhängigkeiten installieren
```bash
pip install -r requirements.txt
```
### 3. Systemd Service einrichten
Kopiere die Dateien nach `/etc/systemd/system/`:
```bash
sudo cp systemd/trading-daemon.service /etc/systemd/system/
sudo cp systemd/trading-daemon.timer /etc/systemd/system/
```
Pfade in `trading-daemon.service` müssen ggf. angepasst werden (aktuell auf `/Users/melchiorreimers/...` gesetzt). Hinweis: systemd läuft nur unter Linux — auf macOS muss der Daemon stattdessen z. B. per `launchd` oder `cron` geplant werden.
Dienste aktivieren:
```bash
sudo systemctl daemon-reload
sudo systemctl enable --now trading-daemon.timer
```
### 4. Manuell testen
```bash
python3 daemon.py
```
## Erweiterung
Um eine neue Börse hinzuzufügen, erstelle einfach eine neue Klasse in `src/exchanges/`, die von `BaseExchange` erbt und implementiere `fetch_latest_trades()`. Füge sie dann in `daemon.py` zur Liste hinzu.

39
daemon.py Normal file
View File

@@ -0,0 +1,39 @@
import time
import logging
from src.exchanges.eix import EIXExchange
from src.exchanges.ls import LSExchange
from src.database.questdb_client import DatabaseClient
# Root logging configuration for the daemon process: INFO level with
# timestamped "logger - level - message" lines (read by journald via systemd).
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
# Module-level logger used by main() below.
logger = logging.getLogger("TradingDaemon")
def main():
    """Run one complete fetch-and-store cycle across all exchanges.

    Each exchange is polled in turn; failures are logged per exchange so
    one broken feed cannot prevent the others from being stored.
    """
    logger.info("Starting Trading Data Fetcher")

    db = DatabaseClient()

    # Every adapter listed here is polled once per invocation; to add a new
    # exchange, append its instance to this tuple (see README "Erweiterung").
    for exchange in (EIXExchange(), LSExchange()):
        try:
            logger.info(f"Fetching data from {exchange.name}...")
            trades = exchange.fetch_latest_trades()
            logger.info(f"Fetched {len(trades)} trades from {exchange.name}.")
            if trades:
                db.save_trades(trades)
                logger.info(f"Stored {len(trades)} trades in database.")
        except Exception as e:
            # A failure in one exchange must not abort the remaining ones.
            logger.error(f"Error processing exchange {exchange.name}: {e}")

    logger.info("Fetching cycle complete.")


if __name__ == "__main__":
    main()

15
docker-compose.yml Normal file
View File

@@ -0,0 +1,15 @@
# Local QuestDB instance backing the trading daemon (see README, step 1).
# NOTE(review): the top-level `version` key is ignored by modern Docker
# Compose (v2) — harmless, but it could be removed.
version: '3.8'
services:
  questdb:
    image: questdb/questdb:latest
    container_name: questdb
    ports:
      - "9000:9000"  # HTTP: web console + /write ILP endpoint used by DatabaseClient
      - "8812:8812"  # QuestDB default: PostgreSQL wire protocol — not used by this daemon
      - "9009:9009"  # QuestDB default: ILP over TCP — not used by this daemon
    volumes:
      - questdb_data:/root/.questdb  # persist the database across container restarts
volumes:
  questdb_data:

2
requirements.txt Normal file
View File

@@ -0,0 +1,2 @@
requests
beautifulsoup4

View File

@@ -0,0 +1,46 @@
import requests
import time
from typing import List
from ..exchanges.base import Trade
class DatabaseClient:
    """Persists Trade objects to QuestDB via the HTTP Influx Line Protocol
    (ILP) `/write` endpoint, falling back to a local log file when the
    write fails so no data is silently lost.
    """

    def __init__(self, host: str = "localhost", port: int = 9000):
        """host/port locate the QuestDB HTTP server (defaults: local Docker)."""
        self.host = host
        self.port = port
        self.url = f"http://{host}:{port}/write"

    @staticmethod
    def _escape_tag(value: str) -> str:
        """Escape an ILP tag value: spaces, commas and '=' must be
        backslash-escaped per the Influx line protocol."""
        return (value.replace(" ", "\\ ")
                     .replace(",", "\\,")
                     .replace("=", "\\="))

    def save_trades(self, trades: List[Trade]):
        """Write trades to QuestDB.

        On a connection error OR a non-204 response the trades are appended
        to the local fallback log (previously only connection errors fell
        back, so rejected batches were lost).
        """
        if not trades:
            return

        lines = []
        for trade in trades:
            # ILP line: table,tag1=v1,tag2=v2 field1=v1,field2=v2 timestamp(ns)
            # All tag values are escaped; '=' was previously unescaped and the
            # exchange tag was not escaped at all.
            tags = (f"trades,exchange={self._escape_tag(trade.exchange)}"
                    f",symbol={self._escape_tag(trade.symbol)}")
            # Trade.isin is optional; omit the tag instead of writing the
            # literal string "None" into the database.
            if trade.isin:
                tags += f",isin={self._escape_tag(trade.isin)}"
            lines.append(f"{tags} price={trade.price},quantity={trade.quantity} "
                         f"{int(trade.timestamp.timestamp() * 1e9)}")
        payload = "\n".join(lines) + "\n"

        try:
            # timeout keeps the daemon from hanging forever on a stuck socket.
            response = requests.post(self.url, data=payload,
                                     params={'precision': 'ns'}, timeout=10)
            if response.status_code != 204:
                print(f"Error saving to QuestDB: {response.text}")
                self._fallback_save(trades)
        except Exception as e:
            print(f"Could not connect to QuestDB at {self.url}: {e}")
            # Fallback: keep the data on disk so it can be replayed later.
            self._fallback_save(trades)

    def _fallback_save(self, trades: List[Trade]):
        """Append trades to a plain-text log when QuestDB is unavailable."""
        with open("trades_fallback.log", "a") as f:
            for t in trades:
                f.write(f"{t.timestamp} | {t.exchange} | {t.symbol} | {t.price} | {t.quantity}\n")

25
src/exchanges/base.py Normal file
View File

@@ -0,0 +1,25 @@
import abc
from datetime import datetime
from typing import List, Dict, Any
class Trade:
    """A single executed trade, normalised across exchange adapters.

    Attributes:
        exchange: short exchange identifier (adapters pass their ``name``).
        symbol: instrument symbol; some feeds fall back to the ISIN here.
        isin: optional ISIN of the instrument (may be None).
        price: unit price of the trade.
        quantity: traded quantity.
        timestamp: execution time of the trade (timezone per the source feed).
    """

    def __init__(self, exchange: str, symbol: str, price: float, quantity: float, timestamp: datetime, isin: str = None):
        self.exchange = exchange
        self.symbol = symbol
        self.isin = isin  # optional; annotation is effectively Optional[str]
        self.price = price
        self.quantity = quantity
        self.timestamp = timestamp

    def __repr__(self):
        # Debug-friendly summary; isin is intentionally omitted here.
        return f"Trade({self.exchange}, {self.symbol}, {self.price}, {self.quantity}, {self.timestamp})"
class BaseExchange(abc.ABC):
    """Interface every exchange adapter must implement.

    To add a new exchange, subclass this and implement both abstract
    members, then register the instance in daemon.py.
    """

    @abc.abstractmethod
    def fetch_latest_trades(self) -> List[Trade]:
        """Fetch and return the most recent trades from the exchange."""
        pass

    @property
    @abc.abstractmethod
    def name(self) -> str:
        """Short human-readable exchange identifier (e.g. "EIX")."""
        pass

75
src/exchanges/eix.py Normal file
View File

@@ -0,0 +1,75 @@
import requests
import json
from bs4 import BeautifulSoup
from datetime import datetime
from typing import List
from .base import BaseExchange, Trade
import csv
import io
class EIXExchange(BaseExchange):
    """Adapter for the European Investor Exchange (EIX).

    Scrapes the Next.js trade-list page for CSV file keys, downloads the
    first (assumed newest) trade file and parses it into Trade objects.
    """

    # Seconds before an HTTP request is abandoned; previously there was no
    # timeout, so a stuck connection could hang the whole daemon.
    REQUEST_TIMEOUT = 30

    @property
    def name(self) -> str:
        return "EIX"

    def fetch_latest_trades(self) -> List[Trade]:
        """Return trades from the most recent EIX trade file ([] if none).

        Raises requests.HTTPError if the trade-list page cannot be fetched.
        """
        url = "https://european-investor-exchange.com/en/trade-list"
        response = requests.get(url, timeout=self.REQUEST_TIMEOUT)
        response.raise_for_status()

        # The page is a Next.js app; the row data lives in the __NEXT_DATA__
        # JSON blob at data['props']['pageProps']['rowsData'].
        soup = BeautifulSoup(response.text, 'html.parser')
        next_data_script = soup.find('script', id='__NEXT_DATA__')
        if not next_data_script:
            return []

        data = json.loads(next_data_script.string)
        rows_data = data.get('props', {}).get('pageProps', {}).get('rowsData', [])

        trades = []
        for row in rows_data:
            # Only the first row with a key (assumed latest) is processed.
            # TODO: track already-processed files to avoid gaps/duplicates.
            file_key = row.get('key')
            if not file_key:
                continue
            csv_url = f"https://european-investor-exchange.com/api/trade-file-contents?key={file_key}"
            csv_response = requests.get(csv_url, timeout=self.REQUEST_TIMEOUT)
            if csv_response.status_code == 200:
                trades.extend(self._parse_csv(csv_response.text))
            break
        return trades

    def _parse_csv(self, csv_text: str) -> List[Trade]:
        """Parse an EIX trade CSV into Trade objects, skipping malformed rows.

        Expected header: Trading day & Trading time UTC,Instrument Identifier,
        Quantity,Unit Price,Price Currency,Venue Identifier,Side
        """
        trades = []
        reader = csv.DictReader(io.StringIO(csv_text), delimiter=',')
        for row in reader:
            try:
                price = float(row['Unit Price'])
                quantity = float(row['Quantity'])
                isin = row['Instrument Identifier']
                # No separate symbol column in the feed; fall back to ISIN.
                symbol = isin
                # Timestamps look like 2026-01-22T06:30:00.617Z; fromisoformat
                # rejects the 'Z' suffix before Python 3.11, so normalise it.
                ts_str = row['Trading day & Trading time UTC'].replace('Z', '+00:00')
                timestamp = datetime.fromisoformat(ts_str)
            except (KeyError, ValueError, TypeError):
                # Narrowed from a blanket `except Exception`: only genuinely
                # malformed rows are skipped; unexpected bugs now surface.
                continue
            trades.append(Trade(
                exchange=self.name,
                symbol=symbol,
                isin=isin,
                price=price,
                quantity=quantity,
                timestamp=timestamp
            ))
        return trades

58
src/exchanges/ls.py Normal file
View File

@@ -0,0 +1,58 @@
import requests
from datetime import datetime
from typing import List
from .base import BaseExchange, Trade
class LSExchange(BaseExchange):
    """Adapter for Lang & Schwarz (LS).

    Queries today's trades from the lstctradestoday endpoint, which returns
    semicolon-separated CSV despite living under a /json/ RPC path.
    """

    # Seconds before the HTTP request is abandoned; previously there was no
    # timeout, so a stuck connection could hang the whole daemon.
    REQUEST_TIMEOUT = 30

    @property
    def name(self) -> str:
        return "LS"

    def fetch_latest_trades(self) -> List[Trade]:
        """Return today's LS trades; on any fetch error, log and return []."""
        url = "https://www.ls-x.de/_rpc/json/.lstc/instrument/list/lstctradestoday"
        # Browser-like headers; the endpoint may otherwise reject the request
        # or serve a disclaimer page.
        headers = {
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
            'Accept': 'application/json',
            'Referer': 'https://www.ls-tc.de/'
        }
        try:
            response = requests.get(url, headers=headers, timeout=self.REQUEST_TIMEOUT)
            response.raise_for_status()
            return self._parse_csv(response.text)
        except Exception as e:
            # Best-effort feed: the daemon treats a failure as "no data".
            print(f"Error fetching LS data: {e}")
            return []

    def _parse_csv(self, csv_text: str) -> List[Trade]:
        """Parse the semicolon-separated trade CSV, skipping malformed rows.

        Expected header: isin;displayName;tradeTime;price;currency;size;orderId
        """
        import csv
        import io

        trades = []
        reader = csv.DictReader(io.StringIO(csv_text), delimiter=';')
        for item in reader:
            try:
                # The feed uses a decimal comma; convert for float().
                price = float(item['price'].replace(',', '.'))
                quantity = float(item['size'].replace(',', '.'))
                isin = item['isin']
                symbol = item['displayName']
                # e.g. 2026-01-23T07:30:00.992000Z; normalise the 'Z' suffix,
                # which fromisoformat rejects before Python 3.11.
                ts_str = item['tradeTime'].replace('Z', '+00:00')
                timestamp = datetime.fromisoformat(ts_str)
            except (KeyError, ValueError, TypeError):
                # Narrowed from a blanket `except Exception`: only genuinely
                # malformed rows are skipped; unexpected bugs now surface.
                continue
            trades.append(Trade(
                exchange=self.name,
                symbol=symbol,
                isin=isin,
                price=price,
                quantity=quantity,
                timestamp=timestamp
            ))
        return trades

View File

@@ -0,0 +1,14 @@
# One-shot service that runs a single fetch cycle of the trading daemon.
# It is triggered periodically by trading-daemon.timer (see README, step 3).
[Unit]
Description=Trading Data Fetcher Service
After=network.target

[Service]
# oneshot: daemon.py runs one cycle and exits; the timer re-triggers it.
Type=oneshot
User=melchiorreimers
# NOTE(review): these paths point at a macOS home directory, but systemd
# only runs on Linux — adjust WorkingDirectory/ExecStart for the target host.
WorkingDirectory=/Users/melchiorreimers/.gemini/antigravity/scratch/trading_daemon
ExecStart=/usr/bin/python3 /Users/melchiorreimers/.gemini/antigravity/scratch/trading_daemon/daemon.py
StandardOutput=journal
StandardError=journal

[Install]
WantedBy=multi-user.target

View File

@@ -0,0 +1,9 @@
# Fires trading-daemon.service at the top of every hour.
[Unit]
Description=Timer for Trading Data Fetcher

[Timer]
# Every hour on the hour.
OnCalendar=*-*-* *:00:00
# Catch up on activations missed while the machine was off or asleep.
Persistent=true

[Install]
WantedBy=timers.target