feat: move daemon to docker with daily schedule and deduplication
All checks were successful
Deployment / deploy-docker (push) Successful in 17s
@@ -6,37 +6,27 @@ on:
     - main
 
 jobs:
-  deploy-to-host:
+  deploy-docker:
     runs-on: ubuntu-latest
     steps:
       - name: Deploy via SSH
         uses: appleboy/ssh-action@v0.1.10
         with:
-          host: 172.17.0.1 # default IP of the Docker host as seen from the container
-          username: root # your deployment user
+          host: 172.17.0.1
+          username: root
           key: ${{ secrets.SSH_PRIVATE_KEY }}
           script: |
             mkdir -p /root/docker-files/trading-daemon
             cd /root/docker-files/trading-daemon
 
-            # Clone or pull the repository
+            # Update the repository
             if [ ! -d ".git" ]; then
               git clone https://git.bana.space/krumbelfix/trading-daemon.git .
             else
               git pull origin main
             fi
 
-            # Update the virtual environment
-            python3 -m venv venv
-            ./venv/bin/pip install -r requirements.txt
-
-            # Update systemd units
-            cp systemd/trading-daemon.service /etc/systemd/system/
-            cp systemd/trading-daemon.timer /etc/systemd/system/
-            systemctl daemon-reload
-
-            # Restart
-            systemctl restart trading-daemon.timer
-            systemctl start trading-daemon.service
-
-            echo "Deployment on the host system succeeded."
+            # Rebuild and start the Docker containers
+            docker-compose up -d --build
+
+            echo "Deployment finished. Containers are running."
Dockerfile | 14 (new file)
@@ -0,0 +1,14 @@
+FROM python:3.12-slim
+
+WORKDIR /app
+
+COPY requirements.txt .
+RUN pip install --no-cache-dir -r requirements.txt
+
+COPY . .
+
+# We simply use a short script or let the daemon run in a loop.
+# Since the user wants it to run daily at 23:00, we build the scheduling into the daemon
+# OR we use a simple sleep loop.
+
+CMD ["python", "daemon.py"]
daemon.py | 71
@@ -1,5 +1,8 @@
 import time
 import logging
+import datetime
+import os
+import requests
 from src.exchanges.eix import EIXExchange
 from src.exchanges.ls import LSExchange
 from src.database.questdb_client import DatabaseClient
@@ -10,30 +13,74 @@ logging.basicConfig(
 )
 logger = logging.getLogger("TradingDaemon")
 
-def main():
-    logger.info("Starting Trading Data Fetcher")
+DB_USER = os.getenv("DB_USER", "admin")
+DB_PASSWORD = os.getenv("DB_PASSWORD", "quest")
+DB_AUTH = (DB_USER, DB_PASSWORD) if DB_USER and DB_PASSWORD else None
 
-    # Initialize components
+def get_last_trade_timestamp(db_url, exchange_name):
+    # QuestDB query: get the latest timestamp for a specific exchange
+    query = f"trades where exchange = '{exchange_name}' latest by timestamp"
+    try:
+        # Using the /exec endpoint to get data
+        response = requests.get(f"{db_url}/exec", params={'query': query}, auth=DB_AUTH)
+        if response.status_code == 200:
+            data = response.json()
+            if data['dataset']:
+                # QuestDB returns timestamp in micros since epoch by default in some views, or ISO
+                # Let's assume the timestamp is in the dataset
+                # ILP timestamps are stored as designated timestamps.
+                ts_value = data['dataset'][0][0]  # Adjust index based on column order
+                if isinstance(ts_value, str):
+                    return datetime.datetime.fromisoformat(ts_value.replace('Z', '+00:00'))
+                else:
+                    return datetime.datetime.fromtimestamp(ts_value / 1000000, tz=datetime.timezone.utc)
+    except Exception as e:
+        logger.debug(f"No existing data for {exchange_name} or DB unreachable: {e}")
+    return datetime.datetime.min.replace(tzinfo=datetime.timezone.utc)
+
+def run_task():
+    logger.info("Starting Trading Data Fetcher task...")
     exchanges = [
         EIXExchange(),
         LSExchange()
     ]
-    db = DatabaseClient()
+    db = DatabaseClient(host="questdb", user=DB_USER, password=DB_PASSWORD)
 
-    # Process each exchange
     for exchange in exchanges:
         try:
-            logger.info(f"Fetching data from {exchange.name}...")
-            trades = exchange.fetch_latest_trades()
-            logger.info(f"Fetched {len(trades)} trades from {exchange.name}.")
+            db_url = "http://questdb:9000"
+            last_ts = get_last_trade_timestamp(db_url, exchange.name)
 
-            if trades:
-                db.save_trades(trades)
-                logger.info(f"Stored {len(trades)} trades in database.")
+            logger.info(f"Fetching data from {exchange.name} (Filtering trades older than {last_ts})...")
+            trades = exchange.fetch_latest_trades()
+
+            # Deduplication: only keep trades that are newer than the last one in the DB
+            new_trades = [
+                t for t in trades
+                if t.timestamp.replace(tzinfo=datetime.timezone.utc) > last_ts.replace(tzinfo=datetime.timezone.utc)
+            ]
+
+            logger.info(f"Found {len(trades)} total trades, {len(new_trades)} are new.")
+
+            if new_trades:
+                db.save_trades(new_trades)
+                logger.info(f"Stored {len(new_trades)} new trades in QuestDB.")
         except Exception as e:
             logger.error(f"Error processing exchange {exchange.name}: {e}")
 
-    logger.info("Fetching cycle complete.")
+def main():
+    logger.info("Trading Daemon started. Waiting for 23:00 to run task.")
+
+    while True:
+        now = datetime.datetime.now()
+        # Daily at 23:00
+        if now.hour == 23 and now.minute == 0:
+            run_task()
+            # Wait 61s to avoid running multiple times within the same minute
+            time.sleep(61)
+
+        # Check every 30 seconds
+        time.sleep(30)
 
 if __name__ == "__main__":
     main()
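On the first run the trades table is empty, get_last_trade_timestamp() falls back to datetime.min, and every fetched trade passes the deduplication filter; on later runs only trades newer than the stored latest timestamp survive. A minimal, self-contained sketch of that comparison (the Trade dataclass below is a stand-in for src.exchanges.base.Trade, used here only for illustration):

    import datetime
    from dataclasses import dataclass

    @dataclass
    class Trade:  # stand-in for src.exchanges.base.Trade (illustration only)
        symbol: str
        timestamp: datetime.datetime

    # Sentinel returned by get_last_trade_timestamp() when no rows exist yet
    last_ts = datetime.datetime.min.replace(tzinfo=datetime.timezone.utc)

    trades = [
        Trade("AAPL", datetime.datetime(2024, 1, 2, 22, 59, tzinfo=datetime.timezone.utc)),
        Trade("AAPL", datetime.datetime(2024, 1, 2, 23, 1, tzinfo=datetime.timezone.utc)),
    ]

    # Same filter as run_task(): keep only trades newer than the last stored timestamp
    new_trades = [
        t for t in trades
        if t.timestamp.replace(tzinfo=datetime.timezone.utc) > last_ts.replace(tzinfo=datetime.timezone.utc)
    ]
    print(len(new_trades))  # 2 against an empty database; a real last_ts would drop older trades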
@@ -10,6 +10,16 @@ services:
       - "9009:9009"
     volumes:
       - questdb_data:/root/.questdb
+    restart: always
+
+  fetcher:
+    build: .
+    container_name: trading_fetcher
+    depends_on:
+      - questdb
+    restart: always
+    environment:
+      - PYTHONUNBUFFERED=1
 
 volumes:
   questdb_data:
@@ -4,10 +4,11 @@ from typing import List
 from ..exchanges.base import Trade
 
 class DatabaseClient:
-    def __init__(self, host: str = "localhost", port: int = 9000):
+    def __init__(self, host: str = "localhost", port: int = 9000, user: str = None, password: str = None):
         self.host = host
         self.port = port
         self.url = f"http://{host}:{port}/write"
+        self.auth = (user, password) if user and password else None
 
     def save_trades(self, trades: List[Trade]):
         if not trades:
@@ -15,10 +16,6 @@ class DatabaseClient:
 
         lines = []
         for trade in trades:
-            # QuestDB Influx Line Protocol format:
-            # table_name,tag1=val1,tag2=val2 field1=val1,field2=val2 timestamp
-            # We use microseconds for timestamp (nanoseconds is standard for ILP)
-
             # Clean symbols for ILP
             symbol = trade.symbol.replace(" ", "\\ ").replace(",", "\\,")
             exchange = trade.exchange
@@ -31,8 +28,13 @@
         payload = "\n".join(lines) + "\n"
 
         try:
-            response = requests.post(self.url, data=payload, params={'precision': 'ns'})
-            if response.status_code != 204:
+            response = requests.post(
+                self.url,
+                data=payload,
+                params={'precision': 'ns'},
+                auth=self.auth
+            )
+            if response.status_code not in [204, 200]:
                 print(f"Error saving to QuestDB: {response.text}")
         except Exception as e:
             print(f"Could not connect to QuestDB at {self.url}: {e}")
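For reference, the payload that save_trades() posts to the QuestDB /write endpoint follows the Influx Line Protocol pattern described in the comments removed above (table_name,tags fields timestamp). A minimal sketch of an equivalent request with the new basic-auth handling; the tag and field names are illustrative and not necessarily the ones the client emits:

    import requests

    # Illustrative ILP line: table name, tags, fields, nanosecond timestamp
    line = "trades,exchange=LS,symbol=AAPL price=123.45,size=10i 1704236460000000000\n"

    # Same request shape as DatabaseClient.save_trades(); auth is only attached when user/password are set
    response = requests.post(
        "http://questdb:9000/write",
        data=line,
        params={"precision": "ns"},
        auth=("admin", "quest"),  # the DB_USER / DB_PASSWORD defaults from daemon.py
    )
    print(response.status_code)  # 204 indicates the write was accepted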