670 lines
29 KiB
Plaintext
670 lines
29 KiB
Plaintext
|
|
#!/usr/bin/env python3
"""
DOLPHIN Nautilus Event-Driven Trader
"""
import sys
import json
import math
import time
import signal
import threading
from collections import deque
from concurrent.futures import ThreadPoolExecutor
from datetime import datetime, timezone
from pathlib import Path
|
|||
|
|
|
|||
|
|
# Stablecoins / pegged assets that must never be traded
|
|||
|
|
_STABLECOIN_SYMBOLS = frozenset({
|
|||
|
|
'USDCUSDT', 'BUSDUSDT', 'FDUSDUSDT', 'USDTUSDT', 'TUSDUSDT',
|
|||
|
|
'DAIUSDT', 'FRAXUSDT', 'USDDUSDT', 'USTCUSDT', 'EURUSDT',
|
|||
|
|
})
|
|||
|
|
|
|||
|
|
# Make project packages importable before the project imports below.
sys.path.insert(0, '/mnt/dolphinng5_predict')
sys.path.insert(0, '/mnt/dolphinng5_predict/nautilus_dolphin')

from nautilus_dolphin.nautilus.proxy_boost_engine import create_d_liq_engine
from nautilus_dolphin.nautilus.adaptive_circuit_breaker import AdaptiveCircuitBreaker
from nautilus_dolphin.nautilus.ob_features import OBFeatureEngine
from nautilus_dolphin.nautilus.ob_provider import MockOBProvider

# ClickHouse writer is optional — degrade to no-op stubs when it is absent.
try:
    from ch_writer import ch_put, ts_us as _ch_ts_us
except ImportError:
    def ch_put(*a, **kw): pass
    def _ch_ts_us(): return 0

sys.path.insert(0, '/mnt/dolphinng5_predict/prod')
from dolphin_exit_handler import install_exit_handler
install_exit_handler("nautilus_trader")
|
|||
|
|
|
|||
|
|
# Hazelcast cluster identity and the on-disk data locations this trader reads.
HZ_CLUSTER = "dolphin"
HZ_HOST = "127.0.0.1:5701"
EIGEN_DIR = Path('/mnt/dolphinng6_data/eigenvalues')

# Disk fallback for the capital checkpoint (survives a Hazelcast restart).
CAPITAL_DISK_CHECKPOINT = Path("/tmp/dolphin_capital_checkpoint.json")
|
|||
|
|
|
|||
|
|
# Keyword arguments for create_d_liq_engine(); values follow the gold-spec backtest.
ENGINE_KWARGS = dict(
    initial_capital=25000.0, vel_div_threshold=-0.02, vel_div_extreme=-0.05,
    min_leverage=0.5, max_leverage=8.0,  # note: create_d_liq_engine overrides to D_LIQ_SOFT_CAP=8.0
    leverage_convexity=3.0,
    fraction=0.20, fixed_tp_pct=0.0095, stop_pct=1.0, max_hold_bars=250,  # gold spec: 250
    use_direction_confirm=True, dc_lookback_bars=7, dc_min_magnitude_bps=0.75,
    dc_skip_contradicts=True, dc_leverage_boost=1.0, dc_leverage_reduce=0.5,
    use_asset_selection=True, min_irp_alignment=0.0,  # gold spec: no IRP filter
    use_sp_fees=True, use_sp_slippage=True,
    sp_maker_entry_rate=0.62, sp_maker_exit_rate=0.50,
    use_ob_edge=True, ob_edge_bps=5.0, ob_confirm_rate=0.40,
    lookback=100, use_alpha_layers=True, use_dynamic_leverage=True, seed=42,
)
|
|||
|
|
|
|||
|
|
# Number of BTC prices needed before the volatility gate activates.
BTC_VOL_WINDOW = 50
# Gold-calibrated from full 5-year BTC history: 0.00026414 (stricter, ~2.7x tighter).
# 2026-04-07: switched to 56-day gold window value (0.00009868) — the exact threshold
# used in the T=2155 ROI=+189% backtest. More permissive; paper trading to gather data.
VOL_P60_THRESHOLD = 0.00009868
|
|||
|
|
|
|||
|
|
# Algorithm Versioning
# v1_shakedown: v50-v150 (noise bug), loose vol gate
# v2_gold_fix: CORRECTED v50-v750 macro divergence (matches parquet backtest)
ALGO_VERSION = "v2_gold_fix_v50-v750"

# Persistent, version-tagged trade log (survives reboots; sorts by date)
_LOG_DIR = "/mnt/dolphinng5_predict/prod/logs"
import os as _os; _os.makedirs(_LOG_DIR, exist_ok=True)
_LOG_DATE = datetime.now(timezone.utc).strftime("%Y%m%d")
TRADE_LOG = f"{_LOG_DIR}/nautilus_trader_{_LOG_DATE}_{ALGO_VERSION}.log"

# Main-loop flag; cleared by signal_handler to request shutdown.
running = True
|
|||
|
|
|
|||
|
|
def log(msg):
    """Write *msg* with a UTC ISO timestamp to stdout and append it to TRADE_LOG."""
    ts = datetime.now(timezone.utc).isoformat()
    line = f"[{ts}] {msg}"
    print(line, flush=True)
    with open(TRADE_LOG, 'a') as f:
        f.write(line + '\n')
|
|||
|
|
|
|||
|
|
class DolphinLiveTrader:
    """Event-driven live trader.

    Listens on Hazelcast for eigen scans, steps the NDAlphaEngine one bar
    per scan, and publishes state/heartbeat/telemetry back to Hazelcast and
    ClickHouse.
    """

    def __init__(self):
        # Engine and Hazelcast handles — populated by _build_engine / _connect_hz.
        self.eng = None
        self.hz_client = None
        self.features_map = None
        self.safety_map = None
        self.pnl_map = None
        self.state_map = None
        self.heartbeat_map = None
        # Locks: eng_lock serializes engine access; _dedup_lock guards the
        # atomic check-and-set on last_scan_number.
        self.eng_lock = threading.Lock()
        self._dedup_lock = threading.Lock()
        # Single worker thread keeps scan processing off the Hz reactor thread.
        self._scan_executor = ThreadPoolExecutor(max_workers=1, thread_name_prefix="scan")
        self.last_scan_number = -1
        self.last_file_mtime = 0
        self.bar_idx = 0
        self.current_day = None
        self.trades_executed = 0
        self.scans_processed = 0
        # Rolling BTC closes for the volatility gate (+2 slack for diff()).
        self.btc_prices = deque(maxlen=BTC_VOL_WINDOW + 2)
        # Posture cache (10s TTL — see _read_posture).
        self.cached_posture = "APEX"
        self.posture_cache_time = 0
        # Order-book feature engine wiring (lazy — see _wire_obf).
        self.ob_assets = []
        self.ob_eng = None
        self.acb = None
        self.last_w750_vel = None
        self._pending_entries: dict = {}  # trade_id -> entry snapshot (for CH trade_events)
        self._exf_log_time = 0.0          # throttle for on_exf_update logging
|
|||
|
|
|
|||
|
|
def _build_engine(self):
    """Construct the NDAlphaEngine and wire in the ACB and MC-Forewarner.

    Ordering matters: set_esoteric_hazard_multiplier(0.0) must run before
    set_mc_forewarner (gold-spec init guard).
    """
    log("Building NDAlphaEngine...")
    self.eng = create_d_liq_engine(**ENGINE_KWARGS)
    log(f" Engine: {type(self.eng).__name__}")
    log(f" Leverage: soft={self.eng.base_max_leverage}x abs={self.eng.abs_max_leverage}x")

    # Adaptive circuit breaker — preload w750 history when eigen data exists.
    if EIGEN_DIR.exists():
        try:
            date_strings = sorted([d.name for d in EIGEN_DIR.iterdir() if d.is_dir()])
            self.acb = AdaptiveCircuitBreaker()
            self.acb.preload_w750(date_strings)
            self.eng.set_acb(self.acb)
            log(" ACBv6: loaded")
        except Exception as e:
            log(f" ACBv6: {e}")
    else:
        self.acb = AdaptiveCircuitBreaker()
        self.eng.set_acb(self.acb)
        log(" ACBv6: loaded (no preload dates)")

    self.eng.set_esoteric_hazard_multiplier(0.0)  # gold spec: init guard, MUST precede set_mc_forewarner
    log(f" Hazard: set_esoteric_hazard_multiplier(0.0) — soft={self.eng.base_max_leverage}x")

    # MC-Forewarner: optional ML overlay, wired only when trained models exist.
    MC_MODELS_DIR = '/mnt/dolphinng5_predict/nautilus_dolphin/mc_results/models'
    MC_BASE_CFG = {
        'trial_id': 0, 'vel_div_threshold': -0.020, 'vel_div_extreme': -0.050,
        'use_direction_confirm': True, 'dc_lookback_bars': 7,
        'dc_min_magnitude_bps': 0.75, 'dc_skip_contradicts': True,
        'dc_leverage_boost': 1.00, 'dc_leverage_reduce': 0.50,
        'vd_trend_lookback': 10, 'min_leverage': 0.50, 'max_leverage': 8.00,  # gold spec
        'leverage_convexity': 3.00, 'fraction': 0.20, 'use_alpha_layers': True,
        'use_dynamic_leverage': True, 'fixed_tp_pct': 0.0095, 'stop_pct': 1.00,
        'max_hold_bars': 250, 'use_sp_fees': True, 'use_sp_slippage': True,  # gold spec
        'sp_maker_entry_rate': 0.62, 'sp_maker_exit_rate': 0.50,
        'use_ob_edge': True, 'ob_edge_bps': 5.00, 'ob_confirm_rate': 0.40,
        'ob_imbalance_bias': -0.09, 'ob_depth_scale': 1.00,
        'use_asset_selection': True, 'min_irp_alignment': 0.0, 'lookback': 100,  # gold spec
        'acb_beta_high': 0.80, 'acb_beta_low': 0.20, 'acb_w750_threshold_pct': 60,
    }
    if Path(MC_MODELS_DIR).exists():
        try:
            from mc.mc_ml import DolphinForewarner
            forewarner = DolphinForewarner(models_dir=MC_MODELS_DIR)
            self.eng.set_mc_forewarner(forewarner, MC_BASE_CFG)
            log(" MC-Forewarner: wired")
        except Exception as e:
            log(f" MC-Forewarner: {e}")
|
|||
|
|
|
|||
|
|
def _connect_hz(self):
    """Connect to Hazelcast, bind the named maps, and publish a startup heartbeat."""
    log("Connecting to Hazelcast...")
    import hazelcast
    self.hz_client = hazelcast.HazelcastClient(cluster_name=HZ_CLUSTER, cluster_members=[HZ_HOST])
    self.features_map = self.hz_client.get_map("DOLPHIN_FEATURES")
    self.safety_map = self.hz_client.get_map("DOLPHIN_SAFETY")
    self.pnl_map = self.hz_client.get_map("DOLPHIN_PNL_BLUE")
    self.state_map = self.hz_client.get_map("DOLPHIN_STATE_BLUE")
    self.heartbeat_map = self.hz_client.get_map("DOLPHIN_HEARTBEAT")
    # Immediate heartbeat — prevents Cat1=0 during startup gap
    try:
        self.heartbeat_map.blocking().put('nautilus_flow_heartbeat', json.dumps({
            'ts': time.time(),
            'iso': datetime.now(timezone.utc).isoformat(),
            'phase': 'starting',
            'flow': 'nautilus_event_trader',
        }))
    except Exception:
        pass  # heartbeat is best-effort; a miss here is recovered on first scan
    log(" Hz connected")
|
|||
|
|
|
|||
|
|
def _read_posture(self):
    """Return the current safety posture, cached for 10 seconds.

    The safety map value may be a plain string, a JSON-encoded dict, or a
    dict; all three shapes are handled. On any read/parse failure the last
    cached posture is returned unchanged (best-effort read).
    """
    now = time.time()
    if now - self.posture_cache_time < 10:
        return self.cached_posture
    try:
        posture_raw = self.safety_map.blocking().get("latest") or self.safety_map.blocking().get("posture")
        if posture_raw:
            if isinstance(posture_raw, str):
                try:
                    parsed = json.loads(posture_raw)
                    self.cached_posture = parsed.get("posture", posture_raw)
                except (json.JSONDecodeError, AttributeError):
                    # Not JSON (or JSON scalar) — treat the raw string as the posture.
                    self.cached_posture = posture_raw
            else:
                self.cached_posture = posture_raw.get("posture", "APEX")
        self.posture_cache_time = now
    except Exception:
        # FIX: was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt.
        pass
    return self.cached_posture
|
|||
|
|
|
|||
|
|
def _rollover_day(self):
    """Begin a new trading day when the UTC date changes.

    Pre-warms the ACB cache from the latest Hz EXF/eigen snapshots, then
    calls engine.begin_day() exactly once per date (double-checked under
    eng_lock so concurrent callers cannot both roll over).
    """
    today = datetime.now(timezone.utc).strftime('%Y-%m-%d')
    if today == self.current_day:
        return
    posture = self._read_posture()
    with self.eng_lock:
        if today != self.current_day:  # double-checked: only one thread calls begin_day
            if getattr(self, 'acb', None):
                try:
                    exf_raw = self.features_map.blocking().get('exf_latest') if self.features_map else None
                    es_raw = self.features_map.blocking().get('latest_eigen_scan') if self.features_map else None

                    exf_snapshot = json.loads(exf_raw) if isinstance(exf_raw, str) else (exf_raw or {})
                    eigen_scan = json.loads(es_raw) if isinstance(es_raw, str) else (es_raw or {})

                    w750_vel = eigen_scan.get('w750_velocity', 0.0)

                    if exf_snapshot:
                        self.acb.get_dynamic_boost_from_hz(
                            date_str=today,
                            exf_snapshot=exf_snapshot,
                            # FIX: `if w750_vel` treated a legitimate 0.0 velocity as
                            # missing; only None means "no value".
                            w750_velocity=float(w750_vel) if w750_vel is not None else None
                        )
                        log(f"ACB: Pre-warmed cache for {today} from HZ")
                except Exception as e:
                    log(f"ACB Rollover Error: {e}")

            self.eng.begin_day(today, posture=posture)
            self.bar_idx = 0
            self.current_day = today
            log(f"begin_day({today}) called with posture={posture}")
|
|||
|
|
|
|||
|
|
def _compute_vol_ok(self, scan):
    """Return True when BTC short-horizon volatility clears the gold gate.

    Fail-open by design: returns True whenever BTC price data is missing
    or fewer than BTC_VOL_WINDOW prices have been accumulated.
    """
    assets = scan.get('assets', [])
    prices = scan.get('asset_prices', [])
    if not assets or not prices:
        return True
    prices_dict = dict(zip(assets, prices))
    btc_price = prices_dict.get('BTCUSDT')
    if btc_price is None:
        return True
    self.btc_prices.append(float(btc_price))
    if len(self.btc_prices) < BTC_VOL_WINDOW:
        return True
    import numpy as np
    arr = np.array(self.btc_prices)
    # Population std of simple bar-to-bar returns over the rolling window.
    dvol = float(np.std(np.diff(arr) / arr[:-1]))
    return dvol > VOL_P60_THRESHOLD
|
|||
|
|
|
|||
|
|
@staticmethod
def _normalize_ng7(scan: dict) -> dict:
    """
    Promote NG7-format scan to NG5-compatible flat dict.

    NG7 embeds eigenvalue windows and prices inside result{} — the engine
    expects flat top-level fields. Mapping derived from continuous_convert.py:
      vel_div = w50_velocity − w750_velocity (fast minus slow eigenvalue velocity)
      w50_velocity = multi_window_results["50"].tracking_data.lambda_max_velocity
      w750_velocity = multi_window_results["750"].tracking_data.lambda_max_velocity
      assets = sorted(current_prices.keys()), BTCUSDT always last
    """
    result = scan.get('result') or {}
    mw = result.get('multi_window_results') or {}

    def _vel(win):
        # Missing / non-numeric / non-finite velocities all collapse to 0.0.
        v = (mw.get(str(win)) or {}).get('tracking_data', {}).get('lambda_max_velocity')
        try:
            f = float(v)
            return f if math.isfinite(f) else 0.0
        except (TypeError, ValueError):
            return 0.0

    v50 = _vel(50)
    v150 = _vel(150)
    v750 = _vel(750)

    cp = (result.get('pricing_data') or {}).get('current_prices') or {}
    assets = [a for a in cp if a != 'BTCUSDT']
    if 'BTCUSDT' in cp:
        assets.append('BTCUSDT')  # BTC always last — matches NG5/Arrow convention
    prices = [float(cp[a]) for a in assets]

    instability = float((result.get('regime_prediction') or {})
                        .get('instability_score') or 0.0)

    return {
        **scan,
        'vel_div': v50 - v750,
        'w50_velocity': v50,
        'w750_velocity': v750,
        'assets': assets,
        'asset_prices': prices,
        'instability_50': instability,
    }
|
|||
|
|
|
|||
|
|
def on_scan(self, event):
    """Reactor-thread entry point — dispatches immediately to worker thread."""
    if not event.value:
        return
    # Timestamp on the reactor thread so scan→fill latency includes queueing.
    listener_time = time.time()
    self._scan_executor.submit(self._process_scan, event, listener_time)
|
|||
|
|
|
|||
|
|
def _process_scan(self, event, listener_time):
    """Worker-thread scan handler.

    Pipeline: parse → NG7 normalize → dedup on scan_number → day rollover →
    OBF wiring → price sanity checks → vol gate → posture sync →
    engine.step_bar() → ClickHouse telemetry, trade-event capture, and
    state push. Any failure is logged and the scan dropped.
    """
    try:
        if not event.value:
            return

        scan = json.loads(event.value) if isinstance(event.value, str) else event.value

        # Normalise NG7 format → NG5-compatible flat dict before any field access
        if scan.get('version') == 'NG7':
            scan = self._normalize_ng7(scan)

        scan_number = int(scan.get('scan_number') or 0)

        # Dedup: scan_number is authoritative (monotonically increasing).
        # file_mtime / timestamp are unreliable across NG7 restart probes.
        with self._dedup_lock:
            if scan_number > 0 and scan_number <= self.last_scan_number:
                return
            self.last_scan_number = scan_number
            self.scans_processed += 1

        self._rollover_day()

        assets = scan.get('assets') or []
        if assets and not self.ob_assets:
            self._wire_obf(assets)

        prices = scan.get('asset_prices') or []
        if assets and prices and len(assets) != len(prices):
            log(f"WARN scan #{scan_number}: assets/prices mismatch "
                f"({len(assets)}≠{len(prices)}) — dropped")
            return
        prices_dict = dict(zip(assets, prices)) if assets and prices else {}
        # Remove stablecoins — they should never be selected as a trade asset
        for sym in _STABLECOIN_SYMBOLS:
            prices_dict.pop(sym, None)

        vol_ok = self._compute_vol_ok(scan)

        vel_div = float(scan.get('vel_div') or 0.0)
        if not math.isfinite(vel_div):
            log(f"WARN scan #{scan_number}: non-finite vel_div={vel_div} — clamped to 0.0")
            vel_div = 0.0

        v50_vel = float(scan.get('w50_velocity') or 0.0)
        v750_vel = float(scan.get('w750_velocity') or 0.0)
        if not math.isfinite(v50_vel): v50_vel = 0.0
        if not math.isfinite(v750_vel): v750_vel = 0.0
        self.last_w750_vel = v750_vel

        # Feed live OB data into OBF engine for this bar (AGENT_SPEC_OBF_LIVE_SWITCHOVER)
        if self.ob_eng is not None and self.ob_assets:
            self.ob_eng.step_live(self.ob_assets, self.bar_idx)

        # Live posture sync — update engine posture + regime_dd_halt together
        posture_now = self._read_posture()
        with self.eng_lock:
            prev_posture = getattr(self.eng, '_day_posture', 'APEX')
            if posture_now != prev_posture:
                self.eng._day_posture = posture_now
                if posture_now in ('TURTLE', 'HIBERNATE'):
                    self.eng.regime_dd_halt = True
                    log(f"POSTURE_SYNC: {posture_now} — halt set")
                else:
                    self.eng.regime_dd_halt = False
                    log(f"POSTURE_SYNC: {posture_now} — halt lifted")

        step_start = time.time()
        with self.eng_lock:
            result = self.eng.step_bar(
                bar_idx=self.bar_idx, vel_div=vel_div, prices=prices_dict,
                vol_regime_ok=vol_ok, v50_vel=v50_vel, v750_vel=v750_vel
            )
        self.bar_idx += 1
        scan_to_fill_ms = (time.time() - listener_time) * 1000
        step_bar_ms = (time.time() - step_start) * 1000
        log(f"LATENCY scan #{scan_number}: scan→fill={scan_to_fill_ms:.1f}ms step_bar={step_bar_ms:.1f}ms vel_div={vel_div:.5f}")

        ch_put("eigen_scans", {
            "ts": _ch_ts_us(),
            "scan_number": scan_number,
            "scan_uuid": str(scan.get("scan_uuid") or ""),
            "vel_div": vel_div,
            "w50_velocity": v50_vel,
            "w750_velocity": v750_vel,
            "instability_50": float(scan.get("instability_50") or 0.0),
            "scan_to_fill_ms": scan_to_fill_ms,
            "step_bar_ms": step_bar_ms,
        })

        if result.get('entry'):
            self.trades_executed += 1
            e = result['entry']
            log(f"ENTRY: {e} [{ALGO_VERSION}]")
            # Cache entry fields for CH trade_events on exit
            tid = e.get('trade_id')
            if tid:
                self._pending_entries[tid] = {
                    'asset': e.get('asset', ''),
                    'side': 'SHORT' if e.get('direction', -1) == -1 else 'LONG',
                    'entry_price': float(e.get('entry_price', 0) or 0),
                    'quantity': round(float(e.get('notional', 0) or 0) / float(e.get('entry_price', 1) or 1), 6),
                    'leverage': float(e.get('leverage', 0) or 0),
                    'vel_div_entry': float(e.get('vel_div', 0) or 0),
                    'boost_at_entry': float(getattr(getattr(self, 'eng', None), 'acb_boost', 1.0) or 1.0),
                    'beta_at_entry': float(getattr(getattr(self, 'eng', None), 'acb_beta', 1.0) or 1.0),
                    'posture': posture_now,
                    'entry_ts': _ch_ts_us(),
                    'entry_date': (self.current_day or ''),
                }
        if result.get('exit'):
            x = result['exit']
            log(f"EXIT: {x} [{ALGO_VERSION}]")
            tid = x.get('trade_id')
            pending = self._pending_entries.pop(tid, {}) if tid else {}
            if pending:
                # exact bar price the engine exited against — prices_dict is still in scope
                exit_price = float(prices_dict.get(pending['asset'], 0) or 0)
                ch_put("trade_events", {
                    "ts": _ch_ts_us(),
                    "date": pending['entry_date'],
                    "strategy": "blue",
                    "asset": pending['asset'],
                    "side": pending['side'],
                    "entry_price": pending['entry_price'],
                    "exit_price": exit_price,
                    "quantity": pending['quantity'],
                    "pnl": float(x.get('net_pnl', 0) or 0),
                    "pnl_pct": float(x.get('pnl_pct', 0) or 0),
                    "exit_reason": str(x.get('reason', 'UNKNOWN')),
                    "vel_div_entry": pending['vel_div_entry'],
                    "boost_at_entry": pending['boost_at_entry'],
                    "beta_at_entry": pending['beta_at_entry'],
                    "posture": pending['posture'],
                    "leverage": pending['leverage'],
                    "bars_held": int(x.get('bars_held', 0) or 0),
                    "regime_signal": 0,
                })

        self._push_state(scan_number, vel_div, vol_ok, self._read_posture())

    except Exception as e:
        # FIX: previously logged only str(e), losing the traceback — include it
        # so failures in this long pipeline are diagnosable from the log alone.
        import traceback
        log(f"ERROR in _process_scan: {e}\n{traceback.format_exc()}")
|
|||
|
|
|
|||
|
|
def on_exf_update(self, event):
    """Hz listener: refresh ACB boost/beta from a sub-day EXF snapshot.

    Pushes the recomputed boost/beta into the engine (when supported) and
    logs at most once every 300 seconds.
    """
    if not event.value: return
    snapshot = json.loads(event.value) if isinstance(event.value, str) else event.value
    if not self.current_day or not self.acb: return
    try:
        w750_vel = getattr(self, 'last_w750_vel', None)
        acb_info = self.acb.get_dynamic_boost_from_hz(
            date_str=self.current_day,
            exf_snapshot=snapshot,
            # FIX: `if w750_vel` treated a legitimate 0.0 velocity as missing;
            # only None means "no value".
            w750_velocity=float(w750_vel) if w750_vel is not None else None
        )
        with self.eng_lock:
            if hasattr(self.eng, 'update_acb_boost'):
                self.eng.update_acb_boost(
                    boost=acb_info['boost'],
                    beta=acb_info['beta']
                )
        now = time.time()
        if now - self._exf_log_time >= 300:
            self._exf_log_time = now
            log(f"ACB subday: boost={acb_info['boost']:.4f} beta={acb_info['beta']:.4f} "
                f"signals={acb_info['signals']:.1f} src={acb_info.get('source','?')}")
        # ACB_EXIT disabled: update_acb_boost() called to keep boost/beta current
        # (ACBv6 intact), but SUBDAY_ACB_NORMALIZATION exits are suppressed.
    except ValueError as e:
        log(f"ACB Stale Data Fallback: {e}")
    except Exception as e:
        log(f"on_exf_update Error: {e}")
|
|||
|
|
|
|||
|
|
def _wire_obf(self, assets):
    """Wire the live order-book feature engine once the asset universe is known.

    Idempotent: does nothing if already wired or *assets* is empty.
    """
    if not assets or self.ob_assets:
        return
    self.ob_assets = assets
    from nautilus_dolphin.nautilus.hz_ob_provider import HZOBProvider
    live_ob = HZOBProvider(
        hz_cluster=HZ_CLUSTER,
        hz_host=HZ_HOST,
        assets=assets,
    )
    self.ob_eng = OBFeatureEngine(live_ob)
    # No preload_date() call — live mode uses step_live() per scan
    self.eng.set_ob_engine(self.ob_eng)
    log(f" OBF wired: HZOBProvider, {len(assets)} assets (LIVE mode)")
|
|||
|
|
|
|||
|
|
def _save_capital(self):
    """Persist capital to HZ (primary) and disk (fallback) so restarts survive HZ loss."""
    capital = getattr(self.eng, 'capital', None)
    # Refuse to checkpoint garbage: None, NaN/inf, or implausibly small values.
    if capital is None or not math.isfinite(capital) or capital < 1.0:
        return
    payload = json.dumps({'capital': capital, 'ts': time.time()})
    # Primary: Hazelcast
    try:
        self.state_map.blocking().put('capital_checkpoint', payload)
    except Exception as e:
        log(f" capital HZ save failed: {e}")
    # Secondary: local disk (survives HZ restart)
    try:
        CAPITAL_DISK_CHECKPOINT.write_text(payload)
    except Exception as e:
        log(f" capital disk save failed: {e}")
|
|||
|
|
|
|||
|
|
def _restore_capital(self):
    """On startup, restore capital from HZ or disk checkpoint.

    A checkpoint is accepted only when the saved value is finite, >= 1.0,
    and less than 72 hours old; otherwise the engine keeps initial_capital.
    """
    def _try_load(raw, source):
        # Returns True when a valid checkpoint from *source* was applied.
        if not raw:
            return False
        try:
            data = json.loads(raw)
            saved = float(data.get('capital', 0))
            age_h = (time.time() - data.get('ts', 0)) / 3600
            if saved >= 1.0 and math.isfinite(saved) and age_h < 72:
                self.eng.capital = saved
                log(f" Capital restored from {source}: ${saved:,.2f} (age {age_h:.1f}h)")
                return True
        except Exception:
            pass  # malformed checkpoint — fall through to the next source
        return False

    # Primary: Hazelcast
    try:
        raw = self.state_map.blocking().get('capital_checkpoint')
        if _try_load(raw, 'HZ'):
            return
    except Exception as e:
        log(f" capital HZ restore failed: {e}")

    # Secondary: disk fallback
    try:
        if CAPITAL_DISK_CHECKPOINT.exists():
            raw = CAPITAL_DISK_CHECKPOINT.read_text()
            if _try_load(raw, 'disk'):
                return
    except Exception as e:
        log(f" capital disk restore failed: {e}")

    log(" Capital: no valid checkpoint — starting at initial_capital")
|
|||
|
|
|
|||
|
|
def _push_state(self, scan_number, vel_div, vol_ok, posture):
    """Publish the engine snapshot + heartbeat to Hazelcast and checkpoint capital.

    Map writes are fire-and-forget (non-blocking futures); any failure is
    logged and swallowed so state publishing never kills scan processing.
    """
    try:
        with self.eng_lock:
            capital = getattr(self.eng, 'capital', 25000.0)
            # Engine uses a single NDPosition object, not a list
            pos = getattr(self.eng, 'position', None)
            if pos is not None:
                open_notional = float(getattr(pos, 'notional', 0) or 0)
                open_positions_list = [{
                    'asset': pos.asset,
                    'side': 'SHORT' if pos.direction == -1 else 'LONG',
                    'entry_price': pos.entry_price,
                    'quantity': round(open_notional / pos.entry_price, 6) if pos.entry_price else 0,
                    'notional': open_notional,
                    'leverage': float(getattr(pos, 'leverage', 0) or 0),
                    'unrealized_pnl': round(pos.pnl_pct * open_notional, 2),
                }]
            else:
                open_notional = 0.0
                open_positions_list = []
            cur_leverage = (open_notional / capital) if capital and capital > 0 and math.isfinite(capital) else 0.0

        snapshot = {
            'capital': capital if math.isfinite(capital) else None,
            'open_positions': open_positions_list,
            'algo_version': ALGO_VERSION,
            'last_scan_number': scan_number, 'last_vel_div': vel_div,
            'vol_ok': vol_ok, 'posture': posture,
            'scans_processed': self.scans_processed,
            'trades_executed': self.trades_executed,
            'bar_idx': self.bar_idx,
            'timestamp': datetime.now(timezone.utc).isoformat(),
            # Leverage envelope — for TUI slider
            'leverage_soft_cap': getattr(self.eng, 'base_max_leverage', 8.0),
            'leverage_abs_cap': getattr(self.eng, 'abs_max_leverage', 9.0),
            'open_notional': round(open_notional, 2),
            'current_leverage': round(cur_leverage, 4),
        }
        future = self.state_map.put('engine_snapshot', json.dumps(snapshot))
        future.add_done_callback(lambda f: None)
        # Heartbeat — MHS checks age < 30s; we run every scan (~11s)
        if self.heartbeat_map is not None:
            hb = json.dumps({
                'ts': time.time(),
                'iso': datetime.now(timezone.utc).isoformat(),
                'run_date': self.current_day,
                'phase': 'trading',
                'flow': 'nautilus_event_trader',
            })
            self.heartbeat_map.put('nautilus_flow_heartbeat', hb)
        # Persist capital so next restart resumes from here
        if capital is not None and math.isfinite(capital) and capital >= 1.0:
            self._save_capital()
    except Exception as e:
        log(f" Failed to push state: {e}")
|
|||
|
|
|
|||
|
|
def run(self):
    """Main entry point: build engine, connect Hz, register listeners, then idle.

    All real work happens in Hz listener callbacks; the foreground loop only
    sleeps until `running` is cleared, then shuts down cleanly.
    """
    global running
    log("=" * 70)
    log("🐬 DOLPHIN Nautilus Event-Driven Trader Starting")
    log("=" * 70)

    self._build_engine()
    self._connect_hz()
    self._restore_capital()
    self._rollover_day()

    def listener(event):
        self.on_scan(event)

    self.features_map.add_entry_listener(
        key='latest_eigen_scan', include_value=True,
        updated_func=listener, added_func=listener
    )

    def exf_listener(event):
        self.on_exf_update(event)

    self.features_map.add_entry_listener(
        key='exf_latest', include_value=True,
        updated_func=exf_listener, added_func=exf_listener
    )

    log("✅ Hz listener registered")
    log(f"🏷️ ALGO_VERSION: {ALGO_VERSION}")
    log("⏳ Waiting for scans...")

    try:
        while running:
            time.sleep(1)
    except KeyboardInterrupt:
        log("Interrupted")
    finally:
        self.shutdown()
|
|||
|
|
|
|||
|
|
def shutdown(self):
    """Stop the worker pool, close the trading day, and disconnect from Hz."""
    log("Shutting down...")
    self._scan_executor.shutdown(wait=False)
    if self.eng and self.current_day:
        try:
            with self.eng_lock:
                summary = self.eng.end_day()
            log(f"end_day: {summary}")
        except Exception as e:
            log(f"end_day failed: {e}")
    if self.hz_client:
        try:
            self.hz_client.shutdown()
            log("Hz disconnected")
        except Exception:
            # FIX: was a bare `except:` — keep best-effort semantics but do not
            # swallow SystemExit/KeyboardInterrupt during teardown.
            pass
    log(f"🛑 Stopped. Scans: {self.scans_processed}, Trades: {self.trades_executed}")
|
|||
|
|
|
|||
|
|
def signal_handler(signum, frame):
    """SIGTERM/SIGINT handler: clear the `running` flag so run() exits its loop."""
    global running
    log(f"Signal {signum} received")
    running = False
|
|||
|
|
|
|||
|
|
def main():
    """Install signal handlers and run the live trader until stopped."""
    signal.signal(signal.SIGTERM, signal_handler)
    signal.signal(signal.SIGINT, signal_handler)
    trader = DolphinLiveTrader()
    trader.run()
|
|||
|
|
|
|||
|
|
if __name__ == '__main__':
    main()
|