Includes core prod + GREEN/BLUE subsystems: - prod/ (BLUE harness, configs, scripts, docs) - nautilus_dolphin/ (GREEN Nautilus-native impl + dvae/ preserved) - adaptive_exit/ (AEM engine + models/bucket_assignments.pkl) - Observability/ (EsoF advisor, TUI, dashboards) - external_factors/ (EsoF producer) - mc_forewarning_qlabs_fork/ (MC regime/envelope) Excludes runtime caches, logs, backups, and reproducible artifacts per .gitignore.
982 lines
45 KiB
Python
Executable File
982 lines
45 KiB
Python
Executable File
#!/usr/bin/env python3
|
||
"""
|
||
DOLPHIN Nautilus Event-Driven Trader
|
||
"""
|
||
import sys
|
||
import json
|
||
import math
|
||
import time
|
||
import signal
|
||
import threading
|
||
from concurrent.futures import ThreadPoolExecutor
|
||
from datetime import datetime, timezone
|
||
from pathlib import Path
|
||
from collections import deque
|
||
|
||
# Stablecoins / pegged assets that must never be traded.
# _process_scan pops these out of every scan's price dict before the engine
# sees it, so asset selection can never pick a pegged pair.
_STABLECOIN_SYMBOLS = frozenset({
    'USDCUSDT', 'BUSDUSDT', 'FDUSDUSDT', 'USDTUSDT', 'TUSDUSDT',
    'DAIUSDT', 'FRAXUSDT', 'USDDUSDT', 'USTCUSDT', 'EURUSDT',
})
sys.path.insert(0, '/mnt/dolphinng5_predict')
|
||
sys.path.insert(0, '/mnt/dolphinng5_predict/nautilus_dolphin')
|
||
|
||
from nautilus_dolphin.nautilus.proxy_boost_engine import create_d_liq_engine
|
||
from nautilus_dolphin.nautilus.esf_alpha_orchestrator import NDPosition
|
||
from nautilus_dolphin.nautilus.adaptive_circuit_breaker import AdaptiveCircuitBreaker
|
||
from nautilus_dolphin.nautilus.ob_features import OBFeatureEngine
|
||
from nautilus_dolphin.nautilus.ob_provider import MockOBProvider
|
||
|
||
try:
|
||
from ch_writer import ch_put, ts_us as _ch_ts_us
|
||
except ImportError:
|
||
def ch_put(*a, **kw): pass
|
||
def _ch_ts_us(): return 0
|
||
|
||
sys.path.insert(0, '/mnt/dolphinng5_predict/prod')
|
||
from dolphin_exit_handler import install_exit_handler
|
||
install_exit_handler("nautilus_trader")
|
||
|
||
# ── Cluster / filesystem endpoints ────────────────────────────────────────
HZ_CLUSTER = "dolphin"          # Hazelcast cluster name
HZ_HOST = "127.0.0.1:5701"      # Hazelcast member address
EIGEN_DIR = Path('/mnt/dolphinng6_data/eigenvalues')  # per-date dirs used for ACB w750 preload

# Disk fallback for the capital checkpoint — survives Hazelcast restarts.
CAPITAL_DISK_CHECKPOINT = Path("/tmp/dolphin_capital_checkpoint.json")

# Keyword arguments for create_d_liq_engine(...) — the live engine config.
ENGINE_KWARGS = dict(
    initial_capital=25000.0, vel_div_threshold=-0.02, vel_div_extreme=-0.05,
    min_leverage=0.5, max_leverage=8.0,  # note: create_d_liq_engine overrides to D_LIQ_SOFT_CAP=8.0
    leverage_convexity=3.0,
    fraction=0.20, fixed_tp_pct=0.0095, stop_pct=1.0, max_hold_bars=250,  # gold spec: 250
    use_direction_confirm=True, dc_lookback_bars=7, dc_min_magnitude_bps=0.75,
    dc_skip_contradicts=True, dc_leverage_boost=1.0, dc_leverage_reduce=0.5,
    use_asset_selection=True, min_irp_alignment=0.0,  # gold spec: no IRP filter
    use_sp_fees=True, use_sp_slippage=True,
    sp_maker_entry_rate=0.62, sp_maker_exit_rate=0.50,
    use_ob_edge=True, ob_edge_bps=5.0, ob_confirm_rate=0.40,
    lookback=100, use_alpha_layers=True, use_dynamic_leverage=True, seed=42,
)

# Rolling BTC price window (bars) for the volatility regime gate in _compute_vol_ok.
BTC_VOL_WINDOW = 50

# Per-bucket SL % used when HIBERNATE fires while a position is open.
# Instead of immediate HIBERNATE_HALT, we arm TP (existing fixed_tp_pct) +
# a per-bucket stop-loss so the position exits cleanly rather than being
# force-closed at whatever price the halt fires at.
# Values derived from AE shadow data + bucket trade analysis (2026-04-19).
# B3 wide: shadow shows mae_norm 5-5.1 before FIXED_TP; 3.5×ATR fires on noise.
# B4 tight: 34.8% WR, 0.80 R:R — cut fast, no recovery value.
# B6 widest: extreme vol (vol_daily_pct 760-864); normal ATR excursions are large.
_BUCKET_SL_PCT: dict = {
    0: 0.015,   # Low-vol high-corr nano-cap
    1: 0.012,   # Med-vol low-corr mid-price (XRP/XLM class)
    2: 0.015,   # Mega-cap BTC/ETH — default (not traded)
    3: 0.025,   # High-vol mid-corr STAR bucket (ENJ/ADA/DOGE) — needs room
    4: 0.008,   # Worst bucket (BNB/LTC/LINK) — cut fast
    5: 0.018,   # High-vol low-corr micro-price (ATOM/TRX class)
    6: 0.030,   # Extreme-vol mid-corr (FET/ZRX) — widest
    'default': 0.015,
}

# Gold-calibrated from full 5-year BTC history: 0.00026414 (stricter, ~2.7x tighter).
# 2026-04-07: switched to 56-day gold window value (0.00009868) — the exact threshold
# used in the T=2155 ROI=+189% backtest. More permissive; paper trading to gather data.
VOL_P60_THRESHOLD = 0.00009868

# Algorithm Versioning
# v1_shakedown: v50-v150 (noise bug), loose vol gate
# v2_gold_fix: CORRECTED v50-v750 macro divergence (matches parquet backtest)
ALGO_VERSION = "v2_gold_fix_v50-v750"

# Persistent, version-tagged trade log (survives reboots; sorts by date)
_LOG_DIR = "/mnt/dolphinng5_predict/prod/logs"
import os as _os; _os.makedirs(_LOG_DIR, exist_ok=True)
_LOG_DATE = datetime.now(timezone.utc).strftime("%Y%m%d")
TRADE_LOG = f"{_LOG_DIR}/nautilus_trader_{_LOG_DATE}_{ALGO_VERSION}.log"
# Run flag — presumably flipped by the installed signal/exit handler; confirm
# against the main-loop code below this view.
running = True
def log(msg):
    """Emit *msg* to stdout and append it to TRADE_LOG, UTC-ISO timestamped."""
    stamped = f"[{datetime.now(timezone.utc).isoformat()}] {msg}"
    print(stamped, flush=True)
    with open(TRADE_LOG, 'a') as logfile:
        logfile.write(stamped + '\n')
class DolphinLiveTrader:
|
||
def __init__(self):
    """Initialise all runtime state; engine/Hz wiring happens in _build_engine/_connect_hz."""
    # Engine + Hazelcast handles (None until wired)
    self.eng = None
    self.hz_client = None
    self.features_map = None
    self.safety_map = None
    self.pnl_map = None
    self.state_map = None
    self.heartbeat_map = None
    # Concurrency: eng_lock serialises all engine access; scans run on a
    # single-worker executor so step_bar calls never interleave.
    self.eng_lock = threading.Lock()
    self._dedup_lock = threading.Lock()  # guards atomic check-and-set on last_scan_number
    self._scan_executor = ThreadPoolExecutor(max_workers=1, thread_name_prefix="scan")
    # Scan bookkeeping
    self.last_scan_number = -1
    self.last_file_mtime = 0
    self.bar_idx = 0            # resets to 0 at each day rollover
    self.current_day = None     # 'YYYY-MM-DD' of the active engine day
    self.trades_executed = 0
    self.scans_processed = 0
    # Volatility gate + posture cache
    self.btc_prices = deque(maxlen=BTC_VOL_WINDOW + 2)
    self.cached_posture = "APEX"
    self.posture_cache_time = 0  # epoch secs of last DOLPHIN_SAFETY read
    # Order-book features / circuit breaker
    self.ob_assets = []
    self.ob_eng = None
    self.acb = None
    self.last_w750_vel = None   # latest w750 velocity, consumed by on_exf_update
    self._pending_entries: dict = {}  # trade_id → entry snapshot (for CH trade_events)
    self._exf_log_time = 0.0  # throttle for on_exf_update logging
    self._ae = None  # AdaptiveExitEngine shadow (parallel, never real exits)
    self._hibernate_protect_active: str | None = None  # trade_id being protected
    self._bucket_assignments: dict = {}  # asset → KMeans bucket_id (loaded from pkl)
def _build_engine(self):
    """Construct and fully wire the live engine.

    Wiring order is load-bearing: ACB first, then the esoteric-hazard guard
    is zeroed (gold spec: MUST precede set_mc_forewarner), then the MC
    forewarner, the shadow AdaptiveExitEngine, and the bucket assignments.
    Each optional subsystem is wired best-effort: failures are logged and
    the engine runs without that subsystem.
    """
    log("Building NDAlphaEngine...")
    self.eng = create_d_liq_engine(**ENGINE_KWARGS)
    log(f" Engine: {type(self.eng).__name__}")
    log(f" Leverage: soft={self.eng.base_max_leverage}x abs={self.eng.abs_max_leverage}x")

    # ACBv6: preload per-date w750 stats when the eigenvalue dir is mounted;
    # otherwise attach an empty circuit breaker.
    if EIGEN_DIR.exists():
        try:
            date_strings = sorted([d.name for d in EIGEN_DIR.iterdir() if d.is_dir()])
            self.acb = AdaptiveCircuitBreaker()
            self.acb.preload_w750(date_strings)
            self.eng.set_acb(self.acb)
            log(" ACBv6: loaded")
        except Exception as e:
            log(f" ACBv6: {e}")
    else:
        self.acb = AdaptiveCircuitBreaker()
        self.eng.set_acb(self.acb)
        log(" ACBv6: loaded (no preload dates)")

    self.eng.set_esoteric_hazard_multiplier(0.0)  # gold spec: init guard, MUST precede set_mc_forewarner
    log(f" Hazard: set_esoteric_hazard_multiplier(0.0) — soft={self.eng.base_max_leverage}x")

    # MC forewarner base config — mirrors ENGINE_KWARGS gold-spec values.
    MC_MODELS_DIR = '/mnt/dolphinng5_predict/nautilus_dolphin/mc_results/models'
    MC_BASE_CFG = {
        'trial_id': 0, 'vel_div_threshold': -0.020, 'vel_div_extreme': -0.050,
        'use_direction_confirm': True, 'dc_lookback_bars': 7,
        'dc_min_magnitude_bps': 0.75, 'dc_skip_contradicts': True,
        'dc_leverage_boost': 1.00, 'dc_leverage_reduce': 0.50,
        'vd_trend_lookback': 10, 'min_leverage': 0.50, 'max_leverage': 8.00,  # gold spec
        'leverage_convexity': 3.00, 'fraction': 0.20, 'use_alpha_layers': True,
        'use_dynamic_leverage': True, 'fixed_tp_pct': 0.0095, 'stop_pct': 1.00,
        'max_hold_bars': 250, 'use_sp_fees': True, 'use_sp_slippage': True,  # gold spec
        'sp_maker_entry_rate': 0.62, 'sp_maker_exit_rate': 0.50,
        'use_ob_edge': True, 'ob_edge_bps': 5.00, 'ob_confirm_rate': 0.40,
        'ob_imbalance_bias': -0.09, 'ob_depth_scale': 1.00,
        'use_asset_selection': True, 'min_irp_alignment': 0.0, 'lookback': 100,  # gold spec
        'acb_beta_high': 0.80, 'acb_beta_low': 0.20, 'acb_w750_threshold_pct': 60,
    }
    if Path(MC_MODELS_DIR).exists():
        try:
            from mc.mc_ml import DolphinForewarner
            forewarner = DolphinForewarner(models_dir=MC_MODELS_DIR)
            self.eng.set_mc_forewarner(forewarner, MC_BASE_CFG)
            log(" MC-Forewarner: wired")
        except Exception as e:
            log(f" MC-Forewarner: {e}")

    # Shadow-mode adaptive exit engine — observes every bar, never triggers real exits.
    try:
        from adaptive_exit.adaptive_exit_engine import AdaptiveExitEngine
        self._ae = AdaptiveExitEngine.load()
        log(" AdaptiveExitEngine: loaded (shadow mode — no real exits)")
    except Exception as e:
        log(f" AdaptiveExitEngine: {e} — shadow disabled")

    self._load_bucket_assignments()
def _load_bucket_assignments(self):
    """Load the pickled KMeans asset→bucket_id map used for hibernate-protection SL levels.

    Best-effort: on any failure the map stays empty and hibernate protect
    falls back to _BUCKET_SL_PCT['default'].
    """
    pkl_path = Path('/mnt/dolphinng5_predict/adaptive_exit/models/bucket_assignments.pkl')
    try:
        import pickle
        with pkl_path.open('rb') as fh:
            payload = pickle.load(fh)
        self._bucket_assignments = payload.get('assignments', {})
        log(f" BucketAssignments: {len(self._bucket_assignments)} assets loaded for hibernate protection")
    except Exception as e:
        log(f" BucketAssignments: {e} — hibernate protect will use default SL={_BUCKET_SL_PCT['default']*100:.1f}%")
# ── CH position-state persistence ─────────────────────────────────────────
|
||
|
||
def _ps_write_open(self, tid: str, entry: dict):
    """Write the OPEN row for a freshly-entered trade to ClickHouse position_state.

    Fire-and-forget via ch_put: any failure (missing key, CH down) is
    logged and never raised into the scan path.
    """
    try:
        asset = entry['asset']
        qty = entry['quantity']
        px = entry['entry_price']
        row = {
            "ts": entry['entry_ts'],
            "trade_id": tid,
            "asset": asset,
            "direction": -1 if entry['side'] == 'SHORT' else 1,
            "entry_price": px,
            "quantity": qty,
            "notional": round(qty * px, 4),
            "leverage": entry['leverage'],
            "bucket_id": int(self._bucket_assignments.get(asset, -1)),
            "entry_bar": self.bar_idx,
            "status": "OPEN",
            "exit_reason": "",
            "pnl": 0.0,
            "bars_held": 0,
        }
        ch_put("position_state", row)
    except Exception as e:
        log(f" position_state OPEN write failed: {e}")
def _ps_write_closed(self, tid: str, pending: dict, x: dict):
    """Write the CLOSED row for a finished trade to ClickHouse position_state.

    The row shares the trade_id key with the earlier OPEN row and supersedes
    it (ReplacingMergeTree). Fire-and-forget: failures are logged, never raised.
    """
    try:
        asset = pending.get('asset', '')
        qty = pending.get('quantity', 0.0)
        px = pending.get('entry_price', 0.0)
        row = {
            "ts": _ch_ts_us(),
            "trade_id": tid,
            "asset": asset,
            "direction": -1 if pending.get('side') == 'SHORT' else 1,
            "entry_price": px,
            "quantity": qty,
            "notional": round(qty * px, 4),
            "leverage": pending.get('leverage', 0.0),
            "bucket_id": int(self._bucket_assignments.get(asset, -1)),
            "entry_bar": 0,
            "status": "CLOSED",
            "exit_reason": str(x.get('reason', 'UNKNOWN')),
            "pnl": float(x.get('net_pnl', 0) or 0),
            "bars_held": int(x.get('bars_held', 0) or 0),
        }
        ch_put("position_state", row)
    except Exception as e:
        log(f" position_state CLOSED write failed: {e}")
def _restore_position_state(self):
    """On startup: query ClickHouse for the latest OPEN position and restore it.

    Rebuilds the engine's NDPosition, re-registers it with the exit manager
    (under eng_lock), and repopulates _pending_entries so the eventual exit
    writes its trade_events / position_state rows correctly. Best-effort:
    any failure is logged and startup continues with no restored position.
    """
    try:
        import urllib.request, base64 as _b64
        # FINAL collapses the ReplacingMergeTree so a CLOSED row supersedes its OPEN row.
        sql = ("SELECT trade_id, asset, direction, entry_price, quantity, "
               "notional, leverage, bucket_id, bars_held "
               "FROM dolphin.position_state FINAL "
               "WHERE status = 'OPEN' "
               "ORDER BY ts DESC LIMIT 1 FORMAT TabSeparated")
        # NOTE(review): ClickHouse credentials are hardcoded here — consider env/config.
        req = urllib.request.Request(
            "http://localhost:8123/?database=dolphin",
            data=sql.encode(),
            headers={"Authorization": "Basic " +
                     _b64.b64encode(b"dolphin:dolphin_ch_2026").decode()})
        with urllib.request.urlopen(req, timeout=5) as r:
            row = r.read().decode().strip()
        if not row:
            log(" position_state: no open position to restore")
            return

        cols = row.split('\t')
        if len(cols) < 9:
            log(f" position_state: unexpected row format: {row}")
            return

        trade_id = cols[0]
        asset = cols[1]
        direction = int(cols[2])
        entry_price = float(cols[3])
        quantity = float(cols[4])
        notional = float(cols[5])
        leverage = float(cols[6])
        bucket_id = int(cols[7])   # parsed for completeness; not used below
        stored_bars = int(cols[8])

        # Estimate entry_bar so MAX_HOLD countdown continues from where it left off
        restored_entry_bar = max(0, self.bar_idx - stored_bars)

        pos = NDPosition(
            trade_id = trade_id,
            asset = asset,
            direction = direction,
            entry_price = entry_price,
            entry_bar = restored_entry_bar,
            notional = notional,
            leverage = leverage,
            # back out the capital fraction; max(..., 1.0) guards divide-by-zero
            fraction = notional / max(self.eng.capital * leverage, 1.0),
            entry_vel_div = 0.0,
            bucket_idx = 0,  # signal-strength bucket (not KMeans); 0=safe default
            current_price = entry_price,
        )
        with self.eng_lock:
            self.eng.position = pos
            self.eng.exit_manager.setup_position(
                trade_id, entry_price, direction, restored_entry_bar,
            )
        # NOTE: do NOT arm hibernate protect here.
        # _day_posture starts as 'APEX' — the posture sync block on the
        # first incoming scan will detect the APEX→HIBERNATE transition
        # and call _hibernate_protect_position() at the right moment.

        # Rebuild _pending_entries so the exit CH write fires correctly
        side = 'SHORT' if direction == -1 else 'LONG'
        self._pending_entries[trade_id] = {
            'asset': asset,
            'side': side,
            'entry_price': entry_price,
            'quantity': quantity,
            'leverage': leverage,
            'vel_div_entry': 0.0,     # unknown after restart — neutral defaults
            'boost_at_entry': 1.0,
            'beta_at_entry': 1.0,
            'posture': 'RESTORED',    # marks the row as a restart recovery
            'entry_ts': _ch_ts_us(),
            'entry_date': (self.current_day or ''),
        }
        log(f" position_state RESTORED: {asset} {side} entry={entry_price} "
            f"notional={notional:.0f} bars_held≈{stored_bars} trade={trade_id}")

    except Exception as e:
        log(f" position_state restore error: {e}")
def _hibernate_protect_position(self):
    """Arm per-bucket TP+SL instead of an immediate HIBERNATE_HALT.

    Must be called under eng_lock with an open position. Patches
    stop_pct_override on the live exit_manager state so the position exits
    via FIXED_TP or STOP_LOSS rather than being force-closed, and records
    the trade_id in _hibernate_protect_active so the exit path can re-label
    the reason and finalize posture once the position closes.
    """
    position = self.eng.position
    if position is None:
        return

    bucket_key = self._bucket_assignments.get(position.asset, 'default')
    stop_pct = _BUCKET_SL_PCT.get(bucket_key, _BUCKET_SL_PCT['default'])
    take_pct = self.eng.exit_manager.fixed_tp_pct

    # Patch the live exit_manager state for this trade_id
    state = self.eng.exit_manager._positions.get(position.trade_id)
    if state is None:
        # Position not registered in exit_manager (shouldn't happen, but be safe)
        log(f" HIBERNATE_PROTECT: trade {position.trade_id} not in exit_manager — arming anyway via re-setup")
        self.eng.exit_manager.setup_position(
            position.trade_id, position.entry_price, position.direction, position.entry_bar,
            stop_pct_override=stop_pct,
        )
    else:
        state['stop_pct_override'] = stop_pct

    self._hibernate_protect_active = position.trade_id
    log(f"HIBERNATE_PROTECT armed: {position.asset} B{bucket_key} "
        f"SL={stop_pct*100:.2f}% TP={take_pct*100:.2f}% trade={position.trade_id}")
def _connect_hz(self):
    """Connect to the Hazelcast cluster and bind every shared map this trader uses."""
    log("Connecting to Hazelcast...")
    import hazelcast
    self.hz_client = hazelcast.HazelcastClient(cluster_name=HZ_CLUSTER, cluster_members=[HZ_HOST])
    for attr, map_name in (
        ('features_map', "DOLPHIN_FEATURES"),
        ('safety_map', "DOLPHIN_SAFETY"),
        ('pnl_map', "DOLPHIN_PNL_BLUE"),
        ('state_map', "DOLPHIN_STATE_BLUE"),
        ('heartbeat_map', "DOLPHIN_HEARTBEAT"),
    ):
        setattr(self, attr, self.hz_client.get_map(map_name))
    # Immediate heartbeat — prevents Cat1=0 during startup gap
    try:
        beat = {
            'ts': time.time(),
            'iso': datetime.now(timezone.utc).isoformat(),
            'phase': 'starting',
            'flow': 'nautilus_event_trader',
        }
        self.heartbeat_map.blocking().put('nautilus_flow_heartbeat', json.dumps(beat))
    except Exception:
        pass  # heartbeat is best-effort; connection itself already succeeded
    log(" Hz connected")
def _read_posture(self):
|
||
now = time.time()
|
||
if now - self.posture_cache_time < 10:
|
||
return self.cached_posture
|
||
try:
|
||
posture_raw = self.safety_map.blocking().get("latest") or self.safety_map.blocking().get("posture")
|
||
if posture_raw:
|
||
if isinstance(posture_raw, str):
|
||
try:
|
||
parsed = json.loads(posture_raw)
|
||
self.cached_posture = parsed.get("posture", posture_raw)
|
||
except (json.JSONDecodeError, AttributeError):
|
||
self.cached_posture = posture_raw
|
||
else:
|
||
self.cached_posture = posture_raw.get("posture", "APEX")
|
||
self.posture_cache_time = now
|
||
except:
|
||
pass
|
||
return self.cached_posture
|
||
|
||
def _rollover_day(self):
    """Begin a new engine day when the UTC date changes.

    Double-checked under eng_lock so exactly one thread calls begin_day().
    Also pre-warms the ACB boost cache for the new date from the latest
    Hazelcast EsoF/eigen snapshots (best-effort — failures are logged).
    """
    today = datetime.now(timezone.utc).strftime('%Y-%m-%d')
    if today == self.current_day:
        return
    posture = self._read_posture()  # may hit Hazelcast — read before taking the lock
    with self.eng_lock:
        if today != self.current_day:  # double-checked: only one thread calls begin_day
            if getattr(self, 'acb', None):
                try:
                    exf_raw = self.features_map.blocking().get('exf_latest') if self.features_map else None
                    es_raw = self.features_map.blocking().get('latest_eigen_scan') if self.features_map else None

                    # Map values may arrive either as JSON strings or dicts.
                    exf_snapshot = json.loads(exf_raw) if isinstance(exf_raw, str) else (exf_raw or {})
                    eigen_scan = json.loads(es_raw) if isinstance(es_raw, str) else (es_raw or {})

                    w750_vel = eigen_scan.get('w750_velocity', 0.0)

                    if exf_snapshot:
                        self.acb.get_dynamic_boost_from_hz(
                            date_str=today,
                            exf_snapshot=exf_snapshot,
                            w750_velocity=float(w750_vel) if w750_vel else None
                        )
                        log(f"ACB: Pre-warmed cache for {today} from HZ")
                except Exception as e:
                    log(f"ACB Rollover Error: {e}")

            self.eng.begin_day(today, posture=posture)
            self.bar_idx = 0  # bar counter restarts each engine day
            self.current_day = today
            log(f"begin_day({today}) called with posture={posture}")
def _compute_vol_ok(self, scan):
|
||
assets = scan.get('assets', [])
|
||
prices = scan.get('asset_prices', [])
|
||
if not assets or not prices:
|
||
return True
|
||
prices_dict = dict(zip(assets, prices))
|
||
btc_price = prices_dict.get('BTCUSDT')
|
||
if btc_price is None:
|
||
return True
|
||
self.btc_prices.append(float(btc_price))
|
||
if len(self.btc_prices) < BTC_VOL_WINDOW:
|
||
return True
|
||
import numpy as np
|
||
arr = np.array(self.btc_prices)
|
||
dvol = float(np.std(np.diff(arr) / arr[:-1]))
|
||
return dvol > VOL_P60_THRESHOLD
|
||
|
||
@staticmethod
|
||
def _normalize_ng7(scan: dict) -> dict:
|
||
"""
|
||
Promote NG7-format scan to NG5-compatible flat dict.
|
||
NG7 embeds eigenvalue windows and prices inside result{} — the engine
|
||
expects flat top-level fields. Mapping derived from continuous_convert.py:
|
||
vel_div = w50_velocity − w750_velocity (fast minus slow eigenvalue velocity)
|
||
w50_velocity = multi_window_results["50"].tracking_data.lambda_max_velocity
|
||
w750_velocity = multi_window_results["750"].tracking_data.lambda_max_velocity
|
||
assets = sorted(current_prices.keys()), BTCUSDT always last
|
||
"""
|
||
result = scan.get('result') or {}
|
||
mw = result.get('multi_window_results') or {}
|
||
|
||
def _vel(win):
|
||
v = (mw.get(str(win)) or {}).get('tracking_data', {}).get('lambda_max_velocity')
|
||
try:
|
||
f = float(v)
|
||
return f if math.isfinite(f) else 0.0
|
||
except (TypeError, ValueError):
|
||
return 0.0
|
||
|
||
v50 = _vel(50)
|
||
v150 = _vel(150)
|
||
v750 = _vel(750)
|
||
|
||
cp = (result.get('pricing_data') or {}).get('current_prices') or {}
|
||
assets = [a for a in cp if a != 'BTCUSDT']
|
||
if 'BTCUSDT' in cp:
|
||
assets.append('BTCUSDT') # BTC always last — matches NG5/Arrow convention
|
||
prices = [float(cp[a]) for a in assets]
|
||
|
||
instability = float((result.get('regime_prediction') or {})
|
||
.get('instability_score') or 0.0)
|
||
|
||
return {
|
||
**scan,
|
||
'vel_div': v50 - v750,
|
||
'w50_velocity': v50,
|
||
'w750_velocity': v750,
|
||
'assets': assets,
|
||
'asset_prices': prices,
|
||
'instability_50': instability,
|
||
}
|
||
|
||
def on_scan(self, event):
|
||
"""Reactor-thread entry point — dispatches immediately to worker thread."""
|
||
if not event.value:
|
||
return
|
||
listener_time = time.time()
|
||
self._scan_executor.submit(self._process_scan, event, listener_time)
|
||
|
||
def _process_scan(self, event, listener_time):
    """Worker-thread handler for one eigen scan (runs on the single _scan_executor).

    Pipeline: parse → NG7 normalise → dedup by scan_number → day rollover →
    OBF wiring → posture sync → engine step_bar → CH telemetry →
    entry/exit bookkeeping → shadow-AE evaluation → state push.
    Any exception is caught and logged so the executor thread never dies.

    Args:
        event: Hazelcast entry event; event.value is the scan (JSON str or dict).
        listener_time: time.time() captured in on_scan, for scan→fill latency.
    """
    try:
        if not event.value:
            return

        scan = json.loads(event.value) if isinstance(event.value, str) else event.value

        # Normalise NG7 format → NG5-compatible flat dict before any field access
        if scan.get('version') == 'NG7':
            scan = self._normalize_ng7(scan)

        scan_number = int(scan.get('scan_number') or 0)

        # Dedup: scan_number is authoritative (monotonically increasing).
        # file_mtime / timestamp are unreliable across NG7 restart probes.
        with self._dedup_lock:
            if scan_number > 0 and scan_number <= self.last_scan_number:
                return
            self.last_scan_number = scan_number
        self.scans_processed += 1

        self._rollover_day()

        assets = scan.get('assets') or []
        # One-time OBF wiring on the first scan that carries an asset list
        if assets and not self.ob_assets:
            self._wire_obf(assets)

        prices = scan.get('asset_prices') or []
        if assets and prices and len(assets) != len(prices):
            log(f"WARN scan #{scan_number}: assets/prices mismatch "
                f"({len(assets)}≠{len(prices)}) — dropped")
            return
        prices_dict = dict(zip(assets, prices)) if assets and prices else {}
        # Remove stablecoins — they should never be selected as a trade asset
        for sym in _STABLECOIN_SYMBOLS:
            prices_dict.pop(sym, None)

        vol_ok = self._compute_vol_ok(scan)

        # Sanitise the engine inputs: clamp any non-finite velocities to 0.0
        vel_div = float(scan.get('vel_div') or 0.0)
        if not math.isfinite(vel_div):
            log(f"WARN scan #{scan_number}: non-finite vel_div={vel_div} — clamped to 0.0")
            vel_div = 0.0

        v50_vel = float(scan.get('w50_velocity') or 0.0)
        v750_vel = float(scan.get('w750_velocity') or 0.0)
        if not math.isfinite(v50_vel): v50_vel = 0.0
        if not math.isfinite(v750_vel): v750_vel = 0.0
        self.last_w750_vel = v750_vel  # consumed by on_exf_update

        # Feed live OB data into OBF engine for this bar (AGENT_SPEC_OBF_LIVE_SWITCHOVER)
        if self.ob_eng is not None and self.ob_assets:
            self.ob_eng.step_live(self.ob_assets, self.bar_idx)

        # Live posture sync — update engine posture + regime_dd_halt together
        posture_now = self._read_posture()
        with self.eng_lock:
            prev_posture = getattr(self.eng, '_day_posture', 'APEX')
            if posture_now != prev_posture:
                if posture_now in ('TURTLE', 'HIBERNATE'):
                    self.eng.regime_dd_halt = True  # always block new entries
                    if (posture_now == 'HIBERNATE'
                            and self.eng.position is not None
                            and not self._hibernate_protect_active):
                        # Position in flight: arm TP+SL instead of letting
                        # _manage_position() fire HIBERNATE_HALT next bar.
                        # _day_posture stays at prev value — no HALT fires.
                        self._hibernate_protect_position()
                    else:
                        self.eng._day_posture = posture_now
                        log(f"POSTURE_SYNC: {posture_now} — halt set")
                else:
                    self.eng._day_posture = posture_now
                    self.eng.regime_dd_halt = False
                    if self._hibernate_protect_active:
                        log(f"POSTURE_SYNC: {posture_now} — posture recovered, clearing protect mode")
                        self._hibernate_protect_active = None
                    else:
                        log(f"POSTURE_SYNC: {posture_now} — halt lifted")

        # Engine step for this bar — the core trading decision
        step_start = time.time()
        with self.eng_lock:
            result = self.eng.step_bar(
                bar_idx=self.bar_idx, vel_div=vel_div, prices=prices_dict,
                vol_regime_ok=vol_ok, v50_vel=v50_vel, v750_vel=v750_vel
            )
            self.bar_idx += 1
        scan_to_fill_ms = (time.time() - listener_time) * 1000
        step_bar_ms = (time.time() - step_start) * 1000
        log(f"LATENCY scan #{scan_number}: scan→fill={scan_to_fill_ms:.1f}ms step_bar={step_bar_ms:.1f}ms vel_div={vel_div:.5f}")

        # Per-scan telemetry row (fire-and-forget)
        ch_put("eigen_scans", {
            "ts": _ch_ts_us(),
            "scan_number": scan_number,
            "scan_uuid": str(scan.get("scan_uuid") or ""),
            "vel_div": vel_div,
            "w50_velocity": v50_vel,
            "w750_velocity": v750_vel,
            "instability_50": float(scan.get("instability_50") or 0.0),
            "scan_to_fill_ms": scan_to_fill_ms,
            "step_bar_ms": step_bar_ms,
        })

        if result.get('entry'):
            self.trades_executed += 1
            e = result['entry']
            log(f"ENTRY: {e} [{ALGO_VERSION}]")
            # Cache entry fields for CH trade_events on exit
            tid = e.get('trade_id')
            if tid:
                self._pending_entries[tid] = {
                    'asset': e.get('asset', ''),
                    'side': 'SHORT' if e.get('direction', -1) == -1 else 'LONG',
                    'entry_price': float(e.get('entry_price', 0) or 0),
                    'quantity': round(float(e.get('notional', 0) or 0) / float(e.get('entry_price', 1) or 1), 6),
                    'leverage': float(e.get('leverage', 0) or 0),
                    'vel_div_entry': float(e.get('vel_div', 0) or 0),
                    'boost_at_entry': float(getattr(getattr(self, 'eng', None), 'acb_boost', 1.0) or 1.0),
                    'beta_at_entry': float(getattr(getattr(self, 'eng', None), 'acb_beta', 1.0) or 1.0),
                    'posture': posture_now,
                    'entry_ts': _ch_ts_us(),
                    'entry_date': (self.current_day or ''),
                }
                # Persist position to CH so restarts can recover it
                self._ps_write_open(tid, self._pending_entries[tid])
                # Shadow AE: notify of entry (vel_div at entry bar is in scope)
                if self._ae is not None:
                    try:
                        self._ae.on_entry(
                            trade_id=tid,
                            asset=e.get('asset', ''),
                            direction=int(e.get('direction', -1)),
                            entry_price=float(e.get('entry_price', 0) or 0),
                            vel_div_entry=vel_div,
                        )
                    except Exception:
                        pass  # shadow AE must never affect the real trade path

        if result.get('exit'):
            x = result['exit']
            tid = x.get('trade_id')
            # Hibernate-protected exits: re-label reason, finalize posture
            if tid and self._hibernate_protect_active == tid:
                _orig = x.get('reason', '')
                _map = {'FIXED_TP': 'HIBERNATE_TP', 'STOP_LOSS': 'HIBERNATE_SL',
                        'MAX_HOLD': 'HIBERNATE_MAXHOLD'}
                x['reason'] = _map.get(_orig, f'HIBERNATE_{_orig}')
                self._hibernate_protect_active = None
                # Position closed — now safe to commit posture to HIBERNATE
                _cur_posture = self._read_posture()
                if _cur_posture == 'HIBERNATE':
                    self.eng._day_posture = 'HIBERNATE'
                    log(f"HIBERNATE_PROTECT: closed via {x['reason']} — posture finalized HIBERNATE")
                else:
                    log(f"HIBERNATE_PROTECT: closed via {x['reason']} — posture recovered to {_cur_posture}")
            log(f"EXIT: {x} [{ALGO_VERSION}]")
            tid = x.get('trade_id')
            pending = self._pending_entries.pop(tid, {}) if tid else {}
            if pending:
                # exact bar price the engine exited against — prices_dict is still in scope
                exit_price = float(prices_dict.get(pending['asset'], 0) or 0)
                ch_put("trade_events", {
                    "ts": _ch_ts_us(),
                    "date": pending['entry_date'],
                    "strategy": "blue",
                    "asset": pending['asset'],
                    "side": pending['side'],
                    "entry_price": pending['entry_price'],
                    "exit_price": exit_price,
                    "quantity": pending['quantity'],
                    "pnl": float(x.get('net_pnl', 0) or 0),
                    "pnl_pct": float(x.get('pnl_pct', 0) or 0),
                    "exit_reason": str(x.get('reason', 'UNKNOWN')),
                    "vel_div_entry": pending['vel_div_entry'],
                    "boost_at_entry": pending['boost_at_entry'],
                    "beta_at_entry": pending['beta_at_entry'],
                    "posture": pending['posture'],
                    "leverage": pending['leverage'],
                    "bars_held": int(x.get('bars_held', 0) or 0),
                    "regime_signal": 0,
                })
                # Mark position closed in CH (supersedes OPEN row via ReplacingMergeTree)
                self._ps_write_closed(tid, pending, x)
            # Shadow AE: record outcome for online update
            if self._ae is not None and tid:
                try:
                    self._ae.on_exit(
                        trade_id=tid,
                        actual_exit_reason=str(x.get('reason', 'UNKNOWN')),
                        pnl_pct=float(x.get('pnl_pct', 0) or 0),
                    )
                except Exception:
                    pass  # shadow AE must never affect the real trade path

        # Shadow AE: per-bar evaluate for all open trades — daemon thread, zero hot-path impact
        if self._ae is not None and self._pending_entries:
            _ae_ref = self._ae
            _pending_snap = dict(self._pending_entries)  # shallow copy under GIL
            _prices_snap = dict(prices_dict)
            _vel_now = vel_div
            _bar = self.bar_idx
            def _ae_eval():
                for _tid, _p in _pending_snap.items():
                    try:
                        _cur = _prices_snap.get(_p['asset'], 0) or 0
                        if not _cur:
                            continue
                        _shadow = _ae_ref.evaluate(
                            trade_id=_tid,
                            asset=_p['asset'],
                            direction=-1,
                            entry_price=_p['entry_price'],
                            current_price=_cur,
                            bars_held=_bar,
                            vel_div_now=_vel_now,
                        )
                        _ae_ref.log_shadow(_shadow)
                    except Exception:
                        pass  # per-trade shadow failures are silently skipped
            threading.Thread(target=_ae_eval, daemon=True).start()

        self._push_state(scan_number, vel_div, vol_ok, self._read_posture())

    except Exception as e:
        log(f"ERROR in _process_scan: {e}")
def on_exf_update(self, event):
    """Hazelcast listener for EsoF snapshots: refresh ACB boost/beta sub-day.

    Feeds the snapshot (plus the last seen w750 velocity) into the ACB,
    pushes the resulting boost/beta into the engine, and — if the engine
    reports a sub-day exit — writes the corresponding trade_events row.
    Logging of routine updates is throttled to one line per 300 s.
    """
    if not event.value: return
    snapshot = json.loads(event.value) if isinstance(event.value, str) else event.value
    # No active day or no circuit breaker yet → nothing to update
    if not self.current_day or not self.acb: return
    try:
        w750_vel = getattr(self, 'last_w750_vel', None)
        acb_info = self.acb.get_dynamic_boost_from_hz(
            date_str=self.current_day,
            exf_snapshot=snapshot,
            w750_velocity=float(w750_vel) if w750_vel else None
        )
        with self.eng_lock:
            if hasattr(self.eng, 'update_acb_boost'):
                subday_exit = self.eng.update_acb_boost(
                    boost=acb_info['boost'],
                    beta=acb_info['beta']
                )
                if subday_exit is not None:
                    log(f"SUBDAY_EXIT: {subday_exit} [{ALGO_VERSION}]")
                    tid = subday_exit.get('trade_id')
                    if tid:
                        pending = self._pending_entries.pop(tid, {})
                        ch_put("trade_events", {
                            "ts": _ch_ts_us(),
                            "date": self.current_day or '',
                            "strategy": "blue",
                            "asset": pending.get('asset', subday_exit.get('asset', '')),
                            "side": pending.get('side', 'SHORT'),
                            "entry_price": pending.get('entry_price', 0),
                            "exit_price": float(subday_exit.get('exit_price', 0) or 0),
                            "quantity": round(float(pending.get('notional', 0) or 0) / max(float(pending.get('entry_price', 1) or 1), 1e-12), 6),
                            "pnl": float(subday_exit.get('net_pnl', 0) or 0),
                            "pnl_pct": float(subday_exit.get('pnl_pct', 0) or 0),
                            "exit_reason": str(subday_exit.get('reason', 'SUBDAY_ACB_NORMALIZATION')),
                            "vel_div_entry": float(pending.get('vel_div_entry', 0) or 0),
                            "boost_at_entry": float(pending.get('boost_at_entry', 0) or 0),
                            "beta_at_entry": float(pending.get('beta_at_entry', 0) or 0),
                            "posture": pending.get('posture', ''),
                            "leverage": float(pending.get('leverage', 0) or 0),
                            "bars_held": int(subday_exit.get('bars_held', 0) or 0),
                            "regime_signal": 0,
                        })
        # Throttled status line — at most one every 5 minutes
        now = time.time()
        if now - self._exf_log_time >= 300:
            self._exf_log_time = now
            log(f"ACB subday: boost={acb_info['boost']:.4f} beta={acb_info['beta']:.4f} "
                f"signals={acb_info['signals']:.1f} src={acb_info.get('source','?')}")
        # ACB_EXIT disabled: update_acb_boost() called to keep boost/beta current
        # (ACBv6 intact), but SUBDAY_ACB_NORMALIZATION exits are suppressed.
    except ValueError as e:
        log(f"ACB Stale Data Fallback: {e}")
    except Exception as e:
        log(f"on_exf_update Error: {e}")
def _wire_obf(self, assets):
    """Wire a live Hazelcast order-book provider into the engine.

    Idempotent: does nothing when no assets are given or when an
    OB provider has already been attached (self.ob_assets is set).
    """
    if not assets or self.ob_assets:
        return
    self.ob_assets = assets
    # Imported lazily so the module loads even if the HZ provider is absent.
    from nautilus_dolphin.nautilus.hz_ob_provider import HZOBProvider
    provider = HZOBProvider(
        hz_cluster=HZ_CLUSTER,
        hz_host=HZ_HOST,
        assets=assets,
    )
    self.ob_eng = OBFeatureEngine(provider)
    # No preload_date() call — live mode uses step_live() per scan
    self.eng.set_ob_engine(self.ob_eng)
    log(f" OBF wired: HZOBProvider, {len(assets)} assets (LIVE mode)")
def _save_capital(self):
    """Persist capital to HZ (primary) and disk (fallback) so restarts survive HZ loss."""
    current = getattr(self.eng, 'capital', None)
    # Refuse to checkpoint garbage: missing, NaN/inf, or sub-$1 capital.
    if current is None or not math.isfinite(current) or current < 1.0:
        return
    blob = json.dumps({'capital': current, 'ts': time.time()})
    # Primary: Hazelcast
    try:
        self.state_map.blocking().put('capital_checkpoint', blob)
    except Exception as e:
        log(f" capital HZ save failed: {e}")
    # Secondary: local disk (survives HZ restart)
    try:
        CAPITAL_DISK_CHECKPOINT.write_text(blob)
    except Exception as e:
        log(f" capital disk save failed: {e}")
def _restore_capital(self):
    """On startup, restore capital from HZ or disk checkpoint."""

    def _accept(raw_json, origin):
        # Accept a checkpoint only when it parses, is a finite value of
        # at least $1, and is younger than 72 hours; otherwise reject.
        if not raw_json:
            return False
        try:
            payload = json.loads(raw_json)
            amount = float(payload.get('capital', 0))
            hours_old = (time.time() - payload.get('ts', 0)) / 3600
            if math.isfinite(amount) and amount >= 1.0 and hours_old < 72:
                self.eng.capital = amount
                log(f" Capital restored from {origin}: ${amount:,.2f} (age {hours_old:.1f}h)")
                return True
        except Exception:
            pass
        return False

    # Primary: Hazelcast
    try:
        if _accept(self.state_map.blocking().get('capital_checkpoint'), 'HZ'):
            return
    except Exception as e:
        log(f" capital HZ restore failed: {e}")

    # Secondary: disk fallback
    try:
        if CAPITAL_DISK_CHECKPOINT.exists():
            if _accept(CAPITAL_DISK_CHECKPOINT.read_text(), 'disk'):
                return
    except Exception as e:
        log(f" capital disk restore failed: {e}")

    log(" Capital: no valid checkpoint — starting at initial_capital")
def _push_state(self, scan_number, vel_div, vol_ok, posture):
    """Publish an engine snapshot and heartbeat to Hazelcast, then checkpoint capital.

    Args:
        scan_number: sequence number of the eigen-scan just processed.
        vel_div: velocity-divergence value from that scan.
        vol_ok: volatility gate flag from that scan.
        posture: current engine posture label.

    Best-effort: any failure is logged and swallowed so the scan loop
    never dies on a state-publish error.
    """
    try:
        # NOTE(review): original indentation was lost; lock scope below is
        # reconstructed to cover the engine reads — confirm against history.
        with self.eng_lock:
            capital = getattr(self.eng, 'capital', 25000.0)
            # Engine uses a single NDPosition object, not a list
            pos = getattr(self.eng, 'position', None)
            if pos is not None:
                open_notional = float(getattr(pos, 'notional', 0) or 0)
                open_positions_list = [{
                    'asset': pos.asset,
                    'side': 'SHORT' if pos.direction == -1 else 'LONG',
                    'entry_price': pos.entry_price,
                    # Guard against a zero entry price before dividing.
                    'quantity': round(open_notional / pos.entry_price, 6) if pos.entry_price else 0,
                    'notional': open_notional,
                    'leverage': float(getattr(pos, 'leverage', 0) or 0),
                    'unrealized_pnl': round(pos.pnl_pct * open_notional, 2),
                }]
            else:
                open_notional = 0.0
                open_positions_list = []
            # Effective leverage = open notional / capital; 0 when capital is unusable.
            cur_leverage = (open_notional / capital) if capital and capital > 0 and math.isfinite(capital) else 0.0

        snapshot = {
            # JSON has no NaN/inf — publish None when capital is non-finite.
            'capital': capital if math.isfinite(capital) else None,
            'open_positions': open_positions_list,
            'algo_version': ALGO_VERSION,
            'last_scan_number': scan_number, 'last_vel_div': vel_div,
            'vol_ok': vol_ok, 'posture': posture,
            'scans_processed': self.scans_processed,
            'trades_executed': self.trades_executed,
            'bar_idx': self.bar_idx,
            'timestamp': datetime.now(timezone.utc).isoformat(),
            # Leverage envelope — for TUI slider
            'leverage_soft_cap': getattr(self.eng, 'base_max_leverage', 8.0),
            'leverage_abs_cap': getattr(self.eng, 'abs_max_leverage', 9.0),
            'open_notional': round(open_notional, 2),
            'current_leverage': round(cur_leverage, 4),
        }
        # Non-blocking put: the no-op callback just consumes the future.
        future = self.state_map.put('engine_snapshot', json.dumps(snapshot))
        future.add_done_callback(lambda f: None)
        # Heartbeat — MHS checks age < 30s; we run every scan (~11s)
        if self.heartbeat_map is not None:
            hb = json.dumps({
                'ts': time.time(),
                'iso': datetime.now(timezone.utc).isoformat(),
                'run_date': self.current_day,
                'phase': 'trading',
                'flow': 'nautilus_event_trader',
            })
            self.heartbeat_map.put('nautilus_flow_heartbeat', hb)
        # Persist capital so next restart resumes from here
        if capital is not None and math.isfinite(capital) and capital >= 1.0:
            self._save_capital()
    except Exception as e:
        log(f" Failed to push state: {e}")
def run(self):
    """Main entry: build the engine, connect HZ, register scan/EXF listeners, then idle until stopped."""
    global running
    banner = "=" * 70
    log(banner)
    log("🐬 DOLPHIN Nautilus Event-Driven Trader Starting")
    log(banner)

    self._build_engine()
    self._connect_hz()
    self._restore_capital()
    self._rollover_day()
    self._restore_position_state()

    # Bound methods serve directly as entry listeners for both HZ keys.
    for hz_key, handler in (
        ('latest_eigen_scan', self.on_scan),
        ('exf_latest', self.on_exf_update),
    ):
        self.features_map.add_entry_listener(
            key=hz_key, include_value=True,
            updated_func=handler, added_func=handler,
        )

    log("✅ Hz listener registered")
    log(f"🏷️ ALGO_VERSION: {ALGO_VERSION}")
    log("⏳ Waiting for scans...")

    # Idle loop: all real work happens in the HZ listener callbacks.
    try:
        while running:
            time.sleep(1)
    except KeyboardInterrupt:
        log("Interrupted")
    finally:
        self.shutdown()
def shutdown(self):
    """Stop scan workers, close out the trading day, and disconnect Hazelcast.

    Every stage is best-effort: a failure in one step is logged and
    never blocks the rest of teardown.
    """
    log("Shutting down...")
    # Don't wait for in-flight scans — we only need new work to stop.
    self._scan_executor.shutdown(wait=False)
    if self.eng and self.current_day:
        try:
            with self.eng_lock:
                summary = self.eng.end_day()
            log(f"end_day: {summary}")
        except Exception as e:
            log(f"end_day failed: {e}")
    if self.hz_client:
        try:
            self.hz_client.shutdown()
            log("Hz disconnected")
        except Exception as e:
            # Was a bare `except: pass`, which also swallowed
            # SystemExit/KeyboardInterrupt; narrow and log instead.
            log(f"Hz shutdown failed: {e}")
    log(f"🛑 Stopped. Scans: {self.scans_processed}, Trades: {self.trades_executed}")
def signal_handler(signum, frame):
    """Flip the global run flag so run()'s idle loop exits cleanly."""
    global running
    running = False
    log(f"Signal {signum} received")
def main():
    """Install SIGTERM/SIGINT handlers, then run the live trader."""
    for sig in (signal.SIGTERM, signal.SIGINT):
        signal.signal(sig, signal_handler)
    DolphinLiveTrader().run()
# Script entry point — not executed on import.
if __name__ == '__main__':
    main()