initial: import DOLPHIN baseline 2026-04-21 from dolphinng5_predict working tree

Includes core prod + GREEN/BLUE subsystems:
- prod/ (BLUE harness, configs, scripts, docs)
- nautilus_dolphin/ (GREEN Nautilus-native impl + dvae/ preserved)
- adaptive_exit/ (AEM engine + models/bucket_assignments.pkl)
- Observability/ (EsoF advisor, TUI, dashboards)
- external_factors/ (EsoF producer)
- mc_forewarning_qlabs_fork/ (MC regime/envelope)

Excludes runtime caches, logs, backups, and reproducible artifacts per .gitignore.
This commit is contained in:
hjnormey
2026-04-21 16:58:38 +02:00
commit 01c19662cb
643 changed files with 260241 additions and 0 deletions

103
prod/diagnose_nautilus.py Executable file
View File

@@ -0,0 +1,103 @@
"""
Diagnose hanging backtest engine.
Feed only 500 rows of day 1, with extensive logging.
"""
import sys, time
sys.path.insert(0, '.')
sys.path.insert(0, 'nautilus_dolphin')
from nautilus_dolphin.nautilus.dolphin_actor import DolphinActor
from prod.nautilus_native_backtest import get_parquet_files, _make_instrument
import pandas as pd
import numpy as np
from nautilus_trader.model.identifiers import Venue
from nautilus_trader.backtest.engine import BacktestEngine, BacktestEngineConfig
from nautilus_trader.model.enums import OmsType, AccountType
from nautilus_trader.model.objects import Money, Currency
from nautilus_trader.model.data import BarType, Bar
import prod.nautilus_native_backtest as _nbt_mod
# ---------------------------------------------------------------------------
# Load a truncated slice of day 1 and prepare instruments.
# ---------------------------------------------------------------------------
files = get_parquet_files()
df0 = pd.read_parquet(files[0])
df0 = df0.iloc[:500]  # deliberately tiny slice so the hang reproduces with minimal data

# Feature/metadata columns in the parquet; every remaining column is treated
# as a per-asset price series.
SKIP_COLS = {
    'timestamp', 'scan_number', 'v50_lambda_max_velocity', 'v150_lambda_max_velocity',
    'v300_lambda_max_velocity', 'v750_lambda_max_velocity', 'vel_div',
    'instability_50', 'instability_150'
}
asset_cols = [c for c in df0.columns if c not in SKIP_COLS]
NV = Venue('BINANCE')
instruments = {}
for sym in asset_cols:
    # Best-effort: some symbols may not map to a constructible instrument.
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit propagate.
    try:
        instruments[sym] = _make_instrument(sym, NV)
    except Exception:
        pass
print("Building features and bars...")
_nbt_mod._FEATURE_STORE.clear()
all_bars = []
import datetime
# Midnight (UTC) of the day's date in epoch nanoseconds; the parquet file stem
# is assumed to be a YYYY-MM-DD date string — TODO confirm against the cache layout.
day_dt = datetime.datetime.strptime(files[0].stem, '%Y-%m-%d').replace(tzinfo=datetime.timezone.utc)
day_start_ns = int(day_dt.timestamp() * 1e9)
# ---------------------------------------------------------------------------
# Populate the feature store and synthesize 5-second bars, one row at a time.
# Rows are spaced 5 s apart starting at the day's midnight timestamp.
# (iterrows + enumerate replaces the range(len(...))/iloc anti-pattern; the
# positional index `ri` is preserved for the timestamp arithmetic.)
# ---------------------------------------------------------------------------
for ri, (_, row) in enumerate(df0.iterrows()):
    ts_ns = int(day_start_ns + ri * 5 * 1_000_000_000)  # 5-second cadence
    # Engine-side features keyed by bar timestamp; missing cells default to 0.0.
    _nbt_mod._FEATURE_STORE[ts_ns] = {
        'vel_div': float(row.get('vel_div', 0.0)),
        'v50': float(row.get('v50_lambda_max_velocity', 0.0)),
        'v750': float(row.get('v750_lambda_max_velocity', 0.0)),
        'inst50': float(row.get('instability_50', 0.0)),
        'vol_ok': True,
        'row_i': ri,
    }
    for sym in asset_cols:
        px = row.get(sym)
        # Truthiness gate matches the original: None/0/missing skip here;
        # NaN is truthy and is filtered by the isfinite check below.
        if not px:
            continue
        px_f = float(px)  # convert once instead of three times per cell
        if np.isfinite(px_f) and px_f > 0:
            bt = BarType.from_str(f"{sym}.BINANCE-5-SECOND-LAST-EXTERNAL")
            all_bars.append(_nbt_mod._make_bar(bt, px_f, ts_ns))
print(f"Created {len(all_bars)} bars.")
# ---------------------------------------------------------------------------
# Assemble the backtest engine, register venue/instruments, and run the feed.
# ---------------------------------------------------------------------------
CAPITAL = 25000.0
bt_engine = BacktestEngine(config=BacktestEngineConfig(trader_id='TEST-DIAGNOSE-01'))
quote_ccy = Currency.from_str('USDT')
bt_engine.add_venue(
    venue=NV,
    oms_type=OmsType.HEDGING,
    account_type=AccountType.MARGIN,
    base_currency=quote_ccy,
    starting_balances=[Money('25000', quote_ccy)],
)
for instr in instruments.values():
    bt_engine.add_instrument(instr)

# Actor configuration mirroring the production backtest, with paper capital
# pinned in both the paper_trade and engine sections.
engine_section = dict(_nbt_mod.CHAMPION_ENGINE_CFG)
engine_section['initial_capital'] = CAPITAL
actor_cfg = {
    'engine': engine_section,
    'paper_trade': {'initial_capital': CAPITAL},
    'posture_override': 'APEX',
    'live_mode': False,
    'native_mode': True,
    'run_date': files[0].stem,
    'bar_type': 'BTCUSDT.BINANCE-5-SECOND-LAST-EXTERNAL',
    'mc_models_dir': _nbt_mod.MC_MODELS_DIR,
    'mc_base_cfg': _nbt_mod.MC_BASE_CFG,
    'venue': 'BINANCE',
    'vol_p60': 0.0002,
    'acb_preload_dates': [f.stem for f in files],
    'assets': asset_cols,
    'parquet_dir': 'vbt_cache',
    'registered_assets': asset_cols,
}
actor = DolphinActor(config=actor_cfg)
# NOTE(review): DolphinActor is registered via add_strategy — confirm it
# subclasses Strategy rather than a plain Actor (which would need add_actor).
bt_engine.add_strategy(actor)
bt_engine.add_data(all_bars)

print("Starting BacktestEngine run...")
t0 = time.time()
bt_engine.run()
print(f"Done in {time.time()-t0:.2f}s")