initial: import DOLPHIN baseline 2026-04-21 from dolphinng5_predict working tree

Includes core prod + GREEN/BLUE subsystems:
- prod/ (BLUE harness, configs, scripts, docs)
- nautilus_dolphin/ (GREEN Nautilus-native impl + dvae/ preserved)
- adaptive_exit/ (AEM engine + models/bucket_assignments.pkl)
- Observability/ (EsoF advisor, TUI, dashboards)
- external_factors/ (EsoF producer)
- mc_forewarning_qlabs_fork/ (MC regime/envelope)

Excludes runtime caches, logs, backups, and reproducible artifacts per .gitignore.
This commit is contained in:
hjnormey
2026-04-21 16:58:38 +02:00
commit 01c19662cb
643 changed files with 260241 additions and 0 deletions

71
prod/diag_expshared.py Executable file
View File

@@ -0,0 +1,71 @@
"""Quick diagnostic: compare production exp_shared.run_backtest vs replicate_181 loop."""
import sys
from pathlib import Path
import numpy as np
import pandas as pd
import time, gc
ROOT = Path(r"C:\Users\Lenovo\Documents\- DOLPHIN NG HD HCM TSF Predict")
sys.path.insert(0, str(ROOT / 'nautilus_dolphin'))
sys.path.insert(0, str(ROOT / 'nautilus_dolphin' / 'dvae'))
import exp_shared
from nautilus_dolphin.nautilus.proxy_boost_engine import create_d_liq_engine
from nautilus_dolphin.nautilus.adaptive_circuit_breaker import AdaptiveCircuitBreaker
print("JIT...")
exp_shared.ensure_jit()
# ── TEST A: production exp_shared.run_backtest (all agent changes) ─────────
print("\n=== TEST A: exp_shared.run_backtest (production) ===")
started_a = time.time()
# Same fee/slippage overrides as TEST B so the two runs are comparable.
overrides = {'sp_maker_entry_rate': 1.0, 'sp_maker_exit_rate': 1.0, 'use_sp_slippage': False}
result_a = exp_shared.run_backtest(
    lambda kw: create_d_liq_engine(**kw),
    "prod_run",
    extra_kwargs=overrides,
)
print(f" ROI={result_a['roi']:+.2f}% T={result_a['trades']} DD={result_a['dd']:.2f}% t={time.time()-started_a:.0f}s")
# ── TEST B: replicate_181_gold.py style (NO float32, NO set_esoteric_hazard_multiplier) ─────────
print("\n=== TEST B: replicate_181 style (no float32, no hazard call) ===")
from exp_shared import load_data, ENGINE_KWARGS, META_COLS


def _trailing_vol(prices, window=50):
    """Trailing std-dev of 1-bar pct returns over the prior `window` bars.

    Entries before index `window` stay NaN (warm-up).  The window covers
    returns [j-window, j), i.e. the current bar's return is EXCLUDED —
    this mirrors the replicate_181 loop exactly (np.std with ddof=0).
    """
    diffs = np.zeros(len(prices), dtype=np.float64)
    diffs[1:] = np.diff(prices) / prices[:-1]
    vol = np.full(len(prices), np.nan)
    for j in range(window, len(prices)):
        vol[j] = np.std(diffs[j - window:j])
    return vol


d = load_data()
kw2 = ENGINE_KWARGS.copy()
kw2.update({'sp_maker_entry_rate': 1.0, 'sp_maker_exit_rate': 1.0, 'use_sp_slippage': False})
acb = AdaptiveCircuitBreaker()
acb.preload_w750(d['date_strings'])
eng2 = create_d_liq_engine(**kw2)
eng2.set_ob_engine(d['ob_eng'])
eng2.set_acb(acb)
# NOTE: no set_esoteric_hazard_multiplier call
t1 = time.time()
daily_caps = []  # end-of-day capital, one entry per parquet file
for pf in d['parquet_files']:
    ds = pf.stem
    df = pd.read_parquet(pf)  # float64, no casting
    acols = [c for c in df.columns if c not in META_COLS]
    bp = df['BTCUSDT'].values if 'BTCUSDT' in df.columns else None
    # All-NaN vol => vol_ok stays all-False when BTCUSDT is absent.
    dvol = _trailing_vol(bp) if bp is not None else np.full(len(df), np.nan)
    vol_ok = np.where(np.isfinite(dvol), dvol > d['vol_p60'], False)
    eng2.process_day(ds, df, acols, vol_regime_ok=vol_ok)
    daily_caps.append(eng2.capital)
    del df; gc.collect()  # release each day's frame before loading the next
tr2 = eng2.trade_history
roi2 = (eng2.capital - 25000.0) / 25000.0 * 100.0
# Max drawdown (%) over the end-of-day capital series, from a 25k start.
# (Removed dead code from the original: `import math` and a `daily_pnls`
# list that were computed but never used.)
peak, max_dd = 25000.0, 0.0
for cap in daily_caps:
    peak = max(peak, cap)
    max_dd = max(max_dd, (peak - cap) / peak * 100.0)
print(f" ROI={roi2:+.2f}% T={len(tr2)} DD={max_dd:.2f}% t={time.time()-t1:.0f}s")
print("\nDONE")