initial: import DOLPHIN baseline 2026-04-21 from dolphinng5_predict working tree
Includes core prod + GREEN/BLUE subsystems: prod/ (BLUE harness, configs, scripts, docs); nautilus_dolphin/ (GREEN Nautilus-native implementation, with dvae/ preserved); adaptive_exit/ (AEM engine + models/bucket_assignments.pkl); Observability/ (EsoF advisor, TUI, dashboards); external_factors/ (EsoF producer); mc_forewarning_qlabs_fork/ (MC regime/envelope). Excludes runtime caches, logs, backups, and reproducible artifacts per .gitignore.
This commit is contained in:
286
nautilus_dolphin/test_pf_acb.py
Executable file
286
nautilus_dolphin/test_pf_acb.py
Executable file
@@ -0,0 +1,286 @@
|
||||
"""PF test with ACB (Adaptive Circuit Breaker) enabled.
|
||||
|
||||
Runs the same ND alpha engine but applies ACB position-size cuts per date
|
||||
based on external market stress signals (funding, DVOL, FNG, taker ratio).
|
||||
"""
|
||||
import sys, time
|
||||
from pathlib import Path
|
||||
from collections import Counter
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).parent))
|
||||
|
||||
# Force numba JIT compilation up front so run timings below are not skewed
# by first-call compile cost.
print("Compiling numba kernels...")
_jit_start = time.time()
from nautilus_dolphin.nautilus.alpha_asset_selector import compute_irp_nb, compute_ars_nb, rank_assets_irp_nb
from nautilus_dolphin.nautilus.alpha_bet_sizer import compute_sizing_nb
from nautilus_dolphin.nautilus.alpha_signal_generator import check_dc_nb

# Warm every kernel once with tiny dummy inputs.
_warm_prices = np.array([1.0, 2.0, 3.0], dtype=np.float64)
compute_irp_nb(_warm_prices, -1)
compute_ars_nb(1.0, 0.5, 0.01)
rank_assets_irp_nb(np.ones((10, 2), dtype=np.float64), 8, -1, 5, 500.0, 20, 0.20)
compute_sizing_nb(
    -0.03, -0.02, -0.05, 3.0, 0.5, 5.0, 0.20, True, True, 0.0,
    np.zeros(4, dtype=np.int64), np.zeros(4, dtype=np.int64),
    np.zeros(5, dtype=np.float64), 0,
)
check_dc_nb(_warm_prices, 3, 1, 0.75)
print(f" JIT compile: {time.time() - _jit_start:.1f}s")
|
||||
|
||||
from nautilus_dolphin.nautilus.alpha_orchestrator import NDAlphaEngine
from nautilus_dolphin.nautilus.adaptive_circuit_breaker import AdaptiveCircuitBreaker

# Per-date price snapshot parquet cache; file stem is the date, e.g. "2026-01-01".
VBT_DIR = Path(r"C:\Users\Lenovo\Documents\- DOLPHIN NG HD HCM TSF Predict\vbt_cache")
# Columns that carry engine telemetry rather than asset prices; excluded when
# building the per-asset price dict for each bar.
META_COLS = {'timestamp', 'scan_number', 'v50_lambda_max_velocity', 'v150_lambda_max_velocity',
             'v300_lambda_max_velocity', 'v750_lambda_max_velocity', 'vel_div',
             'instability_50', 'instability_150'}

# Initialize ACB
acb = AdaptiveCircuitBreaker()
|
||||
|
||||
# Pre-compute the ACB cut for every available date, then report the ones
# where a cut is actually in effect.
print("\n=== ACB Signal Survey ===")
parquet_files = sorted(VBT_DIR.glob("*.parquet"))
acb_cuts = {pf.stem: acb.get_cut_for_date(pf.stem) for pf in parquet_files}

for date_str, cut_info in acb_cuts.items():
    if cut_info['cut'] > 0:
        factors = cut_info['factors']
        print(f" {date_str}: CUT={cut_info['cut']*100:.0f}% "
              f"(signals={cut_info['signals']:.1f}, "
              f"funding={factors['funding_btc']:.6f}, "
              f"dvol={factors['dvol_btc']:.1f}, "
              f"fng={factors['fng']:.0f}, "
              f"taker={factors['taker']:.3f})")

dates_with_cuts = sum(1 for c in acb_cuts.values() if c['cut'] > 0)
print(f"\nDates with ACB cuts: {dates_with_cuts}/{len(acb_cuts)}")
print(f"Dates with data: {sum(1 for c in acb_cuts.values() if c['factors']['available'])}/{len(acb_cuts)}")
|
||||
|
||||
# --- Vol-regime threshold: 60th percentile of rolling BTC vol over the first
# two dates. Used below to gate entries to higher-volatility bars. ---
all_vols = []
for pf in parquet_files[:2]:
    df = pd.read_parquet(pf)
    if 'BTCUSDT' not in df.columns:
        continue
    prices = df['BTCUSDT'].values
    # Rolling std of simple returns over a 50-bar window.
    # NOTE(review): sampling starts at bar 60 here but at bar 50 in the
    # per-date loops below — confirm the offset is intended.
    for i in range(60, len(prices)):
        seg = prices[max(0, i-50):i]
        if len(seg) < 10:
            continue
        rets = np.diff(seg) / seg[:-1]
        v = float(np.std(rets))
        if v > 0:
            all_vols.append(v)

# Fail fast with a clear message instead of the opaque IndexError that
# np.percentile raises on an empty sample (e.g. missing BTCUSDT column).
if not all_vols:
    raise RuntimeError(
        "No BTCUSDT volatility samples found in the first 2 parquet files; "
        "cannot compute vol_p60"
    )
vol_p60 = float(np.percentile(all_vols, 60))
print(f"\nVol p60={vol_p60:.6f}")
|
||||
|
||||
# --- Run WITHOUT ACB (baseline) ---
print("\n=== Running WITHOUT ACB (baseline) ===")
# Baseline engine. The configuration is deliberately identical to the ACB run
# below so the only difference between the two runs is the per-date size cut.
engine_base = NDAlphaEngine(
    initial_capital=25000.0,
    # Velocity-divergence entry thresholds (normal / extreme).
    vel_div_threshold=-0.02, vel_div_extreme=-0.05,
    min_leverage=0.5, max_leverage=5.0, leverage_convexity=3.0,
    # Sizing/exits: 20% base fraction, ~0.99% fixed TP, 120-bar max hold.
    fraction=0.20, fixed_tp_pct=0.0099, stop_pct=1.0, max_hold_bars=120,
    # Direction-confirmation layer settings.
    use_direction_confirm=True, dc_lookback_bars=7, dc_min_magnitude_bps=0.75,
    dc_skip_contradicts=True, dc_leverage_boost=1.0, dc_leverage_reduce=0.5,
    use_asset_selection=True, min_irp_alignment=0.45,
    # Fee/slippage model with maker-fill rates for entry and exit.
    use_sp_fees=True, use_sp_slippage=True,
    sp_maker_entry_rate=0.62, sp_maker_exit_rate=0.50,
    use_ob_edge=True, ob_edge_bps=5.0, ob_confirm_rate=0.40,
    lookback=100, use_alpha_layers=True, use_dynamic_leverage=True, seed=42,
)
|
||||
|
||||
bar_idx_base = 0            # global bar counter across all dates
price_histories_base = {}   # per-asset running price history fed to the engine
t0 = time.time()

for pf in parquet_files:
    df = pd.read_parquet(pf)
    asset_cols = [c for c in df.columns if c not in META_COLS]
    btc_prices = df['BTCUSDT'].values if 'BTCUSDT' in df.columns else None
    # Rolling 50-bar BTC return volatility per row index (NaN until enough history).
    date_vol = np.full(len(df), np.nan)
    if btc_prices is not None:
        for i in range(50, len(btc_prices)):
            seg = btc_prices[max(0, i-50):i]
            if len(seg) < 10:
                continue
            rets = np.diff(seg) / seg[:-1]
            date_vol[i] = float(np.std(rets))

    bars_in_date = 0
    for row_i in range(len(df)):
        row = df.iloc[row_i]
        vel_div = row.get("vel_div")
        # Skip bars without a finite vel_div signal, still advancing counters.
        if vel_div is None or not np.isfinite(vel_div):
            bar_idx_base += 1
            bars_in_date += 1
            continue
        # Collect valid (positive, finite) per-asset prices and extend histories.
        prices = {}
        for ac in asset_cols:
            p = row[ac]
            if p and p > 0 and np.isfinite(p):
                prices[ac] = float(p)
                if ac not in price_histories_base:
                    price_histories_base[ac] = []
                price_histories_base[ac].append(float(p))
        if not prices:
            bar_idx_base += 1
            bars_in_date += 1
            continue
        # Vol-regime gate: require 100 warm-up bars within the date, then
        # current rolling vol above the p60 threshold computed earlier.
        if bars_in_date < 100:
            vol_regime_ok = False
        else:
            v = date_vol[row_i]
            vol_regime_ok = (np.isfinite(v) and v > vol_p60)
        engine_base.process_bar(
            bar_idx=bar_idx_base, vel_div=float(vel_div),
            prices=prices, vol_regime_ok=vol_regime_ok,
            price_histories=price_histories_base,
        )
        bar_idx_base += 1
        bars_in_date += 1

print(f" Baseline done: {time.time()-t0:.0f}s")
|
||||
|
||||
# --- Run WITH ACB ---
print("\n=== Running WITH ACB ===")
# Same configuration as engine_base above; only the per-date ACB fraction cut
# applied in the loop below differs between the two runs.
engine_acb = NDAlphaEngine(
    initial_capital=25000.0,
    vel_div_threshold=-0.02, vel_div_extreme=-0.05,
    min_leverage=0.5, max_leverage=5.0, leverage_convexity=3.0,
    fraction=0.20, fixed_tp_pct=0.0099, stop_pct=1.0, max_hold_bars=120,
    use_direction_confirm=True, dc_lookback_bars=7, dc_min_magnitude_bps=0.75,
    dc_skip_contradicts=True, dc_leverage_boost=1.0, dc_leverage_reduce=0.5,
    use_asset_selection=True, min_irp_alignment=0.45,
    use_sp_fees=True, use_sp_slippage=True,
    sp_maker_entry_rate=0.62, sp_maker_exit_rate=0.50,
    use_ob_edge=True, ob_edge_bps=5.0, ob_confirm_rate=0.40,
    lookback=100, use_alpha_layers=True, use_dynamic_leverage=True, seed=42,
)

bar_idx_acb = 0            # global bar counter across all dates
price_histories_acb = {}   # per-asset running price history fed to the engine
t1 = time.time()

# Track ACB impact
acb_applied_trades = 0     # trades closed on dates where a cut was in effect
acb_total_cut = 0.0        # sum of cut fractions weighted by trade count
|
||||
|
||||
# ACB run: identical loop to the baseline, but entries are sized down by the
# pre-computed per-date ACB cut.
for pf in parquet_files:
    date_str = pf.stem
    cut_info = acb_cuts[date_str]
    acb_cut = cut_info['cut']

    df = pd.read_parquet(pf)
    asset_cols = [c for c in df.columns if c not in META_COLS]
    btc_prices = df['BTCUSDT'].values if 'BTCUSDT' in df.columns else None
    # Rolling 50-bar BTC return volatility per row index (NaN until enough history).
    date_vol = np.full(len(df), np.nan)
    if btc_prices is not None:
        for i in range(50, len(btc_prices)):
            seg = btc_prices[max(0, i-50):i]
            if len(seg) < 10:
                continue
            rets = np.diff(seg) / seg[:-1]
            date_vol[i] = float(np.std(rets))

    bars_in_date = 0
    trades_before = len(engine_acb.trade_history)

    for row_i in range(len(df)):
        row = df.iloc[row_i]
        vel_div = row.get("vel_div")
        if vel_div is None or not np.isfinite(vel_div):
            bar_idx_acb += 1
            bars_in_date += 1
            continue
        # Collect valid (positive, finite) per-asset prices and extend histories.
        prices = {}
        for ac in asset_cols:
            p = row[ac]
            if p and p > 0 and np.isfinite(p):
                prices[ac] = float(p)
                if ac not in price_histories_acb:
                    price_histories_acb[ac] = []
                price_histories_acb[ac].append(float(p))
        if not prices:
            bar_idx_acb += 1
            bars_in_date += 1
            continue
        # Vol-regime gate: 100 warm-up bars within the date, then vol > p60.
        if bars_in_date < 100:
            vol_regime_ok = False
        else:
            v = date_vol[row_i]
            vol_regime_ok = (np.isfinite(v) and v > vol_p60)

        # Apply ACB: temporarily reduce the bet fraction for this bar's
        # potential entry. Save the configured value and restore it in a
        # finally block — the previous code restored a hard-coded 0.20, which
        # would silently corrupt sizing if the configured fraction ever
        # changed, and skipped restoration entirely if process_bar raised.
        orig_fraction = engine_acb.bet_sizer.base_fraction
        if acb_cut > 0 and engine_acb.position is None:
            engine_acb.bet_sizer.base_fraction = orig_fraction * (1.0 - acb_cut)
        try:
            engine_acb.process_bar(
                bar_idx=bar_idx_acb, vel_div=float(vel_div),
                prices=prices, vol_regime_ok=vol_regime_ok,
                price_histories=price_histories_acb,
            )
        finally:
            engine_acb.bet_sizer.base_fraction = orig_fraction

        bar_idx_acb += 1
        bars_in_date += 1

    # Attribute any trades closed during this date to the active cut.
    new_trades = len(engine_acb.trade_history) - trades_before
    if new_trades > 0 and acb_cut > 0:
        acb_applied_trades += new_trades
        acb_total_cut += acb_cut * new_trades

print(f" ACB done: {time.time()-t1:.0f}s")
|
||||
|
||||
# === Results comparison ===
def print_results(label, engine):
    """Print a run summary for *engine*: trade counts, win rate, profit
    factor, fees, final capital, exit-reason distribution, average leverage.

    Assumes trades expose pnl_absolute, exit_reason, and leverage, and the
    engine exposes trade_history, total_fees, and capital.
    """
    trades = engine.trade_history
    if not trades:
        print(f"\n{label}: 0 trades")
        return

    wins = [t for t in trades if t.pnl_absolute > 0]
    losses = [t for t in trades if t.pnl_absolute <= 0]
    gross_win = sum(t.pnl_absolute for t in wins)
    gross_loss = abs(sum(t.pnl_absolute for t in losses))
    # Profit factor; reported as infinity when there are no losing trades.
    pf_val = gross_win / gross_loss if gross_loss > 0 else float("inf")

    banner = '=' * 50
    print(f"\n{banner}")
    print(f" {label}")
    print(f"{banner}")
    print(f"Trades: {len(trades)}")
    print(f"Wins: {len(wins)}, WR: {len(wins)/len(trades)*100:.1f}%")
    if wins:
        print(f"Avg win $: {np.mean([t.pnl_absolute for t in wins]):.2f}")
    if losses:
        print(f"Avg loss $: {np.mean([t.pnl_absolute for t in losses]):.2f}")
    print(f"Gross win: {gross_win:.2f}")
    print(f"Gross loss: {-gross_loss:.2f}")
    print(f"PF: {pf_val:.3f}")
    print(f"Fees: {engine.total_fees:.2f}")
    print(f"Final capital: ${engine.capital:.2f}")
    print(f"Return: {(engine.capital - 25000) / 25000 * 100:.2f}%")
    print(f"Exit distribution: {dict(Counter(t.exit_reason for t in trades))}")
    print(f"Avg leverage: {np.mean([t.leverage for t in trades]):.2f}")
|
||||
|
||||
print_results("BASELINE (no ACB)", engine_base)
|
||||
print_results("WITH ACB v5", engine_acb)
|
||||
|
||||
# Delta
|
||||
base_roi = (engine_base.capital - 25000) / 25000 * 100
|
||||
acb_roi = (engine_acb.capital - 25000) / 25000 * 100
|
||||
print(f"\n{'='*50}")
|
||||
print(f" DELTA")
|
||||
print(f"{'='*50}")
|
||||
print(f"ROI: {base_roi:+.2f}% -> {acb_roi:+.2f}% (delta: {acb_roi - base_roi:+.2f}%)")
|
||||
print(f"Capital: ${engine_base.capital:.2f} -> ${engine_acb.capital:.2f} (delta: ${engine_acb.capital - engine_base.capital:+.2f})")
|
||||
print(f"Trades with ACB cuts applied: {acb_applied_trades}")
|
||||
if acb_applied_trades > 0:
|
||||
print(f"Avg ACB cut on affected trades: {acb_total_cut / acb_applied_trades * 100:.1f}%")
|
||||
print(f"\nTotal time: {time.time() - t0:.0f}s")
|
||||
Reference in New Issue
Block a user