# --- File-listing header (repository-viewer residue, preserved as a comment) ---
# DOLPHIN/nautilus_dolphin/test_pf_risk_boundaries.py
# 280 lines, 12 KiB, Python
"""Risk boundary & liquidation test for meta-boosted inverse ACB.
Checks:
1. Max notional/capital ratio (effective leverage after meta-boost)
2. Worst single-trade loss as % of capital
3. Liquidation proximity: would any SHORT have been liquidated?
Exchange liq for SHORT at lev L: price rises ~(1/L)*maint_margin_factor
Binance: initial_margin=1/L, maint_margin~0.4% for 5x, liq at ~+19.6% price move
4. Peak drawdown trajectory
5. Capital-at-risk: max exposure when capital is lowest
6. Risk of ruin estimate (Monte Carlo from trade distribution)
"""
import sys, time, math
from pathlib import Path
import numpy as np
import pandas as pd
# Make the sibling package importable when this file is run as a script.
sys.path.insert(0, str(Path(__file__).parent))
print("Compiling numba kernels...")
from nautilus_dolphin.nautilus.alpha_asset_selector import compute_irp_nb, compute_ars_nb, rank_assets_irp_nb
from nautilus_dolphin.nautilus.alpha_bet_sizer import compute_sizing_nb
from nautilus_dolphin.nautilus.alpha_signal_generator import check_dc_nb
# Warm-up calls with throwaway arguments so numba JIT compilation happens
# up-front instead of inside the backtest loop (results are discarded).
_p = np.array([1.0, 2.0, 3.0], dtype=np.float64)
compute_irp_nb(_p, -1); compute_ars_nb(1.0, 0.5, 0.01)
rank_assets_irp_nb(np.ones((10, 2), dtype=np.float64), 8, -1, 5, 500.0, 20, 0.20)
compute_sizing_nb(-0.03, -0.02, -0.05, 3.0, 0.5, 5.0, 0.20, True, True, 0.0,
np.zeros(4, dtype=np.int64), np.zeros(4, dtype=np.int64),
np.zeros(5, dtype=np.float64), 0, -1, 0.01, 0.04)
check_dc_nb(_p, 3, 1, 0.75)
# Orchestrator/circuit-breaker imported after kernel warm-up.
from nautilus_dolphin.nautilus.alpha_orchestrator import NDAlphaEngine, NDPosition
from nautilus_dolphin.nautilus.adaptive_circuit_breaker import AdaptiveCircuitBreaker
# Directory of cached per-day parquet price matrices (one file per date).
VBT_DIR = Path(r"C:\Users\Lenovo\Documents\- DOLPHIN NG HD HCM TSF Predict\vbt_cache")
# Non-price columns to exclude when extracting asset price columns from a day's frame.
META_COLS = {'timestamp', 'scan_number', 'v50_lambda_max_velocity', 'v150_lambda_max_velocity',
'v300_lambda_max_velocity', 'v750_lambda_max_velocity', 'vel_div',
'instability_50', 'instability_150'}
# Engine configuration shared by every run (a fresh engine is built per beta).
ENGINE_KWARGS = dict(
initial_capital=25000.0, vel_div_threshold=-0.02, vel_div_extreme=-0.05,
min_leverage=0.5, max_leverage=5.0, leverage_convexity=3.0,
fraction=0.20, fixed_tp_pct=0.0099, stop_pct=1.0, max_hold_bars=120,
use_direction_confirm=True, dc_lookback_bars=7, dc_min_magnitude_bps=0.75,
dc_skip_contradicts=True, dc_leverage_boost=1.0, dc_leverage_reduce=0.5,
use_asset_selection=True, min_irp_alignment=0.45,
use_sp_fees=True, use_sp_slippage=True,
sp_maker_entry_rate=0.62, sp_maker_exit_rate=0.50,
use_ob_edge=True, ob_edge_bps=5.0, ob_confirm_rate=0.40,
lookback=100, use_alpha_layers=True, use_dynamic_leverage=True, seed=42,
)
# Meta-boost shaping constants; values mirror vel_div_threshold / vel_div_extreme /
# leverage_convexity in ENGINE_KWARGS above — keep them in sync.
VD_THRESH = -0.02; VD_EXTREME = -0.05; CONVEXITY = 3.0
acb = AdaptiveCircuitBreaker()
parquet_files = sorted(VBT_DIR.glob("*.parquet"))
# Per-day circuit-breaker signal count, keyed by parquet file stem (the date string).
acb_signals = {pf.stem: acb.get_cut_for_date(pf.stem)['signals'] for pf in parquet_files}
# Calibrate the volatility-regime threshold: 60th percentile of BTC 50-bar
# rolling return std, sampled from the first two cached days only.
# (Indentation reconstructed — the pasted source had lost all block structure.)
all_vols = []
for pf in parquet_files[:2]:
    df = pd.read_parquet(pf)
    if 'BTCUSDT' not in df.columns:
        continue
    pr = df['BTCUSDT'].values
    for i in range(60, len(pr)):
        seg = pr[max(0, i - 50):i]
        if len(seg) < 10:  # guard for short segments (always 50 here given i>=60)
            continue
        v = float(np.std(np.diff(seg) / seg[:-1]))
        if v > 0:
            all_vols.append(v)
vol_p60 = float(np.percentile(all_vols, 60))
# Pre-load every parquet day: (dataframe, asset price columns, BTC rolling-vol
# series aligned to the frame). Vol at row i is the std of 50-bar returns ending
# at i-1; NaN where BTC is absent or the window is incomplete.
# (Indentation reconstructed — the pasted source had lost all block structure.)
pq_data = {}
for pf in parquet_files:
    df = pd.read_parquet(pf)
    ac = [c for c in df.columns if c not in META_COLS]
    bp = df['BTCUSDT'].values if 'BTCUSDT' in df.columns else None
    dv = np.full(len(df), np.nan)
    if bp is not None:
        for i in range(50, len(bp)):
            seg = bp[max(0, i - 50):i]
            if len(seg) < 10:
                continue
            dv[i] = float(np.std(np.diff(seg) / seg[:-1]))
    pq_data[pf.stem] = (df, ac, dv)
def log05(s):
    """Dampened meta-boost multiplier: 1 + 0.5*ln(1+s) for s >= 1, else neutral 1.0.

    `s` is the day's circuit-breaker signal count; the log keeps heavy-signal
    days from blowing the size multiplier up linearly.
    """
    return 1.0 + 0.5 * math.log1p(s) if s >= 1.0 else 1.0
def strength_cubic(vel_div, threshold=None, extreme=None, convexity=None):
    """Map velocity divergence to a boost strength in [0, 1] with convex shaping.

    Returns 0.0 when `vel_div` sits above the trigger `threshold`; otherwise the
    normalized depth toward `extreme` is clamped to [0, 1] and raised to
    `convexity`, so only deep divergences earn near-full boost.

    The optional parameters default to the module constants VD_THRESH,
    VD_EXTREME and CONVEXITY (backward compatible with the original
    single-argument call).
    """
    t = VD_THRESH if threshold is None else threshold
    e = VD_EXTREME if extreme is None else extreme
    c = CONVEXITY if convexity is None else convexity
    if vel_div >= t:
        return 0.0
    raw = (t - vel_div) / (t - e)
    return min(1.0, max(0.0, raw)) ** c
# Binance futures liquidation model for a SHORT (simplified):
# liq_price = entry * (1 + (initial_margin - maint_margin) / 1.0)
# For 5x: initial_margin = 20%, maint_margin ~ 0.4%
# => liq_price_move = +19.6% above entry (SHORT liquidated if price rises ~19.6%)
# For effective 10x: liq at ~9.6%, etc.
# NOTE(review): this assumes a flat maintenance rate; Binance actually uses
# tiered maintenance-margin brackets by notional — verify against the current
# margin tier tables before trusting liq distances on large positions.
BINANCE_MAINT_MARGIN_RATE = 0.004 # 0.4% for up to 50x on majors
def run_risk_test(beta):
    """Replay every cached day through a fresh engine and collect risk metrics.

    Parameters
    ----------
    beta : float
        Meta-boost aggressiveness. 0 disables the cubic-strength boost and the
        size multiplier is just the log-dampened signal boost.

    Returns
    -------
    (dict, list)
        Summary risk metrics (ROI, PF, drawdown, leverage extremes, liquidation
        proximity, Monte-Carlo ruin probability, ...) and the per-entry risk log.

    (Indentation reconstructed — the pasted source had lost all block structure.
    Also: hard-coded 25000 capital replaced by ENGINE_KWARGS['initial_capital']
    for consistency, and an unused `peak` local in the MC loop removed.)
    """
    engine = NDAlphaEngine(**ENGINE_KWARGS)
    initial_cap = ENGINE_KWARGS['initial_capital']  # keep MC/ROI tied to engine config
    bar_idx = 0
    ph = {}  # per-asset price histories, accumulated across all days
    # --- risk tracking state ---
    risk_log = []                    # per-entry risk metrics
    max_notional_ratio = 0.0
    max_eff_leverage = 0.0
    worst_trade_pct = 0.0
    worst_trade_abs = 0.0
    min_liq_distance = float('inf')  # closest approach to liquidation (% price move)
    capital_series = [engine.capital]
    exposure_at_min_cap = 0.0
    min_capital = engine.capital
    for pf in parquet_files:
        ds = pf.stem
        signals = acb_signals[ds]
        base_boost = log05(signals)
        engine.regime_direction = -1   # SHORT-only regime for this test
        engine.regime_dd_halt = False
        df, acols, dvol = pq_data[ds]
        bid = 0                        # bar index within the day (vol warm-up counter)
        for ri in range(len(df)):
            row = df.iloc[ri]
            vd = row.get("vel_div")
            if vd is None or not np.isfinite(vd):
                bar_idx += 1; bid += 1
                continue
            prices = {}
            for ac in acols:
                p = row[ac]
                if p and p > 0 and np.isfinite(p):
                    prices[ac] = float(p)
                    if ac not in ph:
                        ph[ac] = []
                    ph[ac].append(float(p))
            if not prices:
                bar_idx += 1; bid += 1
                continue
            # Volatility regime requires 100 bars of intraday warm-up.
            vrok = False if bid < 100 else (np.isfinite(dvol[ri]) and dvol[ri] > vol_p60)
            if beta > 0 and base_boost > 1.0:
                ss = strength_cubic(float(vd))
                engine.regime_size_mult = base_boost * (1.0 + beta * ss)
            else:
                engine.regime_size_mult = base_boost
            # Snapshot BEFORE processing so entries/exits this bar are detectable.
            had_position = engine.position is not None
            old_trades = len(engine.trade_history)
            engine.process_bar(bar_idx=bar_idx, vel_div=float(vd), prices=prices,
                               vol_regime_ok=vrok, price_histories=ph)
            # New position opened this bar -> record entry risk.
            if engine.position is not None and not had_position:
                pos = engine.position
                notional_ratio = pos.notional / engine.capital if engine.capital > 0 else 999
                eff_lev = notional_ratio  # effective leverage == notional / capital here
                # Liquidation distance for SHORT: how much can price rise before liq?
                # Simplified cross-margin view (all capital backs the position):
                # liq_move = capital / notional - maintenance margin rate.
                if pos.notional > 0:
                    liq_move_pct = (engine.capital / pos.notional - BINANCE_MAINT_MARGIN_RATE) * 100
                else:
                    liq_move_pct = 999
                max_notional_ratio = max(max_notional_ratio, notional_ratio)
                max_eff_leverage = max(max_eff_leverage, eff_lev)
                min_liq_distance = min(min_liq_distance, liq_move_pct)
                risk_log.append({
                    'date': ds, 'bar': bar_idx, 'asset': pos.asset,
                    'notional': pos.notional, 'capital': engine.capital,
                    'notional_ratio': notional_ratio, 'eff_leverage': eff_lev,
                    'leverage': pos.leverage, 'meta_mult': engine.regime_size_mult,
                    'vel_div': float(vd), 'liq_move_pct': liq_move_pct,
                })
            # Trade closed this bar -> update worst-trade stats.
            if len(engine.trade_history) > old_trades:
                t = engine.trade_history[-1]
                pre_cap = engine.capital - t.pnl_absolute  # capital before settlement
                trade_loss_pct = t.pnl_absolute / pre_cap * 100 if pre_cap > 0 else 0
                if t.pnl_absolute < worst_trade_abs:
                    worst_trade_abs = t.pnl_absolute
                    worst_trade_pct = trade_loss_pct
            # Track the capital path and the exposure held at the capital trough.
            capital_series.append(engine.capital)
            if engine.capital < min_capital:
                min_capital = engine.capital
                if engine.position:
                    exposure_at_min_cap = engine.position.notional
            bar_idx += 1; bid += 1
    # --- Risk-of-ruin Monte Carlo: bootstrap-resample realized trade PnL. ---
    # NOTE: np.random is not explicitly seeded here, so ruin_pct varies run-to-run.
    trades = engine.trade_history
    if trades:
        pnl_dist = np.array([t.pnl_absolute for t in trades])
        n_sims = 5000
        ruin_count = 0
        ruin_threshold = initial_cap * 0.5  # 50% drawdown counts as ruin
        for _ in range(n_sims):
            cap = initial_cap
            sim_trades = np.random.choice(pnl_dist, size=len(trades), replace=True)
            for pnl in sim_trades:
                cap += pnl
                if cap < ruin_threshold:
                    ruin_count += 1
                    break
        ruin_pct = ruin_count / n_sims * 100
    else:
        ruin_pct = 0
    # --- Summary stats ---
    wins = [t for t in trades if t.pnl_absolute > 0]
    losses = [t for t in trades if t.pnl_absolute <= 0]
    gw = sum(t.pnl_absolute for t in wins) if wins else 0
    gl = abs(sum(t.pnl_absolute for t in losses)) if losses else 0
    cap_arr = np.array(capital_series)
    peak_arr = np.maximum.accumulate(cap_arr)
    dd_arr = (peak_arr - cap_arr) / peak_arr * 100
    max_dd = float(np.max(dd_arr))
    return {
        'roi': (engine.capital - initial_cap) / initial_cap * 100,
        'pf': gw / gl if gl > 0 else 999,
        'max_dd': max_dd,
        'max_notional_ratio': max_notional_ratio,
        'max_eff_leverage': max_eff_leverage,
        'worst_trade_pct': worst_trade_pct,
        'worst_trade_abs': worst_trade_abs,
        'min_liq_distance_pct': min_liq_distance,
        'min_capital': min_capital,
        'exposure_at_min_cap': exposure_at_min_cap,
        'ruin_pct_50dd': ruin_pct,
        'trades': len(trades),
        'risk_entries': len(risk_log),
    }, risk_log
# Run the risk test across key beta values and print a summary table.
# (Indentation reconstructed — the pasted source had lost all block structure.)
print(f"\n{'='*120}")
print(f"{'BETA':<8} {'ROI%':>7} {'PF':>5} {'DD%':>6} {'MAX_EFF_LEV':>12} {'MAX_NOT/CAP':>12} "
      f"{'WORST_TRADE':>12} {'LIQ_DIST%':>10} {'MIN_CAP':>9} {'RUIN_50DD%':>11}")
print(f"{'='*120}")
t0 = time.time()
all_results = {}
for beta in [0, 0.3, 0.5, 0.7, 1.0, 1.5, 2.0]:
    r, rlog = run_risk_test(beta)
    all_results[beta] = (r, rlog)
    # Flag runs that came dangerously close to liquidation (<5% / <10% price move).
    danger = " !!!" if r['min_liq_distance_pct'] < 5 else " !!" if r['min_liq_distance_pct'] < 10 else ""
    print(f" {beta:<6} {r['roi']:>+7.2f} {r['pf']:>5.3f} {r['max_dd']:>6.2f} "
          f"{r['max_eff_leverage']:>12.2f}x {r['max_notional_ratio']:>11.2f}x "
          f"{r['worst_trade_abs']:>+12.2f} {r['min_liq_distance_pct']:>9.1f}% "
          f"{r['min_capital']:>9.2f} {r['ruin_pct_50dd']:>10.1f}%{danger}")
# Detailed risk log for dangerous entries (top 10 by effective leverage, beta=0.5).
# (Indentation reconstructed — the pasted source had lost all block structure.)
print(f"\n--- TOP 10 RISKIEST ENTRIES (beta=0.5) ---")
_, rlog_05 = all_results[0.5]
rlog_sorted = sorted(rlog_05, key=lambda x: -x['eff_leverage'])
print(f"{'DATE':<12} {'ASSET':<12} {'VEL_DIV':>8} {'LEV':>5} {'META':>6} {'EFF_LEV':>8} "
      f"{'NOTIONAL':>10} {'CAPITAL':>10} {'LIQ_DIST%':>10}")
for e in rlog_sorted[:10]:
    print(f"{e['date']:<12} {e['asset']:<12} {e['vel_div']:>8.4f} {e['leverage']:>5.2f} "
          f"{e['meta_mult']:>6.2f} {e['eff_leverage']:>8.2f}x {e['notional']:>10.2f} "
          f"{e['capital']:>10.2f} {e['liq_move_pct']:>9.1f}%")
# Exchange margin summary: safety verdict per beta based on liquidation distance.
# (Indentation reconstructed — the pasted source had lost all block structure.)
print(f"\n--- EXCHANGE MARGIN ANALYSIS ---")
for beta in [0, 0.3, 0.5, 0.7, 1.0]:
    r, _ = all_results[beta]
    print(f" beta={beta}: max_eff_lev={r['max_eff_leverage']:.2f}x, "
          f"min_liq_distance={r['min_liq_distance_pct']:.1f}%, "
          f"ruin_50dd={r['ruin_pct_50dd']:.1f}%"
          f"{' SAFE' if r['min_liq_distance_pct'] > 10 else ' CAUTION' if r['min_liq_distance_pct'] > 5 else ' DANGER'}")
print(f"\nTotal time: {time.time()-t0:.0f}s")