Files
DOLPHIN/nautilus_dolphin/dvae/exp1_proxy_sizing.py
hjnormey 01c19662cb initial: import DOLPHIN baseline 2026-04-21 from dolphinng5_predict working tree
Includes core prod + GREEN/BLUE subsystems:
- prod/ (BLUE harness, configs, scripts, docs)
- nautilus_dolphin/ (GREEN Nautilus-native impl + dvae/ preserved)
- adaptive_exit/ (AEM engine + models/bucket_assignments.pkl)
- Observability/ (EsoF advisor, TUI, dashboards)
- external_factors/ (EsoF producer)
- mc_forewarning_qlabs_fork/ (MC regime/envelope)

Excludes runtime caches, logs, backups, and reproducible artifacts per .gitignore.
2026-04-21 16:58:38 +02:00

198 lines
8.0 KiB
Python
Executable File
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

"""
Exp 1 — proxy_B-driven position sizing.
Instead of binary gating, scale bet_sizer.base_fraction proportionally to
the proxy_B percentile in a rolling window.
High proxy_B (stress incoming) → scale UP (better mean-reversion environment)
Low proxy_B (calm market) → scale DOWN (weaker signal)
Variants tested:
S1: [0.50x, 1.50x] linear, window=500
S2: [0.25x, 2.00x] linear, window=500 (more aggressive)
S3: [0.50x, 1.50x] linear, window=1000 (slower adaptation)
S4: [0.50x, 1.50x] clipped at p25/p75 (only extreme ends change)
Results logged to exp1_proxy_sizing_results.json.
"""
import sys, time
sys.stdout.reconfigure(encoding='utf-8', errors='replace')
from pathlib import Path
import numpy as np
_HERE = Path(__file__).resolve().parent
sys.path.insert(0, str(_HERE.parent))
from exp_shared import (
ensure_jit, ENGINE_KWARGS, GOLD,
load_data, load_forewarner, run_backtest, print_table, log_results
)
from nautilus_dolphin.nautilus.esf_alpha_orchestrator import NDAlphaEngine
# ── ProxyBSizedEngine ─────────────────────────────────────────────────────────
class ProxyBSizedEngine(NDAlphaEngine):
    """
    NDAlphaEngine that scales bet_sizer.base_fraction by the rolling
    empirical percentile of proxy_B (= instability_50 - v750_lambda_max_velocity).

    Parameters
    ----------
    proxy_b_min_scale : float
        Minimum fraction multiplier (applied at p0 of proxy_B).
    proxy_b_max_scale : float
        Maximum fraction multiplier (applied at p100 of proxy_B).
    proxy_b_clip_low : float
        Percentile at/below which min_scale is used (0.0 = pure linear,
        0.25 = clip at p25).
    proxy_b_clip_high : float
        Percentile at/above which max_scale is used.
    proxy_b_window : int
        Rolling history length used for the percentile computation.
    """

    def __init__(self, *args,
                 proxy_b_min_scale: float = 0.5,
                 proxy_b_max_scale: float = 1.5,
                 proxy_b_clip_low: float = 0.0,
                 proxy_b_clip_high: float = 1.0,
                 proxy_b_window: int = 500,
                 **kwargs):
        super().__init__(*args, **kwargs)
        self._pb_min = proxy_b_min_scale
        self._pb_max = proxy_b_max_scale
        self._pb_clip_lo = proxy_b_clip_low
        self._pb_clip_hi = proxy_b_clip_high
        self._pb_window = proxy_b_window
        self._pb_history = []        # rolling proxy_B samples (trimmed lazily)
        self._current_inst50 = 0.0   # latest instability_50 reading
        self._current_v750 = 0.0     # latest v750_lambda_max_velocity reading
        # Stats (read back by result reporting)
        self.sizing_scales = []
        self.sizing_scale_mean = 1.0

    def _proxy_b(self):
        """Current proxy_B value: instability_50 minus v750 velocity."""
        return self._current_inst50 - self._current_v750

    def _compute_scale(self):
        """Return the fraction multiplier for the current bar.

        Appends the current proxy_B to the rolling history, computes its
        empirical percentile over the trailing window, clips it to
        [clip_lo, clip_hi], then maps it linearly onto
        [min_scale, max_scale]. Returns 1.0 (neutral) until at least
        20 samples have accumulated.
        """
        pb = self._proxy_b()
        self._pb_history.append(pb)
        # Lazy trim: let the list grow to 2x the window before slicing, so
        # the O(n) copy happens once per `window` appends instead of per bar.
        if len(self._pb_history) > self._pb_window * 2:
            self._pb_history = self._pb_history[-self._pb_window:]
        if len(self._pb_history) < 20:
            return 1.0  # neutral until enough history
        hist = np.array(self._pb_history[-self._pb_window:])
        pct = float(np.mean(hist <= pb))  # empirical percentile of current pb
        # Clip, then renormalize the clipped percentile into [0,1]
        # between the clip boundaries.
        pct = max(self._pb_clip_lo, min(self._pb_clip_hi, pct))
        span = self._pb_clip_hi - self._pb_clip_lo
        if span < 1e-9:
            return 1.0  # degenerate clip range → neutral
        t = (pct - self._pb_clip_lo) / span
        scale = self._pb_min + t * (self._pb_max - self._pb_min)
        return float(scale)

    def process_day(self, date_str, df, asset_columns,
                    vol_regime_ok=None, direction=None, posture='APEX'):
        """Run one trading day bar-by-bar; returns the parent's end_day() result.

        Skips bars with a non-finite vel_div or with no valid prices
        (still advancing the global bar index so downstream indexing stays
        aligned). Caches instability_50 / v750 readings per bar so
        _compute_scale() sees fresh values at entry time.
        """
        self.begin_day(date_str, posture=posture, direction=direction)
        bid = 0
        for ri in range(len(df)):
            row = df.iloc[ri]
            vd = row.get('vel_div')
            if vd is None or not np.isfinite(float(vd)):
                self._global_bar_idx += 1; bid += 1; continue
            v50_raw = row.get('v50_lambda_max_velocity')
            v750_raw = row.get('v750_lambda_max_velocity')
            inst_raw = row.get('instability_50')
            v50_val = float(v50_raw) if (v50_raw is not None and np.isfinite(float(v50_raw))) else 0.0
            v750_val = float(v750_raw) if (v750_raw is not None and np.isfinite(float(v750_raw))) else 0.0
            inst_val = float(inst_raw) if (inst_raw is not None and np.isfinite(float(inst_raw))) else 0.0
            self._current_inst50 = inst_val
            self._current_v750 = v750_val
            prices = {}
            for ac in asset_columns:
                p = row.get(ac)
                if p is not None and p > 0 and np.isfinite(p):
                    prices[ac] = float(p)
            if not prices:
                self._global_bar_idx += 1; bid += 1; continue
            vrok = bool(vol_regime_ok[ri]) if vol_regime_ok is not None else (bid >= 100)
            self.step_bar(bar_idx=ri, vel_div=float(vd), prices=prices,
                          vol_regime_ok=vrok, v50_vel=v50_val, v750_vel=v750_val)
            bid += 1
        # Update mean scale stat
        if self.sizing_scales:
            self.sizing_scale_mean = float(np.mean(self.sizing_scales))
        return self.end_day()

    def _try_entry(self, bar_idx, vel_div, prices, price_histories,
                   v50_vel=0.0, v750_vel=0.0):
        """Scale base_fraction for this entry attempt, then restore it.

        FIX: the restore is now in a ``finally`` block — previously an
        exception inside super()._try_entry would skip the restore and
        leave bet_sizer.base_fraction permanently scaled.
        """
        scale = self._compute_scale()
        self.sizing_scales.append(scale)
        # Temporarily scale fraction
        orig = self.bet_sizer.base_fraction
        self.bet_sizer.base_fraction = orig * scale
        try:
            return super()._try_entry(bar_idx, vel_div, prices, price_histories,
                                      v50_vel, v750_vel)
        finally:
            self.bet_sizer.base_fraction = orig
# ── Experiment configs ────────────────────────────────────────────────────────
# Variant grid swept by main(). Field order:
#   (display name, min_scale, max_scale, clip_lo, clip_hi, window)
SIZING_VARIANTS = [
    ('S1: [0.5x1.5x] lin w500',     0.50, 1.50, 0.00, 1.00,  500),  # baseline linear mapping
    ('S2: [0.25x2.0x] lin w500',    0.25, 2.00, 0.00, 1.00,  500),  # wider, more aggressive range
    ('S3: [0.5x1.5x] lin w1000',    0.50, 1.50, 0.00, 1.00, 1000),  # slower percentile adaptation
    ('S4: [0.5x1.5x] clip p25-p75', 0.50, 1.50, 0.25, 0.75,  500),  # only distribution tails move scale
]
def main():
    """Run the baseline engine plus all proxy_B sizing variants.

    For each configuration: runs a backtest, records elapsed wall time in
    the result dict, prints a one-line summary, then prints the combined
    table against GOLD and logs everything to
    exp1_proxy_sizing_results.json.
    """
    ensure_jit()
    print("\nLoading data & forewarner...")
    load_data()
    fw = load_forewarner()
    results = []
    # Baseline (no sizing mod) — confirms alignment with gold
    print("\n" + "="*60)
    print("BASELINE (no proxy sizing)")
    t0 = time.time()
    r = run_backtest(lambda kw: NDAlphaEngine(**kw), 'Baseline (no sizing)', forewarner=fw)
    r['elapsed'] = time.time() - t0
    results.append(r)
    print(f" {r['roi']:.2f}% PF={r['pf']:.4f} DD={r['dd']:.2f}% T={r['trades']} ({r['elapsed']:.0f}s)")
    # Sizing variants
    for vname, mn, mx, clo, chi, win in SIZING_VARIANTS:
        print(f"\n{'='*60}\n{vname}")
        t0 = time.time()

        # Loop variables are bound as defaults to avoid the late-binding
        # closure pitfall (every factory would otherwise see S4's values).
        def factory(kw, mn=mn, mx=mx, clo=clo, chi=chi, win=win):
            return ProxyBSizedEngine(**kw,
                                     proxy_b_min_scale=mn, proxy_b_max_scale=mx,
                                     proxy_b_clip_low=clo, proxy_b_clip_high=chi,
                                     proxy_b_window=win)

        r = run_backtest(factory, vname, forewarner=fw)
        r['elapsed'] = time.time() - t0
        # FIX: use an explicit None check — the original truthiness test
        # would also skip printing a legitimate mean of exactly 0.0.
        if r.get('sizing_scale_mean') is not None:
            print(f" scale_mean={r['sizing_scale_mean']:.3f}")
        print(f" {r['roi']:.2f}% PF={r['pf']:.4f} DD={r['dd']:.2f}% T={r['trades']} ({r['elapsed']:.0f}s)")
        results.append(r)
    print("\n" + "="*83)
    print("EXP 1 — proxy_B POSITION SIZING RESULTS")
    print("="*83)
    print_table(results, gold=GOLD)
    log_results(results, _HERE / 'exp1_proxy_sizing_results.json', meta={
        'experiment': 'proxy_B position sizing',
        'description': 'Scale base_fraction by rolling proxy_B percentile',
        'proxy': 'instability_50 - v750_lambda_max_velocity',
    })
# Script entry point: run the full experiment sweep when executed directly.
if __name__ == '__main__':
    main()