1452 lines
56 KiB
Python
1452 lines
56 KiB
Python
|
|
#!/usr/bin/env python3
|
|||
|
|
"""
|
|||
|
|
test_signal_to_fill.py
|
|||
|
|
======================
|
|||
|
|
Prod-path signal-to-fill latency harness for DolphinLiveTrader.
|
|||
|
|
|
|||
|
|
Uses the EXACT production codepath: DolphinLiveTrader → on_scan() →
|
|||
|
|
NDAlphaEngine (D_LIQ_GOLD config) → ACBv6 → OBF (HZOBProvider live).
|
|||
|
|
No engine mocking. External I/O (HZ state-push, trade log file) is
|
|||
|
|
stubbed to avoid prod side-effects.
|
|||
|
|
|
|||
|
|
Latency segments measured (wall-clock via time.perf_counter):
|
|||
|
|
pre_engine_ms parse + price-dict + vol_ok + OBF.step_live (pre-lock)
|
|||
|
|
lock_wait_ms time blocked acquiring eng_lock
|
|||
|
|
engine_ms step_bar() compute inside lock
|
|||
|
|
post_engine_ms result handling + state bookkeeping
|
|||
|
|
e2e_ms full on_scan() wall time (signal available ≡ step_bar done)
|
|||
|
|
|
|||
|
|
Performance budgets (P99 assertions):
|
|||
|
|
engine_ms P99 < 5 ms (numba-compiled kernels; typically < 0.5 ms)
|
|||
|
|
pre_engine P99 < 20 ms (OBF step_live is the bottleneck: HZ loopback)
|
|||
|
|
e2e_ms P99 < 30 ms (headroom: bar cadence is 5 000 ms)
|
|||
|
|
|
|||
|
|
Test classes:
|
|||
|
|
TestSignalFiringCorrectness — fires iff vel_div < −0.02 + all gates
|
|||
|
|
TestSignalLatency — 500-scan timing distribution + budget asserts
|
|||
|
|
TestBarIdxIntegrity — never skips / doubles; resets on day rollover
|
|||
|
|
TestDeduplication — file_mtime guard prevents double-processing
|
|||
|
|
TestConcurrentScans — lock contention under burst; no corruption
|
|||
|
|
TestOBFLiveIntegration — OBF wired once; step_live called per scan
|
|||
|
|
TestLiveHZInjection — push real HZ event; measure listener latency
|
|||
|
|
"""
|
|||
|
|
|
|||
|
|
import json
|
|||
|
|
import sys
|
|||
|
|
import threading
|
|||
|
|
import time
|
|||
|
|
import unittest
|
|||
|
|
from collections import deque
|
|||
|
|
from datetime import datetime, timezone
|
|||
|
|
from typing import List
|
|||
|
|
from unittest.mock import MagicMock, patch
|
|||
|
|
|
|||
|
|
import numpy as np
|
|||
|
|
|
|||
|
|
sys.path.insert(0, '/mnt/dolphinng5_predict')
|
|||
|
|
sys.path.insert(0, '/mnt/dolphinng5_predict/prod')
|
|||
|
|
sys.path.insert(0, '/mnt/dolphinng5_predict/nautilus_dolphin')
|
|||
|
|
|
|||
|
|
import math
|
|||
|
|
|
|||
|
|
from nautilus_event_trader import (
|
|||
|
|
DolphinLiveTrader,
|
|||
|
|
ENGINE_KWARGS,
|
|||
|
|
VOL_P60_THRESHOLD,
|
|||
|
|
BTC_VOL_WINDOW,
|
|||
|
|
_STABLECOIN_SYMBOLS,
|
|||
|
|
)
|
|||
|
|
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
# Constants
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
|
|||
|
|
# Universe used by every synthetic scan (parallel to BASE_PRICES below).
ASSETS = ["BTCUSDT", "ETHUSDT", "BNBUSDT", "SOLUSDT", "XRPUSDT"]
# Reference prices, index-aligned with ASSETS.
BASE_PRICES = [84_230.5, 2_143.2, 612.4, 145.8, 2.41]
# Pulled from prod config so tests track the live engine, not a copy.
VEL_DIV_THRESHOLD = ENGINE_KWARGS['vel_div_threshold']  # -0.02
LOOKBACK = ENGINE_KWARGS['lookback']  # 100

# P99 budget targets (milliseconds)
BUDGET_ENGINE_P99 = 5.0
BUDGET_PRE_ENGINE_P99 = 20.0
BUDGET_E2E_P99 = 30.0
BUDGET_HZ_LISTENER_P95 = 500.0  # measured P95 ~301ms; HZ Python client async dispatch overhead
|
|||
|
|
|
|||
|
|
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
# Helpers
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
|
|||
|
|
def _make_scan(scan_number: int, vel_div: float,
|
|||
|
|
file_mtime: float | None = None,
|
|||
|
|
assets: list = ASSETS,
|
|||
|
|
prices: list = BASE_PRICES,
|
|||
|
|
v50: float = -0.025,
|
|||
|
|
v750: float = -0.005) -> dict:
|
|||
|
|
"""Build a scan dict matching the NG5/7 schema consumed by on_scan()."""
|
|||
|
|
ts = time.time()
|
|||
|
|
return {
|
|||
|
|
"scan_number": scan_number,
|
|||
|
|
"timestamp_ns": int(ts * 1e9),
|
|||
|
|
"timestamp_iso": datetime.now(timezone.utc).isoformat(),
|
|||
|
|
"schema_version": "5.0.0",
|
|||
|
|
"vel_div": vel_div,
|
|||
|
|
"w50_velocity": v50,
|
|||
|
|
"w750_velocity": v750,
|
|||
|
|
"instability_50": max(0.0, v50 - v750),
|
|||
|
|
"assets": list(assets),
|
|||
|
|
"asset_prices": list(prices),
|
|||
|
|
"asset_loadings": [1.0 / len(assets)] * len(assets),
|
|||
|
|
"file_mtime": file_mtime if file_mtime is not None else ts,
|
|||
|
|
"bridge_ts": datetime.now(timezone.utc).isoformat(),
|
|||
|
|
"data_quality_score": 1.0,
|
|||
|
|
}
|
|||
|
|
|
|||
|
|
|
|||
|
|
def _make_event(scan: dict) -> MagicMock:
|
|||
|
|
ev = MagicMock()
|
|||
|
|
ev.value = json.dumps(scan)
|
|||
|
|
return ev
|
|||
|
|
|
|||
|
|
|
|||
|
|
def _volatile_btc_prices(n: int = BTC_VOL_WINDOW + 5,
                         base: float = 84_230.0,
                         sigma: float = 300.0) -> list:
    """Seeded random-walk BTC series with std large enough to pass the vol gate."""
    rng = np.random.default_rng(42)
    series = [base]
    while len(series) < n:
        series.append(series[-1] + rng.normal(0, sigma))
    return series
|
|||
|
|
|
|||
|
|
|
|||
|
|
def _flat_btc_prices(n: int = BTC_VOL_WINDOW + 5,
                     base: float = 84_230.0) -> list:
    """Constant BTC price series, guaranteeing vol_ok evaluates to False."""
    return [base for _ in range(n)]
|
|||
|
|
|
|||
|
|
|
|||
|
|
def _build_trader(suppress_pushes: bool = True,
                  connect_hz: bool = False) -> DolphinLiveTrader:
    """Instantiate and build engine; stub HZ write calls.

    on_scan() is made synchronous for deterministic testing: the
    ThreadPoolExecutor dispatch is bypassed so assertions can be made
    immediately after the call without sleeping or draining the queue.
    Tests that specifically exercise threading call _process_scan directly.
    """
    trader = DolphinLiveTrader()
    trader._build_engine()

    # Keep posture cache hot → no live HZ read for posture
    trader.cached_posture = "APEX"
    trader.posture_cache_time = time.time() + 3600

    if suppress_pushes:
        trader._push_state = MagicMock()
        trader._save_capital = MagicMock()  # also suppress HZ capital checkpoint writes

    # Synchronous on_scan for deterministic test assertions:
    # invoke _process_scan inline with a receive-time of "now".
    process_scan = trader._process_scan

    def _sync_on_scan(ev):
        return process_scan(ev, time.time())

    trader.on_scan = _sync_on_scan

    if connect_hz:
        trader._connect_hz()

    return trader
|
|||
|
|
|
|||
|
|
|
|||
|
|
def _warmup(trader: DolphinLiveTrader,
            n_bars: int = LOOKBACK + 5,
            vol: str = "volatile") -> int:
    """
    Feed n_bars warm-up scans so engine reaches _bar_count >= LOOKBACK.
    Returns next available scan_number.
    """
    if vol == "volatile":
        series = _volatile_btc_prices()
    else:
        series = _flat_btc_prices()
    trader.btc_prices = deque(series, maxlen=BTC_VOL_WINDOW + 2)

    # Ensure begin_day has been called
    today = datetime.now(timezone.utc).strftime('%Y-%m-%d')
    with trader.eng_lock:
        trader.eng.begin_day(today, posture="APEX")
        trader.bar_idx = 0
        trader.current_day = today

    # Stub OBF so warmup is fast (wired later in live tests)
    trader.ob_assets = list(ASSETS)  # prevents _wire_obf from firing again
    trader.ob_eng = MagicMock()
    trader.ob_eng.step_live = MagicMock()
    trader.eng.set_ob_engine(MagicMock())

    first_mtime = time.time() - n_bars * 5
    for bar in range(n_bars):
        payload = _make_scan(
            scan_number=bar + 1,
            vel_div=-0.01,  # below threshold → no entry
            file_mtime=first_mtime + bar * 5,
        )
        trader.on_scan(_make_event(payload))

    return n_bars + 1
|
|||
|
|
|
|||
|
|
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
# Timing wrapper — installed without touching prod code
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
|
|||
|
|
class _TimingInterceptor:
|
|||
|
|
"""
|
|||
|
|
Wraps eng.step_bar to record per-call timing.
|
|||
|
|
Install before test run, uninstall after.
|
|||
|
|
"""
|
|||
|
|
def __init__(self, eng):
|
|||
|
|
self._eng = eng
|
|||
|
|
self._orig = eng.step_bar
|
|||
|
|
self.samples: List[float] = [] # milliseconds per step_bar call
|
|||
|
|
|
|||
|
|
def install(self):
|
|||
|
|
orig = self._orig
|
|||
|
|
samples = self.samples
|
|||
|
|
|
|||
|
|
def _timed(*args, **kwargs):
|
|||
|
|
t0 = time.perf_counter()
|
|||
|
|
result = orig(*args, **kwargs)
|
|||
|
|
samples.append((time.perf_counter() - t0) * 1_000)
|
|||
|
|
return result
|
|||
|
|
|
|||
|
|
self._eng.step_bar = _timed
|
|||
|
|
|
|||
|
|
def uninstall(self):
|
|||
|
|
self._eng.step_bar = self._orig
|
|||
|
|
|
|||
|
|
def percentile(self, pct: float) -> float:
|
|||
|
|
return float(np.percentile(self.samples, pct)) if self.samples else 0.0
|
|||
|
|
|
|||
|
|
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
# 1. Signal Firing Correctness
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
|
|||
|
|
class TestSignalFiringCorrectness(unittest.TestCase):
    """
    Verify signal fires iff vel_div < VEL_DIV_THRESHOLD (−0.02)
    and all upstream gates pass.
    """

    @classmethod
    def setUpClass(cls):
        # One trader shared across the class: warmup (LOOKBACK+5 scans) is
        # expensive, and tests only mutate engine position / btc_prices.
        cls.trader = _build_trader()
        cls.next_scan = _warmup(cls.trader)

    def _fire(self, vel_div: float, vol: str = "volatile") -> dict:
        """Fire one scan and return step_bar result."""
        # Refresh the volatile BTC window so vol_ok=True unless the caller
        # already forced a flat series (vol="flat" leaves btc_prices alone).
        if vol == "volatile":
            prices = list(_volatile_btc_prices())
            self.trader.btc_prices = deque(prices, maxlen=BTC_VOL_WINDOW + 2)

        scan = _make_scan(
            scan_number=self.next_scan,
            vel_div=vel_div,
            file_mtime=time.time(),
        )
        # Bump the CLASS attribute — plain `self.next_scan += 1` would only
        # shadow it on the instance and reuse scan numbers across tests.
        self.__class__.next_scan += 1

        # Wrap step_bar to capture its return value; restored in finally.
        results = []
        orig = self.trader.eng.step_bar

        def capture(*a, **kw):
            r = orig(*a, **kw)
            results.append(r)
            return r

        self.trader.eng.step_bar = capture
        try:
            self.trader.on_scan(_make_event(scan))
        finally:
            self.trader.eng.step_bar = orig

        # Empty dict when step_bar never ran (e.g. scan deduped upstream).
        return results[0] if results else {}

    def test_strong_signal_fires(self):
        """vel_div = −0.05 (extreme zone) → entry generated."""
        # Close any open position first
        with self.trader.eng_lock:
            self.trader.eng.position = None
        result = self._fire(-0.05)
        # Engine may or may not produce entry depending on ACB/OB state;
        # what we assert is that step_bar ran and returned a valid dict.
        self.assertIn('entry', result)
        self.assertIn('exit', result)

    def test_just_past_threshold_fires(self):
        """vel_div = −0.021 (1 bps past threshold) → entry attempted."""
        with self.trader.eng_lock:
            self.trader.eng.position = None
        result = self._fire(-0.021)
        self.assertIn('entry', result)

    def test_at_threshold_no_fire(self):
        """vel_div = −0.020 (exactly at threshold) → no entry (strict <)."""
        with self.trader.eng_lock:
            self.trader.eng.position = None
        result = self._fire(-0.020)
        self.assertIsNone(result.get('entry'),
                          "vel_div == threshold must NOT produce an entry (strict < check)")

    def test_above_threshold_no_fire(self):
        """vel_div = −0.01 → no entry."""
        with self.trader.eng_lock:
            self.trader.eng.position = None
        result = self._fire(-0.01)
        self.assertIsNone(result.get('entry'))

    def test_positive_vel_div_no_fire(self):
        """vel_div = +0.05 → no entry."""
        with self.trader.eng_lock:
            self.trader.eng.position = None
        result = self._fire(+0.05)
        self.assertIsNone(result.get('entry'))

    def test_no_entry_when_position_open(self):
        """If position is open, no new entry should fire."""
        fake_pos = MagicMock()  # duck-type: any truthy non-None value
        fake_pos.asset = "BTCUSDT"
        with self.trader.eng_lock:
            self.trader.eng.position = fake_pos
        result = self._fire(-0.05)
        self.assertIsNone(result.get('entry'),
                          "Engine must not open a second position while one is open")
        with self.trader.eng_lock:
            self.trader.eng.position = None  # restore

    def test_vol_gate_blocks_entry(self):
        """Flat BTC prices → vol_ok=False → no entry even with strong signal."""
        flat = _flat_btc_prices()
        self.trader.btc_prices = deque(flat, maxlen=BTC_VOL_WINDOW + 2)
        with self.trader.eng_lock:
            self.trader.eng.position = None
        # vol="flat" so _fire does not overwrite the flat series above.
        result = self._fire(-0.05, vol="flat")
        self.assertIsNone(result.get('entry'),
                          "vol_ok=False must suppress entry")

    def test_duplicate_mtime_skipped(self):
        """Same file_mtime → on_scan returns early; step_bar NOT called."""
        mtime = time.time()
        self.trader.last_file_mtime = mtime

        # Count step_bar invocations without altering its behavior.
        calls = []
        orig = self.trader.eng.step_bar
        self.trader.eng.step_bar = lambda *a, **kw: calls.append(1) or orig(*a, **kw)
        try:
            scan = _make_scan(self.next_scan, -0.05, file_mtime=mtime)
            self.trader.on_scan(_make_event(scan))
        finally:
            self.trader.eng.step_bar = orig

        self.assertEqual(len(calls), 0,
                         "Duplicate mtime must be dropped before reaching step_bar")
|
|||
|
|
|
|||
|
|
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
# 2. Signal-to-Fill Latency
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
|
|||
|
|
class TestSignalLatency(unittest.TestCase):
    """
    500-scan run over the EXACT prod path (DolphinLiveTrader.on_scan).
    Measures pre_engine, engine, post_engine, e2e latency distributions.
    Asserts P99 budget targets.
    """

    N_SCANS = 500
    WARMUP_N = LOOKBACK + 5

    @classmethod
    def setUpClass(cls):
        # The whole timed run happens once here; the test methods below only
        # assert on the collected distributions.
        cls.trader = _build_trader()
        cls.next_scan = _warmup(cls.trader)

        cls.pre_engine_ms: List[float] = []
        cls.engine_ms: List[float] = []
        cls.e2e_ms: List[float] = []

        # Interceptor wraps eng.step_bar; install AFTER warmup so warmup
        # bars do not pollute the timing samples.
        interceptor = _TimingInterceptor(cls.trader.eng)
        interceptor.install()
        cls._interceptor = interceptor

        # Volatile BTC for vol_ok=True throughout
        prices = _volatile_btc_prices()
        cls.trader.btc_prices = deque(prices, maxlen=BTC_VOL_WINDOW + 2)

        mtime_base = time.time()
        for i in range(cls.N_SCANS):
            # Every 20th scan carries a strong signal so the entry path is
            # exercised; the rest stay below threshold.
            vel_div = -0.04 if (i % 20 == 0) else -0.01

            scan = _make_scan(
                scan_number=cls.next_scan + i,
                vel_div=vel_div,
                file_mtime=mtime_base + i * 0.001,  # 1 ms apart → all distinct
            )
            ev = _make_event(scan)

            # Time full on_scan
            t0 = time.perf_counter()
            cls.trader.on_scan(ev)
            t1 = time.perf_counter()
            cls.e2e_ms.append((t1 - t0) * 1_000)

        interceptor.uninstall()
        cls.next_scan += cls.N_SCANS

    def test_engine_p99_within_budget(self):
        # step_bar compute-only latency, measured inside the interceptor.
        p99 = self._interceptor.percentile(99)
        self.assertLess(p99, BUDGET_ENGINE_P99,
                        f"engine step_bar P99={p99:.2f}ms exceeds budget {BUDGET_ENGINE_P99}ms")

    def test_e2e_p99_within_budget(self):
        # Full on_scan wall time: parse + gates + lock + engine + bookkeeping.
        p99 = float(np.percentile(self.e2e_ms, 99))
        self.assertLess(p99, BUDGET_E2E_P99,
                        f"on_scan E2E P99={p99:.2f}ms exceeds budget {BUDGET_E2E_P99}ms")

    def test_engine_p50_sub_millisecond(self):
        p50 = self._interceptor.percentile(50)
        self.assertLess(p50, 1.0,
                        f"engine step_bar P50={p50:.3f}ms should be sub-millisecond")

    def test_no_on_scan_exceptions(self):
        """All 500 scans processed without exceptions (scans_processed incremented)."""
        # Each valid (non-dup) scan increments scans_processed
        self.assertGreaterEqual(self.trader.scans_processed, self.N_SCANS)

    def test_engine_samples_count(self):
        """step_bar called exactly once per non-dup scan."""
        self.assertEqual(len(self._interceptor.samples), self.N_SCANS)

    def test_print_latency_report(self):
        """Print latency report (informational, always passes)."""
        engine_samples = self._interceptor.samples
        e2e = self.e2e_ms
        print(f"\n{'='*60}")
        print(f" Signal-to-Fill Latency Report ({self.N_SCANS} scans)")
        print(f"{'='*60}")
        print(f" step_bar():  P50={np.percentile(engine_samples,50):.3f}ms"
              f" P95={np.percentile(engine_samples,95):.3f}ms"
              f" P99={np.percentile(engine_samples,99):.3f}ms"
              f" max={max(engine_samples):.3f}ms")
        print(f" on_scan E2E: P50={np.percentile(e2e,50):.2f}ms"
              f" P95={np.percentile(e2e,95):.2f}ms"
              f" P99={np.percentile(e2e,99):.2f}ms"
              f" max={max(e2e):.2f}ms")
        print(f" budget: engine<{BUDGET_ENGINE_P99}ms"
              f" e2e<{BUDGET_E2E_P99}ms")
        print(f"{'='*60}")
        self.assertTrue(True)
|
|||
|
|
|
|||
|
|
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
# 3. bar_idx Integrity
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
|
|||
|
|
class TestBarIdxIntegrity(unittest.TestCase):
    """bar_idx advances by exactly one per accepted scan and resets to 0 on
    day rollover. A fresh trader is built per test because the assertions
    depend on exact counts."""

    def setUp(self):
        self.trader = _build_trader()
        self.next_scan = _warmup(self.trader)

    def test_bar_idx_increments_by_one_per_scan(self):
        """Each valid scan increments bar_idx by exactly 1."""
        start = self.trader.bar_idx
        n = 10
        mtime = time.time()
        for i in range(n):
            # Strictly increasing mtimes so none of the scans are deduped.
            scan = _make_scan(self.next_scan + i, -0.01,
                              file_mtime=mtime + i * 0.1)
            self.trader.on_scan(_make_event(scan))
        self.assertEqual(self.trader.bar_idx, start + n,
                         f"Expected bar_idx={start+n}, got {self.trader.bar_idx}")

    def test_duplicate_scan_does_not_increment_bar_idx(self):
        """Duplicate mtime → bar_idx unchanged."""
        mtime = time.time()
        self.trader.last_file_mtime = mtime
        before = self.trader.bar_idx
        scan = _make_scan(self.next_scan, -0.05, file_mtime=mtime)
        self.trader.on_scan(_make_event(scan))
        self.assertEqual(self.trader.bar_idx, before)

    def test_bar_idx_resets_on_day_rollover(self):
        """Day rollover resets bar_idx to 0."""
        # FIX: the previous version first computed tomorrow's date and
        # assigned it to current_day, then immediately overwrote it with
        # "1970-01-01" — the tomorrow computation (and its local
        # `import datetime as dt`) was dead code and has been removed.
        self.trader.current_day = "1970-01-01"  # force day mismatch → rollover
        scan = _make_scan(self.next_scan, -0.01, file_mtime=time.time() + 1000)
        self.trader.on_scan(_make_event(scan))
        self.assertEqual(self.trader.bar_idx, 1,
                         "bar_idx must be 1 (post-rollover reset to 0, then incremented by scan)")

    def test_bar_idx_passed_correctly_to_step_bar(self):
        """bar_idx passed to step_bar equals trader.bar_idx before the call."""
        received_bar_idx = []
        orig = self.trader.eng.step_bar

        def capturing(*args, **kwargs):
            # bar_idx may arrive as the first positional arg or as a kwarg.
            received_bar_idx.append(kwargs.get('bar_idx', args[0] if args else None))
            return orig(*args, **kwargs)

        self.trader.eng.step_bar = capturing
        try:
            before = self.trader.bar_idx
            scan = _make_scan(self.next_scan, -0.01, file_mtime=time.time() + 500)
            self.trader.on_scan(_make_event(scan))
        finally:
            # FIX: restore step_bar even if on_scan raises, so a failure
            # here cannot leak the wrapper into other tests.
            self.trader.eng.step_bar = orig

        self.assertEqual(len(received_bar_idx), 1)
        self.assertEqual(received_bar_idx[0], before,
                         f"step_bar received bar_idx={received_bar_idx[0]}, expected {before}")
|
|||
|
|
|
|||
|
|
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
# 4. Deduplication
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
|
|||
|
|
class TestDeduplication(unittest.TestCase):
    """file_mtime dedup guard: only strictly newer scans may reach step_bar."""

    def setUp(self):
        self.trader = _build_trader()
        _warmup(self.trader)

    def _count_step_bar_calls(self, events) -> int:
        """Replay events through on_scan; return how many reached step_bar."""
        hits = [0]
        real_step = self.trader.eng.step_bar

        def counting(*args, **kwargs):
            hits[0] += 1
            return real_step(*args, **kwargs)

        self.trader.eng.step_bar = counting
        for event in events:
            self.trader.on_scan(event)
        self.trader.eng.step_bar = real_step
        return hits[0]

    def test_same_mtime_rejected(self):
        stamp = time.time()
        first = _make_scan(999, -0.01, file_mtime=stamp)
        second = _make_scan(1000, -0.01, file_mtime=stamp)  # same mtime
        n_calls = self._count_step_bar_calls(
            [_make_event(first), _make_event(second)])
        self.assertEqual(n_calls, 1, "Second scan with same mtime must be dropped")

    def test_older_mtime_rejected(self):
        now = time.time()
        first = _make_scan(999, -0.01, file_mtime=now)
        second = _make_scan(1000, -0.01, file_mtime=now - 1.0)  # older
        n_calls = self._count_step_bar_calls(
            [_make_event(first), _make_event(second)])
        self.assertEqual(n_calls, 1, "Older mtime must be dropped")

    def test_newer_mtime_accepted(self):
        now = time.time()
        first = _make_scan(999, -0.01, file_mtime=now)
        second = _make_scan(1000, -0.01, file_mtime=now + 0.001)  # 1ms newer
        n_calls = self._count_step_bar_calls(
            [_make_event(first), _make_event(second)])
        self.assertEqual(n_calls, 2, "Newer mtime must be accepted")

    def test_100_sequential_scans_all_accepted(self):
        """100 scans with monotonically increasing mtime — all reach step_bar."""
        start = time.time()
        events = []
        for idx in range(100):
            events.append(_make_event(
                _make_scan(idx, -0.01, file_mtime=start + idx * 0.01)))
        self.assertEqual(self._count_step_bar_calls(events), 100)
|
|||
|
|
|
|||
|
|
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
# 5. Concurrent Scans (lock contention + race conditions)
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
|
|||
|
|
class TestConcurrentScans(unittest.TestCase):
    # Lock contention, exception safety, and incident-regression tests
    # (NaN capital poisoning, stablecoin picker block, capital checkpoint).

    def setUp(self):
        self.trader = _build_trader()
        _warmup(self.trader)

    def test_burst_10_threads_bar_idx_exact(self):
        """
        10 threads burst simultaneously via Barrier, each with a distinct mtime.

        Invariants verified:
          1. bar_idx increments by AT LEAST 1 (some scans processed)
          2. bar_idx increments by AT MOST N (no double-counting — TOCTOU guard)
          3. last_file_mtime ends up as the maximum mtime of any accepted scan
             (monotone — without _dedup_lock this could be overwritten with a
             lower value, allowing stale re-processing)
          4. No thread errors or crashes

        Why "exactly N" is NOT the correct assertion here:
        With strict Barrier burst and monotone dedup, threads execute in random
        scheduler order. If thread-9 (mtime=base+0.009) acquires _dedup_lock
        before thread-0 (mtime=base+0.000), thread-0 is correctly rejected
        (its mtime < last). The invariant that matters is no double-counting and
        correct monotone state — not that all N were accepted (which requires
        ascending execution order, unguaranteed with threads).

        The TOCTOU bug (pre-fix, no lock): thread-B could overwrite
        last_file_mtime with a LOWER value after thread-A set a higher one,
        allowing future stale scans to be incorrectly accepted. _dedup_lock
        makes check+set atomic, preserving the monotone invariant.
        """
        N = 10
        # Future-dated mtimes so none collide with warmup's dedup state.
        base_mtime = time.time() + 1000
        scans = [_make_scan(9000 + i, -0.01, file_mtime=base_mtime + i * 0.001)
                 for i in range(N)]
        barrier = threading.Barrier(N)
        errors = []

        def fire(ev):
            try:
                barrier.wait()
                self.trader.on_scan(ev)
            except Exception as e:
                errors.append(e)

        before = self.trader.bar_idx
        mtime_before = self.trader.last_file_mtime
        threads = [threading.Thread(target=fire, args=(_make_event(s),))
                   for s in scans]
        for t in threads: t.start()
        for t in threads: t.join(timeout=10)

        increment = self.trader.bar_idx - before
        max_scan_mtime = base_mtime + (N - 1) * 0.001

        self.assertEqual(errors, [], f"Thread errors: {errors}")

        # 1+2: at least one processed, never double-counted
        self.assertGreaterEqual(increment, 1,
                                "No scan was processed at all — total deadlock?")
        self.assertLessEqual(increment, N,
                             f"bar_idx over-incremented: {increment} > {N}. Double-counting race!")

        # 3: last_file_mtime must be monotonically increasing (key invariant)
        self.assertGreater(self.trader.last_file_mtime, mtime_before,
                           "last_file_mtime did not advance — no scan was accepted")
        self.assertLessEqual(self.trader.last_file_mtime, max_scan_mtime + 1e-6,
                             "last_file_mtime beyond max injected mtime — impossible")

    def test_concurrent_scans_no_engine_state_corruption(self):
        """
        50 rapid sequential scans (no sleep) — engine state must remain
        self-consistent: capital >= 0, no position with None asset.
        """
        base = time.time() + 2000
        for i in range(50):
            scan = _make_scan(8000 + i, -0.01, file_mtime=base + i * 0.0001)
            self.trader.on_scan(_make_event(scan))

        with self.trader.eng_lock:
            capital = getattr(self.trader.eng, 'capital', 25000.0)
            pos = getattr(self.trader.eng, 'position', None)

        self.assertGreater(capital, 0, "Capital must remain positive")
        if pos is not None:
            self.assertIsNotNone(getattr(pos, 'asset', None),
                                 "Open position must have a non-None asset")

    def test_lock_always_released_on_exception(self):
        """
        Even if step_bar raises, eng_lock must be released so
        subsequent scans can proceed.
        """
        orig = self.trader.eng.step_bar
        # Mutable cell: raise exactly on the first call, pass through after.
        explode_once = [True]

        def maybe_raise(*a, **kw):
            if explode_once[0]:
                explode_once[0] = False
                raise RuntimeError("injected test failure")
            return orig(*a, **kw)

        self.trader.eng.step_bar = maybe_raise
        base = time.time() + 3000

        # Fire 3 scans: first will raise, others must still run
        results = []
        for i in range(3):
            try:
                self.trader.on_scan(
                    _make_event(_make_scan(7000 + i, -0.01,
                                           file_mtime=base + i * 0.1)))
                results.append('ok')
            except Exception:
                results.append('exc')

        self.trader.eng.step_bar = orig

        # Lock must not be held after the exception
        acquired = self.trader.eng_lock.acquire(blocking=False)
        if acquired:
            self.trader.eng_lock.release()
        self.assertTrue(acquired, "eng_lock is stuck — not released after exception in on_scan")

        # Scans 2 and 3 must have reached step_bar
        self.assertGreaterEqual(results.count('ok'), 2,
                                "Scans after an exception must still process normally")

    def test_nan_vel_div_capital_stays_finite(self):
        """
        A scan with vel_div=NaN must not poison capital.
        Root cause of 2026-03-30 incident: NaN vel_div → NaN notional →
        capital += NaN → all subsequent trades broken.
        """
        trader = _build_trader()
        trader.eng.begin_day(datetime.now(timezone.utc).strftime('%Y-%m-%d'), posture='APEX')
        capital_before = trader.eng.capital

        base = time.time() + 9000
        # Inject NaN vel_div — must be clamped, not passed to engine
        nan_scan = _make_scan(9001, float('nan'), file_mtime=base)
        nan_scan['w50_velocity'] = float('nan')
        nan_scan['w750_velocity'] = float('nan')
        trader._process_scan(_make_event(nan_scan), base)

        with trader.eng_lock:
            capital_after = trader.eng.capital

        self.assertTrue(math.isfinite(capital_after),
                        f"Capital became non-finite ({capital_after}) after NaN vel_div scan — "
                        "NaN guard in _process_scan must clamp before step_bar")

    def test_stablecoin_never_selected_as_trade_asset(self):
        """
        Stablecoin symbols (USDCUSDT et al.) must be stripped from prices_dict
        before reaching the engine picker, even when present in scan data.
        Eigen algo retains them for correlation purity; the PICKER hard-blocks them.
        Root cause of 2026-03-30 incident: USDCUSDT shorted at 9x leverage.
        """
        trader = _build_trader()
        trader.eng.begin_day(datetime.now(timezone.utc).strftime('%Y-%m-%d'), posture='APEX')

        # Warmup so engine can fire entries
        trader.btc_prices = deque(_volatile_btc_prices(), maxlen=BTC_VOL_WINDOW + 2)
        base = time.time() + 10000
        for i in range(LOOKBACK + 5):
            trader._process_scan(_make_event(_make_scan(10000 + i, -0.005,
                                                        file_mtime=base + i * 0.001)), base)

        # Now inject a strong signal scan that includes stablecoins
        stable_assets = list(ASSETS) + ['USDCUSDT', 'BUSDUSDT', 'FDUSDUSDT']
        stable_prices = list(BASE_PRICES) + [1.0001, 0.9999, 1.0002]
        captured_prices_dicts = []
        orig_step = trader.eng.step_bar
        def capture_step(*a, **kw):
            # prices may arrive as a kwarg or as the third positional arg.
            captured_prices_dicts.append(kw.get('prices') or (a[2] if len(a) > 2 else {}))
            return orig_step(*a, **kw)
        trader.eng.step_bar = capture_step

        trigger = _make_scan(10999, -0.08, file_mtime=base + 99999,
                             assets=stable_assets, prices=stable_prices)
        trader._process_scan(_make_event(trigger), base + 99999)
        trader.eng.step_bar = orig_step

        if captured_prices_dicts:
            pd = captured_prices_dicts[-1]
            for sym in _STABLECOIN_SYMBOLS:
                self.assertNotIn(sym, pd,
                                 f"{sym} reached engine prices_dict — picker must hard-block stablecoins")

    def test_capital_checkpoint_save_restore(self):
        """Capital saved to HZ checkpoint must be recoverable on trader restart."""
        trader = _build_trader()
        trader.eng.begin_day(datetime.now(timezone.utc).strftime('%Y-%m-%d'), posture='APEX')
        trader.eng.capital = 31_500.00  # simulate P&L growth

        # Mock state_map for save
        saved = {}
        mock_map = MagicMock()
        mock_map.blocking.return_value.put = lambda k, v: saved.update({k: v})
        mock_map.blocking.return_value.get = lambda k: saved.get(k)
        trader.state_map = mock_map

        # Call real _save_capital (instance has it mocked in _build_trader)
        DolphinLiveTrader._save_capital(trader)
        self.assertIn('capital_checkpoint', saved, "checkpoint key not written")

        # New trader restores from checkpoint
        trader2 = _build_trader()
        trader2.eng.begin_day(datetime.now(timezone.utc).strftime('%Y-%m-%d'), posture='APEX')
        trader2.state_map = mock_map
        DolphinLiveTrader._restore_capital(trader2)

        self.assertAlmostEqual(trader2.eng.capital, 31_500.00, delta=0.01,
                               msg="Restored capital does not match saved checkpoint")
|
|||
|
|
|
|||
|
|
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
# 6. OBF Live Integration
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
|
|||
|
|
class TestOBFLiveIntegration(unittest.TestCase):
    """
    Verifies OBF (HZOBProvider) wiring and step_live cadence using
    the actual live HZ connection.

    NOTE: a single trader instance is shared by all tests in this class
    (built once in setUpClass); tests mutate its monkeypatched hooks but
    restore them before returning, so test order does not matter.
    """

    @classmethod
    def setUpClass(cls):
        # Build trader, suppress HZ state pushes
        cls.trader = _build_trader(suppress_pushes=True, connect_hz=False)

        # Warmup with mock OBF so warmup is fast
        cls.trader.ob_assets = list(ASSETS)
        cls.trader.ob_eng = MagicMock()
        cls.trader.ob_eng.step_live = MagicMock()
        cls.trader.eng.set_ob_engine(MagicMock())
        _warmup(cls.trader)

        # Reset OBF state — next scan will trigger real _wire_obf
        cls.trader.ob_assets = []
        cls.trader.ob_eng = None
        cls.trader.eng.set_ob_engine(None)

        # Track wiring calls: spy wraps the real _wire_obf, recording the
        # asset list it was handed before delegating.
        cls._wired_calls = []
        orig_wire = cls.trader._wire_obf

        def spy_wire(assets):
            cls._wired_calls.append(list(assets))
            orig_wire(assets)

        cls.trader._wire_obf = spy_wire

        # Fire first scan — triggers _wire_obf once
        scan = _make_scan(5000, -0.01, file_mtime=time.time() + 5000)
        cls.trader.on_scan(_make_event(scan))

        # Restore _wire_obf (spy no longer needed)
        cls.trader._wire_obf = orig_wire

    def test_obf_wired_on_first_scan(self):
        """_wire_obf called exactly once on first scan; BTCUSDT in asset list."""
        self.assertEqual(len(self._wired_calls), 1,
                         "OBF must be wired exactly once")
        self.assertIn('BTCUSDT', self._wired_calls[0],
                      "BTCUSDT must be in wired assets")

    def test_obf_not_wired_twice(self):
        """Second scan must NOT re-wire OBF (ob_assets already set)."""
        # ob_assets was populated by the first scan in setUpClass
        self.assertNotEqual(self.trader.ob_assets, [],
                            "ob_assets should be set after first scan")

        # Temporarily replace _wire_obf with a recorder; any call is a failure.
        extra_wires = []
        orig_wire = self.trader._wire_obf
        self.trader._wire_obf = lambda a: extra_wires.append(a)
        scan = _make_scan(5001, -0.01, file_mtime=time.time() + 6000)
        self.trader.on_scan(_make_event(scan))
        self.trader._wire_obf = orig_wire

        self.assertEqual(len(extra_wires), 0,
                         "_wire_obf must not be called again once ob_assets is populated")

    def test_step_live_called_per_scan(self):
        """ob_eng.step_live() called on every scan (provides fresh OBF data)."""
        if self.trader.ob_eng is None:
            self.skipTest("OBF not wired (HZ unavailable?)")

        # Spy records the bar_idx of each step_live call, then delegates.
        step_live_calls = []
        orig_sl = self.trader.ob_eng.step_live

        def spy_sl(assets, bar_idx):
            step_live_calls.append(bar_idx)
            return orig_sl(assets, bar_idx)

        self.trader.ob_eng.step_live = spy_sl
        base = time.time() + 7000
        for i in range(5):
            scan = _make_scan(5100 + i, -0.01, file_mtime=base + i * 0.1)
            self.trader.on_scan(_make_event(scan))
        self.trader.ob_eng.step_live = orig_sl

        self.assertEqual(len(step_live_calls), 5,
                         "step_live must be called once per scan")
        # bar_idx must be strictly monotonic across consecutive scans
        for j in range(1, len(step_live_calls)):
            self.assertGreater(step_live_calls[j], step_live_calls[j - 1],
                               "bar_idx passed to step_live must increase each scan")
|
|
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
# 7. Live HZ Injection (round-trip listener latency)
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
|
|||
|
|
class TestLiveHZInjection(unittest.TestCase):
    """
    Inject a synthetic scan directly into DOLPHIN_FEATURES['latest_eigen_scan']
    via HZ, then measure how long it takes the listener to fire on_scan().

    This tests the FULL signal path: HZ publish → event queue → listener
    thread → on_scan() → step_bar() → signal available.

    All tests are skipped when no Hazelcast cluster is reachable on
    127.0.0.1:5701.
    """

    @classmethod
    def setUpClass(cls):
        # Best-effort connect; tests skip (see setUp) when the cluster is down.
        try:
            import hazelcast
            cls.hz = hazelcast.HazelcastClient(
                cluster_name="dolphin",
                cluster_members=["127.0.0.1:5701"],
            )
            cls.features_map = cls.hz.get_map("DOLPHIN_FEATURES")
            cls.hz_available = True
        except Exception:
            cls.hz_available = False

    @classmethod
    def tearDownClass(cls):
        if getattr(cls, 'hz_available', False):
            try:
                cls.hz.shutdown()
            except Exception:
                pass

    def setUp(self):
        if not self.hz_available:
            self.skipTest("Hazelcast unavailable — skipping live injection tests")

    def _make_live_trader_with_listener(self):
        """Build trader, connect HZ, register listener."""
        trader = _build_trader(suppress_pushes=True)
        trader._connect_hz()

        # Warmup engine state
        today = datetime.now(timezone.utc).strftime('%Y-%m-%d')
        with trader.eng_lock:
            trader.eng.begin_day(today, posture="APEX")
            trader.bar_idx = 0
            trader.current_day = today

        # Suppress OBF wiring in listener path (live OBF tested separately)
        trader.ob_assets = list(ASSETS)
        trader.ob_eng = MagicMock()
        trader.ob_eng.step_live = MagicMock()
        trader.eng.set_ob_engine(MagicMock())

        # Warm vol buffer
        prices = _volatile_btc_prices()
        trader.btc_prices = deque(prices, maxlen=BTC_VOL_WINDOW + 2)

        # Advance past lookback barrier
        mtime_base = time.time() - (LOOKBACK + 10) * 5
        for i in range(LOOKBACK + 5):
            scan = _make_scan(i + 1, -0.01, file_mtime=mtime_base + i * 5)
            trader.on_scan(_make_event(scan))

        return trader

    def test_listener_fires_within_budget(self):
        """
        Inject scan to HZ, register listener, verify on_scan fires within
        BUDGET_HZ_LISTENER_P95 ms.
        """
        trader = self._make_live_trader_with_listener()
        fired = threading.Event()
        fire_times: List[float] = []

        orig_on_scan = trader.on_scan

        def instrumented_on_scan(event):
            fire_times.append(time.perf_counter())
            orig_on_scan(event)
            fired.set()

        trader.on_scan = instrumented_on_scan

        # Register listener on the trader's own HZ client
        listener_handle = trader.features_map.add_entry_listener(
            key='latest_eigen_scan',
            include_value=True,
            updated_func=instrumented_on_scan,
            added_func=instrumented_on_scan,
        )
        time.sleep(0.2)  # allow listener registration to propagate in HZ

        # Push via the same client — guarantees same-cluster observation
        push_scan_number = 999_001
        scan = _make_scan(push_scan_number, -0.03,
                          file_mtime=time.time() + 99_000)
        t_push = time.perf_counter()
        trader.features_map.put('latest_eigen_scan', json.dumps(scan))

        # Wait for listener to fire (up to 2 seconds)
        fired_ok = fired.wait(timeout=2.0)

        # Unregister listener
        trader.features_map.remove_entry_listener(listener_handle)
        if trader.hz_client:
            trader.hz_client.shutdown()

        self.assertTrue(fired_ok, "HZ listener did NOT fire within 2s of publish")

        if fire_times:
            latency_ms = (fire_times[0] - t_push) * 1_000
            print(f"\n HZ listener latency: {latency_ms:.2f} ms")
            self.assertLess(latency_ms, BUDGET_HZ_LISTENER_P95,
                            f"HZ publish→listener latency {latency_ms:.2f}ms > "
                            f"budget {BUDGET_HZ_LISTENER_P95}ms")

    def test_10_injections_latency_distribution(self):
        """
        Inject 10 scans sequentially, collect listener latencies.
        Report distribution; assert P95 within budget.
        """
        trader = self._make_live_trader_with_listener()
        latencies_ms: List[float] = []
        lock = threading.Lock()
        push_times: List[float] = []

        orig_on_scan = trader.on_scan
        call_count = [0]

        def instrumented(event):
            # Pair each fire with the push that caused it by arrival order.
            with lock:
                if call_count[0] < len(push_times):
                    latencies_ms.append(
                        (time.perf_counter() - push_times[call_count[0]]) * 1_000
                    )
                call_count[0] += 1
            orig_on_scan(event)

        listener_handle = trader.features_map.add_entry_listener(
            key='latest_eigen_scan',
            include_value=True,
            updated_func=instrumented,
            added_func=instrumented,
        )

        N = 10
        for i in range(N):
            scan = _make_scan(998_000 + i, -0.02 - i * 0.001,
                              file_mtime=time.time() + 98_000 + i)
            push_times.append(time.perf_counter())
            # FIX: push via the trader's own client (was self.features_map,
            # the class-level client) — matches the sibling test's rationale:
            # same-client publish guarantees same-cluster observation, so the
            # measured latency is listener latency, not cross-client skew.
            trader.features_map.put('latest_eigen_scan', json.dumps(scan))
            time.sleep(0.3)  # space injections

        time.sleep(1.0)  # drain last events
        trader.features_map.remove_entry_listener(listener_handle)
        if trader.hz_client:
            trader.hz_client.shutdown()

        self.assertGreaterEqual(len(latencies_ms), N // 2,
                                f"Expected ≥{N//2} listener fires, got {len(latencies_ms)}")

        if latencies_ms:
            p50 = float(np.percentile(latencies_ms, 50))
            p95 = float(np.percentile(latencies_ms, 95))
            print(f"\n HZ injection latency over {len(latencies_ms)} samples:")
            print(f" P50={p50:.1f}ms P95={p95:.1f}ms max={max(latencies_ms):.1f}ms")
            self.assertLess(p95, BUDGET_HZ_LISTENER_P95,
                            f"P95 HZ listener latency {p95:.1f}ms > budget {BUDGET_HZ_LISTENER_P95}ms")
|
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
# 8. Scan Data Integrity
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
|
|||
|
|
class TestScanDataIntegrity(unittest.TestCase):
    """
    Guards against silent data corruption at the scan→engine boundary.
    zip() truncation, null fields, and ordering bugs can trigger wrong trades
    without raising exceptions.
    """

    def setUp(self):
        self.trader = _build_trader()
        self.next_scan = _warmup(self.trader)

    def _step_bar_calls(self, events):
        """Feed events through on_scan and return how many reached step_bar."""
        calls = []
        orig = self.trader.eng.step_bar
        self.trader.eng.step_bar = lambda *a, **kw: calls.append(1) or orig(*a, **kw)
        # FIX: restore under finally — a raising on_scan previously leaked the
        # patched step_bar into the rest of the test body.
        try:
            for ev in events:
                self.trader.on_scan(ev)
        finally:
            self.trader.eng.step_bar = orig
        return len(calls)

    def _captured_prices(self, scan):
        """Process one scan and return the prices dict step_bar received."""
        received = {}
        orig = self.trader.eng.step_bar

        def cap(*a, **kw):
            received.update(kw.get('prices', {}))
            return orig(*a, **kw)

        self.trader.eng.step_bar = cap
        try:  # FIX: guaranteed restore (see _step_bar_calls)
            self.trader.on_scan(_make_event(scan))
        finally:
            self.trader.eng.step_bar = orig
        return received

    def test_assets_prices_length_mismatch_dropped(self):
        """zip() truncation = silent wrong prices → trade on garbage data. Must be dropped."""
        scan = _make_scan(self.next_scan, -0.05, file_mtime=time.time() + 10_000)
        scan['asset_prices'] = scan['asset_prices'][:-1]  # len mismatch
        self.assertEqual(self._step_bar_calls([_make_event(scan)]), 0,
                         "Length-mismatched scan must be dropped before step_bar")

    def test_all_assets_mapped_to_correct_prices(self):
        """asset[i] must map to price[i] — no rotation or shuffle."""
        scan = _make_scan(self.next_scan + 1, -0.01, file_mtime=time.time() + 11_000,
                          assets=ASSETS, prices=BASE_PRICES)
        received = self._captured_prices(scan)
        for asset, price in zip(ASSETS, BASE_PRICES):
            self.assertAlmostEqual(received.get(asset, -1), price, places=2,
                                   msg=f"{asset} price mapping incorrect")

    def test_btcusdt_present_in_prices_dict(self):
        """BTCUSDT must reach step_bar — vol gate and engine both depend on it."""
        scan = _make_scan(self.next_scan + 2, -0.01, file_mtime=time.time() + 12_000)
        received = self._captured_prices(scan)
        self.assertIn('BTCUSDT', received)

    def test_null_assets_does_not_crash(self):
        """assets: null must not raise — or [] guard converts to empty, engine handles gracefully."""
        scan = _make_scan(self.next_scan + 3, -0.05, file_mtime=time.time() + 13_000)
        scan['assets'] = None
        scan['asset_prices'] = None
        try:
            self.trader.on_scan(_make_event(scan))
        except Exception as e:
            self.fail(f"Null assets crashed on_scan: {e}")

    def test_missing_vel_div_defaults_to_zero(self):
        """Missing vel_div key → 0.0 (no entry triggered, no crash)."""
        vd_seen = []
        orig = self.trader.eng.step_bar

        def cap(*a, **kw):
            vd_seen.append(kw.get('vel_div'))
            return orig(*a, **kw)

        self.trader.eng.step_bar = cap
        try:  # FIX: guaranteed restore (see _step_bar_calls)
            scan = _make_scan(self.next_scan + 4, -0.01, file_mtime=time.time() + 14_000)
            del scan['vel_div']
            self.trader.on_scan(_make_event(scan))
        finally:
            self.trader.eng.step_bar = orig
        self.assertEqual(len(vd_seen), 1)
        self.assertEqual(vd_seen[0], 0.0)

    def test_extended_universe_all_prices_preserved(self):
        """Extra assets beyond base universe must all reach step_bar."""
        extra_assets = ASSETS + ["AVAXUSDT", "DOGEUSDT"]
        extra_prices = BASE_PRICES + [28.5, 0.18]
        scan = _make_scan(self.next_scan + 5, -0.01, file_mtime=time.time() + 15_000,
                          assets=extra_assets, prices=extra_prices)
        received = self._captured_prices(scan)
        for a in ["AVAXUSDT", "DOGEUSDT"]:
            self.assertIn(a, received, f"{a} missing from prices_dict — universe truncated")

    def test_prices_with_zero_not_dropped(self):
        """A price of 0.0 is suspicious but must not be silently dropped by `or []` guards."""
        prices_with_zero = list(BASE_PRICES)
        prices_with_zero[1] = 0.0  # ETHUSDT = 0
        scan = _make_scan(self.next_scan + 6, -0.01, file_mtime=time.time() + 16_000,
                          assets=ASSETS, prices=prices_with_zero)
        received = self._captured_prices(scan)
        self.assertIn('ETHUSDT', received)
        self.assertEqual(received['ETHUSDT'], 0.0)
|||
|
|
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
# 9. Rollover Safety
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
|
|||
|
|
class TestRolloverSafety(unittest.TestCase):
    """
    begin_day() must fire EXACTLY once at day boundary even under concurrent load.
    Double-begin_day resets bar_idx mid-day → silent desync from backtest.
    """

    def test_concurrent_rollover_begin_day_called_once(self):
        # N threads hit _rollover_day simultaneously via a barrier; exactly one
        # must win and invoke begin_day.
        trader = _build_trader()
        trader.current_day = "1970-01-01"  # stale day forces a rollover

        calls = []
        orig = trader.eng.begin_day
        # append(1) returns None, so the lambda falls through to the real call
        trader.eng.begin_day = lambda *a, **kw: calls.append(1) or orig(*a, **kw)

        N = 20
        barrier = threading.Barrier(N)
        errors = []

        def go():
            try:
                barrier.wait()
                trader._rollover_day()
            except Exception as e:
                errors.append(e)

        threads = [threading.Thread(target=go) for _ in range(N)]
        for t in threads: t.start()
        for t in threads: t.join(timeout=5)
        trader.eng.begin_day = orig

        self.assertEqual(errors, [])
        self.assertEqual(len(calls), 1,
                         f"begin_day called {len(calls)}× — double rollover resets bar_idx mid-day")

    def test_current_day_updated_inside_lock(self):
        """
        current_day must be set inside eng_lock so a second thread sees the
        updated value before it can acquire the lock and re-call begin_day.
        """
        trader = _build_trader()
        trader.current_day = "1970-01-01"

        current_day_during_begin = []
        orig = trader.eng.begin_day

        # Slow begin_day widens the race window: t2 starts only after t1 is
        # done, so any t2 begin_day call proves it saw a stale current_day.
        def slow_begin(*a, **kw):
            current_day_during_begin.append(trader.current_day)
            time.sleep(0.02)
            return orig(*a, **kw)

        trader.eng.begin_day = slow_begin
        t1_done = threading.Event()
        t2_calls = []

        def t1():
            trader._rollover_day()
            t1_done.set()

        def t2():
            t1_done.wait()
            # Re-wrap with a recorder so only t2's begin_day calls are counted
            orig2 = trader.eng.begin_day
            trader.eng.begin_day = lambda *a, **kw: t2_calls.append(1) or orig2(*a, **kw)
            trader._rollover_day()

        th1 = threading.Thread(target=t1)
        th2 = threading.Thread(target=t2)
        th1.start(); th2.start()
        th1.join(5); th2.join(5)
        trader.eng.begin_day = orig

        self.assertEqual(len(t2_calls), 0,
                         "current_day set outside lock lets t2 see stale value and re-call begin_day")

    def test_bar_idx_is_zero_after_rollover_before_first_scan(self):
        # After a rollover, bar_idx starts at 0 until the first scan arrives.
        trader = _build_trader()
        trader.current_day = "1970-01-01"
        trader._rollover_day()
        self.assertEqual(trader.bar_idx, 0)

    def test_rollover_is_idempotent(self):
        """Calling _rollover_day twice on same day must not call begin_day twice."""
        trader = _build_trader()
        calls = []
        orig = trader.eng.begin_day
        trader.eng.begin_day = lambda *a, **kw: calls.append(1) or orig(*a, **kw)
        trader.current_day = "1970-01-01"
        trader._rollover_day()
        trader._rollover_day()  # same day now
        trader.eng.begin_day = orig
        self.assertEqual(len(calls), 1)
|
|||
|
|
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
# 10. Backtest Parity
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
|
|||
|
|
class TestBacktestParity(unittest.TestCase):
    """
    Every ENGINE_KWARGS value is an assertion against the gold-spec backtest.
    Any drift here is silent algorithmic deviation — the live trader is no
    longer running the strategy that was validated.
    """

    # --- entry-signal thresholds ---------------------------------------
    def test_vel_div_threshold(self):
        self.assertEqual(ENGINE_KWARGS['vel_div_threshold'], -0.02)

    def test_vel_div_extreme(self):
        self.assertEqual(ENGINE_KWARGS['vel_div_extreme'], -0.05)

    # --- leverage / sizing / exits -------------------------------------
    def test_max_leverage_gold_spec(self):
        self.assertEqual(ENGINE_KWARGS['max_leverage'], 8.0)

    def test_min_leverage(self):
        self.assertEqual(ENGINE_KWARGS['min_leverage'], 0.5)

    def test_max_hold_bars_gold_spec(self):
        self.assertEqual(ENGINE_KWARGS['max_hold_bars'], 250)

    def test_tp_pct_gold_spec(self):
        self.assertEqual(ENGINE_KWARGS['fixed_tp_pct'], 0.0095)

    def test_fraction_gold_spec(self):
        self.assertEqual(ENGINE_KWARGS['fraction'], 0.20)

    def test_vol_threshold_5yr_calibration(self):
        # Calibrated against the 5-year dataset; see VOL_P60_THRESHOLD source.
        self.assertAlmostEqual(VOL_P60_THRESHOLD, 0.00026414, places=8)

    # --- filters and cost model ----------------------------------------
    def test_direction_confirm_params(self):
        self.assertTrue(ENGINE_KWARGS['use_direction_confirm'])
        self.assertEqual(ENGINE_KWARGS['dc_lookback_bars'], 7)
        self.assertAlmostEqual(ENGINE_KWARGS['dc_min_magnitude_bps'], 0.75)
        self.assertTrue(ENGINE_KWARGS['dc_skip_contradicts'])

    def test_fees_and_slippage_on(self):
        self.assertTrue(ENGINE_KWARGS['use_sp_fees'])
        self.assertTrue(ENGINE_KWARGS['use_sp_slippage'])

    def test_maker_rates(self):
        self.assertAlmostEqual(ENGINE_KWARGS['sp_maker_entry_rate'], 0.62)
        self.assertAlmostEqual(ENGINE_KWARGS['sp_maker_exit_rate'], 0.50)

    def test_ob_edge_params(self):
        self.assertTrue(ENGINE_KWARGS['use_ob_edge'])
        self.assertAlmostEqual(ENGINE_KWARGS['ob_edge_bps'], 5.0)
        self.assertAlmostEqual(ENGINE_KWARGS['ob_confirm_rate'], 0.40)

    def test_irp_filter_disabled(self):
        # 0.0 means the IRP alignment filter is entirely disabled.
        self.assertEqual(ENGINE_KWARGS['min_irp_alignment'], 0.0)

    def test_lookback(self):
        self.assertEqual(ENGINE_KWARGS['lookback'], 100)

    def test_alpha_layers_and_dynamic_leverage_on(self):
        self.assertTrue(ENGINE_KWARGS['use_alpha_layers'])
        self.assertTrue(ENGINE_KWARGS['use_dynamic_leverage'])

    def test_asset_selection_on(self):
        self.assertTrue(ENGINE_KWARGS['use_asset_selection'])

    def test_bar_idx_resets_on_rollover_documented_deviation(self):
        """
        Live resets bar_idx=0 per day; backtest uses continuous bar_idx.
        This is a known, accepted deviation — documented here so it's explicit.
        Any change to this behaviour must be intentional.
        """
        trader = _build_trader()
        today = datetime.now(timezone.utc).strftime('%Y-%m-%d')
        with trader.eng_lock:
            trader.eng.begin_day(today, posture="APEX")
            trader.bar_idx = 50
            trader.current_day = today
        # Force a stale day: the next scan triggers rollover (bar_idx → 0),
        # then processing that scan advances it to 1.
        trader.current_day = "1970-01-01"
        trader.on_scan(_make_event(_make_scan(1, -0.01, file_mtime=time.time() + 99_000)))
        self.assertEqual(trader.bar_idx, 1,
                         "bar_idx resets to 0 on rollover — known live/backtest deviation")
|
|||
|
|
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
# 11. Slippage & Missed-Trade Cost Model
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
|
|||
|
|
class TestSlippageCostModel(unittest.TestCase):
    """
    Quantifies — not prevents — the financial cost of latency failures.

    Gold spec: T=2155, ROI=+189.48%, DD=21.31%, 5yr, capital=$25k, leverage=8x.
    All monetary assertions use delta tolerances so a round-number change
    in spec parameters breaks loudly.
    """

    # Spec parameters pulled from ENGINE_KWARGS at class-definition time.
    C = ENGINE_KWARGS['initial_capital']  # 25_000
    LEV = ENGINE_KWARGS['max_leverage']   # 8.0
    TP = ENGINE_KWARGS['fixed_tp_pct']    # 0.0095
    T = 2155        # total trades in the 5-yr gold backtest
    ROI = 1.8948    # +189.48% over 5 yr
    BAR_S = 5       # bar cadence in seconds

    @property
    def max_pos(self): return self.C * self.LEV  # $200k
    @property
    def max_gross(self): return self.max_pos * self.TP  # $1,900
    @property
    def avg_net(self): return (self.C * self.ROI) / self.T

    def test_max_position_size(self):
        self.assertAlmostEqual(self.max_pos, 200_000, delta=1)

    def test_max_gross_profit_per_trade(self):
        self.assertAlmostEqual(self.max_gross, 1_900, delta=1)
        print(f"\n Max gross profit/trade: ${self.max_gross:,.0f}")

    def test_avg_net_profit_per_trade_positive(self):
        self.assertGreater(self.avg_net, 0,
                           "Negative avg net = no edge. Backtest ROI or trade count changed?")
        print(f" Avg net profit/trade (5yr gold): ${self.avg_net:,.2f}")

    def test_cost_of_1bar_delayed_entry(self):
        """
        1-bar (5s) late entry: BTC moves against position.
        Typical = 10bps × $200k = $200. Volatile = 50bps × $200k = $1,000.
        Both must be < max_gross (otherwise latency budget breaks strategy viability).
        """
        typical = self.max_pos * 0.0010   # 10bps
        volatile = self.max_pos * 0.0050  # 50bps
        print(f" 1-bar delay cost: typical=${typical:,.0f} volatile=${volatile:,.0f}")
        self.assertLess(volatile, self.max_gross,
                        "Worst-case 1-bar delay exceeds full TP — latency budget is strategy-critical")

    def test_cost_of_missed_entry_upper_bound(self):
        """Upper bound on missed entry = max_gross. Expected ≈ avg_net."""
        print(f" Missed entry cost: upper=${self.max_gross:,.0f} "
              f"expected≈${self.avg_net:,.2f}")
        self.assertGreater(self.max_gross, self.avg_net)

    def test_max_drawdown_gold_spec(self):
        """DD=21.31% → $5,327 max observed drawdown over 5yr."""
        max_dd = self.C * 0.2131
        max_hold_s = ENGINE_KWARGS['max_hold_bars'] * self.BAR_S  # 250 × 5s = 1250s
        print(f" Max drawdown (gold): ${max_dd:,.0f} | "
              f"Max hold: {max_hold_s}s ({max_hold_s/60:.1f}min)")
        self.assertAlmostEqual(max_dd, 5327.50, delta=100)
        self.assertLessEqual(max_hold_s, 1300)

    def test_1ms_latency_cost_per_trade(self):
        """
        Cost of 1ms additional latency per trade (entry slip only).
        Avg hold ~50 bars; 1ms → BTC move ≈ 0.002bps; 50 bars × 0.002bps × $200k = $2/trade.
        Over T=2155 trades: $4,310 total — real but not strategy-killing.
        The 30ms E2E budget already ensures we're well inside this.
        """
        avg_hold_bars = 50
        btc_move_per_ms_bps = 0.002 / 100  # 0.002bps expressed as fraction
        cost_per_trade = self.max_pos * btc_move_per_ms_bps * avg_hold_bars
        total_5yr = cost_per_trade * self.T
        print(f" 1ms/trade latency cost: ${cost_per_trade:.2f}/trade "
              f"${total_5yr:,.0f} over {self.T} trades")
        # Informational — we assert it's positive and bounded below max_gross
        self.assertGreater(cost_per_trade, 0)
        self.assertLess(cost_per_trade, self.max_gross)

    def test_stablecoin_trade_financial_loss(self):
        """
        Shorting a stablecoin (e.g. USDCUSDT ≈ $1.000) produces near-zero
        gross P&L regardless of leverage — it cannot meaningfully contribute to
        ROI and adds unnecessary fee drag. Any stablecoin trade at gold-spec
        9x abs_max leverage would cost at least one round-trip fee.
        Picker hard-block prevents this entire class of loss.
        """
        # NOTE(review): stable_price is unused by the computation below —
        # kept as documentation of a realistic USDC perp price.
        stable_price = 1.0001  # realistic USDC perp price
        max_lev = 9.0  # abs_max_leverage
        # 25_000 × 0.20 × 9 = $45,000 (previous inline comment said $22,500,
        # which does not match fraction=0.20 asserted by TestBacktestParity)
        notional = self.C * ENGINE_KWARGS['fraction'] * max_lev
        round_trip_fee = notional * 0.0004 * 2  # 0.04% each side
        max_move_bps = 1.0  # stablecoin max realistic move
        gross = notional * (max_move_bps / 10_000)

        print(f"\n Stablecoin short: notional=${notional:,.0f} "
              f"max_gross=${gross:.2f} fee_drag=${round_trip_fee:.2f}")

        # The fee drag exceeds any realistic gross profit on a stablecoin
        self.assertGreater(round_trip_fee, gross,
                           "Fee drag on stablecoin trade exceeds gross P&L — "
                           "confirms picker hard-block is financially correct, not just cosmetic")
|||
|
|
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
# Runner
|
|||
|
|
# ---------------------------------------------------------------------------
|
|||
|
|
|
|||
|
|
# Standard unittest entry point; verbosity=2 prints one line per test.
if __name__ == '__main__':
    unittest.main(verbosity=2)
|