initial: import DOLPHIN baseline 2026-04-21 from dolphinng5_predict working tree
Includes core prod + GREEN/BLUE subsystems: prod/ (BLUE harness, configs, scripts, docs); nautilus_dolphin/ (GREEN Nautilus-native implementation, with dvae/ preserved); adaptive_exit/ (AEM engine + models/bucket_assignments.pkl); Observability/ (EsoF advisor, TUI, dashboards); external_factors/ (EsoF producer); mc_forewarning_qlabs_fork/ (MC regime/envelope). Excludes runtime caches, logs, backups, and reproducible artifacts per .gitignore.
This commit is contained in:
4
Observability/TUI/_check_textual.py
Executable file
4
Observability/TUI/_check_textual.py
Executable file
@@ -0,0 +1,4 @@
|
||||
# Quick sanity check of the installed Textual package: prints its version and
# the public (capitalized) names exported by the widgets/containers modules.
import textual, textual.widgets as w, textual.containers as c
print("version:", textual.__version__)
# Capitalized attributes are the exported widget/container classes.
print("widgets:", sorted([x for x in dir(w) if x[0].isupper()]))
print("containers:", sorted([x for x in dir(c) if x[0].isupper()]))
|
||||
8
Observability/TUI/_find_textual.py
Executable file
8
Observability/TUI/_find_textual.py
Executable file
@@ -0,0 +1,8 @@
|
||||
# Diagnostic script: report which interpreter is running, which Textual
# installation it resolves, and the widget/container classes that install exports.
import sys, textual, textual.widgets as w
print("python:", sys.executable)
print("version:", textual.__version__)
print("location:", textual.__file__)
# Capitalized attributes of textual.widgets are the exported widget classes.
widgets = sorted([x for x in dir(w) if x[0].isupper()])
print("widgets:", widgets)
import textual.containers as c
print("containers:", sorted([x for x in dir(c) if x[0].isupper()]))
|
||||
4
Observability/TUI/_widgets_check.py
Executable file
4
Observability/TUI/_widgets_check.py
Executable file
@@ -0,0 +1,4 @@
|
||||
# Quick check of the active Textual install: version plus exported
# widget/container class names (capitalized attributes).
import textual.widgets as w, textual.containers as c
print("textual version:", __import__("textual").__version__)
print("WIDGETS:", sorted([x for x in dir(w) if x[0].isupper()]))
print("CONTAINERS:", sorted([x for x in dir(c) if x[0].isupper()]))
|
||||
2654
Observability/TUI/dolphin_tui.py
Executable file
2654
Observability/TUI/dolphin_tui.py
Executable file
File diff suppressed because it is too large
Load Diff
372
Observability/TUI/dolphin_tui_v2.py
Executable file
372
Observability/TUI/dolphin_tui_v2.py
Executable file
@@ -0,0 +1,372 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
DOLPHIN TUI v2 — full layout, mock data, sexy MC-Forewarner footer.
|
||||
Run: python3 dolphin_tui_v2.py
|
||||
q=quit r=refresh l=toggle log
|
||||
"""
|
||||
import time
|
||||
import math
|
||||
from collections import deque
|
||||
from textual.app import App, ComposeResult
|
||||
from textual.widgets import Static, ProgressBar, Sparkline, Digits, Rule
|
||||
from textual.containers import Horizontal, Vertical
|
||||
|
||||
# ── CSS ───────────────────────────────────────────────────────────────────────
|
||||
CSS = """
|
||||
Screen { background: #0d0d0d; color: #d0d0d0; }
|
||||
|
||||
#header { height: 2; background: #111; border: solid #333; padding: 0 1; }
|
||||
#trader_row { height: 5; }
|
||||
#top_row { height: 9; }
|
||||
#mid_row { height: 9; }
|
||||
#bot_row { height: 7; }
|
||||
#log_row { height: 5; display: none; }
|
||||
|
||||
/* MC Footer */
|
||||
#mc_footer_outer {
|
||||
height: 7;
|
||||
border: solid #336;
|
||||
background: #080818;
|
||||
}
|
||||
#mc_title { height: 1; padding: 0 1; }
|
||||
#mc_body { height: 6; }
|
||||
#mc_left { width: 18; padding: 0 1; }
|
||||
#mc_center { width: 1fr; padding: 0 1; }
|
||||
#mc_right { width: 30; padding: 0 1; }
|
||||
|
||||
#mc_prob_label { height: 1; }
|
||||
#mc_prob_bar { height: 1; }
|
||||
#mc_env_label { height: 1; }
|
||||
#mc_env_bar { height: 1; }
|
||||
#mc_spark_label { height: 1; }
|
||||
#mc_sparkline { height: 2; }
|
||||
|
||||
#mc_digits { height: 3; }
|
||||
#mc_status_text { height: 2; }
|
||||
|
||||
ProgressBar > .bar--bar { color: $success; }
|
||||
ProgressBar > .bar--complete { color: $success; }
|
||||
ProgressBar.-danger > .bar--bar { color: $error; }
|
||||
ProgressBar.-warning > .bar--bar { color: $warning; }
|
||||
|
||||
Static.panel {
|
||||
border: solid #3a3a3a;
|
||||
padding: 0 1;
|
||||
height: 100%;
|
||||
}
|
||||
#panel_trader { width: 1fr; border: solid #00aa88; }
|
||||
#panel_health { width: 1fr; }
|
||||
#panel_alpha { width: 1fr; }
|
||||
#panel_scan { width: 1fr; }
|
||||
#panel_extf { width: 1fr; }
|
||||
#panel_esof { width: 1fr; }
|
||||
#panel_capital { width: 1fr; }
|
||||
#panel_prefect { width: 1fr; }
|
||||
#panel_obf { width: 1fr; }
|
||||
#panel_log { width: 1fr; border: solid #444; padding: 0 1; }
|
||||
"""
|
||||
|
||||
# ── Helpers ───────────────────────────────────────────────────────────────────
|
||||
|
||||
def prefect_dot(status: str, blink_frame: bool) -> str:
    """Return a Rich-markup status dot for a Prefect flow-run state.

    RUNNING alternates between two glyphs depending on *blink_frame* so the
    dot appears to blink once per tick; every other known state maps to a
    fixed colored dot, and anything unrecognized renders dim.
    """
    normalized = status.upper()
    if normalized == "RUNNING":
        return "[cyan]◉[/cyan]" if blink_frame else "[dim]◌[/dim]"
    fixed = {
        "COMPLETED": "[green]●[/green]",
        "FAILED": "[red]●[/red]",
        "CRASHED": "[red]●[/red]",
        "LATE": "[dark_orange]●[/dark_orange]",
        "PENDING": "[yellow]●[/yellow]",
    }
    return fixed.get(normalized, "[dim]●[/dim]")
|
||||
|
||||
# Mock Prefect flow runs shown in the PREFECT panel:
# (flow name, last run state, time since last run).
MOCK_FLOWS = [
    ("paper_trade_flow", "COMPLETED", "2m"),
    ("nautilus_prefect", "COMPLETED", "8m"),
    ("obf_prefect_flow", "RUNNING", "0m"),
    ("exf_fetcher_flow", "COMPLETED", "15m"),
    ("mc_forewarner_flow", "RUNNING", "3m"),
]
|
||||
|
||||
# Mock open positions: (symbol, side, quantity, entry price, current price).
MOCK_POSITIONS = [
    ("BTCUSDT", "SHORT", 0.01, 83420.5, 83278.2),
    ("ETHUSDT", "SHORT", 0.10, 1612.3, 1598.7),
]
|
||||
|
||||
def mock_open_positions(n: int) -> list:
    """Cycle mock open positions through three 20-tick phases.

    Phase 0: no positions.  Phase 1: the first MOCK_POSITIONS entry with its
    current price drifting down each tick.  Phase 2: both entries drifting at
    a slower rate.  The drift resets every 10 ticks.
    """
    phase = (n // 20) % 3
    if phase == 0:
        return []
    drift = n % 10
    if phase == 1:
        sym, side, qty, entry, cur = MOCK_POSITIONS[0]
        return [(sym, side, qty, entry, cur - drift * 2.1)]
    return [(sym, side, qty, entry, cur - drift * 1.5)
            for sym, side, qty, entry, cur in MOCK_POSITIONS]
|
||||
|
||||
def mc_mock(n: int) -> dict:
    """Synthesize a mock MC-Forewarner payload for tick *n*.

    Mirrors the real schema of DOLPHIN_FEATURES['mc_forewarner_latest'].
    Probability and envelope oscillate sinusoidally with the tick counter,
    clamped to [0, 1].  Status thresholds: GREEN < 0.10, ORANGE < 0.30,
    RED >= 0.30.  The timestamp is the current UTC wall-clock time.
    """
    phase = n * 0.05
    prob = min(1.0, max(0.0, 0.12 + 0.10 * math.sin(phase)))
    env = min(1.0, max(0.0, 0.82 - 0.08 * abs(math.sin(phase * 1.3))))
    if prob < 0.10:
        status = "GREEN"
    elif prob < 0.30:
        status = "ORANGE"
    else:
        status = "RED"
    return {
        "status": status,
        "catastrophic_prob": prob,
        "envelope_score": env,
        "source": "MOCK",
        "timestamp": time.strftime("%H:%M:%SZ", time.gmtime()),
    }
|
||||
|
||||
# ── App ───────────────────────────────────────────────────────────────────────
|
||||
|
||||
class DolphinTUI(App):
    """Mock DOLPHIN dashboard (v2): full layout driven entirely by synthetic
    data, refreshed once per second.  Key bindings: q=quit, r=force refresh,
    l=toggle the log row at the bottom."""

    CSS = CSS
    BINDINGS = [("q","quit","Quit"),("r","refresh","Refresh"),("l","toggle_log","Log")]

    _log_visible = False          # mirrors the display state of #log_row
    _tick_n = 0                   # monotonically increasing update counter
    _prob_history: deque = None   # rolling catastrophic_prob samples; created in on_mount

    def compose(self) -> ComposeResult:
        """Build the static widget tree; all content is filled in by _update()."""
        yield Static("", id="header")

        with Horizontal(id="trader_row"):
            yield Static("", classes="panel", id="panel_trader")

        with Horizontal(id="top_row"):
            yield Static("", classes="panel", id="panel_health")
            yield Static("", classes="panel", id="panel_alpha")
            yield Static("", classes="panel", id="panel_scan")

        with Horizontal(id="mid_row"):
            yield Static("", classes="panel", id="panel_extf")
            yield Static("", classes="panel", id="panel_esof")
            yield Static("", classes="panel", id="panel_capital")

        with Horizontal(id="bot_row"):
            yield Static("", classes="panel", id="panel_prefect")
            yield Static("", classes="panel", id="panel_obf")

        # ── MC-Forewarner footer ──────────────────────────────────────────────
        with Vertical(id="mc_footer_outer"):
            yield Static("", id="mc_title")
            with Horizontal(id="mc_body"):
                # Left: big probability digits
                with Vertical(id="mc_left"):
                    yield Digits("0.00", id="mc_digits")
                    yield Static("", id="mc_status_text")
                # Center: progress bars + sparkline
                with Vertical(id="mc_center"):
                    yield Static("", id="mc_prob_label")
                    yield ProgressBar(total=100, show_eta=False, show_percentage=False,
                                      id="mc_prob_bar")
                    yield Static("", id="mc_env_label")
                    yield ProgressBar(total=100, show_eta=False, show_percentage=False,
                                      id="mc_env_bar")
                    yield Static("", id="mc_spark_label")
                    yield Sparkline([], id="mc_sparkline")
                # Right: threshold legend + source
                with Vertical(id="mc_right"):
                    yield Static("", id="mc_legend")

        with Horizontal(id="log_row"):
            yield Static("", classes="panel", id="panel_log")

    def on_mount(self) -> None:
        """Seed the probability history and start the 1 Hz refresh timer."""
        self._prob_history = deque([0.12] * 40, maxlen=40)
        self.set_interval(1, self._update)
        self._update()

    def _update(self) -> None:
        """Regenerate every panel from synthetic values derived from the tick
        counter; runs once per second and on manual refresh."""
        n = self._tick_n
        self._tick_n += 1
        blink = (n % 2 == 0)  # drives the RUNNING-dot blink in the Prefect panel
        t = time.strftime("%Y-%m-%d %H:%M:%S UTC", time.gmtime())

        # Synthetic metrics: each drifts or cycles with the tick counter so
        # the mock UI visibly changes over time.
        cap = 124532.10 + n * 0.5
        pnl = 1240.50 + n * 0.1
        rm = 0.82 + (n % 10) * 0.01
        vel = -0.031 - (n % 5) * 0.002
        scan = 59000 + n
        age = (n % 5) + 0.1
        age_col = "green" if age < 15 else "yellow"
        mc = mc_mock(n)

        # ── HEADER ────────────────────────────────────────────────────────────
        hz = "[green][HZ ✓][/green]"
        sc = {"GREEN":"green","ORANGE":"dark_orange","RED":"red"}.get(mc["status"],"dim")
        self.query_one("#header", Static).update(
            f"[bold cyan]🐬 DOLPHIN-NAUTILUS[/bold cyan] v2.0 │ {t}"
            f" │ [green]● GREEN[/green] {hz}"
            f" │ MC:[{sc}]{mc['status']}[/{sc}]\n"
            f"[dim] localhost:5701 │ q=quit r=refresh l=log[/dim]"
        )

        # ── TRADER ────────────────────────────────────────────────────────────
        positions = mock_open_positions(n)
        pos_lines = " ".join(
            f"[cyan]{sym}[/cyan] [yellow]{side}[/yellow] {qty}"
            f"@[dim]{entry:,.0f}[/dim]→[green]{cur:,.0f}[/green]"
            f"([green]+${abs((entry-cur)*qty):,.1f}[/green])"
            for sym, side, qty, entry, cur in positions
        ) if positions else "[dim]NONE[/dim]"
        vol_ok = "[green]YES[/green]" if (n % 8) < 6 else "[yellow]NO[/yellow]"
        self.query_one("#panel_trader", Static).update(
            f"[bold cyan]NAUTILUS-DOLPHIN TRADER[/bold cyan]"
            f" posture:[green]APEX[/green] bar:{scan} vol:{vol_ok}"
            f" trades:[cyan]{12+n//30}[/cyan] cap:[cyan]${cap:,.0f}[/cyan]\n"
            f" open: {pos_lines}\n"
            f" vel:[yellow]{vel:.5f}[/yellow] thr:-0.02000 pnl:[green]+${pnl:,.2f}[/green]"
        )

        # ── SYSTEM HEALTH ─────────────────────────────────────────────────────
        self.query_one("#panel_health", Static).update(
            f"[bold]SYS HEALTH[/bold]\n"
            f"rm_meta:[green]{rm:.3f}[/green]\n"
            f"M1:[green]1.0[/green] M2:[green]1.0[/green] M3:[green]1.0[/green]\n"
            f"M4:[green]1.0[/green] M5:[green]1.0[/green]\n"
            f"[green]● GREEN[/green]"
        )

        # ── ALPHA ENGINE ──────────────────────────────────────────────────────
        # 16-cell block gauge of rm (mock alpha-engine risk multiplier).
        filled = int(rm * 16)
        bar = "█" * filled + "░" * (16 - filled)
        self.query_one("#panel_alpha", Static).update(
            f"[bold]ALPHA ENGINE[/bold]\n"
            f"Posture:[green]APEX ●[/green]\n"
            f"Rm:[green]{bar}[/green]{rm:.2f}\n"
            f"ACB:1.55x β=0.80\n"
            f"C1:[green].9[/green] C2:[green].8[/green] C3:[yellow].7[/yellow]"
            f" C4:[green]1.[/green] C5:[green].9[/green]"
        )

        # ── SCAN BRIDGE ───────────────────────────────────────────────────────
        self.query_one("#panel_scan", Static).update(
            f"[bold]SCAN / NG7[/bold]\n"
            f"#{scan} age:[{age_col}]{age:.1f}s[/{age_col}]\n"
            f"vel_div:[{age_col}]{vel:.4f}[/{age_col}]\n"
            f"w50:-0.0421 w750:-0.0109\n"
            f"inst:0.0234"
        )

        # ── ExtF ──────────────────────────────────────────────────────────────
        self.query_one("#panel_extf", Static).update(
            f"[bold]ExtF[/bold] [green]9/9 ✓[/green]\n"
            f"fund:[cyan]-0.012[/cyan] dvol:[cyan]62.4[/cyan]\n"
            f"fng:[yellow]28[/yellow] taker:0.81\n"
            f"vix:18.2 ls:0.48\n"
            f"age:[green]4.2s[/green]"
        )

        # ── EsoF ──────────────────────────────────────────────────────────────
        self.query_one("#panel_esof", Static).update(
            f"[bold]EsoF[/bold]\n"
            f"Moon: Waxing Gibbous\n"
            f"Merc:[green]Normal[/green]\n"
            f"Sess:London MC:0.42\n"
            f"age:[green]3.8s[/green]"
        )

        # ── CAPITAL ───────────────────────────────────────────────────────────
        self.query_one("#panel_capital", Static).update(
            f"[bold]CAPITAL[/bold]\n"
            f"Cap:[cyan]${cap:,.0f}[/cyan]\n"
            f"DD:[yellow]-3.21%[/yellow]\n"
            f"PnL:[green]+${pnl:,.2f}[/green]\n"
            f"Pos:[green]APEX[/green] T:{12+n//30}"
        )

        # ── PREFECT ───────────────────────────────────────────────────────────
        flow_lines = "\n".join(
            f"{prefect_dot(st, blink)} {name:<22} {dur}"
            for name, st, dur in MOCK_FLOWS
        )
        self.query_one("#panel_prefect", Static).update(
            f"[bold]PREFECT[/bold] [green]✓[/green]\n{flow_lines}"
        )

        # ── OBF ───────────────────────────────────────────────────────────────
        self.query_one("#panel_obf", Static).update(
            f"[bold]OBF TOP[/bold]\n"
            f"BTC [green]+0.18[/green] fp:0.72\n"
            f"ETH [green]+0.12[/green] fp:0.68\n"
            f"SOL [green]+0.09[/green] fp:0.61\n"
            f"BNB [red]-0.05[/red] fp:0.51"
        )

        # ── MC-FOREWARNER FOOTER (sexy) ───────────────────────────────────────
        prob = mc["catastrophic_prob"]
        env = mc["envelope_score"]
        self._prob_history.append(prob)

        # Title bar
        self.query_one("#mc_title", Static).update(
            f"[bold cyan]⚡ MC-FOREWARNER RISK MANIFOLD[/bold cyan]"
            f" [{sc}]▶ {mc['status']}[/{sc}]"
            f" [dim]src:{mc['source']} {mc['timestamp']}[/dim]"
        )

        # Left: Digits widget showing probability as large text
        self.query_one("#mc_digits", Digits).update(f"{prob:.3f}")
        status_emoji = {"GREEN": "🟢 SAFE", "ORANGE": "🟡 CAUTION", "RED": "🔴 DANGER"}.get(mc["status"], "⚪")
        self.query_one("#mc_status_text", Static).update(
            f"[{sc}]{status_emoji}[/{sc}]\n[dim]cat.prob[/dim]"
        )

        # Center: ProgressBar for catastrophic_prob (danger = high value)
        prob_pct = int(prob * 100)
        prob_bar = self.query_one("#mc_prob_bar", ProgressBar)
        prob_bar.progress = prob_pct
        # Apply danger CSS class based on threshold
        prob_bar.remove_class("-danger", "-warning")
        if prob >= 0.30: prob_bar.add_class("-danger")
        elif prob >= 0.10: prob_bar.add_class("-warning")
        self.query_one("#mc_prob_label", Static).update(
            f"[dim]catastrophic_prob[/dim] "
            f"[green]▏GREEN<0.10[/green] [yellow]▏ORANGE<0.30[/yellow] [red]▏RED≥0.30[/red]"
            f" [{sc}]{prob:.4f}[/{sc}]"
        )

        # ProgressBar for envelope_score (safe = high value, so invert display)
        env_pct = int(env * 100)
        env_bar = self.query_one("#mc_env_bar", ProgressBar)
        env_bar.progress = env_pct
        env_bar.remove_class("-danger", "-warning")
        if env < 0.40: env_bar.add_class("-danger")
        elif env < 0.70: env_bar.add_class("-warning")
        self.query_one("#mc_env_label", Static).update(
            f"[dim]envelope_score [/dim]"
            f"[red]▏DANGER<0.40[/red] [yellow]▏CAUTION<0.70[/yellow] [green]▏SAFE≥0.70[/green]"
            f" [green]{env:.4f}[/green]"
        )

        # Sparkline: rolling 40-sample history of catastrophic_prob
        self.query_one("#mc_spark_label", Static).update(
            f"[dim]prob history (40s)[/dim] "
            f"[dim]min:{min(self._prob_history):.3f} "
            f"max:{max(self._prob_history):.3f}[/dim]"
        )
        self.query_one("#mc_sparkline", Sparkline).data = list(self._prob_history)

        # Right: threshold legend
        self.query_one("#mc_legend", Static).update(
            f"[bold]THRESHOLDS[/bold]\n"
            f"[green]GREEN[/green] prob < 0.10\n"
            f"[yellow]ORANGE[/yellow] prob < 0.30\n"
            f"[red]RED[/red] prob ≥ 0.30\n"
            f"\n"
            f"[dim]runs every 4h[/dim]\n"
            f"[dim]model: DolphinForewarner[/dim]"
        )

        # ── LOG ───────────────────────────────────────────────────────────────
        # Only rendered while visible; content is synthetic log lines.
        if self._log_visible:
            self.query_one("#panel_log", Static).update(
                f"[bold]LOG[/bold] (l=hide)\n"
                f"[dim]{t}[/dim] [INFO] RM_META=0.923 GREEN\n"
                f"[dim]{t}[/dim] [INFO] SCAN #{scan} vel={vel:.4f}\n"
                f"[dim]{t}[/dim] [INFO] MC {mc['status']} prob={prob:.4f}"
            )

    def action_refresh(self) -> None:
        """'r' binding: redraw all panels immediately."""
        self._update()

    def action_toggle_log(self) -> None:
        """'l' binding: show/hide the bottom log row."""
        self._log_visible = not self._log_visible
        self.query_one("#log_row").display = self._log_visible
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Run the mock dashboard directly: python3 dolphin_tui_v2.py
    DolphinTUI().run()
|
||||
1053
Observability/TUI/dolphin_tui_v3.py
Executable file
1053
Observability/TUI/dolphin_tui_v3.py
Executable file
File diff suppressed because it is too large
Load Diff
694
Observability/TUI/dolphin_tui_v4.py
Executable file
694
Observability/TUI/dolphin_tui_v4.py
Executable file
@@ -0,0 +1,694 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
DOLPHIN TUI v4
|
||||
==============
|
||||
Fixes vs v3:
|
||||
• SYS HEALTH: tdr/scb labels expanded to full service names
|
||||
• ALPHA ENGINE: falls back to engine_snapshot when DOLPHIN_SAFETY is empty
|
||||
• MC-FOREWARNER: graceful "not yet run" display; shows last-run age; no crash on absent key
|
||||
• Version number shown in header after MC status
|
||||
|
||||
Run: source /home/dolphin/siloqy_env/bin/activate && python dolphin_tui_v4.py
|
||||
Keys: q=quit r=force-refresh l=toggle log t=toggle test footer
|
||||
"""
|
||||
|
||||
# ── stdlib ────────────────────────────────────────────────────────────────────
|
||||
import asyncio, json, math, threading, time
|
||||
from collections import deque
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
# ── third-party ───────────────────────────────────────────────────────────────
|
||||
try:
|
||||
from textual.app import App, ComposeResult
|
||||
from textual.containers import Horizontal, Vertical
|
||||
from textual.widgets import Digits, ProgressBar, Sparkline, Static
|
||||
except ImportError as e:
|
||||
raise SystemExit(f"textual not found — activate siloqy_env: {e}")
|
||||
|
||||
try:
|
||||
import hazelcast
|
||||
_HZ_OK = True
|
||||
except ImportError:
|
||||
_HZ_OK = False
|
||||
|
||||
# Shown in the header so the on-screen build matches this file's version.
TUI_VERSION = "v4"

# File-system fallbacks read when the corresponding Hazelcast maps are empty.
_META_JSON = Path("/mnt/dolphinng5_predict/run_logs/meta_health.json")
_CAPITAL_JSON = Path("/tmp/dolphin_capital_checkpoint.json")
_TEST_JSON = Path("/mnt/dolphinng5_predict/run_logs/test_results_latest.json")
# Symbols displayed in the OBF panel, in display order.
_OBF_SYMS = ["BTCUSDT", "ETHUSDT", "SOLUSDT", "BNBUSDT", "XRPUSDT"]

# Rich-markup color maps: trading posture, MC-Forewarner status, and
# meta-health status respectively; unknown keys render "dim" at call sites.
_PC = {"APEX":"green","STALKER":"yellow","TURTLE":"dark_orange","HIBERNATE":"red"}
_MC = {"GREEN":"green","ORANGE":"dark_orange","RED":"red"}
_SC = {"GREEN":"green","DEGRADED":"yellow","CRITICAL":"dark_orange","DEAD":"red"}
|
||||
|
||||
|
||||
# ── Thread-safe state ─────────────────────────────────────────────────────────
|
||||
class _State:
|
||||
def __init__(self):
|
||||
self._l = threading.Lock(); self._d: Dict[str, Any] = {}
|
||||
def put(self, k, v):
|
||||
with self._l: self._d[k] = v
|
||||
def get(self, k, default=None):
|
||||
with self._l: return self._d.get(k, default)
|
||||
def update(self, m):
|
||||
with self._l: self._d.update(m)
|
||||
|
||||
# Singleton store shared between the Hazelcast listener thread and the UI.
_S = _State()

def _ingest(key, raw):
    """Parse raw JSON from a Hazelcast entry and stash it as ``hz.<key>``.

    Also records ``hz.<key>._t`` (ingest wall-clock time) so panels can show
    data age.  Falsy payloads and unparseable JSON are silently dropped —
    best-effort by design, so one bad entry never kills the listener thread.
    """
    if not raw: return
    try: _S.update({f"hz.{key}": json.loads(raw), f"hz.{key}._t": time.time()})
    except Exception: pass
|
||||
|
||||
def start_hz_listener(on_scan=None):
    """Start a daemon thread that mirrors Hazelcast maps into ``_S``.

    Connects to the local "dolphin" cluster, does an initial pull of every
    key of interest, then installs entry listeners so later updates stream
    in.  *on_scan*, when given, is invoked after each ``latest_eigen_scan``
    update (used to trigger an immediate UI refresh).  On any failure the
    thread marks ``hz_up`` False, records the error, sleeps 10 s and
    reconnects from scratch — it never raises to the caller.
    """
    if not _HZ_OK:
        # hazelcast package unavailable: record the fact and run degraded.
        _S.put("hz_up", False); return
    def _run():
        while True:  # reconnect loop — one iteration per client lifetime
            try:
                _S.put("hz_up", False)
                client = hazelcast.HazelcastClient(
                    cluster_name="dolphin", cluster_members=["localhost:5701"],
                    connection_timeout=5.0)
                _S.put("hz_up", True)
                # Feature map: initial pull, then listen for changes.
                fm = client.get_map("DOLPHIN_FEATURES").blocking()
                for k in ("latest_eigen_scan","exf_latest","acb_boost",
                          "obf_universe_latest","mc_forewarner_latest"):
                    _ingest(k, fm.get(k))
                def _f(e):
                    _ingest(e.key, e.value)
                    if e.key == "latest_eigen_scan" and on_scan:
                        try: on_scan()
                        except Exception: pass
                fm.add_entry_listener(include_value=True, updated=_f, added=_f)
                # Single-key maps: (hazelcast map, key inside it, name in _S).
                for map_name, key, state_key in [
                    ("DOLPHIN_META_HEALTH", "latest", "meta_health"),
                    ("DOLPHIN_SAFETY", "latest", "safety"),
                    ("DOLPHIN_HEARTBEAT", "nautilus_flow_heartbeat", "heartbeat"),
                ]:
                    m = client.get_map(map_name).blocking()
                    _ingest(state_key, m.get(key))
                    # Defaults bind the current loop values (late-binding fix).
                    def _cb(e, sk=state_key, ek=key):
                        if e.key == ek: _ingest(sk, e.value)
                    m.add_entry_listener(include_value=True, updated=_cb, added=_cb)
                # BLUE engine state: capital checkpoint + engine snapshot.
                stm = client.get_map("DOLPHIN_STATE_BLUE").blocking()
                _ingest("capital", stm.get("capital_checkpoint"))
                _ingest("engine_snapshot", stm.get("engine_snapshot"))
                def _cap(e):
                    if e.key == "capital_checkpoint": _ingest("capital", e.value)
                    elif e.key == "engine_snapshot": _ingest("engine_snapshot", e.value)
                stm.add_entry_listener(include_value=True, updated=_cap, added=_cap)
                # PnL map may not exist on older deployments — best-effort.
                try:
                    pm = client.get_map("DOLPHIN_PNL_BLUE").blocking()
                    _ingest("pnl_blue", pm.get("session_perf"))
                    def _pnl(e):
                        if e.key == "session_perf": _ingest("pnl_blue", e.value)
                    pm.add_entry_listener(include_value=True, updated=_pnl, added=_pnl)
                except Exception: pass
                # Liveness watch: poll the client until it stops, then reconnect.
                while True:
                    time.sleep(5)
                    if not getattr(client.lifecycle_service, "is_running", lambda: True)():
                        break
            except Exception as e:
                _S.put("hz_up", False); _S.put("hz_err", str(e)); time.sleep(10)
    threading.Thread(target=_run, daemon=True, name="hz-listener").start()
|
||||
|
||||
async def prefect_poll_loop():
    """Poll the Prefect API once a minute and publish flow rows into ``_S``.

    Keeps the most recent run per flow (runs arrive newest-first), resolves
    flow ids to names lazily, and stores up to 8 display rows under
    ``prefect_flows``.  Any failure flips ``prefect_ok`` False and records a
    truncated error string; the loop itself never exits.
    """
    while True:
        try:
            # Imported inside the loop so a missing/broken prefect install is
            # reported via prefect_ok instead of crashing the TUI at import time.
            from prefect.client.orchestration import get_client
            from prefect.client.schemas.sorting import FlowRunSort
            async with get_client() as pc:
                runs = await pc.read_flow_runs(limit=20, sort=FlowRunSort.START_TIME_DESC)
                seen: Dict[str, Any] = {}
                fid_to_name: Dict[str, str] = {}
                # Newest-first order means the first run seen per flow id wins.
                for r in runs:
                    fid = str(r.flow_id)
                    if fid not in seen: seen[fid] = r
                rows = []
                for fid, r in seen.items():
                    if fid not in fid_to_name:
                        try:
                            f = await pc.read_flow(r.flow_id)
                            fid_to_name[fid] = f.name
                        except Exception: fid_to_name[fid] = fid[:8]
                    rows.append({"name": fid_to_name[fid],
                                 "state": r.state_name or "?",
                                 "ts": r.start_time.strftime("%m-%d %H:%M") if r.start_time else "--"})
                _S.put("prefect_flows", rows[:8]); _S.put("prefect_ok", True)
        except Exception as e:
            _S.put("prefect_ok", False); _S.put("prefect_err", str(e)[:60])
        await asyncio.sleep(60)
|
||||
|
||||
# ── Helpers ───────────────────────────────────────────────────────────────────
|
||||
def _age(ts):
|
||||
if not ts: return "?"
|
||||
s = time.time() - ts
|
||||
if s < 0: return "0s"
|
||||
if s < 60: return f"{s:.0f}s"
|
||||
if s < 3600: return f"{s/60:.0f}m"
|
||||
return f"{s/3600:.1f}h"
|
||||
|
||||
def _age_col(ts, warn=15, dead=60):
|
||||
s = time.time() - ts if ts else 9999
|
||||
return "red" if s > dead else ("yellow" if s > warn else "green")
|
||||
|
||||
def _bar(v, width=12):
|
||||
v = max(0.0, min(1.0, v))
|
||||
f = round(v * width)
|
||||
return "█" * f + "░" * (width - f)
|
||||
|
||||
def _fmt_vel(v):
|
||||
return "---" if v is None else f"{float(v):+.5f}"
|
||||
|
||||
def _dot(state):
|
||||
s = (state or "").upper()
|
||||
if s == "COMPLETED": return "[green]●[/green]"
|
||||
if s == "RUNNING": return "[cyan]●[/cyan]"
|
||||
if s in ("FAILED","CRASHED","TIMEDOUT"): return "[red]●[/red]"
|
||||
if s == "CANCELLED": return "[dim]●[/dim]"
|
||||
if s == "PENDING": return "[yellow]●[/yellow]"
|
||||
return "[dim]◌[/dim]"
|
||||
|
||||
def _posture_markup(p):
    """Wrap posture name *p* in its configured color markup (dim if unknown)."""
    color = _PC.get(p, "dim")
    return f"[{color}]{p}[/{color}]"
|
||||
|
||||
def _col(v, c): return f"[{c}]{v}[/{c}]"
|
||||
|
||||
def _eigen_from_scan(scan):
    """Flatten a raw eigen-scan payload into the scalar fields the TUI shows.

    Accepts either the wrapped form ``{"result": {...}, ...}`` or a bare
    result dict; returns ``{}`` for a falsy *scan*.  Per-window velocities
    fall back to the flat ``wNN_velocity`` keys when tracking data is absent
    (NOTE(review): a genuine 0.0 velocity also triggers the ``or`` fallback —
    presumed intentional, confirm with the producer).
    """
    if not scan: return {}
    r = scan.get("result", scan)
    mwr_raw = r.get("multi_window_results", {})
    # Window keys may arrive as ints or strings depending on serialization — try both.
    def _td(w): return (mwr_raw.get(w) or mwr_raw.get(str(w)) or {}).get("tracking_data", {})
    def _rs(w): return (mwr_raw.get(w) or mwr_raw.get(str(w)) or {}).get("regime_signals", {})
    v50 = _td(50).get("lambda_max_velocity") or scan.get("w50_velocity", 0.0)
    v150 = _td(150).get("lambda_max_velocity") or scan.get("w150_velocity", 0.0)
    v300 = _td(300).get("lambda_max_velocity") or scan.get("w300_velocity", 0.0)
    v750 = _td(750).get("lambda_max_velocity") or scan.get("w750_velocity", 0.0)
    # Velocity divergence: explicit value wins, else fast-minus-slow window delta.
    vel_div = scan.get("vel_div", float(v50 or 0) - float(v150 or 0))
    # Mean instability across the four tracked windows.
    inst_avg = sum(_rs(w).get("instability_score", 0.0) for w in (50,150,300,750)) / 4
    bt_price = (r.get("pricing_data", {}) or {}).get("current_prices", {}).get("BTCUSDT")
    return {
        "scan_number": scan.get("scan_number", 0),
        "timestamp": scan.get("timestamp", 0),
        "vel_div": float(vel_div or 0),
        "v50": float(v50 or 0), "v150": float(v150 or 0),
        "v300": float(v300 or 0), "v750": float(v750 or 0),
        "inst_avg": float(inst_avg or 0),
        "btc_price": float(bt_price) if bt_price else None,
        "regime": r.get("regime", r.get("sentiment", "?")),
        "version": scan.get("version", "?"),
    }
|
||||
|
||||
# ── CSS ───────────────────────────────────────────────────────────────────────
|
||||
_CSS = """
|
||||
Screen { background: #0a0a0a; color: #d4d4d4; }
|
||||
#header { height: 2; background: #111; border-bottom: solid #333; padding: 0 1; }
|
||||
#trader_row { height: 5; }
|
||||
#top_row { height: 9; }
|
||||
#mid_row { height: 9; }
|
||||
#bot_row { height: 7; }
|
||||
#log_row { height: 5; display: none; }
|
||||
#mc_outer { height: 16; border: solid #224; background: #060616; }
|
||||
#mc_title { height: 1; padding: 0 1; }
|
||||
#mc_body { height: 15; }
|
||||
#mc_left { width: 18; padding: 0 1; }
|
||||
#mc_center { width: 1fr; padding: 0 1; }
|
||||
#mc_right { width: 30; padding: 0 1; }
|
||||
#mc_prob_label { height: 1; }
|
||||
#mc_prob_bar { height: 1; }
|
||||
#mc_env_label { height: 1; }
|
||||
#mc_env_bar { height: 1; }
|
||||
#mc_champ_label{ height: 1; }
|
||||
#mc_champ_bar { height: 1; }
|
||||
#mc_live { height: 8; }
|
||||
#mc_spark_lbl { height: 1; }
|
||||
#mc_spark { height: 2; }
|
||||
#mc_mae_lbl { height: 1; }
|
||||
#mc_mae_spark { height: 2; }
|
||||
#mc_digits { height: 3; }
|
||||
#mc_status { height: 3; }
|
||||
#mc_legend { height: 6; }
|
||||
#test_footer { height: 3; background: #101010; border-top: solid #2a2a2a; padding: 0 1; }
|
||||
Static.panel { border: solid #333; padding: 0 1; height: 100%; }
|
||||
#p_trader { width: 1fr; border: solid #006650; }
|
||||
#p_health { width: 1fr; }
|
||||
#p_alpha { width: 1fr; }
|
||||
#p_scan { width: 1fr; }
|
||||
#p_extf { width: 1fr; }
|
||||
#p_obf { width: 1fr; }
|
||||
#p_capital { width: 1fr; }
|
||||
#p_prefect { width: 1fr; }
|
||||
#p_acb { width: 1fr; }
|
||||
#p_log { width: 1fr; border: solid #333; padding: 0 1; }
|
||||
ProgressBar > .bar--bar { color: $success; }
|
||||
ProgressBar > .bar--complete { color: $success; }
|
||||
ProgressBar.-danger > .bar--bar { color: $error; }
|
||||
ProgressBar.-warning > .bar--bar { color: $warning; }
|
||||
"""
|
||||
|
||||
|
||||
# ── App ───────────────────────────────────────────────────────────────────────
|
||||
class DolphinTUI(App):
|
||||
CSS = _CSS
|
||||
BINDINGS = [("q","quit","Quit"),("r","force_refresh","Refresh"),
|
||||
("l","toggle_log","Log"),("t","toggle_tests","Tests")]
|
||||
_log_vis = False; _test_vis = True
|
||||
_prob_hist: deque; _mae_deque: deque
|
||||
_session_start_cap: Optional[float] = None
|
||||
_cap_peak: Optional[float] = None
|
||||
|
||||
def compose(self) -> ComposeResult:
|
||||
yield Static("", id="header")
|
||||
with Horizontal(id="trader_row"):
|
||||
yield Static("", classes="panel", id="p_trader")
|
||||
with Horizontal(id="top_row"):
|
||||
yield Static("", classes="panel", id="p_health")
|
||||
yield Static("", classes="panel", id="p_alpha")
|
||||
yield Static("", classes="panel", id="p_scan")
|
||||
with Horizontal(id="mid_row"):
|
||||
yield Static("", classes="panel", id="p_extf")
|
||||
yield Static("", classes="panel", id="p_obf")
|
||||
yield Static("", classes="panel", id="p_capital")
|
||||
with Horizontal(id="bot_row"):
|
||||
yield Static("", classes="panel", id="p_prefect")
|
||||
yield Static("", classes="panel", id="p_acb")
|
||||
with Vertical(id="mc_outer"):
|
||||
yield Static("", id="mc_title")
|
||||
with Horizontal(id="mc_body"):
|
||||
with Vertical(id="mc_left"):
|
||||
yield Digits("0.000", id="mc_digits")
|
||||
yield Static("", id="mc_status")
|
||||
with Vertical(id="mc_center"):
|
||||
yield Static("", id="mc_prob_label")
|
||||
yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_prob_bar")
|
||||
yield Static("", id="mc_env_label")
|
||||
yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_env_bar")
|
||||
yield Static("", id="mc_champ_label")
|
||||
yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_champ_bar")
|
||||
yield Static("", id="mc_live")
|
||||
with Vertical(id="mc_right"):
|
||||
yield Static("", id="mc_spark_lbl")
|
||||
yield Sparkline([], id="mc_spark")
|
||||
yield Static("", id="mc_mae_lbl")
|
||||
yield Sparkline([], id="mc_mae_spark")
|
||||
yield Static("", id="mc_legend")
|
||||
yield Static("", id="test_footer")
|
||||
with Horizontal(id="log_row"):
|
||||
yield Static("", classes="panel", id="p_log")
|
||||
|
||||
def on_mount(self) -> None:
|
||||
self._prob_hist = deque([0.0] * 40, maxlen=40)
|
||||
self._mae_deque = deque(maxlen=500)
|
||||
start_hz_listener(on_scan=lambda: self.call_from_thread(self._update))
|
||||
self.run_worker(prefect_poll_loop(), name="prefect-poll", exclusive=True)
|
||||
self.set_interval(1.0, self._update)
|
||||
self._update()
|
||||
|
||||
def _update(self) -> None:
    """Repaint every panel from the latest shared state.

    Called on a 1 s interval, on each new eigen scan, and on manual refresh.
    Reads only from the thread-safe ``_S`` store plus JSON file fallbacks,
    so it is safe to run on the UI thread.
    """
    now_str = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
    hz_up = _S.get("hz_up", False)
    mh    = _S.get("hz.meta_health") or self._read_json(_META_JSON)
    safe  = _S.get("hz.safety") or {}
    scan  = _S.get("hz.latest_eigen_scan") or {}
    exf   = _S.get("hz.exf_latest") or {}
    acb   = _S.get("hz.acb_boost") or {}
    obf_u = _S.get("hz.obf_universe_latest") or {}
    mc    = _S.get("hz.mc_forewarner_latest") or {}
    cap   = _S.get("hz.capital") or self._read_json(_CAPITAL_JSON) or {}
    hb    = _S.get("hz.heartbeat") or {}
    eng   = _S.get("hz.engine_snapshot") or {}
    eigen = _eigen_from_scan(scan)

    # ── posture: DOLPHIN_SAFETY first, fall back to engine_snapshot ───────
    posture = safe.get("posture") or eng.get("posture") or "?"
    # ── Rm / breakdown: DOLPHIN_SAFETY first, fall back to zeros ─────────
    rm_s = float(safe.get("Rm", 0.0))
    bd = safe.get("breakdown") or {}
    # If DOLPHIN_SAFETY is empty but engine_snapshot has posture, show that
    safety_live = bool(safe.get("posture") or safe.get("Rm"))

    rm_m = mh.get("rm_meta", 0.0) if mh else 0.0
    mhs_st = mh.get("status", "?") if mh else "?"
    sc_mhs = _SC.get(mhs_st, "dim")
    pc_col = _PC.get(posture, "dim")
    hz_tag = "[green][HZ✓][/green]" if hz_up else "[red][HZ✗][/red]"
    mc_st = mc.get("status", "N/A") if mc else "N/A"
    mc_col = _MC.get(mc_st, "dim")

    # ── HEADER ────────────────────────────────────────────────────────────
    self._w("#header").update(
        f"[bold cyan]🐬 DOLPHIN-NAUTILUS[/bold cyan] {now_str}"
        f" {hz_tag} [{sc_mhs}]MHS:{mhs_st} {rm_m:.3f}[/{sc_mhs}]"
        f" [{pc_col}]◈{posture}[/{pc_col}]"
        f" [{mc_col}]MC:{mc_st}[/{mc_col}]"
        f" [dim]{TUI_VERSION}[/dim]\n"
        f"[dim] localhost:5701 q=quit r=refresh l=log t=tests[/dim]"
    )

    # ── TRADER ────────────────────────────────────────────────────────────
    cap_val = float(cap.get("capital", 0)) if cap else 0.0
    hb_phase = hb.get("phase", "?") if hb else "N/A"
    hb_ts = hb.get("ts") if hb else None
    hb_age = _age(hb_ts) if hb_ts else "?"
    hb_col = _age_col(hb_ts, 30, 120) if hb_ts else "red"
    vel_div = eigen.get("vel_div", 0.0)
    vc = "green" if vel_div > 0 else ("red" if vel_div < -0.02 else "yellow")
    scan_no = eigen.get("scan_number", 0)
    btc_p = eigen.get("btc_price")
    btc_str = f"BTC:[cyan]${btc_p:,.0f}[/cyan] " if btc_p else ""
    trades_ex = eng.get("trades_executed")
    last_vd = eng.get("last_vel_div")
    # BUGFIX: the old conditional expression swapped the *entire* implicit
    # f-string concatenation, so when last_vel_div was None the TRADER
    # header/phase/scan lines disappeared from the panel. Only the final
    # line actually varies, so build a common prefix + conditional tail.
    trader_hdr = (
        f"[bold cyan]TRADER[/bold cyan] {_posture_markup(posture)}"
        f" phase:[{hb_col}]{hb_phase}[/{hb_col}] hb:{_col(hb_age, hb_col)}"
        f" scan:[dim]#{scan_no}[/dim] {btc_str}\n"
        f" vel_div:[{vc}]{vel_div:+.5f}[/{vc}] thr:[dim]-0.02000[/dim]"
        f" cap:[cyan]${cap_val:,.0f}[/cyan]"
        f" trades:{trades_ex if trades_ex is not None else '—'}\n"
    )
    trader_tail = (
        f" last_vel_div:[dim]{last_vd:+.5f}[/dim] open-pos:[dim]DOLPHIN_PNL_BLUE[/dim]"
        if last_vd is not None else
        f" open-pos:[dim]DOLPHIN_PNL_BLUE (not yet wired)[/dim]"
    )
    self._w("#p_trader").update(trader_hdr + trader_tail)

    # ── SYS HEALTH — FIX: expand tdr/scb labels ──────────────────────────
    if mh:
        svc = mh.get("service_status", {})
        hz_ks = mh.get("hz_key_status", {})

        def _svc(nm, label):
            # Green dot only for supervisor state RUNNING.
            st = svc.get(nm, "?")
            dot = "[green]●[/green]" if st == "RUNNING" else "[red]●[/red]"
            return f"{dot}[dim]{label}[/dim]"

        def _hz_dot(nm):
            # Freshness score from MHS: ≥0.9 green, ≥0.5 yellow, else red.
            sc = hz_ks.get(nm, {}).get("score", 0)
            return "[green]●[/green]" if sc >= 0.9 else ("[yellow]●[/yellow]" if sc >= 0.5 else "[red]●[/red]")

        self._w("#p_health").update(
            f"[bold]SYS HEALTH[/bold] [{sc_mhs}]{mhs_st}[/{sc_mhs}]\n"
            f"rm:[{sc_mhs}]{rm_m:.3f}[/{sc_mhs}]"
            f" m4:{mh.get('m4_control_plane',0):.2f}"
            f" m3:{mh.get('m3_data_freshness',0):.2f}"
            f" m5:{mh.get('m5_coherence',0):.2f}\n"
            f"{_svc('dolphin_data:exf_fetcher','exf')}"
            f" {_svc('dolphin_data:acb_processor','acb')}"
            f" {_svc('dolphin_data:obf_universe','obf')}\n"
            f"{_svc('dolphin:nautilus_trader','trader')}"
            f" {_svc('dolphin:scan_bridge','scan-bridge')}\n"
            f"[dim]hz: exf{_hz_dot('exf_latest')}"
            f" scan{_hz_dot('latest_eigen_scan')}"
            f" obf{_hz_dot('obf_universe')}[/dim]"
        )
    else:
        self._w("#p_health").update("[bold]SYS HEALTH[/bold]\n[dim]awaiting MHS…[/dim]")

    # ── ALPHA ENGINE — FIX: fall back to engine_snapshot when safety empty ─
    safe_ts = _S.get("hz.safety._t")
    safe_age = _age(safe_ts) if safe_ts else "?"
    safe_ac = _age_col(safe_ts, 30, 120) if safe_ts else "red"

    def _cat(n):
        # Category score colored by the 0.9 / 0.6 bands.
        v = bd.get(f"Cat{n}", 0.0)
        c = "green" if v >= 0.9 else ("yellow" if v >= 0.6 else "red")
        return f"[{c}]{v:.2f}[/{c}]"

    if safety_live:
        self._w("#p_alpha").update(
            f"[bold]ALPHA ENGINE[/bold] {_posture_markup(posture)}\n"
            f"Rm:[{pc_col}]{_bar(rm_s,14)}[/{pc_col}]{rm_s:.3f}\n"
            f"C1:{_cat(1)} C2:{_cat(2)} C3:{_cat(3)}\n"
            f"C4:{_cat(4)} C5:{_cat(5)}"
            f" fenv:{bd.get('f_env',0):.2f} fex:{bd.get('f_exe',0):.2f}\n"
            f"[dim]age:{_col(safe_age, safe_ac)}[/dim]"
        )
    else:
        # DOLPHIN_SAFETY empty — show what we have from engine_snapshot
        bars_idx = eng.get("bar_idx", "?")
        scans_p = eng.get("scans_processed", "?")
        self._w("#p_alpha").update(
            f"[bold]ALPHA ENGINE[/bold] {_posture_markup(posture)}\n"
            f"[yellow]DOLPHIN_SAFETY empty[/yellow]\n"
            f"posture from engine_snapshot\n"
            f"bar:{bars_idx} scans:{scans_p}\n"
            f"[dim]Rm/Cat1-5: awaiting DOLPHIN_SAFETY[/dim]"
        )

    # ── SCAN ──────────────────────────────────────────────────────────────
    scan_ac = _age_col(eigen.get("timestamp", 0), 15, 60)
    scan_age = _age(eigen.get("timestamp")) if eigen.get("timestamp") else "?"
    vi = eigen.get("inst_avg", 0)
    self._w("#p_scan").update(
        f"[bold]SCAN {eigen.get('version','?')}[/bold]"
        f" [dim]#{scan_no}[/dim] age:[{scan_ac}]{scan_age}[/{scan_ac}]\n"
        f"vel_div:[{vc}]{vel_div:+.5f}[/{vc}]\n"
        f"w50:[yellow]{_fmt_vel(eigen.get('v50'))}[/yellow]"
        f" w150:[dim]{_fmt_vel(eigen.get('v150'))}[/dim]\n"
        f"w300:[dim]{_fmt_vel(eigen.get('v300'))}[/dim]"
        f" w750:[dim]{_fmt_vel(eigen.get('v750'))}[/dim]\n"
        f"inst:{vi:.4f}"
    )

    # ── ExtF ──────────────────────────────────────────────────────────────
    exf_t = _S.get("hz.exf_latest._t")
    exf_age = _age(exf_t) if exf_t else "?"
    exf_ac = _age_col(exf_t, 30, 120) if exf_t else "red"
    f_btc = exf.get("funding_btc"); dvol = exf.get("dvol_btc")
    fng = exf.get("fng"); taker = exf.get("taker")
    ls_btc = exf.get("ls_btc"); vix = exf.get("vix")
    ok_cnt = exf.get("_ok_count", 0)
    dvol_c = "red" if dvol and dvol > 70 else ("yellow" if dvol and dvol > 50 else "green")
    fng_c = "red" if fng and fng < 25 else ("yellow" if fng and fng < 45 else "green")
    if exf:
        self._w("#p_extf").update(
            f"[bold]ExtF[/bold] [{exf_ac}]{ok_cnt}/9 {exf_age}[/{exf_ac}]\n"
            f"fund:[cyan]{f_btc:.5f}[/cyan] dvol:[{dvol_c}]{dvol:.1f}[/{dvol_c}]\n"
            f"fng:[{fng_c}]{int(fng) if fng else '?'}[/{fng_c}] taker:[yellow]{taker:.3f}[/yellow]\n"
            f"ls_btc:{ls_btc:.3f} vix:{vix:.1f}\n"
            f"acb✓:{exf.get('_acb_ready','?')}"
        )
    else:
        self._w("#p_extf").update("[bold]ExtF[/bold]\n[dim]no data[/dim]")

    # ── OBF ───────────────────────────────────────────────────────────────
    obf_t = _S.get("hz.obf_universe_latest._t")
    obf_age = _age(obf_t) if obf_t else "?"
    obf_ac = _age_col(obf_t, 30, 120) if obf_t else "red"
    n_assets = obf_u.get("_n_assets", 0) if obf_u else 0
    lines = [f"[bold]OBF[/bold] [{obf_ac}]n={n_assets} {obf_age}[/{obf_ac}]"]
    for sym in _OBF_SYMS:
        if not obf_u: break
        a = obf_u.get(sym)
        if not a: continue
        imb = float(a.get("imbalance", 0))
        fp = float(a.get("fill_probability", 0))
        dq = float(a.get("depth_quality", 0))
        imb_c = "green" if imb > 0.1 else ("red" if imb < -0.1 else "yellow")
        lines.append(f"{sym[:3]} [{imb_c}]{imb:+.2f}[/{imb_c}] fp:{fp:.2f} dq:{dq:.2f}")
    self._w("#p_obf").update("\n".join(lines[:6]))

    # ── CAPITAL ───────────────────────────────────────────────────────────
    cap_t = _S.get("hz.capital._t")
    cap_ac = _age_col(cap_t, 60, 300) if cap_t else "dim"
    cap_age = _age(cap_t) if cap_t else "?"
    c5 = bd.get("Cat5", 1.0) if bd else 1.0
    try:
        # Invert the Cat5 logistic to estimate drawdown; guard the log domain.
        dd_est = 0.12 + math.log(1.0/c5 - 1.0) / 30.0 if 0 < c5 < 1 else 0.0
    except Exception:
        dd_est = 0.0
    dd_c = "red" if dd_est > 0.15 else ("yellow" if dd_est > 0.08 else "green")
    self._w("#p_capital").update(
        f"[bold]CAPITAL[/bold] [{cap_ac}]{cap_age}[/{cap_ac}]\n"
        f"Cap:[cyan]${cap_val:,.0f}[/cyan]\n"
        f"DD≈:[{dd_c}]{dd_est*100:.1f}%[/{dd_c}] C5:{c5:.3f}\n"
        f"Pos:{_posture_markup(posture)}\n"
        f"[dim]pnl/trades: DOLPHIN_PNL_BLUE[/dim]"
    )

    # ── PREFECT ───────────────────────────────────────────────────────────
    flows = _S.get("prefect_flows") or []
    pf_ok = _S.get("prefect_ok", False)
    pf_hdr = "[green]✓[/green]" if pf_ok else "[red]✗[/red]"
    flines = "\n".join(
        f"{_dot(f['state'])} {f['name'][:22]:<22} {f['ts']}" for f in flows[:5])
    self._w("#p_prefect").update(
        f"[bold]PREFECT[/bold] {pf_hdr}\n" + (flines or "[dim]polling…[/dim]"))

    # ── ACB ───────────────────────────────────────────────────────────────
    acb_t = _S.get("hz.acb_boost._t")
    acb_age = _age(acb_t) if acb_t else "?"
    acb_ac = _age_col(acb_t, 3600, 86400) if acb_t else "dim"
    boost = acb.get("boost", 1.0) if acb else 1.0
    beta = acb.get("beta", 0.8) if acb else 0.8
    cut = acb.get("cut", 0.0) if acb else 0.0
    boost_c = "green" if boost >= 1.5 else ("yellow" if boost >= 1.0 else "red")
    cut_c = "red" if cut > 0 else "dim"
    self._w("#p_acb").update(
        f"[bold]ACB[/bold] [{acb_ac}]{acb.get('date','?') if acb else '?'}[/{acb_ac}]\n"
        f"boost:[{boost_c}]{boost:.2f}x[/{boost_c}] β={beta:.2f}"
        f" cut:[{cut_c}]{cut:.2f}[/{cut_c}]\n"
        f"w750_thr:{acb.get('w750_threshold',0.0) if acb else 0.0:.4f}\n"
        f"[dim]dvol={acb.get('factors',{}).get('dvol_btc','?') if acb else '?'}"
        f" fng={acb.get('factors',{}).get('fng','?') if acb else '?'}[/dim]\n"
        f"[dim]age:{acb_age} cfg:{acb.get('config_used','?') if acb else '?'}[/dim]"
    )

    # ── MC-FOREWARNER — FIX: graceful when absent ─────────────────────────
    prob = float(mc.get("catastrophic_prob", 0.0)) if mc else 0.0
    env = float(mc.get("envelope_score", 0.0)) if mc else 0.0
    champ_p = mc.get("champion_probability") if mc else None
    mc_ts = mc.get("timestamp") if mc else None
    mc_warns = mc.get("warnings", []) if mc else []
    sc = _MC.get(mc_st, "dim")
    self._prob_hist.append(prob)

    # Age since last 4h run
    mc_age_str = "never run"
    if mc_ts:
        try:
            mc_dt = datetime.fromisoformat(mc_ts.replace("Z", "+00:00"))
            age_s = (datetime.now(timezone.utc) - mc_dt).total_seconds()
            age_m = int(age_s // 60)
            mc_age_str = f"{age_m//60}h{age_m%60:02d}m ago" if age_m >= 60 else f"{age_m}m ago"
        except Exception:
            pass

    mc_present = bool(mc)
    self._w("#mc_title").update(
        f"[bold cyan]⚡ MC-FOREWARNER RISK MANIFOLD[/bold cyan] {TUI_VERSION}"
        + (f" [{sc}]▶ {mc_st}[/{sc}] [dim]assessed:{mc_age_str} cadence:4h[/dim]"
           if mc_present else
           " [yellow]⚠ no data yet — Prefect 4h schedule not yet run[/yellow]"
           " [dim](mc_forewarner_flow runs every 4h)[/dim]")
    )

    # Left: digits + status
    self._w("#mc_digits", Digits).update(f"{prob:.3f}")
    status_str = {"GREEN":"🟢 SAFE","ORANGE":"🟡 CAUTION","RED":"🔴 DANGER"}.get(mc_st, "⚪ N/A")
    champ_str = f"champ:{champ_p*100:.0f}%" if champ_p is not None else "champ:—"
    self._w("#mc_status").update(
        (f"[{sc}]{status_str}[/{sc}]\n[dim]cat.prob {champ_str}[/dim]\n[dim]env:{env:.3f}[/dim]")
        if mc_present else
        "[yellow]awaiting[/yellow]\n[dim]first run[/dim]\n[dim]in ~4h[/dim]"
    )

    # Center bars.
    # BUGFIX: the old shared `val < lo_thr → lo_cls` logic inverted the
    # cat.prob colors relative to its own printed thresholds (prob<0.10 got
    # "-warning", the 0.10–0.30 band got "-danger", and ≥0.30 got no class
    # at all). cat.prob is "high is bad"; env.score is "low is bad".
    prob_cls = "-danger" if prob >= 0.30 else ("-warning" if prob >= 0.10 else None)
    env_cls = "-danger" if env < 0.40 else ("-warning" if env < 0.70 else None)
    for bar_id, pct, cls, label in [
        ("mc_prob_bar", int(prob * 100), prob_cls,
         f"[dim]cat.prob[/dim] [{sc}]{prob:.4f}[/{sc}] [green]<0.10 OK[/green] [yellow]<0.30 WARN[/yellow] [red]≥0.30 CRIT[/red]"),
        ("mc_env_bar", int(env * 100), env_cls,
         f"[dim]env.score[/dim] [green]{env:.4f}[/green] [red]<0.40 DANGER[/red] [yellow]<0.70 CAUTION[/yellow] [green]≥0.70 SAFE[/green]"),
    ]:
        pb = self._w(f"#{bar_id}", ProgressBar)
        pb.progress = pct
        pb.remove_class("-danger", "-warning")
        if cls:
            pb.add_class(cls)
        self._w(f"#{bar_id.replace('_bar','_label')}").update(label)

    # champion_probability bar
    chp_val = champ_p if champ_p is not None else 0.0
    cb = self._w("#mc_champ_bar", ProgressBar)
    cb.progress = int(chp_val * 100)
    cb.remove_class("-danger", "-warning")
    if chp_val < 0.30: cb.add_class("-danger")
    elif chp_val < 0.60: cb.add_class("-warning")
    self._w("#mc_champ_label").update(
        f"[dim]champ.prob[/dim] "
        + (f"[green]{champ_p*100:.1f}%[/green]" if champ_p is not None else "[dim]—[/dim]")
        + " [green]>60% GOOD[/green] [yellow]>30% MARGINAL[/yellow] [red]<30% RISK[/red]"
    )

    # Live performance tier
    cur_cap = float(cap.get("capital", 0.0)) if cap else 0.0
    if cur_cap > 0:
        # Session baselines are set lazily from the first positive capital seen.
        if self._session_start_cap is None: self._session_start_cap = cur_cap
        if self._cap_peak is None or cur_cap > self._cap_peak: self._cap_peak = cur_cap
    live_roi = ((cur_cap - self._session_start_cap) / self._session_start_cap
                if cur_cap > 0 and self._session_start_cap else None)
    live_dd = ((self._cap_peak - cur_cap) / self._cap_peak
               if cur_cap > 0 and self._cap_peak and cur_cap < self._cap_peak else None)
    pnl_blue = _S.get("hz.pnl_blue") or {}

    def _pct(v):
        return f"{v*100:+.1f}%" if v is not None else "—"

    def _lm(k, fmt="{:.3f}"):
        # Format a DOLPHIN_PNL_BLUE metric, em-dash when missing.
        v = pnl_blue.get(k)
        return fmt.format(v) if v is not None else "—"

    roi_c = "green" if live_roi and live_roi > 0 else ("red" if live_roi and live_roi < -0.10 else "yellow")
    dd_c2 = "red" if live_dd and live_dd > 0.20 else ("yellow" if live_dd and live_dd > 0.08 else "green")
    self._w("#mc_live").update(
        f"[dim]ROI[/dim] [{roi_c}]{_pct(live_roi):>8}[/{roi_c}]"
        f" [dim]champ gate:>+30% crit:<-30%[/dim]\n"
        f"[dim]DD [/dim] [{dd_c2}]{_pct(live_dd):>8}[/{dd_c2}]"
        f" [dim]champ gate:<20% crit:>40%[/dim]\n"
        f"[dim]WR:{_lm('win_rate','{:.1%}')} PF:{_lm('profit_factor','{:.2f}')}"
        f" Sh:{_lm('sharpe','{:.2f}')} Cal:{_lm('calmar','{:.2f}')}[/dim]\n"
        f"[dim]cap:${cur_cap:,.0f} start:${self._session_start_cap:,.0f} "
        f"trades:{eng.get('trades_executed','—')}[/dim]"
        if cur_cap > 0 and self._session_start_cap else
        "[dim]awaiting capital data…[/dim]"
    )

    # Right: sparklines + legend
    self._w("#mc_spark_lbl").update(
        f"[dim]cat.prob history [{min(self._prob_hist):.3f}–{max(self._prob_hist):.3f}][/dim]")
    self._w("#mc_spark", Sparkline).data = list(self._prob_hist)
    mae_list = list(self._mae_deque)
    self._w("#mc_mae_lbl").update(f"[dim]MAE hist (n={len(mae_list)})[/dim]")
    self._w("#mc_mae_spark", Sparkline).data = mae_list[-40:] if mae_list else [0.0]
    warn_str = ("\n[yellow]⚠ " + mc_warns[0] + "[/yellow]") if mc_warns else ""
    self._w("#mc_legend").update(
        "[bold]MC THRESHOLDS[/bold]\n"
        "[green]GREEN[/green] cat < 0.10\n"
        "[yellow]ORANGE[/yellow] cat < 0.30\n"
        "[red]RED[/red] cat ≥ 0.30\n"
        "[dim]DD gate: <20%[/dim]\n"
        "[dim]DD crit: >40%[/dim]" + warn_str
    )

    # ── TEST FOOTER ───────────────────────────────────────────────────────
    if self._test_vis:
        tr = self._read_json(_TEST_JSON) or {}

        def _tr_badge(cat):
            # passed/total badge per test category, colored by failure count.
            info = tr.get(cat, {})
            if not info: return f"[dim]{cat[:12]}:n/a[/dim]"
            p, f = info.get("passed",0), info.get("failed",0)
            c = "green" if f == 0 else ("yellow" if f <= 2 else "red")
            return f"[{c}]{cat[:10]}:{p}/{p+f}[/{c}][dim]@{info.get('ts','?')[:10]}[/dim]"

        cats = ["data_integrity","finance_fuzz","signal_fill","degradation","actor"]
        self._w("#test_footer").update(
            f"[bold dim]TESTS[/bold dim] [dim]last:{tr.get('_run_at','never')}[/dim]\n"
            f"{' '.join(_tr_badge(c) for c in cats)}\n"
            f"[dim]update: prod/run_logs/test_results_latest.json[/dim]"
        )
    else:
        self._w("#test_footer").update("")

    # ── LOG ───────────────────────────────────────────────────────────────
    if self._log_vis:
        self._w("#p_log").update(
            f"[bold]LOG[/bold] (l=hide)\n"
            f"[dim]{now_str}[/dim] scan=#{scan_no} vel={vel_div:+.5f}\n"
            f"hz_err:{_S.get('hz_err','none')} pf_err:{_S.get('prefect_err','none')}\n"
            f"[dim]state keys:{len(_S._d)} safety_live:{safety_live}[/dim]"
        )
|
||||
|
||||
def action_force_refresh(self) -> None:
    """Key 'r': repaint every panel immediately."""
    self._update()
|
||||
def action_toggle_log(self) -> None:
    """Key 'l': show or hide the log row at the bottom of the screen."""
    show = not self._log_vis
    self._log_vis = show
    self.query_one("#log_row").display = show
|
||||
def action_toggle_tests(self) -> None:
    """Key 't': toggle the test-results footer and repaint."""
    self._test_vis = not self._test_vis
    self._update()
|
||||
|
||||
def _w(self, selector, widget_type=Static):
    """Query shorthand: return the single widget matching *selector*, typed as *widget_type*."""
    return self.query_one(selector, widget_type)
|
||||
|
||||
@staticmethod
|
||||
def _read_json(path):
|
||||
try: return json.loads(path.read_text())
|
||||
except Exception: return None
|
||||
|
||||
|
||||
# Entry point: run the Textual app in the foreground (blocks until quit).
if __name__ == "__main__":
    DolphinTUI().run()
|
||||
740
Observability/TUI/dolphin_tui_v5.py
Executable file
740
Observability/TUI/dolphin_tui_v5.py
Executable file
@@ -0,0 +1,740 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
DOLPHIN TUI v5
|
||||
==============
|
||||
Fixes vs v3:
|
||||
• SYS HEALTH: tdr/scb labels expanded to full service names
|
||||
• ALPHA ENGINE: falls back to engine_snapshot when DOLPHIN_SAFETY is empty
|
||||
• MC-FOREWARNER: graceful "not yet run" display; shows last-run age; no crash on absent key
|
||||
• Version number shown in header after MC status
|
||||
|
||||
Run: source /home/dolphin/siloqy_env/bin/activate && python dolphin_tui_v5.py
|
||||
Keys: q=quit r=force-refresh l=toggle log t=toggle test footer
|
||||
"""
|
||||
|
||||
# ── stdlib ────────────────────────────────────────────────────────────────────
|
||||
import asyncio, json, math, threading, time
|
||||
from collections import deque
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
# ── third-party ───────────────────────────────────────────────────────────────
|
||||
try:
|
||||
from textual.app import App, ComposeResult
|
||||
from textual.containers import Horizontal, Vertical
|
||||
from textual.widgets import Digits, ProgressBar, Sparkline, Static
|
||||
except ImportError as e:
|
||||
raise SystemExit(f"textual not found — activate siloqy_env: {e}")
|
||||
|
||||
try:
|
||||
import hazelcast
|
||||
_HZ_OK = True
|
||||
except ImportError:
|
||||
_HZ_OK = False
|
||||
|
||||
# Shown in the header and MC title so operators can confirm which build is live.
TUI_VERSION = "TUI v5"

# File fallbacks used by _update() when the corresponding Hazelcast key is empty.
_META_JSON = Path("/mnt/dolphinng5_predict/run_logs/meta_health.json")
_CAPITAL_JSON = Path("/tmp/dolphin_capital_checkpoint.json")
# Path relative to this file: Observability/TUI/ → ../../run_logs/
_TEST_JSON = Path(__file__).parent.parent.parent / "run_logs" / "test_results_latest.json"
# Symbols rendered in the OBF panel, in display order.
_OBF_SYMS = ["BTCUSDT", "ETHUSDT", "SOLUSDT", "BNBUSDT", "XRPUSDT"]

# Rich-markup color maps: posture (_PC), MC-forewarner status (_MC),
# meta-health status (_SC). Unknown keys fall back to "dim" at call sites.
_PC = {"APEX":"green","STALKER":"yellow","TURTLE":"dark_orange","HIBERNATE":"red"}
_MC = {"GREEN":"green","ORANGE":"dark_orange","RED":"red"}
_SC = {"GREEN":"green","DEGRADED":"yellow","CRITICAL":"dark_orange","DEAD":"red"}
|
||||
|
||||
|
||||
# ── Thread-safe state ─────────────────────────────────────────────────────────
|
||||
class _State:
|
||||
def __init__(self):
|
||||
self._l = threading.Lock(); self._d: Dict[str, Any] = {}
|
||||
def put(self, k, v):
|
||||
with self._l: self._d[k] = v
|
||||
def get(self, k, default=None):
|
||||
with self._l: return self._d.get(k, default)
|
||||
def update(self, m):
|
||||
with self._l: self._d.update(m)
|
||||
|
||||
_S = _State()
|
||||
|
||||
def _ingest(key, raw):
    """Parse a JSON payload from Hazelcast and stash it under ``hz.<key>``,
    recording the receive time under ``hz.<key>._t``.

    Empty or unparseable payloads are silently dropped — the feeds are
    best-effort and the UI simply keeps showing the previous value.
    """
    if not raw:
        return
    try:
        _S.update({f"hz.{key}": json.loads(raw), f"hz.{key}._t": time.time()})
    except Exception:
        pass
|
||||
|
||||
def start_hz_listener(on_scan=None):
    """Start a daemon thread that mirrors Hazelcast maps into the _S store.

    Connects to the local "dolphin" cluster, seeds the interesting keys, then
    subscribes entry listeners so updates stream in. *on_scan* (optional
    callable) is invoked after each new ``latest_eigen_scan`` payload — the TUI
    uses it to trigger an immediate repaint. On any failure the thread marks
    ``hz_up`` False, records ``hz_err``, sleeps 10 s, and reconnects forever.
    No-op (beyond ``hz_up`` = False) when the hazelcast client is unavailable.
    """
    if not _HZ_OK:
        _S.put("hz_up", False); return
    def _run():
        while True:
            try:
                _S.put("hz_up", False)
                client = hazelcast.HazelcastClient(
                    cluster_name="dolphin", cluster_members=["localhost:5701"],
                    connection_timeout=5.0)
                _S.put("hz_up", True)
                # Feature map: seed current values, then listen for changes.
                fm = client.get_map("DOLPHIN_FEATURES").blocking()
                for k in ("latest_eigen_scan","exf_latest","acb_boost",
                          "obf_universe_latest","mc_forewarner_latest"):
                    _ingest(k, fm.get(k))
                def _f(e):
                    _ingest(e.key, e.value)
                    # New eigen scans also drive an immediate UI refresh.
                    if e.key == "latest_eigen_scan" and on_scan:
                        try: on_scan()
                        except Exception: pass
                fm.add_entry_listener(include_value=True, updated=_f, added=_f)
                # Single-key maps: (hazelcast map, key, local state key).
                for map_name, key, state_key in [
                    ("DOLPHIN_META_HEALTH", "latest", "meta_health"),
                    ("DOLPHIN_SAFETY", "latest", "safety"),
                    ("DOLPHIN_HEARTBEAT", "nautilus_flow_heartbeat", "heartbeat"),
                ]:
                    m = client.get_map(map_name).blocking()
                    _ingest(state_key, m.get(key))
                    # Default args bind per-iteration values (late-binding fix).
                    def _cb(e, sk=state_key, ek=key):
                        if e.key == ek: _ingest(sk, e.value)
                    m.add_entry_listener(include_value=True, updated=_cb, added=_cb)
                # BLUE engine state: capital checkpoint + engine snapshot.
                stm = client.get_map("DOLPHIN_STATE_BLUE").blocking()
                _ingest("capital", stm.get("capital_checkpoint"))
                _ingest("engine_snapshot", stm.get("engine_snapshot"))
                def _cap(e):
                    if e.key == "capital_checkpoint": _ingest("capital", e.value)
                    elif e.key == "engine_snapshot": _ingest("engine_snapshot", e.value)
                stm.add_entry_listener(include_value=True, updated=_cap, added=_cap)
                # PnL map may not exist yet — treat as optional.
                try:
                    pm = client.get_map("DOLPHIN_PNL_BLUE").blocking()
                    _ingest("pnl_blue", pm.get("session_perf"))
                    def _pnl(e):
                        if e.key == "session_perf": _ingest("pnl_blue", e.value)
                    pm.add_entry_listener(include_value=True, updated=_pnl, added=_pnl)
                except Exception: pass
                # Park here while connected; drop out to reconnect if the
                # client's lifecycle service reports it stopped.
                while True:
                    time.sleep(5)
                    if not getattr(client.lifecycle_service, "is_running", lambda: True)():
                        break
            except Exception as e:
                _S.put("hz_up", False); _S.put("hz_err", str(e)); time.sleep(10)
    threading.Thread(target=_run, daemon=True, name="hz-listener").start()
|
||||
|
||||
async def prefect_poll_loop():
    """Poll the Prefect API every 60 s and publish recent flow runs to _S.

    Keeps the newest run per flow (up to 8), resolving flow ids to names
    lazily. Results land in ``prefect_flows`` / ``prefect_ok``; failures set
    ``prefect_ok`` False and a truncated ``prefect_err``. Imports are inside
    the loop so a missing/broken prefect install degrades gracefully.
    """
    while True:
        try:
            from prefect.client.orchestration import get_client
            from prefect.client.schemas.sorting import FlowRunSort
            async with get_client() as pc:
                runs = await pc.read_flow_runs(limit=20, sort=FlowRunSort.START_TIME_DESC)
                seen: Dict[str, Any] = {}          # newest run per flow id
                fid_to_name: Dict[str, str] = {}   # flow id -> flow name cache
                for r in runs:
                    fid = str(r.flow_id)
                    if fid not in seen: seen[fid] = r
                rows = []
                for fid, r in seen.items():
                    if fid not in fid_to_name:
                        try:
                            f = await pc.read_flow(r.flow_id)
                            fid_to_name[fid] = f.name
                        # Fall back to a short id when the flow lookup fails.
                        except Exception: fid_to_name[fid] = fid[:8]
                    rows.append({"name": fid_to_name[fid],
                                 "state": r.state_name or "?",
                                 "ts": r.start_time.strftime("%m-%d %H:%M") if r.start_time else "--"})
                _S.put("prefect_flows", rows[:8]); _S.put("prefect_ok", True)
        except Exception as e:
            _S.put("prefect_ok", False); _S.put("prefect_err", str(e)[:60])
        await asyncio.sleep(60)
|
||||
|
||||
# ── Helpers ───────────────────────────────────────────────────────────────────
|
||||
def _age(ts):
|
||||
if not ts: return "?"
|
||||
s = time.time() - ts
|
||||
if s < 0: return "0s"
|
||||
if s < 60: return f"{s:.0f}s"
|
||||
if s < 3600: return f"{s/60:.0f}m"
|
||||
return f"{s/3600:.1f}h"
|
||||
|
||||
def _age_col(ts, warn=15, dead=60):
|
||||
s = time.time() - ts if ts else 9999
|
||||
return "red" if s > dead else ("yellow" if s > warn else "green")
|
||||
|
||||
def _bar(v, width=12):
|
||||
v = max(0.0, min(1.0, v))
|
||||
f = round(v * width)
|
||||
return "█" * f + "░" * (width - f)
|
||||
|
||||
def _fmt_vel(v):
|
||||
return "---" if v is None else f"{float(v):+.5f}"
|
||||
|
||||
def _dot(state):
|
||||
s = (state or "").upper()
|
||||
if s == "COMPLETED": return "[green]●[/green]"
|
||||
if s == "RUNNING": return "[cyan]●[/cyan]"
|
||||
if s in ("FAILED","CRASHED","TIMEDOUT"): return "[red]●[/red]"
|
||||
if s == "CANCELLED": return "[dim]●[/dim]"
|
||||
if s == "PENDING": return "[yellow]●[/yellow]"
|
||||
return "[dim]◌[/dim]"
|
||||
|
||||
def _posture_markup(p):
    """Posture name wrapped in its _PC color tag ('dim' for unknown postures)."""
    c = _PC.get(p, "dim")
    return f"[{c}]{p}[/{c}]"
|
||||
|
||||
def _col(v, c): return f"[{c}]{v}[/{c}]"
|
||||
|
||||
def _eigen_from_scan(scan):
|
||||
if not scan: return {}
|
||||
r = scan.get("result", scan)
|
||||
mwr_raw = r.get("multi_window_results", {})
|
||||
def _td(w): return (mwr_raw.get(w) or mwr_raw.get(str(w)) or {}).get("tracking_data", {})
|
||||
def _rs(w): return (mwr_raw.get(w) or mwr_raw.get(str(w)) or {}).get("regime_signals", {})
|
||||
v50 = _td(50).get("lambda_max_velocity") or scan.get("w50_velocity", 0.0)
|
||||
v150 = _td(150).get("lambda_max_velocity") or scan.get("w150_velocity", 0.0)
|
||||
v300 = _td(300).get("lambda_max_velocity") or scan.get("w300_velocity", 0.0)
|
||||
v750 = _td(750).get("lambda_max_velocity") or scan.get("w750_velocity", 0.0)
|
||||
vel_div = scan.get("vel_div", float(v50 or 0) - float(v150 or 0))
|
||||
inst_avg = sum(_rs(w).get("instability_score", 0.0) for w in (50,150,300,750)) / 4
|
||||
bt_price = (r.get("pricing_data", {}) or {}).get("current_prices", {}).get("BTCUSDT")
|
||||
return {
|
||||
"scan_number": scan.get("scan_number", 0),
|
||||
"timestamp": scan.get("timestamp", 0),
|
||||
"vel_div": float(vel_div or 0),
|
||||
"v50": float(v50 or 0), "v150": float(v150 or 0),
|
||||
"v300": float(v300 or 0), "v750": float(v750 or 0),
|
||||
"inst_avg": float(inst_avg or 0),
|
||||
"btc_price": float(bt_price) if bt_price else None,
|
||||
"regime": r.get("regime", r.get("sentiment", "?")),
|
||||
"version": scan.get("version", "?"),
|
||||
}
|
||||
|
||||
# ── CSS ───────────────────────────────────────────────────────────────────────
|
||||
_CSS = """
|
||||
Screen { background: #0a0a0a; color: #d4d4d4; }
|
||||
#header { height: 2; background: #111; border-bottom: solid #333; padding: 0 1; }
|
||||
#trader_row { height: 5; }
|
||||
#top_row { height: 9; }
|
||||
#mid_row { height: 9; }
|
||||
#bot_row { height: 7; }
|
||||
#log_row { height: 5; display: none; }
|
||||
#mc_outer { height: 16; border: solid #224; background: #060616; }
|
||||
#mc_title { height: 1; padding: 0 1; }
|
||||
#mc_body { height: 15; }
|
||||
#mc_left { width: 18; padding: 0 1; }
|
||||
#mc_center { width: 1fr; padding: 0 1; }
|
||||
#mc_right { width: 30; padding: 0 1; }
|
||||
#mc_prob_label { height: 1; }
|
||||
#mc_prob_bar { height: 1; }
|
||||
#mc_env_label { height: 1; }
|
||||
#mc_env_bar { height: 1; }
|
||||
#mc_champ_label{ height: 1; }
|
||||
#mc_champ_bar { height: 1; }
|
||||
#mc_live { height: 8; }
|
||||
#mc_spark_lbl { height: 1; }
|
||||
#mc_spark { height: 2; }
|
||||
#mc_mae_lbl { height: 1; }
|
||||
#mc_mae_spark { height: 2; }
|
||||
#mc_digits { height: 3; }
|
||||
#mc_status { height: 3; }
|
||||
#mc_legend { height: 6; }
|
||||
#test_footer { height: 3; background: #101010; border-top: solid #2a2a2a; padding: 0 1; }
|
||||
Static.panel { border: solid #333; padding: 0 1; height: 100%; }
|
||||
#p_trader { width: 1fr; border: solid #006650; }
|
||||
#p_health { width: 1fr; }
|
||||
#p_alpha { width: 1fr; }
|
||||
#p_scan { width: 1fr; }
|
||||
#p_extf { width: 1fr; }
|
||||
#p_obf { width: 1fr; }
|
||||
#p_capital { width: 1fr; }
|
||||
#p_prefect { width: 1fr; }
|
||||
#p_acb { width: 1fr; }
|
||||
#p_log { width: 1fr; border: solid #333; padding: 0 1; }
|
||||
ProgressBar > .bar--bar { color: $success; }
|
||||
ProgressBar > .bar--complete { color: $success; }
|
||||
ProgressBar.-danger > .bar--bar { color: $error; }
|
||||
ProgressBar.-warning > .bar--bar { color: $warning; }
|
||||
"""
|
||||
|
||||
|
||||
# ── App ───────────────────────────────────────────────────────────────────────
|
||||
class DolphinTUI(App):
|
||||
CSS = _CSS
|
||||
BINDINGS = [("q","quit","Quit"),("r","force_refresh","Refresh"),
|
||||
("l","toggle_log","Log"),("t","toggle_tests","Tests")]
|
||||
_log_vis = False; _test_vis = True
|
||||
_prob_hist: deque; _mae_deque: deque
|
||||
_session_start_cap: Optional[float] = None
|
||||
_cap_peak: Optional[float] = None
|
||||
|
||||
def compose(self) -> ComposeResult:
    """Declare the static widget tree; all text content is painted later by _update().

    NOTE(review): nesting below is reconstructed from widget ids and the CSS
    heights — confirm mc_legend belongs to #mc_right and #test_footer is
    top-level against the running layout.
    """
    yield Static("", id="header")
    # Full-width trader strip.
    with Horizontal(id="trader_row"):
        yield Static("", classes="panel", id="p_trader")
    # Three panel rows.
    with Horizontal(id="top_row"):
        yield Static("", classes="panel", id="p_health")
        yield Static("", classes="panel", id="p_alpha")
        yield Static("", classes="panel", id="p_scan")
    with Horizontal(id="mid_row"):
        yield Static("", classes="panel", id="p_extf")
        yield Static("", classes="panel", id="p_obf")
        yield Static("", classes="panel", id="p_capital")
    with Horizontal(id="bot_row"):
        yield Static("", classes="panel", id="p_prefect")
        yield Static("", classes="panel", id="p_acb")
    # MC-forewarner manifold: title + three-column body
    # (big digits/status | threshold bars + live perf | sparklines + legend).
    with Vertical(id="mc_outer"):
        yield Static("", id="mc_title")
        with Horizontal(id="mc_body"):
            with Vertical(id="mc_left"):
                yield Digits("0.000", id="mc_digits")
                yield Static("", id="mc_status")
            with Vertical(id="mc_center"):
                yield Static("", id="mc_prob_label")
                yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_prob_bar")
                yield Static("", id="mc_env_label")
                yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_env_bar")
                yield Static("", id="mc_champ_label")
                yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_champ_bar")
                yield Static("", id="mc_live")
            with Vertical(id="mc_right"):
                yield Static("", id="mc_spark_lbl")
                yield Sparkline([], id="mc_spark")
                yield Static("", id="mc_mae_lbl")
                yield Sparkline([], id="mc_mae_spark")
                yield Static("", id="mc_legend")
    yield Static("", id="test_footer")
    # Hidden via CSS until toggled with the 'l' key.
    with Horizontal(id="log_row"):
        yield Static("", classes="panel", id="p_log")
|
||||
|
||||
def on_mount(self) -> None:
    """Wire up data sources and the refresh timer once the DOM exists."""
    self._prob_hist = deque([0.0] * 40, maxlen=40)  # cat.prob sparkline window
    self._mae_deque = deque(maxlen=500)             # MAE sparkline backlog
    # Hazelcast listener runs in a daemon thread; UI updates are marshalled
    # back onto the Textual event loop via call_from_thread.
    start_hz_listener(on_scan=lambda: self.call_from_thread(self._update))
    self.run_worker(prefect_poll_loop(), name="prefect-poll", exclusive=True)
    self.set_interval(1.0, self._update)  # 1 Hz fallback refresh
    self._update()                        # paint immediately, don't wait 1s
|
||||
|
||||
def _update(self) -> None:
    """Refresh every panel from shared state.

    Called at 1 Hz, on each eigen-scan event, and on manual refresh.
    All reads are best-effort: each Hazelcast-backed key falls back to
    {} (meta-health and capital additionally fall back to a JSON file on
    disk), so a missing producer degrades to an "awaiting…" panel
    instead of crashing the TUI.

    BUGFIX vs previous revision: in the TRADER panel the `if/else` used
    to apply to the whole implicit f-string concatenation, so when
    ``last_vel_div`` was None the header/heartbeat line was dropped and
    the vel_div/cap/trades line was duplicated in the else text. The
    common prefix is now built once and only the tail is conditional.
    """
    now_str = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
    hz_up = _S.get("hz_up", False)
    mh    = _S.get("hz.meta_health") or self._read_json(_META_JSON)
    safe  = _S.get("hz.safety") or {}
    scan  = _S.get("hz.latest_eigen_scan") or {}
    exf   = _S.get("hz.exf_latest") or {}
    acb   = _S.get("hz.acb_boost") or {}
    obf_u = _S.get("hz.obf_universe_latest") or {}
    mc    = _S.get("hz.mc_forewarner_latest") or {}
    cap   = _S.get("hz.capital") or self._read_json(_CAPITAL_JSON) or {}
    hb    = _S.get("hz.heartbeat") or {}
    eng   = _S.get("hz.engine_snapshot") or {}
    eigen = _eigen_from_scan(scan)

    # ── posture: DOLPHIN_SAFETY first, fall back to engine_snapshot ───────
    posture = safe.get("posture") or eng.get("posture") or "?"
    # ── Rm / breakdown: DOLPHIN_SAFETY first, fall back to zeros ─────────
    rm_s = float(safe.get("Rm", 0.0))
    bd = safe.get("breakdown") or {}
    # If DOLPHIN_SAFETY is empty but engine_snapshot has posture, show that
    safety_live = bool(safe.get("posture") or safe.get("Rm"))

    rm_m   = mh.get("rm_meta", 0.0) if mh else 0.0
    mhs_st = mh.get("status", "?") if mh else "?"
    sc_mhs = _SC.get(mhs_st, "dim")
    pc_col = _PC.get(posture, "dim")
    hz_tag = "[green][HZ✓][/green]" if hz_up else "[red][HZ✗][/red]"
    mc_st  = mc.get("status", "N/A") if mc else "N/A"
    mc_col = _MC.get(mc_st, "dim")

    # ── HEADER ────────────────────────────────────────────────────────────
    self._w("#header").update(
        f"[bold cyan]🐬 DOLPHIN-NAUTILUS[/bold cyan] {now_str}"
        f" {hz_tag} [{sc_mhs}]MHS:{mhs_st} {rm_m:.3f}[/{sc_mhs}]"
        f" [{pc_col}]◈{posture}[/{pc_col}]"
        f" [{mc_col}]MC:{mc_st}[/{mc_col}]"
        f" [dim]{TUI_VERSION}[/dim]\n"
        f"[dim] localhost:5701 q=quit r=refresh l=log t=tests[/dim]"
    )

    # ── TRADER ────────────────────────────────────────────────────────────
    cap_val  = float(cap.get("capital", 0)) if cap else 0.0
    hb_phase = hb.get("phase", "?") if hb else "N/A"
    hb_ts    = hb.get("ts") if hb else None
    hb_age   = _age(hb_ts) if hb_ts else "?"
    hb_col   = _age_col(hb_ts, 30, 120) if hb_ts else "red"
    vel_div  = eigen.get("vel_div", 0.0)
    vc       = "green" if vel_div > 0 else ("red" if vel_div < -0.02 else "yellow")
    scan_no  = eigen.get("scan_number", 0)
    btc_p    = eigen.get("btc_price")
    btc_str  = f"BTC:[cyan]${btc_p:,.0f}[/cyan] " if btc_p else ""
    trades_ex = eng.get("trades_executed")
    last_vd   = eng.get("last_vel_div")
    # Common prefix is now unconditional (see BUGFIX note above).
    trader_common = (
        f"[bold cyan]TRADER[/bold cyan] {_posture_markup(posture)}"
        f" phase:[{hb_col}]{hb_phase}[/{hb_col}] hb:{_col(hb_age, hb_col)}"
        f" scan:[dim]#{scan_no}[/dim] {btc_str}\n"
        f" vel_div:[{vc}]{vel_div:+.5f}[/{vc}] thr:[dim]-0.02000[/dim]"
        f" cap:[cyan]${cap_val:,.0f}[/cyan]"
        f" trades:{trades_ex if trades_ex is not None else '—'}\n"
    )
    trader_tail = (
        f" last_vel_div:[dim]{last_vd:+.5f}[/dim] open-pos:[dim]DOLPHIN_PNL_BLUE[/dim]"
        if last_vd is not None else
        f" open-pos:[dim]DOLPHIN_PNL_BLUE (not yet wired)[/dim]"
    )
    self._w("#p_trader").update(trader_common + trader_tail)

    # ── SYS HEALTH — tdr/scb labels expanded ─────────────────────────────
    if mh:
        svc  = mh.get("service_status", {})
        hz_ks = mh.get("hz_key_status", {})

        def _svc(nm, label):
            # Supervisor service dot: green only when RUNNING.
            st = svc.get(nm, "?")
            dot = "[green]●[/green]" if st == "RUNNING" else "[red]●[/red]"
            return f"{dot}[dim]{label}[/dim]"

        def _hz_dot(nm):
            # Hazelcast key freshness score → green/yellow/red dot.
            sc = hz_ks.get(nm, {}).get("score", 0)
            return "[green]●[/green]" if sc >= 0.9 else ("[yellow]●[/yellow]" if sc >= 0.5 else "[red]●[/red]")

        self._w("#p_health").update(
            f"[bold]SYS HEALTH[/bold] [{sc_mhs}]{mhs_st}[/{sc_mhs}]\n"
            f"rm:[{sc_mhs}]{rm_m:.3f}[/{sc_mhs}]"
            f" m4:{mh.get('m4_control_plane',0):.2f}"
            f" m3:{mh.get('m3_data_freshness',0):.2f}"
            f" m5:{mh.get('m5_coherence',0):.2f}\n"
            f"{_svc('dolphin_data:exf_fetcher','exf')}"
            f" {_svc('dolphin_data:acb_processor','acb')}"
            f" {_svc('dolphin_data:obf_universe','obf')}\n"
            f"{_svc('dolphin:nautilus_trader','trader')}"
            f" {_svc('dolphin:scan_bridge','scan-bridge')}\n"
            f"[dim]hz: exf{_hz_dot('exf_latest')}"
            f" scan{_hz_dot('latest_eigen_scan')}"
            f" obf{_hz_dot('obf_universe')}[/dim]"
        )
    else:
        self._w("#p_health").update("[bold]SYS HEALTH[/bold]\n[dim]awaiting MHS…[/dim]")

    # ── ALPHA ENGINE — fall back to engine_snapshot when safety empty ─────
    safe_ts  = _S.get("hz.safety._t")
    safe_age = _age(safe_ts) if safe_ts else "?"
    safe_ac  = _age_col(safe_ts, 30, 120) if safe_ts else "red"

    def _cat(n):
        # Safety category score with traffic-light coloring.
        v = bd.get(f"Cat{n}", 0.0)
        c = "green" if v >= 0.9 else ("yellow" if v >= 0.6 else "red")
        return f"[{c}]{v:.2f}[/{c}]"

    if safety_live:
        self._w("#p_alpha").update(
            f"[bold]ALPHA ENGINE[/bold] {_posture_markup(posture)}\n"
            f"Rm:[{pc_col}]{_bar(rm_s,14)}[/{pc_col}]{rm_s:.3f}\n"
            f"C1:{_cat(1)} C2:{_cat(2)} C3:{_cat(3)}\n"
            f"C4:{_cat(4)} C5:{_cat(5)}"
            f" fenv:{bd.get('f_env',0):.2f} fex:{bd.get('f_exe',0):.2f}\n"
            f"[dim]age:{_col(safe_age, safe_ac)}[/dim]"
        )
    else:
        # DOLPHIN_SAFETY empty — show what we have from engine_snapshot
        bars_idx = eng.get("bar_idx", "?")
        scans_p  = eng.get("scans_processed", "?")
        self._w("#p_alpha").update(
            f"[bold]ALPHA ENGINE[/bold] {_posture_markup(posture)}\n"
            f"[yellow]DOLPHIN_SAFETY empty[/yellow]\n"
            f"posture from engine_snapshot\n"
            f"bar:{bars_idx} scans:{scans_p}\n"
            f"[dim]Rm/Cat1-5: awaiting DOLPHIN_SAFETY[/dim]"
        )

    # ── SCAN ──────────────────────────────────────────────────────────────
    scan_ac  = _age_col(eigen.get("timestamp", 0), 15, 60)
    scan_age = _age(eigen.get("timestamp")) if eigen.get("timestamp") else "?"
    vi = eigen.get("inst_avg", 0)
    self._w("#p_scan").update(
        f"[bold]SCAN {eigen.get('version','?')}[/bold]"
        f" [dim]#{scan_no}[/dim] age:[{scan_ac}]{scan_age}[/{scan_ac}]\n"
        f"vel_div:[{vc}]{vel_div:+.5f}[/{vc}]\n"
        f"w50:[yellow]{_fmt_vel(eigen.get('v50'))}[/yellow]"
        f" w150:[dim]{_fmt_vel(eigen.get('v150'))}[/dim]\n"
        f"w300:[dim]{_fmt_vel(eigen.get('v300'))}[/dim]"
        f" w750:[dim]{_fmt_vel(eigen.get('v750'))}[/dim]\n"
        f"inst:{vi:.4f}"
    )

    # ── ExtF ──────────────────────────────────────────────────────────────
    # NOTE(review): formatting below assumes the numeric fields are present
    # whenever `exf` is non-empty — a partial payload would raise; confirm
    # the producer always writes all fields.
    exf_t   = _S.get("hz.exf_latest._t")
    exf_age = _age(exf_t) if exf_t else "?"
    exf_ac  = _age_col(exf_t, 30, 120) if exf_t else "red"
    f_btc  = exf.get("funding_btc"); dvol = exf.get("dvol_btc")
    fng    = exf.get("fng"); taker = exf.get("taker")
    ls_btc = exf.get("ls_btc"); vix = exf.get("vix")
    ok_cnt = exf.get("_ok_count", 0)
    dvol_c = "red" if dvol and dvol > 70 else ("yellow" if dvol and dvol > 50 else "green")
    fng_c  = "red" if fng and fng < 25 else ("yellow" if fng and fng < 45 else "green")
    if exf:
        self._w("#p_extf").update(
            f"[bold]ExtF[/bold] [{exf_ac}]{ok_cnt}/9 {exf_age}[/{exf_ac}]\n"
            f"fund:[cyan]{f_btc:.5f}[/cyan] dvol:[{dvol_c}]{dvol:.1f}[/{dvol_c}]\n"
            f"fng:[{fng_c}]{int(fng) if fng else '?'}[/{fng_c}] taker:[yellow]{taker:.3f}[/yellow]\n"
            f"ls_btc:{ls_btc:.3f} vix:{vix:.1f}\n"
            f"acb✓:{exf.get('_acb_ready','?')}"
        )
    else:
        self._w("#p_extf").update("[bold]ExtF[/bold]\n[dim]no data[/dim]")

    # ── OBF ───────────────────────────────────────────────────────────────
    obf_t   = _S.get("hz.obf_universe_latest._t")
    obf_age = _age(obf_t) if obf_t else "?"
    obf_ac  = _age_col(obf_t, 30, 120) if obf_t else "red"
    n_assets = obf_u.get("_n_assets", 0) if obf_u else 0
    lines = [f"[bold]OBF[/bold] [{obf_ac}]n={n_assets} {obf_age}[/{obf_ac}]"]
    for sym in _OBF_SYMS:
        if not obf_u: break
        a = obf_u.get(sym)
        if not a: continue
        imb = float(a.get("imbalance", 0))
        fp  = float(a.get("fill_probability", 0))
        dq  = float(a.get("depth_quality", 0))
        imb_c = "green" if imb > 0.1 else ("red" if imb < -0.1 else "yellow")
        lines.append(f"{sym[:3]} [{imb_c}]{imb:+.2f}[/{imb_c}] fp:{fp:.2f} dq:{dq:.2f}")
    self._w("#p_obf").update("\n".join(lines[:6]))

    # ── CAPITAL ───────────────────────────────────────────────────────────
    cap_t  = _S.get("hz.capital._t")
    cap_ac = _age_col(cap_t, 60, 300) if cap_t else "dim"
    cap_age = _age(cap_t) if cap_t else "?"
    c5 = bd.get("Cat5", 1.0) if bd else 1.0
    try:
        # Invert the Cat5 sigmoid to estimate drawdown; 30.0 is the
        # sigmoid steepness used by the producer — presumably matches the
        # safety engine's parameterization (TODO confirm).
        dd_est = 0.12 + math.log(1.0/c5 - 1.0) / 30.0 if 0 < c5 < 1 else 0.0
    except Exception:
        dd_est = 0.0
    dd_c = "red" if dd_est > 0.15 else ("yellow" if dd_est > 0.08 else "green")
    self._w("#p_capital").update(
        f"[bold]CAPITAL[/bold] [{cap_ac}]{cap_age}[/{cap_ac}]\n"
        f"Cap:[cyan]${cap_val:,.0f}[/cyan]\n"
        f"DD≈:[{dd_c}]{dd_est*100:.1f}%[/{dd_c}] C5:{c5:.3f}\n"
        f"Pos:{_posture_markup(posture)}\n"
        f"[dim]pnl/trades: DOLPHIN_PNL_BLUE[/dim]"
    )

    # ── PREFECT ───────────────────────────────────────────────────────────
    flows  = _S.get("prefect_flows") or []
    pf_ok  = _S.get("prefect_ok", False)
    pf_hdr = "[green]✓[/green]" if pf_ok else "[red]✗[/red]"
    flines = "\n".join(
        f"{_dot(f['state'])} {f['name'][:22]:<22} {f['ts']}" for f in flows[:5])
    self._w("#p_prefect").update(
        f"[bold]PREFECT[/bold] {pf_hdr}\n" + (flines or "[dim]polling…[/dim]"))

    # ── ACB ───────────────────────────────────────────────────────────────
    acb_t   = _S.get("hz.acb_boost._t")
    acb_age = _age(acb_t) if acb_t else "?"
    acb_ac  = _age_col(acb_t, 3600, 86400) if acb_t else "dim"
    boost = acb.get("boost", 1.0) if acb else 1.0
    beta  = acb.get("beta", 0.8) if acb else 0.8
    cut   = acb.get("cut", 0.0) if acb else 0.0
    boost_c = "green" if boost >= 1.5 else ("yellow" if boost >= 1.0 else "red")
    cut_c   = "red" if cut > 0 else "dim"
    self._w("#p_acb").update(
        f"[bold]ACB[/bold] [{acb_ac}]{acb.get('date','?') if acb else '?'}[/{acb_ac}]\n"
        f"boost:[{boost_c}]{boost:.2f}x[/{boost_c}] β={beta:.2f}"
        f" cut:[{cut_c}]{cut:.2f}[/{cut_c}]\n"
        f"w750_thr:{acb.get('w750_threshold',0.0) if acb else 0.0:.4f}\n"
        f"[dim]dvol={acb.get('factors',{}).get('dvol_btc','?') if acb else '?'}"
        f" fng={acb.get('factors',{}).get('fng','?') if acb else '?'}[/dim]\n"
        f"[dim]age:{acb_age} cfg:{acb.get('config_used','?') if acb else '?'}[/dim]"
    )

    # ── MC-FOREWARNER — graceful when absent ─────────────────────────────
    prob    = float(mc.get("catastrophic_prob", 0.0)) if mc else 0.0
    env     = float(mc.get("envelope_score", 0.0)) if mc else 0.0
    champ_p = mc.get("champion_probability") if mc else None
    mc_ts   = mc.get("timestamp") if mc else None
    mc_warns = mc.get("warnings", []) if mc else []
    sc = _MC.get(mc_st, "dim")
    self._prob_hist.append(prob)

    # Age since last 4h run
    mc_age_str = "never run"
    if mc_ts:
        try:
            mc_dt = datetime.fromisoformat(mc_ts.replace("Z", "+00:00"))
            age_s = (datetime.now(timezone.utc) - mc_dt).total_seconds()
            age_m = int(age_s // 60)
            mc_age_str = f"{age_m//60}h{age_m%60:02d}m ago" if age_m >= 60 else f"{age_m}m ago"
        except Exception: pass

    mc_present = bool(mc)
    self._w("#mc_title").update(
        f"[bold cyan]⚡ MC-FOREWARNER RISK MANIFOLD[/bold cyan] {TUI_VERSION}"
        + (f" [{sc}]▶ {mc_st}[/{sc}] [dim]assessed:{mc_age_str} cadence:4h[/dim]"
           if mc_present else
           " [yellow]⚠ no data yet — Prefect 4h schedule not yet run[/yellow]"
           " [dim](mc_forewarner_flow runs every 4h)[/dim]")
    )

    # Left: digits + status
    self._w("#mc_digits", Digits).update(f"{prob:.3f}")
    status_str = {"GREEN":"🟢 SAFE","ORANGE":"🟡 CAUTION","RED":"🔴 DANGER"}.get(mc_st, "⚪ N/A")
    champ_str = f"champ:{champ_p*100:.0f}%" if champ_p is not None else "champ:—"
    self._w("#mc_status").update(
        (f"[{sc}]{status_str}[/{sc}]\n[dim]cat.prob {champ_str}[/dim]\n[dim]env:{env:.3f}[/dim]")
        if mc_present else
        "[yellow]awaiting[/yellow]\n[dim]first run[/dim]\n[dim]in ~4h[/dim]"
    )

    # Center bars: progress value plus threshold-colored CSS class.
    for bar_id, val, lo_thr, hi_thr, lo_cls, hi_cls, label, fmt in [
        ("mc_prob_bar", prob, 0.10, 0.30, "-warning", "-danger",
         f"[dim]cat.prob[/dim] [{sc}]{prob:.4f}[/{sc}] [green]<0.10 OK[/green] [yellow]<0.30 WARN[/yellow] [red]≥0.30 CRIT[/red]",
         int(prob * 100)),
        ("mc_env_bar", env, 0.40, 0.70, "-danger", "-warning",
         f"[dim]env.score[/dim] [green]{env:.4f}[/green] [red]<0.40 DANGER[/red] [yellow]<0.70 CAUTION[/yellow] [green]≥0.70 SAFE[/green]",
         int(env * 100)),
    ]:
        pb = self._w(f"#{bar_id}", ProgressBar)
        pb.progress = fmt
        pb.remove_class("-danger", "-warning")
        if val < lo_thr: pb.add_class(lo_cls)
        elif val < hi_thr: pb.add_class(hi_cls)
        self._w(f"#{bar_id.replace('_bar','_label')}").update(label)

    # champion_probability bar
    chp_val = champ_p if champ_p is not None else 0.0
    cb = self._w("#mc_champ_bar", ProgressBar)
    cb.progress = int(chp_val * 100)
    cb.remove_class("-danger", "-warning")
    if chp_val < 0.30: cb.add_class("-danger")
    elif chp_val < 0.60: cb.add_class("-warning")
    self._w("#mc_champ_label").update(
        f"[dim]champ.prob[/dim] "
        + (f"[green]{champ_p*100:.1f}%[/green]" if champ_p is not None else "[dim]—[/dim]")
        + " [green]>60% GOOD[/green] [yellow]>30% MARGINAL[/yellow] [red]<30% RISK[/red]"
    )

    # Live performance tier (session ROI / drawdown vs champion gates)
    cur_cap = float(cap.get("capital", 0.0)) if cap else 0.0
    if cur_cap > 0:
        if self._session_start_cap is None: self._session_start_cap = cur_cap
        if self._cap_peak is None or cur_cap > self._cap_peak: self._cap_peak = cur_cap
    live_roi = ((cur_cap - self._session_start_cap) / self._session_start_cap
                if cur_cap > 0 and self._session_start_cap else None)
    live_dd = ((self._cap_peak - cur_cap) / self._cap_peak
               if cur_cap > 0 and self._cap_peak and cur_cap < self._cap_peak else None)
    pnl_blue = _S.get("hz.pnl_blue") or {}
    def _pct(v): return f"{v*100:+.1f}%" if v is not None else "—"
    def _lm(k, fmt="{:.3f}"):
        # Metric from DOLPHIN_PNL_BLUE, or em-dash when absent.
        v = pnl_blue.get(k); return fmt.format(v) if v is not None else "—"
    roi_c = "green" if live_roi and live_roi > 0 else ("red" if live_roi and live_roi < -0.10 else "yellow")
    dd_c2 = "red" if live_dd and live_dd > 0.20 else ("yellow" if live_dd and live_dd > 0.08 else "green")
    self._w("#mc_live").update(
        f"[dim]ROI[/dim] [{roi_c}]{_pct(live_roi):>8}[/{roi_c}]"
        f" [dim]champ gate:>+30% crit:<-30%[/dim]\n"
        f"[dim]DD [/dim] [{dd_c2}]{_pct(live_dd):>8}[/{dd_c2}]"
        f" [dim]champ gate:<20% crit:>40%[/dim]\n"
        f"[dim]WR:{_lm('win_rate','{:.1%}')} PF:{_lm('profit_factor','{:.2f}')}"
        f" Sh:{_lm('sharpe','{:.2f}')} Cal:{_lm('calmar','{:.2f}')}[/dim]\n"
        f"[dim]cap:${cur_cap:,.0f} start:${self._session_start_cap:,.0f} "
        f"trades:{eng.get('trades_executed','—')}[/dim]"
        if cur_cap > 0 and self._session_start_cap else
        "[dim]awaiting capital data…[/dim]"
    )

    # Right: sparklines + legend
    self._w("#mc_spark_lbl").update(
        f"[dim]cat.prob history [{min(self._prob_hist):.3f}–{max(self._prob_hist):.3f}][/dim]")
    self._w("#mc_spark", Sparkline).data = list(self._prob_hist)
    mae_list = list(self._mae_deque)
    self._w("#mc_mae_lbl").update(f"[dim]MAE hist (n={len(mae_list)})[/dim]")
    self._w("#mc_mae_spark", Sparkline).data = mae_list[-40:] if mae_list else [0.0]
    warn_str = ("\n[yellow]⚠ " + mc_warns[0] + "[/yellow]") if mc_warns else ""
    self._w("#mc_legend").update(
        "[bold]MC THRESHOLDS[/bold]\n"
        "[green]GREEN[/green] cat < 0.10\n"
        "[yellow]ORANGE[/yellow] cat < 0.30\n"
        "[red]RED[/red] cat ≥ 0.30\n"
        "[dim]DD gate: <20%[/dim]\n"
        "[dim]DD crit: >40%[/dim]" + warn_str
    )

    # ── TEST FOOTER — schema: {passed, total, status: PASS|FAIL|N/A} ────────
    if self._test_vis:
        tr = self._read_json(_TEST_JSON) or {}
        run_at = tr.get("_run_at", "never")
        cats = [
            ("data_integrity", "data"),
            ("finance_fuzz", "fuzz"),
            ("signal_fill", "signal"),
            ("degradation", "degrad"),
            ("actor", "actor"),
        ]
        def _badge(key, short):
            # One colored pass/total badge per test category.
            info = tr.get(key, {})
            if not info:
                return f"[dim]{short}:n/a[/dim]"
            status = info.get("status", "N/A")
            passed = info.get("passed")
            total  = info.get("total")
            if status == "N/A" or passed is None:
                return f"[dim]{short}:N/A[/dim]"
            col = "green" if status == "PASS" else "red"
            return f"[{col}]{short}:{passed}/{total}[/{col}]"
        badges = " ".join(_badge(k, s) for k, s in cats)
        self._w("#test_footer").update(
            f"[bold dim]TESTS[/bold dim] [dim]last run: {run_at}[/dim]"
            f" [dim]t=toggle r=reload[/dim]\n"
            f"{badges}\n"
            f"[dim]file: run_logs/test_results_latest.json "
            f"API: write_test_results() in dolphin_tui_v5.py[/dim]"
        )
    else:
        self._w("#test_footer").update("")

    # ── LOG ───────────────────────────────────────────────────────────────
    if self._log_vis:
        self._w("#p_log").update(
            f"[bold]LOG[/bold] (l=hide)\n"
            f"[dim]{now_str}[/dim] scan=#{scan_no} vel={vel_div:+.5f}\n"
            f"hz_err:{_S.get('hz_err','none')} pf_err:{_S.get('prefect_err','none')}\n"
            f"[dim]state keys:{len(_S._d)} safety_live:{safety_live}[/dim]"
        )
|
||||
|
||||
def action_force_refresh(self) -> None: self._update()  # key 'r': redraw all panels now
|
||||
def action_toggle_log(self) -> None:
    """Key 'l': show/hide the bottom log row."""
    self._log_vis = not self._log_vis
    self.query_one("#log_row").display = self._log_vis
|
||||
def action_toggle_tests(self) -> None:
    """Key 't': toggle the test-results footer and redraw immediately."""
    self._test_vis = not self._test_vis; self._update()
|
||||
|
||||
def _w(self, selector, widget_type=Static):
    """Shorthand for query_one with an expected widget type (default Static)."""
    return self.query_one(selector, widget_type)
|
||||
|
||||
@staticmethod
|
||||
def _read_json(path):
|
||||
try: return json.loads(path.read_text())
|
||||
except Exception: return None
|
||||
|
||||
|
||||
def write_test_results(results: dict):
    """Merge *results* into the TUI test-footer JSON file.

    Called by test scripts / CI / conftest.py. Existing categories in the
    file are preserved; only the supplied keys are overwritten, and a UTC
    "_run_at" timestamp is stamped on every call.

    Schema:
        {
          "_run_at": "auto-injected",
          "data_integrity": {"passed": 15, "total": 15, "status": "PASS"},
          "finance_fuzz":   {"passed": null, "total": null, "status": "N/A"},
          ...
        }
    status: "PASS" | "FAIL" | "N/A"

    Example (conftest.py):
        import sys; sys.path.insert(0, "/mnt/dolphinng5_predict/Observability/TUI")
        from dolphin_tui_v5 import write_test_results
        write_test_results({"data_integrity": {"passed": 15, "total": 15, "status": "PASS"}})
    """
    _TEST_JSON.parent.mkdir(parents=True, exist_ok=True)
    # Merge with the existing file so categories not reported this run survive.
    try:
        merged = json.loads(_TEST_JSON.read_text())
    except Exception:
        merged = {}
    merged.update(results)
    merged["_run_at"] = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S")
    _TEST_JSON.write_text(json.dumps(merged, indent=2))
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Script entry point: run the Textual app in the current terminal.
    DolphinTUI().run()
|
||||
777
Observability/TUI/dolphin_tui_v6.py
Executable file
777
Observability/TUI/dolphin_tui_v6.py
Executable file
@@ -0,0 +1,777 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
DOLPHIN TUI v4
|
||||
==============
|
||||
Fixes vs v3:
|
||||
• SYS HEALTH: tdr/scb labels expanded to full service names
|
||||
• ALPHA ENGINE: falls back to engine_snapshot when DOLPHIN_SAFETY is empty
|
||||
• MC-FOREWARNER: graceful "not yet run" display; shows last-run age; no crash on absent key
|
||||
• Version number shown in header after MC status
|
||||
|
||||
Run: source /home/dolphin/siloqy_env/bin/activate && python dolphin_tui_v4.py
|
||||
Keys: q=quit r=force-refresh l=toggle log t=toggle test footer
|
||||
"""
|
||||
|
||||
# ── stdlib ────────────────────────────────────────────────────────────────────
|
||||
import asyncio, json, math, threading, time
|
||||
from collections import deque
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
# ── third-party ───────────────────────────────────────────────────────────────
|
||||
try:
|
||||
from textual.app import App, ComposeResult
|
||||
from textual.containers import Horizontal, Vertical
|
||||
from textual.widgets import Digits, ProgressBar, Sparkline, Static
|
||||
except ImportError as e:
|
||||
raise SystemExit(f"textual not found — activate siloqy_env: {e}")
|
||||
|
||||
try:
|
||||
import hazelcast
|
||||
_HZ_OK = True
|
||||
except ImportError:
|
||||
_HZ_OK = False
|
||||
|
||||
TUI_VERSION = "TUI v6"
|
||||
|
||||
_META_JSON = Path("/mnt/dolphinng5_predict/run_logs/meta_health.json")
|
||||
_CAPITAL_JSON = Path("/tmp/dolphin_capital_checkpoint.json")
|
||||
# Path relative to this file: Observability/TUI/ → ../../run_logs/
|
||||
_TEST_JSON = Path(__file__).parent.parent.parent / "run_logs" / "test_results_latest.json"
|
||||
_OBF_SYMS = ["BTCUSDT", "ETHUSDT", "SOLUSDT", "BNBUSDT", "XRPUSDT"]
|
||||
|
||||
_PC = {"APEX":"green","STALKER":"yellow","TURTLE":"dark_orange","HIBERNATE":"red"}
|
||||
_MC = {"GREEN":"green","ORANGE":"dark_orange","RED":"red"}
|
||||
_SC = {"GREEN":"green","DEGRADED":"yellow","CRITICAL":"dark_orange","DEAD":"red"}
|
||||
|
||||
|
||||
# ── Thread-safe state ─────────────────────────────────────────────────────────
|
||||
class _State:
|
||||
def __init__(self):
|
||||
self._l = threading.Lock(); self._d: Dict[str, Any] = {}
|
||||
def put(self, k, v):
|
||||
with self._l: self._d[k] = v
|
||||
def get(self, k, default=None):
|
||||
with self._l: return self._d.get(k, default)
|
||||
def update(self, m):
|
||||
with self._l: self._d.update(m)
|
||||
|
||||
_S = _State()
|
||||
|
||||
def _ingest(key, raw):
    """Decode a raw Hazelcast JSON payload into shared state.

    Stores the decoded object under ``hz.<key>`` and the ingest wall-clock
    time under ``hz.<key>._t``. Empty or malformed payloads are silently
    ignored — the panels degrade to "no data" instead of crashing.
    """
    if not raw:
        return
    try:
        decoded = json.loads(raw)
    except Exception:
        return
    _S.update({f"hz.{key}": decoded, f"hz.{key}._t": time.time()})
|
||||
|
||||
def start_hz_listener(on_scan=None):
    """Spawn a daemon thread that mirrors Hazelcast maps into ``_S``.

    The thread connects to the local "dolphin" cluster, seeds shared state
    with current values, registers entry listeners so later writes stream
    in, then idles until the client dies — at which point it reconnects.
    *on_scan* (optional) is invoked on every ``latest_eigen_scan`` update
    (the TUI uses it to redraw immediately).
    """
    if not _HZ_OK:
        # hazelcast-python-client not installed: mark the link down and bail.
        _S.put("hz_up", False); return

    def _run():
        while True:  # reconnect loop: one iteration per client lifetime
            try:
                _S.put("hz_up", False)
                client = hazelcast.HazelcastClient(
                    cluster_name="dolphin", cluster_members=["localhost:5701"],
                    connection_timeout=5.0)
                _S.put("hz_up", True)
                # DOLPHIN_FEATURES: seed current values, then listen.
                fm = client.get_map("DOLPHIN_FEATURES").blocking()
                for k in ("latest_eigen_scan","exf_latest","acb_boost",
                          "obf_universe_latest","mc_forewarner_latest"):
                    _ingest(k, fm.get(k))
                def _f(e):
                    _ingest(e.key, e.value)
                    if e.key == "latest_eigen_scan" and on_scan:
                        try: on_scan()
                        except Exception: pass  # never let a UI callback kill the listener
                fm.add_entry_listener(include_value=True, updated=_f, added=_f)
                # Single-key maps: meta-health, safety, heartbeat.
                for map_name, key, state_key in [
                    ("DOLPHIN_META_HEALTH", "latest", "meta_health"),
                    ("DOLPHIN_SAFETY", "latest", "safety"),
                    ("DOLPHIN_HEARTBEAT", "nautilus_flow_heartbeat", "heartbeat"),
                ]:
                    m = client.get_map(map_name).blocking()
                    _ingest(state_key, m.get(key))
                    # Defaults bind sk/ek per iteration (avoids late-binding closure bug).
                    def _cb(e, sk=state_key, ek=key):
                        if e.key == ek: _ingest(sk, e.value)
                    m.add_entry_listener(include_value=True, updated=_cb, added=_cb)
                # DOLPHIN_STATE_BLUE: capital checkpoint + engine snapshot.
                stm = client.get_map("DOLPHIN_STATE_BLUE").blocking()
                _ingest("capital", stm.get("capital_checkpoint"))
                _ingest("engine_snapshot", stm.get("engine_snapshot"))
                def _cap(e):
                    if e.key == "capital_checkpoint": _ingest("capital", e.value)
                    elif e.key == "engine_snapshot": _ingest("engine_snapshot", e.value)
                stm.add_entry_listener(include_value=True, updated=_cap, added=_cap)
                # DOLPHIN_PNL_BLUE is optional — best-effort wiring.
                try:
                    pm = client.get_map("DOLPHIN_PNL_BLUE").blocking()
                    _ingest("pnl_blue", pm.get("session_perf"))
                    def _pnl(e):
                        if e.key == "session_perf": _ingest("pnl_blue", e.value)
                    pm.add_entry_listener(include_value=True, updated=_pnl, added=_pnl)
                except Exception: pass
                # Idle until the client stops running, then fall out to reconnect.
                while True:
                    time.sleep(5)
                    if not getattr(client.lifecycle_service, "is_running", lambda: True)():
                        break
            except Exception as e:
                # Record the error for the LOG panel and retry after a backoff.
                _S.put("hz_up", False); _S.put("hz_err", str(e)); time.sleep(10)

    threading.Thread(target=_run, daemon=True, name="hz-listener").start()
|
||||
|
||||
async def prefect_poll_loop():
    """Background worker: refresh the PREFECT panel data once a minute.

    Fetches the 20 most recent flow runs, keeps only the newest run per
    flow id, resolves flow names (falling back to an id prefix), and
    publishes up to 8 rows into shared state. The prefect import lives
    inside the loop so a missing install just flags the panel as down
    instead of crashing the TUI at import time.
    """
    while True:
        try:
            from prefect.client.orchestration import get_client
            from prefect.client.schemas.sorting import FlowRunSort
            async with get_client() as pc:
                runs = await pc.read_flow_runs(limit=20, sort=FlowRunSort.START_TIME_DESC)
                seen: Dict[str, Any] = {}          # flow_id -> newest run (runs are newest-first)
                fid_to_name: Dict[str, str] = {}   # flow_id -> resolved flow name
                for r in runs:
                    fid = str(r.flow_id)
                    if fid not in seen: seen[fid] = r
                rows = []
                for fid, r in seen.items():
                    if fid not in fid_to_name:
                        try:
                            f = await pc.read_flow(r.flow_id)
                            fid_to_name[fid] = f.name
                        except Exception: fid_to_name[fid] = fid[:8]  # id prefix fallback
                    rows.append({"name": fid_to_name[fid],
                                 "state": r.state_name or "?",
                                 "ts": r.start_time.strftime("%m-%d %H:%M") if r.start_time else "--"})
                _S.put("prefect_flows", rows[:8]); _S.put("prefect_ok", True)
        except Exception as e:
            # Truncated error string is surfaced in the LOG panel.
            _S.put("prefect_ok", False); _S.put("prefect_err", str(e)[:60])
        await asyncio.sleep(60)
|
||||
|
||||
# ── Helpers ───────────────────────────────────────────────────────────────────
|
||||
def _age(ts):
|
||||
if not ts: return "?"
|
||||
s = time.time() - ts
|
||||
if s < 0: return "0s"
|
||||
if s < 60: return f"{s:.0f}s"
|
||||
if s < 3600: return f"{s/60:.0f}m"
|
||||
return f"{s/3600:.1f}h"
|
||||
|
||||
def _age_col(ts, warn=15, dead=60):
|
||||
s = time.time() - ts if ts else 9999
|
||||
return "red" if s > dead else ("yellow" if s > warn else "green")
|
||||
|
||||
def _bar(v, width=12):
|
||||
v = max(0.0, min(1.0, v))
|
||||
f = round(v * width)
|
||||
return "█" * f + "░" * (width - f)
|
||||
|
||||
def _fmt_vel(v):
|
||||
return "---" if v is None else f"{float(v):+.5f}"
|
||||
|
||||
def _dot(state):
|
||||
s = (state or "").upper()
|
||||
if s == "COMPLETED": return "[green]●[/green]"
|
||||
if s == "RUNNING": return "[cyan]●[/cyan]"
|
||||
if s in ("FAILED","CRASHED","TIMEDOUT"): return "[red]●[/red]"
|
||||
if s == "CANCELLED": return "[dim]●[/dim]"
|
||||
if s == "PENDING": return "[yellow]●[/yellow]"
|
||||
return "[dim]◌[/dim]"
|
||||
|
||||
def _posture_markup(p):
    """Wrap a posture name in its Rich color tag (dim when unrecognized)."""
    colour = _PC.get(p, "dim")
    return f"[{colour}]{p}[/{colour}]"
|
||||
|
||||
def _col(v, c): return f"[{c}]{v}[/{c}]"
|
||||
|
||||
def _eigen_from_scan(scan):
|
||||
if not scan: return {}
|
||||
r = scan.get("result", scan)
|
||||
mwr_raw = r.get("multi_window_results", {})
|
||||
def _td(w): return (mwr_raw.get(w) or mwr_raw.get(str(w)) or {}).get("tracking_data", {})
|
||||
def _rs(w): return (mwr_raw.get(w) or mwr_raw.get(str(w)) or {}).get("regime_signals", {})
|
||||
v50 = _td(50).get("lambda_max_velocity") or scan.get("w50_velocity", 0.0)
|
||||
v150 = _td(150).get("lambda_max_velocity") or scan.get("w150_velocity", 0.0)
|
||||
v300 = _td(300).get("lambda_max_velocity") or scan.get("w300_velocity", 0.0)
|
||||
v750 = _td(750).get("lambda_max_velocity") or scan.get("w750_velocity", 0.0)
|
||||
vel_div = scan.get("vel_div", float(v50 or 0) - float(v150 or 0))
|
||||
inst_avg = sum(_rs(w).get("instability_score", 0.0) for w in (50,150,300,750)) / 4
|
||||
bt_price = (r.get("pricing_data", {}) or {}).get("current_prices", {}).get("BTCUSDT")
|
||||
return {
|
||||
"scan_number": scan.get("scan_number", 0),
|
||||
"timestamp": scan.get("timestamp", 0),
|
||||
"vel_div": float(vel_div or 0),
|
||||
"v50": float(v50 or 0), "v150": float(v150 or 0),
|
||||
"v300": float(v300 or 0), "v750": float(v750 or 0),
|
||||
"inst_avg": float(inst_avg or 0),
|
||||
"btc_price": float(bt_price) if bt_price else None,
|
||||
"regime": r.get("regime", r.get("sentiment", "?")),
|
||||
"version": scan.get("version", "?"),
|
||||
}
|
||||
|
||||
# ── CSS ───────────────────────────────────────────────────────────────────────
|
||||
_CSS = """
|
||||
Screen { background: #0a0a0a; color: #d4d4d4; }
|
||||
#header { height: 2; background: #111; border-bottom: solid #333; padding: 0 1; }
|
||||
#trader_row { height: 5; }
|
||||
#top_row { height: 9; }
|
||||
#mid_row { height: 9; }
|
||||
#bot_row { height: 7; }
|
||||
#log_row { height: 5; display: none; }
|
||||
#mc_outer { height: 16; border: solid #224; background: #060616; }
|
||||
#mc_title { height: 1; padding: 0 1; }
|
||||
#mc_body { height: 15; }
|
||||
#mc_left { width: 18; padding: 0 1; }
|
||||
#mc_center { width: 1fr; padding: 0 1; }
|
||||
#mc_right { width: 30; padding: 0 1; }
|
||||
#mc_prob_label { height: 1; }
|
||||
#mc_prob_bar { height: 1; }
|
||||
#mc_env_label { height: 1; }
|
||||
#mc_env_bar { height: 1; }
|
||||
#mc_champ_label{ height: 1; }
|
||||
#mc_champ_bar { height: 1; }
|
||||
#mc_live { height: 8; }
|
||||
#mc_spark_lbl { height: 1; }
|
||||
#mc_spark { height: 2; }
|
||||
#mc_mae_lbl { height: 1; }
|
||||
#mc_mae_spark { height: 2; }
|
||||
#mc_digits { height: 3; }
|
||||
#mc_status { height: 3; }
|
||||
#mc_legend { height: 6; }
|
||||
#test_footer { height: 3; background: #101010; border-top: solid #2a2a2a; padding: 0 1; }
|
||||
Static.panel { border: solid #333; padding: 0 1; height: 100%; }
|
||||
#p_trader { width: 1fr; border: solid #006650; }
|
||||
#p_health { width: 1fr; }
|
||||
#p_alpha { width: 1fr; }
|
||||
#p_scan { width: 1fr; }
|
||||
#p_extf { width: 1fr; }
|
||||
#p_obf { width: 1fr; }
|
||||
#p_capital { width: 1fr; }
|
||||
#p_prefect { width: 1fr; }
|
||||
#p_acb { width: 1fr; }
|
||||
#p_log { width: 1fr; border: solid #333; padding: 0 1; }
|
||||
ProgressBar > .bar--bar { color: $success; }
|
||||
ProgressBar > .bar--complete { color: $success; }
|
||||
ProgressBar.-danger > .bar--bar { color: $error; }
|
||||
ProgressBar.-warning > .bar--bar { color: $warning; }
|
||||
"""
|
||||
|
||||
|
||||
# ── App ───────────────────────────────────────────────────────────────────────
|
||||
class DolphinTUI(App):
|
||||
CSS = _CSS
|
||||
BINDINGS = [("q","quit","Quit"),("r","force_refresh","Refresh"),
|
||||
("l","toggle_log","Log"),("t","toggle_tests","Tests")]
|
||||
_log_vis = False; _test_vis = True
|
||||
_prob_hist: deque; _mae_deque: deque
|
||||
_session_start_cap: Optional[float] = None
|
||||
_cap_peak: Optional[float] = None
|
||||
|
||||
    def compose(self) -> ComposeResult:
        """Build the static widget tree; all content is filled by _update().

        Layout: header, trader row, three panel rows, the MC-forewarner
        block (left digits/status, center bars, right sparklines/legend),
        a test footer, and a hidden-by-default log row.
        """
        yield Static("", id="header")
        with Horizontal(id="trader_row"):
            yield Static("", classes="panel", id="p_trader")
        with Horizontal(id="top_row"):
            yield Static("", classes="panel", id="p_health")
            yield Static("", classes="panel", id="p_alpha")
            yield Static("", classes="panel", id="p_scan")
        with Horizontal(id="mid_row"):
            yield Static("", classes="panel", id="p_extf")
            yield Static("", classes="panel", id="p_obf")
            yield Static("", classes="panel", id="p_capital")
        with Horizontal(id="bot_row"):
            yield Static("", classes="panel", id="p_prefect")
            yield Static("", classes="panel", id="p_acb")
        with Vertical(id="mc_outer"):
            yield Static("", id="mc_title")
            with Horizontal(id="mc_body"):
                with Vertical(id="mc_left"):
                    yield Digits("0.000", id="mc_digits")
                    yield Static("", id="mc_status")
                with Vertical(id="mc_center"):
                    yield Static("", id="mc_prob_label")
                    yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_prob_bar")
                    yield Static("", id="mc_env_label")
                    yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_env_bar")
                    yield Static("", id="mc_champ_label")
                    yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_champ_bar")
                    yield Static("", id="mc_live")
                with Vertical(id="mc_right"):
                    yield Static("", id="mc_spark_lbl")
                    yield Sparkline([], id="mc_spark")
                    yield Static("", id="mc_mae_lbl")
                    yield Sparkline([], id="mc_mae_spark")
                    yield Static("", id="mc_legend")
        yield Static("", id="test_footer")
        with Horizontal(id="log_row"):
            yield Static("", classes="panel", id="p_log")
|
||||
|
||||
    def on_mount(self) -> None:
        """Start the data feeds and the 1 Hz refresh once the app is mounted."""
        # Seed history buffers before any listener callback can fire.
        self._prob_hist = deque([0.0] * 40, maxlen=40)
        self._mae_deque = deque(maxlen=500)
        # Hazelcast listener runs in a daemon thread; marshal refreshes
        # back onto the UI thread via call_from_thread.
        start_hz_listener(on_scan=lambda: self.call_from_thread(self._update))
        self.run_worker(prefect_poll_loop(), name="prefect-poll", exclusive=True)
        self.set_interval(1.0, self._update)
        self._update()
|
||||
|
||||
def _update(self) -> None:
|
||||
now_str = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
|
||||
hz_up = _S.get("hz_up", False)
|
||||
mh = _S.get("hz.meta_health") or self._read_json(_META_JSON)
|
||||
safe = _S.get("hz.safety") or {}
|
||||
scan = _S.get("hz.latest_eigen_scan") or {}
|
||||
exf = _S.get("hz.exf_latest") or {}
|
||||
acb = _S.get("hz.acb_boost") or {}
|
||||
obf_u = _S.get("hz.obf_universe_latest") or {}
|
||||
mc = _S.get("hz.mc_forewarner_latest") or {}
|
||||
cap = _S.get("hz.capital") or self._read_json(_CAPITAL_JSON) or {}
|
||||
hb = _S.get("hz.heartbeat") or {}
|
||||
eng = _S.get("hz.engine_snapshot") or {}
|
||||
eigen = _eigen_from_scan(scan)
|
||||
|
||||
# ── posture: DOLPHIN_SAFETY first, fall back to engine_snapshot ───────
|
||||
posture = safe.get("posture") or eng.get("posture") or "?"
|
||||
# ── Rm / breakdown: DOLPHIN_SAFETY first, fall back to zeros ─────────
|
||||
rm_s = float(safe.get("Rm", 0.0))
|
||||
bd = safe.get("breakdown") or {}
|
||||
# If DOLPHIN_SAFETY is empty but engine_snapshot has posture, show that
|
||||
safety_live = bool(safe.get("posture") or safe.get("Rm"))
|
||||
|
||||
rm_m = mh.get("rm_meta", 0.0) if mh else 0.0
|
||||
mhs_st = mh.get("status", "?") if mh else "?"
|
||||
sc_mhs = _SC.get(mhs_st, "dim")
|
||||
pc_col = _PC.get(posture, "dim")
|
||||
hz_tag = "[green][HZ✓][/green]" if hz_up else "[red][HZ✗][/red]"
|
||||
mc_st = mc.get("status", "N/A") if mc else "N/A"
|
||||
mc_col = _MC.get(mc_st, "dim")
|
||||
|
||||
# ── HEADER ────────────────────────────────────────────────────────────
|
||||
self._w("#header").update(
|
||||
f"[bold cyan]🐬 DOLPHIN-NAUTILUS[/bold cyan] {now_str}"
|
||||
f" {hz_tag} [{sc_mhs}]MHS:{mhs_st} {rm_m:.3f}[/{sc_mhs}]"
|
||||
f" [{pc_col}]◈{posture}[/{pc_col}]"
|
||||
f" [{mc_col}]MC:{mc_st}[/{mc_col}]"
|
||||
f" [dim]{TUI_VERSION}[/dim]\n"
|
||||
f"[dim] localhost:5701 q=quit r=refresh l=log t=tests[/dim]"
|
||||
)
|
||||
|
||||
# ── TRADER ────────────────────────────────────────────────────────────
|
||||
cap_val = float(cap.get("capital", 0)) if cap else 0.0
|
||||
hb_phase = hb.get("phase", "?") if hb else "N/A"
|
||||
hb_ts = hb.get("ts") if hb else None
|
||||
hb_age = _age(hb_ts) if hb_ts else "?"
|
||||
hb_col = _age_col(hb_ts, 30, 120) if hb_ts else "red"
|
||||
vel_div = eigen.get("vel_div", 0.0)
|
||||
vc = "green" if vel_div > 0 else ("red" if vel_div < -0.02 else "yellow")
|
||||
scan_no = eigen.get("scan_number", 0)
|
||||
btc_p = eigen.get("btc_price")
|
||||
btc_str = f"BTC:[cyan]${btc_p:,.0f}[/cyan] " if btc_p else ""
|
||||
trades_ex= eng.get("trades_executed")
|
||||
last_vd = eng.get("last_vel_div")
|
||||
self._w("#p_trader").update(
|
||||
f"[bold cyan]TRADER[/bold cyan] {_posture_markup(posture)}"
|
||||
f" phase:[{hb_col}]{hb_phase}[/{hb_col}] hb:{_col(hb_age, hb_col)}"
|
||||
f" scan:[dim]#{scan_no}[/dim] {btc_str}\n"
|
||||
f" vel_div:[{vc}]{vel_div:+.5f}[/{vc}] thr:[dim]-0.02000[/dim]"
|
||||
f" cap:[cyan]${cap_val:,.0f}[/cyan]"
|
||||
f" trades:{trades_ex if trades_ex is not None else '—'}\n"
|
||||
f" last_vel_div:[dim]{last_vd:+.5f}[/dim] open-pos:[dim]DOLPHIN_PNL_BLUE[/dim]"
|
||||
if last_vd is not None else
|
||||
f" vel_div:[{vc}]{vel_div:+.5f}[/{vc}] thr:[dim]-0.02000[/dim]"
|
||||
f" cap:[cyan]${cap_val:,.0f}[/cyan]"
|
||||
f" trades:{trades_ex if trades_ex is not None else '—'}\n"
|
||||
f" open-pos:[dim]DOLPHIN_PNL_BLUE (not yet wired)[/dim]"
|
||||
)
|
||||
|
||||
# ── SYS HEALTH — FIX: expand tdr/scb labels ──────────────────────────
|
||||
if mh:
|
||||
svc = mh.get("service_status", {})
|
||||
hz_ks= mh.get("hz_key_status", {})
|
||||
def _svc(nm, label):
|
||||
st = svc.get(nm, "?")
|
||||
dot = "[green]●[/green]" if st == "RUNNING" else "[red]●[/red]"
|
||||
return f"{dot}[dim]{label}[/dim]"
|
||||
def _hz_dot(nm):
|
||||
sc = hz_ks.get(nm, {}).get("score", 0)
|
||||
return "[green]●[/green]" if sc >= 0.9 else ("[yellow]●[/yellow]" if sc >= 0.5 else "[red]●[/red]")
|
||||
self._w("#p_health").update(
|
||||
f"[bold]SYS HEALTH[/bold] [{sc_mhs}]{mhs_st}[/{sc_mhs}]\n"
|
||||
f"rm:[{sc_mhs}]{rm_m:.3f}[/{sc_mhs}]"
|
||||
f" m4:{mh.get('m4_control_plane',0):.2f}"
|
||||
f" m3:{mh.get('m3_data_freshness',0):.2f}"
|
||||
f" m5:{mh.get('m5_coherence',0):.2f}\n"
|
||||
f"{_svc('dolphin_data:exf_fetcher','exf')}"
|
||||
f" {_svc('dolphin_data:acb_processor','acb')}"
|
||||
f" {_svc('dolphin_data:obf_universe','obf')}\n"
|
||||
f"{_svc('dolphin:nautilus_trader','trader')}"
|
||||
f" {_svc('dolphin:scan_bridge','scan-bridge')}\n"
|
||||
f"[dim]hz: exf{_hz_dot('exf_latest')}"
|
||||
f" scan{_hz_dot('latest_eigen_scan')}"
|
||||
f" obf{_hz_dot('obf_universe')}[/dim]"
|
||||
)
|
||||
else:
|
||||
self._w("#p_health").update("[bold]SYS HEALTH[/bold]\n[dim]awaiting MHS…[/dim]")
|
||||
|
||||
# ── ALPHA ENGINE — FIX: fall back to engine_snapshot when safety empty ─
|
||||
safe_ts = _S.get("hz.safety._t")
|
||||
safe_age = _age(safe_ts) if safe_ts else "?"
|
||||
safe_ac = _age_col(safe_ts, 30, 120) if safe_ts else "red"
|
||||
def _cat(n):
|
||||
v = bd.get(f"Cat{n}", 0.0)
|
||||
c = "green" if v >= 0.9 else ("yellow" if v >= 0.6 else "red")
|
||||
return f"[{c}]{v:.2f}[/{c}]"
|
||||
if safety_live:
|
||||
self._w("#p_alpha").update(
|
||||
f"[bold]ALPHA ENGINE[/bold] {_posture_markup(posture)}\n"
|
||||
f"Rm:[{pc_col}]{_bar(rm_s,14)}[/{pc_col}]{rm_s:.3f}\n"
|
||||
f"C1:{_cat(1)} C2:{_cat(2)} C3:{_cat(3)}\n"
|
||||
f"C4:{_cat(4)} C5:{_cat(5)}"
|
||||
f" fenv:{bd.get('f_env',0):.2f} fex:{bd.get('f_exe',0):.2f}\n"
|
||||
f"[dim]age:{_col(safe_age, safe_ac)}[/dim]"
|
||||
)
|
||||
else:
|
||||
# DOLPHIN_SAFETY empty — show what we have from engine_snapshot
|
||||
bars_idx = eng.get("bar_idx", "?")
|
||||
scans_p = eng.get("scans_processed", "?")
|
||||
self._w("#p_alpha").update(
|
||||
f"[bold]ALPHA ENGINE[/bold] {_posture_markup(posture)}\n"
|
||||
f"[yellow]DOLPHIN_SAFETY empty[/yellow]\n"
|
||||
f"posture from engine_snapshot\n"
|
||||
f"bar:{bars_idx} scans:{scans_p}\n"
|
||||
f"[dim]Rm/Cat1-5: awaiting DOLPHIN_SAFETY[/dim]"
|
||||
)
|
||||
|
||||
# ── SCAN ──────────────────────────────────────────────────────────────
|
||||
scan_ac = _age_col(eigen.get("timestamp", 0), 15, 60)
|
||||
scan_age= _age(eigen.get("timestamp")) if eigen.get("timestamp") else "?"
|
||||
vi = eigen.get("inst_avg", 0)
|
||||
self._w("#p_scan").update(
|
||||
f"[bold]SCAN {eigen.get('version','?')}[/bold]"
|
||||
f" [dim]#{scan_no}[/dim] age:[{scan_ac}]{scan_age}[/{scan_ac}]\n"
|
||||
f"vel_div:[{vc}]{vel_div:+.5f}[/{vc}]\n"
|
||||
f"w50:[yellow]{_fmt_vel(eigen.get('v50'))}[/yellow]"
|
||||
f" w150:[dim]{_fmt_vel(eigen.get('v150'))}[/dim]\n"
|
||||
f"w300:[dim]{_fmt_vel(eigen.get('v300'))}[/dim]"
|
||||
f" w750:[dim]{_fmt_vel(eigen.get('v750'))}[/dim]\n"
|
||||
f"inst:{vi:.4f}"
|
||||
)
|
||||
|
||||
# ── ExtF ──────────────────────────────────────────────────────────────
|
||||
exf_t = _S.get("hz.exf_latest._t")
|
||||
exf_age = _age(exf_t) if exf_t else "?"
|
||||
exf_ac = _age_col(exf_t, 30, 120) if exf_t else "red"
|
||||
f_btc = exf.get("funding_btc"); dvol = exf.get("dvol_btc")
|
||||
fng = exf.get("fng"); taker= exf.get("taker")
|
||||
ls_btc = exf.get("ls_btc"); vix = exf.get("vix")
|
||||
ok_cnt = exf.get("_ok_count", 0)
|
||||
dvol_c = "red" if dvol and dvol > 70 else ("yellow" if dvol and dvol > 50 else "green")
|
||||
fng_c = "red" if fng and fng < 25 else ("yellow" if fng and fng < 45 else "green")
|
||||
if exf:
|
||||
self._w("#p_extf").update(
|
||||
f"[bold]ExtF[/bold] [{exf_ac}]{ok_cnt}/9 {exf_age}[/{exf_ac}]\n"
|
||||
f"fund:[cyan]{f_btc:.5f}[/cyan] dvol:[{dvol_c}]{dvol:.1f}[/{dvol_c}]\n"
|
||||
f"fng:[{fng_c}]{int(fng) if fng else '?'}[/{fng_c}] taker:[yellow]{taker:.3f}[/yellow]\n"
|
||||
f"ls_btc:{ls_btc:.3f} vix:{vix:.1f}\n"
|
||||
f"acb✓:{exf.get('_acb_ready','?')}"
|
||||
)
|
||||
else:
|
||||
self._w("#p_extf").update("[bold]ExtF[/bold]\n[dim]no data[/dim]")
|
||||
|
||||
# ── OBF ───────────────────────────────────────────────────────────────
|
||||
obf_t = _S.get("hz.obf_universe_latest._t")
|
||||
obf_age = _age(obf_t) if obf_t else "?"
|
||||
obf_ac = _age_col(obf_t, 30, 120) if obf_t else "red"
|
||||
n_assets= obf_u.get("_n_assets", 0) if obf_u else 0
|
||||
lines = [f"[bold]OBF[/bold] [{obf_ac}]n={n_assets} {obf_age}[/{obf_ac}]"]
|
||||
for sym in _OBF_SYMS:
|
||||
if not obf_u: break
|
||||
a = obf_u.get(sym)
|
||||
if not a: continue
|
||||
imb = float(a.get("imbalance", 0))
|
||||
fp = float(a.get("fill_probability", 0))
|
||||
dq = float(a.get("depth_quality", 0))
|
||||
imb_c = "green" if imb > 0.1 else ("red" if imb < -0.1 else "yellow")
|
||||
lines.append(f"{sym[:3]} [{imb_c}]{imb:+.2f}[/{imb_c}] fp:{fp:.2f} dq:{dq:.2f}")
|
||||
self._w("#p_obf").update("\n".join(lines[:6]))
|
||||
|
||||
# ── CAPITAL — sourced from engine_snapshot + capital_checkpoint ─────────
|
||||
# engine_snapshot: capital, posture, trades_executed, scans_processed,
|
||||
# bar_idx, open_notional, current_leverage,
|
||||
# leverage_soft_cap, leverage_abs_cap, timestamp
|
||||
# capital_checkpoint: capital, ts (written more frequently)
|
||||
cap_t = _S.get("hz.capital._t")
|
||||
cap_ac = _age_col(cap_t, 60, 300) if cap_t else "dim"
|
||||
cap_age = _age(cap_t) if cap_t else "?"
|
||||
eng_ts = eng.get("timestamp")
|
||||
|
||||
# Capital: prefer engine_snapshot (most recent), fall back to checkpoint
|
||||
eng_cap = float(eng.get("capital", 0.0)) if eng else 0.0
|
||||
chk_cap = float(cap.get("capital", 0.0)) if cap else 0.0
|
||||
live_cap = eng_cap if eng_cap > 0 else chk_cap
|
||||
|
||||
# Leverage
|
||||
cur_lev = float(eng.get("current_leverage", 0.0)) if eng else 0.0
|
||||
soft_cap = float(eng.get("leverage_soft_cap", 8.0)) if eng else 8.0
|
||||
abs_cap = float(eng.get("leverage_abs_cap", 9.0)) if eng else 9.0
|
||||
open_not = float(eng.get("open_notional", 0.0)) if eng else 0.0
|
||||
lev_c = "green" if cur_lev < 3 else ("yellow" if cur_lev < soft_cap else "red")
|
||||
|
||||
# Trades / scans
|
||||
trades_ex = eng.get("trades_executed") if eng else None
|
||||
scans_p = eng.get("scans_processed") if eng else None
|
||||
bar_idx = eng.get("bar_idx") if eng else None
|
||||
|
||||
# Drawdown from Cat5 (safety breakdown)
|
||||
c5 = bd.get("Cat5", 1.0) if bd else 1.0
|
||||
try:
|
||||
dd_est = 0.12 + math.log(1.0/c5 - 1.0) / 30.0 if 0 < c5 < 1 else 0.0
|
||||
except Exception:
|
||||
dd_est = 0.0
|
||||
dd_c = "red" if dd_est > 0.15 else ("yellow" if dd_est > 0.08 else "green")
|
||||
|
||||
# Trader up/down: heartbeat age < 30s = up
|
||||
trader_up = hb_ts and (time.time() - hb_ts) < 30
|
||||
trader_tag = "[green]● LIVE[/green]" if trader_up else "[red]● DOWN[/red]"
|
||||
|
||||
# Lev bar (compact, 16 chars)
|
||||
lev_bar = _bar(min(cur_lev / abs_cap, 1.0), 14)
|
||||
|
||||
self._w("#p_capital").update(
|
||||
f"[bold]CAPITAL[/bold] {trader_tag} [{cap_ac}]{cap_age}[/{cap_ac}]\n"
|
||||
f"Cap:[cyan]${live_cap:,.0f}[/cyan]"
|
||||
f" DD≈:[{dd_c}]{dd_est*100:.1f}%[/{dd_c}]\n"
|
||||
f"Pos:{_posture_markup(posture)}"
|
||||
f" Rm:[{_PC.get(posture,'dim')}]{rm_s:.3f}[/{_PC.get(posture,'dim')}]\n"
|
||||
f"Lev:[{lev_c}]{lev_bar}[/{lev_c}]{cur_lev:.2f}x"
|
||||
f" notional:${open_not:,.0f}\n"
|
||||
f"[dim]trades:{trades_ex if trades_ex is not None else '—'}"
|
||||
f" scans:{scans_p if scans_p is not None else '—'}"
|
||||
f" bar:{bar_idx if bar_idx is not None else '—'}[/dim]"
|
||||
)
|
||||
|
||||
# ── PREFECT ───────────────────────────────────────────────────────────
|
||||
flows = _S.get("prefect_flows") or []
|
||||
pf_ok = _S.get("prefect_ok", False)
|
||||
pf_hdr = "[green]✓[/green]" if pf_ok else "[red]✗[/red]"
|
||||
flines = "\n".join(
|
||||
f"{_dot(f['state'])} {f['name'][:22]:<22} {f['ts']}" for f in flows[:5])
|
||||
self._w("#p_prefect").update(
|
||||
f"[bold]PREFECT[/bold] {pf_hdr}\n" + (flines or "[dim]polling…[/dim]"))
|
||||
|
||||
# ── ACB ───────────────────────────────────────────────────────────────
|
||||
acb_t = _S.get("hz.acb_boost._t")
|
||||
acb_age = _age(acb_t) if acb_t else "?"
|
||||
acb_ac = _age_col(acb_t, 3600, 86400) if acb_t else "dim"
|
||||
boost = acb.get("boost", 1.0) if acb else 1.0
|
||||
beta = acb.get("beta", 0.8) if acb else 0.8
|
||||
cut = acb.get("cut", 0.0) if acb else 0.0
|
||||
boost_c = "green" if boost >= 1.5 else ("yellow" if boost >= 1.0 else "red")
|
||||
cut_c = "red" if cut > 0 else "dim"
|
||||
self._w("#p_acb").update(
|
||||
f"[bold]ACB[/bold] [{acb_ac}]{acb.get('date','?') if acb else '?'}[/{acb_ac}]\n"
|
||||
f"boost:[{boost_c}]{boost:.2f}x[/{boost_c}] β={beta:.2f}"
|
||||
f" cut:[{cut_c}]{cut:.2f}[/{cut_c}]\n"
|
||||
f"w750_thr:{acb.get('w750_threshold',0.0) if acb else 0.0:.4f}\n"
|
||||
f"[dim]dvol={acb.get('factors',{}).get('dvol_btc','?') if acb else '?'}"
|
||||
f" fng={acb.get('factors',{}).get('fng','?') if acb else '?'}[/dim]\n"
|
||||
f"[dim]age:{acb_age} cfg:{acb.get('config_used','?') if acb else '?'}[/dim]"
|
||||
)
|
||||
|
||||
# ── MC-FOREWARNER — FIX: graceful when absent ─────────────────────────
|
||||
prob = float(mc.get("catastrophic_prob", 0.0)) if mc else 0.0
|
||||
env = float(mc.get("envelope_score", 0.0)) if mc else 0.0
|
||||
champ_p = mc.get("champion_probability") if mc else None
|
||||
mc_ts = mc.get("timestamp") if mc else None
|
||||
mc_warns = mc.get("warnings", []) if mc else []
|
||||
sc = _MC.get(mc_st, "dim")
|
||||
self._prob_hist.append(prob)
|
||||
|
||||
# Age since last 4h run
|
||||
mc_age_str = "never run"
|
||||
if mc_ts:
|
||||
try:
|
||||
mc_dt = datetime.fromisoformat(mc_ts.replace("Z", "+00:00"))
|
||||
age_s = (datetime.now(timezone.utc) - mc_dt).total_seconds()
|
||||
age_m = int(age_s // 60)
|
||||
mc_age_str = f"{age_m//60}h{age_m%60:02d}m ago" if age_m >= 60 else f"{age_m}m ago"
|
||||
except Exception: pass
|
||||
|
||||
mc_present = bool(mc)
|
||||
self._w("#mc_title").update(
|
||||
f"[bold cyan]⚡ MC-FOREWARNER RISK MANIFOLD[/bold cyan] {TUI_VERSION}"
|
||||
+ (f" [{sc}]▶ {mc_st}[/{sc}] [dim]assessed:{mc_age_str} cadence:4h[/dim]"
|
||||
if mc_present else
|
||||
" [yellow]⚠ no data yet — Prefect 4h schedule not yet run[/yellow]"
|
||||
" [dim](mc_forewarner_flow runs every 4h)[/dim]")
|
||||
)
|
||||
|
||||
# Left: digits + status
|
||||
self._w("#mc_digits", Digits).update(f"{prob:.3f}")
|
||||
status_str = {"GREEN":"🟢 SAFE","ORANGE":"🟡 CAUTION","RED":"🔴 DANGER"}.get(mc_st, "⚪ N/A")
|
||||
champ_str = f"champ:{champ_p*100:.0f}%" if champ_p is not None else "champ:—"
|
||||
self._w("#mc_status").update(
|
||||
(f"[{sc}]{status_str}[/{sc}]\n[dim]cat.prob {champ_str}[/dim]\n[dim]env:{env:.3f}[/dim]")
|
||||
if mc_present else
|
||||
"[yellow]awaiting[/yellow]\n[dim]first run[/dim]\n[dim]in ~4h[/dim]"
|
||||
)
|
||||
|
||||
# Center bars
|
||||
for bar_id, val, lo_thr, hi_thr, lo_cls, hi_cls, label, fmt in [
|
||||
("mc_prob_bar", prob, 0.10, 0.30, "-warning", "-danger",
|
||||
f"[dim]cat.prob[/dim] [{sc}]{prob:.4f}[/{sc}] [green]<0.10 OK[/green] [yellow]<0.30 WARN[/yellow] [red]≥0.30 CRIT[/red]",
|
||||
int(prob * 100)),
|
||||
("mc_env_bar", env, 0.40, 0.70, "-danger", "-warning",
|
||||
f"[dim]env.score[/dim] [green]{env:.4f}[/green] [red]<0.40 DANGER[/red] [yellow]<0.70 CAUTION[/yellow] [green]≥0.70 SAFE[/green]",
|
||||
int(env * 100)),
|
||||
]:
|
||||
pb = self._w(f"#{bar_id}", ProgressBar)
|
||||
pb.progress = fmt
|
||||
pb.remove_class("-danger", "-warning")
|
||||
if val < lo_thr: pb.add_class(lo_cls)
|
||||
elif val < hi_thr: pb.add_class(hi_cls)
|
||||
self._w(f"#{bar_id.replace('_bar','_label')}").update(label)
|
||||
|
||||
# champion_probability bar
|
||||
chp_val = champ_p if champ_p is not None else 0.0
|
||||
cb = self._w("#mc_champ_bar", ProgressBar)
|
||||
cb.progress = int(chp_val * 100)
|
||||
cb.remove_class("-danger", "-warning")
|
||||
if chp_val < 0.30: cb.add_class("-danger")
|
||||
elif chp_val < 0.60: cb.add_class("-warning")
|
||||
self._w("#mc_champ_label").update(
|
||||
f"[dim]champ.prob[/dim] "
|
||||
+ (f"[green]{champ_p*100:.1f}%[/green]" if champ_p is not None else "[dim]—[/dim]")
|
||||
+ " [green]>60% GOOD[/green] [yellow]>30% MARGINAL[/yellow] [red]<30% RISK[/red]"
|
||||
)
|
||||
|
||||
# Live performance tier
|
||||
cur_cap = float(cap.get("capital", 0.0)) if cap else 0.0
|
||||
if cur_cap > 0:
|
||||
if self._session_start_cap is None: self._session_start_cap = cur_cap
|
||||
if self._cap_peak is None or cur_cap > self._cap_peak: self._cap_peak = cur_cap
|
||||
live_roi = ((cur_cap - self._session_start_cap) / self._session_start_cap
|
||||
if cur_cap > 0 and self._session_start_cap else None)
|
||||
live_dd = ((self._cap_peak - cur_cap) / self._cap_peak
|
||||
if cur_cap > 0 and self._cap_peak and cur_cap < self._cap_peak else None)
|
||||
pnl_blue = _S.get("hz.pnl_blue") or {}
|
||||
def _pct(v): return f"{v*100:+.1f}%" if v is not None else "—"
|
||||
def _lm(k, fmt="{:.3f}"):
|
||||
v = pnl_blue.get(k); return fmt.format(v) if v is not None else "—"
|
||||
roi_c = "green" if live_roi and live_roi > 0 else ("red" if live_roi and live_roi < -0.10 else "yellow")
|
||||
dd_c2 = "red" if live_dd and live_dd > 0.20 else ("yellow" if live_dd and live_dd > 0.08 else "green")
|
||||
self._w("#mc_live").update(
|
||||
f"[dim]ROI[/dim] [{roi_c}]{_pct(live_roi):>8}[/{roi_c}]"
|
||||
f" [dim]champ gate:>+30% crit:<-30%[/dim]\n"
|
||||
f"[dim]DD [/dim] [{dd_c2}]{_pct(live_dd):>8}[/{dd_c2}]"
|
||||
f" [dim]champ gate:<20% crit:>40%[/dim]\n"
|
||||
f"[dim]WR:{_lm('win_rate','{:.1%}')} PF:{_lm('profit_factor','{:.2f}')}"
|
||||
f" Sh:{_lm('sharpe','{:.2f}')} Cal:{_lm('calmar','{:.2f}')}[/dim]\n"
|
||||
f"[dim]cap:${cur_cap:,.0f} start:${self._session_start_cap:,.0f} "
|
||||
f"trades:{eng.get('trades_executed','—')}[/dim]"
|
||||
if cur_cap > 0 and self._session_start_cap else
|
||||
"[dim]awaiting capital data…[/dim]"
|
||||
)
|
||||
|
||||
# Right: sparklines + legend
|
||||
self._w("#mc_spark_lbl").update(
|
||||
f"[dim]cat.prob history [{min(self._prob_hist):.3f}–{max(self._prob_hist):.3f}][/dim]")
|
||||
self._w("#mc_spark", Sparkline).data = list(self._prob_hist)
|
||||
mae_list = list(self._mae_deque)
|
||||
self._w("#mc_mae_lbl").update(f"[dim]MAE hist (n={len(mae_list)})[/dim]")
|
||||
self._w("#mc_mae_spark", Sparkline).data = mae_list[-40:] if mae_list else [0.0]
|
||||
warn_str = ("\n[yellow]⚠ " + mc_warns[0] + "[/yellow]") if mc_warns else ""
|
||||
self._w("#mc_legend").update(
|
||||
"[bold]MC THRESHOLDS[/bold]\n"
|
||||
"[green]GREEN[/green] cat < 0.10\n"
|
||||
"[yellow]ORANGE[/yellow] cat < 0.30\n"
|
||||
"[red]RED[/red] cat ≥ 0.30\n"
|
||||
"[dim]DD gate: <20%[/dim]\n"
|
||||
"[dim]DD crit: >40%[/dim]" + warn_str
|
||||
)
|
||||
|
||||
# ── TEST FOOTER — schema: {passed, total, status: PASS|FAIL|N/A} ────────
|
||||
if self._test_vis:
|
||||
tr = self._read_json(_TEST_JSON) or {}
|
||||
run_at = tr.get("_run_at", "never")
|
||||
cats = [
|
||||
("data_integrity", "data"),
|
||||
("finance_fuzz", "fuzz"),
|
||||
("signal_fill", "signal"),
|
||||
("degradation", "degrad"),
|
||||
("actor", "actor"),
|
||||
]
|
||||
def _badge(key, short):
|
||||
info = tr.get(key, {})
|
||||
if not info:
|
||||
return f"[dim]{short}:n/a[/dim]"
|
||||
status = info.get("status", "N/A")
|
||||
passed = info.get("passed")
|
||||
total = info.get("total")
|
||||
if status == "N/A" or passed is None:
|
||||
return f"[dim]{short}:N/A[/dim]"
|
||||
col = "green" if status == "PASS" else "red"
|
||||
return f"[{col}]{short}:{passed}/{total}[/{col}]"
|
||||
badges = " ".join(_badge(k, s) for k, s in cats)
|
||||
self._w("#test_footer").update(
|
||||
f"[bold dim]TESTS[/bold dim] [dim]last run: {run_at}[/dim]"
|
||||
f" [dim]t=toggle r=reload[/dim]\n"
|
||||
f"{badges}\n"
|
||||
f"[dim]file: run_logs/test_results_latest.json "
|
||||
f"API: write_test_results() in dolphin_tui_v6.py[/dim]"
|
||||
)
|
||||
else:
|
||||
self._w("#test_footer").update("")
|
||||
|
||||
# ── LOG ───────────────────────────────────────────────────────────────
|
||||
if self._log_vis:
|
||||
self._w("#p_log").update(
|
||||
f"[bold]LOG[/bold] (l=hide)\n"
|
||||
f"[dim]{now_str}[/dim] scan=#{scan_no} vel={vel_div:+.5f}\n"
|
||||
f"hz_err:{_S.get('hz_err','none')} pf_err:{_S.get('prefect_err','none')}\n"
|
||||
f"[dim]state keys:{len(_S._d)} safety_live:{safety_live}[/dim]"
|
||||
)
|
||||
|
||||
def action_force_refresh(self) -> None: self._update()
|
||||
def action_toggle_log(self) -> None:
|
||||
self._log_vis = not self._log_vis
|
||||
self.query_one("#log_row").display = self._log_vis
|
||||
def action_toggle_tests(self) -> None:
|
||||
self._test_vis = not self._test_vis; self._update()
|
||||
|
||||
    def _w(self, selector, widget_type=Static):
        """Shorthand for query_one: look up a widget by CSS selector."""
        return self.query_one(selector, widget_type)
|
||||
|
||||
@staticmethod
|
||||
def _read_json(path):
|
||||
try: return json.loads(path.read_text())
|
||||
except Exception: return None
|
||||
|
||||
|
||||
def write_test_results(results: dict):
    """
    Update the TUI test footer. Called by test scripts / CI / conftest.py.

    Schema:
        {
          "_run_at": "auto-injected",
          "data_integrity": {"passed": 15, "total": 15, "status": "PASS"},
          "finance_fuzz":   {"passed": null, "total": null, "status": "N/A"},
          ...
        }
    status: "PASS" | "FAIL" | "N/A"

    Example (conftest.py):
        import sys; sys.path.insert(0, "/mnt/dolphinng5_predict/Observability/TUI")
        from dolphin_tui_v5 import write_test_results
        write_test_results({"data_integrity": {"passed": 15, "total": 15, "status": "PASS"}})
    """
    _TEST_JSON.parent.mkdir(parents=True, exist_ok=True)
    # Merge into whatever is already on disk so categories written by other
    # runs are preserved; a missing/corrupt file just starts fresh.
    try:
        merged = json.loads(_TEST_JSON.read_text())
    except Exception:
        merged = {}
    merged.update(results)
    merged["_run_at"] = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S")
    _TEST_JSON.write_text(json.dumps(merged, indent=2))
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Run the Textual app directly (blocking until 'q').
    DolphinTUI().run()
|
||||
800
Observability/TUI/dolphin_tui_v7.py
Executable file
800
Observability/TUI/dolphin_tui_v7.py
Executable file
@@ -0,0 +1,800 @@
|
||||
#!/usr/bin/env python3
"""
DOLPHIN TUI v4
==============
Fixes vs v3:
  • SYS HEALTH: tdr/scb labels expanded to full service names
  • ALPHA ENGINE: falls back to engine_snapshot when DOLPHIN_SAFETY is empty
  • MC-FOREWARNER: graceful "not yet run" display; shows last-run age; no crash on absent key
  • Version number shown in header after MC status

NOTE(review): this docstring says "v4" while TUI_VERSION below is "TUI v7"
and the file is named dolphin_tui_v7.py — confirm which changelog applies.

Run:  source /home/dolphin/siloqy_env/bin/activate && python dolphin_tui_v4.py
Keys: q=quit r=force-refresh l=toggle log t=toggle test footer
"""
|
||||
|
||||
# ── stdlib ────────────────────────────────────────────────────────────────────
|
||||
import asyncio, json, math, threading, time
|
||||
from collections import deque
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
# ── third-party ───────────────────────────────────────────────────────────────
|
||||
try:
|
||||
from textual.app import App, ComposeResult
|
||||
from textual.containers import Horizontal, Vertical
|
||||
from textual.widgets import Digits, ProgressBar, Sparkline, Static
|
||||
except ImportError as e:
|
||||
raise SystemExit(f"textual not found — activate siloqy_env: {e}")
|
||||
|
||||
try:
|
||||
import hazelcast
|
||||
_HZ_OK = True
|
||||
except ImportError:
|
||||
_HZ_OK = False
|
||||
|
||||
# Version tag shown in the header and MC-forewarner title.
TUI_VERSION = "TUI v7"

# Disk fallbacks consulted when the corresponding Hazelcast key is absent.
_META_JSON = Path("/mnt/dolphinng5_predict/run_logs/meta_health.json")
_CAPITAL_JSON = Path("/tmp/dolphin_capital_checkpoint.json")
# Path relative to this file: Observability/TUI/ → ../../run_logs/
_TEST_JSON = Path(__file__).parent.parent.parent / "run_logs" / "test_results_latest.json"
# Symbols rendered in the OBF panel, in display order.
_OBF_SYMS = ["BTCUSDT", "ETHUSDT", "SOLUSDT", "BNBUSDT", "XRPUSDT"]

# Colour maps: trading posture, MC-forewarner status, meta-health status.
_PC = {"APEX":"green","STALKER":"yellow","TURTLE":"dark_orange","HIBERNATE":"red"}
_MC = {"GREEN":"green","ORANGE":"dark_orange","RED":"red"}
_SC = {"GREEN":"green","DEGRADED":"yellow","CRITICAL":"dark_orange","DEAD":"red"}
|
||||
|
||||
|
||||
# ── Thread-safe state ─────────────────────────────────────────────────────────
|
||||
class _State:
|
||||
def __init__(self):
|
||||
self._l = threading.Lock(); self._d: Dict[str, Any] = {}
|
||||
def put(self, k, v):
|
||||
with self._l: self._d[k] = v
|
||||
def get(self, k, default=None):
|
||||
with self._l: return self._d.get(k, default)
|
||||
def update(self, m):
|
||||
with self._l: self._d.update(m)
|
||||
|
||||
# Process-wide state singleton shared by the Hazelcast listener thread,
# the Prefect poller, and the UI refresh loop.
_S = _State()
|
||||
|
||||
def _ingest(key, raw):
    """Decode a JSON payload into shared state under 'hz.<key>' plus a
    receive-time stamp 'hz.<key>._t'. Empty or malformed payloads are
    dropped silently (best-effort feed)."""
    if not raw:
        return
    try:
        _S.update({
            f"hz.{key}": json.loads(raw),
            f"hz.{key}._t": time.time(),
        })
    except Exception:
        pass
|
||||
|
||||
def start_hz_listener(on_scan=None):
    """Start a daemon thread that mirrors Hazelcast maps into `_S`.

    on_scan: optional zero-arg callback invoked after each new
    'latest_eigen_scan' entry (used by the UI to trigger a redraw).
    If the hazelcast client library is unavailable, just marks hz_up=False.
    The thread reconnects forever: on any error it flags hz_up=False,
    records the error under 'hz_err', sleeps 10s, and retries.
    """
    if not _HZ_OK:
        _S.put("hz_up", False); return
    def _run():
        while True:
            try:
                _S.put("hz_up", False)
                client = hazelcast.HazelcastClient(
                    cluster_name="dolphin", cluster_members=["localhost:5701"],
                    connection_timeout=5.0)
                _S.put("hz_up", True)
                # DOLPHIN_FEATURES: prime current values, then listen.
                fm = client.get_map("DOLPHIN_FEATURES").blocking()
                for k in ("latest_eigen_scan","exf_latest","acb_boost",
                          "obf_universe_latest","mc_forewarner_latest"):
                    _ingest(k, fm.get(k))
                def _f(e):
                    _ingest(e.key, e.value)
                    if e.key == "latest_eigen_scan" and on_scan:
                        try: on_scan()
                        except Exception: pass
                fm.add_entry_listener(include_value=True, updated=_f, added=_f)
                # Single-key maps: (map, key within map, name used in _S).
                for map_name, key, state_key in [
                    ("DOLPHIN_META_HEALTH", "latest", "meta_health"),
                    ("DOLPHIN_SAFETY", "latest", "safety"),
                    ("DOLPHIN_HEARTBEAT", "nautilus_flow_heartbeat", "heartbeat"),
                ]:
                    m = client.get_map(map_name).blocking()
                    _ingest(state_key, m.get(key))
                    # Default args bind state_key/key per iteration (avoids
                    # the late-binding closure pitfall).
                    def _cb(e, sk=state_key, ek=key):
                        if e.key == ek: _ingest(sk, e.value)
                    m.add_entry_listener(include_value=True, updated=_cb, added=_cb)
                # Engine state: capital checkpoint + engine snapshot.
                stm = client.get_map("DOLPHIN_STATE_BLUE").blocking()
                _ingest("capital", stm.get("capital_checkpoint"))
                _ingest("engine_snapshot", stm.get("engine_snapshot"))
                def _cap(e):
                    if e.key == "capital_checkpoint": _ingest("capital", e.value)
                    elif e.key == "engine_snapshot": _ingest("engine_snapshot", e.value)
                stm.add_entry_listener(include_value=True, updated=_cap, added=_cap)
                # PnL map is optional — wrap separately so its absence
                # doesn't tear down the whole connection.
                try:
                    pm = client.get_map("DOLPHIN_PNL_BLUE").blocking()
                    _ingest("pnl_blue", pm.get("session_perf"))
                    def _pnl(e):
                        if e.key == "session_perf": _ingest("pnl_blue", e.value)
                    pm.add_entry_listener(include_value=True, updated=_pnl, added=_pnl)
                except Exception: pass
                # Park here while the client is alive; exit to reconnect.
                while True:
                    time.sleep(5)
                    if not getattr(client.lifecycle_service, "is_running", lambda: True)():
                        break
            except Exception as e:
                _S.put("hz_up", False); _S.put("hz_err", str(e)); time.sleep(10)
    threading.Thread(target=_run, daemon=True, name="hz-listener").start()
|
||||
|
||||
async def prefect_poll_loop():
    """Poll Prefect once a minute and publish recent flow runs into `_S`.

    Publishes 'prefect_flows' (latest run per distinct flow, newest first,
    max 8 rows of {name, state, ts}) and 'prefect_ok'. Imports prefect
    inside the loop so a missing/broken install degrades to prefect_ok=False
    instead of killing the TUI.
    """
    while True:
        try:
            from prefect.client.orchestration import get_client
            from prefect.client.schemas.sorting import FlowRunSort
            async with get_client() as pc:
                runs = await pc.read_flow_runs(limit=20, sort=FlowRunSort.START_TIME_DESC)
                # Keep only the most recent run per flow_id.
                seen: Dict[str, Any] = {}
                fid_to_name: Dict[str, str] = {}
                for r in runs:
                    fid = str(r.flow_id)
                    if fid not in seen: seen[fid] = r
                rows = []
                for fid, r in seen.items():
                    if fid not in fid_to_name:
                        # Resolve flow_id → human name; fall back to id prefix.
                        try:
                            f = await pc.read_flow(r.flow_id)
                            fid_to_name[fid] = f.name
                        except Exception: fid_to_name[fid] = fid[:8]
                    rows.append({"name": fid_to_name[fid],
                                 "state": r.state_name or "?",
                                 "ts": r.start_time.strftime("%m-%d %H:%M") if r.start_time else "--"})
                _S.put("prefect_flows", rows[:8]); _S.put("prefect_ok", True)
        except Exception as e:
            _S.put("prefect_ok", False); _S.put("prefect_err", str(e)[:60])
        await asyncio.sleep(60)
|
||||
|
||||
# ── Helpers ───────────────────────────────────────────────────────────────────
|
||||
def _age(ts):
|
||||
if not ts: return "?"
|
||||
s = time.time() - ts
|
||||
if s < 0: return "0s"
|
||||
if s < 60: return f"{s:.0f}s"
|
||||
if s < 3600: return f"{s/60:.0f}m"
|
||||
return f"{s/3600:.1f}h"
|
||||
|
||||
def _age_col(ts, warn=15, dead=60):
|
||||
s = time.time() - ts if ts else 9999
|
||||
return "red" if s > dead else ("yellow" if s > warn else "green")
|
||||
|
||||
def _bar(v, width=12):
|
||||
v = max(0.0, min(1.0, v))
|
||||
f = round(v * width)
|
||||
return "█" * f + "░" * (width - f)
|
||||
|
||||
def _fmt_vel(v):
|
||||
return "---" if v is None else f"{float(v):+.5f}"
|
||||
|
||||
def _dot(state):
|
||||
s = (state or "").upper()
|
||||
if s == "COMPLETED": return "[green]●[/green]"
|
||||
if s == "RUNNING": return "[cyan]●[/cyan]"
|
||||
if s in ("FAILED","CRASHED","TIMEDOUT"): return "[red]●[/red]"
|
||||
if s == "CANCELLED": return "[dim]●[/dim]"
|
||||
if s == "PENDING": return "[yellow]●[/yellow]"
|
||||
return "[dim]◌[/dim]"
|
||||
|
||||
def _posture_markup(p):
    """Wrap a posture name in its colour tag from _PC ('dim' when unknown)."""
    colour = _PC.get(p, "dim")
    return f"[{colour}]{p}[/{colour}]"
|
||||
|
||||
def _col(v, c): return f"[{c}]{v}[/{c}]"
|
||||
|
||||
def _eigen_from_scan(scan):
|
||||
if not scan: return {}
|
||||
r = scan.get("result", scan)
|
||||
mwr_raw = r.get("multi_window_results", {})
|
||||
def _td(w): return (mwr_raw.get(w) or mwr_raw.get(str(w)) or {}).get("tracking_data", {})
|
||||
def _rs(w): return (mwr_raw.get(w) or mwr_raw.get(str(w)) or {}).get("regime_signals", {})
|
||||
v50 = _td(50).get("lambda_max_velocity") or scan.get("w50_velocity", 0.0)
|
||||
v150 = _td(150).get("lambda_max_velocity") or scan.get("w150_velocity", 0.0)
|
||||
v300 = _td(300).get("lambda_max_velocity") or scan.get("w300_velocity", 0.0)
|
||||
v750 = _td(750).get("lambda_max_velocity") or scan.get("w750_velocity", 0.0)
|
||||
vel_div = scan.get("vel_div", float(v50 or 0) - float(v150 or 0))
|
||||
inst_avg = sum(_rs(w).get("instability_score", 0.0) for w in (50,150,300,750)) / 4
|
||||
bt_price = (r.get("pricing_data", {}) or {}).get("current_prices", {}).get("BTCUSDT")
|
||||
return {
|
||||
"scan_number": scan.get("scan_number", 0),
|
||||
"timestamp": scan.get("timestamp", 0),
|
||||
"vel_div": float(vel_div or 0),
|
||||
"v50": float(v50 or 0), "v150": float(v150 or 0),
|
||||
"v300": float(v300 or 0), "v750": float(v750 or 0),
|
||||
"inst_avg": float(inst_avg or 0),
|
||||
"btc_price": float(bt_price) if bt_price else None,
|
||||
"regime": r.get("regime", r.get("sentiment", "?")),
|
||||
"version": scan.get("version", "?"),
|
||||
}
|
||||
|
||||
# ── CSS ───────────────────────────────────────────────────────────────────────
|
||||
_CSS = """
|
||||
Screen { background: #0a0a0a; color: #d4d4d4; }
|
||||
#header { height: 2; background: #111; border-bottom: solid #333; padding: 0 1; }
|
||||
#trader_row { height: 5; }
|
||||
#top_row { height: 9; }
|
||||
#mid_row { height: 9; }
|
||||
#bot_row { height: 7; }
|
||||
#log_row { height: 5; display: none; }
|
||||
#mc_outer { height: 16; border: solid #224; background: #060616; }
|
||||
#mc_title { height: 1; padding: 0 1; }
|
||||
#mc_body { height: 15; }
|
||||
#mc_left { width: 18; padding: 0 1; }
|
||||
#mc_center { width: 1fr; padding: 0 1; }
|
||||
#mc_right { width: 30; padding: 0 1; }
|
||||
#mc_prob_label { height: 1; }
|
||||
#mc_prob_bar { height: 1; }
|
||||
#mc_env_label { height: 1; }
|
||||
#mc_env_bar { height: 1; }
|
||||
#mc_champ_label{ height: 1; }
|
||||
#mc_champ_bar { height: 1; }
|
||||
#mc_live { height: 8; }
|
||||
#mc_spark_lbl { height: 1; }
|
||||
#mc_spark { height: 2; }
|
||||
#mc_mae_lbl { height: 1; }
|
||||
#mc_mae_spark { height: 2; }
|
||||
#mc_digits { height: 3; }
|
||||
#mc_status { height: 3; }
|
||||
#mc_legend { height: 6; }
|
||||
#test_footer { height: 3; background: #101010; border-top: solid #2a2a2a; padding: 0 1; }
|
||||
Static.panel { border: solid #333; padding: 0 1; height: 100%; }
|
||||
#p_trader { width: 1fr; border: solid #006650; }
|
||||
#p_health { width: 1fr; }
|
||||
#p_alpha { width: 1fr; }
|
||||
#p_scan { width: 1fr; }
|
||||
#p_extf { width: 1fr; }
|
||||
#p_obf { width: 1fr; }
|
||||
#p_capital { width: 1fr; }
|
||||
#p_prefect { width: 1fr; }
|
||||
#p_acb { width: 1fr; }
|
||||
#p_log { width: 1fr; border: solid #333; padding: 0 1; }
|
||||
ProgressBar > .bar--bar { color: $success; }
|
||||
ProgressBar > .bar--complete { color: $success; }
|
||||
ProgressBar.-danger > .bar--bar { color: $error; }
|
||||
ProgressBar.-warning > .bar--bar { color: $warning; }
|
||||
"""
|
||||
|
||||
|
||||
# ── App ───────────────────────────────────────────────────────────────────────
|
||||
class DolphinTUI(App):
    """Single-screen observability TUI for the DOLPHIN trading stack.

    Composes a grid of Static panels and repaints them once a second (and on
    every Hazelcast scan event via ``start_hz_listener``) from the thread-safe
    ``_S`` state store, with JSON-file fallbacks for a few keys.
    """

    CSS = _CSS
    BINDINGS = [("q","quit","Quit"),("r","force_refresh","Refresh"),
                ("l","toggle_log","Log"),("t","toggle_tests","Tests")]
    _log_vis = False; _test_vis = True          # visibility toggles for log row / test footer
    _prob_hist: deque; _mae_deque: deque        # ring buffers, created in on_mount
    _session_start_cap: Optional[float] = None  # first non-zero capital seen this session
    _cap_peak: Optional[float] = None           # session high-water mark (drawdown basis)

    def compose(self) -> ComposeResult:
        """Build the static widget tree; all content is injected later by _update()."""
        yield Static("", id="header")
        with Horizontal(id="trader_row"):
            yield Static("", classes="panel", id="p_trader")
        with Horizontal(id="top_row"):
            yield Static("", classes="panel", id="p_health")
            yield Static("", classes="panel", id="p_alpha")
            yield Static("", classes="panel", id="p_scan")
        with Horizontal(id="mid_row"):
            yield Static("", classes="panel", id="p_extf")
            yield Static("", classes="panel", id="p_obf")
            yield Static("", classes="panel", id="p_capital")
        with Horizontal(id="bot_row"):
            yield Static("", classes="panel", id="p_prefect")
            yield Static("", classes="panel", id="p_acb")
        with Vertical(id="mc_outer"):
            yield Static("", id="mc_title")
            with Horizontal(id="mc_body"):
                with Vertical(id="mc_left"):
                    yield Digits("0.000", id="mc_digits")
                    yield Static("", id="mc_status")
                with Vertical(id="mc_center"):
                    yield Static("", id="mc_prob_label")
                    yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_prob_bar")
                    yield Static("", id="mc_env_label")
                    yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_env_bar")
                    yield Static("", id="mc_champ_label")
                    yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_champ_bar")
                    yield Static("", id="mc_live")
                with Vertical(id="mc_right"):
                    yield Static("", id="mc_spark_lbl")
                    yield Sparkline([], id="mc_spark")
                    yield Static("", id="mc_mae_lbl")
                    yield Sparkline([], id="mc_mae_spark")
                    yield Static("", id="mc_legend")
        yield Static("", id="test_footer")
        with Horizontal(id="log_row"):
            yield Static("", classes="panel", id="p_log")

    def on_mount(self) -> None:
        """Allocate history buffers, start data feeds, and schedule the 1 Hz repaint."""
        self._prob_hist = deque([0.0] * 40, maxlen=40)
        self._mae_deque = deque(maxlen=500)
        # Hazelcast listener runs on its own thread; marshal back to the UI thread.
        start_hz_listener(on_scan=lambda: self.call_from_thread(self._update))
        self.run_worker(prefect_poll_loop(), name="prefect-poll", exclusive=True)
        self.set_interval(1.0, self._update)
        self._update()

    def _update(self) -> None:
        """Repaint every panel from the current contents of the _S state store.

        Pure read-and-render: pulls each Hazelcast-mirrored key (with JSON-file
        fallbacks for meta-health and capital), derives colours/ages, and pushes
        Rich markup into the Static widgets.  Must run on the UI thread.
        """
        now_str = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
        hz_up = _S.get("hz_up", False)
        mh = _S.get("hz.meta_health") or self._read_json(_META_JSON)
        safe = _S.get("hz.safety") or {}
        scan = _S.get("hz.latest_eigen_scan") or {}
        exf = _S.get("hz.exf_latest") or {}
        acb = _S.get("hz.acb_boost") or {}
        obf_u = _S.get("hz.obf_universe_latest") or {}
        mc = _S.get("hz.mc_forewarner_latest") or {}
        cap = _S.get("hz.capital") or self._read_json(_CAPITAL_JSON) or {}
        hb = _S.get("hz.heartbeat") or {}
        eng = _S.get("hz.engine_snapshot") or {}
        eigen = _eigen_from_scan(scan)

        # ── posture: DOLPHIN_SAFETY first, fall back to engine_snapshot ───────
        posture = safe.get("posture") or eng.get("posture") or "?"
        # ── Rm / breakdown: DOLPHIN_SAFETY first, fall back to zeros ─────────
        rm_s = float(safe.get("Rm", 0.0))
        bd = safe.get("breakdown") or {}
        # If DOLPHIN_SAFETY is empty but engine_snapshot has posture, show that
        safety_live = bool(safe.get("posture") or safe.get("Rm"))

        rm_m = mh.get("rm_meta", 0.0) if mh else 0.0
        mhs_st = mh.get("status", "?") if mh else "?"
        sc_mhs = _SC.get(mhs_st, "dim")
        pc_col = _PC.get(posture, "dim")
        hz_tag = "[green][HZ✓][/green]" if hz_up else "[red][HZ✗][/red]"
        mc_st = mc.get("status", "N/A") if mc else "N/A"
        mc_col = _MC.get(mc_st, "dim")

        # ── HEADER ────────────────────────────────────────────────────────────
        self._w("#header").update(
            f"[bold cyan]🐬 DOLPHIN-NAUTILUS[/bold cyan] {now_str}"
            f" {hz_tag} [{sc_mhs}]MHS:{mhs_st} {rm_m:.3f}[/{sc_mhs}]"
            f" [{pc_col}]◈{posture}[/{pc_col}]"
            f" [{mc_col}]MC:{mc_st}[/{mc_col}]"
            f" [dim]{TUI_VERSION}[/dim]\n"
            f"[dim] localhost:5701 q=quit r=refresh l=log t=tests[/dim]"
        )

        # ── TRADER ────────────────────────────────────────────────────────────
        cap_val = float(cap.get("capital", 0)) if cap else 0.0
        hb_phase = hb.get("phase", "?") if hb else "N/A"
        hb_ts = hb.get("ts") if hb else None
        hb_age = _age(hb_ts) if hb_ts else "?"
        hb_col = _age_col(hb_ts, 30, 120) if hb_ts else "red"
        vel_div = eigen.get("vel_div", 0.0)
        vc = "green" if vel_div > 0 else ("red" if vel_div < -0.02 else "yellow")
        scan_no = eigen.get("scan_number", 0)
        btc_p = eigen.get("btc_price")
        btc_str = f"BTC:[cyan]${btc_p:,.0f}[/cyan] " if btc_p else ""
        trades_ex= eng.get("trades_executed")
        last_vd = eng.get("last_vel_div")
        # NOTE(review): the conditional below selects between two whole
        # concatenated strings — when last_vd is None the TRADER/phase header
        # line is omitted entirely; confirm that is intended.
        self._w("#p_trader").update(
            f"[bold cyan]TRADER[/bold cyan] {_posture_markup(posture)}"
            f" phase:[{hb_col}]{hb_phase}[/{hb_col}] hb:{_col(hb_age, hb_col)}"
            f" scan:[dim]#{scan_no}[/dim] {btc_str}\n"
            f" vel_div:[{vc}]{vel_div:+.5f}[/{vc}] thr:[dim]-0.02000[/dim]"
            f" cap:[cyan]${cap_val:,.0f}[/cyan]"
            f" trades:{trades_ex if trades_ex is not None else '—'}\n"
            f" last_vel_div:[dim]{last_vd:+.5f}[/dim] open-pos:[dim]DOLPHIN_PNL_BLUE[/dim]"
            if last_vd is not None else
            f" vel_div:[{vc}]{vel_div:+.5f}[/{vc}] thr:[dim]-0.02000[/dim]"
            f" cap:[cyan]${cap_val:,.0f}[/cyan]"
            f" trades:{trades_ex if trades_ex is not None else '—'}\n"
            f" open-pos:[dim]DOLPHIN_PNL_BLUE (not yet wired)[/dim]"
        )

        # ── SYS HEALTH — FIX: expand tdr/scb labels ──────────────────────────
        if mh:
            svc = mh.get("service_status", {})
            hz_ks= mh.get("hz_key_status", {})
            def _svc(nm, label):
                # One dot per supervised service: green only when RUNNING.
                st = svc.get(nm, "?")
                dot = "[green]●[/green]" if st == "RUNNING" else "[red]●[/red]"
                return f"{dot}[dim]{label}[/dim]"
            def _hz_dot(nm):
                # Hazelcast key freshness score → green ≥0.9, yellow ≥0.5, else red.
                sc = hz_ks.get(nm, {}).get("score", 0)
                return "[green]●[/green]" if sc >= 0.9 else ("[yellow]●[/yellow]" if sc >= 0.5 else "[red]●[/red]")
            self._w("#p_health").update(
                f"[bold]SYS HEALTH[/bold] [{sc_mhs}]{mhs_st}[/{sc_mhs}]\n"
                f"rm:[{sc_mhs}]{rm_m:.3f}[/{sc_mhs}]"
                f" m4:{mh.get('m4_control_plane',0):.2f}"
                f" m3:{mh.get('m3_data_freshness',0):.2f}"
                f" m5:{mh.get('m5_coherence',0):.2f}\n"
                f"{_svc('dolphin_data:exf_fetcher','exf')}"
                f" {_svc('dolphin_data:acb_processor','acb')}"
                f" {_svc('dolphin_data:obf_universe','obf')}\n"
                f"{_svc('dolphin:nautilus_trader','trader')}"
                f" {_svc('dolphin:scan_bridge','scan-bridge')}\n"
                f"[dim]hz: exf{_hz_dot('exf_latest')}"
                f" scan{_hz_dot('latest_eigen_scan')}"
                f" obf{_hz_dot('obf_universe')}[/dim]"
            )
        else:
            self._w("#p_health").update("[bold]SYS HEALTH[/bold]\n[dim]awaiting MHS…[/dim]")

        # ── ALPHA ENGINE — FIX: fall back to engine_snapshot when safety empty ─
        safe_ts = _S.get("hz.safety._t")
        safe_age = _age(safe_ts) if safe_ts else "?"
        safe_ac = _age_col(safe_ts, 30, 120) if safe_ts else "red"
        def _cat(n):
            # Colour-code one safety category score (Cat1..Cat5).
            v = bd.get(f"Cat{n}", 0.0)
            c = "green" if v >= 0.9 else ("yellow" if v >= 0.6 else "red")
            return f"[{c}]{v:.2f}[/{c}]"
        if safety_live:
            self._w("#p_alpha").update(
                f"[bold]ALPHA ENGINE[/bold] {_posture_markup(posture)}\n"
                f"Rm:[{pc_col}]{_bar(rm_s,14)}[/{pc_col}]{rm_s:.3f}\n"
                f"C1:{_cat(1)} C2:{_cat(2)} C3:{_cat(3)}\n"
                f"C4:{_cat(4)} C5:{_cat(5)}"
                f" fenv:{bd.get('f_env',0):.2f} fex:{bd.get('f_exe',0):.2f}\n"
                f"[dim]age:{_col(safe_age, safe_ac)}[/dim]"
            )
        else:
            # DOLPHIN_SAFETY empty — show what we have from engine_snapshot
            bars_idx = eng.get("bar_idx", "?")
            scans_p = eng.get("scans_processed", "?")
            self._w("#p_alpha").update(
                f"[bold]ALPHA ENGINE[/bold] {_posture_markup(posture)}\n"
                f"[yellow]DOLPHIN_SAFETY empty[/yellow]\n"
                f"posture from engine_snapshot\n"
                f"bar:{bars_idx} scans:{scans_p}\n"
                f"[dim]Rm/Cat1-5: awaiting DOLPHIN_SAFETY[/dim]"
            )

        # ── SCAN ──────────────────────────────────────────────────────────────
        scan_ac = _age_col(eigen.get("timestamp", 0), 15, 60)
        scan_age= _age(eigen.get("timestamp")) if eigen.get("timestamp") else "?"
        vi = eigen.get("inst_avg", 0)
        # Extra fields from result dict
        r_dict = scan.get("result", scan)
        regime = eigen.get("regime", r_dict.get("regime", "?"))
        bull_pct = r_dict.get("bull_pct", 0.0)
        bear_pct = r_dict.get("bear_pct", 0.0)
        conf = r_dict.get("confidence", 0.0)
        bb_dist = r_dict.get("bb_dist_pct", 0.0)
        price = r_dict.get("price", eigen.get("btc_price"))
        reg_c = "green" if regime == "BULL" else ("red" if regime == "BEAR" else "yellow")
        bb_c = "red" if abs(bb_dist) > 0.05 else ("yellow" if abs(bb_dist) > 0.02 else "green")
        self._w("#p_scan").update(
            f"[bold]SCAN {eigen.get('version','?')}[/bold]"
            f" [dim]#{scan_no}[/dim] [{scan_ac}]{scan_age}[/{scan_ac}]\n"
            f"vel:[{vc}]{vel_div:+.5f}[/{vc}]"
            f" w50:[yellow]{_fmt_vel(eigen.get('v50'))}[/yellow]"
            f" w150:[dim]{_fmt_vel(eigen.get('v150'))}[/dim]\n"
            f"w300:[dim]{_fmt_vel(eigen.get('v300'))}[/dim]"
            f" w750:[dim]{_fmt_vel(eigen.get('v750'))}[/dim]\n"
            f"[{reg_c}]{regime}[/{reg_c}] B:{bull_pct:.0f}% b:{bear_pct:.0f}%"
            f" conf:{conf:.2f} inst:{vi:.4f}\n"
            f"BB:[{bb_c}]{bb_dist:+.4f}[/{bb_c}]"
            + (f" ${price:,.0f}" if price else "")
        )

        # ── ExtF ──────────────────────────────────────────────────────────────
        exf_t = _S.get("hz.exf_latest._t")
        exf_age = _age(exf_t) if exf_t else "?"
        exf_ac = _age_col(exf_t, 30, 120) if exf_t else "red"
        f_btc = exf.get("funding_btc"); dvol = exf.get("dvol_btc")
        fng = exf.get("fng"); taker = exf.get("taker")
        ls_btc = exf.get("ls_btc"); vix = exf.get("vix")
        f_eth = exf.get("funding_eth"); ls_eth = exf.get("ls_eth")
        oi_btc = exf.get("oi_btc"); oi_eth = exf.get("oi_eth")
        fdb_btc = exf.get("fund_dbt_btc")
        ok_cnt = exf.get("_ok_count", 0)
        dvol_c = "red" if dvol and dvol > 70 else ("yellow" if dvol and dvol > 50 else "green")
        fng_c = "red" if fng and fng < 25 else ("yellow" if fng and fng < 45 else "green")
        taker_c = "red" if taker and taker > 0.7 else ("yellow" if taker and taker > 0.55 else "green")
        def _ef(v, fmt=".5f"): return f"{v:{fmt}}" if v is not None else "?"
        if exf:
            self._w("#p_extf").update(
                f"[bold]ExtF[/bold] [{exf_ac}]{ok_cnt}/9 {exf_age}[/{exf_ac}]\n"
                f"fBTC:[cyan]{_ef(f_btc)}[/cyan] fETH:[dim]{_ef(f_eth)}[/dim]"
                f" dvol:[{dvol_c}]{_ef(dvol,'.1f')}[/{dvol_c}]\n"
                f"fng:[{fng_c}]{int(fng) if fng else '?'}[/{fng_c}]"
                f" tkr:[{taker_c}]{_ef(taker,'.3f')}[/{taker_c}]"
                f" vix:{_ef(vix,'.1f')}\n"
                f"ls_b:{_ef(ls_btc,'.3f')} ls_e:{_ef(ls_eth,'.3f')}"
                f" fdb:{_ef(fdb_btc,'.5f')}\n"
                f"oi_b:{_ef(oi_btc,'.0f')} oi_e:{_ef(oi_eth,'.0f')}"
                f" acb✓:{exf.get('_acb_ready','?')}"
            )
        else:
            self._w("#p_extf").update("[bold]ExtF[/bold]\n[dim]no data[/dim]")

        # ── OBF ───────────────────────────────────────────────────────────────
        obf_t = _S.get("hz.obf_universe_latest._t")
        obf_age = _age(obf_t) if obf_t else "?"
        obf_ac = _age_col(obf_t, 30, 120) if obf_t else "red"
        n_assets= obf_u.get("_n_assets", 0) if obf_u else 0
        lines = [f"[bold]OBF[/bold] [{obf_ac}]n={n_assets} {obf_age}[/{obf_ac}]"]
        for sym in _OBF_SYMS:
            if not obf_u: break
            a = obf_u.get(sym)
            if not a: continue
            imb = float(a.get("imbalance", 0))
            fp = float(a.get("fill_probability", 0))
            dq = float(a.get("depth_quality", 0))
            imb_c = "green" if imb > 0.1 else ("red" if imb < -0.1 else "yellow")
            lines.append(f"{sym[:3]} [{imb_c}]{imb:+.2f}[/{imb_c}] fp:{fp:.2f} dq:{dq:.2f}")
        self._w("#p_obf").update("\n".join(lines[:6]))

        # ── CAPITAL — sourced from engine_snapshot + capital_checkpoint ─────────
        # engine_snapshot: capital, posture, trades_executed, scans_processed,
        #                  bar_idx, open_notional, current_leverage,
        #                  leverage_soft_cap, leverage_abs_cap, timestamp
        # capital_checkpoint: capital, ts (written more frequently)
        cap_t = _S.get("hz.capital._t")
        cap_ac = _age_col(cap_t, 60, 300) if cap_t else "dim"
        cap_age = _age(cap_t) if cap_t else "?"
        eng_ts = eng.get("timestamp")  # read but not currently displayed

        # Capital: prefer engine_snapshot (most recent), fall back to checkpoint
        eng_cap = float(eng.get("capital", 0.0)) if eng else 0.0
        chk_cap = float(cap.get("capital", 0.0)) if cap else 0.0
        live_cap = eng_cap if eng_cap > 0 else chk_cap

        # Leverage
        cur_lev = float(eng.get("current_leverage", 0.0)) if eng else 0.0
        soft_cap = float(eng.get("leverage_soft_cap", 8.0)) if eng else 8.0
        abs_cap = float(eng.get("leverage_abs_cap", 9.0)) if eng else 9.0
        open_not = float(eng.get("open_notional", 0.0)) if eng else 0.0
        lev_c = "green" if cur_lev < 3 else ("yellow" if cur_lev < soft_cap else "red")

        # Trades / scans
        trades_ex = eng.get("trades_executed") if eng else None
        scans_p = eng.get("scans_processed") if eng else None
        bar_idx = eng.get("bar_idx") if eng else None

        # Drawdown from Cat5 (safety breakdown)
        # NOTE(review): inverse-logistic reconstruction of DD from the Cat5
        # score — the 0.12/30.0 constants presumably mirror the producer's
        # mapping; confirm against the safety engine.
        c5 = bd.get("Cat5", 1.0) if bd else 1.0
        try:
            dd_est = 0.12 + math.log(1.0/c5 - 1.0) / 30.0 if 0 < c5 < 1 else 0.0
        except Exception:
            dd_est = 0.0
        dd_c = "red" if dd_est > 0.15 else ("yellow" if dd_est > 0.08 else "green")

        # Trader up/down: heartbeat age < 30s = up
        trader_up = hb_ts and (time.time() - hb_ts) < 30
        trader_tag = "[green]● LIVE[/green]" if trader_up else "[red]● DOWN[/red]"

        # Lev bar (compact, 16 chars)
        lev_bar = _bar(min(cur_lev / abs_cap, 1.0), 14)

        self._w("#p_capital").update(
            f"[bold]CAPITAL[/bold] {trader_tag} [{cap_ac}]{cap_age}[/{cap_ac}]\n"
            f"Cap:[cyan]${live_cap:,.0f}[/cyan]"
            f" DD≈:[{dd_c}]{dd_est*100:.1f}%[/{dd_c}]\n"
            f"Pos:{_posture_markup(posture)}"
            f" Rm:[{_PC.get(posture,'dim')}]{rm_s:.3f}[/{_PC.get(posture,'dim')}]\n"
            f"Lev:[{lev_c}]{lev_bar}[/{lev_c}]{cur_lev:.2f}x"
            f" notional:${open_not:,.0f}\n"
            f"[dim]trades:{trades_ex if trades_ex is not None else '—'}"
            f" scans:{scans_p if scans_p is not None else '—'}"
            f" bar:{bar_idx if bar_idx is not None else '—'}[/dim]"
        )

        # ── PREFECT ───────────────────────────────────────────────────────────
        flows = _S.get("prefect_flows") or []
        pf_ok = _S.get("prefect_ok", False)
        pf_hdr = "[green]✓[/green]" if pf_ok else "[red]✗[/red]"
        flines = "\n".join(
            f"{_dot(f['state'])} {f['name'][:22]:<22} {f['ts']}" for f in flows[:5])
        self._w("#p_prefect").update(
            f"[bold]PREFECT[/bold] {pf_hdr}\n" + (flines or "[dim]polling…[/dim]"))

        # ── ACB ───────────────────────────────────────────────────────────────
        acb_t = _S.get("hz.acb_boost._t")
        acb_age = _age(acb_t) if acb_t else "?"
        acb_ac = _age_col(acb_t, 3600, 86400) if acb_t else "dim"
        boost = acb.get("boost", 1.0) if acb else 1.0
        beta = acb.get("beta", 0.8) if acb else 0.8
        cut = acb.get("cut", 0.0) if acb else 0.0
        boost_c = "green" if boost >= 1.5 else ("yellow" if boost >= 1.0 else "red")
        cut_c = "red" if cut > 0 else "dim"
        self._w("#p_acb").update(
            f"[bold]ACB[/bold] [{acb_ac}]{acb.get('date','?') if acb else '?'}[/{acb_ac}]\n"
            f"boost:[{boost_c}]{boost:.2f}x[/{boost_c}] β={beta:.2f}"
            f" cut:[{cut_c}]{cut:.2f}[/{cut_c}]\n"
            f"w750_thr:{acb.get('w750_threshold',0.0) if acb else 0.0:.4f}\n"
            f"[dim]dvol={acb.get('factors',{}).get('dvol_btc','?') if acb else '?'}"
            f" fng={acb.get('factors',{}).get('fng','?') if acb else '?'}[/dim]\n"
            f"[dim]age:{acb_age} cfg:{acb.get('config_used','?') if acb else '?'}[/dim]"
        )

        # ── MC-FOREWARNER — FIX: graceful when absent ─────────────────────────
        prob = float(mc.get("catastrophic_prob", 0.0)) if mc else 0.0
        env = float(mc.get("envelope_score", 0.0)) if mc else 0.0
        champ_p = mc.get("champion_probability") if mc else None
        mc_ts = mc.get("timestamp") if mc else None
        mc_warns = mc.get("warnings", []) if mc else []
        sc = _MC.get(mc_st, "dim")
        self._prob_hist.append(prob)

        # Age since last 4h run
        mc_age_str = "never run"
        if mc_ts:
            try:
                mc_dt = datetime.fromisoformat(mc_ts.replace("Z", "+00:00"))
                age_s = (datetime.now(timezone.utc) - mc_dt).total_seconds()
                age_m = int(age_s // 60)
                mc_age_str = f"{age_m//60}h{age_m%60:02d}m ago" if age_m >= 60 else f"{age_m}m ago"
            except Exception: pass

        mc_present = bool(mc)
        self._w("#mc_title").update(
            f"[bold cyan]⚡ MC-FOREWARNER RISK MANIFOLD[/bold cyan] {TUI_VERSION}"
            + (f" [{sc}]▶ {mc_st}[/{sc}] [dim]assessed:{mc_age_str} cadence:4h[/dim]"
               if mc_present else
               " [yellow]⚠ no data yet — Prefect 4h schedule not yet run[/yellow]"
               " [dim](mc_forewarner_flow runs every 4h)[/dim]")
        )

        # Left: digits + status
        self._w("#mc_digits", Digits).update(f"{prob:.3f}")
        status_str = {"GREEN":"🟢 SAFE","ORANGE":"🟡 CAUTION","RED":"🔴 DANGER"}.get(mc_st, "⚪ N/A")
        champ_str = f"champ:{champ_p*100:.0f}%" if champ_p is not None else "champ:—"
        self._w("#mc_status").update(
            (f"[{sc}]{status_str}[/{sc}]\n[dim]cat.prob {champ_str}[/dim]\n[dim]env:{env:.3f}[/dim]")
            if mc_present else
            "[yellow]awaiting[/yellow]\n[dim]first run[/dim]\n[dim]in ~4h[/dim]"
        )

        # Center bars
        # NOTE(review): despite its name, "fmt" carries the 0-100 progress int.
        # For mc_prob_bar, val<lo adds "-warning" and lo≤val<hi adds "-danger",
        # which looks inverted relative to the label text — confirm intent.
        for bar_id, val, lo_thr, hi_thr, lo_cls, hi_cls, label, fmt in [
            ("mc_prob_bar", prob, 0.10, 0.30, "-warning", "-danger",
             f"[dim]cat.prob[/dim] [{sc}]{prob:.4f}[/{sc}] [green]<0.10 OK[/green] [yellow]<0.30 WARN[/yellow] [red]≥0.30 CRIT[/red]",
             int(prob * 100)),
            ("mc_env_bar", env, 0.40, 0.70, "-danger", "-warning",
             f"[dim]env.score[/dim] [green]{env:.4f}[/green] [red]<0.40 DANGER[/red] [yellow]<0.70 CAUTION[/yellow] [green]≥0.70 SAFE[/green]",
             int(env * 100)),
        ]:
            pb = self._w(f"#{bar_id}", ProgressBar)
            pb.progress = fmt
            pb.remove_class("-danger", "-warning")
            if val < lo_thr: pb.add_class(lo_cls)
            elif val < hi_thr: pb.add_class(hi_cls)
            self._w(f"#{bar_id.replace('_bar','_label')}").update(label)

        # champion_probability bar
        chp_val = champ_p if champ_p is not None else 0.0
        cb = self._w("#mc_champ_bar", ProgressBar)
        cb.progress = int(chp_val * 100)
        cb.remove_class("-danger", "-warning")
        if chp_val < 0.30: cb.add_class("-danger")
        elif chp_val < 0.60: cb.add_class("-warning")
        self._w("#mc_champ_label").update(
            f"[dim]champ.prob[/dim] "
            + (f"[green]{champ_p*100:.1f}%[/green]" if champ_p is not None else "[dim]—[/dim]")
            + " [green]>60% GOOD[/green] [yellow]>30% MARGINAL[/yellow] [red]<30% RISK[/red]"
        )

        # Live performance tier
        cur_cap = float(cap.get("capital", 0.0)) if cap else 0.0
        if cur_cap > 0:
            if self._session_start_cap is None: self._session_start_cap = cur_cap
            if self._cap_peak is None or cur_cap > self._cap_peak: self._cap_peak = cur_cap
        live_roi = ((cur_cap - self._session_start_cap) / self._session_start_cap
                    if cur_cap > 0 and self._session_start_cap else None)
        live_dd = ((self._cap_peak - cur_cap) / self._cap_peak
                   if cur_cap > 0 and self._cap_peak and cur_cap < self._cap_peak else None)
        pnl_blue = _S.get("hz.pnl_blue") or {}
        def _pct(v): return f"{v*100:+.1f}%" if v is not None else "—"
        def _lm(k, fmt="{:.3f}"):
            # Format one pnl_blue metric, em-dash when absent.
            v = pnl_blue.get(k); return fmt.format(v) if v is not None else "—"
        roi_c = "green" if live_roi and live_roi > 0 else ("red" if live_roi and live_roi < -0.10 else "yellow")
        dd_c2 = "red" if live_dd and live_dd > 0.20 else ("yellow" if live_dd and live_dd > 0.08 else "green")
        self._w("#mc_live").update(
            f"[dim]ROI[/dim] [{roi_c}]{_pct(live_roi):>8}[/{roi_c}]"
            f" [dim]champ gate:>+30% crit:<-30%[/dim]\n"
            f"[dim]DD [/dim] [{dd_c2}]{_pct(live_dd):>8}[/{dd_c2}]"
            f" [dim]champ gate:<20% crit:>40%[/dim]\n"
            f"[dim]WR:{_lm('win_rate','{:.1%}')} PF:{_lm('profit_factor','{:.2f}')}"
            f" Sh:{_lm('sharpe','{:.2f}')} Cal:{_lm('calmar','{:.2f}')}[/dim]\n"
            f"[dim]cap:${cur_cap:,.0f} start:${self._session_start_cap:,.0f} "
            f"trades:{eng.get('trades_executed','—')}[/dim]"
            if cur_cap > 0 and self._session_start_cap else
            "[dim]awaiting capital data…[/dim]"
        )

        # Right: sparklines + legend
        self._w("#mc_spark_lbl").update(
            f"[dim]cat.prob history [{min(self._prob_hist):.3f}–{max(self._prob_hist):.3f}][/dim]")
        self._w("#mc_spark", Sparkline).data = list(self._prob_hist)
        mae_list = list(self._mae_deque)
        self._w("#mc_mae_lbl").update(f"[dim]MAE hist (n={len(mae_list)})[/dim]")
        self._w("#mc_mae_spark", Sparkline).data = mae_list[-40:] if mae_list else [0.0]
        warn_str = ("\n[yellow]⚠ " + mc_warns[0] + "[/yellow]") if mc_warns else ""
        self._w("#mc_legend").update(
            "[bold]MC THRESHOLDS[/bold]\n"
            "[green]GREEN[/green] cat < 0.10\n"
            "[yellow]ORANGE[/yellow] cat < 0.30\n"
            "[red]RED[/red] cat ≥ 0.30\n"
            "[dim]DD gate: <20%[/dim]\n"
            "[dim]DD crit: >40%[/dim]" + warn_str
        )

        # ── TEST FOOTER — schema: {passed, total, status: PASS|FAIL|N/A} ────────
        if self._test_vis:
            tr = self._read_json(_TEST_JSON) or {}
            run_at = tr.get("_run_at", "never")
            cats = [
                ("data_integrity", "data"),
                ("finance_fuzz", "fuzz"),
                ("signal_fill", "signal"),
                ("degradation", "degrad"),
                ("actor", "actor"),
            ]
            def _badge(key, short):
                # One coloured pass/fail badge per test category.
                info = tr.get(key, {})
                if not info:
                    return f"[dim]{short}:n/a[/dim]"
                status = info.get("status", "N/A")
                passed = info.get("passed")
                total = info.get("total")
                if status == "N/A" or passed is None:
                    return f"[dim]{short}:N/A[/dim]"
                col = "green" if status == "PASS" else "red"
                return f"[{col}]{short}:{passed}/{total}[/{col}]"
            badges = " ".join(_badge(k, s) for k, s in cats)
            # NOTE(review): footer text names dolphin_tui_v6.py — possibly a
            # stale module reference; left as-is since it is runtime output.
            self._w("#test_footer").update(
                f"[bold dim]TESTS[/bold dim] [dim]last run: {run_at}[/dim]"
                f" [dim]t=toggle r=reload[/dim]\n"
                f"{badges}\n"
                f"[dim]file: run_logs/test_results_latest.json "
                f"API: write_test_results() in dolphin_tui_v6.py[/dim]"
            )
        else:
            self._w("#test_footer").update("")

        # ── LOG ───────────────────────────────────────────────────────────────
        if self._log_vis:
            self._w("#p_log").update(
                f"[bold]LOG[/bold] (l=hide)\n"
                f"[dim]{now_str}[/dim] scan=#{scan_no} vel={vel_div:+.5f}\n"
                f"hz_err:{_S.get('hz_err','none')} pf_err:{_S.get('prefect_err','none')}\n"
                f"[dim]state keys:{len(_S._d)} safety_live:{safety_live}[/dim]"
            )

    def action_force_refresh(self) -> None: self._update()
    def action_toggle_log(self) -> None:
        """Show/hide the log row (bound to 'l')."""
        self._log_vis = not self._log_vis
        self.query_one("#log_row").display = self._log_vis
    def action_toggle_tests(self) -> None:
        """Show/hide the test footer (bound to 't') and repaint immediately."""
        self._test_vis = not self._test_vis; self._update()

    def _w(self, selector, widget_type=Static):
        """Shorthand for query_one with a widget type (defaults to Static)."""
        return self.query_one(selector, widget_type)

    @staticmethod
    def _read_json(path):
        """Best-effort JSON file read; None on any error (missing file, bad JSON)."""
        try: return json.loads(path.read_text())
        except Exception: return None
|
||||
|
||||
|
||||
def write_test_results(results: dict):
    """Merge *results* into the test-footer JSON consumed by the TUI.

    Called by test scripts / CI / conftest.py.  Categories absent from
    *results* keep their previously recorded value; a fresh UTC "_run_at"
    stamp is always injected.

    Schema:
        {
            "_run_at": "auto-injected",
            "data_integrity": {"passed": 15, "total": 15, "status": "PASS"},
            "finance_fuzz": {"passed": null, "total": null, "status": "N/A"},
            ...
        }
    status: "PASS" | "FAIL" | "N/A"

    Example (conftest.py):
        import sys; sys.path.insert(0, "/mnt/dolphinng5_predict/Observability/TUI")
        from dolphin_tui_v5 import write_test_results
        write_test_results({"data_integrity": {"passed": 15, "total": 15, "status": "PASS"}})
    """
    _TEST_JSON.parent.mkdir(parents=True, exist_ok=True)
    # Start from the on-disk state so untouched categories are preserved.
    try:
        merged = json.loads(_TEST_JSON.read_text())
    except Exception:
        merged = {}
    merged.update(results)
    merged["_run_at"] = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S")
    _TEST_JSON.write_text(json.dumps(merged, indent=2))
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Entry point: run the Textual app full-screen in the current terminal.
    DolphinTUI().run()
|
||||
958
Observability/TUI/dolphin_tui_v9.py
Executable file
958
Observability/TUI/dolphin_tui_v9.py
Executable file
@@ -0,0 +1,958 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
DOLPHIN TUI v9
|
||||
==============
|
||||
Fixes vs v3:
|
||||
• SYS HEALTH: tdr/scb labels expanded to full service names
|
||||
• ALPHA ENGINE: falls back to engine_snapshot when DOLPHIN_SAFETY is empty
|
||||
• MC-FOREWARNER: graceful "not yet run" display; shows last-run age; no crash on absent key
|
||||
• Version number shown in header after MC status
|
||||
|
||||
Run: source /home/dolphin/siloqy_env/bin/activate && python dolphin_tui_v9.py
|
||||
Keys: q=quit r=force-refresh l=toggle log t=toggle test footer
|
||||
"""
|
||||
|
||||
# ── stdlib ────────────────────────────────────────────────────────────────────
|
||||
import asyncio, json, math, threading, time
|
||||
from collections import deque
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
# ── third-party ───────────────────────────────────────────────────────────────
|
||||
try:
|
||||
from textual.app import App, ComposeResult
|
||||
from textual.containers import Horizontal, Vertical
|
||||
from textual.widgets import Digits, ProgressBar, Sparkline, Static
|
||||
except ImportError as e:
|
||||
raise SystemExit(f"textual not found — activate siloqy_env: {e}")
|
||||
|
||||
try:
|
||||
import hazelcast
|
||||
_HZ_OK = True
|
||||
except ImportError:
|
||||
_HZ_OK = False
|
||||
|
||||
import urllib.request as _urlreq
|
||||
|
||||
_CH_URL = "http://localhost:8123/"
|
||||
_CH_HEADERS = {"X-ClickHouse-User": "dolphin", "X-ClickHouse-Key": "dolphin_ch_2026"}
|
||||
|
||||
def _ch_q(sql: str) -> list:
|
||||
try:
|
||||
body = (sql + "\nFORMAT JSONEachRow").encode()
|
||||
req = _urlreq.Request(_CH_URL, data=body, method="POST")
|
||||
for k, v in _CH_HEADERS.items(): req.add_header(k, v)
|
||||
resp = _urlreq.urlopen(req, timeout=5)
|
||||
return [json.loads(l) for l in resp.read().decode().strip().split("\n") if l]
|
||||
except Exception:
|
||||
return []
|
||||
|
||||
def _start_trades_poll():
    """Background thread: poll CH every 30s for recent trades and AE shadow exits.

    Publishes into the shared state store under "ch.recent_trades" and
    "ch.ae_shadow_exits".  Each query is wrapped in its own try/except so one
    failing table never stops the other, and the daemon thread never dies.
    """
    def _run():
        while True:
            try:
                today = datetime.now(timezone.utc).strftime("%Y-%m-%d")
                rows = _ch_q(f"""
                    SELECT asset, pnl_pct, exit_reason, bars_held, strategy
                    FROM dolphin.trade_events
                    WHERE date = '{today}'
                    ORDER BY ts DESC LIMIT 8
                """)
                _S.put("ch.recent_trades", rows)
            except Exception:
                pass  # best-effort: keep polling even if this cycle fails
            try:
                # CLOSED rows = one row per trade at real close, with actual_exit + p_cont at that moment
                ae_rows = _ch_q(f"""
                    SELECT asset, action, actual_exit, p_cont, mae_norm, mfe_norm, tau_norm, pnl_pct
                    FROM dolphin.adaptive_exit_shadow
                    WHERE action = 'CLOSED' AND ts_day = '{today}'
                    ORDER BY ts DESC LIMIT 8
                """)
                _S.put("ch.ae_shadow_exits", ae_rows)
            except Exception:
                pass
            time.sleep(30)
    threading.Thread(target=_run, daemon=True, name="ch-trades-poll").start()
|
||||
|
||||
def _start_bucket_poll():
    """Background thread: poll CH every 60s for per-bucket trade performance."""
    # The aggregation is static, so the query text is built once up front.
    sql = """
        SELECT
            bucket_id,
            count() AS n,
            countIf(pnl_pct > 0) AS wins,
            avg(pnl_pct) AS avg_pnl
        FROM dolphin.adaptive_exit_shadow
        WHERE action = 'CLOSED'
          AND actual_exit NOT IN ('HIBERNATE_HALT', 'SUBDAY_ACB_NORMALIZATION')
        GROUP BY bucket_id
        ORDER BY bucket_id
    """
    def _poll_forever():
        while True:
            try:
                _S.put("ch.bucket_perf", _ch_q(sql))
            except Exception:
                pass  # best-effort: keep polling even if CH is down
            time.sleep(60)
    threading.Thread(target=_poll_forever, daemon=True, name="ch-bucket-poll").start()
|
||||
|
||||
# Build tag shown in the header and MC panel title.
TUI_VERSION = "TUI v9"

# File fallbacks read when the corresponding Hazelcast keys are empty.
_META_JSON = Path("/mnt/dolphinng5_predict/run_logs/meta_health.json")
_CAPITAL_JSON = Path("/tmp/dolphin_capital_checkpoint.json")
# Path relative to this file: Observability/TUI/ → ../../run_logs/
_TEST_JSON = Path(__file__).parent.parent.parent / "run_logs" / "test_results_latest.json"
# Symbols rendered in the OBF panel, in display order.
_OBF_SYMS = ["BTCUSDT", "ETHUSDT", "SOLUSDT", "BNBUSDT", "XRPUSDT"]

# Rich-markup colour maps: trading posture, MC-forewarner status, health status.
_PC = {"APEX":"green","STALKER":"yellow","TURTLE":"dark_orange","HIBERNATE":"red"}
_MC = {"GREEN":"green","ORANGE":"dark_orange","RED":"red"}
_SC = {"GREEN":"green","DEGRADED":"yellow","CRITICAL":"dark_orange","DEAD":"red"}
|
||||
|
||||
|
||||
# ── Thread-safe state ─────────────────────────────────────────────────────────
class _State:
    """Minimal lock-guarded key/value store shared by all TUI threads."""

    def __init__(self):
        self._l = threading.Lock()
        self._d: Dict[str, Any] = {}

    def put(self, k, v):
        """Store one value under key *k*."""
        with self._l:
            self._d[k] = v

    def get(self, k, default=None):
        """Return the value for *k*, or *default* when absent."""
        with self._l:
            return self._d.get(k, default)

    def update(self, m):
        """Merge mapping *m* into the store under a single lock acquisition."""
        with self._l:
            self._d.update(m)


_S = _State()
|
||||
|
||||
def _ingest(key, raw):
    """Parse *raw* JSON and publish it as ``hz.<key>`` plus a receive timestamp."""
    if not raw:
        return
    try:
        _S.update({f"hz.{key}": json.loads(raw), f"hz.{key}._t": time.time()})
    except Exception:
        # Malformed payloads are dropped silently — this feed is best-effort.
        pass
|
||||
|
||||
def start_hz_listener(on_scan=None):
    """Mirror Hazelcast map entries into _S from a daemon thread.

    Connects to the local 'dolphin' cluster, seeds _S with current values
    from several maps, registers entry listeners for live updates, and
    re-polls key maps every 30s as a safety net.  *on_scan* (optional
    callable) is invoked whenever a new ``latest_eigen_scan`` entry arrives.
    Loops forever, reconnecting 10s after any failure.
    """
    if not _HZ_OK:
        # hazelcast client not importable — mark the link down and bail out.
        _S.put("hz_up", False); return
    def _run():
        while True:
            try:
                _S.put("hz_up", False)
                client = hazelcast.HazelcastClient(
                    cluster_name="dolphin", cluster_members=["localhost:5701"],
                    connection_timeout=5.0)
                _S.put("hz_up", True)
                # DOLPHIN_FEATURES: seed current values, then subscribe.
                fm_nb = client.get_map("DOLPHIN_FEATURES")
                fm = fm_nb.blocking()
                for k in ("latest_eigen_scan","exf_latest","acb_boost",
                          "obf_universe_latest","mc_forewarner_latest"):
                    _ingest(k, fm.get(k))
                def _f(e):
                    _ingest(e.key, e.value)
                    if e.key == "latest_eigen_scan" and on_scan:
                        try: on_scan()
                        except Exception: pass
                fm_nb.add_entry_listener(include_value=True, updated=_f, added=_f)
                # Single-entry maps: (map name, entry key, _S key) triples.
                for map_name, key, state_key in [
                    ("DOLPHIN_META_HEALTH", "latest", "meta_health"),
                    ("DOLPHIN_SAFETY", "latest", "safety"),
                    ("DOLPHIN_HEARTBEAT", "nautilus_flow_heartbeat", "heartbeat"),
                ]:
                    m_nb = client.get_map(map_name)
                    _ingest(state_key, m_nb.blocking().get(key))
                    # Defaults bind the loop variables per iteration — avoids
                    # the late-binding closure pitfall.
                    def _cb(e, sk=state_key, ek=key):
                        if e.key == ek: _ingest(sk, e.value)
                    m_nb.add_entry_listener(include_value=True, updated=_cb, added=_cb)
                # BLUE engine state: capital checkpoint + engine snapshot.
                stm_nb = client.get_map("DOLPHIN_STATE_BLUE")
                stm = stm_nb.blocking()
                _ingest("capital", stm.get("capital_checkpoint"))
                _ingest("engine_snapshot", stm.get("engine_snapshot"))
                def _cap(e):
                    if e.key == "capital_checkpoint": _ingest("capital", e.value)
                    elif e.key == "engine_snapshot": _ingest("engine_snapshot", e.value)
                stm_nb.add_entry_listener(include_value=True, updated=_cap, added=_cap)
                # Session P&L map may be absent on older deployments — best-effort.
                try:
                    pm_nb = client.get_map("DOLPHIN_PNL_BLUE")
                    _ingest("pnl_blue", pm_nb.blocking().get("session_perf"))
                    def _pnl(e):
                        if e.key == "session_perf": _ingest("pnl_blue", e.value)
                    pm_nb.add_entry_listener(include_value=True, updated=_pnl, added=_pnl)
                except Exception: pass
                _poll_ctr = 0
                while True:
                    time.sleep(5)
                    # getattr guard: older client versions may lack is_running.
                    if not getattr(client.lifecycle_service, "is_running", lambda: True)():
                        break
                    # Re-poll key maps every 30s — catches missed listener events
                    # after HZ restarts or reconnects
                    _poll_ctr += 1
                    if _poll_ctr % 6 == 0:
                        try:
                            _ingest("safety", client.get_map("DOLPHIN_SAFETY").blocking().get("latest"))
                            _ingest("capital", client.get_map("DOLPHIN_STATE_BLUE").blocking().get("capital_checkpoint"))
                            _ingest("engine_snapshot", client.get_map("DOLPHIN_STATE_BLUE").blocking().get("engine_snapshot"))
                            _ingest("heartbeat", client.get_map("DOLPHIN_HEARTBEAT").blocking().get("nautilus_flow_heartbeat"))
                        except Exception:
                            pass  # non-fatal — listeners may still work
            except Exception as e:
                # Record the error, flag the link down, retry after back-off.
                _S.put("hz_up", False); _S.put("hz_err", str(e)); time.sleep(10)
    threading.Thread(target=_run, daemon=True, name="hz-listener").start()
|
||||
|
||||
async def prefect_poll_loop():
    """Poll the Prefect API every 60s and publish recent flow-run rows to _S."""
    while True:
        try:
            # Imported lazily so the TUI still starts when prefect is absent.
            from prefect.client.orchestration import get_client
            from prefect.client.schemas.sorting import FlowRunSort
            async with get_client() as pc:
                runs = await pc.read_flow_runs(limit=20, sort=FlowRunSort.START_TIME_DESC)
                # Runs arrive newest-first: keep only the newest run per flow.
                latest: Dict[str, Any] = {}
                for run in runs:
                    latest.setdefault(str(run.flow_id), run)
                names: Dict[str, str] = {}
                rows = []
                for fid, run in latest.items():
                    if fid not in names:
                        try:
                            names[fid] = (await pc.read_flow(run.flow_id)).name
                        except Exception:
                            # Fall back to a truncated id when lookup fails.
                            names[fid] = fid[:8]
                    rows.append({
                        "name": names[fid],
                        "state": run.state_name or "?",
                        "ts": run.start_time.strftime("%m-%d %H:%M") if run.start_time else "--",
                    })
                _S.put("prefect_flows", rows[:8])
                _S.put("prefect_ok", True)
        except Exception as e:
            _S.put("prefect_ok", False)
            _S.put("prefect_err", str(e)[:60])
        await asyncio.sleep(60)
|
||||
|
||||
# ── Helpers ───────────────────────────────────────────────────────────────────
def _age(ts):
    """Render seconds-since-*ts* compactly: '42s', '5m', '1.2h'; '?' when unset."""
    if not ts:
        return "?"
    elapsed = time.time() - ts
    if elapsed < 0:
        return "0s"  # clock skew — clamp future timestamps
    if elapsed < 60:
        return f"{elapsed:.0f}s"
    return f"{elapsed/60:.0f}m" if elapsed < 3600 else f"{elapsed/3600:.1f}h"
|
||||
|
||||
def _age_col(ts, warn=15, dead=60):
    """Colour for a timestamp's staleness: green ≤ warn < yellow ≤ dead < red."""
    staleness = time.time() - ts if ts else 9999  # missing ts counts as dead
    if staleness > dead:
        return "red"
    return "yellow" if staleness > warn else "green"
|
||||
|
||||
def _bar(v, width=12):
    """Unicode gauge: *v* in [0, 1] drawn as filled/empty blocks of *width* cells."""
    clamped = min(1.0, max(0.0, v))
    filled = round(clamped * width)
    return "█" * filled + "░" * (width - filled)
|
||||
|
||||
def _fmt_vel(v):
    """Signed five-decimal velocity string, or '---' when the value is missing."""
    if v is None:
        return "---"
    return f"{float(v):+.5f}"
|
||||
|
||||
def _dot(state):
    """Coloured status dot (Rich markup) for a Prefect flow-run state name."""
    colors = {
        "COMPLETED": "green",
        "RUNNING": "cyan",
        "FAILED": "red", "CRASHED": "red", "TIMEDOUT": "red",
        "CANCELLED": "dim",
        "PENDING": "yellow",
    }
    c = colors.get((state or "").upper())
    # Unknown states render as a hollow dim circle.
    return f"[{c}]●[/{c}]" if c else "[dim]◌[/dim]"
|
||||
|
||||
def _posture_markup(p):
    """Wrap posture *p* in its colour tag from _PC ('dim' when unknown)."""
    colour = _PC.get(p, "dim")
    return f"[{colour}]{p}[/{colour}]"
|
||||
|
||||
def _col(v, c):
    """Wrap *v* in Rich colour tags for colour *c*."""
    return "[{0}]{1}[/{0}]".format(c, v)
|
||||
|
||||
def _eigen_from_scan(scan):
    """Flatten a raw eigen-scan payload into the fields the TUI panels use.

    Accepts either the wrapped form ``{"result": {...}}`` or a bare result
    dict.  Per-window velocities fall back to flat ``w*_velocity`` keys when
    tracking data is absent; note the ``or`` chains also treat an exact 0.0
    as "missing" and fall through.  Returns {} for a falsy *scan*.
    """
    if not scan: return {}
    r = scan.get("result", scan)
    mwr_raw = r.get("multi_window_results", {})
    # Producers key windows as ints or strings — try both spellings.
    def _td(w): return (mwr_raw.get(w) or mwr_raw.get(str(w)) or {}).get("tracking_data", {})
    def _rs(w): return (mwr_raw.get(w) or mwr_raw.get(str(w)) or {}).get("regime_signals", {})
    v50 = _td(50).get("lambda_max_velocity") or scan.get("w50_velocity", 0.0)
    v150 = _td(150).get("lambda_max_velocity") or scan.get("w150_velocity", 0.0)
    v300 = _td(300).get("lambda_max_velocity") or scan.get("w300_velocity", 0.0)
    v750 = _td(750).get("lambda_max_velocity") or scan.get("w750_velocity", 0.0)
    # Divergence = fast (w50) minus slow (w150) unless precomputed upstream.
    vel_div = scan.get("vel_div", float(v50 or 0) - float(v150 or 0))
    # Mean instability across the four windows (missing windows count as 0).
    inst_avg = sum(_rs(w).get("instability_score", 0.0) for w in (50,150,300,750)) / 4
    bt_price = (r.get("pricing_data", {}) or {}).get("current_prices", {}).get("BTCUSDT")
    return {
        "scan_number": scan.get("scan_number", 0),
        "timestamp": scan.get("timestamp", 0),
        "vel_div": float(vel_div or 0),
        "v50": float(v50 or 0), "v150": float(v150 or 0),
        "v300": float(v300 or 0), "v750": float(v750 or 0),
        "inst_avg": float(inst_avg or 0),
        "btc_price": float(bt_price) if bt_price else None,
        "regime": r.get("regime", r.get("sentiment", "?")),
        "version": scan.get("version", "?"),
    }
|
||||
|
||||
# ── CSS ───────────────────────────────────────────────────────────────────────
# Textual stylesheet for the whole app: fixed-height rows, the MC-forewarner
# panel layout (#mc_*), footers, and ProgressBar colour variants
# (-warning / -danger classes toggled from _update()).
_CSS = """
Screen { background: #0a0a0a; color: #d4d4d4; }
#header { height: 2; background: #111; border-bottom: solid #333; padding: 0 1; }
#trader_row { height: 5; }
#top_row { height: 9; }
#mid_row { height: 9; }
#bot_row { height: 7; }
#log_row { height: 5; display: none; }
#mc_outer { height: 16; border: solid #224; background: #060616; }
#mc_title { height: 1; padding: 0 1; }
#mc_body { height: 15; }
#mc_left { width: 18; padding: 0 1; }
#mc_center { width: 1fr; padding: 0 1; }
#mc_right { width: 30; padding: 0 1; }
#mc_prob_label { height: 1; }
#mc_prob_bar { height: 1; }
#mc_env_label { height: 1; }
#mc_env_bar { height: 1; }
#mc_champ_label{ height: 1; }
#mc_champ_bar { height: 1; }
#mc_live { height: 8; }
#mc_spark_lbl { height: 1; }
#mc_spark { height: 2; }
#mc_mae_lbl { height: 1; }
#mc_mae_spark { height: 2; }
#mc_digits { height: 3; }
#mc_status { height: 3; }
#mc_legend { height: 6; }
#trades_footer { height: 5; background: #060a10; border-top: solid #003820; padding: 0 1; }
#bucket_footer { height: 5; background: #080810; border-top: solid #002040; padding: 0 1; }
#test_footer { height: 3; background: #101010; border-top: solid #2a2a2a; padding: 0 1; }
Static.panel { border: solid #333; padding: 0 1; height: 100%; }
#p_trader { width: 1fr; border: solid #006650; }
#p_health { width: 1fr; }
#p_alpha { width: 1fr; }
#p_scan { width: 1fr; }
#p_extf { width: 1fr; }
#p_obf { width: 1fr; }
#p_capital { width: 1fr; }
#p_prefect { width: 1fr; }
#p_acb { width: 1fr; }
#p_log { width: 1fr; border: solid #333; padding: 0 1; }
ProgressBar > .bar--bar { color: $success; }
ProgressBar > .bar--complete { color: $success; }
ProgressBar.-danger > .bar--bar { color: $error; }
ProgressBar.-warning > .bar--bar { color: $warning; }
"""
|
||||
|
||||
|
||||
# ── App ───────────────────────────────────────────────────────────────────────
class DolphinTUI(App):
    """Read-only Textual dashboard over the DOLPHIN trading stack state."""

    CSS = _CSS
    # Key bindings; action names map to App action_* handlers.
    BINDINGS = [("q","quit","Quit"),("r","force_refresh","Refresh"),
                ("l","toggle_log","Log"),("t","toggle_tests","Tests")]
    # Visibility toggles for the optional log row and the test footer.
    _log_vis = False; _test_vis = True
    # Rolling histories: cat.prob sparkline values and MAE samples (set in on_mount).
    _prob_hist: deque; _mae_deque: deque
    # Session capital baseline and peak, lazily set once capital data arrives.
    _session_start_cap: Optional[float] = None
    _cap_peak: Optional[float] = None
|
||||
    def compose(self) -> ComposeResult:
        """Declare the static widget tree; _update() fills every widget's content."""
        yield Static("", id="header")
        # Full-width trader summary strip.
        with Horizontal(id="trader_row"):
            yield Static("", classes="panel", id="p_trader")
        # Row 1: system health / alpha engine / eigen scan.
        with Horizontal(id="top_row"):
            yield Static("", classes="panel", id="p_health")
            yield Static("", classes="panel", id="p_alpha")
            yield Static("", classes="panel", id="p_scan")
        # Row 2: external factors / order-book features / capital.
        with Horizontal(id="mid_row"):
            yield Static("", classes="panel", id="p_extf")
            yield Static("", classes="panel", id="p_obf")
            yield Static("", classes="panel", id="p_capital")
        # Row 3: Prefect flows / ACB boost.
        with Horizontal(id="bot_row"):
            yield Static("", classes="panel", id="p_prefect")
            yield Static("", classes="panel", id="p_acb")
        # MC-forewarner manifold: digits+status | bars+live perf | sparklines+legend.
        with Vertical(id="mc_outer"):
            yield Static("", id="mc_title")
            with Horizontal(id="mc_body"):
                with Vertical(id="mc_left"):
                    yield Digits("0.000", id="mc_digits")
                    yield Static("", id="mc_status")
                with Vertical(id="mc_center"):
                    yield Static("", id="mc_prob_label")
                    yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_prob_bar")
                    yield Static("", id="mc_env_label")
                    yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_env_bar")
                    yield Static("", id="mc_champ_label")
                    yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_champ_bar")
                    yield Static("", id="mc_live")
                with Vertical(id="mc_right"):
                    yield Static("", id="mc_spark_lbl")
                    yield Sparkline([], id="mc_spark")
                    yield Static("", id="mc_mae_lbl")
                    yield Sparkline([], id="mc_mae_spark")
                    yield Static("", id="mc_legend")
        # Footers: recent trades, per-bucket performance, latest test results.
        yield Static("", id="trades_footer")
        yield Static("", id="bucket_footer")
        yield Static("", id="test_footer")
        # Hidden by default (CSS display: none); toggled with the 'l' binding.
        with Horizontal(id="log_row"):
            yield Static("", classes="panel", id="p_log")
|
||||
|
||||
    def on_mount(self) -> None:
        """Initialise histories, start all data feeds, and begin the 1 Hz refresh."""
        # Fixed-width cat.prob history pre-filled with zeros for the sparkline.
        self._prob_hist = deque([0.0] * 40, maxlen=40)
        self._mae_deque = deque(maxlen=500)
        # The HZ listener thread pushes a refresh into the UI thread per scan.
        start_hz_listener(on_scan=lambda: self.call_from_thread(self._update))
        self.run_worker(prefect_poll_loop(), name="prefect-poll", exclusive=True)
        _start_trades_poll()
        _start_bucket_poll()
        self.set_interval(1.0, self._update)
        self._update()  # paint immediately rather than waiting one interval
|
||||
|
||||
def _update(self) -> None:
|
||||
now_str = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
|
||||
hz_up = _S.get("hz_up", False)
|
||||
mh = _S.get("hz.meta_health") or self._read_json(_META_JSON)
|
||||
safe = _S.get("hz.safety") or {}
|
||||
scan = _S.get("hz.latest_eigen_scan") or {}
|
||||
exf = _S.get("hz.exf_latest") or {}
|
||||
acb = _S.get("hz.acb_boost") or {}
|
||||
obf_u = _S.get("hz.obf_universe_latest") or {}
|
||||
mc = _S.get("hz.mc_forewarner_latest") or {}
|
||||
cap = _S.get("hz.capital") or self._read_json(_CAPITAL_JSON) or {}
|
||||
hb = _S.get("hz.heartbeat") or {}
|
||||
eng = _S.get("hz.engine_snapshot") or {}
|
||||
eigen = _eigen_from_scan(scan)
|
||||
|
||||
# ── posture: DOLPHIN_SAFETY first, fall back to engine_snapshot ───────
|
||||
posture = safe.get("posture") or eng.get("posture") or "?"
|
||||
# ── Rm / breakdown: DOLPHIN_SAFETY first, fall back to zeros ─────────
|
||||
rm_s = float(safe.get("Rm", 0.0))
|
||||
bd = safe.get("breakdown") or {}
|
||||
# If DOLPHIN_SAFETY is empty but engine_snapshot has posture, show that
|
||||
safety_live = bool(safe.get("posture") or safe.get("Rm"))
|
||||
|
||||
rm_m = mh.get("rm_meta", 0.0) if mh else 0.0
|
||||
mhs_st = mh.get("status", "?") if mh else "?"
|
||||
sc_mhs = _SC.get(mhs_st, "dim")
|
||||
pc_col = _PC.get(posture, "dim")
|
||||
hz_tag = "[green][HZ✓][/green]" if hz_up else "[red][HZ✗][/red]"
|
||||
mc_st = mc.get("status", "N/A") if mc else "N/A"
|
||||
mc_col = _MC.get(mc_st, "dim")
|
||||
|
||||
# ── HEADER ────────────────────────────────────────────────────────────
|
||||
self._w("#header").update(
|
||||
f"[bold cyan]🐬 DOLPHIN-NAUTILUS[/bold cyan] {now_str}"
|
||||
f" {hz_tag} [{sc_mhs}]MHS:{mhs_st} {rm_m:.3f}[/{sc_mhs}]"
|
||||
f" [{pc_col}]◈{posture}[/{pc_col}]"
|
||||
f" [{mc_col}]MC:{mc_st}[/{mc_col}]"
|
||||
f" [dim]{TUI_VERSION}[/dim]\n"
|
||||
f"[dim] localhost:5701 q=quit r=refresh l=log t=tests[/dim]"
|
||||
)
|
||||
|
||||
# ── TRADER ────────────────────────────────────────────────────────────
|
||||
cap_val = float(cap.get("capital", 0)) if cap else 0.0
|
||||
hb_phase = hb.get("phase", "?") if hb else "N/A"
|
||||
hb_ts = hb.get("ts") if hb else None
|
||||
hb_age = _age(hb_ts) if hb_ts else "?"
|
||||
hb_col = _age_col(hb_ts, 30, 120) if hb_ts else "red"
|
||||
vel_div = eigen.get("vel_div", 0.0)
|
||||
vc = "green" if vel_div > 0 else ("red" if vel_div < -0.02 else "yellow")
|
||||
scan_no = eigen.get("scan_number", 0)
|
||||
btc_p = eigen.get("btc_price")
|
||||
btc_str = f"BTC:[cyan]${btc_p:,.0f}[/cyan] " if btc_p else ""
|
||||
trades_ex= eng.get("trades_executed")
|
||||
last_vd = eng.get("last_vel_div")
|
||||
self._w("#p_trader").update(
|
||||
f"[bold cyan]TRADER[/bold cyan] {_posture_markup(posture)}"
|
||||
f" phase:[{hb_col}]{hb_phase}[/{hb_col}] hb:{_col(hb_age, hb_col)}"
|
||||
f" scan:[dim]#{scan_no}[/dim] {btc_str}\n"
|
||||
f" vel_div:[{vc}]{vel_div:+.5f}[/{vc}] thr:[dim]-0.02000[/dim]"
|
||||
f" cap:[cyan]${cap_val:,.0f}[/cyan]"
|
||||
f" trades:{trades_ex if trades_ex is not None else '—'}\n"
|
||||
f" last_vel_div:[dim]{last_vd:+.5f}[/dim] open-pos:[dim]DOLPHIN_PNL_BLUE[/dim]"
|
||||
if last_vd is not None else
|
||||
f" vel_div:[{vc}]{vel_div:+.5f}[/{vc}] thr:[dim]-0.02000[/dim]"
|
||||
f" cap:[cyan]${cap_val:,.0f}[/cyan]"
|
||||
f" trades:{trades_ex if trades_ex is not None else '—'}\n"
|
||||
f" open-pos:[dim]DOLPHIN_PNL_BLUE (not yet wired)[/dim]"
|
||||
)
|
||||
|
||||
# ── SYS HEALTH — FIX: expand tdr/scb labels ──────────────────────────
|
||||
if mh:
|
||||
svc = mh.get("service_status", {})
|
||||
hz_ks= mh.get("hz_key_status", {})
|
||||
def _svc(nm, label):
|
||||
st = svc.get(nm, "?")
|
||||
dot = "[green]●[/green]" if st == "RUNNING" else "[red]●[/red]"
|
||||
return f"{dot}[dim]{label}[/dim]"
|
||||
def _hz_dot(nm):
|
||||
sc = hz_ks.get(nm, {}).get("score", 0)
|
||||
return "[green]●[/green]" if sc >= 0.9 else ("[yellow]●[/yellow]" if sc >= 0.5 else "[red]●[/red]")
|
||||
self._w("#p_health").update(
|
||||
f"[bold]SYS HEALTH[/bold] [{sc_mhs}]{mhs_st}[/{sc_mhs}]\n"
|
||||
f"rm:[{sc_mhs}]{rm_m:.3f}[/{sc_mhs}]"
|
||||
f" m4:{mh.get('m4_control_plane',0):.2f}"
|
||||
f" m3:{mh.get('m3_data_freshness',0):.2f}"
|
||||
f" m5:{mh.get('m5_coherence',0):.2f}\n"
|
||||
f"{_svc('dolphin_data:exf_fetcher','exf')}"
|
||||
f" {_svc('dolphin_data:acb_processor','acb')}"
|
||||
f" {_svc('dolphin_data:obf_universe','obf')}\n"
|
||||
f"{_svc('dolphin:nautilus_trader','trader')}"
|
||||
f" {_svc('dolphin:scan_bridge','scan-bridge')}\n"
|
||||
f"[dim]hz: exf{_hz_dot('exf_latest')}"
|
||||
f" scan{_hz_dot('latest_eigen_scan')}"
|
||||
f" obf{_hz_dot('obf_universe')}[/dim]"
|
||||
)
|
||||
else:
|
||||
self._w("#p_health").update("[bold]SYS HEALTH[/bold]\n[dim]awaiting MHS…[/dim]")
|
||||
|
||||
# ── ALPHA ENGINE — FIX: fall back to engine_snapshot when safety empty ─
|
||||
safe_ts = _S.get("hz.safety._t")
|
||||
safe_age = _age(safe_ts) if safe_ts else "?"
|
||||
safe_ac = _age_col(safe_ts, 30, 120) if safe_ts else "red"
|
||||
def _cat(n):
|
||||
v = bd.get(f"Cat{n}", 0.0)
|
||||
c = "green" if v >= 0.9 else ("yellow" if v >= 0.6 else "red")
|
||||
return f"[{c}]{v:.2f}[/{c}]"
|
||||
if safety_live:
|
||||
self._w("#p_alpha").update(
|
||||
f"[bold]ALPHA ENGINE[/bold] {_posture_markup(posture)}\n"
|
||||
f"Rm:[{pc_col}]{_bar(rm_s,14)}[/{pc_col}]{rm_s:.3f}\n"
|
||||
f"C1:{_cat(1)} C2:{_cat(2)} C3:{_cat(3)}\n"
|
||||
f"C4:{_cat(4)} C5:{_cat(5)}"
|
||||
f" fenv:{bd.get('f_env',0):.2f} fex:{bd.get('f_exe',0):.2f}\n"
|
||||
f"[dim]age:{_col(safe_age, safe_ac)}[/dim]"
|
||||
)
|
||||
else:
|
||||
# DOLPHIN_SAFETY empty — show what we have from engine_snapshot
|
||||
bars_idx = eng.get("bar_idx", "?")
|
||||
scans_p = eng.get("scans_processed", "?")
|
||||
self._w("#p_alpha").update(
|
||||
f"[bold]ALPHA ENGINE[/bold] {_posture_markup(posture)}\n"
|
||||
f"[yellow]DOLPHIN_SAFETY empty[/yellow]\n"
|
||||
f"posture from engine_snapshot\n"
|
||||
f"bar:{bars_idx} scans:{scans_p}\n"
|
||||
f"[dim]Rm/Cat1-5: awaiting DOLPHIN_SAFETY[/dim]"
|
||||
)
|
||||
|
||||
# ── SCAN ──────────────────────────────────────────────────────────────
|
||||
scan_ac = _age_col(eigen.get("timestamp", 0), 15, 60)
|
||||
scan_age= _age(eigen.get("timestamp")) if eigen.get("timestamp") else "?"
|
||||
vi = eigen.get("inst_avg", 0)
|
||||
# Extra fields from result dict
|
||||
r_dict = scan.get("result", scan)
|
||||
regime = eigen.get("regime", r_dict.get("regime", "?"))
|
||||
bull_pct = r_dict.get("bull_pct", 0.0)
|
||||
bear_pct = r_dict.get("bear_pct", 0.0)
|
||||
conf = r_dict.get("confidence", 0.0)
|
||||
bb_dist = r_dict.get("bb_dist_pct", 0.0)
|
||||
price = r_dict.get("price", eigen.get("btc_price"))
|
||||
reg_c = "green" if regime == "BULL" else ("red" if regime == "BEAR" else "yellow")
|
||||
bb_c = "red" if abs(bb_dist) > 0.05 else ("yellow" if abs(bb_dist) > 0.02 else "green")
|
||||
self._w("#p_scan").update(
|
||||
f"[bold]SCAN {eigen.get('version','?')}[/bold]"
|
||||
f" [dim]#{scan_no}[/dim] [{scan_ac}]{scan_age}[/{scan_ac}]\n"
|
||||
f"vel:[{vc}]{vel_div:+.5f}[/{vc}]"
|
||||
f" w50:[yellow]{_fmt_vel(eigen.get('v50'))}[/yellow]"
|
||||
f" w150:[dim]{_fmt_vel(eigen.get('v150'))}[/dim]\n"
|
||||
f"w300:[dim]{_fmt_vel(eigen.get('v300'))}[/dim]"
|
||||
f" w750:[dim]{_fmt_vel(eigen.get('v750'))}[/dim]\n"
|
||||
f"[{reg_c}]{regime}[/{reg_c}] B:{bull_pct:.0f}% b:{bear_pct:.0f}%"
|
||||
f" conf:{conf:.2f} inst:{vi:.4f}\n"
|
||||
f"BB:[{bb_c}]{bb_dist:+.4f}[/{bb_c}]"
|
||||
+ (f" ${price:,.0f}" if price else "")
|
||||
)
|
||||
|
||||
# ── ExtF ──────────────────────────────────────────────────────────────
|
||||
exf_t = _S.get("hz.exf_latest._t")
|
||||
exf_age = _age(exf_t) if exf_t else "?"
|
||||
exf_ac = _age_col(exf_t, 30, 120) if exf_t else "red"
|
||||
f_btc = exf.get("funding_btc"); dvol = exf.get("dvol_btc")
|
||||
fng = exf.get("fng"); taker = exf.get("taker")
|
||||
ls_btc = exf.get("ls_btc"); vix = exf.get("vix")
|
||||
f_eth = exf.get("funding_eth"); ls_eth = exf.get("ls_eth")
|
||||
oi_btc = exf.get("oi_btc"); oi_eth = exf.get("oi_eth")
|
||||
fdb_btc = exf.get("fund_dbt_btc")
|
||||
ok_cnt = exf.get("_ok_count", 0)
|
||||
dvol_c = "red" if dvol and dvol > 70 else ("yellow" if dvol and dvol > 50 else "green")
|
||||
fng_c = "red" if fng and fng < 25 else ("yellow" if fng and fng < 45 else "green")
|
||||
taker_c = "red" if taker and taker > 0.7 else ("yellow" if taker and taker > 0.55 else "green")
|
||||
def _ef(v, fmt=".5f"): return f"{v:{fmt}}" if v is not None else "?"
|
||||
if exf:
|
||||
self._w("#p_extf").update(
|
||||
f"[bold]ExtF[/bold] [{exf_ac}]{ok_cnt}/9 {exf_age}[/{exf_ac}]\n"
|
||||
f"fBTC:[cyan]{_ef(f_btc)}[/cyan] fETH:[dim]{_ef(f_eth)}[/dim]"
|
||||
f" dvol:[{dvol_c}]{_ef(dvol,'.1f')}[/{dvol_c}]\n"
|
||||
f"fng:[{fng_c}]{int(fng) if fng else '?'}[/{fng_c}]"
|
||||
f" tkr:[{taker_c}]{_ef(taker,'.3f')}[/{taker_c}]"
|
||||
f" vix:{_ef(vix,'.1f')}\n"
|
||||
f"ls_b:{_ef(ls_btc,'.3f')} ls_e:{_ef(ls_eth,'.3f')}"
|
||||
f" fdb:{_ef(fdb_btc,'.5f')}\n"
|
||||
f"oi_b:{_ef(oi_btc,'.0f')} oi_e:{_ef(oi_eth,'.0f')}"
|
||||
f" acb✓:{exf.get('_acb_ready','?')}"
|
||||
)
|
||||
else:
|
||||
self._w("#p_extf").update("[bold]ExtF[/bold]\n[dim]no data[/dim]")
|
||||
|
||||
# ── OBF ───────────────────────────────────────────────────────────────
|
||||
obf_t = _S.get("hz.obf_universe_latest._t")
|
||||
obf_age = _age(obf_t) if obf_t else "?"
|
||||
obf_ac = _age_col(obf_t, 30, 120) if obf_t else "red"
|
||||
n_assets= obf_u.get("_n_assets", 0) if obf_u else 0
|
||||
lines = [f"[bold]OBF[/bold] [{obf_ac}]n={n_assets} {obf_age}[/{obf_ac}]"]
|
||||
for sym in _OBF_SYMS:
|
||||
if not obf_u: break
|
||||
a = obf_u.get(sym)
|
||||
if not a: continue
|
||||
imb = float(a.get("imbalance", 0))
|
||||
fp = float(a.get("fill_probability", 0))
|
||||
dq = float(a.get("depth_quality", 0))
|
||||
imb_c = "green" if imb > 0.1 else ("red" if imb < -0.1 else "yellow")
|
||||
lines.append(f"{sym[:3]} [{imb_c}]{imb:+.2f}[/{imb_c}] fp:{fp:.2f} dq:{dq:.2f}")
|
||||
self._w("#p_obf").update("\n".join(lines[:6]))
|
||||
|
||||
# ── CAPITAL — sourced from engine_snapshot + capital_checkpoint ─────────
|
||||
# engine_snapshot: capital, posture, trades_executed, scans_processed,
|
||||
# bar_idx, open_notional, current_leverage,
|
||||
# leverage_soft_cap, leverage_abs_cap, timestamp
|
||||
# capital_checkpoint: capital, ts (written more frequently)
|
||||
cap_t = _S.get("hz.capital._t")
|
||||
cap_ac = _age_col(cap_t, 60, 300) if cap_t else "dim"
|
||||
cap_age = _age(cap_t) if cap_t else "?"
|
||||
eng_ts = eng.get("timestamp")
|
||||
|
||||
# Capital: prefer engine_snapshot (most recent), fall back to checkpoint
|
||||
eng_cap = float(eng.get("capital", 0.0)) if eng else 0.0
|
||||
chk_cap = float(cap.get("capital", 0.0)) if cap else 0.0
|
||||
live_cap = eng_cap if eng_cap > 0 else chk_cap
|
||||
|
||||
# Leverage
|
||||
cur_lev = float(eng.get("current_leverage", 0.0)) if eng else 0.0
|
||||
soft_cap = float(eng.get("leverage_soft_cap", 8.0)) if eng else 8.0
|
||||
abs_cap = float(eng.get("leverage_abs_cap", 9.0)) if eng else 9.0
|
||||
open_not = float(eng.get("open_notional", 0.0)) if eng else 0.0
|
||||
lev_c = "green" if cur_lev < 3 else ("yellow" if cur_lev < soft_cap else "red")
|
||||
|
||||
# Trades / scans
|
||||
trades_ex = eng.get("trades_executed") if eng else None
|
||||
scans_p = eng.get("scans_processed") if eng else None
|
||||
bar_idx = eng.get("bar_idx") if eng else None
|
||||
|
||||
# Drawdown from Cat5 (safety breakdown)
|
||||
c5 = bd.get("Cat5", 1.0) if bd else 1.0
|
||||
try:
|
||||
dd_est = 0.12 + math.log(1.0/c5 - 1.0) / 30.0 if 0 < c5 < 1 else 0.0
|
||||
except Exception:
|
||||
dd_est = 0.0
|
||||
dd_c = "red" if dd_est > 0.15 else ("yellow" if dd_est > 0.08 else "green")
|
||||
|
||||
# Trader up/down: heartbeat age < 30s = up
|
||||
trader_up = hb_ts and (time.time() - hb_ts) < 30
|
||||
trader_tag = "[green]● LIVE[/green]" if trader_up else "[red]● DOWN[/red]"
|
||||
|
||||
# Lev bar (compact, 16 chars)
|
||||
lev_bar = _bar(min(cur_lev / abs_cap, 1.0), 14)
|
||||
|
||||
self._w("#p_capital").update(
|
||||
f"[bold]CAPITAL[/bold] {trader_tag} [{cap_ac}]{cap_age}[/{cap_ac}]\n"
|
||||
f"Cap:[cyan]${live_cap:,.0f}[/cyan]"
|
||||
f" DD≈:[{dd_c}]{dd_est*100:.1f}%[/{dd_c}]\n"
|
||||
f"Pos:{_posture_markup(posture)}"
|
||||
f" Rm:[{_PC.get(posture,'dim')}]{rm_s:.3f}[/{_PC.get(posture,'dim')}]\n"
|
||||
f"Lev:[{lev_c}]{lev_bar}[/{lev_c}]{cur_lev:.2f}x"
|
||||
f" notional:${open_not:,.0f}\n"
|
||||
f"[dim]trades:{trades_ex if trades_ex is not None else '—'}"
|
||||
f" scans:{scans_p if scans_p is not None else '—'}"
|
||||
f" bar:{bar_idx if bar_idx is not None else '—'}[/dim]"
|
||||
)
|
||||
|
||||
# ── PREFECT ───────────────────────────────────────────────────────────
|
||||
flows = _S.get("prefect_flows") or []
|
||||
pf_ok = _S.get("prefect_ok", False)
|
||||
pf_hdr = "[green]✓[/green]" if pf_ok else "[red]✗[/red]"
|
||||
flines = "\n".join(
|
||||
f"{_dot(f['state'])} {f['name'][:22]:<22} {f['ts']}" for f in flows[:5])
|
||||
self._w("#p_prefect").update(
|
||||
f"[bold]PREFECT[/bold] {pf_hdr}\n" + (flines or "[dim]polling…[/dim]"))
|
||||
|
||||
# ── ACB ───────────────────────────────────────────────────────────────
|
||||
acb_t = _S.get("hz.acb_boost._t")
|
||||
acb_age = _age(acb_t) if acb_t else "?"
|
||||
acb_ac = _age_col(acb_t, 3600, 86400) if acb_t else "dim"
|
||||
boost = acb.get("boost", 1.0) if acb else 1.0
|
||||
beta = acb.get("beta", 0.8) if acb else 0.8
|
||||
cut = acb.get("cut", 0.0) if acb else 0.0
|
||||
boost_c = "green" if boost >= 1.5 else ("yellow" if boost >= 1.0 else "red")
|
||||
cut_c = "red" if cut > 0 else "dim"
|
||||
self._w("#p_acb").update(
|
||||
f"[bold]ACB[/bold] [{acb_ac}]{acb.get('date','?') if acb else '?'}[/{acb_ac}]\n"
|
||||
f"boost:[{boost_c}]{boost:.2f}x[/{boost_c}] β={beta:.2f}"
|
||||
f" cut:[{cut_c}]{cut:.2f}[/{cut_c}]\n"
|
||||
f"w750_thr:{acb.get('w750_threshold',0.0) if acb else 0.0:.4f}\n"
|
||||
f"[dim]dvol={acb.get('factors',{}).get('dvol_btc','?') if acb else '?'}"
|
||||
f" fng={acb.get('factors',{}).get('fng','?') if acb else '?'}[/dim]\n"
|
||||
f"[dim]age:{acb_age} cfg:{acb.get('config_used','?') if acb else '?'}[/dim]"
|
||||
)
|
||||
|
||||
# ── MC-FOREWARNER — FIX: graceful when absent ─────────────────────────
|
||||
prob = float(mc.get("catastrophic_prob", 0.0)) if mc else 0.0
|
||||
env = float(mc.get("envelope_score", 0.0)) if mc else 0.0
|
||||
champ_p = mc.get("champion_probability") if mc else None
|
||||
mc_ts = mc.get("timestamp") if mc else None
|
||||
mc_warns = mc.get("warnings", []) if mc else []
|
||||
sc = _MC.get(mc_st, "dim")
|
||||
self._prob_hist.append(prob)
|
||||
|
||||
# Age since last 4h run
|
||||
mc_age_str = "never run"
|
||||
if mc_ts:
|
||||
try:
|
||||
mc_dt = datetime.fromisoformat(mc_ts.replace("Z", "+00:00"))
|
||||
age_s = (datetime.now(timezone.utc) - mc_dt).total_seconds()
|
||||
age_m = int(age_s // 60)
|
||||
mc_age_str = f"{age_m//60}h{age_m%60:02d}m ago" if age_m >= 60 else f"{age_m}m ago"
|
||||
except Exception: pass
|
||||
|
||||
mc_present = bool(mc)
|
||||
self._w("#mc_title").update(
|
||||
f"[bold cyan]⚡ MC-FOREWARNER RISK MANIFOLD[/bold cyan] {TUI_VERSION}"
|
||||
+ (f" [{sc}]▶ {mc_st}[/{sc}] [dim]assessed:{mc_age_str} cadence:4h[/dim]"
|
||||
if mc_present else
|
||||
" [yellow]⚠ no data yet — Prefect 4h schedule not yet run[/yellow]"
|
||||
" [dim](mc_forewarner_flow runs every 4h)[/dim]")
|
||||
)
|
||||
|
||||
# Left: digits + status
|
||||
self._w("#mc_digits", Digits).update(f"{prob:.3f}")
|
||||
status_str = {"GREEN":"🟢 SAFE","ORANGE":"🟡 CAUTION","RED":"🔴 DANGER"}.get(mc_st, "⚪ N/A")
|
||||
champ_str = f"champ:{champ_p*100:.0f}%" if champ_p is not None else "champ:—"
|
||||
self._w("#mc_status").update(
|
||||
(f"[{sc}]{status_str}[/{sc}]\n[dim]cat.prob {champ_str}[/dim]\n[dim]env:{env:.3f}[/dim]")
|
||||
if mc_present else
|
||||
"[yellow]awaiting[/yellow]\n[dim]first run[/dim]\n[dim]in ~4h[/dim]"
|
||||
)
|
||||
|
||||
# Center bars
|
||||
for bar_id, val, lo_thr, hi_thr, lo_cls, hi_cls, label, fmt in [
|
||||
("mc_prob_bar", prob, 0.10, 0.30, "-warning", "-danger",
|
||||
f"[dim]cat.prob[/dim] [{sc}]{prob:.4f}[/{sc}] [green]<0.10 OK[/green] [yellow]<0.30 WARN[/yellow] [red]≥0.30 CRIT[/red]",
|
||||
int(prob * 100)),
|
||||
("mc_env_bar", env, 0.40, 0.70, "-danger", "-warning",
|
||||
f"[dim]env.score[/dim] [green]{env:.4f}[/green] [red]<0.40 DANGER[/red] [yellow]<0.70 CAUTION[/yellow] [green]≥0.70 SAFE[/green]",
|
||||
int(env * 100)),
|
||||
]:
|
||||
pb = self._w(f"#{bar_id}", ProgressBar)
|
||||
pb.progress = fmt
|
||||
pb.remove_class("-danger", "-warning")
|
||||
if val < lo_thr: pb.add_class(lo_cls)
|
||||
elif val < hi_thr: pb.add_class(hi_cls)
|
||||
self._w(f"#{bar_id.replace('_bar','_label')}").update(label)
|
||||
|
||||
# champion_probability bar
|
||||
chp_val = champ_p if champ_p is not None else 0.0
|
||||
cb = self._w("#mc_champ_bar", ProgressBar)
|
||||
cb.progress = int(chp_val * 100)
|
||||
cb.remove_class("-danger", "-warning")
|
||||
if chp_val < 0.30: cb.add_class("-danger")
|
||||
elif chp_val < 0.60: cb.add_class("-warning")
|
||||
self._w("#mc_champ_label").update(
|
||||
f"[dim]champ.prob[/dim] "
|
||||
+ (f"[green]{champ_p*100:.1f}%[/green]" if champ_p is not None else "[dim]—[/dim]")
|
||||
+ " [green]>60% GOOD[/green] [yellow]>30% MARGINAL[/yellow] [red]<30% RISK[/red]"
|
||||
)
|
||||
|
||||
# Live performance tier
|
||||
cur_cap = float(cap.get("capital", 0.0)) if cap else 0.0
|
||||
if cur_cap > 0:
|
||||
if self._session_start_cap is None: self._session_start_cap = cur_cap
|
||||
if self._cap_peak is None or cur_cap > self._cap_peak: self._cap_peak = cur_cap
|
||||
live_roi = ((cur_cap - self._session_start_cap) / self._session_start_cap
|
||||
if cur_cap > 0 and self._session_start_cap else None)
|
||||
live_dd = ((self._cap_peak - cur_cap) / self._cap_peak
|
||||
if cur_cap > 0 and self._cap_peak and cur_cap < self._cap_peak else None)
|
||||
pnl_blue = _S.get("hz.pnl_blue") or {}
|
||||
def _pct(v): return f"{v*100:+.1f}%" if v is not None else "—"
|
||||
def _lm(k, fmt="{:.3f}"):
|
||||
v = pnl_blue.get(k); return fmt.format(v) if v is not None else "—"
|
||||
roi_c = "green" if live_roi and live_roi > 0 else ("red" if live_roi and live_roi < -0.10 else "yellow")
|
||||
dd_c2 = "red" if live_dd and live_dd > 0.20 else ("yellow" if live_dd and live_dd > 0.08 else "green")
|
||||
self._w("#mc_live").update(
|
||||
f"[dim]ROI[/dim] [{roi_c}]{_pct(live_roi):>8}[/{roi_c}]"
|
||||
f" [dim]champ gate:>+30% crit:<-30%[/dim]\n"
|
||||
f"[dim]DD [/dim] [{dd_c2}]{_pct(live_dd):>8}[/{dd_c2}]"
|
||||
f" [dim]champ gate:<20% crit:>40%[/dim]\n"
|
||||
f"[dim]WR:{_lm('win_rate','{:.1%}')} PF:{_lm('profit_factor','{:.2f}')}"
|
||||
f" Sh:{_lm('sharpe','{:.2f}')} Cal:{_lm('calmar','{:.2f}')}[/dim]\n"
|
||||
f"[dim]cap:${cur_cap:,.0f} start:${self._session_start_cap:,.0f} "
|
||||
f"trades:{eng.get('trades_executed','—')}[/dim]"
|
||||
if cur_cap > 0 and self._session_start_cap else
|
||||
"[dim]awaiting capital data…[/dim]"
|
||||
)
|
||||
|
||||
# Right: sparklines + legend
|
||||
self._w("#mc_spark_lbl").update(
|
||||
f"[dim]cat.prob history [{min(self._prob_hist):.3f}–{max(self._prob_hist):.3f}][/dim]")
|
||||
self._w("#mc_spark", Sparkline).data = list(self._prob_hist)
|
||||
mae_list = list(self._mae_deque)
|
||||
self._w("#mc_mae_lbl").update(f"[dim]MAE hist (n={len(mae_list)})[/dim]")
|
||||
self._w("#mc_mae_spark", Sparkline).data = mae_list[-40:] if mae_list else [0.0]
|
||||
warn_str = ("\n[yellow]⚠ " + mc_warns[0] + "[/yellow]") if mc_warns else ""
|
||||
self._w("#mc_legend").update(
|
||||
"[bold]MC THRESHOLDS[/bold]\n"
|
||||
"[green]GREEN[/green] cat < 0.10\n"
|
||||
"[yellow]ORANGE[/yellow] cat < 0.30\n"
|
||||
"[red]RED[/red] cat ≥ 0.30\n"
|
||||
"[dim]DD gate: <20%[/dim]\n"
|
||||
"[dim]DD crit: >40%[/dim]" + warn_str
|
||||
)
|
||||
|
||||
# ── TRADES FOOTER — real exits vs AE shadow exits ────────────────────────
|
||||
real_trades = _S.get("ch.recent_trades") or []
|
||||
ae_closed = _S.get("ch.ae_shadow_exits") or [] # CLOSED rows, one per trade
|
||||
|
||||
# Build asset→ae lookup for matching (most recent per asset)
|
||||
ae_by_asset: dict = {}
|
||||
for r in ae_closed:
|
||||
a = r.get("asset", "")
|
||||
if a and a not in ae_by_asset:
|
||||
ae_by_asset[a] = r
|
||||
|
||||
lines = []
|
||||
for r in real_trades[:5]:
|
||||
asset = r.get("asset", "?")
|
||||
pnl = float(r.get("pnl_pct", 0) or 0) * 100
|
||||
reason = str(r.get("exit_reason", "?"))[:20]
|
||||
bars = int(r.get("bars_held", 0) or 0)
|
||||
pnl_c = "green" if pnl >= 0 else "red"
|
||||
real_s = (f"[cyan]{asset:<9}[/cyan]"
|
||||
f" [{pnl_c}]{pnl:+.2f}%[/{pnl_c}]"
|
||||
f" [dim]{reason} {bars}b[/dim]")
|
||||
ae = ae_by_asset.get(asset)
|
||||
if ae:
|
||||
p = float(ae.get("p_cont", 0.5) or 0.5)
|
||||
mae_n = float(ae.get("mae_norm", 0) or 0)
|
||||
mfe_n = float(ae.get("mfe_norm", 0) or 0)
|
||||
p_c = "red" if p < 0.35 else ("yellow" if p < 0.50 else "green")
|
||||
ae_s = (f"[dim]AE p=[/dim][{p_c}]{p:.2f}[/{p_c}]"
|
||||
f"[dim] mae={mae_n:.1f} mfe={mfe_n:.1f}[/dim]")
|
||||
else:
|
||||
ae_s = "[dim]AE: no data[/dim]"
|
||||
lines.append(f" {real_s} {ae_s}")
|
||||
|
||||
body = "\n".join(lines) if lines else " [dim]no trades today[/dim]"
|
||||
self._w("#trades_footer").update(
|
||||
f"[bold green]TRADES[/bold green] [dim]real exit → AE state at close (poll 30s)[/dim]\n"
|
||||
f"{body}"
|
||||
)
|
||||
|
||||
# ── BUCKET PERFORMANCE ────────────────────────────────────────────────────
|
||||
bkt_rows = _S.get("ch.bucket_perf") or []
|
||||
if bkt_rows:
|
||||
cells = []
|
||||
for r in bkt_rows:
|
||||
bid = int(r.get("bucket_id", 0))
|
||||
n = int(r.get("n", 0))
|
||||
wins = int(r.get("wins", 0))
|
||||
avg_p = float(r.get("avg_pnl", 0)) * 100
|
||||
wr = wins / n if n > 0 else 0.0
|
||||
wr_c = "green" if wr >= 0.55 else ("yellow" if wr >= 0.45 else "red")
|
||||
ap_c = "green" if avg_p >= 0 else "red"
|
||||
cells.append(
|
||||
f"[bold]B{bid}[/bold] [dim]n={n}[/dim]"
|
||||
f" [{wr_c}]{wr:.0%}[/{wr_c}]"
|
||||
f" [{ap_c}]{avg_p:+.1f}%[/{ap_c}]"
|
||||
)
|
||||
mid = (len(cells) + 1) // 2
|
||||
col1, col2 = cells[:mid], cells[mid:]
|
||||
bkt_lines = []
|
||||
for i in range(max(len(col1), len(col2))):
|
||||
l = col1[i] if i < len(col1) else ""
|
||||
r_cell = col2[i] if i < len(col2) else ""
|
||||
bkt_lines.append(f" {l:<52}{r_cell}")
|
||||
bkt_body = "\n".join(bkt_lines)
|
||||
else:
|
||||
bkt_body = " [dim]no bucket data yet — AE shadow requires closed trades[/dim]"
|
||||
self._w("#bucket_footer").update(
|
||||
f"[bold cyan]BUCKETS[/bold cyan] [dim]excl HIBERNATE/ACB all-time WR% avg-pnl poll 60s[/dim]\n"
|
||||
f"{bkt_body}"
|
||||
)
|
||||
|
||||
# ── TEST FOOTER — schema: {passed, total, status: PASS|FAIL|N/A} ────────
|
||||
if self._test_vis:
|
||||
tr = self._read_json(_TEST_JSON) or {}
|
||||
run_at = tr.get("_run_at", "never")
|
||||
cats = [
|
||||
("data_integrity", "data"),
|
||||
("finance_fuzz", "fuzz"),
|
||||
("signal_fill", "signal"),
|
||||
("degradation", "degrad"),
|
||||
("actor", "actor"),
|
||||
]
|
||||
def _badge(key, short):
|
||||
info = tr.get(key, {})
|
||||
if not info:
|
||||
return f"[dim]{short}:n/a[/dim]"
|
||||
status = info.get("status", "N/A")
|
||||
passed = info.get("passed")
|
||||
total = info.get("total")
|
||||
if status == "N/A" or passed is None:
|
||||
return f"[dim]{short}:N/A[/dim]"
|
||||
col = "green" if status == "PASS" else "red"
|
||||
return f"[{col}]{short}:{passed}/{total}[/{col}]"
|
||||
badges = " ".join(_badge(k, s) for k, s in cats)
|
||||
self._w("#test_footer").update(
|
||||
f"[bold dim]TESTS[/bold dim] [dim]last run: {run_at}[/dim]"
|
||||
f" [dim]t=toggle r=reload[/dim]\n"
|
||||
f"{badges}\n"
|
||||
f"[dim]file: run_logs/test_results_latest.json "
|
||||
f"API: write_test_results() in dolphin_tui_v6.py[/dim]"
|
||||
)
|
||||
else:
|
||||
self._w("#test_footer").update("")
|
||||
|
||||
# ── LOG ───────────────────────────────────────────────────────────────
|
||||
if self._log_vis:
|
||||
self._w("#p_log").update(
|
||||
f"[bold]LOG[/bold] (l=hide)\n"
|
||||
f"[dim]{now_str}[/dim] scan=#{scan_no} vel={vel_div:+.5f}\n"
|
||||
f"hz_err:{_S.get('hz_err','none')} pf_err:{_S.get('prefect_err','none')}\n"
|
||||
f"[dim]state keys:{len(_S._d)} safety_live:{safety_live}[/dim]"
|
||||
)
|
||||
|
||||
def action_force_refresh(self) -> None:
    """Key binding: redraw every panel right now instead of waiting for the timer."""
    self._update()
||||
def action_toggle_log(self) -> None:
    """Key binding: flip log-panel visibility and mirror it on the #log_row widget."""
    visible = not self._log_vis
    self._log_vis = visible
    self.query_one("#log_row").display = visible
||||
def action_toggle_tests(self) -> None:
    """Key binding: toggle the test-results footer, then redraw immediately."""
    self._test_vis = not self._test_vis
    self._update()
||||
|
||||
def _w(self, selector, widget_type=Static):
    """Shorthand for query_one: return the widget matching *selector*,
    typed as *widget_type* (defaults to Static)."""
    return self.query_one(selector, widget_type)
|
||||
|
||||
@staticmethod
|
||||
def _read_json(path):
|
||||
try: return json.loads(path.read_text())
|
||||
except Exception: return None
|
||||
|
||||
|
||||
def write_test_results(results: dict):
    """
    Update the TUI test footer. Called by test scripts / CI / conftest.py.

    Merges *results* into the existing JSON file (so categories missing from
    this call are preserved) and stamps "_run_at" with the current UTC time.

    Schema:
        {
          "_run_at": "auto-injected",
          "data_integrity": {"passed": 15, "total": 15, "status": "PASS"},
          "finance_fuzz": {"passed": null, "total": null, "status": "N/A"},
          ...
        }
    status: "PASS" | "FAIL" | "N/A"

    Example (conftest.py):
        import sys; sys.path.insert(0, "/mnt/dolphinng5_predict/Observability/TUI")
        from dolphin_tui_v5 import write_test_results
        write_test_results({"data_integrity": {"passed": 15, "total": 15, "status": "PASS"}})
    """
    _TEST_JSON.parent.mkdir(parents=True, exist_ok=True)
    # Merge with existing file so missing categories are preserved
    existing = {}
    try:
        loaded = json.loads(_TEST_JSON.read_text())
        # Bug fix: a file holding valid-but-non-object JSON (e.g. "[]") used to
        # flow into existing.update() and raise AttributeError; only accept dicts.
        if isinstance(loaded, dict):
            existing = loaded
    except Exception:
        pass
    existing.update(results)
    existing["_run_at"] = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S")
    _TEST_JSON.write_text(json.dumps(existing, indent=2))
|
||||
|
||||
|
||||
# Manual entry point: launch the TUI when this file is executed directly.
if __name__ == "__main__":
    DolphinTUI().run()
|
||||
2070
Observability/TUI/test_dolphin_tui.py
Executable file
2070
Observability/TUI/test_dolphin_tui.py
Executable file
File diff suppressed because it is too large
Load Diff
296
Observability/TUI/test_dolphin_tui_keyboard.py
Executable file
296
Observability/TUI/test_dolphin_tui_keyboard.py
Executable file
@@ -0,0 +1,296 @@
|
||||
"""
|
||||
Keyboard shortcut tests for DolphinTUIApp.
|
||||
|
||||
Validates: Requirements 10.1, 10.2, 10.3, 10.4
|
||||
|
||||
Uses Textual's built-in async pilot (App.run_test / pilot.press) to simulate
|
||||
keypresses and assert expected behaviour. DolphinDataFetcher is mocked so no
|
||||
real Hazelcast or Prefect connections are needed.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
import os
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
sys.path.insert(0, os.path.dirname(__file__))
|
||||
|
||||
# ---------------------------------------------------------------------------
# Compatibility shim: Textual 8.x moved VerticalScroll to textual.containers.
# Patch textual.widgets so dolphin_tui.py (which imports from textual.widgets)
# can load without error.
# ---------------------------------------------------------------------------
import textual.widgets as _tw
import textual.containers as _tc

# Alias only when the attribute is missing, so a Textual version that still
# exports VerticalScroll from textual.widgets is left untouched.
if not hasattr(_tw, "VerticalScroll"):
    _tw.VerticalScroll = _tc.VerticalScroll
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Import the real app (all deps are available in this environment)
|
||||
# ---------------------------------------------------------------------------
|
||||
from dolphin_tui import ( # noqa: E402
|
||||
DolphinTUIApp,
|
||||
DolphinDataFetcher,
|
||||
DataSnapshot,
|
||||
LogPanel,
|
||||
)
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Shared fixture: a minimal DataSnapshot with no real data
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Shared empty snapshot handed back by the mocked fetcher in every test below.
_EMPTY_SNAP = DataSnapshot()
|
||||
|
||||
|
||||
def _make_mock_fetcher_instance() -> MagicMock:
    """Build a DolphinDataFetcher stand-in with every network method stubbed.

    connect_hz/disconnect_hz/fetch/fetch_prefect are AsyncMocks so they can be
    awaited; fetch always yields the shared empty snapshot.
    """
    stub = MagicMock(spec=DolphinDataFetcher)
    stub.hz_connected = False
    stub._running = True
    stub._reconnect_task = None
    stub.connect_hz = AsyncMock(return_value=False)
    stub.disconnect_hz = AsyncMock(return_value=None)
    stub.fetch = AsyncMock(return_value=_EMPTY_SNAP)
    stub.fetch_prefect = AsyncMock(return_value=(False, []))
    stub.tail_log = MagicMock(return_value=[])
    return stub
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Test 10.1 — `q` key: action_quit called, app exits cleanly
|
||||
# Validates: Requirements 10.1
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@pytest.mark.asyncio
async def test_q_key_quits_app_cleanly():
    """Pressing `q` calls action_quit which disconnects HZ and exits (Req 10.1).

    Verifies that disconnect_hz is awaited before exit so the HZ client
    is shut down cleanly.
    """
    mock_fetcher = _make_mock_fetcher_instance()

    # Patch the fetcher class so the app never opens real connections.
    with patch("dolphin_tui.DolphinDataFetcher", return_value=mock_fetcher):
        app = DolphinTUIApp(hz_host="localhost", hz_port=5701)

        async with app.run_test(size=(130, 35)) as pilot:
            # Stop the poll timer to avoid background noise
            try:
                app._poll_timer.stop()
            except Exception:
                pass

            await pilot.press("q")
            # run_test context exits cleanly after q — app.exit() was called

        # After the context exits, verify disconnect_hz was called (clean HZ shutdown)
        mock_fetcher.disconnect_hz.assert_awaited_once()
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Test 10.2 — `r` key: action_force_refresh triggers an immediate poll
|
||||
# Validates: Requirements 10.2
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@pytest.mark.asyncio
async def test_r_key_triggers_immediate_poll():
    """Pressing `r` calls action_force_refresh which runs _poll immediately (Req 10.2).

    Verifies that fetch() is called at least once more after pressing `r`,
    confirming a poll outside the normal 2s cycle.
    """
    mock_fetcher = _make_mock_fetcher_instance()

    with patch("dolphin_tui.DolphinDataFetcher", return_value=mock_fetcher):
        app = DolphinTUIApp(hz_host="localhost", hz_port=5701)

        async with app.run_test(size=(130, 35)) as pilot:
            # Freeze the periodic timer so only the keypress can trigger fetches.
            try:
                app._poll_timer.stop()
            except Exception:
                pass

            # Baseline await-count before the refresh key is pressed.
            call_count_before = mock_fetcher.fetch.await_count

            await pilot.press("r")
            await pilot.pause(0.2)  # give the triggered poll a chance to run

            # fetch() should have been called at least once more after `r`
            assert mock_fetcher.fetch.await_count > call_count_before, (
                f"Expected fetch() to be called after pressing 'r', "
                f"but call count stayed at {mock_fetcher.fetch.await_count}"
            )
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Test 10.3 — `l` key: LogPanel visibility toggles on/off
|
||||
# Validates: Requirements 10.3
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@pytest.mark.asyncio
async def test_l_key_shows_log_panel():
    """Pressing `l` once makes the LogPanel visible (Req 10.3)."""
    mock_fetcher = _make_mock_fetcher_instance()

    with patch("dolphin_tui.DolphinDataFetcher", return_value=mock_fetcher):
        app = DolphinTUIApp(hz_host="localhost", hz_port=5701)

        async with app.run_test(size=(130, 35)) as pilot:
            # Stop background polling so it cannot interfere with the assertions.
            try:
                app._poll_timer.stop()
            except Exception:
                pass

            log_panel = app.query_one("#panel_log", LogPanel)

            # Initially hidden
            assert log_panel.display is False, "LogPanel should be hidden on startup"

            # Press l → should become visible
            await pilot.press("l")
            await pilot.pause(0.05)  # let the toggle action settle
            assert log_panel.display is True, "LogPanel should be visible after first 'l' press"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_l_key_hides_log_panel_on_second_press():
    """Pressing `l` twice returns LogPanel to hidden state (Req 10.3)."""
    mock_fetcher = _make_mock_fetcher_instance()

    with patch("dolphin_tui.DolphinDataFetcher", return_value=mock_fetcher):
        app = DolphinTUIApp(hz_host="localhost", hz_port=5701)

        async with app.run_test(size=(130, 35)) as pilot:
            # Stop background polling so it cannot interfere with the toggles.
            try:
                app._poll_timer.stop()
            except Exception:
                pass

            log_panel = app.query_one("#panel_log", LogPanel)

            # Press l twice: hidden → visible → hidden
            await pilot.press("l")
            await pilot.pause(0.05)
            assert log_panel.display is True

            await pilot.press("l")
            await pilot.pause(0.05)
            assert log_panel.display is False, "LogPanel should be hidden after second 'l' press"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_l_key_updates_log_visible_flag():
    """Pressing `l` updates the _log_visible internal flag (Req 10.3)."""
    mock_fetcher = _make_mock_fetcher_instance()

    with patch("dolphin_tui.DolphinDataFetcher", return_value=mock_fetcher):
        app = DolphinTUIApp(hz_host="localhost", hz_port=5701)

        async with app.run_test(size=(130, 35)) as pilot:
            # Stop background polling; only keypresses should change the flag.
            try:
                app._poll_timer.stop()
            except Exception:
                pass

            # Flag starts False on a fresh app instance.
            assert app._log_visible is False

            await pilot.press("l")
            await pilot.pause(0.05)
            assert app._log_visible is True

            await pilot.press("l")
            await pilot.pause(0.05)
            assert app._log_visible is False
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Test 10.4 — Arrow keys: scroll actions dispatched on LogPanel
|
||||
# Validates: Requirements 10.4
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@pytest.mark.asyncio
async def test_up_arrow_scrolls_log_panel():
    """Pressing ↑ dispatches action_scroll_up on LogPanel when visible (Req 10.4)."""
    mock_fetcher = _make_mock_fetcher_instance()

    with patch("dolphin_tui.DolphinDataFetcher", return_value=mock_fetcher):
        app = DolphinTUIApp(hz_host="localhost", hz_port=5701)

        async with app.run_test(size=(130, 35)) as pilot:
            # Stop background polling to keep the key-routing deterministic.
            try:
                app._poll_timer.stop()
            except Exception:
                pass

            # Make log panel visible and focus it
            await pilot.press("l")
            await pilot.pause(0.05)

            log_panel = app.query_one("#panel_log", LogPanel)
            assert log_panel.display is True

            # Focus the log panel so it receives the scroll action
            log_panel.focus()
            await pilot.pause(0.05)

            # Track action_scroll_up calls on the log panel, delegating to the
            # original so real scrolling behaviour is preserved.
            scroll_up_called = []
            original = log_panel.action_scroll_up

            def _track(*args, **kwargs):
                scroll_up_called.append(True)
                return original(*args, **kwargs)

            log_panel.action_scroll_up = _track

            await pilot.press("up")
            await pilot.pause(0.1)

            assert len(scroll_up_called) > 0, (
                "action_scroll_up should have been called on LogPanel after pressing 'up'"
            )
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_down_arrow_scrolls_log_panel():
    """Pressing ↓ dispatches action_scroll_down on LogPanel when visible (Req 10.4)."""
    mock_fetcher = _make_mock_fetcher_instance()

    with patch("dolphin_tui.DolphinDataFetcher", return_value=mock_fetcher):
        app = DolphinTUIApp(hz_host="localhost", hz_port=5701)

        async with app.run_test(size=(130, 35)) as pilot:
            # Stop background polling to keep the key-routing deterministic.
            try:
                app._poll_timer.stop()
            except Exception:
                pass

            # Make log panel visible and focus it
            await pilot.press("l")
            await pilot.pause(0.05)

            log_panel = app.query_one("#panel_log", LogPanel)
            assert log_panel.display is True

            log_panel.focus()
            await pilot.pause(0.05)

            # Track action_scroll_down calls on the log panel, delegating to
            # the original so real scrolling behaviour is preserved.
            scroll_down_called = []
            original = log_panel.action_scroll_down

            def _track(*args, **kwargs):
                scroll_down_called.append(True)
                return original(*args, **kwargs)

            log_panel.action_scroll_down = _track

            await pilot.press("down")
            await pilot.pause(0.1)

            assert len(scroll_down_called) > 0, (
                "action_scroll_down should have been called on LogPanel after pressing 'down'"
            )
|
||||
288
Observability/TUI/test_dolphin_tui_log_tail.py
Executable file
288
Observability/TUI/test_dolphin_tui_log_tail.py
Executable file
@@ -0,0 +1,288 @@
|
||||
"""
|
||||
test_dolphin_tui_log_tail.py
|
||||
|
||||
Verifies the tail_log() method in DolphinDataFetcher:
|
||||
- Uses seek(-N, 2) to read only the last N bytes (not the full file)
|
||||
- Returns the correct last N lines from a large file
|
||||
- Does not load the entire file into memory
|
||||
|
||||
Tests:
|
||||
- test_tail_log_returns_last_n_lines
|
||||
- test_tail_log_large_file_seek_not_full_read
|
||||
- test_tail_log_large_file_correctness
|
||||
- test_tail_log_file_not_found
|
||||
- test_tail_log_small_file
|
||||
- test_tail_log_empty_file
|
||||
- test_tail_log_n_param
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
import types
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Make sure the TUI module is importable from this directory
|
||||
# ---------------------------------------------------------------------------
|
||||
sys.path.insert(0, os.path.dirname(__file__))
|
||||
|
||||
# ---------------------------------------------------------------------------
# Stub hazelcast so the import succeeds without the package
# ---------------------------------------------------------------------------
_hz_stub = types.ModuleType("hazelcast")
_hz_stub.HazelcastClient = MagicMock()
sys.modules.setdefault("hazelcast", _hz_stub)

# ---------------------------------------------------------------------------
# Stub textual so dolphin_tui imports cleanly without a terminal
# ---------------------------------------------------------------------------
for _mod in ["textual", "textual.app", "textual.containers", "textual.widgets"]:
    if _mod not in sys.modules:
        sys.modules[_mod] = types.ModuleType(_mod)

# Plain `object` placeholders — presumably dolphin_tui only needs these names
# to exist at import time (TODO confirm against dolphin_tui's import list).
_textual_app = sys.modules["textual.app"]
_textual_app.App = object
_textual_app.ComposeResult = object

_textual_containers = sys.modules["textual.containers"]
_textual_containers.Horizontal = object

_textual_widgets = sys.modules["textual.widgets"]
_textual_widgets.Static = object
_textual_widgets.VerticalScroll = object

# ---------------------------------------------------------------------------
# Stub httpx
# ---------------------------------------------------------------------------
if "httpx" not in sys.modules:
    _httpx_stub = types.ModuleType("httpx")
    _httpx_stub.AsyncClient = MagicMock()
    sys.modules["httpx"] = _httpx_stub

from dolphin_tui import DolphinDataFetcher, LOG_TAIL_CHUNK_BYTES  # noqa: E402
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helper
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _make_fetcher() -> DolphinDataFetcher:
    """Return a DolphinDataFetcher configured with placeholder host/port."""
    return DolphinDataFetcher(hz_host="localhost", hz_port=5701)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Tests
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestTailLogReturnsLastNLines(unittest.TestCase):
    """test_tail_log_returns_last_n_lines

    Write 1000 known lines to a temp file, call tail_log(path, 50), and
    verify exactly the last 50 lines come back, in order.
    """

    def setUp(self):
        self.tmp = tempfile.NamedTemporaryFile(mode="w", suffix=".log", delete=False)
        self.tmp.write("".join(f"Line {i}\n" for i in range(1000)))
        self.tmp.close()

    def tearDown(self):
        os.unlink(self.tmp.name)

    def test_tail_log_returns_last_n_lines(self):
        fetcher = _make_fetcher()
        tail = fetcher.tail_log(self.tmp.name, n=50)

        self.assertEqual(len(tail), 50, f"Expected 50 lines, got {len(tail)}")
        # The last 50 lines should be Line 950 .. Line 999
        self.assertEqual(tail, [f"Line {i}" for i in range(950, 1000)])
|
||||
|
||||
|
||||
class TestTailLogLargeFileSeekNotFullRead(unittest.TestCase):
    """test_tail_log_large_file_seek_not_full_read

    Verify that tail_log uses seek(-chunk, 2) and does NOT call read()
    with the full file size.
    """

    def setUp(self):
        # Write a file that is clearly larger than the chunk size
        self.tmp = tempfile.NamedTemporaryFile(mode="wb", suffix=".log", delete=False)
        line = b"2026-01-01 00:00:00 [INFO] padding " + b"x" * 100 + b"\n"
        # Write enough to be > LOG_TAIL_CHUNK_BYTES (3x, for comfortable margin)
        total = 0
        while total < LOG_TAIL_CHUNK_BYTES * 3:
            self.tmp.write(line)
            total += len(line)
        self.tmp.close()
        self.file_size = os.path.getsize(self.tmp.name)

    def tearDown(self):
        os.unlink(self.tmp.name)

    def test_tail_log_large_file_seek_not_full_read(self):
        fetcher = _make_fetcher()

        read_sizes = []           # every size argument passed to fh.read()
        original_open = open      # grab the real builtin before it is patched

        def spy_open(path, mode="r", **kwargs):
            # Open normally, then wrap the handle's read() to record sizes.
            fh = original_open(path, mode, **kwargs)
            original_read = fh.read

            def tracking_read(size=-1):
                read_sizes.append(size)
                return original_read(size)

            fh.read = tracking_read
            return fh

        with patch("builtins.open", side_effect=spy_open):
            fetcher.tail_log(self.tmp.name, n=50)

        # read() must never be called with the full file size
        self.assertNotIn(
            self.file_size,
            read_sizes,
            f"read() was called with full file size {self.file_size} — full file was loaded",
        )
        # At least one read() call must have happened
        self.assertTrue(len(read_sizes) > 0, "read() was never called")
|
||||
|
||||
|
||||
class TestTailLogLargeFileCorrectness(unittest.TestCase):
    """test_tail_log_large_file_correctness

    Build a temp file >10MB of repeated log lines, call tail_log, and check
    the returned lines equal the file's true last N lines.
    """

    def setUp(self):
        self.tmp = tempfile.NamedTemporaryFile(mode="w", suffix=".log", delete=False, encoding="utf-8")
        pad = "x" * 100
        self.lines = []
        written = 0
        idx = 0
        while written < 10 * 1024 * 1024:  # 10 MB
            entry = f"2026-01-01 00:00:00 [INFO] Line {idx} padding {pad}"
            self.tmp.write(entry + "\n")
            self.lines.append(entry)
            written += len(entry) + 1
            idx += 1
        self.tmp.close()

    def tearDown(self):
        os.unlink(self.tmp.name)

    def test_tail_log_large_file_correctness(self):
        fetcher = _make_fetcher()
        tail = fetcher.tail_log(self.tmp.name, n=50)

        self.assertEqual(len(tail), 50, f"Expected 50 lines, got {len(tail)}")
        self.assertEqual(tail, self.lines[-50:], "Returned lines do not match the actual last 50 lines")
|
||||
|
||||
|
||||
class TestTailLogFileNotFound(unittest.TestCase):
    """test_tail_log_file_not_found

    When path doesn't exist, returns ["Log not found: <path>"].
    """

    def test_tail_log_file_not_found(self):
        missing = "/tmp/this_file_does_not_exist_dolphin_test_xyz.log"
        tail = _make_fetcher().tail_log(missing, n=50)

        # Exactly one sentinel line, nothing else.
        self.assertEqual(tail, [f"Log not found: {missing}"])
|
||||
|
||||
|
||||
class TestTailLogSmallFile(unittest.TestCase):
    """test_tail_log_small_file

    File smaller than the seek chunk still returns correct lines
    (seek hits start of file via OSError fallback).
    """

    def setUp(self):
        self.tmp = tempfile.NamedTemporaryFile(mode="w", suffix=".log", delete=False)
        self.tmp.write("".join(f"SmallLine {i}\n" for i in range(20)))
        self.tmp.close()

    def tearDown(self):
        os.unlink(self.tmp.name)

    def test_tail_log_small_file(self):
        tail = _make_fetcher().tail_log(self.tmp.name, n=50)

        # File only has 20 lines — should return all 20, in order.
        self.assertEqual(len(tail), 20, f"Expected 20 lines, got {len(tail)}")
        self.assertEqual(tail, [f"SmallLine {i}" for i in range(20)])
|
||||
|
||||
|
||||
class TestTailLogEmptyFile(unittest.TestCase):
    """test_tail_log_empty_file

    Empty file returns empty list.
    """

    def setUp(self):
        handle = tempfile.NamedTemporaryFile(mode="w", suffix=".log", delete=False)
        handle.close()
        self.tmp = handle

    def tearDown(self):
        os.unlink(self.tmp.name)

    def test_tail_log_empty_file(self):
        tail = _make_fetcher().tail_log(self.tmp.name, n=50)
        self.assertEqual(tail, [], f"Expected empty list for empty file, got {tail!r}")
|
||||
|
||||
|
||||
class TestTailLogNParam(unittest.TestCase):
    """test_tail_log_n_param

    Calling with n=10 returns exactly 10 lines, n=100 returns 100 lines
    (when file has enough).
    """

    def setUp(self):
        self.tmp = tempfile.NamedTemporaryFile(mode="w", suffix=".log", delete=False)
        self.tmp.write("".join(f"NParamLine {i}\n" for i in range(500)))
        self.tmp.close()

    def tearDown(self):
        os.unlink(self.tmp.name)

    def _assert_tail(self, n):
        # Shared check: exactly the last *n* of the 500 lines, in order.
        tail = _make_fetcher().tail_log(self.tmp.name, n=n)
        self.assertEqual(len(tail), n, f"Expected {n} lines, got {len(tail)}")
        self.assertEqual(tail, [f"NParamLine {i}" for i in range(500 - n, 500)])

    def test_tail_log_n_10(self):
        self._assert_tail(10)

    def test_tail_log_n_100(self):
        self._assert_tail(100)
|
||||
|
||||
|
||||
# Allow running this test module directly without pytest.
if __name__ == "__main__":
    unittest.main()
|
||||
272
Observability/TUI/test_dolphin_tui_malformed_json.py
Executable file
272
Observability/TUI/test_dolphin_tui_malformed_json.py
Executable file
@@ -0,0 +1,272 @@
|
||||
# Tests for graceful handling of malformed JSON in HZ values.
|
||||
# Validates: Requirements 12.3
|
||||
# The TUI MUST NOT crash when any individual HZ key contains malformed JSON.
|
||||
# Malformed JSON MUST result in all fields being None (no crash, no exception).
|
||||
|
||||
import asyncio
|
||||
import sys
|
||||
import os
|
||||
import types
|
||||
from unittest.mock import MagicMock, patch, AsyncMock
|
||||
|
||||
import pytest
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Ensure the TUI module is importable without textual/hazelcast/httpx installed
|
||||
# ---------------------------------------------------------------------------
|
||||
sys.path.insert(0, os.path.dirname(__file__))

# Install empty placeholder modules for the heavyweight dependencies so that
# `from dolphin_tui import ...` below succeeds without them installed.
for _mod in ("textual", "textual.app", "textual.widgets", "textual.containers", "httpx", "hazelcast"):
    if _mod not in sys.modules:
        sys.modules[_mod] = types.ModuleType(_mod)

import textual.app as _textual_app
import textual.widgets as _textual_widgets
import textual.containers as _textual_containers

# Plain `object` placeholders — presumably dolphin_tui only references these
# names at import time (TODO confirm against dolphin_tui's import list).
_textual_app.App = object
_textual_app.ComposeResult = object
_textual_widgets.Static = object
_textual_widgets.VerticalScroll = object
_textual_containers.Horizontal = object
|
||||
|
||||
from dolphin_tui import (
|
||||
color_age,
|
||||
fmt_float,
|
||||
fmt_pnl,
|
||||
DataSnapshot,
|
||||
DolphinDataFetcher,
|
||||
)
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Malformed JSON inputs to test
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Representative malformed (or valid-but-wrong-shape) JSON payloads.  Every
# parser must swallow each of these without raising.
MALFORMED_JSON_INPUTS = [
    "{bad json",          # truncated object
    "not-json",           # bare garbage
    "null",               # valid JSON, but not an object
    "[]",                 # valid JSON array, wrong shape
    "123",                # valid JSON number, wrong shape
    "",                   # empty payload
    "{",                  # lone opening brace
    "}",                  # lone closing brace
    "{'key': 'value'}",   # single quotes — invalid JSON
    "undefined",          # JavaScript literal, not JSON
    "NaN",                # not valid strict JSON
]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _make_fetcher():
    """Construct a fresh, unconnected DolphinDataFetcher for a single test."""
    fetcher = DolphinDataFetcher()
    return fetcher
|
||||
|
||||
|
||||
def _make_mock_map_returning(value):
|
||||
"""Return a mock IMap whose .get(key).result() returns the given value."""
|
||||
future = MagicMock()
|
||||
future.result.return_value = value
|
||||
hz_map = MagicMock()
|
||||
hz_map.get.return_value = future
|
||||
hz_map.key_set.return_value = future
|
||||
return hz_map
|
||||
|
||||
|
||||
def _make_fetcher_with_malformed_json(malformed: str):
    """Build a DolphinDataFetcher whose HZ client serves *malformed* for every key."""
    bad_map = _make_mock_map_returning(malformed)

    get_map_future = MagicMock()
    get_map_future.result.return_value = bad_map

    client = MagicMock()
    client.get_map.return_value = get_map_future

    fetcher = DolphinDataFetcher()
    fetcher.hz_connected = True  # pretend the connection succeeded
    fetcher.hz_client = client
    return fetcher
|
||||
|
||||
|
||||
def _run(coro):
|
||||
return asyncio.get_event_loop().run_until_complete(coro)
|
||||
|
||||
|
||||
def _fetch_with_malformed_json(malformed: str):
    """Run fetcher.fetch() against malformed HZ data, with side effects stubbed."""
    fetcher = _make_fetcher_with_malformed_json(malformed)
    # Neutralize Prefect, log tailing and reconnects so only HZ parsing runs.
    with patch.object(fetcher, "fetch_prefect", new=AsyncMock(return_value=(False, []))), \
         patch.object(fetcher, "tail_log", return_value=[]), \
         patch.object(fetcher, "_start_reconnect", return_value=None):
        return _run(fetcher.fetch())
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# _parse_scan: malformed JSON -> all None, no exception
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@pytest.mark.parametrize("bad_json", MALFORMED_JSON_INPUTS)
def test_parse_scan_malformed_no_crash(bad_json):
    """_parse_scan must not raise on malformed JSON."""
    parsed = _make_fetcher()._parse_scan(bad_json)  # must not raise
    assert isinstance(parsed, dict)


@pytest.mark.parametrize("bad_json", MALFORMED_JSON_INPUTS)
def test_parse_scan_malformed_returns_none_fields(bad_json):
    """_parse_scan must return all-None scalar fields for malformed JSON."""
    parsed = _make_fetcher()._parse_scan(bad_json)
    scalar_fields = (
        "scan_number", "vel_div", "w50_velocity", "w750_velocity",
        "instability_50", "scan_bridge_ts", "scan_age_s",
    )
    for field in scalar_fields:
        assert parsed[field] is None, f"_parse_scan({bad_json!r})[{field!r}] should be None"
    assert parsed["asset_prices"] == {}


# ---------------------------------------------------------------------------
# _parse_safety: malformed JSON -> all None, no exception
# ---------------------------------------------------------------------------

@pytest.mark.parametrize("bad_json", MALFORMED_JSON_INPUTS)
def test_parse_safety_malformed_no_crash(bad_json):
    """_parse_safety must not raise on malformed JSON."""
    parsed = _make_fetcher()._parse_safety(bad_json)  # must not raise
    assert isinstance(parsed, dict)


@pytest.mark.parametrize("bad_json", MALFORMED_JSON_INPUTS)
def test_parse_safety_malformed_returns_none_fields(bad_json):
    """_parse_safety must return all-None fields for malformed JSON."""
    parsed = _make_fetcher()._parse_safety(bad_json)
    for field in ("posture", "rm", "cat1", "cat2", "cat3", "cat4", "cat5"):
        assert parsed[field] is None, f"_parse_safety({bad_json!r})[{field!r}] should be None"


# ---------------------------------------------------------------------------
# _parse_heartbeat: malformed JSON -> all None, no exception
# ---------------------------------------------------------------------------

@pytest.mark.parametrize("bad_json", MALFORMED_JSON_INPUTS)
def test_parse_heartbeat_malformed_no_crash(bad_json):
    """_parse_heartbeat must not raise on malformed JSON."""
    parsed = _make_fetcher()._parse_heartbeat(bad_json)  # must not raise
    assert isinstance(parsed, dict)


@pytest.mark.parametrize("bad_json", MALFORMED_JSON_INPUTS)
def test_parse_heartbeat_malformed_returns_none_fields(bad_json):
    """_parse_heartbeat must return all-None fields for malformed JSON."""
    parsed = _make_fetcher()._parse_heartbeat(bad_json)
    for field in ("heartbeat_ts", "heartbeat_phase", "heartbeat_flow", "heartbeat_age_s"):
        assert parsed[field] is None, f"_parse_heartbeat({bad_json!r})[{field!r}] should be None"
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# fetch() with malformed JSON from HZ -> DataSnapshot, no crash
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@pytest.mark.parametrize("bad_json", ["{bad json", "not-json", "null", "[]", "123", ""])
def test_fetch_malformed_json_returns_datasnapshot(bad_json):
    """fetch() must return a DataSnapshot even when HZ returns malformed JSON."""
    assert isinstance(_fetch_with_malformed_json(bad_json), DataSnapshot)


@pytest.mark.parametrize("bad_json", ["{bad json", "not-json", "null", "[]", "123", ""])
def test_fetch_malformed_json_scan_fields_none(bad_json):
    """fetch() with malformed JSON must produce None scan fields."""
    snap = _fetch_with_malformed_json(bad_json)
    for attr in ("scan_number", "vel_div", "w50_velocity",
                 "instability_50", "scan_bridge_ts", "scan_age_s"):
        assert getattr(snap, attr) is None


@pytest.mark.parametrize("bad_json", ["{bad json", "not-json", "null", "[]", "123", ""])
def test_fetch_malformed_json_safety_fields_none(bad_json):
    """fetch() with malformed JSON must produce None safety fields."""
    snap = _fetch_with_malformed_json(bad_json)
    for attr in ("posture", "rm", "cat1"):
        assert getattr(snap, attr) is None


@pytest.mark.parametrize("bad_json", ["{bad json", "not-json", "null", "[]", "123", ""])
def test_fetch_malformed_json_heartbeat_fields_none(bad_json):
    """fetch() with malformed JSON must produce None heartbeat fields."""
    snap = _fetch_with_malformed_json(bad_json)
    for attr in ("heartbeat_ts", "heartbeat_phase", "heartbeat_age_s"):
        assert getattr(snap, attr) is None


@pytest.mark.parametrize("bad_json", ["{bad json", "not-json", "null", "[]", "123", ""])
def test_fetch_malformed_json_no_crash(bad_json):
    """fetch() must not raise any exception when HZ returns malformed JSON."""
    # Reaching the assert at all means no exception escaped.
    assert _fetch_with_malformed_json(bad_json) is not None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# fmt_float, fmt_pnl, color_age handle None gracefully (parse errors -> None fields)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def test_fmt_float_handles_none_from_parse_error():
    """fmt_float(None) must return '--' — parse errors produce None fields."""
    assert fmt_float(None) == "--"


def test_fmt_pnl_handles_none_from_parse_error():
    """fmt_pnl(None) must return ('white', '--') — parse errors produce None fields."""
    color, text = fmt_pnl(None)
    assert (color, text) == ("white", "--")


def test_color_age_handles_none_from_parse_error():
    """color_age(None) must return ('dim', 'N/A') — parse errors produce None fields."""
    color, text = color_age(None)
    assert (color, text) == ("dim", "N/A")


def test_all_none_snapshot_fmt_float_no_crash():
    """All float fields on an all-None DataSnapshot must format without crashing."""
    snap = DataSnapshot()
    float_fields = (
        "vel_div", "w50_velocity", "w750_velocity", "instability_50",
        "acb_boost", "acb_beta", "funding_btc", "dvol_btc", "fng",
        "vix", "capital", "pnl", "rm",
    )
    for name in float_fields:
        rendered = fmt_float(getattr(snap, name))
        assert rendered == "--", f"fmt_float({name}=None) should be '--', got {rendered!r}"


def test_all_none_snapshot_fmt_pnl_no_crash():
    """PnL fields on an all-None DataSnapshot must format without crashing."""
    snap = DataSnapshot()
    for name in ("pnl", "nautilus_pnl"):
        color, text = fmt_pnl(getattr(snap, name))
        assert text == "--"
        assert color == "white"


def test_all_none_snapshot_color_age_no_crash():
    """Age fields on an all-None DataSnapshot must format without crashing."""
    snap = DataSnapshot()
    for name in ("scan_age_s", "exf_age_s", "esof_age_s", "heartbeat_age_s"):
        color, text = color_age(getattr(snap, name))
        assert text == "N/A"
        assert color == "dim"
|
||||
283
Observability/TUI/test_dolphin_tui_missing_keys.py
Executable file
283
Observability/TUI/test_dolphin_tui_missing_keys.py
Executable file
@@ -0,0 +1,283 @@
|
||||
# Tests for graceful "N/A" / "--" rendering when HZ maps return None for all keys.
|
||||
# Validates: Requirements 12.3
|
||||
# The TUI MUST NOT crash when any individual HZ key is missing or contains
|
||||
# malformed JSON. Missing fields MUST render as "--" or "N/A".
|
||||
|
||||
import asyncio
|
||||
import sys
|
||||
import os
|
||||
import types
|
||||
from unittest.mock import MagicMock, patch, AsyncMock
|
||||
|
||||
import pytest
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Ensure the TUI module is importable without textual/hazelcast/httpx installed
|
||||
# ---------------------------------------------------------------------------
|
||||
sys.path.insert(0, os.path.dirname(__file__))
|
||||
|
||||
# Install empty stand-in modules so dolphin_tui imports without its optional
# dependencies (textual / httpx / hazelcast) being installed.
for _stub_name in (
    "textual", "textual.app", "textual.widgets",
    "textual.containers", "httpx", "hazelcast",
):
    sys.modules.setdefault(_stub_name, types.ModuleType(_stub_name))

import textual.app as _textual_app
import textual.widgets as _textual_widgets
import textual.containers as _textual_containers

# Bare `object` suffices for dolphin_tui's class statements to resolve.
_textual_app.App = object
_textual_app.ComposeResult = object
_textual_widgets.Static = object
_textual_widgets.VerticalScroll = object
_textual_containers.Horizontal = object
|
||||
|
||||
from dolphin_tui import (
|
||||
color_age,
|
||||
fmt_float,
|
||||
fmt_pnl,
|
||||
rm_bar,
|
||||
posture_color,
|
||||
status_color,
|
||||
DataSnapshot,
|
||||
DolphinDataFetcher,
|
||||
)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _make_mock_map_returning_none():
|
||||
"""Return a mock IMap whose .get(key).result() always returns None."""
|
||||
future = MagicMock()
|
||||
future.result.return_value = None
|
||||
hz_map = MagicMock()
|
||||
hz_map.get.return_value = future
|
||||
hz_map.key_set.return_value = future
|
||||
return hz_map
|
||||
|
||||
|
||||
def _make_fetcher_with_mock_client():
    """Build a DolphinDataFetcher whose HZ client returns None for every map key."""
    empty_map = _make_mock_map_returning_none()

    get_map_future = MagicMock()
    get_map_future.result.return_value = empty_map

    client = MagicMock()
    client.get_map.return_value = get_map_future

    fetcher = DolphinDataFetcher()
    fetcher.hz_connected = True  # pretend the connection succeeded
    fetcher.hz_client = client
    return fetcher
|
||||
|
||||
|
||||
def _run(coro):
|
||||
return asyncio.get_event_loop().run_until_complete(coro)
|
||||
|
||||
|
||||
def _fetch_with_empty_maps():
    """Run fetcher.fetch() against all-None HZ maps, with side effects stubbed."""
    fetcher = _make_fetcher_with_mock_client()
    # Neutralize Prefect, log tailing and reconnects so only HZ parsing runs.
    with patch.object(fetcher, "fetch_prefect", new=AsyncMock(return_value=(False, []))), \
         patch.object(fetcher, "tail_log", return_value=[]), \
         patch.object(fetcher, "_start_reconnect", return_value=None):
        return _run(fetcher.fetch())
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Formatting helpers: None inputs
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def test_fmt_float_none_returns_double_dash():
    assert fmt_float(None) == "--"


def test_fmt_float_none_custom_decimals():
    assert fmt_float(None, decimals=2) == "--"


def test_fmt_pnl_none_returns_double_dash():
    _, text = fmt_pnl(None)
    assert text == "--"


def test_fmt_pnl_none_returns_white_color():
    color, _ = fmt_pnl(None)
    assert color == "white"


def test_color_age_none_returns_na():
    _, text = color_age(None)
    assert text == "N/A"


def test_color_age_none_returns_dim():
    color, _ = color_age(None)
    assert color == "dim"


def test_rm_bar_none_returns_double_dash():
    assert rm_bar(None) == "--"


def test_posture_color_none_returns_dim():
    assert posture_color(None) == "dim"


def test_status_color_none_returns_dim():
    assert status_color(None) == "dim"


# ---------------------------------------------------------------------------
# Sync parsers: None input -> all-None output, no crash
# ---------------------------------------------------------------------------

def test_parse_scan_none_no_crash():
    parsed = DolphinDataFetcher()._parse_scan(None)
    for field in ("scan_number", "vel_div", "w50_velocity", "w750_velocity",
                  "instability_50", "scan_bridge_ts", "scan_age_s"):
        assert parsed[field] is None
    assert parsed["asset_prices"] == {}


def test_parse_safety_none_no_crash():
    parsed = DolphinDataFetcher()._parse_safety(None)
    for field in ("posture", "rm", "cat1", "cat2", "cat3", "cat4", "cat5"):
        assert parsed[field] is None, f"Expected {field} to be None"


def test_parse_heartbeat_none_no_crash():
    parsed = DolphinDataFetcher()._parse_heartbeat(None)
    for field in ("heartbeat_ts", "heartbeat_phase", "heartbeat_flow", "heartbeat_age_s"):
        assert parsed[field] is None, f"Expected {field} to be None"
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# fetch() with all-None HZ maps -> DataSnapshot with all HZ fields None
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def test_fetch_empty_maps_returns_datasnapshot():
    assert isinstance(_fetch_with_empty_maps(), DataSnapshot)


def test_fetch_empty_maps_hz_connected_true():
    assert _fetch_with_empty_maps().hz_connected is True


def test_fetch_empty_maps_scan_fields_none():
    snap = _fetch_with_empty_maps()
    for attr in ("scan_number", "vel_div", "w50_velocity", "w750_velocity",
                 "instability_50", "scan_bridge_ts", "scan_age_s"):
        assert getattr(snap, attr) is None


def test_fetch_empty_maps_safety_fields_none():
    snap = _fetch_with_empty_maps()
    for attr in ("posture", "rm", "cat1", "cat2", "cat3", "cat4", "cat5"):
        assert getattr(snap, attr) is None


def test_fetch_empty_maps_extf_fields_none():
    snap = _fetch_with_empty_maps()
    for attr in ("funding_btc", "dvol_btc", "fng", "vix", "exf_age_s"):
        assert getattr(snap, attr) is None


def test_fetch_empty_maps_state_fields_none():
    snap = _fetch_with_empty_maps()
    for attr in ("capital", "pnl", "trades",
                 "nautilus_capital", "nautilus_pnl", "nautilus_trades"):
        assert getattr(snap, attr) is None


def test_fetch_empty_maps_heartbeat_fields_none():
    snap = _fetch_with_empty_maps()
    for attr in ("heartbeat_ts", "heartbeat_phase", "heartbeat_age_s"):
        assert getattr(snap, attr) is None


def test_fetch_empty_maps_meta_health_fields_none():
    snap = _fetch_with_empty_maps()
    for attr in ("meta_rm", "meta_status", "m1_proc", "m2_heartbeat", "m3_data"):
        assert getattr(snap, attr) is None


def test_fetch_empty_maps_obf_top_empty_list():
    assert _fetch_with_empty_maps().obf_top == []


# ---------------------------------------------------------------------------
# All-None DataSnapshot: formatting helpers must not crash
# ---------------------------------------------------------------------------

def test_all_none_snap_fmt_float_fields():
    snap = DataSnapshot()
    for name in ("vel_div", "w50_velocity", "w750_velocity", "instability_50",
                 "acb_boost", "acb_beta", "funding_btc", "dvol_btc", "fng",
                 "vix", "capital", "pnl", "rm"):
        rendered = fmt_float(getattr(snap, name))
        assert rendered == "--", "fmt_float({}=None) should be '--', got {!r}".format(name, rendered)


def test_all_none_snap_fmt_pnl_fields():
    snap = DataSnapshot()
    for name in ("pnl", "nautilus_pnl"):
        color, text = fmt_pnl(getattr(snap, name))
        assert text == "--", "fmt_pnl({}=None) text should be '--'".format(name)
        assert color == "white"


def test_all_none_snap_color_age_fields():
    snap = DataSnapshot()
    for name in ("scan_age_s", "exf_age_s", "esof_age_s", "heartbeat_age_s"):
        color, text = color_age(getattr(snap, name))
        assert text == "N/A", "color_age({}=None) text should be 'N/A'".format(name)
        assert color == "dim"


def test_all_none_snap_rm_bar():
    assert rm_bar(DataSnapshot().rm) == "--"


def test_all_none_snap_posture_color():
    assert posture_color(DataSnapshot().posture) == "dim"


def test_all_none_snap_status_color():
    assert status_color(DataSnapshot().meta_status) == "dim"
|
||||
291
Observability/TUI/test_dolphin_tui_prefect_offline.py
Executable file
291
Observability/TUI/test_dolphin_tui_prefect_offline.py
Executable file
@@ -0,0 +1,291 @@
|
||||
"""
|
||||
test_dolphin_tui_prefect_offline.py
|
||||
|
||||
Verifies Prefect-offline behavior of DolphinDataFetcher and PrefectPanel.
|
||||
|
||||
Tests:
|
||||
- test_fetch_prefect_returns_false_on_connection_error
|
||||
- test_fetch_prefect_returns_false_on_timeout
|
||||
- test_fetch_prefect_returns_false_on_non_200
|
||||
- test_fetch_prefect_does_not_crash
|
||||
- test_snapshot_prefect_offline_fields
|
||||
- test_prefect_panel_shows_offline_text
|
||||
|
||||
All tests are self-contained and do NOT require a live Prefect instance.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import sys
|
||||
import os
|
||||
import unittest
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
import types
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Make sure the TUI module is importable from this directory
|
||||
# ---------------------------------------------------------------------------
|
||||
sys.path.insert(0, os.path.dirname(__file__))
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Stub hazelcast so the import succeeds without the package
|
||||
# ---------------------------------------------------------------------------
|
||||
# Fake hazelcast module; only installed when the real package is absent.
_hz_stub = types.ModuleType("hazelcast")
_hz_stub.HazelcastClient = MagicMock()
sys.modules.setdefault("hazelcast", _hz_stub)
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Stub textual so dolphin_tui imports cleanly without a terminal
|
||||
# ---------------------------------------------------------------------------
|
||||
for _stub_name in ("textual", "textual.app", "textual.containers", "textual.widgets"):
    sys.modules.setdefault(_stub_name, types.ModuleType(_stub_name))

_textual_app = sys.modules["textual.app"]
_textual_app.App = object
_textual_app.ComposeResult = object

_textual_containers = sys.modules["textual.containers"]
_textual_containers.Horizontal = object

_textual_widgets = sys.modules["textual.widgets"]
_textual_widgets.Static = object
_textual_widgets.VerticalScroll = object
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Stub httpx — we will patch individual methods per test
|
||||
# ---------------------------------------------------------------------------
|
||||
if "httpx" not in sys.modules:
    _httpx_stub = types.ModuleType("httpx")
    _httpx_stub.AsyncClient = MagicMock()
    # Real exception subclasses so `except httpx.ConnectError:` works.
    for _exc_name in ("ConnectError", "TimeoutException"):
        setattr(_httpx_stub, _exc_name, type(_exc_name, (Exception,), {}))
    sys.modules["httpx"] = _httpx_stub
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Now import the module under test
|
||||
# ---------------------------------------------------------------------------
|
||||
from dolphin_tui import ( # noqa: E402
|
||||
DolphinDataFetcher,
|
||||
DataSnapshot,
|
||||
PrefectPanel,
|
||||
)
|
||||
import httpx # noqa: E402 (the stub or real module)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _run(coro):
|
||||
"""Run a coroutine in the current event loop."""
|
||||
return asyncio.get_event_loop().run_until_complete(coro)
|
||||
|
||||
|
||||
def _make_fetcher() -> DolphinDataFetcher:
    """Fetcher aimed at a dummy HZ endpoint, with background reconnects disabled."""
    fetcher = DolphinDataFetcher(hz_host="localhost", hz_port=5701)
    # Stub the reconnect starter so no background tasks are spawned.
    fetcher._start_reconnect = MagicMock()
    return fetcher
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Tests
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestFetchPrefectConnectionError(unittest.TestCase):
    """test_fetch_prefect_returns_false_on_connection_error

    fetch_prefect() must report (False, []) when the TCP connect fails.
    """

    def test_fetch_prefect_returns_false_on_connection_error(self):
        fetcher = _make_fetcher()

        client = AsyncMock()
        client.__aenter__ = AsyncMock(return_value=client)
        client.__aexit__ = AsyncMock(return_value=False)
        client.get = AsyncMock(side_effect=httpx.ConnectError("refused"))

        with patch.object(httpx, "AsyncClient", return_value=client):
            outcome = _run(fetcher.fetch_prefect())

        self.assertEqual(outcome, (False, []))


class TestFetchPrefectTimeout(unittest.TestCase):
    """test_fetch_prefect_returns_false_on_timeout

    fetch_prefect() must report (False, []) when the request times out.
    """

    def test_fetch_prefect_returns_false_on_timeout(self):
        fetcher = _make_fetcher()

        client = AsyncMock()
        client.__aenter__ = AsyncMock(return_value=client)
        client.__aexit__ = AsyncMock(return_value=False)
        client.get = AsyncMock(side_effect=httpx.TimeoutException("timed out"))

        with patch.object(httpx, "AsyncClient", return_value=client):
            outcome = _run(fetcher.fetch_prefect())

        self.assertEqual(outcome, (False, []))


class TestFetchPrefectNon200(unittest.TestCase):
    """test_fetch_prefect_returns_false_on_non_200

    fetch_prefect() must report unhealthy when /api/health is not HTTP 200.
    """

    def test_fetch_prefect_returns_false_on_non_200(self):
        fetcher = _make_fetcher()

        response = MagicMock()
        response.status_code = 503

        client = AsyncMock()
        client.__aenter__ = AsyncMock(return_value=client)
        client.__aexit__ = AsyncMock(return_value=False)
        client.get = AsyncMock(return_value=response)

        with patch.object(httpx, "AsyncClient", return_value=client):
            healthy, flows = _run(fetcher.fetch_prefect())

        self.assertFalse(healthy, "healthy must be False when /api/health returns 503")
        # flows may be empty or populated depending on whether the flows call
        # was made; only the healthy flag is contractual here.
        self.assertIsInstance(flows, list)
|
||||
|
||||
|
||||
class TestFetchPrefectDoesNotCrash(unittest.TestCase):
    """test_fetch_prefect_does_not_crash

    fetch_prefect() must never raise, even on unexpected exceptions.
    """

    def test_fetch_prefect_does_not_crash_on_unexpected_exception(self):
        fetcher = _make_fetcher()

        # Even constructing the HTTP client blowing up must be contained.
        with patch.object(httpx, "AsyncClient", side_effect=RuntimeError("unexpected")):
            try:
                outcome = _run(fetcher.fetch_prefect())
            except Exception as exc:
                self.fail(f"fetch_prefect() raised unexpectedly: {exc}")

        self.assertEqual(outcome, (False, []))

    def test_fetch_prefect_does_not_crash_on_os_error(self):
        fetcher = _make_fetcher()

        client = AsyncMock()
        client.__aenter__ = AsyncMock(return_value=client)
        client.__aexit__ = AsyncMock(return_value=False)
        client.get = AsyncMock(side_effect=OSError("network unreachable"))

        with patch.object(httpx, "AsyncClient", return_value=client):
            try:
                outcome = _run(fetcher.fetch_prefect())
            except Exception as exc:
                self.fail(f"fetch_prefect() raised unexpectedly: {exc}")

        self.assertEqual(outcome, (False, []))


class TestSnapshotPrefectOfflineFields(unittest.TestCase):
    """test_snapshot_prefect_offline_fields

    When fetch_prefect() yields (False, []), the assembled DataSnapshot
    must carry prefect_healthy=False and prefect_flows=[].
    """

    def test_snapshot_prefect_offline_fields(self):
        snap = DataSnapshot(prefect_healthy=False, prefect_flows=[])

        self.assertFalse(snap.prefect_healthy, "prefect_healthy must be False")
        self.assertEqual(snap.prefect_flows, [], "prefect_flows must be empty list")

    def test_snapshot_default_is_offline(self):
        """A default-constructed DataSnapshot already represents the offline state."""
        snap = DataSnapshot()

        self.assertFalse(snap.prefect_healthy)
        self.assertEqual(snap.prefect_flows, [])
|
||||
|
||||
|
||||
class TestPrefectPanelShowsOfflineText(unittest.TestCase):
    """test_prefect_panel_shows_offline_text

    When DataSnapshot.prefect_healthy=False (or no snapshot is set),
    PrefectPanel._render_markup() must contain "PREFECT OFFLINE"; when
    healthy it must show "PREFECT ✓" instead, and update_data() must not
    raise while offline.
    """

    def test_prefect_panel_shows_offline_text_when_unhealthy(self):
        panel = PrefectPanel()
        snap = DataSnapshot(prefect_healthy=False, prefect_flows=[])

        panel._snap = snap
        markup = panel._render_markup()

        self.assertIn(
            "PREFECT OFFLINE",
            markup,
            f"Expected 'PREFECT OFFLINE' in panel markup, got:\n{markup}",
        )

    def test_prefect_panel_shows_offline_text_when_snap_is_none(self):
        """Panel must show PREFECT OFFLINE when no snapshot has been set."""
        panel = PrefectPanel()
        # _snap defaults to None

        markup = panel._render_markup()

        self.assertIn(
            "PREFECT OFFLINE",
            markup,
            f"Expected 'PREFECT OFFLINE' when snap is None, got:\n{markup}",
        )

    def test_prefect_panel_does_not_show_offline_when_healthy(self):
        """Sanity check: healthy snapshot should NOT show PREFECT OFFLINE."""
        panel = PrefectPanel()
        snap = DataSnapshot(prefect_healthy=True, prefect_flows=[])

        panel._snap = snap
        markup = panel._render_markup()

        # Fix: the failure message was an f-string with no placeholders (F541);
        # a plain string carries the same text without the lint smell.
        self.assertNotIn(
            "PREFECT OFFLINE",
            markup,
            "'PREFECT OFFLINE' should not appear when prefect_healthy=True",
        )
        self.assertIn("PREFECT ✓", markup)

    def test_update_data_does_not_crash_when_offline(self):
        """update_data() must not raise when called with an offline snapshot."""
        panel = PrefectPanel()
        # Patch the inherited update() method (from Static/object) so it's a no-op
        panel.update = MagicMock()

        snap = DataSnapshot(prefect_healthy=False, prefect_flows=[])

        try:
            panel.update_data(snap)
        except Exception as exc:
            self.fail(f"update_data() raised unexpectedly: {exc}")

        panel.update.assert_called_once()
|
||||
|
||||
|
||||
if __name__ == "__main__":  # pragma: no cover - direct CLI invocation
    unittest.main()
|
||||
335
Observability/TUI/test_dolphin_tui_reconnect.py
Executable file
335
Observability/TUI/test_dolphin_tui_reconnect.py
Executable file
@@ -0,0 +1,335 @@
|
||||
"""
|
||||
test_dolphin_tui_reconnect.py
|
||||
|
||||
Verifies the HZ reconnect loop behavior of DolphinDataFetcher.
|
||||
|
||||
Tests:
|
||||
- test_hz_connected_flag_set_on_connect
|
||||
- test_hz_disconnected_flag_on_failure
|
||||
- test_reconnect_within_10s
|
||||
- test_backoff_resets_on_success
|
||||
- test_fetch_returns_none_fields_when_disconnected
|
||||
|
||||
All tests are self-contained and do NOT require a live Hazelcast instance.
|
||||
Backoff delays are patched to 0.05 s so the suite runs in seconds.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import sys
|
||||
import os
|
||||
import time
|
||||
import unittest
|
||||
from unittest.mock import AsyncMock, MagicMock, patch, call
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Make sure the TUI module is importable from this directory
|
||||
# ---------------------------------------------------------------------------
|
||||
sys.path.insert(0, os.path.dirname(__file__))
|
||||
|
||||
# Provide a stub for hazelcast so the import succeeds even without the package
|
||||
import types
|
||||
|
||||
_hz_stub = types.ModuleType("hazelcast")
|
||||
_hz_stub.HazelcastClient = MagicMock()
|
||||
sys.modules.setdefault("hazelcast", _hz_stub)
|
||||
|
||||
# Provide stubs for textual and httpx so dolphin_tui imports cleanly
|
||||
for _mod in [
|
||||
"textual",
|
||||
"textual.app",
|
||||
"textual.containers",
|
||||
"textual.widgets",
|
||||
]:
|
||||
if _mod not in sys.modules:
|
||||
_stub = types.ModuleType(_mod)
|
||||
sys.modules[_mod] = _stub
|
||||
|
||||
# Minimal textual stubs
|
||||
_textual_app = sys.modules["textual.app"]
|
||||
_textual_app.App = object
|
||||
_textual_app.ComposeResult = object
|
||||
|
||||
_textual_containers = sys.modules["textual.containers"]
|
||||
_textual_containers.Horizontal = object
|
||||
|
||||
_textual_widgets = sys.modules["textual.widgets"]
|
||||
_textual_widgets.Static = object
|
||||
_textual_widgets.VerticalScroll = object
|
||||
|
||||
if "httpx" not in sys.modules:
|
||||
_httpx_stub = types.ModuleType("httpx")
|
||||
_httpx_stub.AsyncClient = MagicMock()
|
||||
sys.modules["httpx"] = _httpx_stub
|
||||
|
||||
from dolphin_tui import ( # noqa: E402
|
||||
DolphinDataFetcher,
|
||||
DataSnapshot,
|
||||
RECONNECT_INIT_S,
|
||||
RECONNECT_MULT,
|
||||
RECONNECT_MAX_S,
|
||||
)
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
FAST_BACKOFF = 0.05 # seconds — replaces 5 s initial delay in tests
|
||||
|
||||
|
||||
def _make_mock_client() -> MagicMock:
|
||||
"""Return a minimal mock that looks like a HazelcastClient."""
|
||||
client = MagicMock()
|
||||
client.shutdown = MagicMock()
|
||||
return client
|
||||
|
||||
|
||||
def _run(coro):
|
||||
"""Run a coroutine in a fresh event loop."""
|
||||
return asyncio.get_event_loop().run_until_complete(coro)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Tests
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestHZConnectedFlagOnConnect(unittest.IsolatedAsyncioTestCase):
    """test_hz_connected_flag_set_on_connect

    A successful connect_hz() call must set hz_connected=True and store
    the client handle.
    """

    async def test_hz_connected_flag_set_on_connect(self):
        mock_client = _make_mock_client()

        fetcher = DolphinDataFetcher(hz_host="localhost", hz_port=5701)

        # Patch the (stubbed) hazelcast constructor so connect_hz() "succeeds"
        # without a live cluster.
        with patch("hazelcast.HazelcastClient", return_value=mock_client):
            result = await fetcher.connect_hz()

        self.assertTrue(result, "connect_hz() should return True on success")
        self.assertTrue(fetcher.hz_connected, "hz_connected must be True after successful connect")
        self.assertIs(fetcher.hz_client, mock_client, "hz_client must be the returned client")

        # Clean up any background task
        fetcher._running = False
        if fetcher._reconnect_task and not fetcher._reconnect_task.done():
            fetcher._reconnect_task.cancel()
|
||||
|
||||
|
||||
class TestHZDisconnectedFlagOnFailure(unittest.IsolatedAsyncioTestCase):
    """test_hz_disconnected_flag_on_failure

    When HazelcastClient() raises, connect_hz() must return False and
    hz_connected must be False.
    """

    async def test_hz_disconnected_flag_on_failure(self):
        fetcher = DolphinDataFetcher(hz_host="localhost", hz_port=5701)

        with patch("hazelcast.HazelcastClient", side_effect=Exception("Connection refused")):
            result = await fetcher.connect_hz()

        self.assertFalse(result, "connect_hz() should return False on failure")
        self.assertFalse(fetcher.hz_connected, "hz_connected must be False after failed connect")
        self.assertIsNone(fetcher.hz_client, "hz_client must remain None after failed connect")

        # Stop the reconnect loop that was started by connect_hz on failure
        fetcher._running = False
        if fetcher._reconnect_task and not fetcher._reconnect_task.done():
            fetcher._reconnect_task.cancel()
            try:
                await fetcher._reconnect_task
            # CancelledError derives from BaseException on 3.8+, so it is
            # listed explicitly alongside Exception.
            except (asyncio.CancelledError, Exception):
                pass
|
||||
|
||||
|
||||
class TestReconnectWithin10s(unittest.IsolatedAsyncioTestCase):
    """test_reconnect_within_10s

    Scenario:
    1. Initial connect fails → hz_connected=False, reconnect loop starts.
    2. After ~0.1 s the mock is switched to succeed.
    3. hz_connected must become True within 10 s of the mock being restored.

    Backoff is patched to FAST_BACKOFF (0.05 s) so the test completes quickly.
    """

    async def test_reconnect_within_10s(self):
        mock_client = _make_mock_client()

        # State shared between the mock and the test. The factory closure
        # reads the current value of this local each time it is called.
        should_succeed = False

        def hz_client_factory(**kwargs):
            if should_succeed:
                return mock_client
            raise Exception("Connection refused")

        fetcher = DolphinDataFetcher(hz_host="localhost", hz_port=5701)
        # Patch backoff to be very short so the test is fast
        fetcher._reconnect_backoff_initial = FAST_BACKOFF
        fetcher._reconnect_backoff = FAST_BACKOFF
        fetcher._reconnect_backoff_max = FAST_BACKOFF * 3

        with patch("hazelcast.HazelcastClient", side_effect=hz_client_factory):
            # Start the reconnect loop manually (simulates connect_hz failing)
            fetcher._start_reconnect()

            # Let the loop spin for a moment while HZ is "down"
            await asyncio.sleep(FAST_BACKOFF * 2)
            self.assertFalse(fetcher.hz_connected, "Should still be disconnected while HZ is down")

            # "Restart" HZ
            should_succeed = True
            t0 = time.monotonic()

            # Wait up to 10 s for reconnect
            deadline = 10.0
            while not fetcher.hz_connected and (time.monotonic() - t0) < deadline:
                await asyncio.sleep(0.05)

            elapsed = time.monotonic() - t0
            self.assertTrue(
                fetcher.hz_connected,
                f"hz_connected must be True within 10 s of HZ restart (elapsed: {elapsed:.2f}s)",
            )
            self.assertLess(elapsed, 10.0, f"Reconnect took too long: {elapsed:.2f}s")

        # Cleanup
        await fetcher.disconnect_hz()
|
||||
|
||||
|
||||
class TestBackoffResetsOnSuccess(unittest.IsolatedAsyncioTestCase):
    """test_backoff_resets_on_success

    After a successful reconnect the backoff delay must be reset to the
    initial value (RECONNECT_INIT_S / patched FAST_BACKOFF).
    """

    async def test_backoff_resets_on_success(self):
        mock_client = _make_mock_client()

        call_count = 0

        # Fail exactly once, then succeed — forces one backoff cycle.
        def hz_client_factory(**kwargs):
            nonlocal call_count
            call_count += 1
            if call_count == 1:
                raise Exception("First attempt fails")
            return mock_client

        fetcher = DolphinDataFetcher(hz_host="localhost", hz_port=5701)
        fetcher._reconnect_backoff_initial = FAST_BACKOFF
        fetcher._reconnect_backoff = FAST_BACKOFF
        fetcher._reconnect_backoff_max = FAST_BACKOFF * 10

        with patch("hazelcast.HazelcastClient", side_effect=hz_client_factory):
            fetcher._start_reconnect()

            # Wait for reconnect to succeed
            deadline = 5.0
            t0 = time.monotonic()
            while not fetcher.hz_connected and (time.monotonic() - t0) < deadline:
                await asyncio.sleep(0.05)

            self.assertTrue(fetcher.hz_connected, "Should have reconnected")
            self.assertAlmostEqual(
                fetcher._reconnect_backoff,
                FAST_BACKOFF,
                delta=1e-9,
                msg="Backoff must reset to initial value after successful reconnect",
            )

        await fetcher.disconnect_hz()
|
||||
|
||||
|
||||
class TestFetchReturnsNoneFieldsWhenDisconnected(unittest.IsolatedAsyncioTestCase):
    """test_fetch_returns_none_fields_when_disconnected

    When hz_client is None (disconnected), fetch() must return a DataSnapshot
    with hz_connected=False and all HZ-derived fields as None.
    """

    async def test_fetch_returns_none_fields_when_disconnected(self):
        fetcher = DolphinDataFetcher(hz_host="localhost", hz_port=5701)
        # Ensure no client is set
        fetcher.hz_client = None
        fetcher.hz_connected = False

        # Patch fetch_prefect so we don't need a live Prefect server
        fetcher.fetch_prefect = AsyncMock(return_value=(False, []))
        # Patch tail_log so we don't need a real log file
        fetcher.tail_log = MagicMock(return_value=[])
        # Prevent reconnect loop from starting during fetch
        fetcher._start_reconnect = MagicMock()

        snap = await fetcher.fetch()

        self.assertIsInstance(snap, DataSnapshot)
        self.assertFalse(snap.hz_connected, "hz_connected must be False when disconnected")

        # All HZ-derived numeric/string fields must be None
        hz_fields = [
            "scan_number", "vel_div", "w50_velocity", "w750_velocity",
            "instability_50", "scan_bridge_ts", "scan_age_s",
            "acb_boost", "acb_beta",
            "funding_btc", "dvol_btc", "fng", "taker", "vix", "ls_btc",
            "acb_ready", "acb_present", "exf_age_s",
            "moon_phase", "mercury_retro", "liquidity_session",
            "market_cycle_pos", "esof_age_s",
            "posture", "rm", "cat1", "cat2", "cat3", "cat4", "cat5",
            "capital", "drawdown", "peak_capital", "pnl", "trades",
            "nautilus_capital", "nautilus_pnl", "nautilus_trades",
            "nautilus_posture", "nautilus_param_hash",
            "heartbeat_ts", "heartbeat_phase", "heartbeat_flow", "heartbeat_age_s",
            "meta_rm", "meta_status",
            "m1_proc", "m2_heartbeat", "m3_data", "m4_cp", "m5_coh",
        ]
        for field_name in hz_fields:
            value = getattr(snap, field_name)
            self.assertIsNone(
                value,
                f"Field '{field_name}' must be None when disconnected, got {value!r}",
            )

        # Collection fields must be empty
        self.assertEqual(snap.asset_prices, {}, "asset_prices must be empty dict when disconnected")
        self.assertEqual(snap.obf_top, [], "obf_top must be empty list when disconnected")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Backoff constants sanity check (not a reconnect test, but validates spec)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestBackoffConstants(unittest.TestCase):
    """Verify the module-level backoff constants match the spec."""

    def test_reconnect_init_s(self):
        self.assertEqual(RECONNECT_INIT_S, 5.0, "Initial backoff must be 5 s per spec")

    def test_reconnect_multiplier(self):
        self.assertEqual(RECONNECT_MULT, 1.5, "Backoff multiplier must be 1.5x per spec")

    def test_reconnect_max_s(self):
        self.assertEqual(RECONNECT_MAX_S, 60.0, "Max backoff must be 60 s per spec")

    def test_backoff_sequence(self):
        """Verify the exponential sequence: 5 → 7.5 → 11.25 → ... capped at 60."""
        # Build the first 11 delays (initial value + 10 growth steps).
        sequence = [RECONNECT_INIT_S]
        while len(sequence) < 11:
            sequence.append(min(sequence[-1] * RECONNECT_MULT, RECONNECT_MAX_S))

        self.assertAlmostEqual(sequence[0], 5.0)
        self.assertAlmostEqual(sequence[1], 7.5)
        self.assertAlmostEqual(sequence[2], 11.25)
        self.assertTrue(all(v <= RECONNECT_MAX_S for v in sequence))
        self.assertEqual(sequence[-1], RECONNECT_MAX_S)
|
||||
|
||||
|
||||
# Allow running this test module directly: `python test_dolphin_tui_reconnect.py`
if __name__ == "__main__":
    unittest.main()
|
||||
63
Observability/TUI/textual_poc.py
Executable file
63
Observability/TUI/textual_poc.py
Executable file
@@ -0,0 +1,63 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Minimal Textual proof-of-concept.
|
||||
Run: python3 textual_poc.py
|
||||
Press q to quit.
|
||||
"""
|
||||
from textual.app import App, ComposeResult
|
||||
from textual.widgets import Static, Header, Footer
|
||||
from textual.containers import Horizontal
|
||||
import time
|
||||
|
||||
|
||||
class Box(Static):
    """Bordered demo panel that initially displays its own widget id."""

    def on_mount(self) -> None:
        # Fall back to a generic label when no id was assigned.
        initial_label = self.id or "box"
        self.update(initial_label)
|
||||
|
||||
|
||||
class PocApp(App):
    """Minimal Textual app: a clock bar, three bordered panels, a footer line.

    Proves that Textual rendering, CSS, bindings and timers work in this
    environment. Press q to quit.
    """

    # Inline stylesheet: dark screen, green-bordered panels, cyan clock strip.
    CSS = """
    Screen { background: #111; }
    Box {
        border: solid green;
        height: 8;
        content-align: center middle;
        color: white;
    }
    #clock { border: solid cyan; height: 3; }
    """

    BINDINGS = [("q", "quit", "Quit")]

    def compose(self) -> ComposeResult:
        # Layout: clock on top, three Boxes side by side, status line below.
        yield Static(id="clock")
        with Horizontal():
            yield Box("PANEL A\nstatic text", id="panel_a")
            yield Box("PANEL B\nstatic text", id="panel_b")
            yield Box("PANEL C\nstatic text", id="panel_c")
        yield Static("[green]q=quit[/green] | Textual POC running OK")

    def on_mount(self) -> None:
        # Refresh once per second; also tick immediately so the UI isn't blank.
        self.set_interval(1, self._tick)
        self._tick()

    def _tick(self) -> None:
        """Redraw the clock and the three demo panels with fresh values."""
        t = time.strftime("%Y-%m-%d %H:%M:%S UTC", time.gmtime())
        self.query_one("#clock", Static).update(
            f"[bold cyan]🐬 DOLPHIN TUI POC[/bold cyan] | {t} | Textual is working"
        )
        # Update panels with incrementing counter
        n = int(time.time()) % 100
        self.query_one("#panel_a", Box).update(
            f"[green]PANEL A[/green]\nvalue = {n}\nstatus = OK"
        )
        self.query_one("#panel_b", Box).update(
            f"[yellow]PANEL B[/yellow]\nvalue = {n*2}\nstatus = WARN"
        )
        self.query_one("#panel_c", Box).update(
            f"[red]PANEL C[/red]\nvalue = {n*3}\nstatus = CRIT"
        )
|
||||
|
||||
|
||||
# Script entry point — runs the proof-of-concept app full-screen.
if __name__ == "__main__":
    PocApp().run()
|
||||
773
Observability/dolphin_status.py
Executable file
773
Observability/dolphin_status.py
Executable file
@@ -0,0 +1,773 @@
|
||||
#!/usr/bin/env python3
|
||||
"""DOLPHIN live status — v6
|
||||
0.5s poll. SIG/TRD/FIL gear rows + last-5-trades + CH persistence + V7 exit cmp.
|
||||
|
||||
Run: source /home/dolphin/siloqy_env/bin/activate && python dolphin_status.py
|
||||
Quit: Ctrl-C
|
||||
"""
|
||||
# v1–v5 archived as dolphin_status_v{1..5}.py
|
||||
# v6: exit-comparison overlay (V7 preview), net-pnl pct fix
|
||||
|
||||
import json, re, threading, time, sys, urllib.request, urllib.parse
|
||||
from collections import deque
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
import hazelcast
|
||||
|
||||
# ── ClickHouse fire-and-forget write ─────────────────────────────────────────
|
||||
_CH_URL = "http://localhost:8123"
|
||||
_CH_USER = "dolphin"
|
||||
_CH_PASS = "dolphin_ch_2026"
|
||||
_CH_Q: deque = deque(maxlen=500)
|
||||
|
||||
def _ch_worker():
    """Daemon loop: every 2 s drain _CH_Q and POST the rows to ClickHouse.

    Rows are sent as JSONEachRow in a single batch. All network errors are
    swallowed — observability writes are best-effort and must never affect
    the trading process.
    """
    while True:
        time.sleep(2)
        rows = []
        while _CH_Q:
            try: rows.append(_CH_Q.popleft())
            except IndexError: break
        if not rows: continue
        body = "\n".join(json.dumps(r) for r in rows).encode()
        url = f"{_CH_URL}/?database=dolphin&query=INSERT+INTO+status_snapshots+FORMAT+JSONEachRow"
        req = urllib.request.Request(url, data=body, method="POST")
        req.add_header("X-ClickHouse-User", _CH_USER)
        req.add_header("X-ClickHouse-Key", _CH_PASS)
        req.add_header("Content-Type", "application/octet-stream")
        try:
            # FIX: close the HTTP response. The previous bare urlopen() call
            # leaked the response object/socket on every successful insert.
            with urllib.request.urlopen(req, timeout=4) as resp:
                resp.read()
        except Exception:
            pass  # observability is non-critical
|
||||
|
||||
# Start the ClickHouse drain thread at import time; daemon=True so it never
# blocks interpreter shutdown.
threading.Thread(target=_ch_worker, daemon=True, name="ch-status").start()
|
||||
|
||||
def ch_put(row: dict) -> None:
    """Queue *row* for asynchronous insertion into ClickHouse.

    Non-blocking: the daemon writer thread drains the queue every 2 s. The
    bounded deque (maxlen=500) drops the oldest rows under sustained backlog,
    which is acceptable for observability data.
    """
    _CH_Q.append(row)
|
||||
|
||||
# ── Trade log parser ──────────────────────────────────────────────────────────
|
||||
_TRADER_LOG = Path("/mnt/dolphinng5_predict/prod/supervisor/logs/nautilus_trader.log")
|
||||
# Capture the JSON dict only — stop at first } closing the payload.
|
||||
# Lines may have a trailing tag like [v2_gold_fix_v50-v750] after the dict.
|
||||
_RE_ENTRY = re.compile(r"\[(.+?)\] ENTRY: (\{.+?\})(?:\s*\[.*\])?$")
|
||||
_RE_EXIT = re.compile(r"\[(.+?)\] EXIT: (\{.+?\})(?:\s*\[.*\])?$")
|
||||
|
||||
def _parse_log_dict(raw: str) -> dict:
|
||||
"""Parse a Python dict repr from a log line. Handles nan and single-quoted strings."""
|
||||
import ast
|
||||
# Replace nan/inf with JSON-safe equivalents before parsing
|
||||
cleaned = raw.replace(": nan", ": null").replace(": inf", ": null").replace(": -inf", ": null")
|
||||
try:
|
||||
return ast.literal_eval(raw) # handles all Python literal forms incl. nan
|
||||
except Exception:
|
||||
pass
|
||||
try:
|
||||
return json.loads(cleaned.replace("'", '"'))
|
||||
except Exception:
|
||||
raise ValueError(f"unparseable: {raw[:80]}")
|
||||
|
||||
def _last_n_trades(n=5):
    """Parse last N completed trades from supervisor log. Returns list of dicts.

    Scans (up to) the last 4000 log lines, pairing ENTRY lines with their
    matching EXIT lines by trade_id. Only trades whose ENTRY is still inside
    the scanned window are returned. Per-line parse failures are ignored —
    this is a best-effort observability view.
    """
    try:
        lines = _TRADER_LOG.read_text(errors="replace").splitlines()[-4000:]
    except Exception:
        return []  # log missing/unreadable — show nothing rather than crash
    entries = {}   # trade_id -> entry payload (awaiting its EXIT)
    trades = []    # completed trades, in exit order
    for line in lines:
        m = _RE_ENTRY.search(line)
        if m:
            try:
                d = _parse_log_dict(m.group(2))
                entries[d["trade_id"]] = {"ts": m.group(1), **d}
            except Exception:
                pass
        m = _RE_EXIT.search(line)
        if m:
            try:
                d = _parse_log_dict(m.group(2))
                tid = d.get("trade_id")
                # pop() so a trade_id can only be completed once
                if tid and tid in entries:
                    e = entries.pop(tid)
                    trades.append({**e, "exit_ts": m.group(1),
                                   "reason": d.get("reason","?"),
                                   "pnl_pct": d.get("pnl_pct", 0),
                                   "net_pnl": d.get("net_pnl", 0),
                                   "bars_held": d.get("bars_held", 0)})
            except Exception:
                pass
    return trades[-n:]
|
||||
|
||||
CLEAR = "\033[2J\033[H"
|
||||
BOLD = "\033[1m"; DIM = "\033[2m"; RST = "\033[0m"
|
||||
GREEN = "\033[32m"; YELLOW = "\033[33m"; RED = "\033[31m"; CYAN = "\033[36m"
|
||||
ORANGE = "\033[38;5;208m"
|
||||
|
||||
PC = {"APEX": GREEN, "STALKER": YELLOW, "TURTLE": ORANGE, "HIBERNATE": RED}
|
||||
SC = {"GREEN": GREEN, "DEGRADED": YELLOW, "CRITICAL": ORANGE, "DEAD": RED}
|
||||
|
||||
# Thresholds from nautilus_event_trader.py
|
||||
VEL_DIV_THRESHOLD = -0.020 # signal fires when vel_div < this
|
||||
VEL_DIV_EXTREME = -0.050 # extreme bearish
|
||||
VEL_DIV_WARN = -0.010 # approaching threshold (yellow)
|
||||
VEL_DIV_CLOSE = -0.015 # nearly there (orange→yellow)
|
||||
VOL_P60 = 0.00026414 # BTC 50-bar realised vol p60 — MASTER GATE
|
||||
BTC_VOL_WINDOW = 50 # bars used for vol calc
|
||||
|
||||
FIXED_TP_PCT = 0.0095 # BLUE TP target (0.95%)
|
||||
MAX_HOLD_BARS = 250 # BLUE max hold bars
|
||||
|
||||
START_CAP = None
|
||||
CAP_PEAK = None
|
||||
|
||||
_EXIT_TRACKER: dict = {} # (asset, entry_price) → accumulated V7 comparison state
|
||||
|
||||
|
||||
def _age(ts):
|
||||
if not ts: return "?"
|
||||
s = time.time() - ts
|
||||
if s < 0: return "0s"
|
||||
if s < 60: return f"{s:.0f}s"
|
||||
if s < 3600: return f"{s/60:.0f}m"
|
||||
return f"{s/3600:.1f}h"
|
||||
|
||||
def _bar(v, w=20):
|
||||
v = max(0.0, min(1.0, v))
|
||||
return "█" * round(v * w) + "░" * (w - round(v * w))
|
||||
|
||||
def _get(hz, map_name, key):
|
||||
try:
|
||||
raw = hz.get_map(map_name).blocking().get(key)
|
||||
return json.loads(raw) if raw else {}
|
||||
except Exception:
|
||||
return {}
|
||||
|
||||
# ── Gear items ────────────────────────────────────────────────────────────────
|
||||
# Each returns (label, color, value_str)
|
||||
def _item(label, color, val=""):
    """Render one gear-row cell: a colored ● dot plus a dim "label:value" tag."""
    suffix = f":{val}" if val else ""
    return f"{color}●{RST}{DIM}{label}{suffix}{RST}"
|
||||
|
||||
def _vel_item(vel_div):
    """vel_div colored by distance to threshold (-0.02).

    Color scale (same output as an explicit per-band ladder):
      <= -0.020 GREEN  (at/past signal threshold, incl. extreme bearish)
      <= -0.015 YELLOW (nearly there)
      <= -0.010 ORANGE (approaching)
      otherwise RED    (far from threshold or outright positive)
    """
    formatted = f"{vel_div:+.4f}"
    if vel_div <= VEL_DIV_THRESHOLD:
        color = GREEN
    elif vel_div <= VEL_DIV_CLOSE:
        color = YELLOW
    elif vel_div <= VEL_DIV_WARN:
        color = ORANGE
    else:
        color = RED
    return _item("vel_div", color, formatted)
|
||||
|
||||
def signal_fired(vel_div, vol_ok, posture, acb_ready, exf_ok, halt):
    """True if ALL signal preconditions are green."""
    preconditions = (
        vel_div <= VEL_DIV_THRESHOLD,        # bearish divergence past trigger
        vol_ok,                              # master volatility gate
        posture not in ("HIBERNATE", "TURTLE"),  # risk posture allows entries
        acb_ready,
        exf_ok,
        not halt,
    )
    return all(preconditions)
|
||||
|
||||
def trade_can_execute(open_count, lev, abs_cap, daily_loss_ok, boost):
    """True when a new trade may be placed: flat book, leverage headroom,
    daily-loss limit intact, and a positive ACB boost."""
    if open_count != 0:
        # A position is already open — one trade at a time.
        return False
    if not (lev < abs_cap):
        # No leverage headroom (written this way so NaN leverage also blocks).
        return False
    return daily_loss_ok and boost > 0
|
||||
|
||||
OB_IMBALANCE_BIAS = -0.09 # from engine config: ob_imbalance_bias
|
||||
|
||||
def _best_fill_candidate(obf_universe):
|
||||
"""Pick best SHORT candidate from OBF universe.
|
||||
Criteria: negative imbalance (bearish pressure) + high fill_probability + low spread.
|
||||
Returns (symbol, asset_dict) or (None, {}).
|
||||
"""
|
||||
candidates = []
|
||||
for k, v in obf_universe.items():
|
||||
if not isinstance(v, dict) or "fill_probability" not in v:
|
||||
continue
|
||||
candidates.append((k, v))
|
||||
if not candidates:
|
||||
return None, {}
|
||||
# Score: fill_prob * (1 + bearish_imbalance_bonus) / (1 + spread_bps/10)
|
||||
def score(item):
|
||||
sym, a = item
|
||||
imb = float(a.get("imbalance", 0))
|
||||
fp = float(a.get("fill_probability", 0))
|
||||
sp = float(a.get("spread_bps", 99))
|
||||
dq = float(a.get("depth_quality", 0))
|
||||
# Bearish bias: reward negative imbalance, penalise positive
|
||||
imb_bonus = max(0.0, -imb) # 0..1 for imbalance in [-1,0]
|
||||
return fp * (1 + imb_bonus) * dq / max(0.1, sp)
|
||||
candidates.sort(key=score, reverse=True)
|
||||
return candidates[0]
|
||||
|
||||
|
||||
def _update_exit_tracker(positions, eng, live_price, ob_imbalance):
    """Accumulate MAE/MFE state for V7 comparison from live OBF prices.

    Tracks only the FIRST open position. Keyed by (asset, entry_price): a new
    key clears all prior state, so the tracker always holds exactly one trade.
    Returns the updated tracker dict, or None when there is nothing to track.
    """
    global _EXIT_TRACKER
    if not positions or live_price <= 0:
        # No position (or bad price feed) — reset so a stale trade isn't scored.
        _EXIT_TRACKER.clear()
        return None
    pos = positions[0]
    asset = pos.get("asset", "?")
    ep = float(pos.get("entry_price", 0) or 0)
    side = pos.get("side", "SHORT")
    if ep <= 0:
        return None
    key = (asset, ep)
    bar_idx = int(eng.get("bar_idx", 0) or 0)
    if key not in _EXIT_TRACKER:
        # New trade — drop any previous trade's state before seeding.
        _EXIT_TRACKER.clear()
        _EXIT_TRACKER[key] = {
            "entry_bar": bar_idx, "first_seen": time.time(),
            "peak_adverse": 0.0, "peak_favorable": 0.0,
            "prev_mae": 0.0, "prev_mfe": 0.0,
            "mae_velocity": 0.0, "mfe_velocity": 0.0,
            "prices": deque(maxlen=60),  # rolling price window
        }
    t = _EXIT_TRACKER[key]
    t["prices"].append(live_price)
    t["notional"] = float(pos.get("notional", 0) or 0)
    t["unrealized_pnl"] = float(pos.get("unrealized_pnl", 0) or 0)
    # Direction-aware PnL / excursion fractions (relative to entry price).
    if side == "SHORT":
        pnl = (ep - live_price) / ep
        mae = max(0.0, (live_price - ep) / ep)
        mfe = max(0.0, (ep - live_price) / ep)
    else:
        pnl = (live_price - ep) / ep
        mae = max(0.0, (ep - live_price) / ep)
        mfe = max(0.0, (live_price - ep) / ep)
    t["peak_adverse"] = max(t["peak_adverse"], mae)
    t["peak_favorable"] = max(t["peak_favorable"], mfe)
    # Velocities = change in excursion since the previous update call.
    t["mae_velocity"] = mae - t["prev_mae"]
    t["mfe_velocity"] = mfe - t["prev_mfe"]
    t["prev_mae"] = mae
    t["prev_mfe"] = mfe
    t["bars_held"] = max(0, bar_idx - t["entry_bar"])
    t["pnl_pct"] = pnl
    t["live_price"] = live_price
    t["ob_imbalance"] = ob_imbalance
    t["entry_price"] = ep
    t["side"] = side
    t["asset"] = asset
    return t
|
||||
|
||||
|
||||
def _v7_preview(t):
    """Simplified V7 decision from tracker state (MAE/MFE/time channels).

    Takes the dict produced by _update_exit_tracker and returns a preview
    decision dict (action/reason/pressure plus the inputs), or None when no
    tracker state exists. Thresholds mirror the V7 exit model
    (without the vol-adaptive terms — TODO confirm against the V7 source).
    """
    if not t:
        return None
    bh = t.get("bars_held", 0)
    # Fraction of the maximum hold window consumed (0..1).
    bf = min(1.0, bh / MAX_HOLD_BARS) if MAX_HOLD_BARS else 0
    pa = t["peak_adverse"]
    pf = t["peak_favorable"]
    mae = t["prev_mae"]
    mfe = t["prev_mfe"]
    pnl = t.get("pnl_pct", 0)
    # MAE risk — V7 floor thresholds (without vol-adaptive since TUI lacks full history)
    # Note the bands are cumulative: pa > 0.020 accrues all three increments.
    mae_risk = 0.0
    if pa > 0.005: mae_risk += 0.5
    if pa > 0.012: mae_risk += 0.8
    if pa > 0.020: mae_risk += 1.2
    # MAE-B: adverse acceleration
    if bh >= 3 and t["mae_velocity"] > 0 and mae > 0.003:
        mae_risk += 0.6
    # MAE-D: late-stage time-weighted
    if mae > 0.003 and bf > 0.60:
        mae_risk += (bf - 0.60) / 0.40 * 0.4
    # MFE risk — convexity decay (how much of the peak favorable move is gone)
    mfe_risk = 0.0
    decay = (pf - mfe) / (pf + 1e-9) if pf > 0 else 0.0
    if decay > 0.35 and t["mfe_velocity"] < 0 and pf > 0.01:
        mfe_risk += 1.5
    if decay > 0.20:
        mfe_risk += 0.3
    # Exit pressure (simplified: MAE + MFE channels weighted as V7)
    pressure = 2.0 * mae_risk + 2.5 * mfe_risk
    if bf > 0.80 and pnl < 0:
        pressure += 0.5
    if bf > 0.95:
        pressure += 1.0
    # Decision (mirrors V7 thresholds)
    if pressure > 2.0:
        action = "EXIT"
        reason = "V7_MAE_SL" if mae_risk > mfe_risk else "V7_COMPOSITE"
    elif pressure > 1.0:
        action = "RETRACT"
        reason = "V7_RISK_DOM"
    elif pressure < -0.5 and pnl > 0:
        # Unreachable with the current (non-negative) pressure terms; kept to
        # mirror the full V7 decision table.
        action = "EXTEND"
        reason = "V7_DIR_EDGE"
    else:
        action = "HOLD"
        reason = "\u2014"
    proj_usd = pnl * t.get("notional", 0)
    return {
        "action": action, "reason": reason, "pressure": pressure,
        "mae": pa, "mfe": pf, "mae_risk": mae_risk, "mfe_risk": mfe_risk,
        "bars_held": bh, "bars_frac": bf, "pnl_pct": pnl,
        "proj_usd": proj_usd,
    }
|
||||
|
||||
|
||||
def fill_row(obf_universe, acb, eng):
    """Row 3: signal → asset-pick → OBF liquidity → size → ORDER.

    Builds the FILL gear row as one space-joined string of colored status
    cells: universe freshness, the best SHORT candidate's order-book metrics,
    ACB sizing factors, and a final ORDER-readiness indicator.
    """
    f_items = []

    # ── Asset picker (IRP/ARS) ─────────────────────────────────────────────
    # "_n_assets"/"_n_stale" are metadata keys inside the universe map.
    n_assets = int(obf_universe.get("_n_assets", 0) if obf_universe else 0)
    n_stale = int(obf_universe.get("_n_stale", 0) if obf_universe else 0)
    n_fresh = n_assets - n_stale

    f_items.append(_item("universe",
                         GREEN if n_fresh >= 200 else (YELLOW if n_fresh >= 50 else RED),
                         f"{n_fresh}/{n_assets}"))

    sym, ab = _best_fill_candidate(obf_universe)
    if sym:
        fill_p = float(ab.get("fill_probability", 0))
        spread = float(ab.get("spread_bps", 99))
        dq = float(ab.get("depth_quality", 0))
        imb = float(ab.get("imbalance", 0))
        depth = float(ab.get("depth_1pct_usd", 0))

        # Best candidate asset
        asset_color = GREEN if fill_p >= 0.80 else (YELLOW if fill_p >= 0.50 else RED)
        f_items.append(_item("best", asset_color, sym[:6]))

        # OBF: fill probability
        f_items.append(_item("fill_p",
                             GREEN if fill_p >= 0.85 else (YELLOW if fill_p >= 0.60 else RED),
                             f"{fill_p:.2f}"))

        # OBF: spread
        f_items.append(_item("spread",
                             GREEN if spread <= 3 else (YELLOW if spread <= 8 else RED),
                             f"{spread:.1f}bps"))

        # OBF: depth quality
        f_items.append(_item("depth_q",
                             GREEN if dq >= 0.5 else (YELLOW if dq >= 0.1 else RED),
                             f"{dq:.2f}"))

        # OBF: imbalance direction (SHORT needs bearish = negative)
        imb_ok = imb < OB_IMBALANCE_BIAS  # confirmed bearish pressure
        f_items.append(_item("imb",
                             GREEN if imb_ok else
                             YELLOW if imb < 0 else
                             ORANGE if imb < 0.1 else RED,
                             f"{imb:+.2f}"))

        # OBF: depth USD
        f_items.append(_item("depth",
                             GREEN if depth >= 50_000 else (YELLOW if depth >= 10_000 else RED),
                             f"${depth/1000:.0f}k"))

    else:
        f_items.append(_item("OBF", RED, "no data"))

    # ── Sizing — ACB boost × proxy_B prank ────────────────────────────────
    # proxy_B prank not exposed in HZ snapshot; show ACB boost as sizing proxy
    boost = float(acb.get("boost", 1.0) if acb else 1.0)
    beta = float(acb.get("beta", 0.8) if acb else 0.8)
    f_items.append(_item("acb_boost",
                         GREEN if boost >= 1.5 else (YELLOW if boost >= 1.0 else ORANGE),
                         f"×{boost:.2f}"))

    f_items.append(_item("beta",
                         GREEN if beta >= 0.7 else (YELLOW if beta >= 0.4 else RED),
                         f"{beta:.2f}"))

    # ── ORDER indicator ────────────────────────────────────────────────────
    # Would an order fire if signal were green right now?
    open_count = len(eng.get("open_positions") or [])
    lev = float(eng.get("current_leverage", 0) or 0)
    abs_c = float(eng.get("leverage_abs_cap", 9.0) or 9.0)
    # NOTE: fill_p is only bound when sym is truthy; the trailing
    # `if sym else False` guard keeps this expression from evaluating then.
    order_ready = (
        sym is not None
        and fill_p >= 0.60
        and open_count == 0
        and lev < abs_c
        and boost > 0
    ) if sym else False

    if order_ready:
        f_items.append(f" {CYAN}{BOLD}◉ ORDER READY{RST}")
    else:
        f_items.append(f" {DIM}(order: waiting){RST}")

    return "  ".join(f_items)
|
||||
|
||||
|
||||
def gear_rows(eng, safe, acb, exf, hb, obf_universe=None):
    """Return three formatted rows: SIGNAL, TRADE gates, FILL path.

    Args:
        eng: engine_snapshot dict from HZ (vel_div, vol_ok, positions, leverage...).
        safe: DOLPHIN_SAFETY "latest" dict (posture, Rm, breakdown).
        acb: acb_boost feature dict (boost/cut/beta).
        exf: external-factors dict (may be falsy when the producer is down).
        hb: heartbeat dict with epoch "ts" and "phase".
        obf_universe: order-book-features universe dict, forwarded to fill_row().

    Returns:
        (sig_row, trade_row, fill_row_str) — three ANSI-coloured strings.
    """
    vel_div = float(eng.get("last_vel_div", 0) or 0)
    vol_ok = bool(eng.get("vol_ok", False))
    # Safety posture wins over the engine's own posture when both are present.
    posture = safe.get("posture") or eng.get("posture") or "?"
    halt = posture in ("HIBERNATE", "TURTLE")

    # "boost" is preferred; legacy payloads used "cut" instead.
    acb_boost_val = float(acb.get("boost", acb.get("cut", 0)) or 0)
    acb_ready = acb_boost_val > 0  # cut=0 means blocked
    exf_ok_count = int(exf.get("_ok_count", 0) if exf else 0)
    exf_ok = exf_ok_count >= 3

    open_count = len(eng.get("open_positions") or [])
    lev = float(eng.get("current_leverage", 0) or 0)
    abs_cap = float(eng.get("leverage_abs_cap", 9.0) or 9.0)
    trades_ex = int(eng.get("trades_executed") or 0)

    hb_ts = hb.get("ts")
    # Heartbeat is "fresh" when less than 30 s old.
    hb_ok = bool(hb_ts and (time.time() - hb_ts) < 30)

    # ── SIGNAL ROW ────────────────────────────────────────────────────────────
    # vol_ok is the MASTER GATE — listed first. When False, _try_entry is never
    # called regardless of vel_div. BTC 50-bar realised vol must exceed p60=0.000264.
    s_items = []

    # BTC vol — try to get live reading from exf or obf for display context
    btc_vol_str = "—"
    if exf:
        dvol_raw = exf.get("dvol_btc") or exf.get("dvol")
        fng_raw = exf.get("fng")
        if dvol_raw:
            btc_vol_str = f"dV:{float(dvol_raw):.0f}"
        if fng_raw:
            btc_vol_str += f" FnG:{float(fng_raw):.0f}"

    vol_label = f"vol_ok({btc_vol_str})"
    s_items.append(_item(vol_label,
                         GREEN if vol_ok else RED,
                         "✓" if vol_ok else f"✗ BLOCKED"))

    s_items.append(_vel_item(vel_div))

    # posture gate
    # NOTE(review): `pc` and `posture_ok` are computed but never used below.
    pc = PC.get(posture, DIM)
    posture_ok = posture in ("APEX", "STALKER")
    s_items.append(_item("posture",
                         GREEN if posture == "APEX" else (YELLOW if posture == "STALKER" else RED),
                         posture))

    # acb_ready
    s_items.append(_item("acb",
                         GREEN if acb_ready else (ORANGE if acb_boost_val > 0 else RED),
                         f"{acb_boost_val:.2f}"))

    # exf_ok — external factors pipeline
    # NOTE(review): displays "/5" but the OK threshold is >= 3 — confirm the
    # producer really reports 5 sub-feeds.
    s_items.append(_item("exf",
                         GREEN if exf_ok else (YELLOW if exf_ok_count >= 1 else RED),
                         f"{exf_ok_count}/5"))

    # halt gate
    s_items.append(_item("no_halt",
                         GREEN if not halt else RED,
                         "✓" if not halt else "HALT"))

    # heartbeat
    s_items.append(_item("hb",
                         GREEN if hb_ok else RED,
                         _age(hb_ts)))

    # ALL GREEN → fire indicator
    all_sig = signal_fired(vel_div, vol_ok, posture, acb_ready, exf_ok, halt)
    if all_sig:
        s_items.append(f" {GREEN}{BOLD}◉ SIGNAL{RST}")

    # ── TRADE ROW ─────────────────────────────────────────────────────────────
    # Additional gates that must pass before a matched signal becomes a fill
    t_items = []

    # open positions
    t_items.append(_item("open_pos",
                         GREEN if open_count == 0 else ORANGE,
                         str(open_count)))

    # leverage headroom
    lev_pct = lev / abs_cap if abs_cap else 0
    t_items.append(_item("lev",
                         GREEN if lev_pct < 0.3 else (YELLOW if lev_pct < 0.7 else RED),
                         f"{lev:.2f}x/{abs_cap:.0f}"))

    # regime_dd_halt
    t_items.append(_item("regime",
                         GREEN if not halt else RED,
                         "free" if not halt else "HALTED"))

    # Rm strength
    rm = float(safe.get("Rm", 0) or 0)
    t_items.append(_item("Rm",
                         GREEN if rm >= 0.90 else (YELLOW if rm >= 0.70 else (ORANGE if rm >= 0.50 else RED)),
                         f"{rm:.3f}"))

    # Cat5 (intraday drawdown contribution)
    c5 = float((safe.get("breakdown") or {}).get("Cat5", 1.0) or 1.0)
    t_items.append(_item("Cat5",
                         GREEN if c5 >= 0.95 else (YELLOW if c5 >= 0.85 else (ORANGE if c5 >= 0.70 else RED)),
                         f"{c5:.3f}"))

    # trades today
    t_items.append(_item("trades",
                         GREEN if trades_ex < 20 else (YELLOW if trades_ex < 35 else ORANGE),
                         str(trades_ex)))

    # ALL GREEN trade execute indicator
    daily_loss_ok = c5 > 0.50  # reasonable proxy — Cat5 tracks drawdown
    all_trade = all_sig and trade_can_execute(open_count, lev, abs_cap, daily_loss_ok, acb_boost_val)
    if all_trade:
        t_items.append(f" {CYAN}{BOLD}◉ TRADE{RST}")

    sig_row = " ".join(s_items)
    trade_row = " ".join(t_items)
    # FILL row is delegated to fill_row() (defined earlier in this file).
    fill = fill_row(obf_universe or {}, acb, eng)
    return sig_row, trade_row, fill
|
||||
|
||||
|
||||
def render(hz):
    """Build the full BLUE status screen as one multi-line ANSI string.

    Reads all HZ maps, derives ROI/drawdown against module-level START_CAP /
    CAP_PEAK (mutated here), and queues a snapshot row for ClickHouse.
    """
    global START_CAP, CAP_PEAK

    # One HZ round-trip per map key; each returns {} on any failure.
    eng = _get(hz, "DOLPHIN_STATE_BLUE", "engine_snapshot")
    cap = _get(hz, "DOLPHIN_STATE_BLUE", "capital_checkpoint")
    safe = _get(hz, "DOLPHIN_SAFETY", "latest")
    hb = _get(hz, "DOLPHIN_HEARTBEAT", "nautilus_flow_heartbeat")
    mh = _get(hz, "DOLPHIN_META_HEALTH", "latest")
    acb = _get(hz, "DOLPHIN_FEATURES", "acb_boost")
    exf = _get(hz, "DOLPHIN_FEATURES", "exf_latest")
    obf = _get(hz, "DOLPHIN_FEATURES", "obf_universe_latest")
    esof = _get(hz, "DOLPHIN_FEATURES", "esof_advisor_latest")

    now = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
    # Live engine capital wins; fall back to the last checkpoint.
    capital = float(eng.get("capital", 0) or cap.get("capital", 0))
    posture = safe.get("posture") or eng.get("posture") or "?"
    rm = float(safe.get("Rm", 0))
    hb_ts = hb.get("ts")
    phase = hb.get("phase", "?")
    trader_up = hb_ts and (time.time() - hb_ts) < 30
    trades = eng.get("trades_executed", "—")
    scans = eng.get("scans_processed", "—")
    lev = float(eng.get("current_leverage", 0))
    notional= float(eng.get("open_notional", 0))
    mhs_st = mh.get("status", "?")
    rm_meta = float(mh.get("rm_meta", 0))

    # Track session-start capital and the running peak for ROI / drawdown.
    if capital > 0:
        if START_CAP is None: START_CAP = capital
        if CAP_PEAK is None or capital > CAP_PEAK: CAP_PEAK = capital

    roi = ((capital - START_CAP) / START_CAP * 100) if START_CAP and capital else 0
    dd = ((CAP_PEAK - capital) / CAP_PEAK * 100) if CAP_PEAK and capital < CAP_PEAK else 0

    pc = PC.get(posture, DIM)
    sc = SC.get(mhs_st, DIM)
    tc = GREEN if trader_up else RED
    roi_c = GREEN if roi >= 0 else RED
    dd_c = RED if dd > 15 else (YELLOW if dd > 5 else GREEN)

    sig_row, trade_row, fill_row_str = gear_rows(eng, safe, acb, exf, hb, obf)

    svc = mh.get("service_status", {})
    hz_ks = mh.get("hz_key_status", {})

    L = []
    L.append(f"{BOLD}{CYAN}🐬 DOLPHIN-NAUTILUS{RST} {BOLD}v6{RST} {DIM}{now}{RST}")
    L.append("─" * 65)

    # TRADER
    L.append(f"{BOLD}TRADER{RST} {tc}{'● LIVE' if trader_up else '● DOWN'}{RST}"
             f" phase:{phase} hb:{_age(hb_ts)}"
             f" scan:#{eng.get('last_scan_number','?')}")

    # ── SIGNAL → FILL GEARS ───────────────────────────────────────────────────
    vol_ok_live = bool(eng.get("vol_ok", False))
    if not vol_ok_live:
        L.append(f" {RED}{BOLD}⛔ VOL_OK=FALSE — engine gate closed, NO trades until BTC vol > {VOL_P60:.6f}{RST}")
    L.append(f" {DIM}SIG │{RST} {sig_row}")
    L.append(f" {DIM}TRD │{RST} {trade_row}")
    L.append(f" {DIM}FIL │{RST} {fill_row_str}")

    # ── EsoF ADVISORY ─────────────────────────────────────────────────────────
    if esof:
        _ec = {
            "FAVORABLE": GREEN, "MILD_POSITIVE": "\033[92m",
            "NEUTRAL": YELLOW, "MILD_NEGATIVE": "\033[91m",
            "UNFAVORABLE": RED,
        }
        _lbl = esof.get("advisory_label", "?")
        _col = _ec.get(_lbl, DIM)
        _sc = float(esof.get("advisory_score", 0))
        _sess = esof.get("session", "?")
        _dow = esof.get("dow_name", "?")
        _slot = esof.get("slot_15m", "?")
        _swr = esof.get("session_wr_pct", 0)
        _dwr = esof.get("dow_wr_pct", 0)
        _moon = esof.get("moon_phase", "?")[:8]
        _retro= " ☿RETRO" if esof.get("mercury_retrograde") else ""
        L.append(f" {DIM}EsoF│{RST} {_col}{_lbl:<13}{RST} sc:{_col}{_sc:+.3f}{RST} "
                 f"sess:{_sess}({_swr:.0f}%) "
                 f"dow:{_dow}({_dwr:.0f}%) "
                 f"slot:{_slot} {DIM}{_moon}{_retro}{RST}")
    else:
        L.append(f" {DIM}EsoF│ (start esof_advisor.py for advisory){RST}")

    L.append("")

    # CAPITAL
    L.append(f"{BOLD}CAPITAL{RST} {CYAN}${capital:,.2f}{RST}"
             + (f" ROI:{roi_c}{roi:+.2f}%{RST} DD:{dd_c}{dd:.2f}%{RST}"
                f" start:${START_CAP:,.0f}" if START_CAP else ""))
    L.append(f" trades:{trades} scans:{scans} bar:{eng.get('bar_idx','?')}"
             f" lev:{lev:.2f}x notional:${notional:,.0f}")

    # Open positions + EXIT COMPARISON
    positions = eng.get("open_positions") or []
    if positions:
        _pa = positions[0].get("asset", "")
        _lp = 0.0; _obi = 0.0
        # Mid-price and imbalance for the first open asset, from OBF (if fresh).
        if _pa and obf:
            _oad = obf.get(_pa, {})
            _ob_bid = float(_oad.get("best_bid", 0) or 0)
            _ob_ask = float(_oad.get("best_ask", 0) or 0)
            _lp = (_ob_bid + _ob_ask) / 2 if _ob_bid > 0 and _ob_ask > 0 else 0
            _obi = float(_oad.get("imbalance", 0) or 0)
        L.append(f" {BOLD}OPEN:{RST}")
        for p in positions:
            sc2 = GREEN if p.get("side") == "LONG" else RED
            L.append(f" {sc2}{p.get('asset','?')} {p.get('side','?')}{RST}"
                     f" qty:{p.get('quantity',0):.4f}"
                     f" entry:{p.get('entry_price',0):.2f}"
                     f" upnl:{p.get('unrealized_pnl',0):+.2f}")
        # ── EXIT COMPARISON: base engine vs V7 ──────────────────────────────
        _etr = _update_exit_tracker(positions, eng, _lp, _obi)
        _v7p = _v7_preview(_etr)
        if _v7p:
            bh = _v7p["bars_held"]
            bf = _v7p["bars_frac"]
            pp = _v7p["pnl_pct"]
            tp_pct = abs(pp) / FIXED_TP_PCT * 100 if FIXED_TP_PCT else 0
            bf_bar = round(bf * 20)
            bc = GREEN if bf < 0.6 else (YELLOW if bf < 0.85 else RED)
            # NOTE: `tc` is rebound here — the trader-status colour above is no
            # longer needed at this point.
            tc = GREEN if pp >= FIXED_TP_PCT else (YELLOW if tp_pct > 50 else DIM)
            vc = RED if _v7p["action"] == "EXIT" else (YELLOW if _v7p["action"] == "RETRACT" else GREEN)
            ps = "+" if _v7p["proj_usd"] >= 0 else ""
            # NOTE(review): the backslash escapes inside the f-string expression
            # below require Python >= 3.12 (PEP 701) — confirm runtime version.
            L.append(f" {BOLD}EXIT CMP{RST} {DIM}bar{RST} {bc}{bh}/{MAX_HOLD_BARS} [{'\u2588'*bf_bar+'\u2591'*(20-bf_bar)}]{RST} {bf*100:.0f}%"
                     f" {DIM}TP{RST} {tc}{abs(pp)*100:.3f}%/{FIXED_TP_PCT*100:.2f}% ({tp_pct:.0f}%){RST}")
            L.append(f" {vc}V7:{_v7p['action']:<8}{RST} P={_v7p['pressure']:.2f}"
                     f" mae:{_v7p['mae']:.4f} mfe:{_v7p['mfe']:.4f}"
                     f" {DIM}\u2192{RST} {ps}${_v7p['proj_usd']:.2f} ({pp*100:+.3f}%)"
                     f" {DIM}[{_v7p['reason']}]{RST}")
    else:
        L.append(f" {DIM}no open positions{RST}")
        _EXIT_TRACKER.clear()

    L.append("")

    # POSTURE
    bd = safe.get("breakdown") or {}
    L.append(f"{BOLD}POSTURE{RST} {pc}{posture}{RST} Rm:{pc}{_bar(rm,20)}{RST} {rm:.4f}")
    cats = " ".join(f"C{i}:{float(bd.get(f'Cat{i}',0)):.2f}" for i in range(1,6))
    L.append(f" {cats} f_env:{float(bd.get('f_env',0)):.3f} f_exe:{float(bd.get('f_exe',0)):.3f}")

    L.append("")

    # SYS HEALTH
    L.append(f"{BOLD}SYS HEALTH{RST} {sc}{mhs_st}{RST} rm_meta:{rm_meta:.3f}")
    for m in ("m1_data_infra","m1_trader","m2_heartbeat",
              "m3_data_freshness","m4_control_plane","m5_coherence"):
        v = float(mh.get(m, 0))
        c = GREEN if v >= 0.9 else (YELLOW if v >= 0.5 else RED)
        L.append(f" {c}{m}:{v:.3f}{RST}")

    # NOTE(review): the inner ternary on the first branch below is dead — the
    # outer `if st == "RUNNING"` already guarantees the '●' side is taken.
    L.append(f" {DIM}services:{RST} "
             + " ".join(
                 f"{'●' if st=='RUNNING' else f'{RED}●{RST}'}{DIM}{n.split(':')[-1]}{RST}"
                 if st == "RUNNING" else
                 f"{RED}●{DIM}{n.split(':')[-1]}{RST}"
                 for n, st in sorted(svc.items())))

    L.append(f" {DIM}hz_keys:{RST} "
             + " ".join(
                 f"{GREEN if float(i.get('score',0))>=0.9 else (YELLOW if float(i.get('score',0))>=0.5 else RED)}●{RST}{DIM}{k}{RST}"
                 for k, i in sorted(hz_ks.items())))

    # ── LAST TRADES ──────────────────────────────────────────────────────────
    trades_hist = _last_n_trades(30)
    if trades_hist:
        L.append("")
        L.append(f"{BOLD}LAST TRADES{RST} {DIM}(from log){RST}")
        for t in trades_hist:
            pnl = float(t.get("net_pnl", 0))
            _not = float(t.get("notional", 0))
            # Prefer pnl/notional; fall back to the logged pnl_pct fraction.
            pct = (pnl / _not * 100) if _not else float(t.get("pnl_pct", 0)) * 100
            lev = float(t.get("leverage", 0))
            ep = float(t.get("entry_price", 0))
            reason = t.get("reason", "?")
            asset = t.get("asset", "?")
            bars = t.get("bars_held", 0)
            ts_raw = t.get("ts", "")[:16].replace("T", " ")
            pc2 = GREEN if pnl >= 0 else RED
            L.append(
                f" {pc2}{'▲' if pnl>=0 else '▼'}{RST}"
                f" {asset:<12} "
                f"ep:{ep:.4g} "
                f"lev:{lev:.2f}x "
                f"pnl:{pc2}{pnl:+.2f}({pct:+.2f}%){RST} "
                f"exit:{reason} bars:{bars} {DIM}{ts_raw}{RST}"
            )
    else:
        L.append(f" {DIM}no completed trades in log yet{RST}")

    L.append("")
    L.append(f"{DIM}v6 • 0.5s poll • CH→status_snapshots • Ctrl-C quit{RST}")

    # ── CH persistence ─────────────────────────────────────────────────────────
    # Write every other cycle (1s effective rate) to avoid CH noise
    if int(time.time() * 2) % 2 == 0:
        ch_put({
            "ts": int(time.time() * 1000),
            "capital": capital,
            "roi_pct": round(roi, 4),
            "dd_pct": round(dd, 4),
            "trades_executed": int(eng.get("trades_executed", 0) or 0),
            "posture": posture,
            "rm": round(rm, 6),
            "vel_div": round(float(eng.get("last_vel_div", 0) or 0), 6),
            "vol_ok": 1 if eng.get("vol_ok") else 0,
            "phase": phase,
            "mhs_status": mhs_st,
            "boost": round(float(acb.get("boost", 1.0) if acb else 1.0), 4),
            "cat5": round(float((safe.get("breakdown") or {}).get("Cat5", 1.0) or 1.0), 6),
        })

    return "\n".join(L)
|
||||
|
||||
|
||||
def main():
    """Connect to the local Hazelcast cluster and redraw the screen every 0.5 s.

    Render errors are caught per-cycle so one bad snapshot does not kill the
    loop; Ctrl-C exits cleanly and the HZ client is always shut down.
    """
    print("Connecting to HZ...")
    hz = hazelcast.HazelcastClient(
        cluster_name="dolphin", cluster_members=["localhost:5701"],
        connection_timeout=5.0)
    print("Connected.\n")
    try:
        while True:
            try:
                # CLEAR resets the terminal, then the whole frame is rewritten.
                sys.stdout.write(CLEAR + render(hz) + "\n")
                sys.stdout.flush()
            except Exception as e:
                # Keep polling even if one render fails (e.g. transient HZ data).
                sys.stdout.write(f"\n{RED}render error: {e}{RST}\n")
                sys.stdout.flush()
            time.sleep(0.5)
    except KeyboardInterrupt:
        print(f"\n{DIM}Bye.{RST}")
    finally:
        # Always release the HZ client connection.
        hz.shutdown()
|
||||
|
||||
|
||||
# Script entry point — run the polling loop when executed directly.
if __name__ == "__main__":
    main()
|
||||
663
Observability/dolphin_status_green.py
Executable file
663
Observability/dolphin_status_green.py
Executable file
@@ -0,0 +1,663 @@
|
||||
#!/usr/bin/env python3
|
||||
"""DOLPHIN GREEN live status — v1
|
||||
WHITE-on-DARK theme. Reads GREEN-specific HZ maps + shared infrastructure maps.
|
||||
Parses /tmp/green_launch.log for live engine state (LATENCY scan lines).
|
||||
|
||||
Data source mapping:
|
||||
GREEN-unique : DOLPHIN_STATE_GREEN, DOLPHIN_PNL_GREEN, /tmp/green_launch.log
|
||||
Shared : DOLPHIN_SAFETY, DOLPHIN_META_HEALTH, DOLPHIN_FEATURES, DOLPHIN_HEARTBEAT
|
||||
|
||||
Run: source /home/dolphin/siloqy_env/bin/activate && python dolphin_status_green.py
|
||||
Quit: Ctrl-C
|
||||
"""
|
||||
|
||||
import json, math, os, re, threading, time, sys, urllib.request, urllib.parse
|
||||
from collections import deque
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
import hazelcast
|
||||
|
||||
# ── GREEN configuration ─────────────────────────────────────────────────────────
# HZ map names read by this status screen. GREEN has its own state/PnL maps;
# safety, meta-health, features and heartbeat are shared with BLUE.
STRATEGY = "green"
STATE_MAP = "DOLPHIN_STATE_GREEN"
PNL_MAP = "DOLPHIN_PNL_GREEN"
SAFETY_MAP = "DOLPHIN_SAFETY"  # shared
META_MAP = "DOLPHIN_META_HEALTH"  # shared
FEAT_MAP = "DOLPHIN_FEATURES"  # shared
HB_MAP = "DOLPHIN_HEARTBEAT"  # shared (BLUE heartbeat — reference only)
GREEN_LOG = Path("/tmp/green_launch.log")

# ── ClickHouse fire-and-forget write ─────────────────────────────────────────
_CH_URL = "http://localhost:8123"
_CH_USER = "dolphin"
_CH_PASS = "dolphin_ch_2026"
# Bounded queue drained by the background writer thread; oldest rows are
# dropped when the writer falls behind (lossy by design).
_CH_Q: deque = deque(maxlen=500)
|
||||
|
||||
def _ch_worker():
    """Background drain loop: batch queued rows into ClickHouse every 2 s.

    Pops everything currently in _CH_Q, POSTs it as JSONEachRow to the
    status_snapshots table, and swallows any network/HTTP error (fire-and-
    forget telemetry must never take the TUI down).
    """
    while True:
        time.sleep(2)
        rows = []
        while _CH_Q:
            try:
                rows.append(_CH_Q.popleft())
            except IndexError:  # another popper raced us — queue is empty
                break
        if not rows:
            continue
        body = "\n".join(json.dumps(r) for r in rows).encode()
        url = f"{_CH_URL}/?database=dolphin&query=INSERT+INTO+status_snapshots+FORMAT+JSONEachRow"
        req = urllib.request.Request(url, data=body, method="POST")
        req.add_header("X-ClickHouse-User", _CH_USER)
        req.add_header("X-ClickHouse-Key", _CH_PASS)
        req.add_header("Content-Type", "application/octet-stream")
        try:
            # Fix: close the HTTP response explicitly — the previous bare
            # urlopen() call leaked one socket per successful batch.
            with urllib.request.urlopen(req, timeout=4):
                pass
        except Exception:
            pass  # best-effort by design


threading.Thread(target=_ch_worker, daemon=True, name="ch-status-green").start()
|
||||
|
||||
def ch_put(row: dict):
    """Queue *row* for the background ClickHouse writer (non-blocking, lossy)."""
    _CH_Q.append(row)
|
||||
|
||||
# ── ANSI theme ──────────────────────────────────────────────────────────────────
CLEAR = "\033[2J\033[H"  # clear screen + home cursor
BOLD = "\033[1m"; DIM = "\033[2m"; RST = "\033[0m"

# GREEN TUI theme — bright white primary on dark green header
BW = "\033[97m"  # bright white (replaces CYAN from BLUE TUI)
DG = "\033[38;5;46m"  # bright green accent
BG_HDR = "\033[48;5;22m"  # dark green background for header banner
LG = "\033[38;5;82m"  # light green
GREEN = "\033[32m"; YELLOW = "\033[33m"; RED = "\033[31m"; CYAN = "\033[36m"
ORANGE = "\033[38;5;208m"

# Posture → colour and meta-health status → colour lookup tables.
PC = {"APEX": GREEN, "STALKER": YELLOW, "TURTLE": ORANGE, "HIBERNATE": RED}
SC = {"GREEN": GREEN, "DEGRADED": YELLOW, "CRITICAL": ORANGE, "DEAD": RED}

# Thresholds (same algorithm — shared with BLUE)
VEL_DIV_THRESHOLD = -0.020  # entry signal fires at/below this
VEL_DIV_EXTREME = -0.050
VEL_DIV_WARN = -0.010
VEL_DIV_CLOSE = -0.015
VOL_P60 = 0.00026414  # BTC realised-vol gate (p60)
BTC_VOL_WINDOW = 50
FIXED_TP_PCT = 0.0095
MAX_HOLD_BARS = 250
OB_IMBALANCE_BIAS = -0.09  # imbalance below this counts as confirmed bearish

# Session-start capital and running peak, initialised lazily in render().
START_CAP = None
CAP_PEAK = None
|
||||
|
||||
# ── Log parsing ─────────────────────────────────────────────────────────────────
# Pre-compiled patterns applied per line to the GREEN launch log.
_RE_ANSI = re.compile(r'\x1b\[[0-9;]*m')  # SGR colour escapes
_RE_LATENCY = re.compile(
    r"LATENCY scan #(\d+): bar=(\d+) vel_div=([-\d.]+) step_bar=([\d.]+)ms vol_ok=(\w+)"
)
_RE_TS = re.compile(r"^(\S+Z)\s+")  # leading ISO-8601 Zulu timestamp
_RE_ENTRY = re.compile(r"ENTRY: (\{.+?\})(?:\s*\[.*\])?$")  # dict repr after "ENTRY: "
_RE_EXIT = re.compile(r"EXIT: (\{.+?\})(?:\s*\[.*\])?$")  # dict repr after "EXIT: "
|
||||
|
||||
def _strip_ansi(s: str) -> str:
|
||||
return _RE_ANSI.sub('', s)
|
||||
|
||||
|
||||
def _parse_log_dict(raw: str) -> dict:
|
||||
import ast
|
||||
cleaned = raw.replace(": nan", ": null").replace(": inf", ": null").replace(": -inf", ": null")
|
||||
try:
|
||||
return ast.literal_eval(raw)
|
||||
except Exception:
|
||||
pass
|
||||
try:
|
||||
return json.loads(cleaned.replace("'", '"'))
|
||||
except Exception:
|
||||
raise ValueError(f"unparseable: {raw[:80]}")
|
||||
|
||||
|
||||
def _parse_green_state(n_lines=600):
    """Parse the tail of the GREEN log for live engine state + trade history.

    Returns a 3-tuple:
      state  — dict from the *last* LATENCY scan line (scan number, bar index,
               vel_div, step time, vol_ok, timestamp), or {} if none found;
      trades — up to the last 5 ENTRY/EXIT pairs matched by trade_id;
      errors — up to the last 3 ERROR/WARNING/STALE lines.
    """
    try:
        raw_lines = GREEN_LOG.read_text(errors="replace").splitlines()[-n_lines:]
    except Exception:
        # Log missing/unreadable — return empty state, trades, errors.
        return {}, [], []

    # Strip colour codes so the regexes see plain text.
    lines = [_strip_ansi(l) for l in raw_lines]

    state = {}
    for line in lines:
        m = _RE_LATENCY.search(line)
        if m:
            # Later matches overwrite earlier ones — we keep the newest scan.
            ts_m = _RE_TS.match(line)
            state = {
                "last_scan_number": int(m.group(1)),
                "bar_idx": int(m.group(2)),
                "last_vel_div": float(m.group(3)),
                "step_bar_ms": float(m.group(4)),
                "vol_ok": m.group(5) == "True",
                "last_ts": ts_m.group(1) if ts_m else "",
            }

    # Pair ENTRY lines with their EXIT line via trade_id; unmatched entries
    # (still-open trades, or entries scrolled out of the window) are dropped.
    entries = {}
    trades = []
    for line in lines:
        m = _RE_ENTRY.search(line)
        if m:
            ts_m = _RE_TS.match(line)
            try:
                d = _parse_log_dict(m.group(1))
                entries[d["trade_id"]] = {"ts": ts_m.group(1) if ts_m else "", **d}
            except Exception:
                pass  # malformed ENTRY payload — skip the line
        m = _RE_EXIT.search(line)
        if m:
            ts_m = _RE_TS.match(line)
            try:
                d = _parse_log_dict(m.group(1))
                tid = d.get("trade_id")
                if tid and tid in entries:
                    e = entries.pop(tid)
                    trades.append({**e, "exit_ts": ts_m.group(1) if ts_m else "",
                                   "reason": d.get("reason", "?"),
                                   "pnl_pct": d.get("pnl_pct", 0),
                                   "net_pnl": d.get("net_pnl", 0),
                                   "bars_held": d.get("bars_held", 0)})
            except Exception:
                pass  # malformed EXIT payload — skip the line

    errors = [l for l in lines if re.search(r'\[ERROR\]|\[WARNING\]|\[STALE', l)][-5:]

    return state, trades[-5:], errors[-3:]
|
||||
|
||||
|
||||
# ── Helpers ─────────────────────────────────────────────────────────────────────
|
||||
def _age(ts):
|
||||
if not ts: return "?"
|
||||
if isinstance(ts, str):
|
||||
try:
|
||||
dt = datetime.fromisoformat(ts.replace("Z", "+00:00"))
|
||||
s = time.time() - dt.timestamp()
|
||||
except Exception:
|
||||
return "?"
|
||||
else:
|
||||
s = time.time() - ts
|
||||
if s < 0: return "0s"
|
||||
if s < 60: return f"{s:.0f}s"
|
||||
if s < 3600: return f"{s/60:.0f}m"
|
||||
return f"{s/3600:.1f}h"
|
||||
|
||||
|
||||
def _age_seconds(ts_str):
|
||||
try:
|
||||
dt = datetime.fromisoformat(ts_str.replace("Z", "+00:00"))
|
||||
return time.time() - dt.timestamp()
|
||||
except Exception:
|
||||
return 9999
|
||||
|
||||
|
||||
def _bar(v, w=20):
|
||||
v = max(0.0, min(1.0, v))
|
||||
return "\u2588" * round(v * w) + "\u2591" * (w - round(v * w))
|
||||
|
||||
|
||||
def _get(hz, map_name, key):
|
||||
try:
|
||||
raw = hz.get_map(map_name).blocking().get(key)
|
||||
return json.loads(raw) if raw else {}
|
||||
except Exception:
|
||||
return {}
|
||||
|
||||
|
||||
def _item(label, color, val=""):
    """Render one gear-row cell: a coloured dot followed by dim "label:value"."""
    suffix = f":{val}" if val else ""
    return f"{color}\u25cf{RST}{DIM}{label}{suffix}{RST}"
|
||||
|
||||
|
||||
def _vel_item(vel_div):
    """Render the vel_div gear cell, colour-graded against the signal thresholds.

    Grading (most negative = strongest signal):
      <= VEL_DIV_THRESHOLD → GREEN  (signal zone; EXTREME was identical)
      <= VEL_DIV_CLOSE     → YELLOW (approaching)
      <= VEL_DIV_WARN      → ORANGE (early warning)
      otherwise            → RED    (no signal)

    The original had two separate branches for EXTREME/THRESHOLD (both GREEN)
    and for `< 0` / `else` (both RED); they are collapsed here — output is
    unchanged for every input.
    """
    v = f"{vel_div:+.4f}"
    if vel_div <= VEL_DIV_THRESHOLD:
        color = GREEN
    elif vel_div <= VEL_DIV_CLOSE:
        color = YELLOW
    elif vel_div <= VEL_DIV_WARN:
        color = ORANGE
    else:
        color = RED
    return _item("vel_div", color, v)
|
||||
|
||||
|
||||
def signal_fired(vel_div, vol_ok, posture, acb_ready, exf_ok, halt):
    """True iff every SIGNAL gate is open simultaneously.

    Gates: vel_div at/below the entry threshold, volatility gate open,
    posture not sleeping (HIBERNATE/TURTLE), ACB boost positive, external
    factors healthy, and no halt in force.
    """
    posture_awake = posture not in ("HIBERNATE", "TURTLE")
    return (vel_div <= VEL_DIV_THRESHOLD
            and vol_ok
            and posture_awake
            and acb_ready
            and exf_ok
            and not halt)
|
||||
|
||||
|
||||
def trade_can_execute(open_count, lev, abs_cap, daily_loss_ok, boost):
    """True when the execution-side gates allow an order.

    Requires: flat book (no open positions), leverage below the absolute cap,
    daily-loss proxy OK, and a positive ACB boost.
    """
    if open_count != 0 or lev >= abs_cap:
        return False
    return daily_loss_ok and boost > 0
|
||||
|
||||
|
||||
def _best_fill_candidate(obf_universe):
|
||||
candidates = []
|
||||
for k, v in obf_universe.items():
|
||||
if not isinstance(v, dict) or "fill_probability" not in v:
|
||||
continue
|
||||
candidates.append((k, v))
|
||||
if not candidates:
|
||||
return None, {}
|
||||
def score(item):
|
||||
sym, a = item
|
||||
imb = float(a.get("imbalance", 0))
|
||||
fp = float(a.get("fill_probability", 0))
|
||||
sp = float(a.get("spread_bps", 99))
|
||||
dq = float(a.get("depth_quality", 0))
|
||||
imb_bonus = max(0.0, -imb)
|
||||
return fp * (1 + imb_bonus) * dq / max(0.1, sp)
|
||||
candidates.sort(key=score, reverse=True)
|
||||
return candidates[0]
|
||||
|
||||
|
||||
# ── Gear rows ──────────────────────────────────────────────────────────────────
|
||||
def gear_rows(gstate, safe, acb, exf, hb, obf_universe=None):
    """Return the three GREEN gear rows: SIGNAL, TRADE gates, FILL path.

    GREEN trades paper-only inside Nautilus, so the TRADE row shows a static
    "paper" cell instead of BLUE's open-position/leverage gates, and the FILL
    row ends with a "(paper: Nautilus internal)" marker.

    Args:
        gstate: GREEN engine snapshot dict (vel_div, vol_ok, bar_idx...).
        safe: shared DOLPHIN_SAFETY "latest" dict.
        acb / exf / hb: shared feature + heartbeat dicts (may be falsy).
        obf_universe: order-book features universe dict.

    Returns:
        (sig_row, trade_row, fill_row) ANSI strings.
    """
    vel_div = float(gstate.get("last_vel_div", 0) or 0)
    vol_ok = bool(gstate.get("vol_ok", False))
    # Safety posture wins over the engine's own posture when both are present.
    posture = safe.get("posture") or gstate.get("posture") or "?"
    halt = posture in ("HIBERNATE", "TURTLE")

    # "boost" is preferred; legacy payloads used "cut" instead.
    acb_boost_val = float(acb.get("boost", acb.get("cut", 0)) or 0)
    acb_ready = acb_boost_val > 0
    exf_ok_count = int(exf.get("_ok_count", 0) if exf else 0)
    exf_ok = exf_ok_count >= 3

    hb_ts = hb.get("ts")
    # Heartbeat is "fresh" when less than 30 s old.
    hb_ok = bool(hb_ts and (time.time() - hb_ts) < 30)

    # ── SIGNAL ROW ────────────────────────────────────────────────────────────
    s_items = []

    btc_vol_str = "\u2014"
    if exf:
        dvol_raw = exf.get("dvol_btc") or exf.get("dvol")
        fng_raw = exf.get("fng")
        if dvol_raw:
            btc_vol_str = f"dV:{float(dvol_raw):.0f}"
        if fng_raw:
            btc_vol_str += f" FnG:{float(fng_raw):.0f}"

    vol_label = f"vol_ok({btc_vol_str})"
    s_items.append(_item(vol_label,
                         GREEN if vol_ok else RED,
                         "\u2713" if vol_ok else "\u2717 BLOCKED"))

    s_items.append(_vel_item(vel_div))

    # NOTE(review): `pc` is computed but never used below.
    pc = PC.get(posture, DIM)
    s_items.append(_item("posture",
                         GREEN if posture == "APEX" else (YELLOW if posture == "STALKER" else RED),
                         posture))

    s_items.append(_item("acb",
                         GREEN if acb_ready else (ORANGE if acb_boost_val > 0 else RED),
                         f"{acb_boost_val:.2f}"))

    # NOTE(review): displays "/5" while the OK threshold is >= 3 — confirm the
    # producer reports 5 sub-feeds.
    s_items.append(_item("exf",
                         GREEN if exf_ok else (YELLOW if exf_ok_count >= 1 else RED),
                         f"{exf_ok_count}/5"))

    s_items.append(_item("no_halt",
                         GREEN if not halt else RED,
                         "\u2713" if not halt else "HALT"))

    s_items.append(_item("hb",
                         GREEN if hb_ok else RED,
                         _age(hb_ts)))

    all_sig = signal_fired(vel_div, vol_ok, posture, acb_ready, exf_ok, halt)
    if all_sig:
        s_items.append(f" {DG}{BOLD}\u25c9 SIGNAL{RST}")

    # ── TRADE ROW ─────────────────────────────────────────────────────────────
    t_items = []

    # Paper-trading marker — GREEN fills inside Nautilus, no live exchange.
    t_items.append(_item("paper", DG, "Nautilus"))

    t_items.append(_item("regime",
                         GREEN if not halt else RED,
                         "free" if not halt else "HALTED"))

    rm = float(safe.get("Rm", 0) or 0)
    t_items.append(_item("Rm",
                         GREEN if rm >= 0.90 else (YELLOW if rm >= 0.70 else (ORANGE if rm >= 0.50 else RED)),
                         f"{rm:.3f}"))

    c5 = float((safe.get("breakdown") or {}).get("Cat5", 1.0) or 1.0)
    t_items.append(_item("Cat5",
                         GREEN if c5 >= 0.95 else (YELLOW if c5 >= 0.85 else (ORANGE if c5 >= 0.70 else RED)),
                         f"{c5:.3f}"))

    bar_idx = int(gstate.get("bar_idx", 0) or 0)
    t_items.append(_item("bar", DIM, str(bar_idx)))

    # Paper mode: open_count/lev/cap are placeholders (0, 0.0, 8.0) — only the
    # Cat5 daily-loss proxy and ACB boost can actually gate here.
    all_trade = all_sig and trade_can_execute(0, 0.0, 8.0, c5 > 0.50, acb_boost_val)
    if all_trade:
        t_items.append(f" {DG}{BOLD}\u25c9 TRADE{RST}")

    sig_row = " ".join(s_items)
    trade_row = " ".join(t_items)

    # ── FILL ROW ──────────────────────────────────────────────────────────────
    f_items = []

    # "_n_assets"/"_n_stale" are scalar metadata keys inside the OBF universe.
    n_assets = int(obf_universe.get("_n_assets", 0) if obf_universe else 0)
    n_stale = int(obf_universe.get("_n_stale", 0) if obf_universe else 0)
    n_fresh = n_assets - n_stale

    f_items.append(_item("universe",
                         GREEN if n_fresh >= 200 else (YELLOW if n_fresh >= 50 else RED),
                         f"{n_fresh}/{n_assets}"))

    sym, ab = _best_fill_candidate(obf_universe or {})
    if sym:
        fill_p = float(ab.get("fill_probability", 0))
        spread = float(ab.get("spread_bps", 99))
        dq = float(ab.get("depth_quality", 0))
        imb = float(ab.get("imbalance", 0))
        depth = float(ab.get("depth_1pct_usd", 0))
        asset_color = GREEN if fill_p >= 0.80 else (YELLOW if fill_p >= 0.50 else RED)
        f_items.append(_item("best", asset_color, sym[:6]))
        f_items.append(_item("fill_p",
                             GREEN if fill_p >= 0.85 else (YELLOW if fill_p >= 0.60 else RED),
                             f"{fill_p:.2f}"))
        f_items.append(_item("spread",
                             GREEN if spread <= 3 else (YELLOW if spread <= 8 else RED),
                             f"{spread:.1f}bps"))
        f_items.append(_item("depth_q",
                             GREEN if dq >= 0.5 else (YELLOW if dq >= 0.1 else RED),
                             f"{dq:.2f}"))
        # SHORT-side bias: imbalance must be below OB_IMBALANCE_BIAS (bearish).
        imb_ok = imb < OB_IMBALANCE_BIAS
        f_items.append(_item("imb",
                             GREEN if imb_ok else
                             YELLOW if imb < 0 else
                             ORANGE if imb < 0.1 else RED,
                             f"{imb:+.2f}"))
        f_items.append(_item("depth",
                             GREEN if depth >= 50_000 else (YELLOW if depth >= 10_000 else RED),
                             f"${depth/1000:.0f}k"))
    else:
        f_items.append(_item("OBF", RED, "no data"))

    boost = float(acb.get("boost", 1.0) if acb else 1.0)
    beta = float(acb.get("beta", 0.8) if acb else 0.8)
    f_items.append(_item("acb_boost",
                         GREEN if boost >= 1.5 else (YELLOW if boost >= 1.0 else ORANGE),
                         f"\u00d7{boost:.2f}"))
    f_items.append(_item("beta",
                         GREEN if beta >= 0.7 else (YELLOW if beta >= 0.4 else RED),
                         f"{beta:.2f}"))

    f_items.append(f" {DIM}(paper: Nautilus internal){RST}")

    fill = " ".join(f_items)
    return sig_row, trade_row, fill
|
||||
|
||||
|
||||
# ── Render ──────────────────────────────────────────────────────────────────────
|
||||
def render(hz):
    """Build the full GREEN status screen as one string.

    Reads engine/safety/heartbeat/meta-health/feature snapshots from shared
    Hazelcast maps, merges in liveness + trade history parsed from the GREEN
    log, and (roughly every other frame) persists a summary row via ch_put.

    Args:
        hz: connected Hazelcast client (only get_map(...).blocking() is used).
    Returns:
        The complete screen as a newline-joined string of ANSI-colored rows.
    """
    global START_CAP, CAP_PEAK

    eng = _get(hz, STATE_MAP, "engine_snapshot")
    cap = _get(hz, STATE_MAP, "capital_checkpoint")
    safe = _get(hz, SAFETY_MAP, "latest")
    hb = _get(hz, HB_MAP, "nautilus_flow_heartbeat")
    mh = _get(hz, META_MAP, "latest")
    acb = _get(hz, FEAT_MAP, "acb_boost")
    exf = _get(hz, FEAT_MAP, "exf_latest")
    obf = _get(hz, FEAT_MAP, "obf_universe_latest")

    # Log parsing for liveness + trade history (engine_snapshot is primary for state)
    glog_state, trades_hist, log_errors = _parse_green_state(600)

    now = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
    capital = float(eng.get("capital", 0) or cap.get("capital", 0))
    posture = safe.get("posture") or eng.get("posture") or "?"
    rm = float(safe.get("Rm", 0) or 0)
    hb_ts = hb.get("ts")
    phase = hb.get("phase", "?")
    # NOTE(review): trader_up is computed but never read below (liveness uses
    # green_alive from the log instead) — candidate for removal.
    trader_up = hb_ts and (time.time() - hb_ts) < 30
    scans = eng.get("scans_processed", "\u2014")
    bar_idx = eng.get("bar_idx", "\u2014")
    vel_div = float(eng.get("last_vel_div", 0) or 0)
    vol_ok = bool(eng.get("vol_ok", False))
    trades_ex = int(eng.get("trades_executed", 0) or 0)
    lev = float(eng.get("current_leverage", 0) or 0)
    notional= float(eng.get("open_notional", 0) or 0)
    mhs_st = mh.get("status", "?")
    rm_meta = float(mh.get("rm_meta", 0) or 0)
    # NOTE(review): snap_ts is unused in this function.
    snap_ts = eng.get("timestamp", "")

    # Liveness from log (engine_snapshot may lag a few seconds)
    log_ts_str = glog_state.get("last_ts", "")
    log_age = _age(log_ts_str) if log_ts_str else "?"
    green_alive = log_ts_str and _age_seconds(log_ts_str) < 30
    step_ms = float(glog_state.get("step_bar_ms", 0) or 0)

    # Track session start / peak capital in module globals (reset on restart).
    if capital > 0:
        if START_CAP is None: START_CAP = capital
        if CAP_PEAK is None or capital > CAP_PEAK: CAP_PEAK = capital

    roi = ((capital - START_CAP) / START_CAP * 100) if START_CAP and capital else 0
    dd = ((CAP_PEAK - capital) / CAP_PEAK * 100) if CAP_PEAK and capital < CAP_PEAK else 0

    pc = PC.get(posture, DIM)
    sc = SC.get(mhs_st, DIM)
    tc = DG if green_alive else RED
    roi_c = GREEN if roi >= 0 else RED
    dd_c = RED if dd > 15 else (YELLOW if dd > 5 else GREEN)

    sig_row, trade_row, fill_row_str = gear_rows(eng, safe, acb, exf, hb, obf)

    svc = mh.get("service_status", {})
    hz_ks = mh.get("hz_key_status", {})

    L = []

    # HEADER — bright white on dark green background
    L.append(
        f"{BG_HDR}{BW}{BOLD} \U0001f42c DOLPHIN-GREEN {RST}"
        f" {BW}{BOLD}v1{RST} {DIM}{now}{RST} {DG}\u25cf STRATEGY=green{RST}"
    )
    L.append("\u2501" * 70)

    # GREEN PROCESS
    L.append(
        f"{BW}{BOLD}GREEN{RST} {tc}{'\u25cf LIVE' if green_alive else '\u25cf DOWN'}{RST}"
        f" log_age:{log_age}"
        f" scan:#{scans}"
        f" step:{step_ms:.1f}ms"
    )
    L.append(f" {DIM}[shared] BLUE hb:{_age(hb_ts)} phase:{phase}{RST}")

    # SIGNAL → FILL GEARS
    if not vol_ok:
        L.append(
            f" {RED}{BOLD}\u26d4 VOL_OK=FALSE \u2014 engine gate closed,"
            f" NO trades until BTC vol > {VOL_P60:.6f}{RST}"
        )
    L.append(f" {DIM}SIG \u2502{RST} {sig_row}")
    L.append(f" {DIM}TRD \u2502{RST} {trade_row}")
    L.append(f" {DIM}FIL \u2502{RST} {fill_row_str}")

    L.append("")

    # CAPITAL
    L.append(
        f"{BW}{BOLD}CAPITAL{RST} {BW}${capital:,.2f}{RST}"
        + (f" ROI:{roi_c}{roi:+.2f}%{RST} DD:{dd_c}{dd:.2f}%{RST}"
           f" start:${START_CAP:,.0f}" if START_CAP else "")
    )
    L.append(
        f" trades:{trades_ex} scans:{scans} bar:{bar_idx}"
        f" lev:{lev:.2f}x notional:${notional:,.0f}"
    )

    # Open positions (from engine_snapshot — same structure as BLUE)
    positions = eng.get("open_positions") or []
    if positions:
        L.append(f" {BW}{BOLD}OPEN:{RST}")
        for p in positions:
            sc2 = GREEN if p.get("side") == "LONG" else RED
            L.append(f" {sc2}{p.get('asset','?')} {p.get('side','?')}{RST}"
                     f" qty:{p.get('quantity',0):.4f}"
                     f" entry:{p.get('entry_price',0):.2f}"
                     f" upnl:{p.get('unrealized_pnl',0):+.2f}")
    else:
        L.append(f" {DIM}no open positions{RST}")

    L.append("")

    # POSTURE (shared map)
    bd = safe.get("breakdown") or {}
    L.append(
        f"{BW}{BOLD}POSTURE{RST} {pc}{posture}{RST}"
        f" Rm:{pc}{_bar(rm,20)}{RST} {rm:.4f} {DIM}[shared]{RST}"
    )
    cats = " ".join(f"C{i}:{float(bd.get(f'Cat{i}',0)):.2f}" for i in range(1,6))
    L.append(
        f" {cats} f_env:{float(bd.get('f_env',0)):.3f}"
        f" f_exe:{float(bd.get('f_exe',0)):.3f}"
    )

    L.append("")

    # SYS HEALTH (shared map)
    L.append(
        f"{BW}{BOLD}SYS HEALTH{RST} {sc}{mhs_st}{RST}"
        f" rm_meta:{rm_meta:.3f} {DIM}[shared]{RST}"
    )
    for m in ("m1_data_infra","m1_trader","m2_heartbeat",
              "m3_data_freshness","m4_control_plane","m5_coherence"):
        v = float(mh.get(m, 0) or 0)
        c = GREEN if v >= 0.9 else (YELLOW if v >= 0.5 else RED)
        L.append(f" {c}{m}:{v:.3f}{RST}")

    # NOTE(review): the inner `'\u25cf' if st=='RUNNING' else ...` conditional is
    # dead — it is only evaluated when the outer `if st == "RUNNING"` already
    # holds, so its else-arm never runs.
    L.append(f" {DIM}services:{RST} "
             + " ".join(
                 f"{'\u25cf' if st=='RUNNING' else f'{RED}\u25cf{RST}'}{DIM}{n.split(':')[-1]}{RST}"
                 if st == "RUNNING" else
                 f"{RED}\u25cf{DIM}{n.split(':')[-1]}{RST}"
                 for n, st in sorted(svc.items())))

    L.append(f" {DIM}hz_keys:{RST} "
             + " ".join(
                 f"{GREEN if float(i.get('score',0))>=0.9 else (YELLOW if float(i.get('score',0))>=0.5 else RED)}\u25cf{RST}{DIM}{k}{RST}"
                 for k, i in sorted(hz_ks.items())))

    # LAST TRADES (from GREEN log)
    if trades_hist:
        L.append("")
        L.append(f"{BW}{BOLD}LAST TRADES{RST} {DIM}(from GREEN log){RST}")
        for t in trades_hist:
            pnl = float(t.get("net_pnl", 0))
            _not = float(t.get("notional", 0))
            pct = (pnl / _not * 100) if _not else float(t.get("pnl_pct", 0)) * 100
            lev = float(t.get("leverage", 0))
            ep = float(t.get("entry_price", 0))
            reason = t.get("reason", "?")
            asset = t.get("asset", "?")
            bars = t.get("bars_held", 0)
            ts_raw = t.get("ts", "")[:16].replace("T", " ")
            pc2 = GREEN if pnl >= 0 else RED
            L.append(
                f" {pc2}{'\u25b2' if pnl>=0 else '\u25bc'}{RST}"
                f" {asset:<12} "
                f"ep:{ep:.4g} "
                f"lev:{lev:.2f}x "
                f"pnl:{pc2}{pnl:+.2f}({pct:+.2f}%){RST} "
                f"exit:{reason} bars:{bars} {DIM}{ts_raw}{RST}"
            )
    else:
        L.append(f" {DIM}no completed trades yet (GREEN paper mode){RST}")

    # LOG ERRORS
    if log_errors:
        L.append("")
        L.append(f"{RED}{BOLD}LOG WARNINGS{RST}")
        for e in log_errors:
            L.append(f" {RED}{e[-120:]}{RST}")

    # PNL HISTORY — best-effort: any HZ/JSON failure simply hides the section.
    try:
        pnl_map = hz.get_map(PNL_MAP).blocking()
        pnl_keys = sorted(pnl_map.key_set())
        if pnl_keys:
            L.append("")
            L.append(f"{BW}{BOLD}PNL HISTORY{RST} {DIM}({PNL_MAP}){RST}")
            for k in pnl_keys[-7:]:
                v_raw = pnl_map.get(k)
                if v_raw:
                    v = json.loads(v_raw)
                    ec = float(v.get("engine_capital", 0) or 0)
                    L.append(f" {DIM}{k}{RST} capital:${ec:,.2f}")
    except Exception:
        pass

    L.append("")
    L.append(f"{DIM}GREEN v1 \u2022 0.5s poll \u2022 CH\u2192status_snapshots \u2022 Ctrl-C quit{RST}")

    # CH persistence — presumably meant to throttle writes to ~1 per second
    # given the 0.5s poll cadence; TODO confirm against ch_put's dedup behavior.
    if int(time.time() * 2) % 2 == 0:
        ch_put({
            "ts": int(time.time() * 1000),
            "strategy": STRATEGY,
            "capital": capital,
            "roi_pct": round(roi, 4),
            "dd_pct": round(dd, 4),
            "trades_executed": trades_ex,
            "scans_processed": int(eng.get("scans_processed", 0) or 0),
            "bar_idx": int(eng.get("bar_idx", 0) or 0),
            "posture": posture,
            "rm": round(rm, 6),
            "vel_div": round(vel_div, 6),
            "vol_ok": 1 if vol_ok else 0,
            "phase": phase,
            "mhs_status": mhs_st,
            "boost": round(float(acb.get("boost", 1.0) if acb else 1.0), 4),
            "cat5": round(float((safe.get("breakdown") or {}).get("Cat5", 1.0) or 1.0), 6),
            "step_bar_ms": round(step_ms, 2),
        })

    return "\n".join(L)
|
||||
|
||||
|
||||
def main():
    """Connect to the local Hazelcast cluster and redraw the GREEN screen twice a second.

    Render failures are shown inline instead of killing the loop; Ctrl-C exits
    cleanly and the client is always shut down.
    """
    print("Connecting to HZ...")
    hz = hazelcast.HazelcastClient(
        cluster_name="dolphin", cluster_members=["localhost:5701"],
        connection_timeout=5.0)
    print("Connected.\n")
    out = sys.stdout
    try:
        while True:
            try:
                out.write(CLEAR + render(hz) + "\n")
            except Exception as e:
                out.write(f"\n{RED}render error: {e}{RST}\n")
            out.flush()
            time.sleep(0.5)
    except KeyboardInterrupt:
        print(f"\n{DIM}Bye.{RST}")
    finally:
        hz.shutdown()
|
||||
185
Observability/dolphin_status_v1.py
Executable file
185
Observability/dolphin_status_v1.py
Executable file
@@ -0,0 +1,185 @@
|
||||
#!/usr/bin/env python3
|
||||
"""DOLPHIN live status — zero-dependency rolling display.
|
||||
|
||||
Polls HZ every 5s, prints a compact status block. No curses, no textual.
|
||||
Run: source /home/dolphin/siloqy_env/bin/activate && python dolphin_status.py
|
||||
"""
|
||||
import json, os, time, sys
|
||||
from datetime import datetime, timezone
|
||||
|
||||
import hazelcast
|
||||
|
||||
CLEAR = "\033[2J\033[H"
|
||||
BOLD = "\033[1m"; DIM = "\033[2m"; RST = "\033[0m"
|
||||
GREEN = "\033[32m"; YELLOW = "\033[33m"; RED = "\033[31m"; CYAN = "\033[36m"
|
||||
|
||||
PC = {"APEX": GREEN, "STALKER": YELLOW, "TURTLE": "\033[38;5;208m", "HIBERNATE": RED}
|
||||
SC = {"GREEN": GREEN, "DEGRADED": YELLOW, "CRITICAL": "\033[38;5;208m", "DEAD": RED}
|
||||
|
||||
START_CAP = None
|
||||
CAP_PEAK = None
|
||||
|
||||
|
||||
def _age(ts):
|
||||
if not ts: return "?"
|
||||
s = time.time() - ts
|
||||
if s < 0: return "0s"
|
||||
if s < 60: return f"{s:.0f}s"
|
||||
if s < 3600: return f"{s/60:.0f}m"
|
||||
return f"{s/3600:.1f}h"
|
||||
|
||||
|
||||
def _bar(v, w=20):
|
||||
v = max(0.0, min(1.0, v))
|
||||
f = round(v * w)
|
||||
return "█" * f + "░" * (w - f)
|
||||
|
||||
|
||||
def _get(hz, map_name, key):
|
||||
try:
|
||||
raw = hz.get_map(map_name).blocking().get(key)
|
||||
return json.loads(raw) if raw else {}
|
||||
except Exception:
|
||||
return {}
|
||||
|
||||
|
||||
def render(hz):
    """Build one frame of the v1 status screen as a newline-joined string.

    Pulls the engine/safety/heartbeat/meta-health/ACB snapshots from shared
    Hazelcast maps and formats them as ANSI-colored rows.

    Args:
        hz: connected Hazelcast client (only get_map(...).blocking() is used).
    Returns:
        Full screen text (str).

    Fixes vs. the original:
      * the capital summary line used `lines.append(f"..." if START_CAP else "")`,
        which appended a stray empty row whenever START_CAP was unset and made the
        conditional apply to the whole call — now the row is only appended when
        START_CAP is known;
      * numeric fields use the `or 0` guard (matching the GREEN/v3 renderers) so
        an explicit JSON null no longer raises TypeError in float().
    """
    global START_CAP, CAP_PEAK

    eng = _get(hz, "DOLPHIN_STATE_BLUE", "engine_snapshot")
    cap = _get(hz, "DOLPHIN_STATE_BLUE", "capital_checkpoint")
    safe = _get(hz, "DOLPHIN_SAFETY", "latest")
    hb = _get(hz, "DOLPHIN_HEARTBEAT", "nautilus_flow_heartbeat")
    mh = _get(hz, "DOLPHIN_META_HEALTH", "latest")
    acb = _get(hz, "DOLPHIN_FEATURES", "acb_boost")

    now = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
    capital = float(eng.get("capital", 0) or cap.get("capital", 0))
    posture = safe.get("posture") or eng.get("posture") or "?"
    rm = float(safe.get("Rm", 0) or 0)
    bd = safe.get("breakdown") or {}
    hb_ts = hb.get("ts")
    hb_age = _age(hb_ts)
    phase = hb.get("phase", "?")
    trader_up = hb_ts and (time.time() - hb_ts) < 30
    trades = eng.get("trades_executed", "—")
    scans = eng.get("scans_processed", "—")
    lev = float(eng.get("current_leverage", 0) or 0)
    notional = float(eng.get("open_notional", 0) or 0)
    mhs_st = mh.get("status", "?")
    rm_meta = float(mh.get("rm_meta", 0) or 0)
    boost = float(acb.get("boost", acb.get("cut", 0)) or 0)
    vel_div = float(eng.get("last_vel_div", 0) or 0)

    # Track session start / peak capital across frames (module globals).
    if capital > 0:
        if START_CAP is None:
            START_CAP = capital
        if CAP_PEAK is None or capital > CAP_PEAK:
            CAP_PEAK = capital

    roi = ((capital - START_CAP) / START_CAP * 100) if START_CAP and capital else 0
    dd = ((CAP_PEAK - capital) / CAP_PEAK * 100) if CAP_PEAK and capital < CAP_PEAK else 0

    pc = PC.get(posture, DIM)
    sc = SC.get(mhs_st, DIM)
    tc = GREEN if trader_up else RED
    roi_c = GREEN if roi >= 0 else RED
    dd_c = RED if dd > 15 else (YELLOW if dd > 5 else GREEN)

    svc = mh.get("service_status", {})
    hz_ks = mh.get("hz_key_status", {})

    lines = []
    lines.append(f"{BOLD}{CYAN}🐬 DOLPHIN-NAUTILUS{RST} {DIM}{now}{RST}")
    lines.append(f"{'─' * 60}")

    # TRADER
    lines.append(f"{BOLD}TRADER{RST} {tc}{'● LIVE' if trader_up else '● DOWN'}{RST}"
                 f" phase:{phase} hb:{hb_age}")
    lines.append(f" vel_div:{vel_div:+.5f} scan:#{eng.get('last_scan_number', '?')}")

    lines.append("")

    # CAPITAL — ROI/DD/start row only once a session baseline exists.
    lines.append(f"{BOLD}CAPITAL{RST} {CYAN}${capital:,.2f}{RST}")
    if START_CAP:
        lines.append(f" ROI: {roi_c}{roi:+.2f}%{RST} DD: {dd_c}{dd:.2f}%{RST}"
                     f" start: ${START_CAP:,.0f}")
    lines.append(f" trades:{trades} scans:{scans} bar:{eng.get('bar_idx', '?')}")
    lines.append(f" lev:{lev:.2f}x notional:${notional:,.0f}")

    # Open positions
    positions = eng.get("open_positions") or []
    if positions:
        lines.append(f" {BOLD}OPEN POSITIONS:{RST}")
        for p in positions:
            side_c = GREEN if p.get("side") == "LONG" else RED
            lines.append(f" {side_c}{p.get('asset','?')} {p.get('side','?')}{RST}"
                         f" qty:{p.get('quantity',0):.4f}"
                         f" entry:{p.get('entry_price',0):.2f}"
                         f" pnl:{p.get('unrealized_pnl',0):+.2f}")
    else:
        lines.append(f" {DIM}no open positions{RST}")

    lines.append("")

    # POSTURE
    lines.append(f"{BOLD}POSTURE{RST} {pc}{posture}{RST} Rm:{pc}{_bar(rm, 20)}{RST} {rm:.4f}")
    cats = " ".join(f"C{i}:{float(bd.get(f'Cat{i}', 0)):.2f}" for i in range(1, 6))
    lines.append(f" {cats}")
    lines.append(f" f_env:{float(bd.get('f_env', 0)):.3f} f_exe:{float(bd.get('f_exe', 0)):.3f}"
                 f" boost:{boost:.2f}")

    lines.append("")

    # SYS HEALTH
    lines.append(f"{BOLD}SYS HEALTH{RST} {sc}{mhs_st}{RST} rm_meta:{rm_meta:.3f}")
    for m in ("m1_data_infra", "m1_trader", "m2_heartbeat",
              "m3_data_freshness", "m4_control_plane", "m5_coherence"):
        v = float(mh.get(m, 0) or 0)
        c = GREEN if v >= 0.9 else (YELLOW if v >= 0.5 else RED)
        lines.append(f" {m}: {c}{v:.3f}{RST}")

    # Services
    lines.append(f" {DIM}services:{RST}")
    for name, st in sorted(svc.items()):
        dot = f"{GREEN}●{RST}" if st == "RUNNING" else f"{RED}●{RST}"
        lines.append(f" {dot} {name}")

    # HZ keys
    lines.append(f" {DIM}hz keys:{RST}")
    for name, info in sorted(hz_ks.items()):
        score = float(info.get("score", 0))
        c = GREEN if score >= 0.9 else (YELLOW if score >= 0.5 else RED)
        lines.append(f" {c}●{RST} {name}: {info.get('status', '?')}")

    lines.append(f"\n{'─' * 60}")
    lines.append(f"{DIM}polling 5s • q to quit • {now}{RST}")

    return "\n".join(lines)
|
||||
|
||||
|
||||
def main():
    """Connect to the local Hazelcast cluster and refresh the status screen every 5s.

    Per-frame render errors are printed in place of the frame; Ctrl-C exits
    cleanly and the client is always shut down.
    """
    print("Connecting to HZ...")
    hz = hazelcast.HazelcastClient(
        cluster_name="dolphin", cluster_members=["localhost:5701"],
        connection_timeout=5.0)
    print("Connected. Starting status display...\n")

    out = sys.stdout
    try:
        while True:
            try:
                out.write(CLEAR + render(hz) + "\n")
            except Exception as e:
                out.write(f"\n{RED}Render error: {e}{RST}\n")
            out.flush()
            time.sleep(5)
    except KeyboardInterrupt:
        print(f"\n{DIM}Bye.{RST}")
    finally:
        hz.shutdown()
|
||||
463
Observability/dolphin_status_v3.py
Executable file
463
Observability/dolphin_status_v3.py
Executable file
@@ -0,0 +1,463 @@
|
||||
#!/usr/bin/env python3
|
||||
"""DOLPHIN live status — v3
|
||||
Polls HZ every 1s. Three gear rows: SIG / TRD / FIL (signal→fill path).
|
||||
|
||||
Run: source /home/dolphin/siloqy_env/bin/activate && python dolphin_status.py
|
||||
Quit: Ctrl-C
|
||||
"""
|
||||
# v1 archived as dolphin_status_v1.py
|
||||
# v2 archived inline (added SIG+TRD rows)
|
||||
# v3: added FIL row — full signal→asset-pick→OBF→size→order visibility
|
||||
|
||||
import json, os, time, sys
|
||||
from datetime import datetime, timezone
|
||||
|
||||
import hazelcast
|
||||
|
||||
CLEAR = "\033[2J\033[H"
|
||||
BOLD = "\033[1m"; DIM = "\033[2m"; RST = "\033[0m"
|
||||
GREEN = "\033[32m"; YELLOW = "\033[33m"; RED = "\033[31m"; CYAN = "\033[36m"
|
||||
ORANGE = "\033[38;5;208m"
|
||||
|
||||
PC = {"APEX": GREEN, "STALKER": YELLOW, "TURTLE": ORANGE, "HIBERNATE": RED}
|
||||
SC = {"GREEN": GREEN, "DEGRADED": YELLOW, "CRITICAL": ORANGE, "DEAD": RED}
|
||||
|
||||
# Thresholds from nautilus_event_trader.py
|
||||
VEL_DIV_THRESHOLD = -0.020 # signal fires when vel_div < this
|
||||
VEL_DIV_EXTREME = -0.050 # extreme bearish
|
||||
VEL_DIV_WARN = -0.010 # approaching threshold (yellow)
|
||||
VEL_DIV_CLOSE = -0.015 # nearly there (orange→yellow)
|
||||
VOL_P60 = 0.00026414
|
||||
|
||||
START_CAP = None
|
||||
CAP_PEAK = None
|
||||
|
||||
|
||||
def _age(ts):
|
||||
if not ts: return "?"
|
||||
s = time.time() - ts
|
||||
if s < 0: return "0s"
|
||||
if s < 60: return f"{s:.0f}s"
|
||||
if s < 3600: return f"{s/60:.0f}m"
|
||||
return f"{s/3600:.1f}h"
|
||||
|
||||
def _bar(v, w=20):
|
||||
v = max(0.0, min(1.0, v))
|
||||
return "█" * round(v * w) + "░" * (w - round(v * w))
|
||||
|
||||
def _get(hz, map_name, key):
|
||||
try:
|
||||
raw = hz.get_map(map_name).blocking().get(key)
|
||||
return json.loads(raw) if raw else {}
|
||||
except Exception:
|
||||
return {}
|
||||
|
||||
# ── Gear items ────────────────────────────────────────────────────────────────
|
||||
# Each returns (label, color, value_str)
|
||||
def _item(label, color, val=""):
    """One gear cell: colored ● dot followed by dim 'label' or 'label:val'."""
    suffix = f":{val}" if val else ""
    return f"{color}●{RST}{DIM}{label}{suffix}{RST}"
|
||||
|
||||
def _vel_item(vel_div):
    """vel_div gear cell, colored by distance to the fire threshold (-0.020).

    GREEN  — at or past the threshold (includes the "extreme" zone; the
             original had two identical GREEN branches, collapsed here),
    YELLOW — within [-0.020, -0.015): close,
    ORANGE — within [-0.015, -0.010): approaching,
    RED    — anything weaker, negative or positive (the original's two RED
             branches were indistinguishable and are merged).
    Behavior is unchanged; only the dead/duplicate branches were removed.
    """
    v = f"{vel_div:+.4f}"
    if vel_div <= VEL_DIV_THRESHOLD:
        color = GREEN       # signal territory (covers VEL_DIV_EXTREME too)
    elif vel_div <= VEL_DIV_CLOSE:
        color = YELLOW      # -0.015 .. -0.020 — close
    elif vel_div <= VEL_DIV_WARN:
        color = ORANGE      # -0.010 .. -0.015 — approaching
    else:
        color = RED         # far from threshold or not bearish
    return _item("vel_div", color, v)
|
||||
|
||||
def signal_fired(vel_div, vol_ok, posture, acb_ready, exf_ok, halt):
    """Truthy only when ALL signal preconditions hold (engine would fire)."""
    bearish_enough = vel_div <= VEL_DIV_THRESHOLD
    posture_allows = posture not in ("HIBERNATE", "TURTLE")
    return (bearish_enough and vol_ok and posture_allows
            and acb_ready and exf_ok and not halt)
|
||||
|
||||
def trade_can_execute(open_count, lev, abs_cap, daily_loss_ok, boost):
    """Truthy when the execution-side gates allow opening a new position:
    no position already open, leverage headroom left, daily-loss gate clear,
    and a positive sizing boost."""
    no_position = open_count == 0
    headroom = lev < abs_cap
    sized = boost > 0
    return no_position and headroom and daily_loss_ok and sized
|
||||
|
||||
OB_IMBALANCE_BIAS = -0.09 # from engine config: ob_imbalance_bias
|
||||
|
||||
def _best_fill_candidate(obf_universe):
|
||||
"""Pick best SHORT candidate from OBF universe.
|
||||
Criteria: negative imbalance (bearish pressure) + high fill_probability + low spread.
|
||||
Returns (symbol, asset_dict) or (None, {}).
|
||||
"""
|
||||
candidates = []
|
||||
for k, v in obf_universe.items():
|
||||
if not isinstance(v, dict) or "fill_probability" not in v:
|
||||
continue
|
||||
candidates.append((k, v))
|
||||
if not candidates:
|
||||
return None, {}
|
||||
# Score: fill_prob * (1 + bearish_imbalance_bonus) / (1 + spread_bps/10)
|
||||
def score(item):
|
||||
sym, a = item
|
||||
imb = float(a.get("imbalance", 0))
|
||||
fp = float(a.get("fill_probability", 0))
|
||||
sp = float(a.get("spread_bps", 99))
|
||||
dq = float(a.get("depth_quality", 0))
|
||||
# Bearish bias: reward negative imbalance, penalise positive
|
||||
imb_bonus = max(0.0, -imb) # 0..1 for imbalance in [-1,0]
|
||||
return fp * (1 + imb_bonus) * dq / max(0.1, sp)
|
||||
candidates.sort(key=score, reverse=True)
|
||||
return candidates[0]
|
||||
|
||||
|
||||
def fill_row(obf_universe, acb, eng):
    """Row 3 of the gear display: signal → asset-pick → OBF liquidity → size → ORDER.

    Args:
        obf_universe: OBF snapshot dict (per-symbol dicts plus "_n_assets" /
            "_n_stale" meta counters); may be falsy.
        acb: ACB feature dict ("boost", "beta"); may be falsy.
        eng: engine snapshot dict (open_positions, current_leverage, ...).
    Returns:
        One space-joined string of colored gear cells.
    """
    f_items = []

    # ── Asset picker (IRP/ARS) ─────────────────────────────────────────────
    n_assets = int(obf_universe.get("_n_assets", 0) if obf_universe else 0)
    n_stale = int(obf_universe.get("_n_stale", 0) if obf_universe else 0)
    n_fresh = n_assets - n_stale

    f_items.append(_item("universe",
                         GREEN if n_fresh >= 200 else (YELLOW if n_fresh >= 50 else RED),
                         f"{n_fresh}/{n_assets}"))

    sym, ab = _best_fill_candidate(obf_universe)
    if sym:
        fill_p = float(ab.get("fill_probability", 0))
        spread = float(ab.get("spread_bps", 99))
        dq = float(ab.get("depth_quality", 0))
        imb = float(ab.get("imbalance", 0))
        depth = float(ab.get("depth_1pct_usd", 0))

        # Best candidate asset
        asset_color = GREEN if fill_p >= 0.80 else (YELLOW if fill_p >= 0.50 else RED)
        f_items.append(_item("best", asset_color, sym[:6]))

        # OBF: fill probability
        f_items.append(_item("fill_p",
                             GREEN if fill_p >= 0.85 else (YELLOW if fill_p >= 0.60 else RED),
                             f"{fill_p:.2f}"))

        # OBF: spread
        f_items.append(_item("spread",
                             GREEN if spread <= 3 else (YELLOW if spread <= 8 else RED),
                             f"{spread:.1f}bps"))

        # OBF: depth quality
        f_items.append(_item("depth_q",
                             GREEN if dq >= 0.5 else (YELLOW if dq >= 0.1 else RED),
                             f"{dq:.2f}"))

        # OBF: imbalance direction (SHORT needs bearish = negative)
        imb_ok = imb < OB_IMBALANCE_BIAS  # confirmed bearish pressure
        f_items.append(_item("imb",
                             GREEN if imb_ok else
                             YELLOW if imb < 0 else
                             ORANGE if imb < 0.1 else RED,
                             f"{imb:+.2f}"))

        # OBF: depth USD
        f_items.append(_item("depth",
                             GREEN if depth >= 50_000 else (YELLOW if depth >= 10_000 else RED),
                             f"${depth/1000:.0f}k"))

    else:
        f_items.append(_item("OBF", RED, "no data"))

    # ── Sizing — ACB boost × proxy_B prank ────────────────────────────────
    # proxy_B prank not exposed in HZ snapshot; show ACB boost as sizing proxy
    boost = float(acb.get("boost", 1.0) if acb else 1.0)
    beta = float(acb.get("beta", 0.8) if acb else 0.8)
    f_items.append(_item("acb_boost",
                         GREEN if boost >= 1.5 else (YELLOW if boost >= 1.0 else ORANGE),
                         f"×{boost:.2f}"))

    f_items.append(_item("beta",
                         GREEN if beta >= 0.7 else (YELLOW if beta >= 0.4 else RED),
                         f"{beta:.2f}"))

    # ── ORDER indicator ────────────────────────────────────────────────────
    # Would an order fire if signal were green right now?
    # fill_p is only bound inside the `if sym:` branch above; the trailing
    # `if sym` guard makes the reference safe when there is no candidate.
    open_count = len(eng.get("open_positions") or [])
    lev = float(eng.get("current_leverage", 0) or 0)
    abs_c = float(eng.get("leverage_abs_cap", 9.0) or 9.0)
    order_ready = (
        sym is not None
        and fill_p >= 0.60
        and open_count == 0
        and lev < abs_c
        and boost > 0
    ) if sym else False

    if order_ready:
        f_items.append(f" {CYAN}{BOLD}◉ ORDER READY{RST}")
    else:
        f_items.append(f" {DIM}(order: waiting){RST}")

    return " ".join(f_items)
|
||||
|
||||
|
||||
def gear_rows(eng, safe, acb, exf, hb, obf_universe=None):
    """Return three formatted rows: SIGNAL, TRADE gates, FILL path.

    Args:
        eng: engine snapshot dict.
        safe: safety snapshot ("posture", "Rm", "breakdown").
        acb: ACB feature dict ("boost" / "cut").
        exf: external-factors snapshot ("_ok_count"); may be falsy.
        hb: heartbeat dict ("ts").
        obf_universe: OBF universe dict forwarded to fill_row (defaults to {}).
    Returns:
        (sig_row, trade_row, fill_row) strings of colored gear cells.
    """
    vel_div = float(eng.get("last_vel_div", 0) or 0)
    vol_ok = bool(eng.get("vol_ok", False))
    posture = safe.get("posture") or eng.get("posture") or "?"
    halt = posture in ("HIBERNATE", "TURTLE")

    acb_boost_val = float(acb.get("boost", acb.get("cut", 0)) or 0)
    acb_ready = acb_boost_val > 0  # cut=0 means blocked
    exf_ok_count = int(exf.get("_ok_count", 0) if exf else 0)
    exf_ok = exf_ok_count >= 3

    open_count = len(eng.get("open_positions") or [])
    lev = float(eng.get("current_leverage", 0) or 0)
    abs_cap = float(eng.get("leverage_abs_cap", 9.0) or 9.0)
    trades_ex = int(eng.get("trades_executed") or 0)

    hb_ts = hb.get("ts")
    hb_ok = bool(hb_ts and (time.time() - hb_ts) < 30)

    # ── SIGNAL ROW ────────────────────────────────────────────────────────────
    # Preconditions for the engine to generate a signal
    s_items = []
    s_items.append(_vel_item(vel_div))

    # vol_ok — BTC vol above p60
    s_items.append(_item("vol_ok",
                         GREEN if vol_ok else RED,
                         "✓" if vol_ok else "✗"))

    # posture gate
    pc = PC.get(posture, DIM)
    # NOTE(review): pc and posture_ok are computed but not used below.
    posture_ok = posture in ("APEX", "STALKER")
    s_items.append(_item("posture",
                         GREEN if posture == "APEX" else (YELLOW if posture == "STALKER" else RED),
                         posture))

    # acb_ready
    # NOTE(review): the ORANGE arm is unreachable — acb_ready is exactly
    # acb_boost_val > 0, so when acb_ready is False the inner test is too.
    s_items.append(_item("acb",
                         GREEN if acb_ready else (ORANGE if acb_boost_val > 0 else RED),
                         f"{acb_boost_val:.2f}"))

    # exf_ok — external factors pipeline
    s_items.append(_item("exf",
                         GREEN if exf_ok else (YELLOW if exf_ok_count >= 1 else RED),
                         f"{exf_ok_count}/5"))

    # halt gate
    s_items.append(_item("no_halt",
                         GREEN if not halt else RED,
                         "✓" if not halt else "HALT"))

    # heartbeat
    s_items.append(_item("hb",
                         GREEN if hb_ok else RED,
                         _age(hb_ts)))

    # ALL GREEN → fire indicator
    all_sig = signal_fired(vel_div, vol_ok, posture, acb_ready, exf_ok, halt)
    if all_sig:
        s_items.append(f" {GREEN}{BOLD}◉ SIGNAL{RST}")

    # ── TRADE ROW ─────────────────────────────────────────────────────────────
    # Additional gates that must pass before a matched signal becomes a fill
    t_items = []

    # open positions
    t_items.append(_item("open_pos",
                         GREEN if open_count == 0 else ORANGE,
                         str(open_count)))

    # leverage headroom
    lev_pct = lev / abs_cap if abs_cap else 0
    t_items.append(_item("lev",
                         GREEN if lev_pct < 0.3 else (YELLOW if lev_pct < 0.7 else RED),
                         f"{lev:.2f}x/{abs_cap:.0f}"))

    # regime_dd_halt
    t_items.append(_item("regime",
                         GREEN if not halt else RED,
                         "free" if not halt else "HALTED"))

    # Rm strength
    rm = float(safe.get("Rm", 0) or 0)
    t_items.append(_item("Rm",
                         GREEN if rm >= 0.90 else (YELLOW if rm >= 0.70 else (ORANGE if rm >= 0.50 else RED)),
                         f"{rm:.3f}"))

    # Cat5 (intraday drawdown contribution)
    c5 = float((safe.get("breakdown") or {}).get("Cat5", 1.0) or 1.0)
    t_items.append(_item("Cat5",
                         GREEN if c5 >= 0.95 else (YELLOW if c5 >= 0.85 else (ORANGE if c5 >= 0.70 else RED)),
                         f"{c5:.3f}"))

    # trades today
    t_items.append(_item("trades",
                         GREEN if trades_ex < 20 else (YELLOW if trades_ex < 35 else ORANGE),
                         str(trades_ex)))

    # ALL GREEN trade execute indicator
    daily_loss_ok = c5 > 0.50  # reasonable proxy — Cat5 tracks drawdown
    all_trade = all_sig and trade_can_execute(open_count, lev, abs_cap, daily_loss_ok, acb_boost_val)
    if all_trade:
        t_items.append(f" {CYAN}{BOLD}◉ TRADE{RST}")

    sig_row = " ".join(s_items)
    trade_row = " ".join(t_items)
    fill = fill_row(obf_universe or {}, acb, eng)
    return sig_row, trade_row, fill
|
||||
|
||||
|
||||
def render(hz):
    """Build one frame of the v3 status screen (TRADER + SIG/TRD/FIL gear rows).

    Args:
        hz: connected Hazelcast client (only get_map(...).blocking() is used).
    Returns:
        Full screen text as a newline-joined string of ANSI-colored rows.
    """
    global START_CAP, CAP_PEAK

    eng = _get(hz, "DOLPHIN_STATE_BLUE", "engine_snapshot")
    cap = _get(hz, "DOLPHIN_STATE_BLUE", "capital_checkpoint")
    safe = _get(hz, "DOLPHIN_SAFETY", "latest")
    hb = _get(hz, "DOLPHIN_HEARTBEAT", "nautilus_flow_heartbeat")
    mh = _get(hz, "DOLPHIN_META_HEALTH", "latest")
    acb = _get(hz, "DOLPHIN_FEATURES", "acb_boost")
    exf = _get(hz, "DOLPHIN_FEATURES", "exf_latest")
    obf = _get(hz, "DOLPHIN_FEATURES", "obf_universe_latest")

    now = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
    capital = float(eng.get("capital", 0) or cap.get("capital", 0))
    posture = safe.get("posture") or eng.get("posture") or "?"
    # NOTE(review): unlike the GREEN renderer's `or 0` guards, these float()
    # casts raise TypeError if the snapshot holds an explicit null — confirm
    # the producers never emit null for Rm/leverage/notional/rm_meta.
    rm = float(safe.get("Rm", 0))
    hb_ts = hb.get("ts")
    phase = hb.get("phase", "?")
    trader_up = hb_ts and (time.time() - hb_ts) < 30
    trades = eng.get("trades_executed", "—")
    scans = eng.get("scans_processed", "—")
    lev = float(eng.get("current_leverage", 0))
    notional= float(eng.get("open_notional", 0))
    mhs_st = mh.get("status", "?")
    rm_meta = float(mh.get("rm_meta", 0))

    # Track session start / peak capital in module globals (reset on restart).
    if capital > 0:
        if START_CAP is None: START_CAP = capital
        if CAP_PEAK is None or capital > CAP_PEAK: CAP_PEAK = capital

    roi = ((capital - START_CAP) / START_CAP * 100) if START_CAP and capital else 0
    dd = ((CAP_PEAK - capital) / CAP_PEAK * 100) if CAP_PEAK and capital < CAP_PEAK else 0

    pc = PC.get(posture, DIM)
    sc = SC.get(mhs_st, DIM)
    tc = GREEN if trader_up else RED
    roi_c = GREEN if roi >= 0 else RED
    dd_c = RED if dd > 15 else (YELLOW if dd > 5 else GREEN)

    sig_row, trade_row, fill_row_str = gear_rows(eng, safe, acb, exf, hb, obf)

    svc = mh.get("service_status", {})
    hz_ks = mh.get("hz_key_status", {})

    L = []
    L.append(f"{BOLD}{CYAN}🐬 DOLPHIN-NAUTILUS{RST} {DIM}{now}{RST}")
    L.append("─" * 60)

    # TRADER
    L.append(f"{BOLD}TRADER{RST} {tc}{'● LIVE' if trader_up else '● DOWN'}{RST}"
             f" phase:{phase} hb:{_age(hb_ts)}"
             f" scan:#{eng.get('last_scan_number','?')}")

    # ── SIGNAL → FILL GEARS ───────────────────────────────────────────────────
    L.append(f" {DIM}SIG │{RST} {sig_row}")
    L.append(f" {DIM}TRD │{RST} {trade_row}")
    L.append(f" {DIM}FIL │{RST} {fill_row_str}")

    L.append("")

    # CAPITAL
    L.append(f"{BOLD}CAPITAL{RST} {CYAN}${capital:,.2f}{RST}"
             + (f" ROI:{roi_c}{roi:+.2f}%{RST} DD:{dd_c}{dd:.2f}%{RST}"
                f" start:${START_CAP:,.0f}" if START_CAP else ""))
    L.append(f" trades:{trades} scans:{scans} bar:{eng.get('bar_idx','?')}"
             f" lev:{lev:.2f}x notional:${notional:,.0f}")

    # Open positions
    positions = eng.get("open_positions") or []
    if positions:
        L.append(f" {BOLD}OPEN:{RST}")
        for p in positions:
            sc2 = GREEN if p.get("side") == "LONG" else RED
            L.append(f" {sc2}{p.get('asset','?')} {p.get('side','?')}{RST}"
                     f" qty:{p.get('quantity',0):.4f}"
                     f" entry:{p.get('entry_price',0):.2f}"
                     f" upnl:{p.get('unrealized_pnl',0):+.2f}")
    else:
        L.append(f" {DIM}no open positions{RST}")

    L.append("")

    # POSTURE
    bd = safe.get("breakdown") or {}
    L.append(f"{BOLD}POSTURE{RST} {pc}{posture}{RST} Rm:{pc}{_bar(rm,20)}{RST} {rm:.4f}")
    cats = " ".join(f"C{i}:{float(bd.get(f'Cat{i}',0)):.2f}" for i in range(1,6))
    L.append(f" {cats} f_env:{float(bd.get('f_env',0)):.3f} f_exe:{float(bd.get('f_exe',0)):.3f}")

    L.append("")

    # SYS HEALTH
    L.append(f"{BOLD}SYS HEALTH{RST} {sc}{mhs_st}{RST} rm_meta:{rm_meta:.3f}")
    for m in ("m1_data_infra","m1_trader","m2_heartbeat",
              "m3_data_freshness","m4_control_plane","m5_coherence"):
        v = float(mh.get(m, 0))
        c = GREEN if v >= 0.9 else (YELLOW if v >= 0.5 else RED)
        L.append(f" {c}{m}:{v:.3f}{RST}")

    # NOTE(review): the inner `'●' if st=='RUNNING' else ...` conditional is
    # dead — it only runs when the outer `if st == "RUNNING"` already holds.
    L.append(f" {DIM}services:{RST} "
             + " ".join(
                 f"{'●' if st=='RUNNING' else f'{RED}●{RST}'}{DIM}{n.split(':')[-1]}{RST}"
                 if st == "RUNNING" else
                 f"{RED}●{DIM}{n.split(':')[-1]}{RST}"
                 for n, st in sorted(svc.items())))

    L.append(f" {DIM}hz_keys:{RST} "
             + " ".join(
                 f"{GREEN if float(i.get('score',0))>=0.9 else (YELLOW if float(i.get('score',0))>=0.5 else RED)}●{RST}{DIM}{k}{RST}"
                 for k, i in sorted(hz_ks.items())))

    L.append("")
    L.append(f"{DIM}v3 • 1s poll • Ctrl-C quit{RST}")

    return "\n".join(L)
|
||||
|
||||
|
||||
def main():
    """Connect to the local Hazelcast cluster and repaint the status screen once per second."""
    print("Connecting to HZ...")
    client = hazelcast.HazelcastClient(
        cluster_name="dolphin", cluster_members=["localhost:5701"],
        connection_timeout=5.0)
    print("Connected.\n")
    out = sys.stdout
    try:
        while True:
            # Render fully before writing so a render failure never leaves a
            # half-painted frame on screen.
            try:
                frame = CLEAR + render(client) + "\n"
            except Exception as e:
                frame = f"\n{RED}render error: {e}{RST}\n"
            out.write(frame)
            out.flush()
            time.sleep(1)
    except KeyboardInterrupt:
        print(f"\n{DIM}Bye.{RST}")
    finally:
        client.shutdown()


if __name__ == "__main__":
    main()
|
||||
481
Observability/dolphin_status_v4.py
Executable file
481
Observability/dolphin_status_v4.py
Executable file
@@ -0,0 +1,481 @@
|
||||
#!/usr/bin/env python3
|
||||
"""DOLPHIN live status — v4
|
||||
Polls HZ every 1s. Three gear rows: SIG / TRD / FIL (signal→fill path).
|
||||
v4: vol_ok is THE master gate — shown prominently; live BTC vol vs threshold.
|
||||
|
||||
Run: source /home/dolphin/siloqy_env/bin/activate && python dolphin_status.py
|
||||
Quit: Ctrl-C
|
||||
"""
|
||||
# v1 archived as dolphin_status_v1.py
|
||||
# v2 archived inline (added SIG+TRD rows)
|
||||
# v3 archived as dolphin_status_v3.py (added FIL row)
|
||||
# v4: vol_ok promoted to master-gate prominence; BTC vol readout
|
||||
|
||||
import json, os, time, sys
|
||||
from datetime import datetime, timezone
|
||||
|
||||
import hazelcast
|
||||
|
||||
CLEAR = "\033[2J\033[H"
|
||||
BOLD = "\033[1m"; DIM = "\033[2m"; RST = "\033[0m"
|
||||
GREEN = "\033[32m"; YELLOW = "\033[33m"; RED = "\033[31m"; CYAN = "\033[36m"
|
||||
ORANGE = "\033[38;5;208m"
|
||||
|
||||
PC = {"APEX": GREEN, "STALKER": YELLOW, "TURTLE": ORANGE, "HIBERNATE": RED}
|
||||
SC = {"GREEN": GREEN, "DEGRADED": YELLOW, "CRITICAL": ORANGE, "DEAD": RED}
|
||||
|
||||
# Thresholds from nautilus_event_trader.py
|
||||
VEL_DIV_THRESHOLD = -0.020 # signal fires when vel_div < this
|
||||
VEL_DIV_EXTREME = -0.050 # extreme bearish
|
||||
VEL_DIV_WARN = -0.010 # approaching threshold (yellow)
|
||||
VEL_DIV_CLOSE = -0.015 # nearly there (orange→yellow)
|
||||
VOL_P60 = 0.00026414 # BTC 50-bar realised vol p60 — MASTER GATE
|
||||
BTC_VOL_WINDOW = 50 # bars used for vol calc
|
||||
|
||||
START_CAP = None
|
||||
CAP_PEAK = None
|
||||
|
||||
|
||||
def _age(ts):
|
||||
if not ts: return "?"
|
||||
s = time.time() - ts
|
||||
if s < 0: return "0s"
|
||||
if s < 60: return f"{s:.0f}s"
|
||||
if s < 3600: return f"{s/60:.0f}m"
|
||||
return f"{s/3600:.1f}h"
|
||||
|
||||
def _bar(v, w=20):
|
||||
v = max(0.0, min(1.0, v))
|
||||
return "█" * round(v * w) + "░" * (w - round(v * w))
|
||||
|
||||
def _get(hz, map_name, key):
|
||||
try:
|
||||
raw = hz.get_map(map_name).blocking().get(key)
|
||||
return json.loads(raw) if raw else {}
|
||||
except Exception:
|
||||
return {}
|
||||
|
||||
# ── Gear items ────────────────────────────────────────────────────────────────
|
||||
# Each returns (label, color, value_str)
|
||||
def _item(label, color, val=""):
    """One gear-row cell: colored ● dot followed by a dim "label:val" caption."""
    suffix = f":{val}" if val else ""
    return f"{color}●{RST}{DIM}{label}{suffix}{RST}"
|
||||
|
||||
def _vel_item(vel_div):
    """vel_div cell, colored by distance to the fire threshold (-0.02)."""
    shown = f"{vel_div:+.4f}"
    # The original EXTREME (<= -0.05) and THRESHOLD (<= -0.02) branches both
    # render GREEN, so they collapse into one check here.
    if vel_div <= VEL_DIV_THRESHOLD:
        color = GREEN    # at/past threshold — signal green
    elif vel_div <= VEL_DIV_CLOSE:
        color = YELLOW   # (-0.020, -0.015] — nearly there
    elif vel_div <= VEL_DIV_WARN:
        color = ORANGE   # (-0.015, -0.010] — approaching
    else:
        color = RED      # negative-but-far, or positive (not bearish)
    return _item("vel_div", color, shown)
|
||||
|
||||
def signal_fired(vel_div, vol_ok, posture, acb_ready, exf_ok, halt):
    """Return True only when every signal precondition is green."""
    # Guard clauses: any blocked gate kills the signal outright.
    if halt or not vol_ok or not acb_ready or not exf_ok:
        return False
    if posture in ("HIBERNATE", "TURTLE"):
        return False
    return vel_div <= VEL_DIV_THRESHOLD
|
||||
|
||||
def trade_can_execute(open_count, lev, abs_cap, daily_loss_ok, boost):
    """Execution gates beyond the signal: flat book, leverage headroom, loss cap, sizing."""
    gates = (
        open_count == 0,   # must be flat — no open position already
        lev < abs_cap,     # leverage headroom remains
        bool(daily_loss_ok),  # daily-loss circuit not tripped
        boost > 0,         # ACB sizing must allow entry
    )
    return all(gates)
|
||||
|
||||
OB_IMBALANCE_BIAS = -0.09 # from engine config: ob_imbalance_bias
|
||||
|
||||
def _best_fill_candidate(obf_universe):
|
||||
"""Pick best SHORT candidate from OBF universe.
|
||||
Criteria: negative imbalance (bearish pressure) + high fill_probability + low spread.
|
||||
Returns (symbol, asset_dict) or (None, {}).
|
||||
"""
|
||||
candidates = []
|
||||
for k, v in obf_universe.items():
|
||||
if not isinstance(v, dict) or "fill_probability" not in v:
|
||||
continue
|
||||
candidates.append((k, v))
|
||||
if not candidates:
|
||||
return None, {}
|
||||
# Score: fill_prob * (1 + bearish_imbalance_bonus) / (1 + spread_bps/10)
|
||||
def score(item):
|
||||
sym, a = item
|
||||
imb = float(a.get("imbalance", 0))
|
||||
fp = float(a.get("fill_probability", 0))
|
||||
sp = float(a.get("spread_bps", 99))
|
||||
dq = float(a.get("depth_quality", 0))
|
||||
# Bearish bias: reward negative imbalance, penalise positive
|
||||
imb_bonus = max(0.0, -imb) # 0..1 for imbalance in [-1,0]
|
||||
return fp * (1 + imb_bonus) * dq / max(0.1, sp)
|
||||
candidates.sort(key=score, reverse=True)
|
||||
return candidates[0]
|
||||
|
||||
|
||||
def fill_row(obf_universe, acb, eng):
    """Row 3: signal → asset-pick → OBF liquidity → size → ORDER.

    Builds the FIL gear row string from the order-book-features (OBF)
    universe snapshot, the ACB sizing feature map and the engine snapshot.
    Each cell is a colored dot + dim caption from _item(); the row ends with
    either "◉ ORDER READY" or a dim "(order: waiting)" marker.
    """
    f_items = []

    # ── Asset picker (IRP/ARS) ─────────────────────────────────────────────
    # "_n_assets"/"_n_stale" are meta keys on the universe dict, not assets.
    n_assets = int(obf_universe.get("_n_assets", 0) if obf_universe else 0)
    n_stale = int(obf_universe.get("_n_stale", 0) if obf_universe else 0)
    n_fresh = n_assets - n_stale

    f_items.append(_item("universe",
        GREEN if n_fresh >= 200 else (YELLOW if n_fresh >= 50 else RED),
        f"{n_fresh}/{n_assets}"))

    sym, ab = _best_fill_candidate(obf_universe)
    if sym:
        fill_p = float(ab.get("fill_probability", 0))
        spread = float(ab.get("spread_bps", 99))
        dq = float(ab.get("depth_quality", 0))
        imb = float(ab.get("imbalance", 0))
        depth = float(ab.get("depth_1pct_usd", 0))

        # Best candidate asset (symbol truncated to 6 chars for row width)
        asset_color = GREEN if fill_p >= 0.80 else (YELLOW if fill_p >= 0.50 else RED)
        f_items.append(_item("best", asset_color, sym[:6]))

        # OBF: fill probability
        f_items.append(_item("fill_p",
            GREEN if fill_p >= 0.85 else (YELLOW if fill_p >= 0.60 else RED),
            f"{fill_p:.2f}"))

        # OBF: spread
        f_items.append(_item("spread",
            GREEN if spread <= 3 else (YELLOW if spread <= 8 else RED),
            f"{spread:.1f}bps"))

        # OBF: depth quality
        f_items.append(_item("depth_q",
            GREEN if dq >= 0.5 else (YELLOW if dq >= 0.1 else RED),
            f"{dq:.2f}"))

        # OBF: imbalance direction (SHORT needs bearish = negative)
        imb_ok = imb < OB_IMBALANCE_BIAS  # confirmed bearish pressure
        f_items.append(_item("imb",
            GREEN if imb_ok else
            YELLOW if imb < 0 else
            ORANGE if imb < 0.1 else RED,
            f"{imb:+.2f}"))

        # OBF: depth USD (1% book depth)
        f_items.append(_item("depth",
            GREEN if depth >= 50_000 else (YELLOW if depth >= 10_000 else RED),
            f"${depth/1000:.0f}k"))

    else:
        f_items.append(_item("OBF", RED, "no data"))

    # ── Sizing — ACB boost × proxy_B prank ────────────────────────────────
    # proxy_B prank not exposed in HZ snapshot; show ACB boost as sizing proxy
    boost = float(acb.get("boost", 1.0) if acb else 1.0)
    beta = float(acb.get("beta", 0.8) if acb else 0.8)
    f_items.append(_item("acb_boost",
        GREEN if boost >= 1.5 else (YELLOW if boost >= 1.0 else ORANGE),
        f"×{boost:.2f}"))

    f_items.append(_item("beta",
        GREEN if beta >= 0.7 else (YELLOW if beta >= 0.4 else RED),
        f"{beta:.2f}"))

    # ── ORDER indicator ────────────────────────────────────────────────────
    # Would an order fire if signal were green right now?
    # NOTE: fill_p is only bound inside the `if sym:` branch above; the
    # trailing `if sym else False` guard keeps this expression safe.
    open_count = len(eng.get("open_positions") or [])
    lev = float(eng.get("current_leverage", 0) or 0)
    abs_c = float(eng.get("leverage_abs_cap", 9.0) or 9.0)
    order_ready = (
        sym is not None
        and fill_p >= 0.60
        and open_count == 0
        and lev < abs_c
        and boost > 0
    ) if sym else False

    if order_ready:
        f_items.append(f" {CYAN}{BOLD}◉ ORDER READY{RST}")
    else:
        f_items.append(f" {DIM}(order: waiting){RST}")

    return "  ".join(f_items)
|
||||
|
||||
|
||||
def gear_rows(eng, safe, acb, exf, hb, obf_universe=None):
    """Return three formatted rows: SIGNAL, TRADE gates, FILL path.

    Args:
        eng: engine_snapshot dict from HZ.
        safe: DOLPHIN_SAFETY "latest" dict (posture, Rm, breakdown).
        acb: ACB sizing feature dict.
        exf: external-factors feature dict (may be empty).
        hb: heartbeat dict with a unix "ts".
        obf_universe: OBF universe snapshot for the FIL row (optional).

    Returns:
        (sig_row, trade_row, fill_row_str) — three pre-colored strings.
    """
    vel_div = float(eng.get("last_vel_div", 0) or 0)
    vol_ok = bool(eng.get("vol_ok", False))
    posture = safe.get("posture") or eng.get("posture") or "?"
    halt = posture in ("HIBERNATE", "TURTLE")

    acb_boost_val = float(acb.get("boost", acb.get("cut", 0)) or 0)
    acb_ready = acb_boost_val > 0  # cut=0 means blocked
    exf_ok_count = int(exf.get("_ok_count", 0) if exf else 0)
    exf_ok = exf_ok_count >= 3

    open_count = len(eng.get("open_positions") or [])
    lev = float(eng.get("current_leverage", 0) or 0)
    abs_cap = float(eng.get("leverage_abs_cap", 9.0) or 9.0)
    trades_ex = int(eng.get("trades_executed") or 0)

    hb_ts = hb.get("ts")
    hb_ok = bool(hb_ts and (time.time() - hb_ts) < 30)

    # ── SIGNAL ROW ────────────────────────────────────────────────────────────
    # vol_ok is the MASTER GATE — listed first. When False, _try_entry is never
    # called regardless of vel_div. BTC 50-bar realised vol must exceed p60=0.000264.
    s_items = []

    # BTC vol — try to get live reading from exf for display context
    btc_vol_str = "—"
    if exf:
        dvol_raw = exf.get("dvol_btc") or exf.get("dvol")
        fng_raw = exf.get("fng")
        if dvol_raw:
            btc_vol_str = f"dV:{float(dvol_raw):.0f}"
        if fng_raw:
            btc_vol_str += f" FnG:{float(fng_raw):.0f}"

    vol_label = f"vol_ok({btc_vol_str})"
    s_items.append(_item(vol_label,
        GREEN if vol_ok else RED,
        "✓" if vol_ok else "✗ BLOCKED"))  # was a pointless f-string

    s_items.append(_vel_item(vel_div))

    # posture gate (removed unused locals `pc` and `posture_ok` from v3 era)
    s_items.append(_item("posture",
        GREEN if posture == "APEX" else (YELLOW if posture == "STALKER" else RED),
        posture))

    # acb_ready — NOTE: the ORANGE branch is unreachable since acb_ready is
    # exactly acb_boost_val > 0; kept for parity with the original display.
    s_items.append(_item("acb",
        GREEN if acb_ready else (ORANGE if acb_boost_val > 0 else RED),
        f"{acb_boost_val:.2f}"))

    # exf_ok — external factors pipeline (3 of 5 sources must be healthy)
    s_items.append(_item("exf",
        GREEN if exf_ok else (YELLOW if exf_ok_count >= 1 else RED),
        f"{exf_ok_count}/5"))

    # halt gate
    s_items.append(_item("no_halt",
        GREEN if not halt else RED,
        "✓" if not halt else "HALT"))

    # heartbeat freshness (< 30 s)
    s_items.append(_item("hb",
        GREEN if hb_ok else RED,
        _age(hb_ts)))

    # ALL GREEN → fire indicator
    all_sig = signal_fired(vel_div, vol_ok, posture, acb_ready, exf_ok, halt)
    if all_sig:
        s_items.append(f" {GREEN}{BOLD}◉ SIGNAL{RST}")

    # ── TRADE ROW ─────────────────────────────────────────────────────────────
    # Additional gates that must pass before a matched signal becomes a fill
    t_items = []

    # open positions
    t_items.append(_item("open_pos",
        GREEN if open_count == 0 else ORANGE,
        str(open_count)))

    # leverage headroom
    lev_pct = lev / abs_cap if abs_cap else 0
    t_items.append(_item("lev",
        GREEN if lev_pct < 0.3 else (YELLOW if lev_pct < 0.7 else RED),
        f"{lev:.2f}x/{abs_cap:.0f}"))

    # regime_dd_halt
    t_items.append(_item("regime",
        GREEN if not halt else RED,
        "free" if not halt else "HALTED"))

    # Rm strength
    rm = float(safe.get("Rm", 0) or 0)
    t_items.append(_item("Rm",
        GREEN if rm >= 0.90 else (YELLOW if rm >= 0.70 else (ORANGE if rm >= 0.50 else RED)),
        f"{rm:.3f}"))

    # Cat5 (intraday drawdown contribution)
    c5 = float((safe.get("breakdown") or {}).get("Cat5", 1.0) or 1.0)
    t_items.append(_item("Cat5",
        GREEN if c5 >= 0.95 else (YELLOW if c5 >= 0.85 else (ORANGE if c5 >= 0.70 else RED)),
        f"{c5:.3f}"))

    # trades today
    t_items.append(_item("trades",
        GREEN if trades_ex < 20 else (YELLOW if trades_ex < 35 else ORANGE),
        str(trades_ex)))

    # ALL GREEN trade execute indicator
    daily_loss_ok = c5 > 0.50  # reasonable proxy — Cat5 tracks drawdown
    all_trade = all_sig and trade_can_execute(open_count, lev, abs_cap, daily_loss_ok, acb_boost_val)
    if all_trade:
        t_items.append(f" {CYAN}{BOLD}◉ TRADE{RST}")

    sig_row = "  ".join(s_items)
    trade_row = "  ".join(t_items)
    fill = fill_row(obf_universe or {}, acb, eng)
    return sig_row, trade_row, fill
|
||||
|
||||
|
||||
def render(hz):
    """Build the full v4 status screen as one ANSI string.

    Pulls every snapshot map from Hazelcast, derives session ROI/drawdown via
    the module-level START_CAP / CAP_PEAK globals (mutated here), and
    assembles the TRADER / gears / CAPITAL / POSTURE / SYS HEALTH sections.
    """
    global START_CAP, CAP_PEAK

    eng = _get(hz, "DOLPHIN_STATE_BLUE", "engine_snapshot")
    cap = _get(hz, "DOLPHIN_STATE_BLUE", "capital_checkpoint")
    safe = _get(hz, "DOLPHIN_SAFETY", "latest")
    hb = _get(hz, "DOLPHIN_HEARTBEAT", "nautilus_flow_heartbeat")
    mh = _get(hz, "DOLPHIN_META_HEALTH", "latest")
    acb = _get(hz, "DOLPHIN_FEATURES", "acb_boost")
    exf = _get(hz, "DOLPHIN_FEATURES", "exf_latest")
    obf = _get(hz, "DOLPHIN_FEATURES", "obf_universe_latest")

    now = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
    # Engine capital preferred; fall back to the checkpoint map when absent.
    capital = float(eng.get("capital", 0) or cap.get("capital", 0))
    posture = safe.get("posture") or eng.get("posture") or "?"
    rm = float(safe.get("Rm", 0))
    hb_ts = hb.get("ts")
    phase = hb.get("phase", "?")
    trader_up = hb_ts and (time.time() - hb_ts) < 30  # heartbeat < 30 s ⇒ LIVE
    trades = eng.get("trades_executed", "—")
    scans = eng.get("scans_processed", "—")
    lev = float(eng.get("current_leverage", 0))
    notional= float(eng.get("open_notional", 0))
    mhs_st = mh.get("status", "?")
    rm_meta = float(mh.get("rm_meta", 0))

    # Session baselines: first nonzero capital seen becomes START_CAP; peak
    # tracked for drawdown. Both reset only when the process restarts.
    if capital > 0:
        if START_CAP is None: START_CAP = capital
        if CAP_PEAK is None or capital > CAP_PEAK: CAP_PEAK = capital

    roi = ((capital - START_CAP) / START_CAP * 100) if START_CAP and capital else 0
    dd = ((CAP_PEAK - capital) / CAP_PEAK * 100) if CAP_PEAK and capital < CAP_PEAK else 0

    pc = PC.get(posture, DIM)
    sc = SC.get(mhs_st, DIM)
    tc = GREEN if trader_up else RED
    roi_c = GREEN if roi >= 0 else RED
    dd_c = RED if dd > 15 else (YELLOW if dd > 5 else GREEN)

    sig_row, trade_row, fill_row_str = gear_rows(eng, safe, acb, exf, hb, obf)

    svc = mh.get("service_status", {})
    hz_ks = mh.get("hz_key_status", {})

    L = []
    L.append(f"{BOLD}{CYAN}🐬 DOLPHIN-NAUTILUS{RST} {DIM}{now}{RST}")
    L.append("─" * 60)

    # TRADER
    L.append(f"{BOLD}TRADER{RST} {tc}{'● LIVE' if trader_up else '● DOWN'}{RST}"
             f" phase:{phase} hb:{_age(hb_ts)}"
             f" scan:#{eng.get('last_scan_number','?')}")

    # ── SIGNAL → FILL GEARS ───────────────────────────────────────────────────
    # Master-gate banner: when vol_ok is False the engine never calls
    # _try_entry, so every other gate below is moot.
    vol_ok_live = bool(eng.get("vol_ok", False))
    if not vol_ok_live:
        L.append(f" {RED}{BOLD}⛔ VOL_OK=FALSE — engine gate closed, NO trades until BTC vol > {VOL_P60:.6f}{RST}")
    L.append(f" {DIM}SIG │{RST} {sig_row}")
    L.append(f" {DIM}TRD │{RST} {trade_row}")
    L.append(f" {DIM}FIL │{RST} {fill_row_str}")

    L.append("")

    # CAPITAL — ROI/DD suffix only once START_CAP has been established.
    L.append(f"{BOLD}CAPITAL{RST} {CYAN}${capital:,.2f}{RST}"
             + (f" ROI:{roi_c}{roi:+.2f}%{RST} DD:{dd_c}{dd:.2f}%{RST}"
                f" start:${START_CAP:,.0f}" if START_CAP else ""))
    L.append(f" trades:{trades} scans:{scans} bar:{eng.get('bar_idx','?')}"
             f" lev:{lev:.2f}x notional:${notional:,.0f}")

    # Open positions
    positions = eng.get("open_positions") or []
    if positions:
        L.append(f" {BOLD}OPEN:{RST}")
        for p in positions:
            sc2 = GREEN if p.get("side") == "LONG" else RED
            L.append(f"  {sc2}{p.get('asset','?')} {p.get('side','?')}{RST}"
                     f" qty:{p.get('quantity',0):.4f}"
                     f" entry:{p.get('entry_price',0):.2f}"
                     f" upnl:{p.get('unrealized_pnl',0):+.2f}")
    else:
        L.append(f" {DIM}no open positions{RST}")

    L.append("")

    # POSTURE — Rm bar colored by posture; Cat1..Cat5 breakdown below.
    bd = safe.get("breakdown") or {}
    L.append(f"{BOLD}POSTURE{RST} {pc}{posture}{RST} Rm:{pc}{_bar(rm,20)}{RST} {rm:.4f}")
    cats = " ".join(f"C{i}:{float(bd.get(f'Cat{i}',0)):.2f}" for i in range(1,6))
    L.append(f" {cats} f_env:{float(bd.get('f_env',0)):.3f} f_exe:{float(bd.get('f_exe',0)):.3f}")

    L.append("")

    # SYS HEALTH — meta-health metrics m1..m5 plus service and HZ-key dots.
    L.append(f"{BOLD}SYS HEALTH{RST} {sc}{mhs_st}{RST} rm_meta:{rm_meta:.3f}")
    for m in ("m1_data_infra","m1_trader","m2_heartbeat",
              "m3_data_freshness","m4_control_plane","m5_coherence"):
        v = float(mh.get(m, 0))
        c = GREEN if v >= 0.9 else (YELLOW if v >= 0.5 else RED)
        L.append(f"  {c}{m}:{v:.3f}{RST}")

    # NOTE(review): the inner ternary's else-arm here looks unreachable — the
    # outer `if st == "RUNNING"` already selects the branch; confirm intent.
    L.append(f" {DIM}services:{RST} "
             + " ".join(
                 f"{'●' if st=='RUNNING' else f'{RED}●{RST}'}{DIM}{n.split(':')[-1]}{RST}"
                 if st == "RUNNING" else
                 f"{RED}●{DIM}{n.split(':')[-1]}{RST}"
                 for n, st in sorted(svc.items())))

    L.append(f" {DIM}hz_keys:{RST} "
             + " ".join(
                 f"{GREEN if float(i.get('score',0))>=0.9 else (YELLOW if float(i.get('score',0))>=0.5 else RED)}●{RST}{DIM}{k}{RST}"
                 for k, i in sorted(hz_ks.items())))

    L.append("")
    L.append(f"{DIM}v4 • 1s poll • vol_ok is master gate • Ctrl-C quit{RST}")

    return "\n".join(L)
|
||||
|
||||
|
||||
def main():
    """Connect to the dolphin Hazelcast cluster and repaint the v4 screen every second."""
    print("Connecting to HZ...")
    client = hazelcast.HazelcastClient(
        cluster_name="dolphin", cluster_members=["localhost:5701"],
        connection_timeout=5.0)
    print("Connected.\n")
    out = sys.stdout
    try:
        while True:
            # Build the whole frame first; render errors become a red one-liner.
            try:
                frame = CLEAR + render(client) + "\n"
            except Exception as e:
                frame = f"\n{RED}render error: {e}{RST}\n"
            out.write(frame)
            out.flush()
            time.sleep(1)
    except KeyboardInterrupt:
        print(f"\n{DIM}Bye.{RST}")
    finally:
        client.shutdown()


if __name__ == "__main__":
    main()
|
||||
602
Observability/dolphin_status_v5.py
Executable file
602
Observability/dolphin_status_v5.py
Executable file
@@ -0,0 +1,602 @@
|
||||
#!/usr/bin/env python3
|
||||
"""DOLPHIN live status — v5
|
||||
0.5s poll. SIG/TRD/FIL gear rows + last-5-trades + CH persistence.
|
||||
|
||||
Run: source /home/dolphin/siloqy_env/bin/activate && python dolphin_status.py
|
||||
Quit: Ctrl-C
|
||||
"""
|
||||
# v1–v4 archived as dolphin_status_v{1..4}.py
|
||||
# v5: 0.5s, last-5-trades row, CH status_snapshots write
|
||||
|
||||
import json, re, threading, time, sys, urllib.request, urllib.parse
|
||||
from collections import deque
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
import hazelcast
|
||||
|
||||
# ── ClickHouse fire-and-forget write ─────────────────────────────────────────
|
||||
_CH_URL = "http://localhost:8123"
|
||||
_CH_USER = "dolphin"
|
||||
_CH_PASS = "dolphin_ch_2026"
|
||||
_CH_Q: deque = deque(maxlen=500)
|
||||
|
||||
def _ch_worker():
    """Background drain loop: every 2 s, batch queued rows into one ClickHouse INSERT.

    Rows are popped from the module-level _CH_Q deque and posted as
    JSONEachRow. Failures are swallowed — observability is non-critical and
    must never take the dashboard down.
    """
    while True:
        time.sleep(2)
        rows = []
        while _CH_Q:
            try: rows.append(_CH_Q.popleft())
            except IndexError: break  # raced another consumer — queue drained
        if not rows: continue
        body = "\n".join(json.dumps(r) for r in rows).encode()
        url = f"{_CH_URL}/?database=dolphin&query=INSERT+INTO+status_snapshots+FORMAT+JSONEachRow"
        req = urllib.request.Request(url, data=body, method="POST")
        req.add_header("X-ClickHouse-User", _CH_USER)
        req.add_header("X-ClickHouse-Key", _CH_PASS)
        req.add_header("Content-Type", "application/octet-stream")
        try:
            # Close the HTTP response explicitly — the original leaked the
            # connection object on every successful insert.
            with urllib.request.urlopen(req, timeout=4):
                pass
        except Exception:
            pass  # observability is non-critical

threading.Thread(target=_ch_worker, daemon=True, name="ch-status").start()
|
||||
|
||||
def ch_put(row: dict):
    """Queue one status row for the background ClickHouse writer.

    Non-blocking; the deque is bounded (maxlen=500), so the oldest rows are
    silently dropped under backlog rather than growing memory.
    """
    _CH_Q.append(row)
|
||||
|
||||
# ── Trade log parser ──────────────────────────────────────────────────────────
|
||||
_TRADER_LOG = Path("/mnt/dolphinng5_predict/prod/supervisor/logs/nautilus_trader.log")
|
||||
# Capture the JSON dict only — stop at the first } that closes the payload.
|
||||
# Lines may have a trailing tag like [v2_gold_fix_v50-v750] after the dict.
|
||||
_RE_ENTRY = re.compile(r"\[(.+?)\] ENTRY: (\{.+?\})(?:\s*\[.*\])?$")
|
||||
_RE_EXIT = re.compile(r"\[(.+?)\] EXIT: (\{.+?\})(?:\s*\[.*\])?$")
|
||||
|
||||
def _parse_log_dict(raw: str) -> dict:
|
||||
"""Convert single-quoted Python dict repr to JSON-parsed dict."""
|
||||
# Replace single quotes used as string delimiters, but preserve apostrophes
|
||||
# inside values by only replacing quote chars that precede a key or follow a value.
|
||||
import ast
|
||||
try:
|
||||
return ast.literal_eval(raw) # safest: handles all Python literal forms
|
||||
except Exception:
|
||||
return json.loads(raw.replace("'", '"'))
|
||||
|
||||
def _last_n_trades(n=5):
    """Parse last N completed trades from supervisor log. Returns list of dicts.

    Scans the final 4000 log lines, pairing ENTRY records with their matching
    EXIT by trade_id; only completed round-trips are returned, oldest first.
    Any unreadable line is skipped — this is best-effort display data.
    """
    try:
        lines = _TRADER_LOG.read_text(errors="replace").splitlines()[-4000:]
    except Exception:
        # Log missing/unreadable — show no trades rather than crash the screen.
        return []
    entries = {}   # trade_id -> entry dict (awaiting its EXIT)
    trades = []    # completed round-trips, in log order
    for line in lines:
        m = _RE_ENTRY.search(line)
        if m:
            try:
                d = _parse_log_dict(m.group(2))
                # group(1) is the bracketed log timestamp prefix.
                entries[d["trade_id"]] = {"ts": m.group(1), **d}
            except Exception:
                pass
        m = _RE_EXIT.search(line)
        if m:
            try:
                d = _parse_log_dict(m.group(2))
                tid = d.get("trade_id")
                if tid and tid in entries:
                    # Merge exit fields onto the stored entry record.
                    e = entries.pop(tid)
                    trades.append({**e, "exit_ts": m.group(1),
                                   "reason": d.get("reason","?"),
                                   "pnl_pct": d.get("pnl_pct", 0),
                                   "net_pnl": d.get("net_pnl", 0),
                                   "bars_held": d.get("bars_held", 0)})
            except Exception:
                pass
    return trades[-n:]
|
||||
|
||||
CLEAR = "\033[2J\033[H"
|
||||
BOLD = "\033[1m"; DIM = "\033[2m"; RST = "\033[0m"
|
||||
GREEN = "\033[32m"; YELLOW = "\033[33m"; RED = "\033[31m"; CYAN = "\033[36m"
|
||||
ORANGE = "\033[38;5;208m"
|
||||
|
||||
PC = {"APEX": GREEN, "STALKER": YELLOW, "TURTLE": ORANGE, "HIBERNATE": RED}
|
||||
SC = {"GREEN": GREEN, "DEGRADED": YELLOW, "CRITICAL": ORANGE, "DEAD": RED}
|
||||
|
||||
# Thresholds from nautilus_event_trader.py
|
||||
VEL_DIV_THRESHOLD = -0.020 # signal fires when vel_div < this
|
||||
VEL_DIV_EXTREME = -0.050 # extreme bearish
|
||||
VEL_DIV_WARN = -0.010 # approaching threshold (yellow)
|
||||
VEL_DIV_CLOSE = -0.015 # nearly there (orange→yellow)
|
||||
VOL_P60 = 0.00026414 # BTC 50-bar realised vol p60 — MASTER GATE
|
||||
BTC_VOL_WINDOW = 50 # bars used for vol calc
|
||||
|
||||
START_CAP = None
|
||||
CAP_PEAK = None
|
||||
|
||||
|
||||
def _age(ts):
|
||||
if not ts: return "?"
|
||||
s = time.time() - ts
|
||||
if s < 0: return "0s"
|
||||
if s < 60: return f"{s:.0f}s"
|
||||
if s < 3600: return f"{s/60:.0f}m"
|
||||
return f"{s/3600:.1f}h"
|
||||
|
||||
def _bar(v, w=20):
|
||||
v = max(0.0, min(1.0, v))
|
||||
return "█" * round(v * w) + "░" * (w - round(v * w))
|
||||
|
||||
def _get(hz, map_name, key):
|
||||
try:
|
||||
raw = hz.get_map(map_name).blocking().get(key)
|
||||
return json.loads(raw) if raw else {}
|
||||
except Exception:
|
||||
return {}
|
||||
|
||||
# ── Gear items ────────────────────────────────────────────────────────────────
|
||||
# Each returns (label, color, value_str)
|
||||
def _item(label, color, val=""):
    """Colored status dot plus dim caption; value rendered as ":val" when given."""
    caption = f"{label}:{val}" if val else label
    return f"{color}●{RST}{DIM}{caption}{RST}"
|
||||
|
||||
def _vel_item(vel_div):
    """Color the vel_div readout by distance to the -0.02 fire threshold."""
    shown = f"{vel_div:+.4f}"
    # Walk the ladder from most- to least-bearish; the EXTREME band shares
    # GREEN with the THRESHOLD band, so it needs no separate rung.
    for limit, col in ((VEL_DIV_THRESHOLD, GREEN),
                       (VEL_DIV_CLOSE, YELLOW),
                       (VEL_DIV_WARN, ORANGE)):
        if vel_div <= limit:
            return _item("vel_div", col, shown)
    # Negative-but-far and outright positive both render RED.
    return _item("vel_div", RED, shown)
|
||||
|
||||
def signal_fired(vel_div, vol_ok, posture, acb_ready, exf_ok, halt):
    """All-green check across every signal precondition."""
    blocked = (
        halt
        or not vol_ok
        or not acb_ready
        or not exf_ok
        or posture in ("HIBERNATE", "TURTLE")
    )
    return (not blocked) and vel_div <= VEL_DIV_THRESHOLD
|
||||
|
||||
def trade_can_execute(open_count, lev, abs_cap, daily_loss_ok, boost):
    """Post-signal execution gates: flat book, leverage room, loss cap OK, positive sizing."""
    if open_count != 0:
        return False           # already holding a position
    if not lev < abs_cap:
        return False           # no leverage headroom left
    if not daily_loss_ok:
        return False           # daily-loss circuit tripped
    return boost > 0           # ACB sizing must allow entry
|
||||
|
||||
OB_IMBALANCE_BIAS = -0.09 # from engine config: ob_imbalance_bias
|
||||
|
||||
def _best_fill_candidate(obf_universe):
|
||||
"""Pick best SHORT candidate from OBF universe.
|
||||
Criteria: negative imbalance (bearish pressure) + high fill_probability + low spread.
|
||||
Returns (symbol, asset_dict) or (None, {}).
|
||||
"""
|
||||
candidates = []
|
||||
for k, v in obf_universe.items():
|
||||
if not isinstance(v, dict) or "fill_probability" not in v:
|
||||
continue
|
||||
candidates.append((k, v))
|
||||
if not candidates:
|
||||
return None, {}
|
||||
# Score: fill_prob * (1 + bearish_imbalance_bonus) / (1 + spread_bps/10)
|
||||
def score(item):
|
||||
sym, a = item
|
||||
imb = float(a.get("imbalance", 0))
|
||||
fp = float(a.get("fill_probability", 0))
|
||||
sp = float(a.get("spread_bps", 99))
|
||||
dq = float(a.get("depth_quality", 0))
|
||||
# Bearish bias: reward negative imbalance, penalise positive
|
||||
imb_bonus = max(0.0, -imb) # 0..1 for imbalance in [-1,0]
|
||||
return fp * (1 + imb_bonus) * dq / max(0.1, sp)
|
||||
candidates.sort(key=score, reverse=True)
|
||||
return candidates[0]
|
||||
|
||||
|
||||
def fill_row(obf_universe, acb, eng):
    """Row 3: signal → asset-pick → OBF liquidity → size → ORDER.

    Assembles the FIL gear row from the order-book-features universe snapshot,
    the ACB sizing feature map and the engine snapshot, returning one
    space-joined string of colored _item() cells ending in an ORDER marker.
    """
    f_items = []

    # ── Asset picker (IRP/ARS) ─────────────────────────────────────────────
    # "_n_assets"/"_n_stale" are meta keys on the universe dict, not assets.
    n_assets = int(obf_universe.get("_n_assets", 0) if obf_universe else 0)
    n_stale = int(obf_universe.get("_n_stale", 0) if obf_universe else 0)
    n_fresh = n_assets - n_stale

    f_items.append(_item("universe",
        GREEN if n_fresh >= 200 else (YELLOW if n_fresh >= 50 else RED),
        f"{n_fresh}/{n_assets}"))

    sym, ab = _best_fill_candidate(obf_universe)
    if sym:
        fill_p = float(ab.get("fill_probability", 0))
        spread = float(ab.get("spread_bps", 99))
        dq = float(ab.get("depth_quality", 0))
        imb = float(ab.get("imbalance", 0))
        depth = float(ab.get("depth_1pct_usd", 0))

        # Best candidate asset (symbol truncated to 6 chars for row width)
        asset_color = GREEN if fill_p >= 0.80 else (YELLOW if fill_p >= 0.50 else RED)
        f_items.append(_item("best", asset_color, sym[:6]))

        # OBF: fill probability
        f_items.append(_item("fill_p",
            GREEN if fill_p >= 0.85 else (YELLOW if fill_p >= 0.60 else RED),
            f"{fill_p:.2f}"))

        # OBF: spread
        f_items.append(_item("spread",
            GREEN if spread <= 3 else (YELLOW if spread <= 8 else RED),
            f"{spread:.1f}bps"))

        # OBF: depth quality
        f_items.append(_item("depth_q",
            GREEN if dq >= 0.5 else (YELLOW if dq >= 0.1 else RED),
            f"{dq:.2f}"))

        # OBF: imbalance direction (SHORT needs bearish = negative)
        imb_ok = imb < OB_IMBALANCE_BIAS  # confirmed bearish pressure
        f_items.append(_item("imb",
            GREEN if imb_ok else
            YELLOW if imb < 0 else
            ORANGE if imb < 0.1 else RED,
            f"{imb:+.2f}"))

        # OBF: depth USD (1% book depth)
        f_items.append(_item("depth",
            GREEN if depth >= 50_000 else (YELLOW if depth >= 10_000 else RED),
            f"${depth/1000:.0f}k"))

    else:
        f_items.append(_item("OBF", RED, "no data"))

    # ── Sizing — ACB boost × proxy_B prank ────────────────────────────────
    # proxy_B prank not exposed in HZ snapshot; show ACB boost as sizing proxy
    boost = float(acb.get("boost", 1.0) if acb else 1.0)
    beta = float(acb.get("beta", 0.8) if acb else 0.8)
    f_items.append(_item("acb_boost",
        GREEN if boost >= 1.5 else (YELLOW if boost >= 1.0 else ORANGE),
        f"×{boost:.2f}"))

    f_items.append(_item("beta",
        GREEN if beta >= 0.7 else (YELLOW if beta >= 0.4 else RED),
        f"{beta:.2f}"))

    # ── ORDER indicator ────────────────────────────────────────────────────
    # Would an order fire if signal were green right now?
    # NOTE: fill_p only exists inside the `if sym:` branch above; the trailing
    # `if sym else False` guard keeps this expression safe when sym is None.
    open_count = len(eng.get("open_positions") or [])
    lev = float(eng.get("current_leverage", 0) or 0)
    abs_c = float(eng.get("leverage_abs_cap", 9.0) or 9.0)
    order_ready = (
        sym is not None
        and fill_p >= 0.60
        and open_count == 0
        and lev < abs_c
        and boost > 0
    ) if sym else False

    if order_ready:
        f_items.append(f" {CYAN}{BOLD}◉ ORDER READY{RST}")
    else:
        f_items.append(f" {DIM}(order: waiting){RST}")

    return "  ".join(f_items)
|
||||
|
||||
|
||||
def gear_rows(eng, safe, acb, exf, hb, obf_universe=None):
    """Return three formatted rows: SIGNAL, TRADE gates, FILL path.

    Args:
        eng:  engine_snapshot dict (last_vel_div, vol_ok, open_positions,
              current_leverage, leverage_abs_cap, trades_executed, ...).
        safe: DOLPHIN_SAFETY latest dict (posture, Rm, breakdown).
        acb:  acb_boost feature dict (boost/cut) — may be falsy.
        exf:  external-factors dict (_ok_count, dvol/dvol_btc, fng) — may be falsy.
        hb:   heartbeat dict (ts).
        obf_universe: order-book-feature universe, passed through to fill_row().

    Returns:
        (sig_row, trade_row, fill_row) — three ANSI-coloured display strings.
    """
    vel_div = float(eng.get("last_vel_div", 0) or 0)
    vol_ok = bool(eng.get("vol_ok", False))
    posture = safe.get("posture") or eng.get("posture") or "?"
    halt = posture in ("HIBERNATE", "TURTLE")

    acb_boost_val = float(acb.get("boost", acb.get("cut", 0)) or 0)
    acb_ready = acb_boost_val > 0  # cut=0 means blocked
    exf_ok_count = int(exf.get("_ok_count", 0) if exf else 0)
    exf_ok = exf_ok_count >= 3

    open_count = len(eng.get("open_positions") or [])
    lev = float(eng.get("current_leverage", 0) or 0)
    abs_cap = float(eng.get("leverage_abs_cap", 9.0) or 9.0)
    trades_ex = int(eng.get("trades_executed") or 0)

    hb_ts = hb.get("ts")
    hb_ok = bool(hb_ts and (time.time() - hb_ts) < 30)

    # ── SIGNAL ROW ────────────────────────────────────────────────────────────
    # vol_ok is the MASTER GATE — listed first. When False, _try_entry is never
    # called regardless of vel_div. BTC 50-bar realised vol must exceed p60=0.000264.
    s_items = []

    # BTC vol — try to get live reading from exf for display context
    btc_vol_str = "—"
    if exf:
        dvol_raw = exf.get("dvol_btc") or exf.get("dvol")
        fng_raw = exf.get("fng")
        if dvol_raw:
            btc_vol_str = f"dV:{float(dvol_raw):.0f}"
        if fng_raw:
            btc_vol_str += f" FnG:{float(fng_raw):.0f}"

    vol_label = f"vol_ok({btc_vol_str})"
    s_items.append(_item(vol_label,
                         GREEN if vol_ok else RED,
                         "✓" if vol_ok else "✗ BLOCKED"))

    s_items.append(_vel_item(vel_div))

    # posture gate — only APEX (green) / STALKER (yellow) are tradeable
    s_items.append(_item("posture",
                         GREEN if posture == "APEX" else (YELLOW if posture == "STALKER" else RED),
                         posture))

    # acb_ready — fix: the old `ORANGE if acb_boost_val > 0` branch was
    # unreachable (acb_ready is exactly that condition), so it collapses
    # to a plain GREEN/RED indicator with identical behavior.
    s_items.append(_item("acb",
                         GREEN if acb_ready else RED,
                         f"{acb_boost_val:.2f}"))

    # exf_ok — external factors pipeline (needs >= 3 of 5 feeds healthy)
    s_items.append(_item("exf",
                         GREEN if exf_ok else (YELLOW if exf_ok_count >= 1 else RED),
                         f"{exf_ok_count}/5"))

    # halt gate
    s_items.append(_item("no_halt",
                         GREEN if not halt else RED,
                         "✓" if not halt else "HALT"))

    # heartbeat freshness (< 30s old)
    s_items.append(_item("hb",
                         GREEN if hb_ok else RED,
                         _age(hb_ts)))

    # ALL GREEN → fire indicator
    all_sig = signal_fired(vel_div, vol_ok, posture, acb_ready, exf_ok, halt)
    if all_sig:
        s_items.append(f" {GREEN}{BOLD}◉ SIGNAL{RST}")

    # ── TRADE ROW ─────────────────────────────────────────────────────────────
    # Additional gates that must pass before a matched signal becomes a fill
    t_items = []

    # open positions
    t_items.append(_item("open_pos",
                         GREEN if open_count == 0 else ORANGE,
                         str(open_count)))

    # leverage headroom
    lev_pct = lev / abs_cap if abs_cap else 0
    t_items.append(_item("lev",
                         GREEN if lev_pct < 0.3 else (YELLOW if lev_pct < 0.7 else RED),
                         f"{lev:.2f}x/{abs_cap:.0f}"))

    # regime_dd_halt
    t_items.append(_item("regime",
                         GREEN if not halt else RED,
                         "free" if not halt else "HALTED"))

    # Rm strength
    rm = float(safe.get("Rm", 0) or 0)
    t_items.append(_item("Rm",
                         GREEN if rm >= 0.90 else (YELLOW if rm >= 0.70 else (ORANGE if rm >= 0.50 else RED)),
                         f"{rm:.3f}"))

    # Cat5 (intraday drawdown contribution)
    c5 = float((safe.get("breakdown") or {}).get("Cat5", 1.0) or 1.0)
    t_items.append(_item("Cat5",
                         GREEN if c5 >= 0.95 else (YELLOW if c5 >= 0.85 else (ORANGE if c5 >= 0.70 else RED)),
                         f"{c5:.3f}"))

    # trades today
    t_items.append(_item("trades",
                         GREEN if trades_ex < 20 else (YELLOW if trades_ex < 35 else ORANGE),
                         str(trades_ex)))

    # ALL GREEN trade execute indicator
    daily_loss_ok = c5 > 0.50  # reasonable proxy — Cat5 tracks drawdown
    all_trade = all_sig and trade_can_execute(open_count, lev, abs_cap, daily_loss_ok, acb_boost_val)
    if all_trade:
        t_items.append(f" {CYAN}{BOLD}◉ TRADE{RST}")

    sig_row = " ".join(s_items)
    trade_row = " ".join(t_items)
    fill = fill_row(obf_universe or {}, acb, eng)
    return sig_row, trade_row, fill
|
||||
|
||||
|
||||
def render(hz):
    """Build the full multi-line status screen from live HZ maps.

    Reads engine/safety/heartbeat/meta-health/feature snapshots via _get(),
    updates the module-level START_CAP / CAP_PEAK capital trackers, and
    returns the complete ANSI-coloured screen as one string. Also persists
    a compact snapshot row to ClickHouse roughly once per second.
    """
    global START_CAP, CAP_PEAK

    # Pull the latest snapshot dicts from each HZ map (empty dict on miss,
    # per _get's contract as used throughout this file).
    eng = _get(hz, "DOLPHIN_STATE_BLUE", "engine_snapshot")
    cap = _get(hz, "DOLPHIN_STATE_BLUE", "capital_checkpoint")
    safe = _get(hz, "DOLPHIN_SAFETY", "latest")
    hb = _get(hz, "DOLPHIN_HEARTBEAT", "nautilus_flow_heartbeat")
    mh = _get(hz, "DOLPHIN_META_HEALTH", "latest")
    acb = _get(hz, "DOLPHIN_FEATURES", "acb_boost")
    exf = _get(hz, "DOLPHIN_FEATURES", "exf_latest")
    obf = _get(hz, "DOLPHIN_FEATURES", "obf_universe_latest")

    now = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
    capital = float(eng.get("capital", 0) or cap.get("capital", 0))
    posture = safe.get("posture") or eng.get("posture") or "?"
    rm = float(safe.get("Rm", 0))
    hb_ts = hb.get("ts")
    phase = hb.get("phase", "?")
    trader_up = hb_ts and (time.time() - hb_ts) < 30  # heartbeat fresher than 30s
    trades = eng.get("trades_executed", "—")
    scans = eng.get("scans_processed", "—")
    lev = float(eng.get("current_leverage", 0))
    notional= float(eng.get("open_notional", 0))
    mhs_st = mh.get("status", "?")
    rm_meta = float(mh.get("rm_meta", 0))

    # Track session start capital and running peak (module globals survive
    # across render() calls; only updated while a capital reading exists).
    if capital > 0:
        if START_CAP is None: START_CAP = capital
        if CAP_PEAK is None or capital > CAP_PEAK: CAP_PEAK = capital

    roi = ((capital - START_CAP) / START_CAP * 100) if START_CAP and capital else 0
    dd = ((CAP_PEAK - capital) / CAP_PEAK * 100) if CAP_PEAK and capital < CAP_PEAK else 0

    pc = PC.get(posture, DIM)
    sc = SC.get(mhs_st, DIM)
    tc = GREEN if trader_up else RED
    roi_c = GREEN if roi >= 0 else RED
    dd_c = RED if dd > 15 else (YELLOW if dd > 5 else GREEN)

    sig_row, trade_row, fill_row_str = gear_rows(eng, safe, acb, exf, hb, obf)

    svc = mh.get("service_status", {})
    hz_ks = mh.get("hz_key_status", {})

    L = []
    L.append(f"{BOLD}{CYAN}🐬 DOLPHIN-NAUTILUS{RST} {DIM}{now}{RST}")
    L.append("─" * 60)

    # TRADER
    L.append(f"{BOLD}TRADER{RST} {tc}{'● LIVE' if trader_up else '● DOWN'}{RST}"
             f" phase:{phase} hb:{_age(hb_ts)}"
             f" scan:#{eng.get('last_scan_number','?')}")

    # ── SIGNAL → FILL GEARS ───────────────────────────────────────────────────
    vol_ok_live = bool(eng.get("vol_ok", False))
    if not vol_ok_live:
        L.append(f" {RED}{BOLD}⛔ VOL_OK=FALSE — engine gate closed, NO trades until BTC vol > {VOL_P60:.6f}{RST}")
    L.append(f" {DIM}SIG │{RST} {sig_row}")
    L.append(f" {DIM}TRD │{RST} {trade_row}")
    L.append(f" {DIM}FIL │{RST} {fill_row_str}")

    L.append("")

    # CAPITAL
    L.append(f"{BOLD}CAPITAL{RST} {CYAN}${capital:,.2f}{RST}"
             + (f" ROI:{roi_c}{roi:+.2f}%{RST} DD:{dd_c}{dd:.2f}%{RST}"
                f" start:${START_CAP:,.0f}" if START_CAP else ""))
    L.append(f" trades:{trades} scans:{scans} bar:{eng.get('bar_idx','?')}"
             f" lev:{lev:.2f}x notional:${notional:,.0f}")

    # Open positions
    positions = eng.get("open_positions") or []
    if positions:
        L.append(f" {BOLD}OPEN:{RST}")
        for p in positions:
            sc2 = GREEN if p.get("side") == "LONG" else RED
            L.append(f" {sc2}{p.get('asset','?')} {p.get('side','?')}{RST}"
                     f" qty:{p.get('quantity',0):.4f}"
                     f" entry:{p.get('entry_price',0):.2f}"
                     f" upnl:{p.get('unrealized_pnl',0):+.2f}")
    else:
        L.append(f" {DIM}no open positions{RST}")

    L.append("")

    # POSTURE
    bd = safe.get("breakdown") or {}
    L.append(f"{BOLD}POSTURE{RST} {pc}{posture}{RST} Rm:{pc}{_bar(rm,20)}{RST} {rm:.4f}")
    cats = " ".join(f"C{i}:{float(bd.get(f'Cat{i}',0)):.2f}" for i in range(1,6))
    L.append(f" {cats} f_env:{float(bd.get('f_env',0)):.3f} f_exe:{float(bd.get('f_exe',0)):.3f}")

    L.append("")

    # SYS HEALTH
    L.append(f"{BOLD}SYS HEALTH{RST} {sc}{mhs_st}{RST} rm_meta:{rm_meta:.3f}")
    for m in ("m1_data_infra","m1_trader","m2_heartbeat",
              "m3_data_freshness","m4_control_plane","m5_coherence"):
        v = float(mh.get(m, 0))
        c = GREEN if v >= 0.9 else (YELLOW if v >= 0.5 else RED)
        L.append(f" {c}{m}:{v:.3f}{RST}")

    # Service dots: green when RUNNING, red otherwise (name shown dimmed).
    L.append(f" {DIM}services:{RST} "
             + " ".join(
                 f"{'●' if st=='RUNNING' else f'{RED}●{RST}'}{DIM}{n.split(':')[-1]}{RST}"
                 if st == "RUNNING" else
                 f"{RED}●{DIM}{n.split(':')[-1]}{RST}"
                 for n, st in sorted(svc.items())))

    # HZ key freshness dots, coloured by score thresholds 0.9 / 0.5.
    L.append(f" {DIM}hz_keys:{RST} "
             + " ".join(
                 f"{GREEN if float(i.get('score',0))>=0.9 else (YELLOW if float(i.get('score',0))>=0.5 else RED)}●{RST}{DIM}{k}{RST}"
                 for k, i in sorted(hz_ks.items())))

    # ── LAST 5 TRADES ──────────────────────────────────────────────────────────
    trades_hist = _last_n_trades(5)
    if trades_hist:
        L.append("")
        L.append(f"{BOLD}LAST TRADES{RST} {DIM}(from log){RST}")
        for t in trades_hist:
            pnl = float(t.get("net_pnl", 0))
            pct = float(t.get("pnl_pct", 0)) * 100
            lev = float(t.get("leverage", 0))
            ep = float(t.get("entry_price", 0))
            reason = t.get("reason", "?")
            asset = t.get("asset", "?")
            bars = t.get("bars_held", 0)
            ts_raw = t.get("ts", "")[:16].replace("T", " ")
            pc2 = GREEN if pnl >= 0 else RED
            L.append(
                f" {pc2}{'▲' if pnl>=0 else '▼'}{RST}"
                f" {asset:<12} "
                f"ep:{ep:.4g} "
                f"lev:{lev:.2f}x "
                f"pnl:{pc2}{pnl:+.2f}({pct:+.2f}%){RST} "
                f"exit:{reason} bars:{bars} {DIM}{ts_raw}{RST}"
            )
    else:
        L.append(f" {DIM}no completed trades in log yet{RST}")

    L.append("")
    L.append(f"{DIM}v5 • 0.5s poll • CH→status_snapshots • Ctrl-C quit{RST}")

    # ── CH persistence ─────────────────────────────────────────────────────────
    # Write every other cycle (1s effective rate) to avoid CH noise
    # NOTE(review): int(time.time()*2) % 2 alternates by wall-clock half-second,
    # not by render cycle — at a 0.5s poll this approximates "every other
    # cycle" but can skip or double up if cycles drift; confirm acceptable.
    if int(time.time() * 2) % 2 == 0:
        ch_put({
            "ts": int(time.time() * 1000),
            "capital": capital,
            "roi_pct": round(roi, 4),
            "dd_pct": round(dd, 4),
            "trades_executed": int(eng.get("trades_executed", 0) or 0),
            "posture": posture,
            "rm": round(rm, 6),
            "vel_div": round(float(eng.get("last_vel_div", 0) or 0), 6),
            "vol_ok": 1 if eng.get("vol_ok") else 0,
            "phase": phase,
            "mhs_status": mhs_st,
            "boost": round(float(acb.get("boost", 1.0) if acb else 1.0), 4),
            "cat5": round(float((safe.get("breakdown") or {}).get("Cat5", 1.0) or 1.0), 6),
        })

    return "\n".join(L)
|
||||
|
||||
|
||||
def main():
    """Connect to the HZ cluster and redraw the status screen twice a second."""
    print("Connecting to HZ...")
    client = hazelcast.HazelcastClient(
        cluster_name="dolphin", cluster_members=["localhost:5701"],
        connection_timeout=5.0)
    print("Connected.\n")
    try:
        while True:
            # Render defensively: a single bad snapshot must not kill the loop.
            try:
                frame = CLEAR + render(client) + "\n"
            except Exception as e:
                frame = f"\n{RED}render error: {e}{RST}\n"
            sys.stdout.write(frame)
            sys.stdout.flush()
            time.sleep(0.5)
    except KeyboardInterrupt:
        print(f"\n{DIM}Bye.{RST}")
    finally:
        client.shutdown()
|
||||
478
Observability/esof_advisor.py
Executable file
478
Observability/esof_advisor.py
Executable file
@@ -0,0 +1,478 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
DOLPHIN EsoF Advisory — v2.0 (2026-04-19)
|
||||
==========================================
|
||||
Advisory-only (NOT wired into BLUE engine).
|
||||
|
||||
Computes esoteric/calendar/session factors every 15s and:
|
||||
- Writes to HZ DOLPHIN_FEATURES['esof_advisor_latest']
|
||||
- Writes to CH dolphin.esof_advisory (fire-and-forget)
|
||||
- Stdout: live display (run standalone or import get_advisory())
|
||||
|
||||
Expectancy tables derived from 637 live trades (2026-03-31 → 2026-04-19).
|
||||
Update these tables periodically as more data accumulates.
|
||||
|
||||
Weighted hours: uses MarketIndicators.get_weighted_times() from
|
||||
external_factors/esoteric_factors_service.py (requires astropy).
|
||||
Falls back to UTC-based approximation if astropy not available.
|
||||
|
||||
Run: source /home/dolphin/siloqy_env/bin/activate && python esof_advisor.py
|
||||
"""
|
||||
import json
|
||||
import math
|
||||
import sys
|
||||
import time
|
||||
import threading
|
||||
import urllib.request
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
# ── MarketIndicators integration (real weighted hours) ────────────────────────
|
||||
# Make ../external_factors importable so the production MarketIndicators
# implementation (astropy-based weighted-hour computation) can be reused.
_EF_PATH = Path(__file__).parent.parent / "external_factors"
if str(_EF_PATH) not in sys.path:
    sys.path.insert(0, str(_EF_PATH))

try:
    from esoteric_factors_service import MarketIndicators as _MI
    _market_indicators = _MI()
    _WEIGHTED_HOURS_AVAILABLE = True
except Exception:
    # Import or construction failed (e.g. astropy missing) —
    # _get_weighted_hours() falls back to a fixed-offset UTC approximation.
    _market_indicators = None
    _WEIGHTED_HOURS_AVAILABLE = False
|
||||
|
||||
def _get_weighted_hours(now: datetime):
    """Return ``(pop_hour, liq_hour)`` for *now*.

    Uses MarketIndicators when its import succeeded; otherwise approximates
    with fixed empirical offsets (pop ≈ UTC+4.21h, liq ≈ UTC+0.98h).
    """
    if not _WEIGHTED_HOURS_AVAILABLE:
        fractional_hour = now.hour + now.minute / 60.0
        pop = (fractional_hour + 4.21) % 24
        liq = (fractional_hour + 0.98) % 24
        return pop, liq
    return _market_indicators.get_weighted_times(now)
|
||||
|
||||
# ── Expectancy tables (from CH analysis, 637 trades) ──────────────────────────
|
||||
# Key: liq_hour 3h bucket start → (trades, wr_pct, net_pnl, avg_pnl)
|
||||
# liq_hour ≈ UTC + 0.98h (liquidity-weighted centroid: Americas 35%, EMEA 30%,
|
||||
# East_Asia 20%, Oceania_SEA 10%, South_Asia 5%)
|
||||
LIQ_HOUR_STATS = {
|
||||
0: ( 70, 51.4, +1466, +20.9), # liq 0-3h ≈ UTC 23-2h (Asia open)
|
||||
3: ( 73, 46.6, -1166, -16.0), # liq 3-6h ≈ UTC 2-5h (deep Asia)
|
||||
6: ( 62, 41.9, +1026, +16.5), # liq 6-9h ≈ UTC 5-8h (Asia/EMEA handoff)
|
||||
9: ( 65, 43.1, +476, +7.3), # liq 9-12h ≈ UTC 8-11h (EMEA morning)
|
||||
12: ( 84, 52.4, +3532, +42.0), # liq 12-15h ≈ UTC 11-14h (EMEA pm + US open) ★ BEST
|
||||
15: (113, 43.4, -770, -6.8), # liq 15-18h ≈ UTC 14-17h (US morning)
|
||||
18: ( 99, 35.4, -2846, -28.8), # liq 18-21h ≈ UTC 17-20h (US afternoon) ✗ WORST
|
||||
21: ( 72, 36.1, -1545, -21.5), # liq 21-24h ≈ UTC 20-23h (US close/late)
|
||||
}
|
||||
|
||||
# Key: session name → (trades, wr_pct, net_pnl, avg_pnl)
|
||||
SESSION_STATS = {
|
||||
"LONDON_MORNING": (111, 47.7, +4132.94, +37.23),
|
||||
"ASIA_PACIFIC": (182, 46.7, +1600.04, +8.79),
|
||||
"LOW_LIQUIDITY": ( 71, 39.4, -809.19, -11.40),
|
||||
"LN_NY_OVERLAP": (147, 45.6, -894.86, -6.09),
|
||||
"NY_AFTERNOON": (127, 35.4, -3857.09, -30.37),
|
||||
}
|
||||
|
||||
# Key: dow (0=Mon) → (trades, wr_pct, net_pnl, avg_pnl)
|
||||
DOW_STATS = {
|
||||
0: (81, 27.2, -1053.91, -13.01), # Mon — worst
|
||||
1: (77, 54.5, +3823.81, +49.66), # Tue — best
|
||||
2: (98, 43.9, -385.08, -3.93), # Wed
|
||||
3: (115, 44.3, -4017.06, -34.93), # Thu — 2nd worst
|
||||
4: (106, 39.6, -1968.41, -18.57), # Fri
|
||||
5: (82, 43.9, +43.37, +0.53), # Sat
|
||||
6: (78, 53.8, +3729.73, +47.82), # Sun — 2nd best
|
||||
}
|
||||
DOW_NAMES = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
|
||||
|
||||
# DoW × Session notable extremes (subset — key cells only)
|
||||
# Format: (dow, session) → (trades, wr_pct, net_pnl)
|
||||
DOW_SESSION_STATS = {
|
||||
(6, "LONDON_MORNING"): (13, 85.0, +2153), # Sun LDN — best cell
|
||||
(6, "LN_NY_OVERLAP"): (24, 75.0, +2110), # Sun OVLP — 2nd best
|
||||
(1, "ASIA_PACIFIC"): (27, 67.0, +2522), # Tue ASIA — 3rd
|
||||
(1, "LN_NY_OVERLAP"): (18, 56.0, +2260), # Tue OVLP — 4th
|
||||
(6, "NY_AFTERNOON"): (17, 6.0, -1025), # Sun NY — worst cell
|
||||
(0, "ASIA_PACIFIC"): (21, 19.0, -411), # Mon ASIA — bad
|
||||
(3, "LN_NY_OVERLAP"): (27, 41.0, -3310), # Thu OVLP — catastrophic
|
||||
}
|
||||
|
||||
# 15m slot stats: slot_key → (trades, wr_pct, net_pnl, avg_pnl)
|
||||
# Only slots with n >= 5 included
|
||||
SLOT_STATS = {
|
||||
"0:00": (7, 57.1, +32.52, +4.65),
|
||||
"0:15": (5, 80.0,+103.19, +20.64),
|
||||
"0:30": (6, 33.3,-203.12, -33.85),
|
||||
"1:00": (7, 42.9,-271.32, -38.76),
|
||||
"1:30": (10, 50.0,+1606.66,+160.67),
|
||||
"1:45": (5, 80.0, +458.74, +91.75),
|
||||
"2:00": (8, 62.5,-214.45, -26.81),
|
||||
"2:15": (5, 0.0, -851.56,-170.31),
|
||||
"2:30": (7, 57.1,-157.04, -22.43),
|
||||
"2:45": (7, 57.1, +83.24, +11.89),
|
||||
"3:00": (8, 50.0, +65.0, +8.13),
|
||||
"3:30": (7, 14.3,-230.05, -32.86),
|
||||
"4:00": (8, 37.5, +38.73, +4.84),
|
||||
"4:15": (5, 60.0, +525.75,+105.15),
|
||||
"4:30": (6, 50.0, +221.14, +36.86),
|
||||
"4:45": (7, 28.6,-777.03,-111.00),
|
||||
"5:00": (5, 40.0,-120.47, -24.09),
|
||||
"5:15": (4, 50.0, +559.32,+139.83),
|
||||
"5:30": (5, 40.0, +345.88, +69.18),
|
||||
"5:45": (5, 40.0,-1665.24,-333.05),
|
||||
"6:00": (5, 80.0, +635.74,+127.15),
|
||||
"6:30": (5, 60.0,-191.66, -38.33),
|
||||
"6:45": (8, 37.5, +325.97, +40.75),
|
||||
"7:15": (7, 42.9, +763.60,+109.09),
|
||||
"7:30": (5, 20.0,-162.27, -32.45),
|
||||
"7:45": (6, 66.7, -18.42, -3.07),
|
||||
"8:00": (5, 40.0, +10.23, +2.05),
|
||||
"8:15": (5, 20.0, -31.26, -6.25),
|
||||
"8:30": (5, 40.0, -69.76, -13.95),
|
||||
"8:45": (6, 50.0, +302.53, +50.42),
|
||||
"9:00": (5, 60.0, -62.44, -12.49),
|
||||
"9:15": (6, 66.7, +81.85, +13.64),
|
||||
"9:30": (5, 20.0, -23.36, -4.67),
|
||||
"9:45": (7, 42.9, -8.20, -1.17),
|
||||
"10:15": (8, 62.5, +542.20, +67.77),
|
||||
"10:30": (5, 80.0, +37.19, +7.44),
|
||||
"10:45": (6, 0.0,-223.62, -37.27),
|
||||
"11:00": (9, 44.4, +737.87, +81.99),
|
||||
"11:30": (8, 87.5,+1074.52,+134.32),
|
||||
"11:45": (5, 60.0, +558.01,+111.60),
|
||||
"12:00": (5, 60.0, +660.08,+132.02),
|
||||
"12:15": (6, 66.7, +705.15,+117.53),
|
||||
"12:30": (6, 33.3, +513.91, +85.65),
|
||||
"12:45": (6, 16.7,-1178.07,-196.35),
|
||||
"13:00": (7, 14.3, -878.41,-125.49),
|
||||
"13:15": (10, 60.0, +419.31, +41.93),
|
||||
"13:30": (9, 44.4, -699.33, -77.70),
|
||||
"13:45": (10, 70.0,+1082.10,+108.21),
|
||||
"14:00": (7, 42.9,-388.03, -55.43),
|
||||
"14:15": (9, 55.6, +215.29, +23.92),
|
||||
"14:30": (7, 28.6, +413.16, +59.02),
|
||||
"14:45": (11, 27.3, -65.79, -5.98),
|
||||
"15:00": (10, 70.0,+2265.83,+226.58),
|
||||
"15:15": (9, 55.6,-1225.87,-136.21),
|
||||
"15:30": (11, 63.6, -65.03, -5.91),
|
||||
"15:45": (10, 30.0, +81.01, +8.10),
|
||||
"16:00": (5, 60.0, +691.34,+138.27),
|
||||
"16:15": (9, 22.2, -78.42, -8.71),
|
||||
"16:30": (4, 25.0,-2024.04,-506.01),
|
||||
"16:45": (19, 42.1,-637.98, -33.58),
|
||||
"17:00": (13, 38.5, +410.17, +31.55),
|
||||
"17:15": (15, 46.7,-439.31, -29.29),
|
||||
"17:30": (10, 60.0,-157.24, -15.72),
|
||||
"18:00": (6, 16.7,-1595.60,-265.93),
|
||||
"18:15": (17, 17.6, +60.98, +3.59),
|
||||
"18:30": (9, 22.2,-317.64, -35.29),
|
||||
"19:00": (8, 50.0,-157.93, -19.74),
|
||||
"19:15": (5, 60.0, -95.94, -19.19),
|
||||
"19:45": (7, 28.6,-392.53, -56.08),
|
||||
"20:00": (5, 60.0, +409.41, +81.88),
|
||||
"20:15": (8, 12.5,-1116.49,-139.56),
|
||||
"20:45": (9, 44.4,-173.96, -19.33),
|
||||
"21:15": (8, 50.0,-653.67, -81.71),
|
||||
"21:30": (6, 33.3, +338.33, +56.39),
|
||||
"22:00": (8, 25.0,-360.17, -45.02),
|
||||
"22:15": (5, 60.0, +73.44, +14.69),
|
||||
"22:30": (7, 28.6,-248.96, -35.57),
|
||||
"23:00": (8, 62.5, +476.83, +59.60),
|
||||
"23:15": (7, 71.4, +82.51, +11.79),
|
||||
"23:30": (7, 42.9, -69.24, -9.89),
|
||||
}
|
||||
|
||||
# Baseline: overall WR 43.7% — score is deviation from baseline
|
||||
BASELINE_WR = 43.7
|
||||
|
||||
# ── Session classification ─────────────────────────────────────────────────────
|
||||
def get_session(hour_utc: float) -> str:
    """Map a fractional UTC hour to its trading-session label.

    Sessions (half-open ranges): [0,8) ASIA_PACIFIC, [8,13) LONDON_MORNING,
    [13,17) LN_NY_OVERLAP, [17,21) NY_AFTERNOON, [21,24) LOW_LIQUIDITY.
    """
    boundaries = (
        (8, "ASIA_PACIFIC"),
        (13, "LONDON_MORNING"),
        (17, "LN_NY_OVERLAP"),
        (21, "NY_AFTERNOON"),
    )
    for upper, label in boundaries:
        if hour_utc < upper:
            return label
    return "LOW_LIQUIDITY"
|
||||
|
||||
# ── EsoF computation ───────────────────────────────────────────────────────────
|
||||
def compute_esof(now: datetime = None) -> dict:
    """Compute all EsoF advisory signals for a given UTC datetime.

    Args:
        now: timezone-aware UTC datetime; defaults to the current time.

    Returns:
        Flat dict with calendar/session fields, weighted hours, astro
        factors, per-bucket expectancy stats from the live-trade tables, and
        a composite ``advisory_score`` in [-1, +1] plus ``advisory_label``.

    Fix vs previous revision: the Mercury-retrograde penalty is now applied
    BEFORE clamping and label derivation, so ``advisory_label`` always agrees
    with the returned ``advisory_score`` (previously the label was computed
    from the pre-penalty score and could disagree near a label boundary).
    """
    if now is None:
        now = datetime.now(timezone.utc)

    dow = now.weekday()  # 0=Mon
    hour_utc = now.hour + now.minute / 60.0
    min_bucket = (now.minute // 15) * 15
    slot_key = f"{now.hour}:{min_bucket:02d}"
    session = get_session(hour_utc)

    # ── Weighted hours (real computation via MarketIndicators) ─────────────────
    pop_hour, liq_hour = _get_weighted_hours(now)
    liq_bkt = int(liq_hour // 3) * 3

    # ── Expectancy lookups (fall back to baseline WR when no data) ─────────────
    sess_data = SESSION_STATS.get(session, (0, BASELINE_WR, 0, 0))
    sess_wr, sess_net = sess_data[1], sess_data[2]

    liq_data = LIQ_HOUR_STATS.get(liq_bkt, (0, BASELINE_WR, 0, 0))
    liq_wr, liq_net = liq_data[1], liq_data[2]

    dow_data = DOW_STATS.get(dow, (0, BASELINE_WR, 0, 0))
    dow_wr, dow_net = dow_data[1], dow_data[2]

    slot_data = SLOT_STATS.get(slot_key)
    if slot_data:
        slot_wr, slot_net = slot_data[1], slot_data[2]
    else:
        slot_wr, slot_net = BASELINE_WR, 0.0

    # ── DoW × Session notable cell ─────────────────────────────────────────────
    cell_data = DOW_SESSION_STATS.get((dow, session))
    cell_bonus = 0.0
    if cell_data:
        cell_wr = cell_data[1]
        # bonus/penalty proportional to deviation from baseline (±0.3 max)
        cell_bonus = (cell_wr - BASELINE_WR) / 100.0 * 0.3

    # ── Fibonacci time (distance of minute-of-day to nearest Fibonacci mark) ──
    mins_passed = now.hour * 60 + now.minute
    fib_seq = [1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1440]
    closest_fib = min(fib_seq, key=lambda x: abs(x - mins_passed))
    fib_dist = abs(mins_passed - closest_fib)
    fib_strength = 1.0 - min(fib_dist / 30.0, 1.0)

    # ── Market cycle position (BTC halving: Apr 19 2024; 4yr ≈ 1461 days) ─────
    halving = datetime(2024, 4, 19, tzinfo=timezone.utc)
    cycle_pos = ((now - halving).days % 1461) / 1461.0

    # ── Moon (simple approximation without astropy dependency) ────────────────
    # Synodic period 29.53d; reference new moon 2024-01-11
    ref_new_moon = datetime(2024, 1, 11, tzinfo=timezone.utc)
    since_ref = now - ref_new_moon
    days_since_ref = since_ref.days + since_ref.seconds / 86400
    moon_age = days_since_ref % 29.53059
    moon_illumination = (1 - math.cos(2 * math.pi * moon_age / 29.53059)) / 2.0
    if moon_illumination < 0.03:
        moon_phase = "NEW_MOON"
    elif moon_illumination > 0.97:
        moon_phase = "FULL_MOON"
    elif moon_age < 14.77:
        moon_phase = "WAXING_CRESCENT" if moon_illumination < 0.5 else "WAXING_GIBBOUS"
    else:
        moon_phase = "WANING_GIBBOUS" if moon_illumination > 0.5 else "WANING_CRESCENT"

    # Mercury retrograde periods (2025-2026 known dates)
    retro_periods = [
        (datetime(2025, 3, 15, tzinfo=timezone.utc), datetime(2025, 4, 7, tzinfo=timezone.utc)),
        (datetime(2025, 7, 18, tzinfo=timezone.utc), datetime(2025, 8, 11, tzinfo=timezone.utc)),
        (datetime(2025, 11, 10, tzinfo=timezone.utc), datetime(2025, 12, 1, tzinfo=timezone.utc)),
        (datetime(2026, 3, 7, tzinfo=timezone.utc), datetime(2026, 3, 30, tzinfo=timezone.utc)),
        (datetime(2026, 6, 29, tzinfo=timezone.utc), datetime(2026, 7, 23, tzinfo=timezone.utc)),
    ]
    mercury_retrograde = any(s <= now <= e for s, e in retro_periods)

    # ── Composite advisory score ───────────────────────────────────────────────
    # Normalize each component to [-1, +1] relative to baseline WR=43.7%
    # (range ≈ ±20 WR points across all factors).
    sess_score = (sess_wr - BASELINE_WR) / 20.0
    liq_score = (liq_wr - BASELINE_WR) / 20.0
    dow_score = (dow_wr - BASELINE_WR) / 20.0
    slot_score = (slot_wr - BASELINE_WR) / 20.0 if slot_data else 0.0

    # Weights: liq_hour 30%, session 25%, dow 30%, slot 10%, cell 5%
    # liq_hour replaces pure session — it's strictly more granular (continuous)
    advisory_score = (
        liq_score * 0.30 +
        sess_score * 0.25 +
        dow_score * 0.30 +
        slot_score * 0.10 +
        cell_bonus * 0.05
    )
    # Mercury retrograde: small flat penalty — applied before clamp/label so
    # the label reflects the final score.
    if mercury_retrograde:
        advisory_score -= 0.05
    advisory_score = max(-1.0, min(1.0, advisory_score))

    if advisory_score > 0.25:
        advisory_label = "FAVORABLE"
    elif advisory_score > 0.05:
        advisory_label = "MILD_POSITIVE"
    elif advisory_score > -0.05:
        advisory_label = "NEUTRAL"
    elif advisory_score > -0.25:
        advisory_label = "MILD_NEGATIVE"
    else:
        advisory_label = "UNFAVORABLE"

    return {
        "ts": now.isoformat(),
        "_ts": now.timestamp(),
        # Calendar
        "dow": dow,
        "dow_name": DOW_NAMES[dow],
        "hour_utc": now.hour,
        "slot_15m": slot_key,
        "session": session,
        # Weighted hours (real MarketIndicators computation)
        "pop_weighted_hour": round(pop_hour, 2),
        "liq_weighted_hour": round(liq_hour, 2),
        "liq_bucket_3h": liq_bkt,
        # Astro
        "moon_illumination": round(moon_illumination, 3),
        "moon_phase": moon_phase,
        "mercury_retrograde": mercury_retrograde,
        # Cycle / harmonic
        "market_cycle_pos": round(cycle_pos, 4),
        "fib_strength": round(fib_strength, 3),
        # Expectancy (from live trade history)
        "liq_wr_pct": round(liq_wr, 1),
        "liq_net_pnl": round(liq_net, 2),
        "slot_wr_pct": round(slot_wr, 1),
        "slot_net_pnl": round(slot_net, 2),
        "session_wr_pct": round(sess_wr, 1),
        "session_net_pnl": round(sess_net, 2),
        "dow_wr_pct": round(dow_wr, 1),
        "dow_net_pnl": round(dow_net, 2),
        # Composite
        "advisory_score": round(advisory_score, 4),
        "advisory_label": advisory_label,
        # Meta
        "_weighted_hours_real": _WEIGHTED_HOURS_AVAILABLE,
    }
|
||||
|
||||
# ── CH writer (fire-and-forget) ───────────────────────────────────────────────
|
||||
CH_URL = "http://localhost:8123"
# NOTE(review): ClickHouse credentials are hard-coded here; consider sourcing
# them from environment variables or a config file instead.
CH_USER = "dolphin"
CH_PASS = "dolphin_ch_2026"
|
||||
|
||||
def _ch_write(row: dict):
    """Insert one advisory row into ClickHouse (best-effort; errors ignored)."""
    # Build the CH row in the same column order as the advisory dict.
    ch_row = {"ts": row["_ts"] * 1000}  # ms for DateTime64(3)
    for key in ("dow", "dow_name", "hour_utc", "slot_15m", "session",
                "moon_illumination", "moon_phase"):
        ch_row[key] = row[key]
    ch_row["mercury_retrograde"] = int(row["mercury_retrograde"])
    for key in ("pop_weighted_hour", "liq_weighted_hour"):
        ch_row[key] = row.get(key, 0.0)
    for key in ("market_cycle_pos", "fib_strength",
                "slot_wr_pct", "slot_net_pnl",
                "session_wr_pct", "session_net_pnl",
                "dow_wr_pct", "dow_net_pnl",
                "advisory_score", "advisory_label"):
        ch_row[key] = row[key]

    payload = json.dumps(ch_row).encode()
    url = f"{CH_URL}/?database=dolphin&query=INSERT+INTO+esof_advisory+FORMAT+JSONEachRow"
    req = urllib.request.Request(url, data=payload, method="POST")
    req.add_header("X-ClickHouse-User", CH_USER)
    req.add_header("X-ClickHouse-Key", CH_PASS)
    try:
        urllib.request.urlopen(req, timeout=3)
    except Exception:
        pass  # fire-and-forget by design
|
||||
|
||||
# ── HZ writer ─────────────────────────────────────────────────────────────────
|
||||
def _hz_write(data: dict):
    """Publish the advisory dict to HZ ``DOLPHIN_FEATURES['esof_advisor_latest']``.

    Best-effort: any connection or serialization error is swallowed.

    Fix: ``hz.shutdown()`` now runs in a ``finally`` block, so a failing
    ``put()`` no longer leaks the Hazelcast client connection (previously
    shutdown was skipped whenever the put raised).
    """
    try:
        import hazelcast
        hz = hazelcast.HazelcastClient(
            cluster_name="dolphin", cluster_members=["localhost:5701"],
            connection_timeout=3.0)
        try:
            hz.get_map("DOLPHIN_FEATURES").blocking().put(
                "esof_advisor_latest", json.dumps(data))
        finally:
            hz.shutdown()
    except Exception:
        pass
|
||||
|
||||
# ── Display ───────────────────────────────────────────────────────────────────
|
||||
GREEN = "\033[32m"; RED = "\033[31m"; YELLOW = "\033[33m"
|
||||
CYAN = "\033[36m"; BOLD = "\033[1m"; DIM = "\033[2m"; RST = "\033[0m"
|
||||
|
||||
LABEL_COLOR = {
|
||||
"FAVORABLE": GREEN,
|
||||
"MILD_POSITIVE":"\033[92m",
|
||||
"NEUTRAL": YELLOW,
|
||||
"MILD_NEGATIVE":"\033[91m",
|
||||
"UNFAVORABLE": RED,
|
||||
}
|
||||
|
||||
def display(d: dict):
    """Pretty-print one advisory dict (from compute_esof) to stdout with ANSI colour."""
    sc = d["advisory_score"]
    lbl = d["advisory_label"]
    col = LABEL_COLOR.get(lbl, RST)
    # 20-char magnitude bar; sign is rendered separately below.
    bar_len = int(abs(sc) * 20)
    bar = ("▓" * bar_len).ljust(20)
    sign = "+" if sc >= 0 else "-"

    print(f"\n{BOLD}{CYAN}🐬 DOLPHIN EsoF Advisory{RST} "
          f"{DIM}{d['ts'][:19]} UTC{RST}")
    # Liq-hour line coloured by whether its WR beats the overall baseline.
    _lc = GREEN if d['liq_wr_pct'] > BASELINE_WR else RED
    print(f" {BOLD}Liq hour{RST} : {_lc}liq={d['liq_weighted_hour']:.2f}h (pop={d['pop_weighted_hour']:.2f}h){RST} "
          f"bkt:{d['liq_bucket_3h']}-{d['liq_bucket_3h']+3}h "
          f"WR={_lc}{d['liq_wr_pct']:.1f}%{RST} net={d['liq_net_pnl']:+,.0f}"
          + (f" {DIM}[real]{RST}" if d.get('_weighted_hours_real') else f" {YELLOW}[approx]{RST}"))
    print(f" {BOLD}Session{RST} : {col}{d['session']:<22}{RST} "
          f"WR={col}{d['session_wr_pct']:.1f}%{RST} net={d['session_net_pnl']:+,.0f}")
    print(f" {BOLD}DoW{RST} : {col}{d['dow_name']:<22}{RST} "
          f"WR={col}{d['dow_wr_pct']:.1f}%{RST} net={d['dow_net_pnl']:+,.0f}")
    print(f" {BOLD}Slot 15m{RST} : {d['slot_15m']:<22} "
          f"WR={d['slot_wr_pct']:.1f}% net={d['slot_net_pnl']:+,.0f}")
    print(f" {BOLD}Moon{RST} : {d['moon_phase']:<18} {d['moon_illumination']*100:.0f}% illum")
    print(f" {BOLD}Mercury{RST} : {'⚠ RETROGRADE' if d['mercury_retrograde'] else 'direct'}")
    print(f" {BOLD}Fib{RST} : strength {d['fib_strength']:.2f} "
          f"cycle_pos {d['market_cycle_pos']:.4f}")
    print(f" {BOLD}Advisory{RST} : {col}{bar}{RST} "
          f"{col}{sign}{abs(sc):.3f} {BOLD}{lbl}{RST}")
    print()
|
||||
|
||||
# ── Daemon ────────────────────────────────────────────────────────────────────
def run_daemon(interval_s: float = 15.0, write_hz: bool = True,
               write_ch: bool = True, verbose: bool = True):
    """Loop: compute → HZ → CH → display every interval_s."""
    print(f"{BOLD}EsoF Advisory daemon started (interval={interval_s}s){RST}\n"
          f" HZ={write_hz} CH={write_ch} display={verbose}\n"
          f" Advisory-only — NOT wired into BLUE engine\n")

    CH_PERIOD_S = 300   # CH gets a row every 5 min, not every 15s tick
    last_ch_write = 0.0

    while True:
        try:
            advisory = compute_esof()
            if verbose:
                display(advisory)
            if write_hz:
                # Fire-and-forget so a slow HZ write never stalls the loop.
                threading.Thread(target=_hz_write, args=(advisory,),
                                 daemon=True).start()
            if write_ch and time.time() - last_ch_write > CH_PERIOD_S:
                threading.Thread(target=_ch_write, args=(advisory,),
                                 daemon=True).start()
                last_ch_write = time.time()
        except Exception as exc:
            # Keep the daemon alive on any single-cycle failure.
            print(f"[EsoF] error: {exc}")
        time.sleep(interval_s)
|
||||
|
||||
# ── Public API for dolphin_status.py import ───────────────────────────────────
def get_advisory(now: "datetime | None" = None) -> dict:
    """Single-shot advisory computation. Import this into dolphin_status.py.

    Args:
        now: Optional evaluation timestamp, passed straight through to
             compute_esof(). Defaults to None.
             (Annotation fixed: the old `now: datetime = None` contradicted
             the None default.)

    Returns:
        The advisory dict produced by compute_esof().
    """
    return compute_esof(now)
|
||||
|
||||
if __name__ == "__main__":
|
||||
import argparse
|
||||
p = argparse.ArgumentParser()
|
||||
p.add_argument("--once", action="store_true", help="Compute once and exit")
|
||||
p.add_argument("--interval", type=float, default=15.0)
|
||||
p.add_argument("--no-hz", action="store_true")
|
||||
p.add_argument("--no-ch", action="store_true")
|
||||
args = p.parse_args()
|
||||
|
||||
if args.once:
|
||||
d = compute_esof()
|
||||
display(d)
|
||||
sys.exit(0)
|
||||
|
||||
run_daemon(
|
||||
interval_s=args.interval,
|
||||
write_hz=not args.no_hz,
|
||||
write_ch=not args.no_ch,
|
||||
)
|
||||
299
Observability/esof_gate.py
Executable file
299
Observability/esof_gate.py
Executable file
@@ -0,0 +1,299 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
DOLPHIN EsoF Gate — Advisory-only (NOT wired into BLUE).
|
||||
|
||||
Six gating / modulation strategies derived from EsoF advisory data.
|
||||
All functions are pure (no side effects, no I/O, no HZ/CH deps).
|
||||
Import-safe; designed to be wired into nautilus_event_trader.py when ready.
|
||||
|
||||
Strategies
|
||||
──────────
|
||||
A LEV_SCALE Soft leverage reduction on negative advisory score
|
||||
B HARD_BLOCK Block entries on UNFAVORABLE + worst-session combo / Monday
|
||||
C DOW_BLOCK Block Monday only (WR=27.2%, n=81, high confidence)
|
||||
D SESSION_BLOCK Block NY_AFTERNOON only (WR=35.4%, n=127, high confidence)
|
||||
E COMBINED C + D (Monday OR NY_AFTERNOON)
|
||||
F S6_BUCKET EsoF-modulated S6 bucket sizing multipliers (main research target)
|
||||
S6_IRP EsoF-modulated IRP filter thresholds (needs full backtest to evaluate)
|
||||
|
||||
S6 Bucket Multiplier Tables
|
||||
───────────────────────────
|
||||
Source: prod/docs/CRITICAL_ASSET_PICKING_BRACKETS_VS._ROI_WR_AT_TRADES.md
|
||||
Base ("NEUTRAL") = Scenario S6 from that doc:
|
||||
B3 2.0×, B6 1.5×, B5 0.5×, B0 0.4×, B1 0.3×, B4 0×, B2 0×
|
||||
|
||||
EsoF modulates these: favorable → widen selection (higher mult on weak buckets,
|
||||
allow B4 back at reduced sizing); unfavorable → concentrate (S2-like, B3+B6 only).
|
||||
|
||||
Theory: In high-WR periods (FAVORABLE), even weaker buckets (B0/B1/B5) contribute
|
||||
gross alpha. In low-WR periods (UNFAVORABLE), concentrate on the only reliably
|
||||
profitable buckets (B3, B6) and minimise drag from the rest.
|
||||
|
||||
IRP ARS Constitutive Coefficients (S6_IRP, for reference)
|
||||
──────────────────────────────────────────────────────────
|
||||
ARS = 0.5×log1p(efficiency) + 0.35×alignment − 0.15×noise×1000
|
||||
Filter thresholds (gold spec): ALIGNMENT_MIN=0.20, NOISE_MAX=500, LATENCY_MAX=20
|
||||
Source: nautilus_dolphin/nautilus/alpha_asset_selector.py
|
||||
|
||||
EsoF modulates the thresholds: favorable → relax (more assets qualify);
|
||||
unfavorable → tighten (only highest-quality assets pass).
|
||||
This strategy CANNOT be evaluated against existing CH trades — it changes WHICH
|
||||
asset is selected, requiring a full IRP replay on historical klines.
|
||||
|
||||
Online Calibration Protocol (no feedback loop)
|
||||
──────────────────────────────────────────────
|
||||
ALWAYS calibrate EsoF tables from ungated BLUE trades only.
|
||||
NEVER update EsoF expectancy tables using trades that were gated by EsoF.
|
||||
Running gated trades through the calibration loop creates a positive/negative
|
||||
feedback that tightens advisory bands until they lose real-world validity.
|
||||
The baseline BLUE system (no gate) must always run in shadow to accumulate
|
||||
out-of-sample calibration data.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import math
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Dict, Optional
|
||||
|
||||
|
||||
# ── Known bucket assignments (from bucket_assignments.pkl / ASSET_BUCKETS.md) ─
# Runtime: prefer loading from pkl; this map is the authoritative fallback.
# Built from per-bucket tuples so each group reads as a unit.
BUCKET_MAP: Dict[str, int] = {
    asset: bucket
    for bucket, assets in {
        # B2 — Macro Anchors
        2: ("BTCUSDT", "ETHUSDT"),
        # B4 — Blue-Chip Alts (WORST bucket — net-negative even gross)
        4: ("LTCUSDT", "BNBUSDT", "NEOUSDT", "ETCUSDT", "LINKUSDT"),
        # B0 — Mid-Vol Established Alts (fee-drag losers, gross-positive)
        0: ("ONGUSDT", "WANUSDT", "ONTUSDT", "MTLUSDT", "BANDUSDT",
            "TFUELUSDT", "ICXUSDT", "QTUMUSDT", "RVNUSDT", "XTZUSDT",
            "VETUSDT", "COSUSDT", "HOTUSDT", "STXUSDT"),
        # B5 — Low-BTC-Relevance Alts (gross-positive, large fee victim)
        5: ("TRXUSDT", "IOSTUSDT", "CVCUSDT", "BATUSDT", "ATOMUSDT",
            "ANKRUSDT", "IOTAUSDT", "CHZUSDT", "ALGOUSDT", "DUSKUSDT"),
        # B3 — High-Vol Alts (STAR bucket — only structurally profitable)
        3: ("WINUSDT", "ADAUSDT", "ENJUSDT", "ZILUSDT", "DOGEUSDT",
            "DENTUSDT", "THETAUSDT", "ONEUSDT"),
        # B1 — Extreme Low-Corr (marginal, fee-drag)
        1: ("DASHUSDT", "XRPUSDT", "XLMUSDT", "CELRUSDT", "ZECUSDT",
            "HBARUSDT", "FUNUSDT"),
        # B6 — Extreme Vol Mid-Corr (good, small sample)
        6: ("ZRXUSDT", "FETUSDT"),
    }.items()
    for asset in assets
}


def get_bucket(asset: str, pkl_assignments: Optional[Dict[str, int]] = None) -> int:
    """Resolve bucket_id for asset. Prefers pkl_assignments over built-in map."""
    runtime_map = pkl_assignments or {}
    if asset in runtime_map:
        return runtime_map[asset]
    # Unknown assets default to B0 (modest base sizing).
    return BUCKET_MAP.get(asset, 0)
|
||||
|
||||
|
||||
# ── S6 bucket multiplier tables keyed by advisory_label ───────────────────────
#
# Base (NEUTRAL) = Scenario S6 from the CRITICAL_ASSET_PICKING doc:
#   B3 2.0×  B6 1.5×  B5 0.5×  B0 0.4×  B1 0.3×  B4 0×  B2 0×
#
# FAVORABLE / MILD_POSITIVE → widen selection: more assets qualify, B4 is
# re-admitted at very low sizing because high-WR periods partially redeem it.
# MILD_NEGATIVE / UNFAVORABLE → concentrate on B3+B6 only (S2-like) to cut
# drag while signal quality degrades.
#
# Rows are (B0, B1, B2, B3, B4, B5, B6) tuples expanded into bucket→mult dicts.

S6_MULT: Dict[str, Dict[int, float]] = {
    label: dict(zip(range(7), row))
    for label, row in {
        #                 B0    B1    B2   B3   B4    B5    B6
        "FAVORABLE":     (0.65, 0.50, 0.0, 2.0, 0.20, 0.75, 1.5),
        "MILD_POSITIVE": (0.50, 0.35, 0.0, 2.0, 0.10, 0.60, 1.5),
        "NEUTRAL":       (0.40, 0.30, 0.0, 2.0, 0.0,  0.50, 1.5),
        "MILD_NEGATIVE": (0.20, 0.20, 0.0, 1.5, 0.0,  0.30, 1.2),
        "UNFAVORABLE":   (0.0,  0.0,  0.0, 1.5, 0.0,  0.0,  1.2),
    }.items()
}

# Base S6 (NEUTRAL row above) — exposed for quick reference
S6_BASE: Dict[int, float] = S6_MULT["NEUTRAL"]
|
||||
|
||||
|
||||
# ── IRP filter threshold tables keyed by advisory_label (Strategy S6_IRP) ─────
# Gold spec (NEUTRAL): ALIGNMENT_MIN=0.20, NOISE_MAX=500, LATENCY_MAX=20
# FAVORABLE relaxes thresholds (more assets pass IRP → wider surface);
# UNFAVORABLE tightens them (only highest-quality assets enter).
# Rows are (alignment_min, noise_max, latency_max) expanded into dicts.

IRP_PARAMS: Dict[str, Dict[str, float]] = {
    label: {"alignment_min": align, "noise_max": noise, "latency_max": lat}
    for label, (align, noise, lat) in {
        "FAVORABLE":     (0.15, 640.0, 24),
        "MILD_POSITIVE": (0.17, 560.0, 22),
        "NEUTRAL":       (0.20, 500.0, 20),
        "MILD_NEGATIVE": (0.22, 440.0, 18),
        "UNFAVORABLE":   (0.25, 380.0, 15),
    }.items()
}

# Gold-spec thresholds (NEUTRAL row)
IRP_GOLD: Dict[str, float] = IRP_PARAMS["NEUTRAL"]
|
||||
|
||||
|
||||
# ── GateResult ─────────────────────────────────────────────────────────────────

@dataclass
class GateResult:
    """Outcome of one gate strategy applied to one advisory snapshot."""
    action: str            # 'ALLOW' | 'BLOCK' | 'SCALE'
    lev_mult: float        # leverage multiplier: 1.0=no change, 0=block, 0.5=halve
    reason: str            # human-readable label for logging
    # Per-bucket sizing multipliers; fresh copy of the base S6 table by default.
    s6_mult: Dict[int, float] = field(default_factory=lambda: dict(S6_BASE))
    # IRP filter thresholds; fresh copy of the gold spec by default.
    irp_params: Dict[str, float] = field(default_factory=lambda: dict(IRP_GOLD))

    @property
    def is_blocked(self) -> bool:
        """True when this result forbids entry entirely."""
        if self.action == 'BLOCK':
            return True
        return self.lev_mult == 0.0
|
||||
|
||||
|
||||
# ── Strategy implementations ───────────────────────────────────────────────────
|
||||
|
||||
def strategy_A_lev_scale(adv: dict) -> GateResult:
    """
    Strategy A — LEV_SCALE
    Soft leverage reduction proportional to advisory label.
    Never boosts beyond gold spec (no mult > 1.0).
    """
    label = adv["advisory_label"]
    # Only negative labels reduce leverage; anything else (including an
    # unknown label) passes through at 1.0×.
    reductions = {"UNFAVORABLE": 0.50, "MILD_NEGATIVE": 0.75}
    mult = reductions.get(label, 1.00)
    action = "ALLOW" if mult >= 1.0 else "SCALE"
    return GateResult(action=action, lev_mult=mult,
                      reason=f"A_LEV_SCALE({label},{mult:.2f}x)")
|
||||
|
||||
|
||||
def strategy_B_hard_block(adv: dict) -> GateResult:
    """
    Strategy B — HARD_BLOCK
    Block entry when UNFAVORABLE in the two worst sessions.
    Monday: reduce to 60% (WR=27.2%, not blocking entirely to maintain diversity).
    """
    label = adv["advisory_label"]
    session = adv["session"]

    # Worst two sessions per the historical session table.
    bad_sessions = ("NY_AFTERNOON", "LOW_LIQUIDITY")
    if label == "UNFAVORABLE" and session in bad_sessions:
        return GateResult("BLOCK", 0.0,
                          f"B_HARD_BLOCK(UNFAVORABLE+{session})")

    if adv["dow"] == 0:  # Monday
        return GateResult("SCALE", 0.60,
                          "B_HARD_BLOCK(Monday,0.60x)")

    return GateResult("ALLOW", 1.0, "B_HARD_BLOCK(ALLOW)")
|
||||
|
||||
|
||||
def strategy_C_dow_block(adv: dict) -> GateResult:
    """
    Strategy C — DOW_BLOCK
    Block ALL entries on Monday (WR=27.2%, n=81, most robust single signal).
    """
    # dow == 0 is Monday.
    if adv["dow"] != 0:
        return GateResult("ALLOW", 1.0, "C_DOW_BLOCK(ALLOW)")
    return GateResult("BLOCK", 0.0, "C_DOW_BLOCK(Monday)")
|
||||
|
||||
|
||||
def strategy_D_session_block(adv: dict) -> GateResult:
    """
    Strategy D — SESSION_BLOCK
    Block ALL entries during NY_AFTERNOON (WR=35.4%, n=127, net=-$3,857).
    """
    if adv["session"] != "NY_AFTERNOON":
        return GateResult("ALLOW", 1.0, "D_SESSION_BLOCK(ALLOW)")
    return GateResult("BLOCK", 0.0, "D_SESSION_BLOCK(NY_AFTERNOON)")
|
||||
|
||||
|
||||
def strategy_E_combined(adv: dict) -> GateResult:
    """
    Strategy E — COMBINED
    Block Monday OR NY_AFTERNOON. The two highest-confidence single-factor signals.
    Together they cover 208 of 637 trades at WR=32.2% (heavy combined drag).
    """
    # Monday is checked first, matching the reason string priority.
    block_reason = None
    if adv["dow"] == 0:
        block_reason = "E_COMBINED(Monday)"
    elif adv["session"] == "NY_AFTERNOON":
        block_reason = "E_COMBINED(NY_AFTERNOON)"

    if block_reason is not None:
        return GateResult("BLOCK", 0.0, block_reason)
    return GateResult("ALLOW", 1.0, "E_COMBINED(ALLOW)")
|
||||
|
||||
|
||||
def strategy_F_s6_bucket(adv: dict) -> GateResult:
    """
    Strategy F — S6_BUCKET_MODULATION (primary research target)

    Returns EsoF-modulated S6 bucket multipliers. The ALLOW action with
    lev_mult=1.0 means: apply the returned s6_mult table to scale position
    size per bucket at the routing layer (and irp_params to the IRP filter).

    FAVORABLE   → widen selection (B4 back at 0.2×, B5/B0/B1 boosted)
    NEUTRAL     → base S6 (gold scenario from CRITICAL_ASSET_PICKING doc)
    UNFAVORABLE → concentrate (B3+B6 only, S2-like)

    IMPORTANT: This strategy cannot be evaluated against historical PnL alone
    because it changes WHICH trades occur (more/fewer assets qualify at the
    routing layer). A counterfactual that rescales the SAME trades is only a
    lower-bound estimate of the real effect.
    """
    label = adv["advisory_label"]
    # Copies are returned so downstream mutation never touches the tables.
    return GateResult(
        "ALLOW",
        1.0,
        f"F_S6_BUCKET({label})",
        s6_mult=dict(S6_MULT.get(label, S6_BASE)),
        irp_params=dict(IRP_PARAMS.get(label, IRP_GOLD)),
    )
|
||||
|
||||
|
||||
# ── Unified dispatcher ─────────────────────────────────────────────────────────

# Human-readable names for the strategy codes accepted by apply_gate().
STRATEGY_NAMES = dict(
    A="A_LEV_SCALE",
    B="B_HARD_BLOCK",
    C="C_DOW_BLOCK",
    D="D_SESSION_BLOCK",
    E="E_COMBINED",
    F="F_S6_BUCKET",
)

# Code → implementation; the lookup table behind apply_gate().
_STRATEGY_FNS = dict(
    A=strategy_A_lev_scale,
    B=strategy_B_hard_block,
    C=strategy_C_dow_block,
    D=strategy_D_session_block,
    E=strategy_E_combined,
    F=strategy_F_s6_bucket,
)
|
||||
|
||||
|
||||
def apply_gate(strategy: str, advisory: dict) -> GateResult:
    """
    Apply a named gate strategy to an advisory dict.

    Args:
        strategy: Key from STRATEGY_NAMES ('A'..'F')
        advisory: Dict returned by compute_esof() from esof_advisor.py

    Returns:
        GateResult with action, lev_mult, reason, s6_mult, irp_params.

    Raises:
        KeyError: if strategy key is unknown.
    """
    if strategy not in _STRATEGY_FNS:
        raise KeyError(f"Unknown strategy '{strategy}'. Valid: {list(_STRATEGY_FNS)}")
    return _STRATEGY_FNS[strategy](advisory)
|
||||
|
||||
|
||||
def get_s6_mult(advisory: dict, bucket_id: int) -> float:
    """Convenience: return S6 bucket multiplier for a specific advisory + bucket."""
    table = S6_MULT.get(advisory["advisory_label"], S6_BASE)
    # Unknown bucket ids fall back to the B0 base weighting (0.4).
    return table.get(bucket_id, 0.4)
|
||||
|
||||
|
||||
def get_irp_params(advisory: dict) -> Dict[str, float]:
    """Convenience: return IRP filter params for a specific advisory."""
    label = advisory["advisory_label"]
    # Copy so callers can mutate freely without touching the shared tables.
    return dict(IRP_PARAMS.get(label, IRP_GOLD))
|
||||
168
Observability/trade_audit.py
Executable file
168
Observability/trade_audit.py
Executable file
@@ -0,0 +1,168 @@
|
||||
#!/usr/bin/env python3
|
||||
"""DOLPHIN trade audit — reconstructs capital from log, compares to live HZ.
|
||||
|
||||
Run: source /home/dolphin/siloqy_env/bin/activate && python trade_audit.py
|
||||
"""
|
||||
import json, re, sys, time
|
||||
from pathlib import Path
|
||||
from datetime import datetime, timezone
|
||||
|
||||
# Audit inputs ----------------------------------------------------------------
TRADER_LOG = Path("/mnt/dolphinng5_predict/prod/supervisor/logs/nautilus_trader.log")
INITIAL_CAP = 25_000.0  # from engine config

# Log-line shapes: "[<timestamp>] ENTRY: <payload>" / "[<timestamp>] EXIT: <payload>"
RE_ENTRY = re.compile(r"\[(.+?)\] ENTRY: (.+)")
RE_EXIT = re.compile(r"\[(.+?)\] EXIT: (.+)")

# ANSI colours for terminal output.
GREEN = "\033[32m"
RED = "\033[31m"
YELLOW = "\033[33m"
CYAN = "\033[36m"
BOLD = "\033[1m"
DIM = "\033[2m"
RST = "\033[0m"
|
||||
|
||||
def _parse_dict(s):
|
||||
"""Parse Python dict repr (single quotes) or JSON."""
|
||||
try:
|
||||
return json.loads(s)
|
||||
except Exception:
|
||||
try:
|
||||
return json.loads(s.replace("'", '"').replace("nan", "null").replace("True","true").replace("False","false"))
|
||||
except Exception:
|
||||
return {}
|
||||
|
||||
def parse_trades(log_path):
    """Scan the trader log and pair ENTRY/EXIT lines by trade_id.

    Returns (completed, open_trades): completed trades carry merged
    entry + exit fields; open_trades are entries with no EXIT seen yet.
    """
    pending = {}    # trade_id → entry record, removed once the EXIT arrives
    completed = []

    for raw_line in log_path.read_text(errors="replace").splitlines():
        entry_match = RE_ENTRY.search(raw_line)
        if entry_match:
            payload = _parse_dict(entry_match.group(2))
            if payload.get("trade_id"):
                pending[payload["trade_id"]] = {"entry_ts": entry_match.group(1),
                                                **payload}
        exit_match = RE_EXIT.search(raw_line)
        if exit_match:
            payload = _parse_dict(exit_match.group(2))
            tid = payload.get("trade_id")
            if tid and tid in pending:
                record = pending.pop(tid)
                record.update(
                    exit_ts=exit_match.group(1),
                    reason=payload.get("reason", "?"),
                    pnl_pct=payload.get("pnl_pct"),
                    net_pnl=payload.get("net_pnl"),
                    bars_held=payload.get("bars_held", 0),
                )
                completed.append(record)

    # Whatever never matched an EXIT is still open.
    return completed, list(pending.values())
|
||||
|
||||
def hz_capital():
    """Fetch (capital, trades_executed) from the live Hazelcast engine snapshot.

    Returns (None, None) when the cluster is unreachable or the snapshot
    entry is empty; failures are reported, never raised.
    """
    try:
        import hazelcast  # third-party; only needed for the live comparison
        client = hazelcast.HazelcastClient(
            cluster_name="dolphin", cluster_members=["localhost:5701"],
            connection_timeout=3.0)
        raw = client.get_map("DOLPHIN_STATE_BLUE").blocking().get("engine_snapshot")
        client.shutdown()
        if raw:
            snapshot = json.loads(raw)
            return snapshot.get("capital"), snapshot.get("trades_executed")
    except Exception as e:
        print(f"{YELLOW}HZ unavailable: {e}{RST}")
    return None, None
|
||||
|
||||
def main():
    """Replay the trader log, rebuild capital trade-by-trade, and compare
    the reconstructed figure against the live Hazelcast engine snapshot."""
    print(f"\n{BOLD}{CYAN}🐬 DOLPHIN TRADE AUDIT{RST} {DIM}{datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M UTC')}{RST}\n")

    trades, open_trades = parse_trades(TRADER_LOG)
    print(f"Parsed {len(trades)} completed trades, {len(open_trades)} open.\n")

    # ── Trade-by-trade reconstruction ────────────────────────────────────────
    capital = INITIAL_CAP
    wins = losses = skipped = 0
    total_pnl = 0.0
    peak_cap = INITIAL_CAP          # running high-water mark for drawdown
    max_dd = 0.0                    # worst drawdown seen, as % of peak
    consecutive_losses = 0
    max_consec_loss = 0

    print(f"{'#':>4} {'Date':>16} {'Asset':<12} {'Lev':>5} {'Notional':>10} {'PnL $':>10} {'PnL %':>7} {'Reason':<16} {'Capital':>12}")
    print("─" * 115)

    for i, t in enumerate(trades, 1):
        # pnl_pct is treated as a fraction here (×100 for display below).
        pnl_pct = t.get("pnl_pct")
        net_pnl = t.get("net_pnl")
        notional= t.get("notional")
        lev = t.get("leverage", 0) or 0   # `or 0` also guards a None leverage
        asset = t.get("asset", "?")
        reason = t.get("reason", "?")
        ts = t.get("entry_ts", "")[:16].replace("T", " ")

        # Reconstruct net_pnl if missing (nan from early runs)
        if net_pnl is None and pnl_pct is not None and notional is not None:
            net_pnl = pnl_pct * notional

        # Still unrecoverable → report the row but keep capital untouched.
        if net_pnl is None:
            skipped += 1
            pnl_str = f"{YELLOW}nan{RST}"
            print(f"{i:>4} {ts:>16} {asset:<12} {lev:>5.2f} {'nan':>10} {pnl_str:>10} {'?':>7} {reason:<16} {capital:>12,.2f} {DIM}(skipped — nan){RST}")
            continue

        capital += net_pnl
        total_pnl += net_pnl
        peak_cap = max(peak_cap, capital)
        dd = (peak_cap - capital) / peak_cap * 100
        max_dd = max(max_dd, dd)

        # Break-even trades count as wins and reset the loss streak.
        if net_pnl >= 0:
            wins += 1
            consecutive_losses = 0
            pc = GREEN
        else:
            losses += 1
            consecutive_losses += 1
            max_consec_loss = max(max_consec_loss, consecutive_losses)
            pc = RED

        # NOTE(review): a literal notional of 0 also renders as "?" here —
        # confirm 0-notional trades cannot occur.
        notional_str = f"${notional:,.0f}" if notional else "?"
        pnl_pct_val = pnl_pct * 100 if pnl_pct is not None else 0
        print(f"{i:>4} {ts:>16} {asset:<12} {lev:>5.2f} {notional_str:>10} "
              f"{pc}{net_pnl:>+10.2f}{RST} {pc}{pnl_pct_val:>+6.2f}%{RST} "
              f"{reason:<16} {capital:>12,.2f}")

    # ── Summary ───────────────────────────────────────────────────────────────
    completed = wins + losses       # excludes skipped (nan) rows
    wr = wins / completed * 100 if completed else 0
    roi = (capital - INITIAL_CAP) / INITIAL_CAP * 100

    print("─" * 115)
    print(f"\n{BOLD}SUMMARY{RST}")
    print(f" Completed trades : {completed} ({skipped} skipped — pre-fix nan)")
    print(f" Wins / Losses : {GREEN}{wins}{RST} / {RED}{losses}{RST} → WR: {GREEN if wr>=50 else RED}{wr:.1f}%{RST}")
    print(f" Total PnL : {GREEN if total_pnl>=0 else RED}{total_pnl:+,.2f}{RST}")
    print(f" Initial capital : ${INITIAL_CAP:,.2f}")
    print(f" Audit capital : {GREEN if capital >= INITIAL_CAP else RED}${capital:,.2f}{RST}")
    print(f" Audit ROI : {GREEN if roi>=0 else RED}{roi:+.3f}%{RST}")
    print(f" Peak capital : ${peak_cap:,.2f}")
    print(f" Max drawdown : {RED if max_dd>20 else YELLOW if max_dd>10 else GREEN}{max_dd:.2f}%{RST}")
    print(f" Max consec.loss : {max_consec_loss}")
    if open_trades:
        print(f" {YELLOW}Open positions : {len(open_trades)} (not counted in audit){RST}")
        for ot in open_trades:
            print(f" → {ot.get('asset')} lev:{ot.get('leverage',0):.2f}x notional:{ot.get('notional','?')}")

    # ── HZ comparison ─────────────────────────────────────────────────────────
    # Reconstructed capital should agree with the engine's own snapshot to
    # within $1; larger gaps are highlighted as mismatches.
    print(f"\n{BOLD}LIVE HZ COMPARISON{RST}")
    hz_cap, hz_trades = hz_capital()
    if hz_cap is not None:
        diff = hz_cap - capital
        match = abs(diff) < 1.0
        mc = GREEN if match else (YELLOW if abs(diff) < 50 else RED)
        print(f" HZ capital : ${hz_cap:,.2f}")
        print(f" Audit capital: ${capital:,.2f}")
        print(f" Difference : {mc}{diff:+,.2f}{RST} {'✓ MATCH' if match else '⚠ MISMATCH'}")
        print(f" HZ trades : {hz_trades} Audit trades: {completed} completed + {skipped} skipped")
    else:
        print(f" {YELLOW}HZ not available — run standalone to compare{RST}")

    print()


if __name__ == "__main__":
    main()
|
||||
Reference in New Issue
Block a user