initial: import DOLPHIN baseline 2026-04-21 from dolphinng5_predict working tree
Includes core prod + GREEN/BLUE subsystems: - prod/ (BLUE harness, configs, scripts, docs) - nautilus_dolphin/ (GREEN Nautilus-native impl + dvae/ preserved) - adaptive_exit/ (AEM engine + models/bucket_assignments.pkl) - Observability/ (EsoF advisor, TUI, dashboards) - external_factors/ (EsoF producer) - mc_forewarning_qlabs_fork/ (MC regime/envelope) Excludes runtime caches, logs, backups, and reproducible artifacts per .gitignore.
This commit is contained in:
4
Observability/TUI/_check_textual.py
Executable file
4
Observability/TUI/_check_textual.py
Executable file
@@ -0,0 +1,4 @@
|
||||
# Quick sanity probe: report the installed textual version and the
# public (capitalized) names exported by its widgets/containers modules.
import textual
import textual.widgets as w
import textual.containers as c

print("version:", textual.__version__)
print("widgets:", sorted(name for name in dir(w) if name[0].isupper()))
print("containers:", sorted(name for name in dir(c) if name[0].isupper()))
|
||||
8
Observability/TUI/_find_textual.py
Executable file
8
Observability/TUI/_find_textual.py
Executable file
@@ -0,0 +1,8 @@
|
||||
# Diagnostic probe: which interpreter and which textual install are in use,
# and which widget/container classes that install exports.
import sys
import textual
import textual.widgets as w

print("python:", sys.executable)
print("version:", textual.__version__)
print("location:", textual.__file__)

exported = [name for name in dir(w) if name[0].isupper()]
exported.sort()
print("widgets:", exported)

import textual.containers as c  # imported late: containers are probed last

print("containers:", sorted(name for name in dir(c) if name[0].isupper()))
|
||||
4
Observability/TUI/_widgets_check.py
Executable file
4
Observability/TUI/_widgets_check.py
Executable file
@@ -0,0 +1,4 @@
|
||||
# One-shot check of the textual install: version plus exported widget and
# container class names (capitalized attributes only).
import textual
import textual.widgets as w
import textual.containers as c

print("textual version:", textual.__version__)
print("WIDGETS:", sorted(name for name in dir(w) if name[0].isupper()))
print("CONTAINERS:", sorted(name for name in dir(c) if name[0].isupper()))
|
||||
2654
Observability/TUI/dolphin_tui.py
Executable file
2654
Observability/TUI/dolphin_tui.py
Executable file
File diff suppressed because it is too large
Load Diff
372
Observability/TUI/dolphin_tui_v2.py
Executable file
372
Observability/TUI/dolphin_tui_v2.py
Executable file
@@ -0,0 +1,372 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
DOLPHIN TUI v2 — full layout, mock data, sexy MC-Forewarner footer.
|
||||
Run: python3 dolphin_tui_v2.py
|
||||
q=quit r=refresh l=toggle log
|
||||
"""
|
||||
import time
|
||||
import math
|
||||
from collections import deque
|
||||
from textual.app import App, ComposeResult
|
||||
from textual.widgets import Static, ProgressBar, Sparkline, Digits, Rule
|
||||
from textual.containers import Horizontal, Vertical
|
||||
|
||||
# ── CSS ───────────────────────────────────────────────────────────────────────
|
||||
CSS = """
|
||||
Screen { background: #0d0d0d; color: #d0d0d0; }
|
||||
|
||||
#header { height: 2; background: #111; border: solid #333; padding: 0 1; }
|
||||
#trader_row { height: 5; }
|
||||
#top_row { height: 9; }
|
||||
#mid_row { height: 9; }
|
||||
#bot_row { height: 7; }
|
||||
#log_row { height: 5; display: none; }
|
||||
|
||||
/* MC Footer */
|
||||
#mc_footer_outer {
|
||||
height: 7;
|
||||
border: solid #336;
|
||||
background: #080818;
|
||||
}
|
||||
#mc_title { height: 1; padding: 0 1; }
|
||||
#mc_body { height: 6; }
|
||||
#mc_left { width: 18; padding: 0 1; }
|
||||
#mc_center { width: 1fr; padding: 0 1; }
|
||||
#mc_right { width: 30; padding: 0 1; }
|
||||
|
||||
#mc_prob_label { height: 1; }
|
||||
#mc_prob_bar { height: 1; }
|
||||
#mc_env_label { height: 1; }
|
||||
#mc_env_bar { height: 1; }
|
||||
#mc_spark_label { height: 1; }
|
||||
#mc_sparkline { height: 2; }
|
||||
|
||||
#mc_digits { height: 3; }
|
||||
#mc_status_text { height: 2; }
|
||||
|
||||
ProgressBar > .bar--bar { color: $success; }
|
||||
ProgressBar > .bar--complete { color: $success; }
|
||||
ProgressBar.-danger > .bar--bar { color: $error; }
|
||||
ProgressBar.-warning > .bar--bar { color: $warning; }
|
||||
|
||||
Static.panel {
|
||||
border: solid #3a3a3a;
|
||||
padding: 0 1;
|
||||
height: 100%;
|
||||
}
|
||||
#panel_trader { width: 1fr; border: solid #00aa88; }
|
||||
#panel_health { width: 1fr; }
|
||||
#panel_alpha { width: 1fr; }
|
||||
#panel_scan { width: 1fr; }
|
||||
#panel_extf { width: 1fr; }
|
||||
#panel_esof { width: 1fr; }
|
||||
#panel_capital { width: 1fr; }
|
||||
#panel_prefect { width: 1fr; }
|
||||
#panel_obf { width: 1fr; }
|
||||
#panel_log { width: 1fr; border: solid #444; padding: 0 1; }
|
||||
"""
|
||||
|
||||
# ── Helpers ───────────────────────────────────────────────────────────────────
|
||||
|
||||
def prefect_dot(status: str, blink_frame: bool) -> str:
    """Map a Prefect flow-run state to a colored Rich-markup status dot.

    RUNNING alternates glyphs with *blink_frame* to give a blinking effect;
    every other known state maps to a solid colored dot, unknown states dim.
    """
    state = status.upper()
    if state == "RUNNING":
        # Blink: bright on even frames, hollow/dim on odd frames.
        return "[cyan]◉[/cyan]" if blink_frame else "[dim]◌[/dim]"
    solid_colors = {
        "COMPLETED": "green",
        "FAILED": "red",
        "CRASHED": "red",
        "LATE": "dark_orange",
        "PENDING": "yellow",
    }
    color = solid_colors.get(state, "dim")
    return f"[{color}]●[/{color}]"
|
||||
|
||||
# Static mock Prefect rows rendered in the PREFECT panel:
# (flow_name, state, time-since-start label).
MOCK_FLOWS = [
    ("paper_trade_flow", "COMPLETED", "2m"),
    ("nautilus_prefect", "COMPLETED", "8m"),
    ("obf_prefect_flow", "RUNNING", "0m"),
    ("exf_fetcher_flow", "COMPLETED", "15m"),
    ("mc_forewarner_flow", "RUNNING", "3m"),
]


# Mock open-position rows: (symbol, side, qty, entry_price, current_price).
MOCK_POSITIONS = [
    ("BTCUSDT", "SHORT", 0.01, 83420.5, 83278.2),
    ("ETHUSDT", "SHORT", 0.10, 1612.3, 1598.7),
]
|
||||
|
||||
def mock_open_positions(n: int) -> list:
    """Return 0, 1, or 2 mock positions, cycling every 20 ticks of *n*.

    The current price drifts with ``n % 10`` so the panel visibly updates.
    """
    drift = n % 10
    phase = (n // 20) % 3
    if phase == 0:
        return []
    if phase == 1:
        sym, side, qty, entry, cur = MOCK_POSITIONS[0]
        return [(sym, side, qty, entry, cur - drift * 2.1)]
    return [
        (sym, side, qty, entry, cur - drift * 1.5)
        for sym, side, qty, entry, cur in MOCK_POSITIONS
    ]
|
||||
|
||||
def mc_mock(n: int) -> dict:
    """Synthesize an MC-Forewarner payload for tick *n*.

    Mirrors the real DOLPHIN_FEATURES['mc_forewarner_latest'] schema.
    Thresholds: GREEN<0.10 ORANGE<0.30 RED>=0.30
    """
    def clamp01(x: float) -> float:
        return max(0.0, min(1.0, x))

    phase = n * 0.05
    prob = clamp01(0.12 + 0.10 * math.sin(phase))
    env = clamp01(0.82 - 0.08 * abs(math.sin(phase * 1.3)))
    if prob < 0.10:
        status = "GREEN"
    elif prob < 0.30:
        status = "ORANGE"
    else:
        status = "RED"
    return {
        "status": status,
        "catastrophic_prob": prob,
        "envelope_score": env,
        "source": "MOCK",
        "timestamp": time.strftime("%H:%M:%SZ", time.gmtime()),
    }
|
||||
|
||||
# ── App ───────────────────────────────────────────────────────────────────────
|
||||
|
||||
class DolphinTUI(App):
    """Mock-data DOLPHIN observability TUI (v2).

    Every panel is filled with synthetic values derived from a 1 Hz tick
    counter; no external service is contacted.  Bindings: q=quit,
    r=force refresh, l=toggle the log row.
    """

    CSS = CSS
    BINDINGS = [("q","quit","Quit"),("r","refresh","Refresh"),("l","toggle_log","Log")]

    # UI state.  _prob_history is seeded in on_mount().
    _log_visible = False      # whether #log_row is displayed
    _tick_n = 0               # monotonically increasing 1 Hz tick counter
    _prob_history: deque = None  # rolling catastrophic_prob samples (maxlen 40)

    def compose(self) -> ComposeResult:
        """Build the static widget tree; all text is painted by _update()."""
        yield Static("", id="header")

        with Horizontal(id="trader_row"):
            yield Static("", classes="panel", id="panel_trader")

        with Horizontal(id="top_row"):
            yield Static("", classes="panel", id="panel_health")
            yield Static("", classes="panel", id="panel_alpha")
            yield Static("", classes="panel", id="panel_scan")

        with Horizontal(id="mid_row"):
            yield Static("", classes="panel", id="panel_extf")
            yield Static("", classes="panel", id="panel_esof")
            yield Static("", classes="panel", id="panel_capital")

        with Horizontal(id="bot_row"):
            yield Static("", classes="panel", id="panel_prefect")
            yield Static("", classes="panel", id="panel_obf")

        # ── MC-Forewarner footer ──────────────────────────────────────────────
        with Vertical(id="mc_footer_outer"):
            yield Static("", id="mc_title")
            with Horizontal(id="mc_body"):
                # Left: big probability digits
                with Vertical(id="mc_left"):
                    yield Digits("0.00", id="mc_digits")
                    yield Static("", id="mc_status_text")
                # Center: progress bars + sparkline
                with Vertical(id="mc_center"):
                    yield Static("", id="mc_prob_label")
                    yield ProgressBar(total=100, show_eta=False, show_percentage=False,
                                      id="mc_prob_bar")
                    yield Static("", id="mc_env_label")
                    yield ProgressBar(total=100, show_eta=False, show_percentage=False,
                                      id="mc_env_bar")
                    yield Static("", id="mc_spark_label")
                    yield Sparkline([], id="mc_sparkline")
                # Right: threshold legend + source
                with Vertical(id="mc_right"):
                    yield Static("", id="mc_legend")

        with Horizontal(id="log_row"):
            yield Static("", classes="panel", id="panel_log")

    def on_mount(self) -> None:
        """Seed the probability history and start the 1 Hz redraw timer."""
        self._prob_history = deque([0.12] * 40, maxlen=40)
        self.set_interval(1, self._update)
        self._update()

    def _update(self) -> None:
        """Recompute every mock metric from the tick counter and repaint all panels."""
        n = self._tick_n
        self._tick_n += 1
        blink = (n % 2 == 0)
        t = time.strftime("%Y-%m-%d %H:%M:%S UTC", time.gmtime())

        # Synthetic metrics derived from n so the display visibly changes.
        cap = 124532.10 + n * 0.5
        pnl = 1240.50 + n * 0.1
        rm = 0.82 + (n % 10) * 0.01
        vel = -0.031 - (n % 5) * 0.002
        scan = 59000 + n
        age = (n % 5) + 0.1
        age_col = "green" if age < 15 else "yellow"
        mc = mc_mock(n)

        # ── HEADER ────────────────────────────────────────────────────────────
        hz = "[green][HZ ✓][/green]"
        # Color for the MC status tag, reused throughout the footer.
        sc = {"GREEN":"green","ORANGE":"dark_orange","RED":"red"}.get(mc["status"],"dim")
        self.query_one("#header", Static).update(
            f"[bold cyan]🐬 DOLPHIN-NAUTILUS[/bold cyan] v2.0 │ {t}"
            f" │ [green]● GREEN[/green] {hz}"
            f" │ MC:[{sc}]{mc['status']}[/{sc}]\n"
            f"[dim] localhost:5701 │ q=quit r=refresh l=log[/dim]"
        )

        # ── TRADER ────────────────────────────────────────────────────────────
        positions = mock_open_positions(n)
        pos_lines = " ".join(
            f"[cyan]{sym}[/cyan] [yellow]{side}[/yellow] {qty}"
            f"@[dim]{entry:,.0f}[/dim]→[green]{cur:,.0f}[/green]"
            f"([green]+${abs((entry-cur)*qty):,.1f}[/green])"
            for sym, side, qty, entry, cur in positions
        ) if positions else "[dim]NONE[/dim]"
        vol_ok = "[green]YES[/green]" if (n % 8) < 6 else "[yellow]NO[/yellow]"
        self.query_one("#panel_trader", Static).update(
            f"[bold cyan]NAUTILUS-DOLPHIN TRADER[/bold cyan]"
            f" posture:[green]APEX[/green] bar:{scan} vol:{vol_ok}"
            f" trades:[cyan]{12+n//30}[/cyan] cap:[cyan]${cap:,.0f}[/cyan]\n"
            f" open: {pos_lines}\n"
            f" vel:[yellow]{vel:.5f}[/yellow] thr:-0.02000 pnl:[green]+${pnl:,.2f}[/green]"
        )

        # ── SYSTEM HEALTH ─────────────────────────────────────────────────────
        self.query_one("#panel_health", Static).update(
            f"[bold]SYS HEALTH[/bold]\n"
            f"rm_meta:[green]{rm:.3f}[/green]\n"
            f"M1:[green]1.0[/green] M2:[green]1.0[/green] M3:[green]1.0[/green]\n"
            f"M4:[green]1.0[/green] M5:[green]1.0[/green]\n"
            f"[green]● GREEN[/green]"
        )

        # ── ALPHA ENGINE ──────────────────────────────────────────────────────
        # 16-cell block gauge of rm.
        filled = int(rm * 16)
        bar = "█" * filled + "░" * (16 - filled)
        self.query_one("#panel_alpha", Static).update(
            f"[bold]ALPHA ENGINE[/bold]\n"
            f"Posture:[green]APEX ●[/green]\n"
            f"Rm:[green]{bar}[/green]{rm:.2f}\n"
            f"ACB:1.55x β=0.80\n"
            f"C1:[green].9[/green] C2:[green].8[/green] C3:[yellow].7[/yellow]"
            f" C4:[green]1.[/green] C5:[green].9[/green]"
        )

        # ── SCAN BRIDGE ───────────────────────────────────────────────────────
        self.query_one("#panel_scan", Static).update(
            f"[bold]SCAN / NG7[/bold]\n"
            f"#{scan} age:[{age_col}]{age:.1f}s[/{age_col}]\n"
            f"vel_div:[{age_col}]{vel:.4f}[/{age_col}]\n"
            f"w50:-0.0421 w750:-0.0109\n"
            f"inst:0.0234"
        )

        # ── ExtF ──────────────────────────────────────────────────────────────
        self.query_one("#panel_extf", Static).update(
            f"[bold]ExtF[/bold] [green]9/9 ✓[/green]\n"
            f"fund:[cyan]-0.012[/cyan] dvol:[cyan]62.4[/cyan]\n"
            f"fng:[yellow]28[/yellow] taker:0.81\n"
            f"vix:18.2 ls:0.48\n"
            f"age:[green]4.2s[/green]"
        )

        # ── EsoF ──────────────────────────────────────────────────────────────
        self.query_one("#panel_esof", Static).update(
            f"[bold]EsoF[/bold]\n"
            f"Moon: Waxing Gibbous\n"
            f"Merc:[green]Normal[/green]\n"
            f"Sess:London MC:0.42\n"
            f"age:[green]3.8s[/green]"
        )

        # ── CAPITAL ───────────────────────────────────────────────────────────
        self.query_one("#panel_capital", Static).update(
            f"[bold]CAPITAL[/bold]\n"
            f"Cap:[cyan]${cap:,.0f}[/cyan]\n"
            f"DD:[yellow]-3.21%[/yellow]\n"
            f"PnL:[green]+${pnl:,.2f}[/green]\n"
            f"Pos:[green]APEX[/green] T:{12+n//30}"
        )

        # ── PREFECT ───────────────────────────────────────────────────────────
        flow_lines = "\n".join(
            f"{prefect_dot(st, blink)} {name:<22} {dur}"
            for name, st, dur in MOCK_FLOWS
        )
        self.query_one("#panel_prefect", Static).update(
            f"[bold]PREFECT[/bold] [green]✓[/green]\n{flow_lines}"
        )

        # ── OBF ───────────────────────────────────────────────────────────────
        self.query_one("#panel_obf", Static).update(
            f"[bold]OBF TOP[/bold]\n"
            f"BTC [green]+0.18[/green] fp:0.72\n"
            f"ETH [green]+0.12[/green] fp:0.68\n"
            f"SOL [green]+0.09[/green] fp:0.61\n"
            f"BNB [red]-0.05[/red] fp:0.51"
        )

        # ── MC-FOREWARNER FOOTER (sexy) ───────────────────────────────────────
        prob = mc["catastrophic_prob"]
        env = mc["envelope_score"]
        self._prob_history.append(prob)

        # Title bar
        self.query_one("#mc_title", Static).update(
            f"[bold cyan]⚡ MC-FOREWARNER RISK MANIFOLD[/bold cyan]"
            f" [{sc}]▶ {mc['status']}[/{sc}]"
            f" [dim]src:{mc['source']} {mc['timestamp']}[/dim]"
        )

        # Left: Digits widget showing probability as large text
        self.query_one("#mc_digits", Digits).update(f"{prob:.3f}")
        status_emoji = {"GREEN": "🟢 SAFE", "ORANGE": "🟡 CAUTION", "RED": "🔴 DANGER"}.get(mc["status"], "⚪")
        self.query_one("#mc_status_text", Static).update(
            f"[{sc}]{status_emoji}[/{sc}]\n[dim]cat.prob[/dim]"
        )

        # Center: ProgressBar for catastrophic_prob (danger = high value)
        prob_pct = int(prob * 100)
        prob_bar = self.query_one("#mc_prob_bar", ProgressBar)
        prob_bar.progress = prob_pct
        # Apply danger CSS class based on threshold
        prob_bar.remove_class("-danger", "-warning")
        if prob >= 0.30: prob_bar.add_class("-danger")
        elif prob >= 0.10: prob_bar.add_class("-warning")
        self.query_one("#mc_prob_label", Static).update(
            f"[dim]catastrophic_prob[/dim] "
            f"[green]▏GREEN<0.10[/green] [yellow]▏ORANGE<0.30[/yellow] [red]▏RED≥0.30[/red]"
            f" [{sc}]{prob:.4f}[/{sc}]"
        )

        # ProgressBar for envelope_score (safe = high value, so invert display)
        env_pct = int(env * 100)
        env_bar = self.query_one("#mc_env_bar", ProgressBar)
        env_bar.progress = env_pct
        env_bar.remove_class("-danger", "-warning")
        if env < 0.40: env_bar.add_class("-danger")
        elif env < 0.70: env_bar.add_class("-warning")
        self.query_one("#mc_env_label", Static).update(
            f"[dim]envelope_score [/dim]"
            f"[red]▏DANGER<0.40[/red] [yellow]▏CAUTION<0.70[/yellow] [green]▏SAFE≥0.70[/green]"
            f" [green]{env:.4f}[/green]"
        )

        # Sparkline: rolling 40-sample history of catastrophic_prob
        self.query_one("#mc_spark_label", Static).update(
            f"[dim]prob history (40s)[/dim] "
            f"[dim]min:{min(self._prob_history):.3f} "
            f"max:{max(self._prob_history):.3f}[/dim]"
        )
        self.query_one("#mc_sparkline", Sparkline).data = list(self._prob_history)

        # Right: threshold legend
        self.query_one("#mc_legend", Static).update(
            f"[bold]THRESHOLDS[/bold]\n"
            f"[green]GREEN[/green] prob < 0.10\n"
            f"[yellow]ORANGE[/yellow] prob < 0.30\n"
            f"[red]RED[/red] prob ≥ 0.30\n"
            f"\n"
            f"[dim]runs every 4h[/dim]\n"
            f"[dim]model: DolphinForewarner[/dim]"
        )

        # ── LOG ───────────────────────────────────────────────────────────────
        if self._log_visible:
            self.query_one("#panel_log", Static).update(
                f"[bold]LOG[/bold] (l=hide)\n"
                f"[dim]{t}[/dim] [INFO] RM_META=0.923 GREEN\n"
                f"[dim]{t}[/dim] [INFO] SCAN #{scan} vel={vel:.4f}\n"
                f"[dim]{t}[/dim] [INFO] MC {mc['status']} prob={prob:.4f}"
            )

    def action_refresh(self) -> None:
        """Key 'r': repaint all panels immediately."""
        self._update()

    def action_toggle_log(self) -> None:
        """Key 'l': show or hide the log row."""
        self._log_visible = not self._log_visible
        self.query_one("#log_row").display = self._log_visible
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Launch the mock TUI directly: python3 dolphin_tui_v2.py
    DolphinTUI().run()
|
||||
1053
Observability/TUI/dolphin_tui_v3.py
Executable file
1053
Observability/TUI/dolphin_tui_v3.py
Executable file
File diff suppressed because it is too large
Load Diff
694
Observability/TUI/dolphin_tui_v4.py
Executable file
694
Observability/TUI/dolphin_tui_v4.py
Executable file
@@ -0,0 +1,694 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
DOLPHIN TUI v4
|
||||
==============
|
||||
Fixes vs v3:
|
||||
• SYS HEALTH: tdr/scb labels expanded to full service names
|
||||
• ALPHA ENGINE: falls back to engine_snapshot when DOLPHIN_SAFETY is empty
|
||||
• MC-FOREWARNER: graceful "not yet run" display; shows last-run age; no crash on absent key
|
||||
• Version number shown in header after MC status
|
||||
|
||||
Run: source /home/dolphin/siloqy_env/bin/activate && python dolphin_tui_v4.py
|
||||
Keys: q=quit r=force-refresh l=toggle log t=toggle test footer
|
||||
"""
|
||||
|
||||
# ── stdlib ────────────────────────────────────────────────────────────────────
|
||||
import asyncio, json, math, threading, time
|
||||
from collections import deque
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
# ── third-party ───────────────────────────────────────────────────────────────
|
||||
try:
|
||||
from textual.app import App, ComposeResult
|
||||
from textual.containers import Horizontal, Vertical
|
||||
from textual.widgets import Digits, ProgressBar, Sparkline, Static
|
||||
except ImportError as e:
|
||||
raise SystemExit(f"textual not found — activate siloqy_env: {e}")
|
||||
|
||||
try:
|
||||
import hazelcast
|
||||
_HZ_OK = True
|
||||
except ImportError:
|
||||
_HZ_OK = False
|
||||
|
||||
# Displayed in the header so operators can tell which TUI build is running.
TUI_VERSION = "v4"

# File fallbacks, read when the corresponding Hazelcast entry is absent.
_META_JSON = Path("/mnt/dolphinng5_predict/run_logs/meta_health.json")
_CAPITAL_JSON = Path("/tmp/dolphin_capital_checkpoint.json")
_TEST_JSON = Path("/mnt/dolphinng5_predict/run_logs/test_results_latest.json")
# Symbols shown in the OBF panel, in display order.
_OBF_SYMS = ["BTCUSDT", "ETHUSDT", "SOLUSDT", "BNBUSDT", "XRPUSDT"]

# Rich-markup color maps: trading posture, MC-Forewarner status,
# and meta-health status respectively.
_PC = {"APEX":"green","STALKER":"yellow","TURTLE":"dark_orange","HIBERNATE":"red"}
_MC = {"GREEN":"green","ORANGE":"dark_orange","RED":"red"}
_SC = {"GREEN":"green","DEGRADED":"yellow","CRITICAL":"dark_orange","DEAD":"red"}
|
||||
|
||||
|
||||
# ── Thread-safe state ─────────────────────────────────────────────────────────
|
||||
class _State:
|
||||
def __init__(self):
|
||||
self._l = threading.Lock(); self._d: Dict[str, Any] = {}
|
||||
def put(self, k, v):
|
||||
with self._l: self._d[k] = v
|
||||
def get(self, k, default=None):
|
||||
with self._l: return self._d.get(k, default)
|
||||
def update(self, m):
|
||||
with self._l: self._d.update(m)
|
||||
|
||||
# Singleton store shared by all background threads and the UI.
_S = _State()


def _ingest(key, raw):
    """Parse *raw* JSON and store it in _S as "hz.<key>", stamping receipt time.

    Best-effort by design: empty payloads are skipped and malformed JSON is
    dropped silently, so a bad Hazelcast entry can never crash the TUI.
    """
    if not raw: return
    try: _S.update({f"hz.{key}": json.loads(raw), f"hz.{key}._t": time.time()})
    except Exception: pass
|
||||
|
||||
def start_hz_listener(on_scan=None):
    """Start a daemon thread that mirrors Hazelcast maps into _S.

    Seeds and subscribes to DOLPHIN_FEATURES, META_HEALTH, SAFETY,
    HEARTBEAT, STATE_BLUE and (best-effort) PNL_BLUE.  *on_scan*, if given,
    is invoked after every new eigen-scan entry so the UI can redraw at
    once.  The thread reconnects forever with a 10 s backoff and never
    raises into the caller; connectivity is published as _S["hz_up"].
    """
    if not _HZ_OK:
        _S.put("hz_up", False); return
    def _run():
        while True:
            try:
                _S.put("hz_up", False)
                client = hazelcast.HazelcastClient(
                    cluster_name="dolphin", cluster_members=["localhost:5701"],
                    connection_timeout=5.0)
                _S.put("hz_up", True)
                # Feature map: seed current values once, then listen for changes.
                fm = client.get_map("DOLPHIN_FEATURES").blocking()
                for k in ("latest_eigen_scan","exf_latest","acb_boost",
                          "obf_universe_latest","mc_forewarner_latest"):
                    _ingest(k, fm.get(k))
                def _f(e):
                    _ingest(e.key, e.value)
                    if e.key == "latest_eigen_scan" and on_scan:
                        # Callback crosses into the UI thread; never let it kill us.
                        try: on_scan()
                        except Exception: pass
                fm.add_entry_listener(include_value=True, updated=_f, added=_f)
                # Single-key maps: (hazelcast map, entry key, key used in _S).
                for map_name, key, state_key in [
                    ("DOLPHIN_META_HEALTH", "latest", "meta_health"),
                    ("DOLPHIN_SAFETY", "latest", "safety"),
                    ("DOLPHIN_HEARTBEAT", "nautilus_flow_heartbeat", "heartbeat"),
                ]:
                    m = client.get_map(map_name).blocking()
                    _ingest(state_key, m.get(key))
                    # Default args pin per-iteration values (late-binding closure fix).
                    def _cb(e, sk=state_key, ek=key):
                        if e.key == ek: _ingest(sk, e.value)
                    m.add_entry_listener(include_value=True, updated=_cb, added=_cb)
                stm = client.get_map("DOLPHIN_STATE_BLUE").blocking()
                _ingest("capital", stm.get("capital_checkpoint"))
                _ingest("engine_snapshot", stm.get("engine_snapshot"))
                def _cap(e):
                    if e.key == "capital_checkpoint": _ingest("capital", e.value)
                    elif e.key == "engine_snapshot": _ingest("engine_snapshot", e.value)
                stm.add_entry_listener(include_value=True, updated=_cap, added=_cap)
                # PnL map may not exist on every deployment — best-effort only.
                try:
                    pm = client.get_map("DOLPHIN_PNL_BLUE").blocking()
                    _ingest("pnl_blue", pm.get("session_perf"))
                    def _pnl(e):
                        if e.key == "session_perf": _ingest("pnl_blue", e.value)
                    pm.add_entry_listener(include_value=True, updated=_pnl, added=_pnl)
                except Exception: pass
                # Park while the client is healthy; drop out to reconnect otherwise.
                while True:
                    time.sleep(5)
                    if not getattr(client.lifecycle_service, "is_running", lambda: True)():
                        break
            except Exception as e:
                _S.put("hz_up", False); _S.put("hz_err", str(e)); time.sleep(10)
    threading.Thread(target=_run, daemon=True, name="hz-listener").start()
|
||||
|
||||
async def prefect_poll_loop():
    """Poll Prefect every 60 s and publish the latest run per flow into _S.

    Stores up to 8 rows under _S["prefect_flows"] plus a prefect_ok flag.
    Imports are kept inside the loop so a missing/broken prefect install
    degrades to prefect_ok=False instead of crashing the TUI.
    """
    while True:
        try:
            from prefect.client.orchestration import get_client
            from prefect.client.schemas.sorting import FlowRunSort
            async with get_client() as pc:
                runs = await pc.read_flow_runs(limit=20, sort=FlowRunSort.START_TIME_DESC)
                # Runs arrive newest-first, so the first run seen per flow_id
                # is that flow's most recent one.
                seen: Dict[str, Any] = {}
                fid_to_name: Dict[str, str] = {}
                for r in runs:
                    fid = str(r.flow_id)
                    if fid not in seen: seen[fid] = r
                rows = []
                for fid, r in seen.items():
                    if fid not in fid_to_name:
                        try:
                            f = await pc.read_flow(r.flow_id)
                            fid_to_name[fid] = f.name
                        except Exception: fid_to_name[fid] = fid[:8]
                    rows.append({"name": fid_to_name[fid],
                                 "state": r.state_name or "?",
                                 "ts": r.start_time.strftime("%m-%d %H:%M") if r.start_time else "--"})
                _S.put("prefect_flows", rows[:8]); _S.put("prefect_ok", True)
        except Exception as e:
            _S.put("prefect_ok", False); _S.put("prefect_err", str(e)[:60])
        await asyncio.sleep(60)
|
||||
|
||||
# ── Helpers ───────────────────────────────────────────────────────────────────
|
||||
def _age(ts):
|
||||
if not ts: return "?"
|
||||
s = time.time() - ts
|
||||
if s < 0: return "0s"
|
||||
if s < 60: return f"{s:.0f}s"
|
||||
if s < 3600: return f"{s/60:.0f}m"
|
||||
return f"{s/3600:.1f}h"
|
||||
|
||||
def _age_col(ts, warn=15, dead=60):
|
||||
s = time.time() - ts if ts else 9999
|
||||
return "red" if s > dead else ("yellow" if s > warn else "green")
|
||||
|
||||
def _bar(v, width=12):
|
||||
v = max(0.0, min(1.0, v))
|
||||
f = round(v * width)
|
||||
return "█" * f + "░" * (width - f)
|
||||
|
||||
def _fmt_vel(v):
|
||||
return "---" if v is None else f"{float(v):+.5f}"
|
||||
|
||||
def _dot(state):
|
||||
s = (state or "").upper()
|
||||
if s == "COMPLETED": return "[green]●[/green]"
|
||||
if s == "RUNNING": return "[cyan]●[/cyan]"
|
||||
if s in ("FAILED","CRASHED","TIMEDOUT"): return "[red]●[/red]"
|
||||
if s == "CANCELLED": return "[dim]●[/dim]"
|
||||
if s == "PENDING": return "[yellow]●[/yellow]"
|
||||
return "[dim]◌[/dim]"
|
||||
|
||||
def _posture_markup(p):
    """Wrap posture name *p* in its _PC color (dim for unknown postures)."""
    color = _PC.get(p, "dim")
    return f"[{color}]{p}[/{color}]"
|
||||
|
||||
def _col(v, c): return f"[{c}]{v}[/{c}]"
|
||||
|
||||
def _eigen_from_scan(scan):
    """Flatten a raw eigen-scan payload into the scalars the TUI renders.

    Tolerates both the nested schema (result.multi_window_results[window])
    and flat legacy keys (w50_velocity, ...); missing values fall back to
    0.0 so rendering never raises.
    NOTE(review): fallbacks use ``or``, so a legitimate 0.0 velocity in the
    nested schema also falls through to the legacy key — presumably
    intentional, confirm against the scan producer.
    """
    if not scan: return {}
    # Some producers wrap the payload under "result"; accept either shape.
    r = scan.get("result", scan)
    mwr_raw = r.get("multi_window_results", {})
    # Window keys may be ints or strings depending on the JSON round-trip.
    def _td(w): return (mwr_raw.get(w) or mwr_raw.get(str(w)) or {}).get("tracking_data", {})
    def _rs(w): return (mwr_raw.get(w) or mwr_raw.get(str(w)) or {}).get("regime_signals", {})
    v50 = _td(50).get("lambda_max_velocity") or scan.get("w50_velocity", 0.0)
    v150 = _td(150).get("lambda_max_velocity") or scan.get("w150_velocity", 0.0)
    v300 = _td(300).get("lambda_max_velocity") or scan.get("w300_velocity", 0.0)
    v750 = _td(750).get("lambda_max_velocity") or scan.get("w750_velocity", 0.0)
    # Velocity divergence: explicit field wins, else fast-minus-slow window.
    vel_div = scan.get("vel_div", float(v50 or 0) - float(v150 or 0))
    inst_avg = sum(_rs(w).get("instability_score", 0.0) for w in (50,150,300,750)) / 4
    bt_price = (r.get("pricing_data", {}) or {}).get("current_prices", {}).get("BTCUSDT")
    return {
        "scan_number": scan.get("scan_number", 0),
        "timestamp": scan.get("timestamp", 0),
        "vel_div": float(vel_div or 0),
        "v50": float(v50 or 0), "v150": float(v150 or 0),
        "v300": float(v300 or 0), "v750": float(v750 or 0),
        "inst_avg": float(inst_avg or 0),
        "btc_price": float(bt_price) if bt_price else None,
        "regime": r.get("regime", r.get("sentiment", "?")),
        "version": scan.get("version", "?"),
    }
|
||||
|
||||
# ── CSS ───────────────────────────────────────────────────────────────────────
|
||||
_CSS = """
|
||||
Screen { background: #0a0a0a; color: #d4d4d4; }
|
||||
#header { height: 2; background: #111; border-bottom: solid #333; padding: 0 1; }
|
||||
#trader_row { height: 5; }
|
||||
#top_row { height: 9; }
|
||||
#mid_row { height: 9; }
|
||||
#bot_row { height: 7; }
|
||||
#log_row { height: 5; display: none; }
|
||||
#mc_outer { height: 16; border: solid #224; background: #060616; }
|
||||
#mc_title { height: 1; padding: 0 1; }
|
||||
#mc_body { height: 15; }
|
||||
#mc_left { width: 18; padding: 0 1; }
|
||||
#mc_center { width: 1fr; padding: 0 1; }
|
||||
#mc_right { width: 30; padding: 0 1; }
|
||||
#mc_prob_label { height: 1; }
|
||||
#mc_prob_bar { height: 1; }
|
||||
#mc_env_label { height: 1; }
|
||||
#mc_env_bar { height: 1; }
|
||||
#mc_champ_label{ height: 1; }
|
||||
#mc_champ_bar { height: 1; }
|
||||
#mc_live { height: 8; }
|
||||
#mc_spark_lbl { height: 1; }
|
||||
#mc_spark { height: 2; }
|
||||
#mc_mae_lbl { height: 1; }
|
||||
#mc_mae_spark { height: 2; }
|
||||
#mc_digits { height: 3; }
|
||||
#mc_status { height: 3; }
|
||||
#mc_legend { height: 6; }
|
||||
#test_footer { height: 3; background: #101010; border-top: solid #2a2a2a; padding: 0 1; }
|
||||
Static.panel { border: solid #333; padding: 0 1; height: 100%; }
|
||||
#p_trader { width: 1fr; border: solid #006650; }
|
||||
#p_health { width: 1fr; }
|
||||
#p_alpha { width: 1fr; }
|
||||
#p_scan { width: 1fr; }
|
||||
#p_extf { width: 1fr; }
|
||||
#p_obf { width: 1fr; }
|
||||
#p_capital { width: 1fr; }
|
||||
#p_prefect { width: 1fr; }
|
||||
#p_acb { width: 1fr; }
|
||||
#p_log { width: 1fr; border: solid #333; padding: 0 1; }
|
||||
ProgressBar > .bar--bar { color: $success; }
|
||||
ProgressBar > .bar--complete { color: $success; }
|
||||
ProgressBar.-danger > .bar--bar { color: $error; }
|
||||
ProgressBar.-warning > .bar--bar { color: $warning; }
|
||||
"""
|
||||
|
||||
|
||||
# ── App ───────────────────────────────────────────────────────────────────────
|
||||
class DolphinTUI(App):
    """Live DOLPHIN observability TUI (v4): renders Hazelcast/Prefect state."""

    CSS = _CSS
    BINDINGS = [("q","quit","Quit"),("r","force_refresh","Refresh"),
                ("l","toggle_log","Log"),("t","toggle_tests","Tests")]
    # UI toggles.
    _log_vis = False; _test_vis = True
    # Rolling histories; allocated in on_mount().
    _prob_hist: deque; _mae_deque: deque
    # Session baselines — set lazily elsewhere (usage not in this view):
    # presumably first-seen capital and running capital peak; confirm in _update.
    _session_start_cap: Optional[float] = None
    _cap_peak: Optional[float] = None
||||
|
||||
    def compose(self) -> ComposeResult:
        """Declare the static widget tree; panel text is filled by _update()."""
        yield Static("", id="header")
        with Horizontal(id="trader_row"):
            yield Static("", classes="panel", id="p_trader")
        with Horizontal(id="top_row"):
            yield Static("", classes="panel", id="p_health")
            yield Static("", classes="panel", id="p_alpha")
            yield Static("", classes="panel", id="p_scan")
        with Horizontal(id="mid_row"):
            yield Static("", classes="panel", id="p_extf")
            yield Static("", classes="panel", id="p_obf")
            yield Static("", classes="panel", id="p_capital")
        with Horizontal(id="bot_row"):
            yield Static("", classes="panel", id="p_prefect")
            yield Static("", classes="panel", id="p_acb")
        # MC-Forewarner footer: digits left, bars center, sparklines/legend right.
        with Vertical(id="mc_outer"):
            yield Static("", id="mc_title")
            with Horizontal(id="mc_body"):
                with Vertical(id="mc_left"):
                    yield Digits("0.000", id="mc_digits")
                    yield Static("", id="mc_status")
                with Vertical(id="mc_center"):
                    yield Static("", id="mc_prob_label")
                    yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_prob_bar")
                    yield Static("", id="mc_env_label")
                    yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_env_bar")
                    yield Static("", id="mc_champ_label")
                    yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_champ_bar")
                    yield Static("", id="mc_live")
                with Vertical(id="mc_right"):
                    yield Static("", id="mc_spark_lbl")
                    yield Sparkline([], id="mc_spark")
                    yield Static("", id="mc_mae_lbl")
                    yield Sparkline([], id="mc_mae_spark")
                    yield Static("", id="mc_legend")
        yield Static("", id="test_footer")
        with Horizontal(id="log_row"):
            yield Static("", classes="panel", id="p_log")
|
||||
    def on_mount(self) -> None:
        """Wire data sources: HZ listener thread, Prefect poller, 1 Hz redraw."""
        self._prob_hist = deque([0.0] * 40, maxlen=40)
        self._mae_deque = deque(maxlen=500)
        # The listener runs in its own thread; scan events are marshalled
        # back onto the UI thread via call_from_thread.
        start_hz_listener(on_scan=lambda: self.call_from_thread(self._update))
        self.run_worker(prefect_poll_loop(), name="prefect-poll", exclusive=True)
        self.set_interval(1.0, self._update)
        self._update()
||||
|
||||
def _update(self) -> None:
    """Refresh every panel from the shared-state snapshot in ``_S``.

    Called on the 1 s interval timer and after each new eigen scan.  It only
    reads cached values (Hazelcast mirrors plus on-disk JSON fallbacks), so
    it is expected to be fast and side-effect free apart from widget updates.
    """
    now_str = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
    hz_up = _S.get("hz_up", False)
    # Hazelcast copies first; fall back to on-disk JSON where one exists.
    mh = _S.get("hz.meta_health") or self._read_json(_META_JSON)
    safe = _S.get("hz.safety") or {}
    scan = _S.get("hz.latest_eigen_scan") or {}
    exf = _S.get("hz.exf_latest") or {}
    acb = _S.get("hz.acb_boost") or {}
    obf_u = _S.get("hz.obf_universe_latest") or {}
    mc = _S.get("hz.mc_forewarner_latest") or {}
    cap = _S.get("hz.capital") or self._read_json(_CAPITAL_JSON) or {}
    hb = _S.get("hz.heartbeat") or {}
    eng = _S.get("hz.engine_snapshot") or {}
    eigen = _eigen_from_scan(scan)

    # ── posture: DOLPHIN_SAFETY first, fall back to engine_snapshot ───────
    posture = safe.get("posture") or eng.get("posture") or "?"
    # ── Rm / breakdown: DOLPHIN_SAFETY first, fall back to zeros ─────────
    rm_s = float(safe.get("Rm", 0.0))
    bd = safe.get("breakdown") or {}
    # If DOLPHIN_SAFETY is empty but engine_snapshot has posture, show that
    safety_live = bool(safe.get("posture") or safe.get("Rm"))

    rm_m = mh.get("rm_meta", 0.0) if mh else 0.0
    mhs_st = mh.get("status", "?") if mh else "?"
    sc_mhs = _SC.get(mhs_st, "dim")
    pc_col = _PC.get(posture, "dim")
    hz_tag = "[green][HZ✓][/green]" if hz_up else "[red][HZ✗][/red]"
    mc_st = mc.get("status", "N/A") if mc else "N/A"
    mc_col = _MC.get(mc_st, "dim")

    # ── HEADER ────────────────────────────────────────────────────────────
    self._w("#header").update(
        f"[bold cyan]🐬 DOLPHIN-NAUTILUS[/bold cyan] {now_str}"
        f" {hz_tag} [{sc_mhs}]MHS:{mhs_st} {rm_m:.3f}[/{sc_mhs}]"
        f" [{pc_col}]◈{posture}[/{pc_col}]"
        f" [{mc_col}]MC:{mc_st}[/{mc_col}]"
        f" [dim]{TUI_VERSION}[/dim]\n"
        f"[dim] localhost:5701 q=quit r=refresh l=log t=tests[/dim]"
    )

    # ── TRADER ────────────────────────────────────────────────────────────
    cap_val = float(cap.get("capital", 0)) if cap else 0.0
    hb_phase = hb.get("phase", "?") if hb else "N/A"
    hb_ts = hb.get("ts") if hb else None
    hb_age = _age(hb_ts) if hb_ts else "?"
    hb_col = _age_col(hb_ts, 30, 120) if hb_ts else "red"
    vel_div = eigen.get("vel_div", 0.0)
    vc = "green" if vel_div > 0 else ("red" if vel_div < -0.02 else "yellow")
    scan_no = eigen.get("scan_number", 0)
    btc_p = eigen.get("btc_price")
    btc_str = f"BTC:[cyan]${btc_p:,.0f}[/cyan] " if btc_p else ""
    trades_ex= eng.get("trades_executed")
    last_vd = eng.get("last_vel_div")
    # The adjacent f-strings concatenate per branch of the conditional below.
    # NOTE(review): the else branch (last_vd is None) omits the leading
    # "TRADER ... phase ... scan" line entirely — confirm that is intended.
    self._w("#p_trader").update(
        f"[bold cyan]TRADER[/bold cyan] {_posture_markup(posture)}"
        f" phase:[{hb_col}]{hb_phase}[/{hb_col}] hb:{_col(hb_age, hb_col)}"
        f" scan:[dim]#{scan_no}[/dim] {btc_str}\n"
        f" vel_div:[{vc}]{vel_div:+.5f}[/{vc}] thr:[dim]-0.02000[/dim]"
        f" cap:[cyan]${cap_val:,.0f}[/cyan]"
        f" trades:{trades_ex if trades_ex is not None else '—'}\n"
        f" last_vel_div:[dim]{last_vd:+.5f}[/dim] open-pos:[dim]DOLPHIN_PNL_BLUE[/dim]"
        if last_vd is not None else
        f" vel_div:[{vc}]{vel_div:+.5f}[/{vc}] thr:[dim]-0.02000[/dim]"
        f" cap:[cyan]${cap_val:,.0f}[/cyan]"
        f" trades:{trades_ex if trades_ex is not None else '—'}\n"
        f" open-pos:[dim]DOLPHIN_PNL_BLUE (not yet wired)[/dim]"
    )

    # ── SYS HEALTH — FIX: expand tdr/scb labels ──────────────────────────
    if mh:
        svc = mh.get("service_status", {})
        hz_ks= mh.get("hz_key_status", {})
        def _svc(nm, label):
            # Green dot only for an exact "RUNNING" status; anything else is red.
            st = svc.get(nm, "?")
            dot = "[green]●[/green]" if st == "RUNNING" else "[red]●[/red]"
            return f"{dot}[dim]{label}[/dim]"
        def _hz_dot(nm):
            # Hazelcast key freshness score → traffic-light dot.
            sc = hz_ks.get(nm, {}).get("score", 0)
            return "[green]●[/green]" if sc >= 0.9 else ("[yellow]●[/yellow]" if sc >= 0.5 else "[red]●[/red]")
        self._w("#p_health").update(
            f"[bold]SYS HEALTH[/bold] [{sc_mhs}]{mhs_st}[/{sc_mhs}]\n"
            f"rm:[{sc_mhs}]{rm_m:.3f}[/{sc_mhs}]"
            f" m4:{mh.get('m4_control_plane',0):.2f}"
            f" m3:{mh.get('m3_data_freshness',0):.2f}"
            f" m5:{mh.get('m5_coherence',0):.2f}\n"
            f"{_svc('dolphin_data:exf_fetcher','exf')}"
            f" {_svc('dolphin_data:acb_processor','acb')}"
            f" {_svc('dolphin_data:obf_universe','obf')}\n"
            f"{_svc('dolphin:nautilus_trader','trader')}"
            f" {_svc('dolphin:scan_bridge','scan-bridge')}\n"
            f"[dim]hz: exf{_hz_dot('exf_latest')}"
            f" scan{_hz_dot('latest_eigen_scan')}"
            f" obf{_hz_dot('obf_universe')}[/dim]"
        )
    else:
        self._w("#p_health").update("[bold]SYS HEALTH[/bold]\n[dim]awaiting MHS…[/dim]")

    # ── ALPHA ENGINE — FIX: fall back to engine_snapshot when safety empty ─
    safe_ts = _S.get("hz.safety._t")
    safe_age = _age(safe_ts) if safe_ts else "?"
    safe_ac = _age_col(safe_ts, 30, 120) if safe_ts else "red"
    def _cat(n):
        # Safety category score colored green/yellow/red by fixed thresholds.
        v = bd.get(f"Cat{n}", 0.0)
        c = "green" if v >= 0.9 else ("yellow" if v >= 0.6 else "red")
        return f"[{c}]{v:.2f}[/{c}]"
    if safety_live:
        self._w("#p_alpha").update(
            f"[bold]ALPHA ENGINE[/bold] {_posture_markup(posture)}\n"
            f"Rm:[{pc_col}]{_bar(rm_s,14)}[/{pc_col}]{rm_s:.3f}\n"
            f"C1:{_cat(1)} C2:{_cat(2)} C3:{_cat(3)}\n"
            f"C4:{_cat(4)} C5:{_cat(5)}"
            f" fenv:{bd.get('f_env',0):.2f} fex:{bd.get('f_exe',0):.2f}\n"
            f"[dim]age:{_col(safe_age, safe_ac)}[/dim]"
        )
    else:
        # DOLPHIN_SAFETY empty — show what we have from engine_snapshot
        bars_idx = eng.get("bar_idx", "?")
        scans_p = eng.get("scans_processed", "?")
        self._w("#p_alpha").update(
            f"[bold]ALPHA ENGINE[/bold] {_posture_markup(posture)}\n"
            f"[yellow]DOLPHIN_SAFETY empty[/yellow]\n"
            f"posture from engine_snapshot\n"
            f"bar:{bars_idx} scans:{scans_p}\n"
            f"[dim]Rm/Cat1-5: awaiting DOLPHIN_SAFETY[/dim]"
        )

    # ── SCAN ──────────────────────────────────────────────────────────────
    scan_ac = _age_col(eigen.get("timestamp", 0), 15, 60)
    scan_age= _age(eigen.get("timestamp")) if eigen.get("timestamp") else "?"
    vi = eigen.get("inst_avg", 0)
    self._w("#p_scan").update(
        f"[bold]SCAN {eigen.get('version','?')}[/bold]"
        f" [dim]#{scan_no}[/dim] age:[{scan_ac}]{scan_age}[/{scan_ac}]\n"
        f"vel_div:[{vc}]{vel_div:+.5f}[/{vc}]\n"
        f"w50:[yellow]{_fmt_vel(eigen.get('v50'))}[/yellow]"
        f" w150:[dim]{_fmt_vel(eigen.get('v150'))}[/dim]\n"
        f"w300:[dim]{_fmt_vel(eigen.get('v300'))}[/dim]"
        f" w750:[dim]{_fmt_vel(eigen.get('v750'))}[/dim]\n"
        f"inst:{vi:.4f}"
    )

    # ── ExtF ──────────────────────────────────────────────────────────────
    exf_t = _S.get("hz.exf_latest._t")
    exf_age = _age(exf_t) if exf_t else "?"
    exf_ac = _age_col(exf_t, 30, 120) if exf_t else "red"
    f_btc = exf.get("funding_btc"); dvol = exf.get("dvol_btc")
    fng = exf.get("fng"); taker= exf.get("taker")
    ls_btc = exf.get("ls_btc"); vix = exf.get("vix")
    ok_cnt = exf.get("_ok_count", 0)
    dvol_c = "red" if dvol and dvol > 70 else ("yellow" if dvol and dvol > 50 else "green")
    fng_c = "red" if fng and fng < 25 else ("yellow" if fng and fng < 45 else "green")
    if exf:
        # NOTE(review): only fng is guarded against None here — if exf exists
        # but funding_btc/dvol/taker/ls_btc/vix are missing, the :.Nf format
        # specs below raise TypeError. Confirm the producer always sets them.
        self._w("#p_extf").update(
            f"[bold]ExtF[/bold] [{exf_ac}]{ok_cnt}/9 {exf_age}[/{exf_ac}]\n"
            f"fund:[cyan]{f_btc:.5f}[/cyan] dvol:[{dvol_c}]{dvol:.1f}[/{dvol_c}]\n"
            f"fng:[{fng_c}]{int(fng) if fng else '?'}[/{fng_c}] taker:[yellow]{taker:.3f}[/yellow]\n"
            f"ls_btc:{ls_btc:.3f} vix:{vix:.1f}\n"
            f"acb✓:{exf.get('_acb_ready','?')}"
        )
    else:
        self._w("#p_extf").update("[bold]ExtF[/bold]\n[dim]no data[/dim]")

    # ── OBF ───────────────────────────────────────────────────────────────
    obf_t = _S.get("hz.obf_universe_latest._t")
    obf_age = _age(obf_t) if obf_t else "?"
    obf_ac = _age_col(obf_t, 30, 120) if obf_t else "red"
    n_assets= obf_u.get("_n_assets", 0) if obf_u else 0
    lines = [f"[bold]OBF[/bold] [{obf_ac}]n={n_assets} {obf_age}[/{obf_ac}]"]
    for sym in _OBF_SYMS:
        if not obf_u: break
        a = obf_u.get(sym)
        if not a: continue
        imb = float(a.get("imbalance", 0))
        fp = float(a.get("fill_probability", 0))
        dq = float(a.get("depth_quality", 0))
        imb_c = "green" if imb > 0.1 else ("red" if imb < -0.1 else "yellow")
        lines.append(f"{sym[:3]} [{imb_c}]{imb:+.2f}[/{imb_c}] fp:{fp:.2f} dq:{dq:.2f}")
    self._w("#p_obf").update("\n".join(lines[:6]))

    # ── CAPITAL ───────────────────────────────────────────────────────────
    cap_t = _S.get("hz.capital._t")
    cap_ac = _age_col(cap_t, 60, 300) if cap_t else "dim"
    cap_age= _age(cap_t) if cap_t else "?"
    c5 = bd.get("Cat5", 1.0) if bd else 1.0
    try:
        # Inverse-sigmoid estimate of drawdown from the Cat5 safety score;
        # NOTE(review): 0.12 / 30.0 appear to be calibration constants — confirm.
        dd_est = 0.12 + math.log(1.0/c5 - 1.0) / 30.0 if 0 < c5 < 1 else 0.0
    except Exception:
        dd_est = 0.0
    dd_c = "red" if dd_est > 0.15 else ("yellow" if dd_est > 0.08 else "green")
    self._w("#p_capital").update(
        f"[bold]CAPITAL[/bold] [{cap_ac}]{cap_age}[/{cap_ac}]\n"
        f"Cap:[cyan]${cap_val:,.0f}[/cyan]\n"
        f"DD≈:[{dd_c}]{dd_est*100:.1f}%[/{dd_c}] C5:{c5:.3f}\n"
        f"Pos:{_posture_markup(posture)}\n"
        f"[dim]pnl/trades: DOLPHIN_PNL_BLUE[/dim]"
    )

    # ── PREFECT ───────────────────────────────────────────────────────────
    flows = _S.get("prefect_flows") or []
    pf_ok = _S.get("prefect_ok", False)
    pf_hdr = "[green]✓[/green]" if pf_ok else "[red]✗[/red]"
    flines = "\n".join(
        f"{_dot(f['state'])} {f['name'][:22]:<22} {f['ts']}" for f in flows[:5])
    self._w("#p_prefect").update(
        f"[bold]PREFECT[/bold] {pf_hdr}\n" + (flines or "[dim]polling…[/dim]"))

    # ── ACB ───────────────────────────────────────────────────────────────
    acb_t = _S.get("hz.acb_boost._t")
    acb_age = _age(acb_t) if acb_t else "?"
    acb_ac = _age_col(acb_t, 3600, 86400) if acb_t else "dim"
    boost = acb.get("boost", 1.0) if acb else 1.0
    beta = acb.get("beta", 0.8) if acb else 0.8
    cut = acb.get("cut", 0.0) if acb else 0.0
    boost_c = "green" if boost >= 1.5 else ("yellow" if boost >= 1.0 else "red")
    cut_c = "red" if cut > 0 else "dim"
    self._w("#p_acb").update(
        f"[bold]ACB[/bold] [{acb_ac}]{acb.get('date','?') if acb else '?'}[/{acb_ac}]\n"
        f"boost:[{boost_c}]{boost:.2f}x[/{boost_c}] β={beta:.2f}"
        f" cut:[{cut_c}]{cut:.2f}[/{cut_c}]\n"
        f"w750_thr:{acb.get('w750_threshold',0.0) if acb else 0.0:.4f}\n"
        f"[dim]dvol={acb.get('factors',{}).get('dvol_btc','?') if acb else '?'}"
        f" fng={acb.get('factors',{}).get('fng','?') if acb else '?'}[/dim]\n"
        f"[dim]age:{acb_age} cfg:{acb.get('config_used','?') if acb else '?'}[/dim]"
    )

    # ── MC-FOREWARNER — FIX: graceful when absent ─────────────────────────
    prob = float(mc.get("catastrophic_prob", 0.0)) if mc else 0.0
    env = float(mc.get("envelope_score", 0.0)) if mc else 0.0
    champ_p = mc.get("champion_probability") if mc else None
    mc_ts = mc.get("timestamp") if mc else None
    mc_warns = mc.get("warnings", []) if mc else []
    sc = _MC.get(mc_st, "dim")
    self._prob_hist.append(prob)

    # Age since last 4h run
    mc_age_str = "never run"
    if mc_ts:
        try:
            # Accept both "Z"-suffixed and offset-aware ISO timestamps.
            mc_dt = datetime.fromisoformat(mc_ts.replace("Z", "+00:00"))
            age_s = (datetime.now(timezone.utc) - mc_dt).total_seconds()
            age_m = int(age_s // 60)
            mc_age_str = f"{age_m//60}h{age_m%60:02d}m ago" if age_m >= 60 else f"{age_m}m ago"
        except Exception: pass

    mc_present = bool(mc)
    self._w("#mc_title").update(
        f"[bold cyan]⚡ MC-FOREWARNER RISK MANIFOLD[/bold cyan] {TUI_VERSION}"
        + (f" [{sc}]▶ {mc_st}[/{sc}] [dim]assessed:{mc_age_str} cadence:4h[/dim]"
           if mc_present else
           " [yellow]⚠ no data yet — Prefect 4h schedule not yet run[/yellow]"
           " [dim](mc_forewarner_flow runs every 4h)[/dim]")
    )

    # Left: digits + status
    self._w("#mc_digits", Digits).update(f"{prob:.3f}")
    status_str = {"GREEN":"🟢 SAFE","ORANGE":"🟡 CAUTION","RED":"🔴 DANGER"}.get(mc_st, "⚪ N/A")
    champ_str = f"champ:{champ_p*100:.0f}%" if champ_p is not None else "champ:—"
    self._w("#mc_status").update(
        (f"[{sc}]{status_str}[/{sc}]\n[dim]cat.prob {champ_str}[/dim]\n[dim]env:{env:.3f}[/dim]")
        if mc_present else
        "[yellow]awaiting[/yellow]\n[dim]first run[/dim]\n[dim]in ~4h[/dim]"
    )

    # Center bars — `fmt` is the 0–100 integer progress value, despite the name.
    for bar_id, val, lo_thr, hi_thr, lo_cls, hi_cls, label, fmt in [
        ("mc_prob_bar", prob, 0.10, 0.30, "-warning", "-danger",
         f"[dim]cat.prob[/dim] [{sc}]{prob:.4f}[/{sc}] [green]<0.10 OK[/green] [yellow]<0.30 WARN[/yellow] [red]≥0.30 CRIT[/red]",
         int(prob * 100)),
        ("mc_env_bar", env, 0.40, 0.70, "-danger", "-warning",
         f"[dim]env.score[/dim] [green]{env:.4f}[/green] [red]<0.40 DANGER[/red] [yellow]<0.70 CAUTION[/yellow] [green]≥0.70 SAFE[/green]",
         int(env * 100)),
    ]:
        pb = self._w(f"#{bar_id}", ProgressBar)
        pb.progress = fmt
        # Reset then re-apply the CSS state class for the current threshold band.
        pb.remove_class("-danger", "-warning")
        if val < lo_thr: pb.add_class(lo_cls)
        elif val < hi_thr: pb.add_class(hi_cls)
        self._w(f"#{bar_id.replace('_bar','_label')}").update(label)

    # champion_probability bar
    chp_val = champ_p if champ_p is not None else 0.0
    cb = self._w("#mc_champ_bar", ProgressBar)
    cb.progress = int(chp_val * 100)
    cb.remove_class("-danger", "-warning")
    if chp_val < 0.30: cb.add_class("-danger")
    elif chp_val < 0.60: cb.add_class("-warning")
    self._w("#mc_champ_label").update(
        f"[dim]champ.prob[/dim] "
        + (f"[green]{champ_p*100:.1f}%[/green]" if champ_p is not None else "[dim]—[/dim]")
        + " [green]>60% GOOD[/green] [yellow]>30% MARGINAL[/yellow] [red]<30% RISK[/red]"
    )

    # Live performance tier — session ROI/drawdown tracked against the first
    # nonzero capital value seen and its running peak.
    cur_cap = float(cap.get("capital", 0.0)) if cap else 0.0
    if cur_cap > 0:
        if self._session_start_cap is None: self._session_start_cap = cur_cap
        if self._cap_peak is None or cur_cap > self._cap_peak: self._cap_peak = cur_cap
    live_roi = ((cur_cap - self._session_start_cap) / self._session_start_cap
                if cur_cap > 0 and self._session_start_cap else None)
    live_dd = ((self._cap_peak - cur_cap) / self._cap_peak
               if cur_cap > 0 and self._cap_peak and cur_cap < self._cap_peak else None)
    pnl_blue = _S.get("hz.pnl_blue") or {}
    def _pct(v): return f"{v*100:+.1f}%" if v is not None else "—"
    def _lm(k, fmt="{:.3f}"):
        # Format a live metric from DOLPHIN_PNL_BLUE, em-dash when absent.
        v = pnl_blue.get(k); return fmt.format(v) if v is not None else "—"
    roi_c = "green" if live_roi and live_roi > 0 else ("red" if live_roi and live_roi < -0.10 else "yellow")
    dd_c2 = "red" if live_dd and live_dd > 0.20 else ("yellow" if live_dd and live_dd > 0.08 else "green")
    # Conditional over concatenated f-strings: full block vs. placeholder.
    self._w("#mc_live").update(
        f"[dim]ROI[/dim] [{roi_c}]{_pct(live_roi):>8}[/{roi_c}]"
        f" [dim]champ gate:>+30% crit:<-30%[/dim]\n"
        f"[dim]DD [/dim] [{dd_c2}]{_pct(live_dd):>8}[/{dd_c2}]"
        f" [dim]champ gate:<20% crit:>40%[/dim]\n"
        f"[dim]WR:{_lm('win_rate','{:.1%}')} PF:{_lm('profit_factor','{:.2f}')}"
        f" Sh:{_lm('sharpe','{:.2f}')} Cal:{_lm('calmar','{:.2f}')}[/dim]\n"
        f"[dim]cap:${cur_cap:,.0f} start:${self._session_start_cap:,.0f} "
        f"trades:{eng.get('trades_executed','—')}[/dim]"
        if cur_cap > 0 and self._session_start_cap else
        "[dim]awaiting capital data…[/dim]"
    )

    # Right: sparklines + legend
    self._w("#mc_spark_lbl").update(
        f"[dim]cat.prob history [{min(self._prob_hist):.3f}–{max(self._prob_hist):.3f}][/dim]")
    self._w("#mc_spark", Sparkline).data = list(self._prob_hist)
    mae_list = list(self._mae_deque)
    self._w("#mc_mae_lbl").update(f"[dim]MAE hist (n={len(mae_list)})[/dim]")
    self._w("#mc_mae_spark", Sparkline).data = mae_list[-40:] if mae_list else [0.0]
    warn_str = ("\n[yellow]⚠ " + mc_warns[0] + "[/yellow]") if mc_warns else ""
    self._w("#mc_legend").update(
        "[bold]MC THRESHOLDS[/bold]\n"
        "[green]GREEN[/green] cat < 0.10\n"
        "[yellow]ORANGE[/yellow] cat < 0.30\n"
        "[red]RED[/red] cat ≥ 0.30\n"
        "[dim]DD gate: <20%[/dim]\n"
        "[dim]DD crit: >40%[/dim]" + warn_str
    )

    # ── TEST FOOTER ───────────────────────────────────────────────────────
    if self._test_vis:
        tr = self._read_json(_TEST_JSON) or {}
        def _tr_badge(cat):
            # Badge per test category: pass/total colored by failure count.
            info = tr.get(cat, {})
            if not info: return f"[dim]{cat[:12]}:n/a[/dim]"
            p, f = info.get("passed",0), info.get("failed",0)
            c = "green" if f == 0 else ("yellow" if f <= 2 else "red")
            return f"[{c}]{cat[:10]}:{p}/{p+f}[/{c}][dim]@{info.get('ts','?')[:10]}[/dim]"
        cats = ["data_integrity","finance_fuzz","signal_fill","degradation","actor"]
        self._w("#test_footer").update(
            f"[bold dim]TESTS[/bold dim] [dim]last:{tr.get('_run_at','never')}[/dim]\n"
            f"{' '.join(_tr_badge(c) for c in cats)}\n"
            f"[dim]update: prod/run_logs/test_results_latest.json[/dim]"
        )
    else:
        self._w("#test_footer").update("")

    # ── LOG ───────────────────────────────────────────────────────────────
    if self._log_vis:
        self._w("#p_log").update(
            f"[bold]LOG[/bold] (l=hide)\n"
            f"[dim]{now_str}[/dim] scan=#{scan_no} vel={vel_div:+.5f}\n"
            f"hz_err:{_S.get('hz_err','none')} pf_err:{_S.get('prefect_err','none')}\n"
            f"[dim]state keys:{len(_S._d)} safety_live:{safety_live}[/dim]"
        )
|
||||
# Key binding "r": redraw every panel immediately.
def action_force_refresh(self) -> None: self._update()
|
||||
def action_toggle_log(self) -> None:
    """Key binding "l": show/hide the bottom log row."""
    self._log_vis = not self._log_vis
    self.query_one("#log_row").display = self._log_vis
|
||||
def action_toggle_tests(self) -> None:
    """Key binding "t": toggle the test-results footer and repaint."""
    self._test_vis = not self._test_vis; self._update()
|
||||
|
||||
def _w(self, selector, widget_type=Static):
    """Shorthand for query_one: look up a widget by CSS selector and type."""
    return self.query_one(selector, widget_type)
|
||||
|
||||
@staticmethod
|
||||
def _read_json(path):
|
||||
try: return json.loads(path.read_text())
|
||||
except Exception: return None
|
||||
|
||||
|
||||
# Script entry point: launch the Textual app (blocks until the user quits).
if __name__ == "__main__":
    DolphinTUI().run()
|
||||
740
Observability/TUI/dolphin_tui_v5.py
Executable file
740
Observability/TUI/dolphin_tui_v5.py
Executable file
@@ -0,0 +1,740 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
DOLPHIN TUI v5
==============
Fixes vs v3:
• SYS HEALTH: tdr/scb labels expanded to full service names
• ALPHA ENGINE: falls back to engine_snapshot when DOLPHIN_SAFETY is empty
• MC-FOREWARNER: graceful "not yet run" display; shows last-run age; no crash on absent key
• Version number shown in header after MC status

Run: source /home/dolphin/siloqy_env/bin/activate && python dolphin_tui_v5.py
Keys: q=quit r=force-refresh l=toggle log t=toggle test footer
"""
|
||||
|
||||
# ── stdlib ────────────────────────────────────────────────────────────────────
|
||||
import asyncio, json, math, threading, time
|
||||
from collections import deque
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
# ── third-party ───────────────────────────────────────────────────────────────
|
||||
try:
|
||||
from textual.app import App, ComposeResult
|
||||
from textual.containers import Horizontal, Vertical
|
||||
from textual.widgets import Digits, ProgressBar, Sparkline, Static
|
||||
except ImportError as e:
|
||||
raise SystemExit(f"textual not found — activate siloqy_env: {e}")
|
||||
|
||||
try:
|
||||
import hazelcast
|
||||
_HZ_OK = True
|
||||
except ImportError:
|
||||
_HZ_OK = False
|
||||
|
||||
# Version string shown in the header and MC-manifold title.
TUI_VERSION = "TUI v5"

# Filesystem fallbacks, read when the corresponding Hazelcast key is absent.
_META_JSON = Path("/mnt/dolphinng5_predict/run_logs/meta_health.json")
_CAPITAL_JSON = Path("/tmp/dolphin_capital_checkpoint.json")
# Path relative to this file: Observability/TUI/ → ../../run_logs/
_TEST_JSON = Path(__file__).parent.parent.parent / "run_logs" / "test_results_latest.json"
# Symbols rendered in the OBF panel, in display order.
_OBF_SYMS = ["BTCUSDT", "ETHUSDT", "SOLUSDT", "BNBUSDT", "XRPUSDT"]

# Rich color maps: trading posture (_PC), MC-forewarner status (_MC),
# meta-health status (_SC). Unknown keys render as "dim".
_PC = {"APEX":"green","STALKER":"yellow","TURTLE":"dark_orange","HIBERNATE":"red"}
_MC = {"GREEN":"green","ORANGE":"dark_orange","RED":"red"}
_SC = {"GREEN":"green","DEGRADED":"yellow","CRITICAL":"dark_orange","DEAD":"red"}
|
||||
|
||||
|
||||
# ── Thread-safe state ─────────────────────────────────────────────────────────
|
||||
class _State:
|
||||
def __init__(self):
|
||||
self._l = threading.Lock(); self._d: Dict[str, Any] = {}
|
||||
def put(self, k, v):
|
||||
with self._l: self._d[k] = v
|
||||
def get(self, k, default=None):
|
||||
with self._l: return self._d.get(k, default)
|
||||
def update(self, m):
|
||||
with self._l: self._d.update(m)
|
||||
|
||||
_S = _State()
|
||||
|
||||
def _ingest(key, raw):
    # Decode a Hazelcast JSON payload into shared state under "hz.<key>",
    # stamping "hz.<key>._t" with the local receive time (used for the
    # age/freshness displays). Best-effort by design: empty or malformed
    # payloads are dropped silently so a bad message never kills a listener.
    if not raw: return
    try: _S.update({f"hz.{key}": json.loads(raw), f"hz.{key}._t": time.time()})
    except Exception: pass
|
||||
|
||||
def start_hz_listener(on_scan=None):
    """Start a daemon thread that mirrors the DOLPHIN_* Hazelcast maps into _S.

    Seeds each key once, then subscribes entry listeners so later updates are
    pushed. Reconnects with a 10 s backoff after any failure, flagging
    "hz_up"/"hz_err" in shared state along the way. *on_scan*, if given, is
    called after every new eigen scan (the TUI uses it to force a repaint).
    No-op when the hazelcast client library is not installed.
    """
    if not _HZ_OK:
        _S.put("hz_up", False); return
    def _run():
        while True:  # outer loop = reconnect loop
            try:
                _S.put("hz_up", False)
                client = hazelcast.HazelcastClient(
                    cluster_name="dolphin", cluster_members=["localhost:5701"],
                    connection_timeout=5.0)
                _S.put("hz_up", True)
                # Feature map: seed current values, then listen for changes.
                fm = client.get_map("DOLPHIN_FEATURES").blocking()
                for k in ("latest_eigen_scan","exf_latest","acb_boost",
                          "obf_universe_latest","mc_forewarner_latest"):
                    _ingest(k, fm.get(k))
                def _f(e):
                    _ingest(e.key, e.value)
                    if e.key == "latest_eigen_scan" and on_scan:
                        try: on_scan()
                        except Exception: pass
                fm.add_entry_listener(include_value=True, updated=_f, added=_f)
                # Single-key maps: (map, key in that map, name under _S "hz.").
                for map_name, key, state_key in [
                    ("DOLPHIN_META_HEALTH", "latest", "meta_health"),
                    ("DOLPHIN_SAFETY", "latest", "safety"),
                    ("DOLPHIN_HEARTBEAT", "nautilus_flow_heartbeat", "heartbeat"),
                ]:
                    m = client.get_map(map_name).blocking()
                    _ingest(state_key, m.get(key))
                    # Defaults bind sk/ek per iteration (late-binding closure fix).
                    def _cb(e, sk=state_key, ek=key):
                        if e.key == ek: _ingest(sk, e.value)
                    m.add_entry_listener(include_value=True, updated=_cb, added=_cb)
                # Engine state (capital checkpoint + snapshot).
                stm = client.get_map("DOLPHIN_STATE_BLUE").blocking()
                _ingest("capital", stm.get("capital_checkpoint"))
                _ingest("engine_snapshot", stm.get("engine_snapshot"))
                def _cap(e):
                    if e.key == "capital_checkpoint": _ingest("capital", e.value)
                    elif e.key == "engine_snapshot": _ingest("engine_snapshot", e.value)
                stm.add_entry_listener(include_value=True, updated=_cap, added=_cap)
                # PnL map may not exist yet — best-effort subscription.
                try:
                    pm = client.get_map("DOLPHIN_PNL_BLUE").blocking()
                    _ingest("pnl_blue", pm.get("session_perf"))
                    def _pnl(e):
                        if e.key == "session_perf": _ingest("pnl_blue", e.value)
                    pm.add_entry_listener(include_value=True, updated=_pnl, added=_pnl)
                except Exception: pass
                # Idle watchdog: poll the client lifecycle; break to reconnect.
                while True:
                    time.sleep(5)
                    if not getattr(client.lifecycle_service, "is_running", lambda: True)():
                        break
            except Exception as e:
                _S.put("hz_up", False); _S.put("hz_err", str(e)); time.sleep(10)
    threading.Thread(target=_run, daemon=True, name="hz-listener").start()
|
||||
|
||||
async def prefect_poll_loop():
    """Poll the Prefect API every 60 s; cache the latest run per flow in _S.

    Imports prefect lazily inside the loop so a missing/broken installation
    degrades to "prefect_ok: False" instead of crashing the TUI. Publishes
    "prefect_flows" (list of {name, state, ts} rows) and "prefect_ok".
    """
    while True:
        try:
            from prefect.client.orchestration import get_client
            from prefect.client.schemas.sorting import FlowRunSort
            async with get_client() as pc:
                runs = await pc.read_flow_runs(limit=20, sort=FlowRunSort.START_TIME_DESC)
                seen: Dict[str, Any] = {}
                fid_to_name: Dict[str, str] = {}
                # Runs come newest-first, so first hit per flow_id = latest run.
                for r in runs:
                    fid = str(r.flow_id)
                    if fid not in seen: seen[fid] = r
                rows = []
                for fid, r in seen.items():
                    if fid not in fid_to_name:
                        try:
                            f = await pc.read_flow(r.flow_id)
                            fid_to_name[fid] = f.name
                        except Exception: fid_to_name[fid] = fid[:8]
                    rows.append({"name": fid_to_name[fid],
                                 "state": r.state_name or "?",
                                 "ts": r.start_time.strftime("%m-%d %H:%M") if r.start_time else "--"})
                _S.put("prefect_flows", rows[:8]); _S.put("prefect_ok", True)
        except Exception as e:
            _S.put("prefect_ok", False); _S.put("prefect_err", str(e)[:60])
        await asyncio.sleep(60)
|
||||
|
||||
# ── Helpers ───────────────────────────────────────────────────────────────────
|
||||
def _age(ts):
|
||||
if not ts: return "?"
|
||||
s = time.time() - ts
|
||||
if s < 0: return "0s"
|
||||
if s < 60: return f"{s:.0f}s"
|
||||
if s < 3600: return f"{s/60:.0f}m"
|
||||
return f"{s/3600:.1f}h"
|
||||
|
||||
def _age_col(ts, warn=15, dead=60):
|
||||
s = time.time() - ts if ts else 9999
|
||||
return "red" if s > dead else ("yellow" if s > warn else "green")
|
||||
|
||||
def _bar(v, width=12):
|
||||
v = max(0.0, min(1.0, v))
|
||||
f = round(v * width)
|
||||
return "█" * f + "░" * (width - f)
|
||||
|
||||
def _fmt_vel(v):
|
||||
return "---" if v is None else f"{float(v):+.5f}"
|
||||
|
||||
def _dot(state):
|
||||
s = (state or "").upper()
|
||||
if s == "COMPLETED": return "[green]●[/green]"
|
||||
if s == "RUNNING": return "[cyan]●[/cyan]"
|
||||
if s in ("FAILED","CRASHED","TIMEDOUT"): return "[red]●[/red]"
|
||||
if s == "CANCELLED": return "[dim]●[/dim]"
|
||||
if s == "PENDING": return "[yellow]●[/yellow]"
|
||||
return "[dim]◌[/dim]"
|
||||
|
||||
def _posture_markup(p):
    """Wrap posture name *p* in its Rich color tag ("dim" when unknown)."""
    color = _PC.get(p, "dim")
    return "[" + color + "]" + str(p) + "[/" + color + "]"
|
||||
|
||||
def _col(v, c): return f"[{c}]{v}[/{c}]"
|
||||
|
||||
def _eigen_from_scan(scan):
|
||||
if not scan: return {}
|
||||
r = scan.get("result", scan)
|
||||
mwr_raw = r.get("multi_window_results", {})
|
||||
def _td(w): return (mwr_raw.get(w) or mwr_raw.get(str(w)) or {}).get("tracking_data", {})
|
||||
def _rs(w): return (mwr_raw.get(w) or mwr_raw.get(str(w)) or {}).get("regime_signals", {})
|
||||
v50 = _td(50).get("lambda_max_velocity") or scan.get("w50_velocity", 0.0)
|
||||
v150 = _td(150).get("lambda_max_velocity") or scan.get("w150_velocity", 0.0)
|
||||
v300 = _td(300).get("lambda_max_velocity") or scan.get("w300_velocity", 0.0)
|
||||
v750 = _td(750).get("lambda_max_velocity") or scan.get("w750_velocity", 0.0)
|
||||
vel_div = scan.get("vel_div", float(v50 or 0) - float(v150 or 0))
|
||||
inst_avg = sum(_rs(w).get("instability_score", 0.0) for w in (50,150,300,750)) / 4
|
||||
bt_price = (r.get("pricing_data", {}) or {}).get("current_prices", {}).get("BTCUSDT")
|
||||
return {
|
||||
"scan_number": scan.get("scan_number", 0),
|
||||
"timestamp": scan.get("timestamp", 0),
|
||||
"vel_div": float(vel_div or 0),
|
||||
"v50": float(v50 or 0), "v150": float(v150 or 0),
|
||||
"v300": float(v300 or 0), "v750": float(v750 or 0),
|
||||
"inst_avg": float(inst_avg or 0),
|
||||
"btc_price": float(bt_price) if bt_price else None,
|
||||
"regime": r.get("regime", r.get("sentiment", "?")),
|
||||
"version": scan.get("version", "?"),
|
||||
}
|
||||
|
||||
# ── CSS ───────────────────────────────────────────────────────────────────────
|
||||
_CSS = """
|
||||
Screen { background: #0a0a0a; color: #d4d4d4; }
|
||||
#header { height: 2; background: #111; border-bottom: solid #333; padding: 0 1; }
|
||||
#trader_row { height: 5; }
|
||||
#top_row { height: 9; }
|
||||
#mid_row { height: 9; }
|
||||
#bot_row { height: 7; }
|
||||
#log_row { height: 5; display: none; }
|
||||
#mc_outer { height: 16; border: solid #224; background: #060616; }
|
||||
#mc_title { height: 1; padding: 0 1; }
|
||||
#mc_body { height: 15; }
|
||||
#mc_left { width: 18; padding: 0 1; }
|
||||
#mc_center { width: 1fr; padding: 0 1; }
|
||||
#mc_right { width: 30; padding: 0 1; }
|
||||
#mc_prob_label { height: 1; }
|
||||
#mc_prob_bar { height: 1; }
|
||||
#mc_env_label { height: 1; }
|
||||
#mc_env_bar { height: 1; }
|
||||
#mc_champ_label{ height: 1; }
|
||||
#mc_champ_bar { height: 1; }
|
||||
#mc_live { height: 8; }
|
||||
#mc_spark_lbl { height: 1; }
|
||||
#mc_spark { height: 2; }
|
||||
#mc_mae_lbl { height: 1; }
|
||||
#mc_mae_spark { height: 2; }
|
||||
#mc_digits { height: 3; }
|
||||
#mc_status { height: 3; }
|
||||
#mc_legend { height: 6; }
|
||||
#test_footer { height: 3; background: #101010; border-top: solid #2a2a2a; padding: 0 1; }
|
||||
Static.panel { border: solid #333; padding: 0 1; height: 100%; }
|
||||
#p_trader { width: 1fr; border: solid #006650; }
|
||||
#p_health { width: 1fr; }
|
||||
#p_alpha { width: 1fr; }
|
||||
#p_scan { width: 1fr; }
|
||||
#p_extf { width: 1fr; }
|
||||
#p_obf { width: 1fr; }
|
||||
#p_capital { width: 1fr; }
|
||||
#p_prefect { width: 1fr; }
|
||||
#p_acb { width: 1fr; }
|
||||
#p_log { width: 1fr; border: solid #333; padding: 0 1; }
|
||||
ProgressBar > .bar--bar { color: $success; }
|
||||
ProgressBar > .bar--complete { color: $success; }
|
||||
ProgressBar.-danger > .bar--bar { color: $error; }
|
||||
ProgressBar.-warning > .bar--bar { color: $warning; }
|
||||
"""
|
||||
|
||||
|
||||
# ── App ───────────────────────────────────────────────────────────────────────
|
||||
class DolphinTUI(App):
|
||||
CSS = _CSS
|
||||
BINDINGS = [("q","quit","Quit"),("r","force_refresh","Refresh"),
|
||||
("l","toggle_log","Log"),("t","toggle_tests","Tests")]
|
||||
_log_vis = False; _test_vis = True
|
||||
_prob_hist: deque; _mae_deque: deque
|
||||
_session_start_cap: Optional[float] = None
|
||||
_cap_peak: Optional[float] = None
|
||||
|
||||
def compose(self) -> ComposeResult:
    """Build the static widget tree; all content is filled later by _update().

    Layout: header, trader row, three 3-panel rows, the MC-forewarner
    manifold (digits | bars/live stats | sparklines/legend), the test
    footer, and a hidden log row toggled with the "l" key.
    """
    yield Static("", id="header")
    with Horizontal(id="trader_row"):
        yield Static("", classes="panel", id="p_trader")
    with Horizontal(id="top_row"):
        yield Static("", classes="panel", id="p_health")
        yield Static("", classes="panel", id="p_alpha")
        yield Static("", classes="panel", id="p_scan")
    with Horizontal(id="mid_row"):
        yield Static("", classes="panel", id="p_extf")
        yield Static("", classes="panel", id="p_obf")
        yield Static("", classes="panel", id="p_capital")
    with Horizontal(id="bot_row"):
        yield Static("", classes="panel", id="p_prefect")
        yield Static("", classes="panel", id="p_acb")
    with Vertical(id="mc_outer"):
        yield Static("", id="mc_title")
        with Horizontal(id="mc_body"):
            with Vertical(id="mc_left"):
                yield Digits("0.000", id="mc_digits")
                yield Static("", id="mc_status")
            with Vertical(id="mc_center"):
                yield Static("", id="mc_prob_label")
                yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_prob_bar")
                yield Static("", id="mc_env_label")
                yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_env_bar")
                yield Static("", id="mc_champ_label")
                yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_champ_bar")
                yield Static("", id="mc_live")
            with Vertical(id="mc_right"):
                yield Static("", id="mc_spark_lbl")
                yield Sparkline([], id="mc_spark")
                yield Static("", id="mc_mae_lbl")
                yield Sparkline([], id="mc_mae_spark")
                yield Static("", id="mc_legend")
    yield Static("", id="test_footer")
    with Horizontal(id="log_row"):
        yield Static("", classes="panel", id="p_log")
|
||||
|
||||
def on_mount(self) -> None:
    """Wire data sources and start the 1 Hz refresh cycle."""
    self._prob_hist = deque([0.0] * 40, maxlen=40)
    self._mae_deque = deque(maxlen=500)
    # Hazelcast listener thread pushes an immediate refresh on each scan;
    # call_from_thread marshals it back onto the Textual event loop.
    start_hz_listener(on_scan=lambda: self.call_from_thread(self._update))
    self.run_worker(prefect_poll_loop(), name="prefect-poll", exclusive=True)
    self.set_interval(1.0, self._update)
    self._update()
|
||||
|
||||
def _update(self) -> None:
    """Redraw every panel from the current shared-state snapshot.

    Runs once a second (set_interval), on each eigen-scan event, and on
    the 'r' key. Every data source is read best-effort: each Hazelcast
    key falls back to a JSON file or an empty dict so a missing producer
    never crashes the TUI.
    """
    now_str = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
    hz_up = _S.get("hz_up", False)
    mh    = _S.get("hz.meta_health") or self._read_json(_META_JSON)
    safe  = _S.get("hz.safety") or {}
    scan  = _S.get("hz.latest_eigen_scan") or {}
    exf   = _S.get("hz.exf_latest") or {}
    acb   = _S.get("hz.acb_boost") or {}
    obf_u = _S.get("hz.obf_universe_latest") or {}
    mc    = _S.get("hz.mc_forewarner_latest") or {}
    cap   = _S.get("hz.capital") or self._read_json(_CAPITAL_JSON) or {}
    hb    = _S.get("hz.heartbeat") or {}
    eng   = _S.get("hz.engine_snapshot") or {}
    eigen = _eigen_from_scan(scan)

    # ── posture: DOLPHIN_SAFETY first, fall back to engine_snapshot ───────
    posture = safe.get("posture") or eng.get("posture") or "?"
    # ── Rm / breakdown: DOLPHIN_SAFETY first, fall back to zeros ─────────
    rm_s = float(safe.get("Rm", 0.0))
    bd   = safe.get("breakdown") or {}
    # If DOLPHIN_SAFETY is empty but engine_snapshot has posture, show that
    safety_live = bool(safe.get("posture") or safe.get("Rm"))

    rm_m   = mh.get("rm_meta", 0.0) if mh else 0.0
    mhs_st = mh.get("status", "?") if mh else "?"
    sc_mhs = _SC.get(mhs_st, "dim")
    pc_col = _PC.get(posture, "dim")
    hz_tag = "[green][HZ✓][/green]" if hz_up else "[red][HZ✗][/red]"
    mc_st  = mc.get("status", "N/A") if mc else "N/A"
    mc_col = _MC.get(mc_st, "dim")

    # ── HEADER ────────────────────────────────────────────────────────────
    self._w("#header").update(
        f"[bold cyan]🐬 DOLPHIN-NAUTILUS[/bold cyan] {now_str}"
        f" {hz_tag} [{sc_mhs}]MHS:{mhs_st} {rm_m:.3f}[/{sc_mhs}]"
        f" [{pc_col}]◈{posture}[/{pc_col}]"
        f" [{mc_col}]MC:{mc_st}[/{mc_col}]"
        f" [dim]{TUI_VERSION}[/dim]\n"
        f"[dim] localhost:5701 q=quit r=refresh l=log t=tests[/dim]"
    )

    # ── TRADER ────────────────────────────────────────────────────────────
    cap_val  = float(cap.get("capital", 0)) if cap else 0.0
    hb_phase = hb.get("phase", "?") if hb else "N/A"
    hb_ts    = hb.get("ts") if hb else None
    hb_age   = _age(hb_ts) if hb_ts else "?"
    hb_col   = _age_col(hb_ts, 30, 120) if hb_ts else "red"
    vel_div  = eigen.get("vel_div", 0.0)
    vc       = "green" if vel_div > 0 else ("red" if vel_div < -0.02 else "yellow")
    scan_no  = eigen.get("scan_number", 0)
    btc_p    = eigen.get("btc_price")
    btc_str  = f"BTC:[cyan]${btc_p:,.0f}[/cyan] " if btc_p else ""
    trades_ex = eng.get("trades_executed")
    last_vd   = eng.get("last_vel_div")
    # FIX: the previous implementation combined implicit f-string
    # concatenation with a conditional expression; because concatenation
    # binds tighter than the ternary, the entire header line (TRADER /
    # phase / hb / scan / BTC) was swallowed into the "last_vd is not None"
    # branch and disappeared whenever last_vel_div was absent. Render the
    # common lines unconditionally and vary only the tail line.
    trader_tail = (
        f" last_vel_div:[dim]{last_vd:+.5f}[/dim] open-pos:[dim]DOLPHIN_PNL_BLUE[/dim]"
        if last_vd is not None else
        f" open-pos:[dim]DOLPHIN_PNL_BLUE (not yet wired)[/dim]"
    )
    self._w("#p_trader").update(
        f"[bold cyan]TRADER[/bold cyan] {_posture_markup(posture)}"
        f" phase:[{hb_col}]{hb_phase}[/{hb_col}] hb:{_col(hb_age, hb_col)}"
        f" scan:[dim]#{scan_no}[/dim] {btc_str}\n"
        f" vel_div:[{vc}]{vel_div:+.5f}[/{vc}] thr:[dim]-0.02000[/dim]"
        f" cap:[cyan]${cap_val:,.0f}[/cyan]"
        f" trades:{trades_ex if trades_ex is not None else '—'}\n"
        + trader_tail
    )

    # ── SYS HEALTH — expanded tdr/scb labels ─────────────────────────────
    if mh:
        svc   = mh.get("service_status", {})
        hz_ks = mh.get("hz_key_status", {})
        def _svc(nm, label):
            # Green dot only when the supervisor reports the unit RUNNING.
            st  = svc.get(nm, "?")
            dot = "[green]●[/green]" if st == "RUNNING" else "[red]●[/red]"
            return f"{dot}[dim]{label}[/dim]"
        def _hz_dot(nm):
            # Hazelcast key freshness score → traffic-light dot.
            sc = hz_ks.get(nm, {}).get("score", 0)
            return "[green]●[/green]" if sc >= 0.9 else ("[yellow]●[/yellow]" if sc >= 0.5 else "[red]●[/red]")
        self._w("#p_health").update(
            f"[bold]SYS HEALTH[/bold] [{sc_mhs}]{mhs_st}[/{sc_mhs}]\n"
            f"rm:[{sc_mhs}]{rm_m:.3f}[/{sc_mhs}]"
            f" m4:{mh.get('m4_control_plane',0):.2f}"
            f" m3:{mh.get('m3_data_freshness',0):.2f}"
            f" m5:{mh.get('m5_coherence',0):.2f}\n"
            f"{_svc('dolphin_data:exf_fetcher','exf')}"
            f" {_svc('dolphin_data:acb_processor','acb')}"
            f" {_svc('dolphin_data:obf_universe','obf')}\n"
            f"{_svc('dolphin:nautilus_trader','trader')}"
            f" {_svc('dolphin:scan_bridge','scan-bridge')}\n"
            f"[dim]hz: exf{_hz_dot('exf_latest')}"
            f" scan{_hz_dot('latest_eigen_scan')}"
            f" obf{_hz_dot('obf_universe')}[/dim]"
        )
    else:
        self._w("#p_health").update("[bold]SYS HEALTH[/bold]\n[dim]awaiting MHS…[/dim]")

    # ── ALPHA ENGINE — fall back to engine_snapshot when safety empty ─────
    safe_ts  = _S.get("hz.safety._t")
    safe_age = _age(safe_ts) if safe_ts else "?"
    safe_ac  = _age_col(safe_ts, 30, 120) if safe_ts else "red"
    def _cat(n):
        # Colour-code one safety category score (Cat1..Cat5).
        v = bd.get(f"Cat{n}", 0.0)
        c = "green" if v >= 0.9 else ("yellow" if v >= 0.6 else "red")
        return f"[{c}]{v:.2f}[/{c}]"
    if safety_live:
        self._w("#p_alpha").update(
            f"[bold]ALPHA ENGINE[/bold] {_posture_markup(posture)}\n"
            f"Rm:[{pc_col}]{_bar(rm_s,14)}[/{pc_col}]{rm_s:.3f}\n"
            f"C1:{_cat(1)} C2:{_cat(2)} C3:{_cat(3)}\n"
            f"C4:{_cat(4)} C5:{_cat(5)}"
            f" fenv:{bd.get('f_env',0):.2f} fex:{bd.get('f_exe',0):.2f}\n"
            f"[dim]age:{_col(safe_age, safe_ac)}[/dim]"
        )
    else:
        # DOLPHIN_SAFETY empty — show what we have from engine_snapshot
        bars_idx = eng.get("bar_idx", "?")
        scans_p  = eng.get("scans_processed", "?")
        self._w("#p_alpha").update(
            f"[bold]ALPHA ENGINE[/bold] {_posture_markup(posture)}\n"
            f"[yellow]DOLPHIN_SAFETY empty[/yellow]\n"
            f"posture from engine_snapshot\n"
            f"bar:{bars_idx} scans:{scans_p}\n"
            f"[dim]Rm/Cat1-5: awaiting DOLPHIN_SAFETY[/dim]"
        )

    # ── SCAN ──────────────────────────────────────────────────────────────
    scan_ac  = _age_col(eigen.get("timestamp", 0), 15, 60)
    scan_age = _age(eigen.get("timestamp")) if eigen.get("timestamp") else "?"
    vi       = eigen.get("inst_avg", 0)
    self._w("#p_scan").update(
        f"[bold]SCAN {eigen.get('version','?')}[/bold]"
        f" [dim]#{scan_no}[/dim] age:[{scan_ac}]{scan_age}[/{scan_ac}]\n"
        f"vel_div:[{vc}]{vel_div:+.5f}[/{vc}]\n"
        f"w50:[yellow]{_fmt_vel(eigen.get('v50'))}[/yellow]"
        f" w150:[dim]{_fmt_vel(eigen.get('v150'))}[/dim]\n"
        f"w300:[dim]{_fmt_vel(eigen.get('v300'))}[/dim]"
        f" w750:[dim]{_fmt_vel(eigen.get('v750'))}[/dim]\n"
        f"inst:{vi:.4f}"
    )

    # ── ExtF ──────────────────────────────────────────────────────────────
    exf_t   = _S.get("hz.exf_latest._t")
    exf_age = _age(exf_t) if exf_t else "?"
    exf_ac  = _age_col(exf_t, 30, 120) if exf_t else "red"
    f_btc  = exf.get("funding_btc"); dvol  = exf.get("dvol_btc")
    fng    = exf.get("fng");         taker = exf.get("taker")
    ls_btc = exf.get("ls_btc");      vix   = exf.get("vix")
    ok_cnt = exf.get("_ok_count", 0)
    dvol_c = "red" if dvol and dvol > 70 else ("yellow" if dvol and dvol > 50 else "green")
    fng_c  = "red" if fng and fng < 25 else ("yellow" if fng and fng < 45 else "green")
    if exf:
        # NOTE(review): individual fields may still be None here even when
        # exf is non-empty; the float formats below assume the producer
        # fills them — confirm against the EsoF producer.
        self._w("#p_extf").update(
            f"[bold]ExtF[/bold] [{exf_ac}]{ok_cnt}/9 {exf_age}[/{exf_ac}]\n"
            f"fund:[cyan]{f_btc:.5f}[/cyan] dvol:[{dvol_c}]{dvol:.1f}[/{dvol_c}]\n"
            f"fng:[{fng_c}]{int(fng) if fng else '?'}[/{fng_c}] taker:[yellow]{taker:.3f}[/yellow]\n"
            f"ls_btc:{ls_btc:.3f} vix:{vix:.1f}\n"
            f"acb✓:{exf.get('_acb_ready','?')}"
        )
    else:
        self._w("#p_extf").update("[bold]ExtF[/bold]\n[dim]no data[/dim]")

    # ── OBF ───────────────────────────────────────────────────────────────
    obf_t    = _S.get("hz.obf_universe_latest._t")
    obf_age  = _age(obf_t) if obf_t else "?"
    obf_ac   = _age_col(obf_t, 30, 120) if obf_t else "red"
    n_assets = obf_u.get("_n_assets", 0) if obf_u else 0
    lines = [f"[bold]OBF[/bold] [{obf_ac}]n={n_assets} {obf_age}[/{obf_ac}]"]
    for sym in _OBF_SYMS:
        if not obf_u: break
        a = obf_u.get(sym)
        if not a: continue
        imb = float(a.get("imbalance", 0))
        fp  = float(a.get("fill_probability", 0))
        dq  = float(a.get("depth_quality", 0))
        imb_c = "green" if imb > 0.1 else ("red" if imb < -0.1 else "yellow")
        lines.append(f"{sym[:3]} [{imb_c}]{imb:+.2f}[/{imb_c}] fp:{fp:.2f} dq:{dq:.2f}")
    self._w("#p_obf").update("\n".join(lines[:6]))

    # ── CAPITAL ───────────────────────────────────────────────────────────
    cap_t   = _S.get("hz.capital._t")
    cap_ac  = _age_col(cap_t, 60, 300) if cap_t else "dim"
    cap_age = _age(cap_t) if cap_t else "?"
    c5      = bd.get("Cat5", 1.0) if bd else 1.0
    try:
        # Invert the Cat5 score into a rough drawdown estimate
        # (logit-style mapping) — presumably matches the producer; confirm.
        dd_est = 0.12 + math.log(1.0/c5 - 1.0) / 30.0 if 0 < c5 < 1 else 0.0
    except Exception:
        dd_est = 0.0
    dd_c = "red" if dd_est > 0.15 else ("yellow" if dd_est > 0.08 else "green")
    self._w("#p_capital").update(
        f"[bold]CAPITAL[/bold] [{cap_ac}]{cap_age}[/{cap_ac}]\n"
        f"Cap:[cyan]${cap_val:,.0f}[/cyan]\n"
        f"DD≈:[{dd_c}]{dd_est*100:.1f}%[/{dd_c}] C5:{c5:.3f}\n"
        f"Pos:{_posture_markup(posture)}\n"
        f"[dim]pnl/trades: DOLPHIN_PNL_BLUE[/dim]"
    )

    # ── PREFECT ───────────────────────────────────────────────────────────
    flows  = _S.get("prefect_flows") or []
    pf_ok  = _S.get("prefect_ok", False)
    pf_hdr = "[green]✓[/green]" if pf_ok else "[red]✗[/red]"
    flines = "\n".join(
        f"{_dot(f['state'])} {f['name'][:22]:<22} {f['ts']}" for f in flows[:5])
    self._w("#p_prefect").update(
        f"[bold]PREFECT[/bold] {pf_hdr}\n" + (flines or "[dim]polling…[/dim]"))

    # ── ACB ───────────────────────────────────────────────────────────────
    acb_t   = _S.get("hz.acb_boost._t")
    acb_age = _age(acb_t) if acb_t else "?"
    acb_ac  = _age_col(acb_t, 3600, 86400) if acb_t else "dim"
    boost   = acb.get("boost", 1.0) if acb else 1.0
    beta    = acb.get("beta", 0.8) if acb else 0.8
    cut     = acb.get("cut", 0.0) if acb else 0.0
    boost_c = "green" if boost >= 1.5 else ("yellow" if boost >= 1.0 else "red")
    cut_c   = "red" if cut > 0 else "dim"
    self._w("#p_acb").update(
        f"[bold]ACB[/bold] [{acb_ac}]{acb.get('date','?') if acb else '?'}[/{acb_ac}]\n"
        f"boost:[{boost_c}]{boost:.2f}x[/{boost_c}] β={beta:.2f}"
        f" cut:[{cut_c}]{cut:.2f}[/{cut_c}]\n"
        f"w750_thr:{acb.get('w750_threshold',0.0) if acb else 0.0:.4f}\n"
        f"[dim]dvol={acb.get('factors',{}).get('dvol_btc','?') if acb else '?'}"
        f" fng={acb.get('factors',{}).get('fng','?') if acb else '?'}[/dim]\n"
        f"[dim]age:{acb_age} cfg:{acb.get('config_used','?') if acb else '?'}[/dim]"
    )

    # ── MC-FOREWARNER — graceful when absent ─────────────────────────────
    prob     = float(mc.get("catastrophic_prob", 0.0)) if mc else 0.0
    env      = float(mc.get("envelope_score", 0.0)) if mc else 0.0
    champ_p  = mc.get("champion_probability") if mc else None
    mc_ts    = mc.get("timestamp") if mc else None
    mc_warns = mc.get("warnings", []) if mc else []
    sc = _MC.get(mc_st, "dim")
    self._prob_hist.append(prob)

    # Age since last 4h run
    mc_age_str = "never run"
    if mc_ts:
        try:
            mc_dt = datetime.fromisoformat(mc_ts.replace("Z", "+00:00"))
            age_s = (datetime.now(timezone.utc) - mc_dt).total_seconds()
            age_m = int(age_s // 60)
            mc_age_str = f"{age_m//60}h{age_m%60:02d}m ago" if age_m >= 60 else f"{age_m}m ago"
        except Exception: pass

    mc_present = bool(mc)
    self._w("#mc_title").update(
        f"[bold cyan]⚡ MC-FOREWARNER RISK MANIFOLD[/bold cyan] {TUI_VERSION}"
        + (f" [{sc}]▶ {mc_st}[/{sc}] [dim]assessed:{mc_age_str} cadence:4h[/dim]"
           if mc_present else
           " [yellow]⚠ no data yet — Prefect 4h schedule not yet run[/yellow]"
           " [dim](mc_forewarner_flow runs every 4h)[/dim]")
    )

    # Left: digits + status
    self._w("#mc_digits", Digits).update(f"{prob:.3f}")
    status_str = {"GREEN":"🟢 SAFE","ORANGE":"🟡 CAUTION","RED":"🔴 DANGER"}.get(mc_st, "⚪ N/A")
    champ_str  = f"champ:{champ_p*100:.0f}%" if champ_p is not None else "champ:—"
    self._w("#mc_status").update(
        (f"[{sc}]{status_str}[/{sc}]\n[dim]cat.prob {champ_str}[/dim]\n[dim]env:{env:.3f}[/dim]")
        if mc_present else
        "[yellow]awaiting[/yellow]\n[dim]first run[/dim]\n[dim]in ~4h[/dim]"
    )

    # Center bars: (id, value, low threshold, high threshold, class below
    # low, class between, label markup, progress int).
    for bar_id, val, lo_thr, hi_thr, lo_cls, hi_cls, label, fmt in [
        ("mc_prob_bar", prob, 0.10, 0.30, "-warning", "-danger",
         f"[dim]cat.prob[/dim] [{sc}]{prob:.4f}[/{sc}] [green]<0.10 OK[/green] [yellow]<0.30 WARN[/yellow] [red]≥0.30 CRIT[/red]",
         int(prob * 100)),
        ("mc_env_bar", env, 0.40, 0.70, "-danger", "-warning",
         f"[dim]env.score[/dim] [green]{env:.4f}[/green] [red]<0.40 DANGER[/red] [yellow]<0.70 CAUTION[/yellow] [green]≥0.70 SAFE[/green]",
         int(env * 100)),
    ]:
        pb = self._w(f"#{bar_id}", ProgressBar)
        pb.progress = fmt
        pb.remove_class("-danger", "-warning")
        if val < lo_thr: pb.add_class(lo_cls)
        elif val < hi_thr: pb.add_class(hi_cls)
        self._w(f"#{bar_id.replace('_bar','_label')}").update(label)

    # champion_probability bar
    chp_val = champ_p if champ_p is not None else 0.0
    cb = self._w("#mc_champ_bar", ProgressBar)
    cb.progress = int(chp_val * 100)
    cb.remove_class("-danger", "-warning")
    if chp_val < 0.30: cb.add_class("-danger")
    elif chp_val < 0.60: cb.add_class("-warning")
    self._w("#mc_champ_label").update(
        f"[dim]champ.prob[/dim] "
        + (f"[green]{champ_p*100:.1f}%[/green]" if champ_p is not None else "[dim]—[/dim]")
        + " [green]>60% GOOD[/green] [yellow]>30% MARGINAL[/yellow] [red]<30% RISK[/red]"
    )

    # Live performance tier: session ROI / drawdown from capital checkpoints.
    cur_cap = float(cap.get("capital", 0.0)) if cap else 0.0
    if cur_cap > 0:
        if self._session_start_cap is None: self._session_start_cap = cur_cap
        if self._cap_peak is None or cur_cap > self._cap_peak: self._cap_peak = cur_cap
    live_roi = ((cur_cap - self._session_start_cap) / self._session_start_cap
                if cur_cap > 0 and self._session_start_cap else None)
    live_dd  = ((self._cap_peak - cur_cap) / self._cap_peak
                if cur_cap > 0 and self._cap_peak and cur_cap < self._cap_peak else None)
    pnl_blue = _S.get("hz.pnl_blue") or {}
    def _pct(v): return f"{v*100:+.1f}%" if v is not None else "—"
    def _lm(k, fmt="{:.3f}"):
        # Format a pnl metric, or an em-dash when it is missing.
        v = pnl_blue.get(k); return fmt.format(v) if v is not None else "—"
    roi_c = "green" if live_roi and live_roi > 0 else ("red" if live_roi and live_roi < -0.10 else "yellow")
    dd_c2 = "red" if live_dd and live_dd > 0.20 else ("yellow" if live_dd and live_dd > 0.08 else "green")
    self._w("#mc_live").update(
        f"[dim]ROI[/dim] [{roi_c}]{_pct(live_roi):>8}[/{roi_c}]"
        f" [dim]champ gate:>+30% crit:<-30%[/dim]\n"
        f"[dim]DD [/dim] [{dd_c2}]{_pct(live_dd):>8}[/{dd_c2}]"
        f" [dim]champ gate:<20% crit:>40%[/dim]\n"
        f"[dim]WR:{_lm('win_rate','{:.1%}')} PF:{_lm('profit_factor','{:.2f}')}"
        f" Sh:{_lm('sharpe','{:.2f}')} Cal:{_lm('calmar','{:.2f}')}[/dim]\n"
        f"[dim]cap:${cur_cap:,.0f} start:${self._session_start_cap:,.0f} "
        f"trades:{eng.get('trades_executed','—')}[/dim]"
        if cur_cap > 0 and self._session_start_cap else
        "[dim]awaiting capital data…[/dim]"
    )

    # Right: sparklines + legend
    self._w("#mc_spark_lbl").update(
        f"[dim]cat.prob history [{min(self._prob_hist):.3f}–{max(self._prob_hist):.3f}][/dim]")
    self._w("#mc_spark", Sparkline).data = list(self._prob_hist)
    mae_list = list(self._mae_deque)
    self._w("#mc_mae_lbl").update(f"[dim]MAE hist (n={len(mae_list)})[/dim]")
    self._w("#mc_mae_spark", Sparkline).data = mae_list[-40:] if mae_list else [0.0]
    warn_str = ("\n[yellow]⚠ " + mc_warns[0] + "[/yellow]") if mc_warns else ""
    self._w("#mc_legend").update(
        "[bold]MC THRESHOLDS[/bold]\n"
        "[green]GREEN[/green] cat < 0.10\n"
        "[yellow]ORANGE[/yellow] cat < 0.30\n"
        "[red]RED[/red] cat ≥ 0.30\n"
        "[dim]DD gate: <20%[/dim]\n"
        "[dim]DD crit: >40%[/dim]" + warn_str
    )

    # ── TEST FOOTER — schema: {passed, total, status: PASS|FAIL|N/A} ──────
    if self._test_vis:
        tr = self._read_json(_TEST_JSON) or {}
        run_at = tr.get("_run_at", "never")
        cats = [
            ("data_integrity", "data"),
            ("finance_fuzz", "fuzz"),
            ("signal_fill", "signal"),
            ("degradation", "degrad"),
            ("actor", "actor"),
        ]
        def _badge(key, short):
            # One coloured pass/fail badge per test category.
            info = tr.get(key, {})
            if not info:
                return f"[dim]{short}:n/a[/dim]"
            status = info.get("status", "N/A")
            passed = info.get("passed")
            total  = info.get("total")
            if status == "N/A" or passed is None:
                return f"[dim]{short}:N/A[/dim]"
            col = "green" if status == "PASS" else "red"
            return f"[{col}]{short}:{passed}/{total}[/{col}]"
        badges = " ".join(_badge(k, s) for k, s in cats)
        self._w("#test_footer").update(
            f"[bold dim]TESTS[/bold dim] [dim]last run: {run_at}[/dim]"
            f" [dim]t=toggle r=reload[/dim]\n"
            f"{badges}\n"
            f"[dim]file: run_logs/test_results_latest.json "
            f"API: write_test_results() in dolphin_tui_v5.py[/dim]"
        )
    else:
        self._w("#test_footer").update("")

    # ── LOG ───────────────────────────────────────────────────────────────
    if self._log_vis:
        self._w("#p_log").update(
            f"[bold]LOG[/bold] (l=hide)\n"
            f"[dim]{now_str}[/dim] scan=#{scan_no} vel={vel_div:+.5f}\n"
            f"hz_err:{_S.get('hz_err','none')} pf_err:{_S.get('prefect_err','none')}\n"
            f"[dim]state keys:{len(_S._d)} safety_live:{safety_live}[/dim]"
        )
|
||||
|
||||
def action_force_refresh(self) -> None:
    """Key 'r': redraw all panels immediately."""
    self._update()
|
||||
def action_toggle_log(self) -> None:
    """Key 'l': show or hide the log row."""
    show = not self._log_vis
    self._log_vis = show
    self.query_one("#log_row").display = show
|
||||
def action_toggle_tests(self) -> None:
    """Key 't': show or hide the test footer, then redraw."""
    self._test_vis = not self._test_vis
    self._update()
|
||||
|
||||
def _w(self, selector, widget_type=Static):
    """Shorthand for query_one with a default widget type of Static."""
    return self.query_one(selector, widget_type)
|
||||
|
||||
@staticmethod
def _read_json(path):
    """Best-effort JSON load; returns None if the file is missing or invalid."""
    try:
        return json.loads(path.read_text())
    except Exception:
        return None
|
||||
|
||||
|
||||
def write_test_results(results: dict):
    """
    Update the TUI test footer. Called by test scripts / CI / conftest.py.

    Schema:
        {
          "_run_at": "auto-injected",
          "data_integrity": {"passed": 15, "total": 15, "status": "PASS"},
          "finance_fuzz": {"passed": null, "total": null, "status": "N/A"},
          ...
        }
        status: "PASS" | "FAIL" | "N/A"

    Example (conftest.py):
        import sys; sys.path.insert(0, "/mnt/dolphinng5_predict/Observability/TUI")
        from dolphin_tui_v5 import write_test_results
        write_test_results({"data_integrity": {"passed": 15, "total": 15, "status": "PASS"}})
    """
    _TEST_JSON.parent.mkdir(parents=True, exist_ok=True)
    # Merge with existing file so missing categories are preserved
    try:
        merged = json.loads(_TEST_JSON.read_text())
    except Exception:
        merged = {}
    merged.update(results)
    merged["_run_at"] = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S")
    _TEST_JSON.write_text(json.dumps(merged, indent=2))
|
||||
|
||||
|
||||
# Script entry point: launch the Textual app.
if __name__ == "__main__":
    DolphinTUI().run()
|
||||
777
Observability/TUI/dolphin_tui_v6.py
Executable file
777
Observability/TUI/dolphin_tui_v6.py
Executable file
@@ -0,0 +1,777 @@
|
||||
#!/usr/bin/env python3
"""
DOLPHIN TUI v6
==============
Fixes vs earlier versions:
  • SYS HEALTH: tdr/scb labels expanded to full service names
  • ALPHA ENGINE: falls back to engine_snapshot when DOLPHIN_SAFETY is empty
  • MC-FOREWARNER: graceful "not yet run" display; shows last-run age; no crash on absent key
  • Version number shown in header after MC status

Run:  source /home/dolphin/siloqy_env/bin/activate && python dolphin_tui_v6.py
Keys: q=quit  r=force-refresh  l=toggle log  t=toggle test footer
"""
|
||||
|
||||
# ── stdlib ────────────────────────────────────────────────────────────────────
|
||||
import asyncio, json, math, threading, time
|
||||
from collections import deque
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
# ── third-party ───────────────────────────────────────────────────────────────
|
||||
try:
|
||||
from textual.app import App, ComposeResult
|
||||
from textual.containers import Horizontal, Vertical
|
||||
from textual.widgets import Digits, ProgressBar, Sparkline, Static
|
||||
except ImportError as e:
|
||||
raise SystemExit(f"textual not found — activate siloqy_env: {e}")
|
||||
|
||||
try:
|
||||
import hazelcast
|
||||
_HZ_OK = True
|
||||
except ImportError:
|
||||
_HZ_OK = False
|
||||
|
||||
# Version tag rendered in the header and MC title.
TUI_VERSION = "TUI v6"

# File fallbacks used when the corresponding Hazelcast keys are absent.
_META_JSON = Path("/mnt/dolphinng5_predict/run_logs/meta_health.json")
_CAPITAL_JSON = Path("/tmp/dolphin_capital_checkpoint.json")
# Path relative to this file: Observability/TUI/ → ../../run_logs/
_TEST_JSON = Path(__file__).parent.parent.parent / "run_logs" / "test_results_latest.json"
# Symbols shown in the OBF panel, in display order.
_OBF_SYMS = ["BTCUSDT", "ETHUSDT", "SOLUSDT", "BNBUSDT", "XRPUSDT"]

# Colour maps: trading posture, MC status, meta-health status.
_PC = {"APEX":"green","STALKER":"yellow","TURTLE":"dark_orange","HIBERNATE":"red"}
_MC = {"GREEN":"green","ORANGE":"dark_orange","RED":"red"}
_SC = {"GREEN":"green","DEGRADED":"yellow","CRITICAL":"dark_orange","DEAD":"red"}
|
||||
|
||||
|
||||
# ── Thread-safe state ─────────────────────────────────────────────────────────
class _State:
    """Minimal lock-guarded key/value store shared between the Hazelcast
    listener thread, the Prefect poller, and the UI thread."""

    def __init__(self):
        self._l = threading.Lock()
        self._d: Dict[str, Any] = {}

    def put(self, k, v):
        """Store one key under the lock."""
        with self._l:
            self._d[k] = v

    def get(self, k, default=None):
        """Read one key under the lock."""
        with self._l:
            return self._d.get(k, default)

    def update(self, m):
        """Merge a mapping in atomically."""
        with self._l:
            self._d.update(m)


_S = _State()
|
||||
|
||||
def _ingest(key, raw):
    """Parse a raw Hazelcast JSON payload into _S under 'hz.<key>',
    recording an arrival timestamp alongside it. Silently ignores
    empty or unparseable payloads."""
    if not raw:
        return
    try:
        parsed = json.loads(raw)
    except Exception:
        return
    _S.update({f"hz.{key}": parsed, f"hz.{key}._t": time.time()})
|
||||
|
||||
def start_hz_listener(on_scan=None):
    """Start a daemon thread that mirrors Hazelcast maps into _S.

    on_scan: optional callback fired whenever latest_eigen_scan changes
    (used to push an immediate UI refresh). The thread reconnects forever
    on any failure, flagging availability via the 'hz_up' state key.
    """
    if not _HZ_OK:
        _S.put("hz_up", False); return
    def _run():
        while True:
            try:
                _S.put("hz_up", False)
                client = hazelcast.HazelcastClient(
                    cluster_name="dolphin", cluster_members=["localhost:5701"],
                    connection_timeout=5.0)
                _S.put("hz_up", True)
                # DOLPHIN_FEATURES: seed current values, then listen.
                fm = client.get_map("DOLPHIN_FEATURES").blocking()
                for k in ("latest_eigen_scan","exf_latest","acb_boost",
                          "obf_universe_latest","mc_forewarner_latest"):
                    _ingest(k, fm.get(k))
                def _f(e):
                    _ingest(e.key, e.value)
                    if e.key == "latest_eigen_scan" and on_scan:
                        # Callback failures must not kill the listener thread.
                        try: on_scan()
                        except Exception: pass
                fm.add_entry_listener(include_value=True, updated=_f, added=_f)
                # Single-key maps: meta-health, safety, heartbeat.
                for map_name, key, state_key in [
                    ("DOLPHIN_META_HEALTH", "latest", "meta_health"),
                    ("DOLPHIN_SAFETY", "latest", "safety"),
                    ("DOLPHIN_HEARTBEAT", "nautilus_flow_heartbeat", "heartbeat"),
                ]:
                    m = client.get_map(map_name).blocking()
                    _ingest(state_key, m.get(key))
                    # Loop vars bound as defaults so each callback keeps its own key.
                    def _cb(e, sk=state_key, ek=key):
                        if e.key == ek: _ingest(sk, e.value)
                    m.add_entry_listener(include_value=True, updated=_cb, added=_cb)
                # BLUE engine state: capital checkpoint + engine snapshot.
                stm = client.get_map("DOLPHIN_STATE_BLUE").blocking()
                _ingest("capital", stm.get("capital_checkpoint"))
                _ingest("engine_snapshot", stm.get("engine_snapshot"))
                def _cap(e):
                    if e.key == "capital_checkpoint": _ingest("capital", e.value)
                    elif e.key == "engine_snapshot": _ingest("engine_snapshot", e.value)
                stm.add_entry_listener(include_value=True, updated=_cap, added=_cap)
                # PnL map may not exist yet — best-effort.
                try:
                    pm = client.get_map("DOLPHIN_PNL_BLUE").blocking()
                    _ingest("pnl_blue", pm.get("session_perf"))
                    def _pnl(e):
                        if e.key == "session_perf": _ingest("pnl_blue", e.value)
                    pm.add_entry_listener(include_value=True, updated=_pnl, added=_pnl)
                except Exception: pass
                # Park here; drop out to reconnect if the client stops running.
                while True:
                    time.sleep(5)
                    if not getattr(client.lifecycle_service, "is_running", lambda: True)():
                        break
            except Exception as e:
                _S.put("hz_up", False); _S.put("hz_err", str(e)); time.sleep(10)
    threading.Thread(target=_run, daemon=True, name="hz-listener").start()
|
||||
|
||||
async def prefect_poll_loop():
    """Poll the Prefect API once a minute for the newest run of each flow.

    Results land in _S under 'prefect_flows' / 'prefect_ok'; errors are
    recorded (truncated) under 'prefect_err' without stopping the loop.
    """
    while True:
        try:
            # Imported lazily so the TUI still starts without Prefect installed.
            from prefect.client.orchestration import get_client
            from prefect.client.schemas.sorting import FlowRunSort
            async with get_client() as pc:
                runs = await pc.read_flow_runs(limit=20, sort=FlowRunSort.START_TIME_DESC)
                seen: Dict[str, Any] = {}
                fid_to_name: Dict[str, str] = {}
                # Keep only the most recent run per flow id (runs are
                # already sorted newest-first).
                for r in runs:
                    fid = str(r.flow_id)
                    if fid not in seen: seen[fid] = r
                rows = []
                for fid, r in seen.items():
                    if fid not in fid_to_name:
                        try:
                            f = await pc.read_flow(r.flow_id)
                            fid_to_name[fid] = f.name
                        except Exception: fid_to_name[fid] = fid[:8]
                    rows.append({"name": fid_to_name[fid],
                                 "state": r.state_name or "?",
                                 "ts": r.start_time.strftime("%m-%d %H:%M") if r.start_time else "--"})
                _S.put("prefect_flows", rows[:8]); _S.put("prefect_ok", True)
        except Exception as e:
            _S.put("prefect_ok", False); _S.put("prefect_err", str(e)[:60])
        await asyncio.sleep(60)
|
||||
|
||||
# ── Helpers ───────────────────────────────────────────────────────────────────
|
||||
def _age(ts):
    """Render the age of a unix timestamp as '<n>s'/'<n>m'/'<n.n>h',
    or '?' when the timestamp is missing/zero."""
    if not ts:
        return "?"
    elapsed = time.time() - ts
    if elapsed < 0:
        return "0s"
    if elapsed < 60:
        return f"{elapsed:.0f}s"
    return f"{elapsed / 60:.0f}m" if elapsed < 3600 else f"{elapsed / 3600:.1f}h"
|
||||
|
||||
def _age_col(ts, warn=15, dead=60):
    """Colour by staleness: green=fresh, yellow=past warn seconds,
    red=past dead seconds or timestamp absent."""
    elapsed = (time.time() - ts) if ts else 9999
    if elapsed > dead:
        return "red"
    return "yellow" if elapsed > warn else "green"
|
||||
|
||||
def _bar(v, width=12):
    """Render v (clamped to [0, 1]) as a fixed-width block-character gauge."""
    clamped = min(1.0, max(0.0, v))
    filled = round(clamped * width)
    return "█" * filled + "░" * (width - filled)
|
||||
|
||||
def _fmt_vel(v):
    """Format a velocity with an explicit sign; '---' when missing."""
    if v is None:
        return "---"
    return f"{float(v):+.5f}"
|
||||
|
||||
def _dot(state):
    """Map a Prefect run-state name (case-insensitive) to a coloured dot."""
    colours = {
        "COMPLETED": "[green]●[/green]",
        "RUNNING":   "[cyan]●[/cyan]",
        "FAILED":    "[red]●[/red]",
        "CRASHED":   "[red]●[/red]",
        "TIMEDOUT":  "[red]●[/red]",
        "CANCELLED": "[dim]●[/dim]",
        "PENDING":   "[yellow]●[/yellow]",
    }
    return colours.get((state or "").upper(), "[dim]◌[/dim]")
|
||||
|
||||
def _posture_markup(p):
    """Wrap a posture name in its colour from _PC ('dim' when unknown)."""
    colour = _PC.get(p, "dim")
    return f"[{colour}]{p}[/{colour}]"
|
||||
|
||||
def _col(v, c):
    """Wrap a value in Rich colour markup."""
    return f"[{c}]{v}[/{c}]"
|
||||
|
||||
def _eigen_from_scan(scan):
    """Flatten a raw eigen-scan payload into the flat dict the panels use.

    Handles both envelope shapes ({'result': {...}} or the result itself)
    and window keys stored as ints or strings. Returns {} for an empty scan.
    """
    if not scan:
        return {}
    r = scan.get("result", scan)
    mwr = r.get("multi_window_results", {})

    def _win(w):
        # Window keys may be ints or strings depending on the producer.
        return mwr.get(w) or mwr.get(str(w)) or {}

    def _track(w):
        return _win(w).get("tracking_data", {})

    def _sig(w):
        return _win(w).get("regime_signals", {})

    # Per-window velocities; legacy flat keys are the fallback.
    vels = {}
    for w in (50, 150, 300, 750):
        vels[w] = _track(w).get("lambda_max_velocity") or scan.get(f"w{w}_velocity", 0.0)
    vel_div = scan.get("vel_div", float(vels[50] or 0) - float(vels[150] or 0))
    inst_avg = sum(_sig(w).get("instability_score", 0.0) for w in (50, 150, 300, 750)) / 4
    btc = (r.get("pricing_data", {}) or {}).get("current_prices", {}).get("BTCUSDT")
    return {
        "scan_number": scan.get("scan_number", 0),
        "timestamp": scan.get("timestamp", 0),
        "vel_div": float(vel_div or 0),
        "v50": float(vels[50] or 0), "v150": float(vels[150] or 0),
        "v300": float(vels[300] or 0), "v750": float(vels[750] or 0),
        "inst_avg": float(inst_avg or 0),
        "btc_price": float(btc) if btc else None,
        "regime": r.get("regime", r.get("sentiment", "?")),
        "version": scan.get("version", "?"),
    }
|
||||
|
||||
# ── CSS ───────────────────────────────────────────────────────────────────────
# Fixed-height rows; the MC manifold gets its own bordered region.
# ProgressBar colour is switched at runtime by toggling the
# -danger / -warning classes from _update().
_CSS = """
Screen { background: #0a0a0a; color: #d4d4d4; }
#header { height: 2; background: #111; border-bottom: solid #333; padding: 0 1; }
#trader_row { height: 5; }
#top_row { height: 9; }
#mid_row { height: 9; }
#bot_row { height: 7; }
#log_row { height: 5; display: none; }
#mc_outer { height: 16; border: solid #224; background: #060616; }
#mc_title { height: 1; padding: 0 1; }
#mc_body { height: 15; }
#mc_left { width: 18; padding: 0 1; }
#mc_center { width: 1fr; padding: 0 1; }
#mc_right { width: 30; padding: 0 1; }
#mc_prob_label { height: 1; }
#mc_prob_bar { height: 1; }
#mc_env_label { height: 1; }
#mc_env_bar { height: 1; }
#mc_champ_label{ height: 1; }
#mc_champ_bar { height: 1; }
#mc_live { height: 8; }
#mc_spark_lbl { height: 1; }
#mc_spark { height: 2; }
#mc_mae_lbl { height: 1; }
#mc_mae_spark { height: 2; }
#mc_digits { height: 3; }
#mc_status { height: 3; }
#mc_legend { height: 6; }
#test_footer { height: 3; background: #101010; border-top: solid #2a2a2a; padding: 0 1; }
Static.panel { border: solid #333; padding: 0 1; height: 100%; }
#p_trader { width: 1fr; border: solid #006650; }
#p_health { width: 1fr; }
#p_alpha { width: 1fr; }
#p_scan { width: 1fr; }
#p_extf { width: 1fr; }
#p_obf { width: 1fr; }
#p_capital { width: 1fr; }
#p_prefect { width: 1fr; }
#p_acb { width: 1fr; }
#p_log { width: 1fr; border: solid #333; padding: 0 1; }
ProgressBar > .bar--bar { color: $success; }
ProgressBar > .bar--complete { color: $success; }
ProgressBar.-danger > .bar--bar { color: $error; }
ProgressBar.-warning > .bar--bar { color: $warning; }
"""
|
||||
|
||||
|
||||
# ── App ───────────────────────────────────────────────────────────────────────
class DolphinTUI(App):
    CSS = _CSS
    BINDINGS = [("q","quit","Quit"),("r","force_refresh","Refresh"),
                ("l","toggle_log","Log"),("t","toggle_tests","Tests")]
    # Panel visibility toggles (flipped by the 'l' / 't' key bindings).
    _log_vis = False; _test_vis = True
    # Rolling histories backing the MC sparklines (created in on_mount).
    _prob_hist: deque; _mae_deque: deque
    # Session capital tracking for live ROI / drawdown.
    _session_start_cap: Optional[float] = None
    _cap_peak: Optional[float] = None
|
||||
|
||||
def compose(self) -> ComposeResult:
    """Build the static widget tree.

    All panels start empty; their content is rendered by _update().
    The yield order fixes the on-screen layout, so it must not change.
    """
    yield Static("", id="header")
    with Horizontal(id="trader_row"):
        yield Static("", classes="panel", id="p_trader")
    # The three dashboard rows are structurally identical: a Horizontal
    # container holding a run of Static panels.
    panel_rows = (
        ("top_row", ("p_health", "p_alpha", "p_scan")),
        ("mid_row", ("p_extf", "p_obf", "p_capital")),
        ("bot_row", ("p_prefect", "p_acb")),
    )
    for row_id, panel_ids in panel_rows:
        with Horizontal(id=row_id):
            for panel_id in panel_ids:
                yield Static("", classes="panel", id=panel_id)
    # MC-Forewarner block: title bar over a three-column body.
    with Vertical(id="mc_outer"):
        yield Static("", id="mc_title")
        with Horizontal(id="mc_body"):
            with Vertical(id="mc_left"):
                yield Digits("0.000", id="mc_digits")
                yield Static("", id="mc_status")
            with Vertical(id="mc_center"):
                yield Static("", id="mc_prob_label")
                yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_prob_bar")
                yield Static("", id="mc_env_label")
                yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_env_bar")
                yield Static("", id="mc_champ_label")
                yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_champ_bar")
                yield Static("", id="mc_live")
            with Vertical(id="mc_right"):
                yield Static("", id="mc_spark_lbl")
                yield Sparkline([], id="mc_spark")
                yield Static("", id="mc_mae_lbl")
                yield Sparkline([], id="mc_mae_spark")
                yield Static("", id="mc_legend")
    yield Static("", id="test_footer")
    with Horizontal(id="log_row"):
        yield Static("", classes="panel", id="p_log")
|
||||
def on_mount(self) -> None:
    """Initialise buffers, wire the data feeds, and schedule refreshes.

    History deques are created here (before any listener can fire) so that
    _update() may always assume they exist.
    """
    # 40-sample catastrophic-probability history, pre-filled with zeros.
    self._prob_hist = deque([0.0] * 40, maxlen=40)
    # MAE samples, bounded so the deque never grows unboundedly.
    self._mae_deque = deque(maxlen=500)
    # Hazelcast events arrive on a worker thread; marshal back to the UI loop.
    start_hz_listener(on_scan=lambda: self.call_from_thread(self._update))
    self.run_worker(prefect_poll_loop(), name="prefect-poll", exclusive=True)
    # 1 Hz baseline refresh, plus one immediate paint so the screen is
    # never blank while waiting for the first tick.
    self.set_interval(1.0, self._update)
    self._update()
|
||||
def _update(self) -> None:
    """Refresh every panel from the shared state snapshot.

    Runs once per second (set_interval), on every Hazelcast scan event
    (via call_from_thread) and on manual refresh.  All inputs come from
    the thread-safe _S store or best-effort JSON files, so every key may
    legitimately be absent and the method must degrade gracefully.
    """
    now_str = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
    hz_up = _S.get("hz_up", False)
    mh    = _S.get("hz.meta_health") or self._read_json(_META_JSON)
    safe  = _S.get("hz.safety") or {}
    scan  = _S.get("hz.latest_eigen_scan") or {}
    exf   = _S.get("hz.exf_latest") or {}
    acb   = _S.get("hz.acb_boost") or {}
    obf_u = _S.get("hz.obf_universe_latest") or {}
    mc    = _S.get("hz.mc_forewarner_latest") or {}
    cap   = _S.get("hz.capital") or self._read_json(_CAPITAL_JSON) or {}
    hb    = _S.get("hz.heartbeat") or {}
    eng   = _S.get("hz.engine_snapshot") or {}
    eigen = _eigen_from_scan(scan)

    # ── posture: DOLPHIN_SAFETY first, fall back to engine_snapshot ───────
    posture = safe.get("posture") or eng.get("posture") or "?"
    # ── Rm / breakdown: DOLPHIN_SAFETY first, fall back to zeros ─────────
    rm_s = float(safe.get("Rm", 0.0))
    bd   = safe.get("breakdown") or {}
    # DOLPHIN_SAFETY counts as "live" when it carries either posture or Rm.
    safety_live = bool(safe.get("posture") or safe.get("Rm"))

    rm_m   = mh.get("rm_meta", 0.0) if mh else 0.0
    mhs_st = mh.get("status", "?") if mh else "?"
    sc_mhs = _SC.get(mhs_st, "dim")
    pc_col = _PC.get(posture, "dim")
    hz_tag = "[green][HZ✓][/green]" if hz_up else "[red][HZ✗][/red]"
    mc_st  = mc.get("status", "N/A") if mc else "N/A"
    mc_col = _MC.get(mc_st, "dim")

    # ── HEADER ────────────────────────────────────────────────────────────
    self._w("#header").update(
        f"[bold cyan]🐬 DOLPHIN-NAUTILUS[/bold cyan] {now_str}"
        f" {hz_tag} [{sc_mhs}]MHS:{mhs_st} {rm_m:.3f}[/{sc_mhs}]"
        f" [{pc_col}]◈{posture}[/{pc_col}]"
        f" [{mc_col}]MC:{mc_st}[/{mc_col}]"
        f" [dim]{TUI_VERSION}[/dim]\n"
        f"[dim] localhost:5701 q=quit r=refresh l=log t=tests[/dim]"
    )

    # ── TRADER ────────────────────────────────────────────────────────────
    cap_val  = float(cap.get("capital", 0)) if cap else 0.0
    hb_phase = hb.get("phase", "?") if hb else "N/A"
    hb_ts    = hb.get("ts") if hb else None
    hb_age   = _age(hb_ts) if hb_ts else "?"
    hb_col   = _age_col(hb_ts, 30, 120) if hb_ts else "red"
    vel_div  = eigen.get("vel_div", 0.0)
    vc       = "green" if vel_div > 0 else ("red" if vel_div < -0.02 else "yellow")
    scan_no  = eigen.get("scan_number", 0)
    btc_p    = eigen.get("btc_price")
    btc_str  = f"BTC:[cyan]${btc_p:,.0f}[/cyan] " if btc_p else ""
    trades_ex = eng.get("trades_executed")
    last_vd   = eng.get("last_vel_div")
    # FIX: previously a single conditional expression over implicitly
    # concatenated f-strings dropped the TRADER header/phase/scan line
    # whenever last_vel_div was absent. Render the common prefix always
    # and switch only the final line.
    trader_txt = (
        f"[bold cyan]TRADER[/bold cyan] {_posture_markup(posture)}"
        f" phase:[{hb_col}]{hb_phase}[/{hb_col}] hb:{_col(hb_age, hb_col)}"
        f" scan:[dim]#{scan_no}[/dim] {btc_str}\n"
        f" vel_div:[{vc}]{vel_div:+.5f}[/{vc}] thr:[dim]-0.02000[/dim]"
        f" cap:[cyan]${cap_val:,.0f}[/cyan]"
        f" trades:{trades_ex if trades_ex is not None else '—'}\n"
    )
    if last_vd is not None:
        trader_txt += f" last_vel_div:[dim]{last_vd:+.5f}[/dim] open-pos:[dim]DOLPHIN_PNL_BLUE[/dim]"
    else:
        trader_txt += f" open-pos:[dim]DOLPHIN_PNL_BLUE (not yet wired)[/dim]"
    self._w("#p_trader").update(trader_txt)

    # ── SYS HEALTH ────────────────────────────────────────────────────────
    if mh:
        svc   = mh.get("service_status", {})
        hz_ks = mh.get("hz_key_status", {})

        def _svc(nm, label):
            # Green dot when the supervised service reports RUNNING.
            st  = svc.get(nm, "?")
            dot = "[green]●[/green]" if st == "RUNNING" else "[red]●[/red]"
            return f"{dot}[dim]{label}[/dim]"

        def _hz_dot(nm):
            # Freshness score per Hazelcast key: ≥0.9 green, ≥0.5 yellow.
            sc = hz_ks.get(nm, {}).get("score", 0)
            return "[green]●[/green]" if sc >= 0.9 else ("[yellow]●[/yellow]" if sc >= 0.5 else "[red]●[/red]")

        self._w("#p_health").update(
            f"[bold]SYS HEALTH[/bold] [{sc_mhs}]{mhs_st}[/{sc_mhs}]\n"
            f"rm:[{sc_mhs}]{rm_m:.3f}[/{sc_mhs}]"
            f" m4:{mh.get('m4_control_plane',0):.2f}"
            f" m3:{mh.get('m3_data_freshness',0):.2f}"
            f" m5:{mh.get('m5_coherence',0):.2f}\n"
            f"{_svc('dolphin_data:exf_fetcher','exf')}"
            f" {_svc('dolphin_data:acb_processor','acb')}"
            f" {_svc('dolphin_data:obf_universe','obf')}\n"
            f"{_svc('dolphin:nautilus_trader','trader')}"
            f" {_svc('dolphin:scan_bridge','scan-bridge')}\n"
            f"[dim]hz: exf{_hz_dot('exf_latest')}"
            f" scan{_hz_dot('latest_eigen_scan')}"
            f" obf{_hz_dot('obf_universe')}[/dim]"
        )
    else:
        self._w("#p_health").update("[bold]SYS HEALTH[/bold]\n[dim]awaiting MHS…[/dim]")

    # ── ALPHA ENGINE — falls back to engine_snapshot when safety empty ────
    safe_ts  = _S.get("hz.safety._t")
    safe_age = _age(safe_ts) if safe_ts else "?"
    safe_ac  = _age_col(safe_ts, 30, 120) if safe_ts else "red"

    def _cat(n):
        # Colour a Cat1..Cat5 safety score: ≥0.9 green, ≥0.6 yellow, else red.
        v = bd.get(f"Cat{n}", 0.0)
        c = "green" if v >= 0.9 else ("yellow" if v >= 0.6 else "red")
        return f"[{c}]{v:.2f}[/{c}]"

    if safety_live:
        self._w("#p_alpha").update(
            f"[bold]ALPHA ENGINE[/bold] {_posture_markup(posture)}\n"
            f"Rm:[{pc_col}]{_bar(rm_s,14)}[/{pc_col}]{rm_s:.3f}\n"
            f"C1:{_cat(1)} C2:{_cat(2)} C3:{_cat(3)}\n"
            f"C4:{_cat(4)} C5:{_cat(5)}"
            f" fenv:{bd.get('f_env',0):.2f} fex:{bd.get('f_exe',0):.2f}\n"
            f"[dim]age:{_col(safe_age, safe_ac)}[/dim]"
        )
    else:
        # DOLPHIN_SAFETY empty — show what we have from engine_snapshot.
        bars_idx = eng.get("bar_idx", "?")
        scans_p  = eng.get("scans_processed", "?")
        self._w("#p_alpha").update(
            f"[bold]ALPHA ENGINE[/bold] {_posture_markup(posture)}\n"
            f"[yellow]DOLPHIN_SAFETY empty[/yellow]\n"
            f"posture from engine_snapshot\n"
            f"bar:{bars_idx} scans:{scans_p}\n"
            f"[dim]Rm/Cat1-5: awaiting DOLPHIN_SAFETY[/dim]"
        )

    # ── SCAN ──────────────────────────────────────────────────────────────
    scan_ac  = _age_col(eigen.get("timestamp", 0), 15, 60)
    scan_age = _age(eigen.get("timestamp")) if eigen.get("timestamp") else "?"
    vi       = eigen.get("inst_avg", 0)
    self._w("#p_scan").update(
        f"[bold]SCAN {eigen.get('version','?')}[/bold]"
        f" [dim]#{scan_no}[/dim] age:[{scan_ac}]{scan_age}[/{scan_ac}]\n"
        f"vel_div:[{vc}]{vel_div:+.5f}[/{vc}]\n"
        f"w50:[yellow]{_fmt_vel(eigen.get('v50'))}[/yellow]"
        f" w150:[dim]{_fmt_vel(eigen.get('v150'))}[/dim]\n"
        f"w300:[dim]{_fmt_vel(eigen.get('v300'))}[/dim]"
        f" w750:[dim]{_fmt_vel(eigen.get('v750'))}[/dim]\n"
        f"inst:{vi:.4f}"
    )

    # ── ExtF ──────────────────────────────────────────────────────────────
    exf_t   = _S.get("hz.exf_latest._t")
    exf_age = _age(exf_t) if exf_t else "?"
    exf_ac  = _age_col(exf_t, 30, 120) if exf_t else "red"
    f_btc  = exf.get("funding_btc"); dvol  = exf.get("dvol_btc")
    fng    = exf.get("fng");         taker = exf.get("taker")
    ls_btc = exf.get("ls_btc");      vix   = exf.get("vix")
    ok_cnt = exf.get("_ok_count", 0)
    dvol_c = "red" if dvol and dvol > 70 else ("yellow" if dvol and dvol > 50 else "green")
    fng_c  = "red" if fng and fng < 25 else ("yellow" if fng and fng < 45 else "green")
    if exf:
        # NOTE(review): assumes the numeric fields are non-None whenever the
        # exf payload is present; a None here would raise on the format
        # specs below — confirm against the EsoF producer schema.
        self._w("#p_extf").update(
            f"[bold]ExtF[/bold] [{exf_ac}]{ok_cnt}/9 {exf_age}[/{exf_ac}]\n"
            f"fund:[cyan]{f_btc:.5f}[/cyan] dvol:[{dvol_c}]{dvol:.1f}[/{dvol_c}]\n"
            f"fng:[{fng_c}]{int(fng) if fng else '?'}[/{fng_c}] taker:[yellow]{taker:.3f}[/yellow]\n"
            f"ls_btc:{ls_btc:.3f} vix:{vix:.1f}\n"
            f"acb✓:{exf.get('_acb_ready','?')}"
        )
    else:
        self._w("#p_extf").update("[bold]ExtF[/bold]\n[dim]no data[/dim]")

    # ── OBF ───────────────────────────────────────────────────────────────
    obf_t    = _S.get("hz.obf_universe_latest._t")
    obf_age  = _age(obf_t) if obf_t else "?"
    obf_ac   = _age_col(obf_t, 30, 120) if obf_t else "red"
    n_assets = obf_u.get("_n_assets", 0) if obf_u else 0
    lines = [f"[bold]OBF[/bold] [{obf_ac}]n={n_assets} {obf_age}[/{obf_ac}]"]
    for sym in _OBF_SYMS:
        if not obf_u:
            break
        a = obf_u.get(sym)
        if not a:
            continue
        imb = float(a.get("imbalance", 0))
        fp  = float(a.get("fill_probability", 0))
        dq  = float(a.get("depth_quality", 0))
        imb_c = "green" if imb > 0.1 else ("red" if imb < -0.1 else "yellow")
        lines.append(f"{sym[:3]} [{imb_c}]{imb:+.2f}[/{imb_c}] fp:{fp:.2f} dq:{dq:.2f}")
    self._w("#p_obf").update("\n".join(lines[:6]))

    # ── CAPITAL — sourced from engine_snapshot + capital_checkpoint ───────
    # engine_snapshot: capital, posture, trades_executed, scans_processed,
    #                  bar_idx, open_notional, current_leverage,
    #                  leverage_soft_cap, leverage_abs_cap, timestamp
    # capital_checkpoint: capital, ts (written more frequently)
    cap_t   = _S.get("hz.capital._t")
    cap_ac  = _age_col(cap_t, 60, 300) if cap_t else "dim"
    cap_age = _age(cap_t) if cap_t else "?"

    # Capital: prefer engine_snapshot (most recent), fall back to checkpoint.
    eng_cap  = float(eng.get("capital", 0.0)) if eng else 0.0
    chk_cap  = float(cap.get("capital", 0.0)) if cap else 0.0
    live_cap = eng_cap if eng_cap > 0 else chk_cap

    # Leverage.
    cur_lev  = float(eng.get("current_leverage", 0.0)) if eng else 0.0
    soft_cap = float(eng.get("leverage_soft_cap", 8.0)) if eng else 8.0
    abs_cap  = float(eng.get("leverage_abs_cap", 9.0)) if eng else 9.0
    open_not = float(eng.get("open_notional", 0.0)) if eng else 0.0
    lev_c    = "green" if cur_lev < 3 else ("yellow" if cur_lev < soft_cap else "red")

    # Trades / scans.
    trades_ex = eng.get("trades_executed") if eng else None
    scans_p   = eng.get("scans_processed") if eng else None
    bar_idx   = eng.get("bar_idx") if eng else None

    # Drawdown estimate inverted from the Cat5 safety score.
    c5 = bd.get("Cat5", 1.0) if bd else 1.0
    try:
        dd_est = 0.12 + math.log(1.0/c5 - 1.0) / 30.0 if 0 < c5 < 1 else 0.0
    except Exception:
        dd_est = 0.0
    dd_c = "red" if dd_est > 0.15 else ("yellow" if dd_est > 0.08 else "green")

    # Trader up/down: heartbeat age < 30s = up.
    trader_up  = hb_ts and (time.time() - hb_ts) < 30
    trader_tag = "[green]● LIVE[/green]" if trader_up else "[red]● DOWN[/red]"

    # Leverage bar (compact, 14 cells, normalised by the absolute cap).
    lev_bar = _bar(min(cur_lev / abs_cap, 1.0), 14)

    self._w("#p_capital").update(
        f"[bold]CAPITAL[/bold] {trader_tag} [{cap_ac}]{cap_age}[/{cap_ac}]\n"
        f"Cap:[cyan]${live_cap:,.0f}[/cyan]"
        f" DD≈:[{dd_c}]{dd_est*100:.1f}%[/{dd_c}]\n"
        f"Pos:{_posture_markup(posture)}"
        f" Rm:[{_PC.get(posture,'dim')}]{rm_s:.3f}[/{_PC.get(posture,'dim')}]\n"
        f"Lev:[{lev_c}]{lev_bar}[/{lev_c}]{cur_lev:.2f}x"
        f" notional:${open_not:,.0f}\n"
        f"[dim]trades:{trades_ex if trades_ex is not None else '—'}"
        f" scans:{scans_p if scans_p is not None else '—'}"
        f" bar:{bar_idx if bar_idx is not None else '—'}[/dim]"
    )

    # ── PREFECT ───────────────────────────────────────────────────────────
    flows  = _S.get("prefect_flows") or []
    pf_ok  = _S.get("prefect_ok", False)
    pf_hdr = "[green]✓[/green]" if pf_ok else "[red]✗[/red]"
    flines = "\n".join(
        f"{_dot(f['state'])} {f['name'][:22]:<22} {f['ts']}" for f in flows[:5])
    self._w("#p_prefect").update(
        f"[bold]PREFECT[/bold] {pf_hdr}\n" + (flines or "[dim]polling…[/dim]"))

    # ── ACB ───────────────────────────────────────────────────────────────
    acb_t   = _S.get("hz.acb_boost._t")
    acb_age = _age(acb_t) if acb_t else "?"
    acb_ac  = _age_col(acb_t, 3600, 86400) if acb_t else "dim"
    boost   = acb.get("boost", 1.0) if acb else 1.0
    beta    = acb.get("beta", 0.8) if acb else 0.8
    cut     = acb.get("cut", 0.0) if acb else 0.0
    boost_c = "green" if boost >= 1.5 else ("yellow" if boost >= 1.0 else "red")
    cut_c   = "red" if cut > 0 else "dim"
    self._w("#p_acb").update(
        f"[bold]ACB[/bold] [{acb_ac}]{acb.get('date','?') if acb else '?'}[/{acb_ac}]\n"
        f"boost:[{boost_c}]{boost:.2f}x[/{boost_c}] β={beta:.2f}"
        f" cut:[{cut_c}]{cut:.2f}[/{cut_c}]\n"
        f"w750_thr:{acb.get('w750_threshold',0.0) if acb else 0.0:.4f}\n"
        f"[dim]dvol={acb.get('factors',{}).get('dvol_btc','?') if acb else '?'}"
        f" fng={acb.get('factors',{}).get('fng','?') if acb else '?'}[/dim]\n"
        f"[dim]age:{acb_age} cfg:{acb.get('config_used','?') if acb else '?'}[/dim]"
    )

    # ── MC-FOREWARNER — graceful when absent ──────────────────────────────
    prob     = float(mc.get("catastrophic_prob", 0.0)) if mc else 0.0
    env      = float(mc.get("envelope_score", 0.0)) if mc else 0.0
    champ_p  = mc.get("champion_probability") if mc else None
    mc_ts    = mc.get("timestamp") if mc else None
    mc_warns = mc.get("warnings", []) if mc else []
    sc       = _MC.get(mc_st, "dim")
    self._prob_hist.append(prob)

    # Age since the last 4h assessment run.
    mc_age_str = "never run"
    if mc_ts:
        try:
            mc_dt = datetime.fromisoformat(mc_ts.replace("Z", "+00:00"))
            age_s = (datetime.now(timezone.utc) - mc_dt).total_seconds()
            age_m = int(age_s // 60)
            mc_age_str = f"{age_m//60}h{age_m%60:02d}m ago" if age_m >= 60 else f"{age_m}m ago"
        except Exception:
            pass

    mc_present = bool(mc)
    self._w("#mc_title").update(
        f"[bold cyan]⚡ MC-FOREWARNER RISK MANIFOLD[/bold cyan] {TUI_VERSION}"
        + (f" [{sc}]▶ {mc_st}[/{sc}] [dim]assessed:{mc_age_str} cadence:4h[/dim]"
           if mc_present else
           " [yellow]⚠ no data yet — Prefect 4h schedule not yet run[/yellow]"
           " [dim](mc_forewarner_flow runs every 4h)[/dim]")
    )

    # Left column: big digits + status word.
    self._w("#mc_digits", Digits).update(f"{prob:.3f}")
    status_str = {"GREEN":"🟢 SAFE","ORANGE":"🟡 CAUTION","RED":"🔴 DANGER"}.get(mc_st, "⚪ N/A")
    champ_str  = f"champ:{champ_p*100:.0f}%" if champ_p is not None else "champ:—"
    self._w("#mc_status").update(
        (f"[{sc}]{status_str}[/{sc}]\n[dim]cat.prob {champ_str}[/dim]\n[dim]env:{env:.3f}[/dim]")
        if mc_present else
        "[yellow]awaiting[/yellow]\n[dim]first run[/dim]\n[dim]in ~4h[/dim]"
    )

    # Center bars: (id, value, thresholds, CSS classes, label, percent fill).
    for bar_id, val, lo_thr, hi_thr, lo_cls, hi_cls, label, pct in [
        ("mc_prob_bar", prob, 0.10, 0.30, "-warning", "-danger",
         f"[dim]cat.prob[/dim] [{sc}]{prob:.4f}[/{sc}] [green]<0.10 OK[/green] [yellow]<0.30 WARN[/yellow] [red]≥0.30 CRIT[/red]",
         int(prob * 100)),
        ("mc_env_bar", env, 0.40, 0.70, "-danger", "-warning",
         f"[dim]env.score[/dim] [green]{env:.4f}[/green] [red]<0.40 DANGER[/red] [yellow]<0.70 CAUTION[/yellow] [green]≥0.70 SAFE[/green]",
         int(env * 100)),
    ]:
        pb = self._w(f"#{bar_id}", ProgressBar)
        pb.progress = pct
        pb.remove_class("-danger", "-warning")
        if val < lo_thr:
            pb.add_class(lo_cls)
        elif val < hi_thr:
            pb.add_class(hi_cls)
        self._w(f"#{bar_id.replace('_bar','_label')}").update(label)

    # champion_probability bar.
    chp_val = champ_p if champ_p is not None else 0.0
    cb = self._w("#mc_champ_bar", ProgressBar)
    cb.progress = int(chp_val * 100)
    cb.remove_class("-danger", "-warning")
    if chp_val < 0.30:
        cb.add_class("-danger")
    elif chp_val < 0.60:
        cb.add_class("-warning")
    self._w("#mc_champ_label").update(
        f"[dim]champ.prob[/dim] "
        + (f"[green]{champ_p*100:.1f}%[/green]" if champ_p is not None else "[dim]—[/dim]")
        + " [green]>60% GOOD[/green] [yellow]>30% MARGINAL[/yellow] [red]<30% RISK[/red]"
    )

    # Live performance tier (session ROI / drawdown against the peak).
    cur_cap = float(cap.get("capital", 0.0)) if cap else 0.0
    if cur_cap > 0:
        if self._session_start_cap is None:
            self._session_start_cap = cur_cap
        if self._cap_peak is None or cur_cap > self._cap_peak:
            self._cap_peak = cur_cap
    live_roi = ((cur_cap - self._session_start_cap) / self._session_start_cap
                if cur_cap > 0 and self._session_start_cap else None)
    live_dd  = ((self._cap_peak - cur_cap) / self._cap_peak
                if cur_cap > 0 and self._cap_peak and cur_cap < self._cap_peak else None)
    pnl_blue = _S.get("hz.pnl_blue") or {}

    def _pct(v):
        return f"{v*100:+.1f}%" if v is not None else "—"

    def _lm(k, fmt="{:.3f}"):
        # Format a pnl_blue metric, dash when missing.
        v = pnl_blue.get(k)
        return fmt.format(v) if v is not None else "—"

    roi_c = "green" if live_roi and live_roi > 0 else ("red" if live_roi and live_roi < -0.10 else "yellow")
    dd_c2 = "red" if live_dd and live_dd > 0.20 else ("yellow" if live_dd and live_dd > 0.08 else "green")
    if cur_cap > 0 and self._session_start_cap:
        self._w("#mc_live").update(
            f"[dim]ROI[/dim] [{roi_c}]{_pct(live_roi):>8}[/{roi_c}]"
            f" [dim]champ gate:>+30% crit:<-30%[/dim]\n"
            f"[dim]DD [/dim] [{dd_c2}]{_pct(live_dd):>8}[/{dd_c2}]"
            f" [dim]champ gate:<20% crit:>40%[/dim]\n"
            f"[dim]WR:{_lm('win_rate','{:.1%}')} PF:{_lm('profit_factor','{:.2f}')}"
            f" Sh:{_lm('sharpe','{:.2f}')} Cal:{_lm('calmar','{:.2f}')}[/dim]\n"
            f"[dim]cap:${cur_cap:,.0f} start:${self._session_start_cap:,.0f} "
            f"trades:{eng.get('trades_executed','—')}[/dim]"
        )
    else:
        self._w("#mc_live").update("[dim]awaiting capital data…[/dim]")

    # Right column: sparklines + legend.
    self._w("#mc_spark_lbl").update(
        f"[dim]cat.prob history [{min(self._prob_hist):.3f}–{max(self._prob_hist):.3f}][/dim]")
    self._w("#mc_spark", Sparkline).data = list(self._prob_hist)
    mae_list = list(self._mae_deque)
    self._w("#mc_mae_lbl").update(f"[dim]MAE hist (n={len(mae_list)})[/dim]")
    self._w("#mc_mae_spark", Sparkline).data = mae_list[-40:] if mae_list else [0.0]
    warn_str = ("\n[yellow]⚠ " + mc_warns[0] + "[/yellow]") if mc_warns else ""
    self._w("#mc_legend").update(
        "[bold]MC THRESHOLDS[/bold]\n"
        "[green]GREEN[/green] cat < 0.10\n"
        "[yellow]ORANGE[/yellow] cat < 0.30\n"
        "[red]RED[/red] cat ≥ 0.30\n"
        "[dim]DD gate: <20%[/dim]\n"
        "[dim]DD crit: >40%[/dim]" + warn_str
    )

    # ── TEST FOOTER — schema: {passed, total, status: PASS|FAIL|N/A} ──────
    if self._test_vis:
        tr = self._read_json(_TEST_JSON) or {}
        run_at = tr.get("_run_at", "never")
        cats = [
            ("data_integrity", "data"),
            ("finance_fuzz", "fuzz"),
            ("signal_fill", "signal"),
            ("degradation", "degrad"),
            ("actor", "actor"),
        ]

        def _badge(key, short):
            info = tr.get(key, {})
            if not info:
                return f"[dim]{short}:n/a[/dim]"
            status = info.get("status", "N/A")
            passed = info.get("passed")
            total  = info.get("total")
            if status == "N/A" or passed is None:
                return f"[dim]{short}:N/A[/dim]"
            col = "green" if status == "PASS" else "red"
            return f"[{col}]{short}:{passed}/{total}[/{col}]"

        badges = " ".join(_badge(k, s) for k, s in cats)
        self._w("#test_footer").update(
            f"[bold dim]TESTS[/bold dim] [dim]last run: {run_at}[/dim]"
            f" [dim]t=toggle r=reload[/dim]\n"
            f"{badges}\n"
            f"[dim]file: run_logs/test_results_latest.json "
            f"API: write_test_results() in dolphin_tui_v6.py[/dim]"
        )
    else:
        self._w("#test_footer").update("")

    # ── LOG ───────────────────────────────────────────────────────────────
    if self._log_vis:
        self._w("#p_log").update(
            f"[bold]LOG[/bold] (l=hide)\n"
            f"[dim]{now_str}[/dim] scan=#{scan_no} vel={vel_div:+.5f}\n"
            f"hz_err:{_S.get('hz_err','none')} pf_err:{_S.get('prefect_err','none')}\n"
            f"[dim]state keys:{len(_S._d)} safety_live:{safety_live}[/dim]"
        )
|
||||
def action_force_refresh(self) -> None:
    """Keybinding 'r': redraw every panel immediately."""
    self._update()
def action_toggle_log(self) -> None:
    """Keybinding 'l': show or hide the diagnostic log row."""
    self._log_vis = not self._log_vis
    self.query_one("#log_row").display = self._log_vis
def action_toggle_tests(self) -> None:
    """Keybinding 't': show or hide the test-results footer, then repaint."""
    self._test_vis = not self._test_vis
    self._update()
|
||||
def _w(self, selector, widget_type=Static):
    """Shorthand for query_one(); widgets default to Static panels."""
    return self.query_one(selector, widget_type)
||||
|
||||
@staticmethod
def _read_json(path):
    """Best-effort JSON load from *path*; None on any read/parse failure."""
    try:
        return json.loads(path.read_text())
    except Exception:
        return None
||||
|
||||
|
||||
def write_test_results(results: dict):
    """Merge *results* into the JSON file backing the TUI test footer.

    Intended to be called by test scripts / CI / conftest.py.

    Schema (per category)::

        {
          "_run_at": "auto-injected",
          "data_integrity": {"passed": 15, "total": 15, "status": "PASS"},
          "finance_fuzz":   {"passed": null, "total": null, "status": "N/A"},
        }

    status is one of "PASS" | "FAIL" | "N/A".  Categories missing from
    *results* keep whatever value the file already holds.

    Example (conftest.py)::

        import sys; sys.path.insert(0, "/mnt/dolphinng5_predict/Observability/TUI")
        # import from this TUI module, then:
        write_test_results({"data_integrity": {"passed": 15, "total": 15, "status": "PASS"}})
    """
    _TEST_JSON.parent.mkdir(parents=True, exist_ok=True)
    # Merge with the existing file so missing categories are preserved.
    try:
        merged = json.loads(_TEST_JSON.read_text())
    except Exception:
        merged = {}  # absent or corrupt file: start fresh
    merged.update(results)
    merged["_run_at"] = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S")
    _TEST_JSON.write_text(json.dumps(merged, indent=2))
||||
|
||||
|
||||
if __name__ == "__main__":
    # Manual launch: run the Textual app directly.
    DolphinTUI().run()
||||
800
Observability/TUI/dolphin_tui_v7.py
Executable file
800
Observability/TUI/dolphin_tui_v7.py
Executable file
@@ -0,0 +1,800 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
DOLPHIN TUI v7
|
||||
==============
|
||||
Fixes vs v3:
|
||||
• SYS HEALTH: tdr/scb labels expanded to full service names
|
||||
• ALPHA ENGINE: falls back to engine_snapshot when DOLPHIN_SAFETY is empty
|
||||
• MC-FOREWARNER: graceful "not yet run" display; shows last-run age; no crash on absent key
|
||||
• Version number shown in header after MC status
|
||||
|
||||
Run: source /home/dolphin/siloqy_env/bin/activate && python dolphin_tui_v7.py
|
||||
Keys: q=quit r=force-refresh l=toggle log t=toggle test footer
|
||||
"""
|
||||
|
||||
# ── stdlib ────────────────────────────────────────────────────────────────────
|
||||
import asyncio, json, math, threading, time
|
||||
from collections import deque
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
# ── third-party ───────────────────────────────────────────────────────────────
|
||||
try:
|
||||
from textual.app import App, ComposeResult
|
||||
from textual.containers import Horizontal, Vertical
|
||||
from textual.widgets import Digits, ProgressBar, Sparkline, Static
|
||||
except ImportError as e:
|
||||
raise SystemExit(f"textual not found — activate siloqy_env: {e}")
|
||||
|
||||
try:
|
||||
import hazelcast
|
||||
_HZ_OK = True
|
||||
except ImportError:
|
||||
_HZ_OK = False
|
||||
|
||||
TUI_VERSION = "TUI v7"
|
||||
|
||||
_META_JSON = Path("/mnt/dolphinng5_predict/run_logs/meta_health.json")
|
||||
_CAPITAL_JSON = Path("/tmp/dolphin_capital_checkpoint.json")
|
||||
# Path relative to this file: Observability/TUI/ → ../../run_logs/
|
||||
_TEST_JSON = Path(__file__).parent.parent.parent / "run_logs" / "test_results_latest.json"
|
||||
_OBF_SYMS = ["BTCUSDT", "ETHUSDT", "SOLUSDT", "BNBUSDT", "XRPUSDT"]
|
||||
|
||||
_PC = {"APEX":"green","STALKER":"yellow","TURTLE":"dark_orange","HIBERNATE":"red"}
|
||||
_MC = {"GREEN":"green","ORANGE":"dark_orange","RED":"red"}
|
||||
_SC = {"GREEN":"green","DEGRADED":"yellow","CRITICAL":"dark_orange","DEAD":"red"}
|
||||
|
||||
|
||||
# ── Thread-safe state ─────────────────────────────────────────────────────────
|
||||
class _State:
|
||||
def __init__(self):
|
||||
self._l = threading.Lock(); self._d: Dict[str, Any] = {}
|
||||
def put(self, k, v):
|
||||
with self._l: self._d[k] = v
|
||||
def get(self, k, default=None):
|
||||
with self._l: return self._d.get(k, default)
|
||||
def update(self, m):
|
||||
with self._l: self._d.update(m)
|
||||
|
||||
_S = _State()
|
||||
|
||||
def _ingest(key, raw):
    """Decode *raw* JSON and stash it (plus an arrival timestamp) in _S.

    Stores the payload under ``hz.<key>`` and the receive time under
    ``hz.<key>._t``.  Empty and malformed payloads are silently dropped
    (best-effort ingestion).
    """
    if not raw:
        return
    try:
        decoded = json.loads(raw)
        _S.update({f"hz.{key}": decoded, f"hz.{key}._t": time.time()})
    except Exception:
        pass
||||
|
||||
def start_hz_listener(on_scan=None):
    """Spawn the daemon thread that mirrors Hazelcast maps into _S.

    Subscribes to the feature, meta-health, safety, heartbeat, BLUE state
    and (best-effort) PnL maps, re-ingesting values on every add/update
    event.  Reconnects with a 10 s backoff whenever the client drops.
    *on_scan*, if given, is invoked after each fresh eigen-scan payload.
    """
    if not _HZ_OK:
        # hazelcast-python-client is not installed: mark the link down.
        _S.put("hz_up", False)
        return

    def _listen_forever():
        while True:
            try:
                _S.put("hz_up", False)
                client = hazelcast.HazelcastClient(
                    cluster_name="dolphin",
                    cluster_members=["localhost:5701"],
                    connection_timeout=5.0,
                )
                _S.put("hz_up", True)

                # DOLPHIN_FEATURES: several keys share one listener.
                features = client.get_map("DOLPHIN_FEATURES").blocking()
                for fk in ("latest_eigen_scan", "exf_latest", "acb_boost",
                           "obf_universe_latest", "mc_forewarner_latest"):
                    _ingest(fk, features.get(fk))

                def _on_feature(evt):
                    _ingest(evt.key, evt.value)
                    if evt.key == "latest_eigen_scan" and on_scan:
                        try:
                            on_scan()
                        except Exception:
                            pass

                features.add_entry_listener(include_value=True,
                                            updated=_on_feature, added=_on_feature)

                # Single-key maps: (hazelcast map, key, state key in _S).
                for map_name, hz_key, state_key in [
                    ("DOLPHIN_META_HEALTH", "latest", "meta_health"),
                    ("DOLPHIN_SAFETY", "latest", "safety"),
                    ("DOLPHIN_HEARTBEAT", "nautilus_flow_heartbeat", "heartbeat"),
                ]:
                    hz_map = client.get_map(map_name).blocking()
                    _ingest(state_key, hz_map.get(hz_key))

                    # Defaults bind the loop variables per iteration.
                    def _on_single(evt, sk=state_key, ek=hz_key):
                        if evt.key == ek:
                            _ingest(sk, evt.value)

                    hz_map.add_entry_listener(include_value=True,
                                              updated=_on_single, added=_on_single)

                # BLUE engine state: capital checkpoint + engine snapshot.
                state_map = client.get_map("DOLPHIN_STATE_BLUE").blocking()
                _ingest("capital", state_map.get("capital_checkpoint"))
                _ingest("engine_snapshot", state_map.get("engine_snapshot"))

                def _on_state(evt):
                    if evt.key == "capital_checkpoint":
                        _ingest("capital", evt.value)
                    elif evt.key == "engine_snapshot":
                        _ingest("engine_snapshot", evt.value)

                state_map.add_entry_listener(include_value=True,
                                             updated=_on_state, added=_on_state)

                # PnL map may not exist yet — best-effort subscription.
                try:
                    pnl_map = client.get_map("DOLPHIN_PNL_BLUE").blocking()
                    _ingest("pnl_blue", pnl_map.get("session_perf"))

                    def _on_pnl(evt):
                        if evt.key == "session_perf":
                            _ingest("pnl_blue", evt.value)

                    pnl_map.add_entry_listener(include_value=True,
                                               updated=_on_pnl, added=_on_pnl)
                except Exception:
                    pass

                # Park here until the client stops running, then reconnect.
                while True:
                    time.sleep(5)
                    if not getattr(client.lifecycle_service, "is_running",
                                   lambda: True)():
                        break
            except Exception as e:
                _S.put("hz_up", False)
                _S.put("hz_err", str(e))
                time.sleep(10)

    threading.Thread(target=_listen_forever, daemon=True, name="hz-listener").start()
||||
|
||||
async def prefect_poll_loop():
    """Poll the Prefect API once a minute and publish recent flow runs to _S.

    Keeps only the newest run per flow id (the query is sorted newest
    first), resolving flow names lazily.  Any failure flips prefect_ok off
    and records a truncated error string — the loop itself never dies.
    """
    while True:
        try:
            # Imported lazily so the TUI still starts without Prefect.
            from prefect.client.orchestration import get_client
            from prefect.client.schemas.sorting import FlowRunSort
            async with get_client() as pc:
                runs = await pc.read_flow_runs(limit=20, sort=FlowRunSort.START_TIME_DESC)
                latest_per_flow: Dict[str, Any] = {}
                names: Dict[str, str] = {}
                for run in runs:
                    # First hit wins == most recent run of that flow.
                    latest_per_flow.setdefault(str(run.flow_id), run)
                rows = []
                for fid, run in latest_per_flow.items():
                    if fid not in names:
                        try:
                            flow = await pc.read_flow(run.flow_id)
                            names[fid] = flow.name
                        except Exception:
                            names[fid] = fid[:8]
                    rows.append({
                        "name": names[fid],
                        "state": run.state_name or "?",
                        "ts": run.start_time.strftime("%m-%d %H:%M") if run.start_time else "--",
                    })
                _S.put("prefect_flows", rows[:8])
                _S.put("prefect_ok", True)
        except Exception as e:
            _S.put("prefect_ok", False)
            _S.put("prefect_err", str(e)[:60])
        await asyncio.sleep(60)
||||
|
||||
# ── Helpers ───────────────────────────────────────────────────────────────────
|
||||
def _age(ts):
|
||||
if not ts: return "?"
|
||||
s = time.time() - ts
|
||||
if s < 0: return "0s"
|
||||
if s < 60: return f"{s:.0f}s"
|
||||
if s < 3600: return f"{s/60:.0f}m"
|
||||
return f"{s/3600:.1f}h"
|
||||
|
||||
def _age_col(ts, warn=15, dead=60):
|
||||
s = time.time() - ts if ts else 9999
|
||||
return "red" if s > dead else ("yellow" if s > warn else "green")
|
||||
|
||||
def _bar(v, width=12):
|
||||
v = max(0.0, min(1.0, v))
|
||||
f = round(v * width)
|
||||
return "█" * f + "░" * (width - f)
|
||||
|
||||
def _fmt_vel(v):
|
||||
return "---" if v is None else f"{float(v):+.5f}"
|
||||
|
||||
def _dot(state):
|
||||
s = (state or "").upper()
|
||||
if s == "COMPLETED": return "[green]●[/green]"
|
||||
if s == "RUNNING": return "[cyan]●[/cyan]"
|
||||
if s in ("FAILED","CRASHED","TIMEDOUT"): return "[red]●[/red]"
|
||||
if s == "CANCELLED": return "[dim]●[/dim]"
|
||||
if s == "PENDING": return "[yellow]●[/yellow]"
|
||||
return "[dim]◌[/dim]"
|
||||
|
||||
def _posture_markup(p):
    """Wrap posture name *p* in its configured colour tags (dim when unknown)."""
    colour = _PC.get(p, "dim")
    return f"[{colour}]{p}[/{colour}]"
||||
|
||||
def _col(v, c): return f"[{c}]{v}[/{c}]"
|
||||
|
||||
def _eigen_from_scan(scan):
|
||||
if not scan: return {}
|
||||
r = scan.get("result", scan)
|
||||
mwr_raw = r.get("multi_window_results", {})
|
||||
def _td(w): return (mwr_raw.get(w) or mwr_raw.get(str(w)) or {}).get("tracking_data", {})
|
||||
def _rs(w): return (mwr_raw.get(w) or mwr_raw.get(str(w)) or {}).get("regime_signals", {})
|
||||
v50 = _td(50).get("lambda_max_velocity") or scan.get("w50_velocity", 0.0)
|
||||
v150 = _td(150).get("lambda_max_velocity") or scan.get("w150_velocity", 0.0)
|
||||
v300 = _td(300).get("lambda_max_velocity") or scan.get("w300_velocity", 0.0)
|
||||
v750 = _td(750).get("lambda_max_velocity") or scan.get("w750_velocity", 0.0)
|
||||
vel_div = scan.get("vel_div", float(v50 or 0) - float(v150 or 0))
|
||||
inst_avg = sum(_rs(w).get("instability_score", 0.0) for w in (50,150,300,750)) / 4
|
||||
bt_price = (r.get("pricing_data", {}) or {}).get("current_prices", {}).get("BTCUSDT")
|
||||
return {
|
||||
"scan_number": scan.get("scan_number", 0),
|
||||
"timestamp": scan.get("timestamp", 0),
|
||||
"vel_div": float(vel_div or 0),
|
||||
"v50": float(v50 or 0), "v150": float(v150 or 0),
|
||||
"v300": float(v300 or 0), "v750": float(v750 or 0),
|
||||
"inst_avg": float(inst_avg or 0),
|
||||
"btc_price": float(bt_price) if bt_price else None,
|
||||
"regime": r.get("regime", r.get("sentiment", "?")),
|
||||
"version": scan.get("version", "?"),
|
||||
}
|
||||
|
||||
# ── CSS ───────────────────────────────────────────────────────────────────────
# Textual CSS for the whole app. Widget ids here match the ids yielded in
# DolphinTUI.compose(); heights are fixed so panels don't reflow as data
# arrives. The ProgressBar rules restyle the bar via Textual's theme
# variables ($success/$warning/$error) and the -warning/-danger classes
# toggled in _update().
_CSS = """
Screen { background: #0a0a0a; color: #d4d4d4; }
#header { height: 2; background: #111; border-bottom: solid #333; padding: 0 1; }
#trader_row { height: 5; }
#top_row { height: 9; }
#mid_row { height: 9; }
#bot_row { height: 7; }
#log_row { height: 5; display: none; }
#mc_outer { height: 16; border: solid #224; background: #060616; }
#mc_title { height: 1; padding: 0 1; }
#mc_body { height: 15; }
#mc_left { width: 18; padding: 0 1; }
#mc_center { width: 1fr; padding: 0 1; }
#mc_right { width: 30; padding: 0 1; }
#mc_prob_label { height: 1; }
#mc_prob_bar { height: 1; }
#mc_env_label { height: 1; }
#mc_env_bar { height: 1; }
#mc_champ_label{ height: 1; }
#mc_champ_bar { height: 1; }
#mc_live { height: 8; }
#mc_spark_lbl { height: 1; }
#mc_spark { height: 2; }
#mc_mae_lbl { height: 1; }
#mc_mae_spark { height: 2; }
#mc_digits { height: 3; }
#mc_status { height: 3; }
#mc_legend { height: 6; }
#test_footer { height: 3; background: #101010; border-top: solid #2a2a2a; padding: 0 1; }
Static.panel { border: solid #333; padding: 0 1; height: 100%; }
#p_trader { width: 1fr; border: solid #006650; }
#p_health { width: 1fr; }
#p_alpha { width: 1fr; }
#p_scan { width: 1fr; }
#p_extf { width: 1fr; }
#p_obf { width: 1fr; }
#p_capital { width: 1fr; }
#p_prefect { width: 1fr; }
#p_acb { width: 1fr; }
#p_log { width: 1fr; border: solid #333; padding: 0 1; }
ProgressBar > .bar--bar { color: $success; }
ProgressBar > .bar--complete { color: $success; }
ProgressBar.-danger > .bar--bar { color: $error; }
ProgressBar.-warning > .bar--bar { color: $warning; }
"""
|
||||
|
||||
|
||||
# ── App ───────────────────────────────────────────────────────────────────────
|
||||
class DolphinTUI(App):
    """Textual dashboard for the DOLPHIN trading stack.

    All data arrives via the module-level ``_S`` state store (fed by a
    Hazelcast listener and background poll loops). ``_update`` re-renders
    every panel from whatever is cached there, driven by a 1 Hz timer plus
    scan-event callbacks; it never blocks on I/O itself (file reads are
    best-effort via ``_read_json``).
    """
    CSS = _CSS
    BINDINGS = [("q","quit","Quit"),("r","force_refresh","Refresh"),
                ("l","toggle_log","Log"),("t","toggle_tests","Tests")]
    # Panel visibility toggles (bound to the l / t keys).
    _log_vis = False; _test_vis = True
    # Rolling histories for the MC sparklines; instantiated in on_mount.
    _prob_hist: deque; _mae_deque: deque
    # Session capital tracking for the live ROI / drawdown readouts.
    _session_start_cap: Optional[float] = None
    _cap_peak: Optional[float] = None

    def compose(self) -> ComposeResult:
        """Build the static widget tree; all text content is filled by _update."""
        yield Static("", id="header")
        with Horizontal(id="trader_row"):
            yield Static("", classes="panel", id="p_trader")
        with Horizontal(id="top_row"):
            yield Static("", classes="panel", id="p_health")
            yield Static("", classes="panel", id="p_alpha")
            yield Static("", classes="panel", id="p_scan")
        with Horizontal(id="mid_row"):
            yield Static("", classes="panel", id="p_extf")
            yield Static("", classes="panel", id="p_obf")
            yield Static("", classes="panel", id="p_capital")
        with Horizontal(id="bot_row"):
            yield Static("", classes="panel", id="p_prefect")
            yield Static("", classes="panel", id="p_acb")
        with Vertical(id="mc_outer"):
            yield Static("", id="mc_title")
            with Horizontal(id="mc_body"):
                with Vertical(id="mc_left"):
                    yield Digits("0.000", id="mc_digits")
                    yield Static("", id="mc_status")
                with Vertical(id="mc_center"):
                    yield Static("", id="mc_prob_label")
                    yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_prob_bar")
                    yield Static("", id="mc_env_label")
                    yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_env_bar")
                    yield Static("", id="mc_champ_label")
                    yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_champ_bar")
                    yield Static("", id="mc_live")
                with Vertical(id="mc_right"):
                    yield Static("", id="mc_spark_lbl")
                    yield Sparkline([], id="mc_spark")
                    yield Static("", id="mc_mae_lbl")
                    yield Sparkline([], id="mc_mae_spark")
                    yield Static("", id="mc_legend")
        yield Static("", id="test_footer")
        with Horizontal(id="log_row"):
            yield Static("", classes="panel", id="p_log")

    def on_mount(self) -> None:
        """Wire data sources: histories, HZ listener, Prefect poller, 1 Hz refresh."""
        self._prob_hist = deque([0.0] * 40, maxlen=40)
        self._mae_deque = deque(maxlen=500)
        # HZ events arrive on a background thread — marshal onto the UI thread.
        start_hz_listener(on_scan=lambda: self.call_from_thread(self._update))
        self.run_worker(prefect_poll_loop(), name="prefect-poll", exclusive=True)
        self.set_interval(1.0, self._update)
        self._update()

    def _update(self) -> None:
        """Re-render every panel from the current contents of the _S store."""
        now_str = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
        hz_up = _S.get("hz_up", False)
        # Each feed falls back to an empty dict (or a JSON file on disk for
        # meta-health / capital) so rendering never raises on missing keys.
        mh = _S.get("hz.meta_health") or self._read_json(_META_JSON)
        safe = _S.get("hz.safety") or {}
        scan = _S.get("hz.latest_eigen_scan") or {}
        exf = _S.get("hz.exf_latest") or {}
        acb = _S.get("hz.acb_boost") or {}
        obf_u = _S.get("hz.obf_universe_latest") or {}
        mc = _S.get("hz.mc_forewarner_latest") or {}
        cap = _S.get("hz.capital") or self._read_json(_CAPITAL_JSON) or {}
        hb = _S.get("hz.heartbeat") or {}
        eng = _S.get("hz.engine_snapshot") or {}
        eigen = _eigen_from_scan(scan)

        # ── posture: DOLPHIN_SAFETY first, fall back to engine_snapshot ───────
        posture = safe.get("posture") or eng.get("posture") or "?"
        # ── Rm / breakdown: DOLPHIN_SAFETY first, fall back to zeros ─────────
        rm_s = float(safe.get("Rm", 0.0))
        bd = safe.get("breakdown") or {}
        # If DOLPHIN_SAFETY is empty but engine_snapshot has posture, show that
        safety_live = bool(safe.get("posture") or safe.get("Rm"))

        rm_m = mh.get("rm_meta", 0.0) if mh else 0.0
        mhs_st = mh.get("status", "?") if mh else "?"
        sc_mhs = _SC.get(mhs_st, "dim")
        pc_col = _PC.get(posture, "dim")
        hz_tag = "[green][HZ✓][/green]" if hz_up else "[red][HZ✗][/red]"
        mc_st = mc.get("status", "N/A") if mc else "N/A"
        mc_col = _MC.get(mc_st, "dim")

        # ── HEADER ────────────────────────────────────────────────────────────
        self._w("#header").update(
            f"[bold cyan]🐬 DOLPHIN-NAUTILUS[/bold cyan] {now_str}"
            f" {hz_tag} [{sc_mhs}]MHS:{mhs_st} {rm_m:.3f}[/{sc_mhs}]"
            f" [{pc_col}]◈{posture}[/{pc_col}]"
            f" [{mc_col}]MC:{mc_st}[/{mc_col}]"
            f" [dim]{TUI_VERSION}[/dim]\n"
            f"[dim] localhost:5701 q=quit r=refresh l=log t=tests[/dim]"
        )

        # ── TRADER ────────────────────────────────────────────────────────────
        cap_val = float(cap.get("capital", 0)) if cap else 0.0
        hb_phase = hb.get("phase", "?") if hb else "N/A"
        hb_ts = hb.get("ts") if hb else None
        hb_age = _age(hb_ts) if hb_ts else "?"
        hb_col = _age_col(hb_ts, 30, 120) if hb_ts else "red"
        vel_div = eigen.get("vel_div", 0.0)
        vc = "green" if vel_div > 0 else ("red" if vel_div < -0.02 else "yellow")
        scan_no = eigen.get("scan_number", 0)
        btc_p = eigen.get("btc_price")
        btc_str = f"BTC:[cyan]${btc_p:,.0f}[/cyan] " if btc_p else ""
        trades_ex= eng.get("trades_executed")
        last_vd = eng.get("last_vel_div")
        # NOTE(review): the conditional expression below chooses between two
        # whole implicit string concatenations; the else branch does NOT
        # repeat the "TRADER" header line, so the panel loses its title when
        # last_vel_div is absent. Looks unintentional — confirm before fixing.
        self._w("#p_trader").update(
            f"[bold cyan]TRADER[/bold cyan] {_posture_markup(posture)}"
            f" phase:[{hb_col}]{hb_phase}[/{hb_col}] hb:{_col(hb_age, hb_col)}"
            f" scan:[dim]#{scan_no}[/dim] {btc_str}\n"
            f" vel_div:[{vc}]{vel_div:+.5f}[/{vc}] thr:[dim]-0.02000[/dim]"
            f" cap:[cyan]${cap_val:,.0f}[/cyan]"
            f" trades:{trades_ex if trades_ex is not None else '—'}\n"
            f" last_vel_div:[dim]{last_vd:+.5f}[/dim] open-pos:[dim]DOLPHIN_PNL_BLUE[/dim]"
            if last_vd is not None else
            f" vel_div:[{vc}]{vel_div:+.5f}[/{vc}] thr:[dim]-0.02000[/dim]"
            f" cap:[cyan]${cap_val:,.0f}[/cyan]"
            f" trades:{trades_ex if trades_ex is not None else '—'}\n"
            f" open-pos:[dim]DOLPHIN_PNL_BLUE (not yet wired)[/dim]"
        )

        # ── SYS HEALTH — FIX: expand tdr/scb labels ──────────────────────────
        if mh:
            svc = mh.get("service_status", {})
            hz_ks= mh.get("hz_key_status", {})
            def _svc(nm, label):
                # Green dot only for an exactly-RUNNING supervisor state.
                st = svc.get(nm, "?")
                dot = "[green]●[/green]" if st == "RUNNING" else "[red]●[/red]"
                return f"{dot}[dim]{label}[/dim]"
            def _hz_dot(nm):
                # HZ key freshness score → traffic-light dot.
                sc = hz_ks.get(nm, {}).get("score", 0)
                return "[green]●[/green]" if sc >= 0.9 else ("[yellow]●[/yellow]" if sc >= 0.5 else "[red]●[/red]")
            self._w("#p_health").update(
                f"[bold]SYS HEALTH[/bold] [{sc_mhs}]{mhs_st}[/{sc_mhs}]\n"
                f"rm:[{sc_mhs}]{rm_m:.3f}[/{sc_mhs}]"
                f" m4:{mh.get('m4_control_plane',0):.2f}"
                f" m3:{mh.get('m3_data_freshness',0):.2f}"
                f" m5:{mh.get('m5_coherence',0):.2f}\n"
                f"{_svc('dolphin_data:exf_fetcher','exf')}"
                f" {_svc('dolphin_data:acb_processor','acb')}"
                f" {_svc('dolphin_data:obf_universe','obf')}\n"
                f"{_svc('dolphin:nautilus_trader','trader')}"
                f" {_svc('dolphin:scan_bridge','scan-bridge')}\n"
                f"[dim]hz: exf{_hz_dot('exf_latest')}"
                f" scan{_hz_dot('latest_eigen_scan')}"
                f" obf{_hz_dot('obf_universe')}[/dim]"
            )
        else:
            self._w("#p_health").update("[bold]SYS HEALTH[/bold]\n[dim]awaiting MHS…[/dim]")

        # ── ALPHA ENGINE — FIX: fall back to engine_snapshot when safety empty ─
        safe_ts = _S.get("hz.safety._t")
        safe_age = _age(safe_ts) if safe_ts else "?"
        safe_ac = _age_col(safe_ts, 30, 120) if safe_ts else "red"
        def _cat(n):
            # Colour-coded Cat1..Cat5 safety breakdown value.
            v = bd.get(f"Cat{n}", 0.0)
            c = "green" if v >= 0.9 else ("yellow" if v >= 0.6 else "red")
            return f"[{c}]{v:.2f}[/{c}]"
        if safety_live:
            self._w("#p_alpha").update(
                f"[bold]ALPHA ENGINE[/bold] {_posture_markup(posture)}\n"
                f"Rm:[{pc_col}]{_bar(rm_s,14)}[/{pc_col}]{rm_s:.3f}\n"
                f"C1:{_cat(1)} C2:{_cat(2)} C3:{_cat(3)}\n"
                f"C4:{_cat(4)} C5:{_cat(5)}"
                f" fenv:{bd.get('f_env',0):.2f} fex:{bd.get('f_exe',0):.2f}\n"
                f"[dim]age:{_col(safe_age, safe_ac)}[/dim]"
            )
        else:
            # DOLPHIN_SAFETY empty — show what we have from engine_snapshot
            bars_idx = eng.get("bar_idx", "?")
            scans_p = eng.get("scans_processed", "?")
            self._w("#p_alpha").update(
                f"[bold]ALPHA ENGINE[/bold] {_posture_markup(posture)}\n"
                f"[yellow]DOLPHIN_SAFETY empty[/yellow]\n"
                f"posture from engine_snapshot\n"
                f"bar:{bars_idx} scans:{scans_p}\n"
                f"[dim]Rm/Cat1-5: awaiting DOLPHIN_SAFETY[/dim]"
            )

        # ── SCAN ──────────────────────────────────────────────────────────────
        scan_ac = _age_col(eigen.get("timestamp", 0), 15, 60)
        scan_age= _age(eigen.get("timestamp")) if eigen.get("timestamp") else "?"
        vi = eigen.get("inst_avg", 0)
        # Extra fields from result dict
        r_dict = scan.get("result", scan)
        regime = eigen.get("regime", r_dict.get("regime", "?"))
        bull_pct = r_dict.get("bull_pct", 0.0)
        bear_pct = r_dict.get("bear_pct", 0.0)
        conf = r_dict.get("confidence", 0.0)
        bb_dist = r_dict.get("bb_dist_pct", 0.0)
        price = r_dict.get("price", eigen.get("btc_price"))
        reg_c = "green" if regime == "BULL" else ("red" if regime == "BEAR" else "yellow")
        bb_c = "red" if abs(bb_dist) > 0.05 else ("yellow" if abs(bb_dist) > 0.02 else "green")
        self._w("#p_scan").update(
            f"[bold]SCAN {eigen.get('version','?')}[/bold]"
            f" [dim]#{scan_no}[/dim] [{scan_ac}]{scan_age}[/{scan_ac}]\n"
            f"vel:[{vc}]{vel_div:+.5f}[/{vc}]"
            f" w50:[yellow]{_fmt_vel(eigen.get('v50'))}[/yellow]"
            f" w150:[dim]{_fmt_vel(eigen.get('v150'))}[/dim]\n"
            f"w300:[dim]{_fmt_vel(eigen.get('v300'))}[/dim]"
            f" w750:[dim]{_fmt_vel(eigen.get('v750'))}[/dim]\n"
            f"[{reg_c}]{regime}[/{reg_c}] B:{bull_pct:.0f}% b:{bear_pct:.0f}%"
            f" conf:{conf:.2f} inst:{vi:.4f}\n"
            f"BB:[{bb_c}]{bb_dist:+.4f}[/{bb_c}]"
            + (f" ${price:,.0f}" if price else "")
        )

        # ── ExtF ──────────────────────────────────────────────────────────────
        exf_t = _S.get("hz.exf_latest._t")
        exf_age = _age(exf_t) if exf_t else "?"
        exf_ac = _age_col(exf_t, 30, 120) if exf_t else "red"
        f_btc = exf.get("funding_btc"); dvol = exf.get("dvol_btc")
        fng = exf.get("fng"); taker = exf.get("taker")
        ls_btc = exf.get("ls_btc"); vix = exf.get("vix")
        f_eth = exf.get("funding_eth"); ls_eth = exf.get("ls_eth")
        oi_btc = exf.get("oi_btc"); oi_eth = exf.get("oi_eth")
        fdb_btc = exf.get("fund_dbt_btc")
        ok_cnt = exf.get("_ok_count", 0)
        dvol_c = "red" if dvol and dvol > 70 else ("yellow" if dvol and dvol > 50 else "green")
        fng_c = "red" if fng and fng < 25 else ("yellow" if fng and fng < 45 else "green")
        taker_c = "red" if taker and taker > 0.7 else ("yellow" if taker and taker > 0.55 else "green")
        def _ef(v, fmt=".5f"):
            # Format an optional external-factor value, "?" when missing.
            return f"{v:{fmt}}" if v is not None else "?"
        if exf:
            self._w("#p_extf").update(
                f"[bold]ExtF[/bold] [{exf_ac}]{ok_cnt}/9 {exf_age}[/{exf_ac}]\n"
                f"fBTC:[cyan]{_ef(f_btc)}[/cyan] fETH:[dim]{_ef(f_eth)}[/dim]"
                f" dvol:[{dvol_c}]{_ef(dvol,'.1f')}[/{dvol_c}]\n"
                f"fng:[{fng_c}]{int(fng) if fng else '?'}[/{fng_c}]"
                f" tkr:[{taker_c}]{_ef(taker,'.3f')}[/{taker_c}]"
                f" vix:{_ef(vix,'.1f')}\n"
                f"ls_b:{_ef(ls_btc,'.3f')} ls_e:{_ef(ls_eth,'.3f')}"
                f" fdb:{_ef(fdb_btc,'.5f')}\n"
                f"oi_b:{_ef(oi_btc,'.0f')} oi_e:{_ef(oi_eth,'.0f')}"
                f" acb✓:{exf.get('_acb_ready','?')}"
            )
        else:
            self._w("#p_extf").update("[bold]ExtF[/bold]\n[dim]no data[/dim]")

        # ── OBF ───────────────────────────────────────────────────────────────
        obf_t = _S.get("hz.obf_universe_latest._t")
        obf_age = _age(obf_t) if obf_t else "?"
        obf_ac = _age_col(obf_t, 30, 120) if obf_t else "red"
        n_assets= obf_u.get("_n_assets", 0) if obf_u else 0
        lines = [f"[bold]OBF[/bold] [{obf_ac}]n={n_assets} {obf_age}[/{obf_ac}]"]
        for sym in _OBF_SYMS:
            if not obf_u: break
            a = obf_u.get(sym)
            if not a: continue
            imb = float(a.get("imbalance", 0))
            fp = float(a.get("fill_probability", 0))
            dq = float(a.get("depth_quality", 0))
            imb_c = "green" if imb > 0.1 else ("red" if imb < -0.1 else "yellow")
            lines.append(f"{sym[:3]} [{imb_c}]{imb:+.2f}[/{imb_c}] fp:{fp:.2f} dq:{dq:.2f}")
        self._w("#p_obf").update("\n".join(lines[:6]))

        # ── CAPITAL — sourced from engine_snapshot + capital_checkpoint ─────────
        # engine_snapshot: capital, posture, trades_executed, scans_processed,
        #                  bar_idx, open_notional, current_leverage,
        #                  leverage_soft_cap, leverage_abs_cap, timestamp
        # capital_checkpoint: capital, ts (written more frequently)
        cap_t = _S.get("hz.capital._t")
        cap_ac = _age_col(cap_t, 60, 300) if cap_t else "dim"
        cap_age = _age(cap_t) if cap_t else "?"
        eng_ts = eng.get("timestamp")

        # Capital: prefer engine_snapshot (most recent), fall back to checkpoint
        eng_cap = float(eng.get("capital", 0.0)) if eng else 0.0
        chk_cap = float(cap.get("capital", 0.0)) if cap else 0.0
        live_cap = eng_cap if eng_cap > 0 else chk_cap

        # Leverage
        cur_lev = float(eng.get("current_leverage", 0.0)) if eng else 0.0
        soft_cap = float(eng.get("leverage_soft_cap", 8.0)) if eng else 8.0
        abs_cap = float(eng.get("leverage_abs_cap", 9.0)) if eng else 9.0
        open_not = float(eng.get("open_notional", 0.0)) if eng else 0.0
        lev_c = "green" if cur_lev < 3 else ("yellow" if cur_lev < soft_cap else "red")

        # Trades / scans
        trades_ex = eng.get("trades_executed") if eng else None
        scans_p = eng.get("scans_processed") if eng else None
        bar_idx = eng.get("bar_idx") if eng else None

        # Drawdown from Cat5 (safety breakdown)
        # NOTE(review): inverse-sigmoid-style estimate — presumably inverts
        # how Cat5 is computed from drawdown upstream; confirm the constants
        # (0.12, /30) against the producer before relying on this number.
        c5 = bd.get("Cat5", 1.0) if bd else 1.0
        try:
            dd_est = 0.12 + math.log(1.0/c5 - 1.0) / 30.0 if 0 < c5 < 1 else 0.0
        except Exception:
            dd_est = 0.0
        dd_c = "red" if dd_est > 0.15 else ("yellow" if dd_est > 0.08 else "green")

        # Trader up/down: heartbeat age < 30s = up
        trader_up = hb_ts and (time.time() - hb_ts) < 30
        trader_tag = "[green]● LIVE[/green]" if trader_up else "[red]● DOWN[/red]"

        # Lev bar (compact, 16 chars)
        lev_bar = _bar(min(cur_lev / abs_cap, 1.0), 14)

        self._w("#p_capital").update(
            f"[bold]CAPITAL[/bold] {trader_tag} [{cap_ac}]{cap_age}[/{cap_ac}]\n"
            f"Cap:[cyan]${live_cap:,.0f}[/cyan]"
            f" DD≈:[{dd_c}]{dd_est*100:.1f}%[/{dd_c}]\n"
            f"Pos:{_posture_markup(posture)}"
            f" Rm:[{_PC.get(posture,'dim')}]{rm_s:.3f}[/{_PC.get(posture,'dim')}]\n"
            f"Lev:[{lev_c}]{lev_bar}[/{lev_c}]{cur_lev:.2f}x"
            f" notional:${open_not:,.0f}\n"
            f"[dim]trades:{trades_ex if trades_ex is not None else '—'}"
            f" scans:{scans_p if scans_p is not None else '—'}"
            f" bar:{bar_idx if bar_idx is not None else '—'}[/dim]"
        )

        # ── PREFECT ───────────────────────────────────────────────────────────
        flows = _S.get("prefect_flows") or []
        pf_ok = _S.get("prefect_ok", False)
        pf_hdr = "[green]✓[/green]" if pf_ok else "[red]✗[/red]"
        flines = "\n".join(
            f"{_dot(f['state'])} {f['name'][:22]:<22} {f['ts']}" for f in flows[:5])
        self._w("#p_prefect").update(
            f"[bold]PREFECT[/bold] {pf_hdr}\n" + (flines or "[dim]polling…[/dim]"))

        # ── ACB ───────────────────────────────────────────────────────────────
        acb_t = _S.get("hz.acb_boost._t")
        acb_age = _age(acb_t) if acb_t else "?"
        acb_ac = _age_col(acb_t, 3600, 86400) if acb_t else "dim"
        boost = acb.get("boost", 1.0) if acb else 1.0
        beta = acb.get("beta", 0.8) if acb else 0.8
        cut = acb.get("cut", 0.0) if acb else 0.0
        boost_c = "green" if boost >= 1.5 else ("yellow" if boost >= 1.0 else "red")
        cut_c = "red" if cut > 0 else "dim"
        self._w("#p_acb").update(
            f"[bold]ACB[/bold] [{acb_ac}]{acb.get('date','?') if acb else '?'}[/{acb_ac}]\n"
            f"boost:[{boost_c}]{boost:.2f}x[/{boost_c}] β={beta:.2f}"
            f" cut:[{cut_c}]{cut:.2f}[/{cut_c}]\n"
            f"w750_thr:{acb.get('w750_threshold',0.0) if acb else 0.0:.4f}\n"
            f"[dim]dvol={acb.get('factors',{}).get('dvol_btc','?') if acb else '?'}"
            f" fng={acb.get('factors',{}).get('fng','?') if acb else '?'}[/dim]\n"
            f"[dim]age:{acb_age} cfg:{acb.get('config_used','?') if acb else '?'}[/dim]"
        )

        # ── MC-FOREWARNER — FIX: graceful when absent ─────────────────────────
        prob = float(mc.get("catastrophic_prob", 0.0)) if mc else 0.0
        env = float(mc.get("envelope_score", 0.0)) if mc else 0.0
        champ_p = mc.get("champion_probability") if mc else None
        mc_ts = mc.get("timestamp") if mc else None
        mc_warns = mc.get("warnings", []) if mc else []
        sc = _MC.get(mc_st, "dim")
        self._prob_hist.append(prob)

        # Age since last 4h run
        mc_age_str = "never run"
        if mc_ts:
            try:
                # timestamp is ISO-8601; normalise trailing Z to +00:00.
                mc_dt = datetime.fromisoformat(mc_ts.replace("Z", "+00:00"))
                age_s = (datetime.now(timezone.utc) - mc_dt).total_seconds()
                age_m = int(age_s // 60)
                mc_age_str = f"{age_m//60}h{age_m%60:02d}m ago" if age_m >= 60 else f"{age_m}m ago"
            except Exception: pass

        mc_present = bool(mc)
        self._w("#mc_title").update(
            f"[bold cyan]⚡ MC-FOREWARNER RISK MANIFOLD[/bold cyan] {TUI_VERSION}"
            + (f" [{sc}]▶ {mc_st}[/{sc}] [dim]assessed:{mc_age_str} cadence:4h[/dim]"
               if mc_present else
               " [yellow]⚠ no data yet — Prefect 4h schedule not yet run[/yellow]"
               " [dim](mc_forewarner_flow runs every 4h)[/dim]")
        )

        # Left: digits + status
        self._w("#mc_digits", Digits).update(f"{prob:.3f}")
        status_str = {"GREEN":"🟢 SAFE","ORANGE":"🟡 CAUTION","RED":"🔴 DANGER"}.get(mc_st, "⚪ N/A")
        champ_str = f"champ:{champ_p*100:.0f}%" if champ_p is not None else "champ:—"
        self._w("#mc_status").update(
            (f"[{sc}]{status_str}[/{sc}]\n[dim]cat.prob {champ_str}[/dim]\n[dim]env:{env:.3f}[/dim]")
            if mc_present else
            "[yellow]awaiting[/yellow]\n[dim]first run[/dim]\n[dim]in ~4h[/dim]"
        )

        # Center bars
        # Each tuple drives one ProgressBar: value, class-toggle thresholds,
        # which CSS class each threshold band gets, label markup, progress %.
        for bar_id, val, lo_thr, hi_thr, lo_cls, hi_cls, label, fmt in [
            ("mc_prob_bar", prob, 0.10, 0.30, "-warning", "-danger",
             f"[dim]cat.prob[/dim] [{sc}]{prob:.4f}[/{sc}] [green]<0.10 OK[/green] [yellow]<0.30 WARN[/yellow] [red]≥0.30 CRIT[/red]",
             int(prob * 100)),
            ("mc_env_bar", env, 0.40, 0.70, "-danger", "-warning",
             f"[dim]env.score[/dim] [green]{env:.4f}[/green] [red]<0.40 DANGER[/red] [yellow]<0.70 CAUTION[/yellow] [green]≥0.70 SAFE[/green]",
             int(env * 100)),
        ]:
            pb = self._w(f"#{bar_id}", ProgressBar)
            pb.progress = fmt
            pb.remove_class("-danger", "-warning")
            if val < lo_thr: pb.add_class(lo_cls)
            elif val < hi_thr: pb.add_class(hi_cls)
            self._w(f"#{bar_id.replace('_bar','_label')}").update(label)

        # champion_probability bar
        chp_val = champ_p if champ_p is not None else 0.0
        cb = self._w("#mc_champ_bar", ProgressBar)
        cb.progress = int(chp_val * 100)
        cb.remove_class("-danger", "-warning")
        if chp_val < 0.30: cb.add_class("-danger")
        elif chp_val < 0.60: cb.add_class("-warning")
        self._w("#mc_champ_label").update(
            f"[dim]champ.prob[/dim] "
            + (f"[green]{champ_p*100:.1f}%[/green]" if champ_p is not None else "[dim]—[/dim]")
            + " [green]>60% GOOD[/green] [yellow]>30% MARGINAL[/yellow] [red]<30% RISK[/red]"
        )

        # Live performance tier
        cur_cap = float(cap.get("capital", 0.0)) if cap else 0.0
        if cur_cap > 0:
            # Latch session start and running peak for ROI / drawdown.
            if self._session_start_cap is None: self._session_start_cap = cur_cap
            if self._cap_peak is None or cur_cap > self._cap_peak: self._cap_peak = cur_cap
        live_roi = ((cur_cap - self._session_start_cap) / self._session_start_cap
                    if cur_cap > 0 and self._session_start_cap else None)
        live_dd = ((self._cap_peak - cur_cap) / self._cap_peak
                   if cur_cap > 0 and self._cap_peak and cur_cap < self._cap_peak else None)
        pnl_blue = _S.get("hz.pnl_blue") or {}
        def _pct(v):
            # Signed percentage or em-dash when unknown.
            return f"{v*100:+.1f}%" if v is not None else "—"
        def _lm(k, fmt="{:.3f}"):
            # Formatted pnl_blue metric or em-dash when unknown.
            v = pnl_blue.get(k); return fmt.format(v) if v is not None else "—"
        roi_c = "green" if live_roi and live_roi > 0 else ("red" if live_roi and live_roi < -0.10 else "yellow")
        dd_c2 = "red" if live_dd and live_dd > 0.20 else ("yellow" if live_dd and live_dd > 0.08 else "green")
        self._w("#mc_live").update(
            f"[dim]ROI[/dim] [{roi_c}]{_pct(live_roi):>8}[/{roi_c}]"
            f" [dim]champ gate:>+30% crit:<-30%[/dim]\n"
            f"[dim]DD [/dim] [{dd_c2}]{_pct(live_dd):>8}[/{dd_c2}]"
            f" [dim]champ gate:<20% crit:>40%[/dim]\n"
            f"[dim]WR:{_lm('win_rate','{:.1%}')} PF:{_lm('profit_factor','{:.2f}')}"
            f" Sh:{_lm('sharpe','{:.2f}')} Cal:{_lm('calmar','{:.2f}')}[/dim]\n"
            f"[dim]cap:${cur_cap:,.0f} start:${self._session_start_cap:,.0f} "
            f"trades:{eng.get('trades_executed','—')}[/dim]"
            if cur_cap > 0 and self._session_start_cap else
            "[dim]awaiting capital data…[/dim]"
        )

        # Right: sparklines + legend
        self._w("#mc_spark_lbl").update(
            f"[dim]cat.prob history [{min(self._prob_hist):.3f}–{max(self._prob_hist):.3f}][/dim]")
        self._w("#mc_spark", Sparkline).data = list(self._prob_hist)
        mae_list = list(self._mae_deque)
        self._w("#mc_mae_lbl").update(f"[dim]MAE hist (n={len(mae_list)})[/dim]")
        self._w("#mc_mae_spark", Sparkline).data = mae_list[-40:] if mae_list else [0.0]
        warn_str = ("\n[yellow]⚠ " + mc_warns[0] + "[/yellow]") if mc_warns else ""
        self._w("#mc_legend").update(
            "[bold]MC THRESHOLDS[/bold]\n"
            "[green]GREEN[/green] cat < 0.10\n"
            "[yellow]ORANGE[/yellow] cat < 0.30\n"
            "[red]RED[/red] cat ≥ 0.30\n"
            "[dim]DD gate: <20%[/dim]\n"
            "[dim]DD crit: >40%[/dim]" + warn_str
        )

        # ── TEST FOOTER — schema: {passed, total, status: PASS|FAIL|N/A} ────────
        if self._test_vis:
            tr = self._read_json(_TEST_JSON) or {}
            run_at = tr.get("_run_at", "never")
            cats = [
                ("data_integrity", "data"),
                ("finance_fuzz", "fuzz"),
                ("signal_fill", "signal"),
                ("degradation", "degrad"),
                ("actor", "actor"),
            ]
            def _badge(key, short):
                # One coloured pass/fail badge per test category.
                info = tr.get(key, {})
                if not info:
                    return f"[dim]{short}:n/a[/dim]"
                status = info.get("status", "N/A")
                passed = info.get("passed")
                total = info.get("total")
                if status == "N/A" or passed is None:
                    return f"[dim]{short}:N/A[/dim]"
                col = "green" if status == "PASS" else "red"
                return f"[{col}]{short}:{passed}/{total}[/{col}]"
            badges = " ".join(_badge(k, s) for k, s in cats)
            # NOTE(review): the footer text names dolphin_tui_v6.py — stale
            # relative to other version strings in this module; confirm.
            self._w("#test_footer").update(
                f"[bold dim]TESTS[/bold dim] [dim]last run: {run_at}[/dim]"
                f" [dim]t=toggle r=reload[/dim]\n"
                f"{badges}\n"
                f"[dim]file: run_logs/test_results_latest.json "
                f"API: write_test_results() in dolphin_tui_v6.py[/dim]"
            )
        else:
            self._w("#test_footer").update("")

        # ── LOG ───────────────────────────────────────────────────────────────
        if self._log_vis:
            self._w("#p_log").update(
                f"[bold]LOG[/bold] (l=hide)\n"
                f"[dim]{now_str}[/dim] scan=#{scan_no} vel={vel_div:+.5f}\n"
                f"hz_err:{_S.get('hz_err','none')} pf_err:{_S.get('prefect_err','none')}\n"
                f"[dim]state keys:{len(_S._d)} safety_live:{safety_live}[/dim]"
            )

    def action_force_refresh(self) -> None: self._update()
    def action_toggle_log(self) -> None:
        # Toggle the log row's visibility (CSS hides it by default).
        self._log_vis = not self._log_vis
        self.query_one("#log_row").display = self._log_vis
    def action_toggle_tests(self) -> None:
        self._test_vis = not self._test_vis; self._update()

    def _w(self, selector, widget_type=Static):
        """Shorthand for query_one with a default widget type of Static."""
        return self.query_one(selector, widget_type)

    @staticmethod
    def _read_json(path):
        """Best-effort JSON file read; None on any error (missing, malformed)."""
        try: return json.loads(path.read_text())
        except Exception: return None
|
||||
|
||||
|
||||
def write_test_results(results: dict):
    """
    Update the TUI test footer. Called by test scripts / CI / conftest.py.

    Merges *results* into run_logs/test_results_latest.json, preserving any
    categories already on disk, and stamps "_run_at" with the current UTC
    time.

    Schema:
        {
          "_run_at": "auto-injected",
          "data_integrity": {"passed": 15, "total": 15, "status": "PASS"},
          "finance_fuzz":   {"passed": null, "total": null, "status": "N/A"},
          ...
        }
    status: "PASS" | "FAIL" | "N/A"

    Example (conftest.py):
        import sys; sys.path.insert(0, "/mnt/dolphinng5_predict/Observability/TUI")
        from this TUI module import write_test_results
        write_test_results({"data_integrity": {"passed": 15, "total": 15, "status": "PASS"}})
    """
    _TEST_JSON.parent.mkdir(parents=True, exist_ok=True)
    # Start from whatever is already on disk so missing categories survive.
    merged = {}
    try:
        merged = json.loads(_TEST_JSON.read_text())
    except Exception:
        pass
    merged.update(results)
    merged["_run_at"] = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S")
    _TEST_JSON.write_text(json.dumps(merged, indent=2))
|
||||
|
||||
|
||||
# Entry point: launch the Textual app when executed directly.
if __name__ == "__main__":
    DolphinTUI().run()
|
||||
958
Observability/TUI/dolphin_tui_v9.py
Executable file
958
Observability/TUI/dolphin_tui_v9.py
Executable file
@@ -0,0 +1,958 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
DOLPHIN TUI v9
|
||||
==============
|
||||
Fixes vs v3:
|
||||
• SYS HEALTH: tdr/scb labels expanded to full service names
|
||||
• ALPHA ENGINE: falls back to engine_snapshot when DOLPHIN_SAFETY is empty
|
||||
• MC-FOREWARNER: graceful "not yet run" display; shows last-run age; no crash on absent key
|
||||
• Version number shown in header after MC status
|
||||
|
||||
Run: source /home/dolphin/siloqy_env/bin/activate && python dolphin_tui_v9.py
|
||||
Keys: q=quit r=force-refresh l=toggle log t=toggle test footer
|
||||
"""
|
||||
|
||||
# ── stdlib ────────────────────────────────────────────────────────────────────
|
||||
import asyncio, json, math, threading, time
|
||||
from collections import deque
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
# ── third-party ───────────────────────────────────────────────────────────────
|
||||
try:
|
||||
from textual.app import App, ComposeResult
|
||||
from textual.containers import Horizontal, Vertical
|
||||
from textual.widgets import Digits, ProgressBar, Sparkline, Static
|
||||
except ImportError as e:
|
||||
raise SystemExit(f"textual not found — activate siloqy_env: {e}")
|
||||
|
||||
try:
|
||||
import hazelcast
|
||||
_HZ_OK = True
|
||||
except ImportError:
|
||||
_HZ_OK = False
|
||||
|
||||
import urllib.request as _urlreq
|
||||
|
||||
# ClickHouse HTTP interface used by the background pollers below.
_CH_URL = "http://localhost:8123/"
# NOTE(review): credentials are hard-coded; consider sourcing from env/config.
_CH_HEADERS = {"X-ClickHouse-User": "dolphin", "X-ClickHouse-Key": "dolphin_ch_2026"}
||||
|
||||
def _ch_q(sql: str) -> list:
|
||||
try:
|
||||
body = (sql + "\nFORMAT JSONEachRow").encode()
|
||||
req = _urlreq.Request(_CH_URL, data=body, method="POST")
|
||||
for k, v in _CH_HEADERS.items(): req.add_header(k, v)
|
||||
resp = _urlreq.urlopen(req, timeout=5)
|
||||
return [json.loads(l) for l in resp.read().decode().strip().split("\n") if l]
|
||||
except Exception:
|
||||
return []
|
||||
|
||||
def _start_trades_poll():
    """Background thread: poll CH every 30s for recent trades and AE shadow exits."""
    def _run():
        # Daemon loop: runs until process exit; each query is independently
        # best-effort so one failing table never starves the other feed.
        while True:
            try:
                today = datetime.now(timezone.utc).strftime("%Y-%m-%d")
                rows = _ch_q(f"""
                    SELECT asset, pnl_pct, exit_reason, bars_held, strategy
                    FROM dolphin.trade_events
                    WHERE date = '{today}'
                    ORDER BY ts DESC LIMIT 8
                """)
                _S.put("ch.recent_trades", rows)
            except Exception:
                # Best-effort: stale data simply stays in _S until next cycle.
                pass
            try:
                # CLOSED rows = one row per trade at real close, with actual_exit + p_cont at that moment
                ae_rows = _ch_q(f"""
                    SELECT asset, action, actual_exit, p_cont, mae_norm, mfe_norm, tau_norm, pnl_pct
                    FROM dolphin.adaptive_exit_shadow
                    WHERE action = 'CLOSED' AND ts_day = '{today}'
                    ORDER BY ts DESC LIMIT 8
                """)
                _S.put("ch.ae_shadow_exits", ae_rows)
            except Exception:
                pass
            time.sleep(30)
    # daemon=True so the poller never blocks interpreter shutdown.
    threading.Thread(target=_run, daemon=True, name="ch-trades-poll").start()
|
||||
|
||||
def _start_bucket_poll():
    """Background thread: poll CH every 60s for per-bucket trade performance."""
    def _run():
        while True:
            try:
                # Aggregate count / wins / avg PnL per AE bucket over all real
                # closes, excluding administrative exits that are not genuine
                # trade outcomes.
                rows = _ch_q("""
                    SELECT
                        bucket_id,
                        count() AS n,
                        countIf(pnl_pct > 0) AS wins,
                        avg(pnl_pct) AS avg_pnl
                    FROM dolphin.adaptive_exit_shadow
                    WHERE action = 'CLOSED'
                      AND actual_exit NOT IN ('HIBERNATE_HALT', 'SUBDAY_ACB_NORMALIZATION')
                    GROUP BY bucket_id
                    ORDER BY bucket_id
                """)
                _S.put("ch.bucket_perf", rows)
            except Exception:
                pass  # best-effort — retry next cycle
            time.sleep(60)
    threading.Thread(target=_run, daemon=True, name="ch-bucket-poll").start()
|
||||
|
||||
TUI_VERSION = "TUI v9"

# On-disk fallbacks, read when the corresponding Hazelcast keys are absent.
_META_JSON = Path("/mnt/dolphinng5_predict/run_logs/meta_health.json")
_CAPITAL_JSON = Path("/tmp/dolphin_capital_checkpoint.json")
# Path relative to this file: Observability/TUI/ → ../../run_logs/
_TEST_JSON = Path(__file__).parent.parent.parent / "run_logs" / "test_results_latest.json"
# Symbols shown in the OBF panel, in display order.
_OBF_SYMS = ["BTCUSDT", "ETHUSDT", "SOLUSDT", "BNBUSDT", "XRPUSDT"]

# Colour maps: trading posture, MC-forewarner status, system-health status.
_PC = {"APEX":"green","STALKER":"yellow","TURTLE":"dark_orange","HIBERNATE":"red"}
_MC = {"GREEN":"green","ORANGE":"dark_orange","RED":"red"}
_SC = {"GREEN":"green","DEGRADED":"yellow","CRITICAL":"dark_orange","DEAD":"red"}
|
||||
|
||||
|
||||
# ── Thread-safe state ─────────────────────────────────────────────────────────
|
||||
class _State:
|
||||
def __init__(self):
|
||||
self._l = threading.Lock(); self._d: Dict[str, Any] = {}
|
||||
def put(self, k, v):
|
||||
with self._l: self._d[k] = v
|
||||
def get(self, k, default=None):
|
||||
with self._l: return self._d.get(k, default)
|
||||
def update(self, m):
|
||||
with self._l: self._d.update(m)
|
||||
|
||||
# Singleton store shared by the listener/poller threads and the UI refresh loop.
_S = _State()
|
||||
|
||||
def _ingest(key, raw):
    """Decode JSON payload *raw* and stash it in _S under ``hz.<key>``.

    A receipt timestamp is stored alongside under ``hz.<key>._t`` so panels
    can colour-code staleness. Empty payloads and malformed JSON are ignored
    silently (the Hazelcast feed is best-effort).
    """
    if not raw:
        return
    try:
        decoded = json.loads(raw)
        received_at = time.time()
        _S.update({f"hz.{key}": decoded, f"hz.{key}._t": received_at})
    except Exception:
        pass
|
||||
|
||||
def start_hz_listener(on_scan=None):
    """Start a daemon thread that mirrors Hazelcast DOLPHIN_* maps into _S.

    Seeds initial values, attaches entry listeners for live updates, and
    re-polls key entries every 30s as a safety net. *on_scan* (optional
    callable) is invoked after each new ``latest_eigen_scan`` entry.
    No-op (hz_up=False) when the hazelcast package is unavailable.
    """
    if not _HZ_OK:
        _S.put("hz_up", False); return
    def _run():
        # Outer loop reconnects forever; any failure sleeps 10s and retries.
        while True:
            try:
                _S.put("hz_up", False)
                client = hazelcast.HazelcastClient(
                    cluster_name="dolphin", cluster_members=["localhost:5701"],
                    connection_timeout=5.0)
                _S.put("hz_up", True)
                # DOLPHIN_FEATURES: multi-key map, one listener for all keys.
                fm_nb = client.get_map("DOLPHIN_FEATURES")
                fm = fm_nb.blocking()
                # Seed current values before the listener attaches.
                for k in ("latest_eigen_scan","exf_latest","acb_boost",
                          "obf_universe_latest","mc_forewarner_latest"):
                    _ingest(k, fm.get(k))
                def _f(e):
                    _ingest(e.key, e.value)
                    if e.key == "latest_eigen_scan" and on_scan:
                        try: on_scan()
                        except Exception: pass
                fm_nb.add_entry_listener(include_value=True, updated=_f, added=_f)
                # Single-key maps: seed + listener each.
                for map_name, key, state_key in [
                    ("DOLPHIN_META_HEALTH", "latest", "meta_health"),
                    ("DOLPHIN_SAFETY", "latest", "safety"),
                    ("DOLPHIN_HEARTBEAT", "nautilus_flow_heartbeat", "heartbeat"),
                ]:
                    m_nb = client.get_map(map_name)
                    _ingest(state_key, m_nb.blocking().get(key))
                    # Defaults bind the loop variables now — avoids the
                    # late-binding-closure pitfall.
                    def _cb(e, sk=state_key, ek=key):
                        if e.key == ek: _ingest(sk, e.value)
                    m_nb.add_entry_listener(include_value=True, updated=_cb, added=_cb)
                # DOLPHIN_STATE_BLUE: capital checkpoint + engine snapshot.
                stm_nb = client.get_map("DOLPHIN_STATE_BLUE")
                stm = stm_nb.blocking()
                _ingest("capital", stm.get("capital_checkpoint"))
                _ingest("engine_snapshot", stm.get("engine_snapshot"))
                def _cap(e):
                    if e.key == "capital_checkpoint": _ingest("capital", e.value)
                    elif e.key == "engine_snapshot": _ingest("engine_snapshot", e.value)
                stm_nb.add_entry_listener(include_value=True, updated=_cap, added=_cap)
                # DOLPHIN_PNL_BLUE may not exist yet — tolerate failure.
                try:
                    pm_nb = client.get_map("DOLPHIN_PNL_BLUE")
                    _ingest("pnl_blue", pm_nb.blocking().get("session_perf"))
                    def _pnl(e):
                        if e.key == "session_perf": _ingest("pnl_blue", e.value)
                    pm_nb.add_entry_listener(include_value=True, updated=_pnl, added=_pnl)
                except Exception: pass
                # Keep-alive loop: break (→ reconnect) when the client dies.
                _poll_ctr = 0
                while True:
                    time.sleep(5)
                    if not getattr(client.lifecycle_service, "is_running", lambda: True)():
                        break
                    # Re-poll key maps every 30s — catches missed listener events
                    # after HZ restarts or reconnects
                    _poll_ctr += 1
                    if _poll_ctr % 6 == 0:
                        try:
                            _ingest("safety", client.get_map("DOLPHIN_SAFETY").blocking().get("latest"))
                            _ingest("capital", client.get_map("DOLPHIN_STATE_BLUE").blocking().get("capital_checkpoint"))
                            _ingest("engine_snapshot", client.get_map("DOLPHIN_STATE_BLUE").blocking().get("engine_snapshot"))
                            _ingest("heartbeat", client.get_map("DOLPHIN_HEARTBEAT").blocking().get("nautilus_flow_heartbeat"))
                        except Exception:
                            pass # non-fatal — listeners may still work
            except Exception as e:
                _S.put("hz_up", False); _S.put("hz_err", str(e)); time.sleep(10)
    threading.Thread(target=_run, daemon=True, name="hz-listener").start()
|
||||
|
||||
async def prefect_poll_loop():
    """Poll the Prefect API every 60s for the latest run of each flow.

    Stores up to 8 display rows under ``prefect_flows`` in _S, plus
    ``prefect_ok`` / ``prefect_err`` status keys.
    """
    while True:
        try:
            # Imported lazily inside the loop so the TUI still starts when
            # Prefect is not installed (failure just sets prefect_ok=False).
            from prefect.client.orchestration import get_client
            from prefect.client.schemas.sorting import FlowRunSort
            async with get_client() as pc:
                runs = await pc.read_flow_runs(limit=20, sort=FlowRunSort.START_TIME_DESC)
                seen: Dict[str, Any] = {}
                fid_to_name: Dict[str, str] = {}
                # Runs arrive newest-first, so the first run per flow_id wins.
                for r in runs:
                    fid = str(r.flow_id)
                    if fid not in seen: seen[fid] = r
                rows = []
                for fid, r in seen.items():
                    if fid not in fid_to_name:
                        try:
                            f = await pc.read_flow(r.flow_id)
                            fid_to_name[fid] = f.name
                        except Exception: fid_to_name[fid] = fid[:8]
                    rows.append({"name": fid_to_name[fid],
                                 "state": r.state_name or "?",
                                 "ts": r.start_time.strftime("%m-%d %H:%M") if r.start_time else "--"})
                _S.put("prefect_flows", rows[:8]); _S.put("prefect_ok", True)
        except Exception as e:
            _S.put("prefect_ok", False); _S.put("prefect_err", str(e)[:60])
        await asyncio.sleep(60)
|
||||
|
||||
# ── Helpers ───────────────────────────────────────────────────────────────────
|
||||
def _age(ts):
|
||||
if not ts: return "?"
|
||||
s = time.time() - ts
|
||||
if s < 0: return "0s"
|
||||
if s < 60: return f"{s:.0f}s"
|
||||
if s < 3600: return f"{s/60:.0f}m"
|
||||
return f"{s/3600:.1f}h"
|
||||
|
||||
def _age_col(ts, warn=15, dead=60):
|
||||
s = time.time() - ts if ts else 9999
|
||||
return "red" if s > dead else ("yellow" if s > warn else "green")
|
||||
|
||||
def _bar(v, width=12):
|
||||
v = max(0.0, min(1.0, v))
|
||||
f = round(v * width)
|
||||
return "█" * f + "░" * (width - f)
|
||||
|
||||
def _fmt_vel(v):
|
||||
return "---" if v is None else f"{float(v):+.5f}"
|
||||
|
||||
def _dot(state):
|
||||
s = (state or "").upper()
|
||||
if s == "COMPLETED": return "[green]●[/green]"
|
||||
if s == "RUNNING": return "[cyan]●[/cyan]"
|
||||
if s in ("FAILED","CRASHED","TIMEDOUT"): return "[red]●[/red]"
|
||||
if s == "CANCELLED": return "[dim]●[/dim]"
|
||||
if s == "PENDING": return "[yellow]●[/yellow]"
|
||||
return "[dim]◌[/dim]"
|
||||
|
||||
def _posture_markup(p):
    """Wrap posture name *p* in its _PC colour tag ('dim' when unknown)."""
    colour = _PC.get(p, "dim")
    return f"[{colour}]{p}[/{colour}]"
|
||||
|
||||
def _col(v, c): return f"[{c}]{v}[/{c}]"
|
||||
|
||||
def _eigen_from_scan(scan):
    """Flatten a raw eigen-scan payload into the fields the TUI displays.

    Accepts either the wrapped form ({"result": {...}, ...}) or the bare
    result dict. Returns {} for an empty/missing scan.
    """
    if not scan: return {}
    r = scan.get("result", scan)
    mwr_raw = r.get("multi_window_results", {})
    # Window keys may arrive as ints or strings depending on serialization.
    def _td(w): return (mwr_raw.get(w) or mwr_raw.get(str(w)) or {}).get("tracking_data", {})
    def _rs(w): return (mwr_raw.get(w) or mwr_raw.get(str(w)) or {}).get("regime_signals", {})
    # NOTE(review): `or` treats a legitimate 0.0 velocity the same as a
    # missing one, so flat-market zeros fall through to the top-level
    # w*_velocity fallbacks (which also default to 0.0) — confirm intended.
    v50 = _td(50).get("lambda_max_velocity") or scan.get("w50_velocity", 0.0)
    v150 = _td(150).get("lambda_max_velocity") or scan.get("w150_velocity", 0.0)
    v300 = _td(300).get("lambda_max_velocity") or scan.get("w300_velocity", 0.0)
    v750 = _td(750).get("lambda_max_velocity") or scan.get("w750_velocity", 0.0)
    # Velocity divergence defaults to fast-minus-medium window when absent.
    vel_div = scan.get("vel_div", float(v50 or 0) - float(v150 or 0))
    inst_avg = sum(_rs(w).get("instability_score", 0.0) for w in (50,150,300,750)) / 4
    bt_price = (r.get("pricing_data", {}) or {}).get("current_prices", {}).get("BTCUSDT")
    return {
        "scan_number": scan.get("scan_number", 0),
        "timestamp": scan.get("timestamp", 0),
        "vel_div": float(vel_div or 0),
        "v50": float(v50 or 0), "v150": float(v150 or 0),
        "v300": float(v300 or 0), "v750": float(v750 or 0),
        "inst_avg": float(inst_avg or 0),
        "btc_price": float(bt_price) if bt_price else None,
        "regime": r.get("regime", r.get("sentiment", "?")),
        "version": scan.get("version", "?"),
    }
|
||||
|
||||
# ── CSS ───────────────────────────────────────────────────────────────────────
|
||||
_CSS = """
|
||||
Screen { background: #0a0a0a; color: #d4d4d4; }
|
||||
#header { height: 2; background: #111; border-bottom: solid #333; padding: 0 1; }
|
||||
#trader_row { height: 5; }
|
||||
#top_row { height: 9; }
|
||||
#mid_row { height: 9; }
|
||||
#bot_row { height: 7; }
|
||||
#log_row { height: 5; display: none; }
|
||||
#mc_outer { height: 16; border: solid #224; background: #060616; }
|
||||
#mc_title { height: 1; padding: 0 1; }
|
||||
#mc_body { height: 15; }
|
||||
#mc_left { width: 18; padding: 0 1; }
|
||||
#mc_center { width: 1fr; padding: 0 1; }
|
||||
#mc_right { width: 30; padding: 0 1; }
|
||||
#mc_prob_label { height: 1; }
|
||||
#mc_prob_bar { height: 1; }
|
||||
#mc_env_label { height: 1; }
|
||||
#mc_env_bar { height: 1; }
|
||||
#mc_champ_label{ height: 1; }
|
||||
#mc_champ_bar { height: 1; }
|
||||
#mc_live { height: 8; }
|
||||
#mc_spark_lbl { height: 1; }
|
||||
#mc_spark { height: 2; }
|
||||
#mc_mae_lbl { height: 1; }
|
||||
#mc_mae_spark { height: 2; }
|
||||
#mc_digits { height: 3; }
|
||||
#mc_status { height: 3; }
|
||||
#mc_legend { height: 6; }
|
||||
#trades_footer { height: 5; background: #060a10; border-top: solid #003820; padding: 0 1; }
|
||||
#bucket_footer { height: 5; background: #080810; border-top: solid #002040; padding: 0 1; }
|
||||
#test_footer { height: 3; background: #101010; border-top: solid #2a2a2a; padding: 0 1; }
|
||||
Static.panel { border: solid #333; padding: 0 1; height: 100%; }
|
||||
#p_trader { width: 1fr; border: solid #006650; }
|
||||
#p_health { width: 1fr; }
|
||||
#p_alpha { width: 1fr; }
|
||||
#p_scan { width: 1fr; }
|
||||
#p_extf { width: 1fr; }
|
||||
#p_obf { width: 1fr; }
|
||||
#p_capital { width: 1fr; }
|
||||
#p_prefect { width: 1fr; }
|
||||
#p_acb { width: 1fr; }
|
||||
#p_log { width: 1fr; border: solid #333; padding: 0 1; }
|
||||
ProgressBar > .bar--bar { color: $success; }
|
||||
ProgressBar > .bar--complete { color: $success; }
|
||||
ProgressBar.-danger > .bar--bar { color: $error; }
|
||||
ProgressBar.-warning > .bar--bar { color: $warning; }
|
||||
"""
|
||||
|
||||
|
||||
# ── App ───────────────────────────────────────────────────────────────────────
|
||||
class DolphinTUI(App):
|
||||
CSS = _CSS
|
||||
BINDINGS = [("q","quit","Quit"),("r","force_refresh","Refresh"),
|
||||
("l","toggle_log","Log"),("t","toggle_tests","Tests")]
|
||||
_log_vis = False; _test_vis = True
|
||||
_prob_hist: deque; _mae_deque: deque
|
||||
_session_start_cap: Optional[float] = None
|
||||
_cap_peak: Optional[float] = None
|
||||
|
||||
    def compose(self) -> ComposeResult:
        """Declare the static widget tree; panel contents are filled by _update()."""
        yield Static("", id="header")
        with Horizontal(id="trader_row"):
            yield Static("", classes="panel", id="p_trader")
        with Horizontal(id="top_row"):
            yield Static("", classes="panel", id="p_health")
            yield Static("", classes="panel", id="p_alpha")
            yield Static("", classes="panel", id="p_scan")
        with Horizontal(id="mid_row"):
            yield Static("", classes="panel", id="p_extf")
            yield Static("", classes="panel", id="p_obf")
            yield Static("", classes="panel", id="p_capital")
        with Horizontal(id="bot_row"):
            yield Static("", classes="panel", id="p_prefect")
            yield Static("", classes="panel", id="p_acb")
        # MC-forewarner manifold: digits/status | bars + live perf | sparklines/legend.
        with Vertical(id="mc_outer"):
            yield Static("", id="mc_title")
            with Horizontal(id="mc_body"):
                with Vertical(id="mc_left"):
                    yield Digits("0.000", id="mc_digits")
                    yield Static("", id="mc_status")
                with Vertical(id="mc_center"):
                    yield Static("", id="mc_prob_label")
                    yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_prob_bar")
                    yield Static("", id="mc_env_label")
                    yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_env_bar")
                    yield Static("", id="mc_champ_label")
                    yield ProgressBar(total=100, show_eta=False, show_percentage=False, id="mc_champ_bar")
                    yield Static("", id="mc_live")
                with Vertical(id="mc_right"):
                    yield Static("", id="mc_spark_lbl")
                    yield Sparkline([], id="mc_spark")
                    yield Static("", id="mc_mae_lbl")
                    yield Sparkline([], id="mc_mae_spark")
                    yield Static("", id="mc_legend")
        yield Static("", id="trades_footer")
        yield Static("", id="bucket_footer")
        yield Static("", id="test_footer")
        # Log row starts hidden (CSS display:none); toggled with the 'l' key.
        with Horizontal(id="log_row"):
            yield Static("", classes="panel", id="p_log")
|
||||
|
||||
    def on_mount(self) -> None:
        """Initialise history buffers, start background feeds, schedule 1 Hz refresh."""
        # Fixed-length histories feeding the MC sparklines.
        self._prob_hist = deque([0.0] * 40, maxlen=40)
        self._mae_deque = deque(maxlen=500)
        # HZ listener triggers an immediate refresh on each new eigen scan
        # (call_from_thread marshals back onto the UI thread).
        start_hz_listener(on_scan=lambda: self.call_from_thread(self._update))
        self.run_worker(prefect_poll_loop(), name="prefect-poll", exclusive=True)
        _start_trades_poll()
        _start_bucket_poll()
        self.set_interval(1.0, self._update)
        self._update()
|
||||
|
||||
def _update(self) -> None:
|
||||
now_str = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
|
||||
hz_up = _S.get("hz_up", False)
|
||||
mh = _S.get("hz.meta_health") or self._read_json(_META_JSON)
|
||||
safe = _S.get("hz.safety") or {}
|
||||
scan = _S.get("hz.latest_eigen_scan") or {}
|
||||
exf = _S.get("hz.exf_latest") or {}
|
||||
acb = _S.get("hz.acb_boost") or {}
|
||||
obf_u = _S.get("hz.obf_universe_latest") or {}
|
||||
mc = _S.get("hz.mc_forewarner_latest") or {}
|
||||
cap = _S.get("hz.capital") or self._read_json(_CAPITAL_JSON) or {}
|
||||
hb = _S.get("hz.heartbeat") or {}
|
||||
eng = _S.get("hz.engine_snapshot") or {}
|
||||
eigen = _eigen_from_scan(scan)
|
||||
|
||||
# ── posture: DOLPHIN_SAFETY first, fall back to engine_snapshot ───────
|
||||
posture = safe.get("posture") or eng.get("posture") or "?"
|
||||
# ── Rm / breakdown: DOLPHIN_SAFETY first, fall back to zeros ─────────
|
||||
rm_s = float(safe.get("Rm", 0.0))
|
||||
bd = safe.get("breakdown") or {}
|
||||
# If DOLPHIN_SAFETY is empty but engine_snapshot has posture, show that
|
||||
safety_live = bool(safe.get("posture") or safe.get("Rm"))
|
||||
|
||||
rm_m = mh.get("rm_meta", 0.0) if mh else 0.0
|
||||
mhs_st = mh.get("status", "?") if mh else "?"
|
||||
sc_mhs = _SC.get(mhs_st, "dim")
|
||||
pc_col = _PC.get(posture, "dim")
|
||||
hz_tag = "[green][HZ✓][/green]" if hz_up else "[red][HZ✗][/red]"
|
||||
mc_st = mc.get("status", "N/A") if mc else "N/A"
|
||||
mc_col = _MC.get(mc_st, "dim")
|
||||
|
||||
# ── HEADER ────────────────────────────────────────────────────────────
|
||||
self._w("#header").update(
|
||||
f"[bold cyan]🐬 DOLPHIN-NAUTILUS[/bold cyan] {now_str}"
|
||||
f" {hz_tag} [{sc_mhs}]MHS:{mhs_st} {rm_m:.3f}[/{sc_mhs}]"
|
||||
f" [{pc_col}]◈{posture}[/{pc_col}]"
|
||||
f" [{mc_col}]MC:{mc_st}[/{mc_col}]"
|
||||
f" [dim]{TUI_VERSION}[/dim]\n"
|
||||
f"[dim] localhost:5701 q=quit r=refresh l=log t=tests[/dim]"
|
||||
)
|
||||
|
||||
# ── TRADER ────────────────────────────────────────────────────────────
|
||||
cap_val = float(cap.get("capital", 0)) if cap else 0.0
|
||||
hb_phase = hb.get("phase", "?") if hb else "N/A"
|
||||
hb_ts = hb.get("ts") if hb else None
|
||||
hb_age = _age(hb_ts) if hb_ts else "?"
|
||||
hb_col = _age_col(hb_ts, 30, 120) if hb_ts else "red"
|
||||
vel_div = eigen.get("vel_div", 0.0)
|
||||
vc = "green" if vel_div > 0 else ("red" if vel_div < -0.02 else "yellow")
|
||||
scan_no = eigen.get("scan_number", 0)
|
||||
btc_p = eigen.get("btc_price")
|
||||
btc_str = f"BTC:[cyan]${btc_p:,.0f}[/cyan] " if btc_p else ""
|
||||
trades_ex= eng.get("trades_executed")
|
||||
last_vd = eng.get("last_vel_div")
|
||||
self._w("#p_trader").update(
|
||||
f"[bold cyan]TRADER[/bold cyan] {_posture_markup(posture)}"
|
||||
f" phase:[{hb_col}]{hb_phase}[/{hb_col}] hb:{_col(hb_age, hb_col)}"
|
||||
f" scan:[dim]#{scan_no}[/dim] {btc_str}\n"
|
||||
f" vel_div:[{vc}]{vel_div:+.5f}[/{vc}] thr:[dim]-0.02000[/dim]"
|
||||
f" cap:[cyan]${cap_val:,.0f}[/cyan]"
|
||||
f" trades:{trades_ex if trades_ex is not None else '—'}\n"
|
||||
f" last_vel_div:[dim]{last_vd:+.5f}[/dim] open-pos:[dim]DOLPHIN_PNL_BLUE[/dim]"
|
||||
if last_vd is not None else
|
||||
f" vel_div:[{vc}]{vel_div:+.5f}[/{vc}] thr:[dim]-0.02000[/dim]"
|
||||
f" cap:[cyan]${cap_val:,.0f}[/cyan]"
|
||||
f" trades:{trades_ex if trades_ex is not None else '—'}\n"
|
||||
f" open-pos:[dim]DOLPHIN_PNL_BLUE (not yet wired)[/dim]"
|
||||
)
|
||||
|
||||
# ── SYS HEALTH — FIX: expand tdr/scb labels ──────────────────────────
|
||||
if mh:
|
||||
svc = mh.get("service_status", {})
|
||||
hz_ks= mh.get("hz_key_status", {})
|
||||
def _svc(nm, label):
|
||||
st = svc.get(nm, "?")
|
||||
dot = "[green]●[/green]" if st == "RUNNING" else "[red]●[/red]"
|
||||
return f"{dot}[dim]{label}[/dim]"
|
||||
def _hz_dot(nm):
|
||||
sc = hz_ks.get(nm, {}).get("score", 0)
|
||||
return "[green]●[/green]" if sc >= 0.9 else ("[yellow]●[/yellow]" if sc >= 0.5 else "[red]●[/red]")
|
||||
self._w("#p_health").update(
|
||||
f"[bold]SYS HEALTH[/bold] [{sc_mhs}]{mhs_st}[/{sc_mhs}]\n"
|
||||
f"rm:[{sc_mhs}]{rm_m:.3f}[/{sc_mhs}]"
|
||||
f" m4:{mh.get('m4_control_plane',0):.2f}"
|
||||
f" m3:{mh.get('m3_data_freshness',0):.2f}"
|
||||
f" m5:{mh.get('m5_coherence',0):.2f}\n"
|
||||
f"{_svc('dolphin_data:exf_fetcher','exf')}"
|
||||
f" {_svc('dolphin_data:acb_processor','acb')}"
|
||||
f" {_svc('dolphin_data:obf_universe','obf')}\n"
|
||||
f"{_svc('dolphin:nautilus_trader','trader')}"
|
||||
f" {_svc('dolphin:scan_bridge','scan-bridge')}\n"
|
||||
f"[dim]hz: exf{_hz_dot('exf_latest')}"
|
||||
f" scan{_hz_dot('latest_eigen_scan')}"
|
||||
f" obf{_hz_dot('obf_universe')}[/dim]"
|
||||
)
|
||||
else:
|
||||
self._w("#p_health").update("[bold]SYS HEALTH[/bold]\n[dim]awaiting MHS…[/dim]")
|
||||
|
||||
# ── ALPHA ENGINE — FIX: fall back to engine_snapshot when safety empty ─
|
||||
safe_ts = _S.get("hz.safety._t")
|
||||
safe_age = _age(safe_ts) if safe_ts else "?"
|
||||
safe_ac = _age_col(safe_ts, 30, 120) if safe_ts else "red"
|
||||
def _cat(n):
|
||||
v = bd.get(f"Cat{n}", 0.0)
|
||||
c = "green" if v >= 0.9 else ("yellow" if v >= 0.6 else "red")
|
||||
return f"[{c}]{v:.2f}[/{c}]"
|
||||
if safety_live:
|
||||
self._w("#p_alpha").update(
|
||||
f"[bold]ALPHA ENGINE[/bold] {_posture_markup(posture)}\n"
|
||||
f"Rm:[{pc_col}]{_bar(rm_s,14)}[/{pc_col}]{rm_s:.3f}\n"
|
||||
f"C1:{_cat(1)} C2:{_cat(2)} C3:{_cat(3)}\n"
|
||||
f"C4:{_cat(4)} C5:{_cat(5)}"
|
||||
f" fenv:{bd.get('f_env',0):.2f} fex:{bd.get('f_exe',0):.2f}\n"
|
||||
f"[dim]age:{_col(safe_age, safe_ac)}[/dim]"
|
||||
)
|
||||
else:
|
||||
# DOLPHIN_SAFETY empty — show what we have from engine_snapshot
|
||||
bars_idx = eng.get("bar_idx", "?")
|
||||
scans_p = eng.get("scans_processed", "?")
|
||||
self._w("#p_alpha").update(
|
||||
f"[bold]ALPHA ENGINE[/bold] {_posture_markup(posture)}\n"
|
||||
f"[yellow]DOLPHIN_SAFETY empty[/yellow]\n"
|
||||
f"posture from engine_snapshot\n"
|
||||
f"bar:{bars_idx} scans:{scans_p}\n"
|
||||
f"[dim]Rm/Cat1-5: awaiting DOLPHIN_SAFETY[/dim]"
|
||||
)
|
||||
|
||||
# ── SCAN ──────────────────────────────────────────────────────────────
|
||||
scan_ac = _age_col(eigen.get("timestamp", 0), 15, 60)
|
||||
scan_age= _age(eigen.get("timestamp")) if eigen.get("timestamp") else "?"
|
||||
vi = eigen.get("inst_avg", 0)
|
||||
# Extra fields from result dict
|
||||
r_dict = scan.get("result", scan)
|
||||
regime = eigen.get("regime", r_dict.get("regime", "?"))
|
||||
bull_pct = r_dict.get("bull_pct", 0.0)
|
||||
bear_pct = r_dict.get("bear_pct", 0.0)
|
||||
conf = r_dict.get("confidence", 0.0)
|
||||
bb_dist = r_dict.get("bb_dist_pct", 0.0)
|
||||
price = r_dict.get("price", eigen.get("btc_price"))
|
||||
reg_c = "green" if regime == "BULL" else ("red" if regime == "BEAR" else "yellow")
|
||||
bb_c = "red" if abs(bb_dist) > 0.05 else ("yellow" if abs(bb_dist) > 0.02 else "green")
|
||||
self._w("#p_scan").update(
|
||||
f"[bold]SCAN {eigen.get('version','?')}[/bold]"
|
||||
f" [dim]#{scan_no}[/dim] [{scan_ac}]{scan_age}[/{scan_ac}]\n"
|
||||
f"vel:[{vc}]{vel_div:+.5f}[/{vc}]"
|
||||
f" w50:[yellow]{_fmt_vel(eigen.get('v50'))}[/yellow]"
|
||||
f" w150:[dim]{_fmt_vel(eigen.get('v150'))}[/dim]\n"
|
||||
f"w300:[dim]{_fmt_vel(eigen.get('v300'))}[/dim]"
|
||||
f" w750:[dim]{_fmt_vel(eigen.get('v750'))}[/dim]\n"
|
||||
f"[{reg_c}]{regime}[/{reg_c}] B:{bull_pct:.0f}% b:{bear_pct:.0f}%"
|
||||
f" conf:{conf:.2f} inst:{vi:.4f}\n"
|
||||
f"BB:[{bb_c}]{bb_dist:+.4f}[/{bb_c}]"
|
||||
+ (f" ${price:,.0f}" if price else "")
|
||||
)
|
||||
|
||||
# ── ExtF ──────────────────────────────────────────────────────────────
|
||||
exf_t = _S.get("hz.exf_latest._t")
|
||||
exf_age = _age(exf_t) if exf_t else "?"
|
||||
exf_ac = _age_col(exf_t, 30, 120) if exf_t else "red"
|
||||
f_btc = exf.get("funding_btc"); dvol = exf.get("dvol_btc")
|
||||
fng = exf.get("fng"); taker = exf.get("taker")
|
||||
ls_btc = exf.get("ls_btc"); vix = exf.get("vix")
|
||||
f_eth = exf.get("funding_eth"); ls_eth = exf.get("ls_eth")
|
||||
oi_btc = exf.get("oi_btc"); oi_eth = exf.get("oi_eth")
|
||||
fdb_btc = exf.get("fund_dbt_btc")
|
||||
ok_cnt = exf.get("_ok_count", 0)
|
||||
dvol_c = "red" if dvol and dvol > 70 else ("yellow" if dvol and dvol > 50 else "green")
|
||||
fng_c = "red" if fng and fng < 25 else ("yellow" if fng and fng < 45 else "green")
|
||||
taker_c = "red" if taker and taker > 0.7 else ("yellow" if taker and taker > 0.55 else "green")
|
||||
def _ef(v, fmt=".5f"): return f"{v:{fmt}}" if v is not None else "?"
|
||||
if exf:
|
||||
self._w("#p_extf").update(
|
||||
f"[bold]ExtF[/bold] [{exf_ac}]{ok_cnt}/9 {exf_age}[/{exf_ac}]\n"
|
||||
f"fBTC:[cyan]{_ef(f_btc)}[/cyan] fETH:[dim]{_ef(f_eth)}[/dim]"
|
||||
f" dvol:[{dvol_c}]{_ef(dvol,'.1f')}[/{dvol_c}]\n"
|
||||
f"fng:[{fng_c}]{int(fng) if fng else '?'}[/{fng_c}]"
|
||||
f" tkr:[{taker_c}]{_ef(taker,'.3f')}[/{taker_c}]"
|
||||
f" vix:{_ef(vix,'.1f')}\n"
|
||||
f"ls_b:{_ef(ls_btc,'.3f')} ls_e:{_ef(ls_eth,'.3f')}"
|
||||
f" fdb:{_ef(fdb_btc,'.5f')}\n"
|
||||
f"oi_b:{_ef(oi_btc,'.0f')} oi_e:{_ef(oi_eth,'.0f')}"
|
||||
f" acb✓:{exf.get('_acb_ready','?')}"
|
||||
)
|
||||
else:
|
||||
self._w("#p_extf").update("[bold]ExtF[/bold]\n[dim]no data[/dim]")
|
||||
|
||||
# ── OBF ───────────────────────────────────────────────────────────────
|
||||
obf_t = _S.get("hz.obf_universe_latest._t")
|
||||
obf_age = _age(obf_t) if obf_t else "?"
|
||||
obf_ac = _age_col(obf_t, 30, 120) if obf_t else "red"
|
||||
n_assets= obf_u.get("_n_assets", 0) if obf_u else 0
|
||||
lines = [f"[bold]OBF[/bold] [{obf_ac}]n={n_assets} {obf_age}[/{obf_ac}]"]
|
||||
for sym in _OBF_SYMS:
|
||||
if not obf_u: break
|
||||
a = obf_u.get(sym)
|
||||
if not a: continue
|
||||
imb = float(a.get("imbalance", 0))
|
||||
fp = float(a.get("fill_probability", 0))
|
||||
dq = float(a.get("depth_quality", 0))
|
||||
imb_c = "green" if imb > 0.1 else ("red" if imb < -0.1 else "yellow")
|
||||
lines.append(f"{sym[:3]} [{imb_c}]{imb:+.2f}[/{imb_c}] fp:{fp:.2f} dq:{dq:.2f}")
|
||||
self._w("#p_obf").update("\n".join(lines[:6]))
|
||||
|
||||
# ── CAPITAL — sourced from engine_snapshot + capital_checkpoint ─────────
|
||||
# engine_snapshot: capital, posture, trades_executed, scans_processed,
|
||||
# bar_idx, open_notional, current_leverage,
|
||||
# leverage_soft_cap, leverage_abs_cap, timestamp
|
||||
# capital_checkpoint: capital, ts (written more frequently)
|
||||
cap_t = _S.get("hz.capital._t")
|
||||
cap_ac = _age_col(cap_t, 60, 300) if cap_t else "dim"
|
||||
cap_age = _age(cap_t) if cap_t else "?"
|
||||
eng_ts = eng.get("timestamp")
|
||||
|
||||
# Capital: prefer engine_snapshot (most recent), fall back to checkpoint
|
||||
eng_cap = float(eng.get("capital", 0.0)) if eng else 0.0
|
||||
chk_cap = float(cap.get("capital", 0.0)) if cap else 0.0
|
||||
live_cap = eng_cap if eng_cap > 0 else chk_cap
|
||||
|
||||
# Leverage
|
||||
cur_lev = float(eng.get("current_leverage", 0.0)) if eng else 0.0
|
||||
soft_cap = float(eng.get("leverage_soft_cap", 8.0)) if eng else 8.0
|
||||
abs_cap = float(eng.get("leverage_abs_cap", 9.0)) if eng else 9.0
|
||||
open_not = float(eng.get("open_notional", 0.0)) if eng else 0.0
|
||||
lev_c = "green" if cur_lev < 3 else ("yellow" if cur_lev < soft_cap else "red")
|
||||
|
||||
# Trades / scans
|
||||
trades_ex = eng.get("trades_executed") if eng else None
|
||||
scans_p = eng.get("scans_processed") if eng else None
|
||||
bar_idx = eng.get("bar_idx") if eng else None
|
||||
|
||||
# Drawdown from Cat5 (safety breakdown)
|
||||
c5 = bd.get("Cat5", 1.0) if bd else 1.0
|
||||
try:
|
||||
dd_est = 0.12 + math.log(1.0/c5 - 1.0) / 30.0 if 0 < c5 < 1 else 0.0
|
||||
except Exception:
|
||||
dd_est = 0.0
|
||||
dd_c = "red" if dd_est > 0.15 else ("yellow" if dd_est > 0.08 else "green")
|
||||
|
||||
# Trader up/down: heartbeat age < 30s = up
|
||||
trader_up = hb_ts and (time.time() - hb_ts) < 30
|
||||
trader_tag = "[green]● LIVE[/green]" if trader_up else "[red]● DOWN[/red]"
|
||||
|
||||
# Lev bar (compact, 16 chars)
|
||||
lev_bar = _bar(min(cur_lev / abs_cap, 1.0), 14)
|
||||
|
||||
self._w("#p_capital").update(
|
||||
f"[bold]CAPITAL[/bold] {trader_tag} [{cap_ac}]{cap_age}[/{cap_ac}]\n"
|
||||
f"Cap:[cyan]${live_cap:,.0f}[/cyan]"
|
||||
f" DD≈:[{dd_c}]{dd_est*100:.1f}%[/{dd_c}]\n"
|
||||
f"Pos:{_posture_markup(posture)}"
|
||||
f" Rm:[{_PC.get(posture,'dim')}]{rm_s:.3f}[/{_PC.get(posture,'dim')}]\n"
|
||||
f"Lev:[{lev_c}]{lev_bar}[/{lev_c}]{cur_lev:.2f}x"
|
||||
f" notional:${open_not:,.0f}\n"
|
||||
f"[dim]trades:{trades_ex if trades_ex is not None else '—'}"
|
||||
f" scans:{scans_p if scans_p is not None else '—'}"
|
||||
f" bar:{bar_idx if bar_idx is not None else '—'}[/dim]"
|
||||
)
|
||||
|
||||
# ── PREFECT ───────────────────────────────────────────────────────────
|
||||
flows = _S.get("prefect_flows") or []
|
||||
pf_ok = _S.get("prefect_ok", False)
|
||||
pf_hdr = "[green]✓[/green]" if pf_ok else "[red]✗[/red]"
|
||||
flines = "\n".join(
|
||||
f"{_dot(f['state'])} {f['name'][:22]:<22} {f['ts']}" for f in flows[:5])
|
||||
self._w("#p_prefect").update(
|
||||
f"[bold]PREFECT[/bold] {pf_hdr}\n" + (flines or "[dim]polling…[/dim]"))
|
||||
|
||||
# ── ACB ───────────────────────────────────────────────────────────────
|
||||
acb_t = _S.get("hz.acb_boost._t")
|
||||
acb_age = _age(acb_t) if acb_t else "?"
|
||||
acb_ac = _age_col(acb_t, 3600, 86400) if acb_t else "dim"
|
||||
boost = acb.get("boost", 1.0) if acb else 1.0
|
||||
beta = acb.get("beta", 0.8) if acb else 0.8
|
||||
cut = acb.get("cut", 0.0) if acb else 0.0
|
||||
boost_c = "green" if boost >= 1.5 else ("yellow" if boost >= 1.0 else "red")
|
||||
cut_c = "red" if cut > 0 else "dim"
|
||||
self._w("#p_acb").update(
|
||||
f"[bold]ACB[/bold] [{acb_ac}]{acb.get('date','?') if acb else '?'}[/{acb_ac}]\n"
|
||||
f"boost:[{boost_c}]{boost:.2f}x[/{boost_c}] β={beta:.2f}"
|
||||
f" cut:[{cut_c}]{cut:.2f}[/{cut_c}]\n"
|
||||
f"w750_thr:{acb.get('w750_threshold',0.0) if acb else 0.0:.4f}\n"
|
||||
f"[dim]dvol={acb.get('factors',{}).get('dvol_btc','?') if acb else '?'}"
|
||||
f" fng={acb.get('factors',{}).get('fng','?') if acb else '?'}[/dim]\n"
|
||||
f"[dim]age:{acb_age} cfg:{acb.get('config_used','?') if acb else '?'}[/dim]"
|
||||
)
|
||||
|
||||
# ── MC-FOREWARNER — FIX: graceful when absent ─────────────────────────
|
||||
prob = float(mc.get("catastrophic_prob", 0.0)) if mc else 0.0
|
||||
env = float(mc.get("envelope_score", 0.0)) if mc else 0.0
|
||||
champ_p = mc.get("champion_probability") if mc else None
|
||||
mc_ts = mc.get("timestamp") if mc else None
|
||||
mc_warns = mc.get("warnings", []) if mc else []
|
||||
sc = _MC.get(mc_st, "dim")
|
||||
self._prob_hist.append(prob)
|
||||
|
||||
# Age since last 4h run
|
||||
mc_age_str = "never run"
|
||||
if mc_ts:
|
||||
try:
|
||||
mc_dt = datetime.fromisoformat(mc_ts.replace("Z", "+00:00"))
|
||||
age_s = (datetime.now(timezone.utc) - mc_dt).total_seconds()
|
||||
age_m = int(age_s // 60)
|
||||
mc_age_str = f"{age_m//60}h{age_m%60:02d}m ago" if age_m >= 60 else f"{age_m}m ago"
|
||||
except Exception: pass
|
||||
|
||||
mc_present = bool(mc)
|
||||
self._w("#mc_title").update(
|
||||
f"[bold cyan]⚡ MC-FOREWARNER RISK MANIFOLD[/bold cyan] {TUI_VERSION}"
|
||||
+ (f" [{sc}]▶ {mc_st}[/{sc}] [dim]assessed:{mc_age_str} cadence:4h[/dim]"
|
||||
if mc_present else
|
||||
" [yellow]⚠ no data yet — Prefect 4h schedule not yet run[/yellow]"
|
||||
" [dim](mc_forewarner_flow runs every 4h)[/dim]")
|
||||
)
|
||||
|
||||
# Left: digits + status
|
||||
self._w("#mc_digits", Digits).update(f"{prob:.3f}")
|
||||
status_str = {"GREEN":"🟢 SAFE","ORANGE":"🟡 CAUTION","RED":"🔴 DANGER"}.get(mc_st, "⚪ N/A")
|
||||
champ_str = f"champ:{champ_p*100:.0f}%" if champ_p is not None else "champ:—"
|
||||
self._w("#mc_status").update(
|
||||
(f"[{sc}]{status_str}[/{sc}]\n[dim]cat.prob {champ_str}[/dim]\n[dim]env:{env:.3f}[/dim]")
|
||||
if mc_present else
|
||||
"[yellow]awaiting[/yellow]\n[dim]first run[/dim]\n[dim]in ~4h[/dim]"
|
||||
)
|
||||
|
||||
# Center bars
|
||||
for bar_id, val, lo_thr, hi_thr, lo_cls, hi_cls, label, fmt in [
|
||||
("mc_prob_bar", prob, 0.10, 0.30, "-warning", "-danger",
|
||||
f"[dim]cat.prob[/dim] [{sc}]{prob:.4f}[/{sc}] [green]<0.10 OK[/green] [yellow]<0.30 WARN[/yellow] [red]≥0.30 CRIT[/red]",
|
||||
int(prob * 100)),
|
||||
("mc_env_bar", env, 0.40, 0.70, "-danger", "-warning",
|
||||
f"[dim]env.score[/dim] [green]{env:.4f}[/green] [red]<0.40 DANGER[/red] [yellow]<0.70 CAUTION[/yellow] [green]≥0.70 SAFE[/green]",
|
||||
int(env * 100)),
|
||||
]:
|
||||
pb = self._w(f"#{bar_id}", ProgressBar)
|
||||
pb.progress = fmt
|
||||
pb.remove_class("-danger", "-warning")
|
||||
if val < lo_thr: pb.add_class(lo_cls)
|
||||
elif val < hi_thr: pb.add_class(hi_cls)
|
||||
self._w(f"#{bar_id.replace('_bar','_label')}").update(label)
|
||||
|
||||
# champion_probability bar
|
||||
chp_val = champ_p if champ_p is not None else 0.0
|
||||
cb = self._w("#mc_champ_bar", ProgressBar)
|
||||
cb.progress = int(chp_val * 100)
|
||||
cb.remove_class("-danger", "-warning")
|
||||
if chp_val < 0.30: cb.add_class("-danger")
|
||||
elif chp_val < 0.60: cb.add_class("-warning")
|
||||
self._w("#mc_champ_label").update(
|
||||
f"[dim]champ.prob[/dim] "
|
||||
+ (f"[green]{champ_p*100:.1f}%[/green]" if champ_p is not None else "[dim]—[/dim]")
|
||||
+ " [green]>60% GOOD[/green] [yellow]>30% MARGINAL[/yellow] [red]<30% RISK[/red]"
|
||||
)
|
||||
|
||||
# Live performance tier
|
||||
cur_cap = float(cap.get("capital", 0.0)) if cap else 0.0
|
||||
if cur_cap > 0:
|
||||
if self._session_start_cap is None: self._session_start_cap = cur_cap
|
||||
if self._cap_peak is None or cur_cap > self._cap_peak: self._cap_peak = cur_cap
|
||||
live_roi = ((cur_cap - self._session_start_cap) / self._session_start_cap
|
||||
if cur_cap > 0 and self._session_start_cap else None)
|
||||
live_dd = ((self._cap_peak - cur_cap) / self._cap_peak
|
||||
if cur_cap > 0 and self._cap_peak and cur_cap < self._cap_peak else None)
|
||||
pnl_blue = _S.get("hz.pnl_blue") or {}
|
||||
def _pct(v): return f"{v*100:+.1f}%" if v is not None else "—"
|
||||
def _lm(k, fmt="{:.3f}"):
|
||||
v = pnl_blue.get(k); return fmt.format(v) if v is not None else "—"
|
||||
roi_c = "green" if live_roi and live_roi > 0 else ("red" if live_roi and live_roi < -0.10 else "yellow")
|
||||
dd_c2 = "red" if live_dd and live_dd > 0.20 else ("yellow" if live_dd and live_dd > 0.08 else "green")
|
||||
self._w("#mc_live").update(
|
||||
f"[dim]ROI[/dim] [{roi_c}]{_pct(live_roi):>8}[/{roi_c}]"
|
||||
f" [dim]champ gate:>+30% crit:<-30%[/dim]\n"
|
||||
f"[dim]DD [/dim] [{dd_c2}]{_pct(live_dd):>8}[/{dd_c2}]"
|
||||
f" [dim]champ gate:<20% crit:>40%[/dim]\n"
|
||||
f"[dim]WR:{_lm('win_rate','{:.1%}')} PF:{_lm('profit_factor','{:.2f}')}"
|
||||
f" Sh:{_lm('sharpe','{:.2f}')} Cal:{_lm('calmar','{:.2f}')}[/dim]\n"
|
||||
f"[dim]cap:${cur_cap:,.0f} start:${self._session_start_cap:,.0f} "
|
||||
f"trades:{eng.get('trades_executed','—')}[/dim]"
|
||||
if cur_cap > 0 and self._session_start_cap else
|
||||
"[dim]awaiting capital data…[/dim]"
|
||||
)
|
||||
|
||||
# Right: sparklines + legend
|
||||
self._w("#mc_spark_lbl").update(
|
||||
f"[dim]cat.prob history [{min(self._prob_hist):.3f}–{max(self._prob_hist):.3f}][/dim]")
|
||||
self._w("#mc_spark", Sparkline).data = list(self._prob_hist)
|
||||
mae_list = list(self._mae_deque)
|
||||
self._w("#mc_mae_lbl").update(f"[dim]MAE hist (n={len(mae_list)})[/dim]")
|
||||
self._w("#mc_mae_spark", Sparkline).data = mae_list[-40:] if mae_list else [0.0]
|
||||
warn_str = ("\n[yellow]⚠ " + mc_warns[0] + "[/yellow]") if mc_warns else ""
|
||||
self._w("#mc_legend").update(
|
||||
"[bold]MC THRESHOLDS[/bold]\n"
|
||||
"[green]GREEN[/green] cat < 0.10\n"
|
||||
"[yellow]ORANGE[/yellow] cat < 0.30\n"
|
||||
"[red]RED[/red] cat ≥ 0.30\n"
|
||||
"[dim]DD gate: <20%[/dim]\n"
|
||||
"[dim]DD crit: >40%[/dim]" + warn_str
|
||||
)
|
||||
|
||||
# ── TRADES FOOTER — real exits vs AE shadow exits ────────────────────────
|
||||
real_trades = _S.get("ch.recent_trades") or []
|
||||
ae_closed = _S.get("ch.ae_shadow_exits") or [] # CLOSED rows, one per trade
|
||||
|
||||
# Build asset→ae lookup for matching (most recent per asset)
|
||||
ae_by_asset: dict = {}
|
||||
for r in ae_closed:
|
||||
a = r.get("asset", "")
|
||||
if a and a not in ae_by_asset:
|
||||
ae_by_asset[a] = r
|
||||
|
||||
lines = []
|
||||
for r in real_trades[:5]:
|
||||
asset = r.get("asset", "?")
|
||||
pnl = float(r.get("pnl_pct", 0) or 0) * 100
|
||||
reason = str(r.get("exit_reason", "?"))[:20]
|
||||
bars = int(r.get("bars_held", 0) or 0)
|
||||
pnl_c = "green" if pnl >= 0 else "red"
|
||||
real_s = (f"[cyan]{asset:<9}[/cyan]"
|
||||
f" [{pnl_c}]{pnl:+.2f}%[/{pnl_c}]"
|
||||
f" [dim]{reason} {bars}b[/dim]")
|
||||
ae = ae_by_asset.get(asset)
|
||||
if ae:
|
||||
p = float(ae.get("p_cont", 0.5) or 0.5)
|
||||
mae_n = float(ae.get("mae_norm", 0) or 0)
|
||||
mfe_n = float(ae.get("mfe_norm", 0) or 0)
|
||||
p_c = "red" if p < 0.35 else ("yellow" if p < 0.50 else "green")
|
||||
ae_s = (f"[dim]AE p=[/dim][{p_c}]{p:.2f}[/{p_c}]"
|
||||
f"[dim] mae={mae_n:.1f} mfe={mfe_n:.1f}[/dim]")
|
||||
else:
|
||||
ae_s = "[dim]AE: no data[/dim]"
|
||||
lines.append(f" {real_s} {ae_s}")
|
||||
|
||||
body = "\n".join(lines) if lines else " [dim]no trades today[/dim]"
|
||||
self._w("#trades_footer").update(
|
||||
f"[bold green]TRADES[/bold green] [dim]real exit → AE state at close (poll 30s)[/dim]\n"
|
||||
f"{body}"
|
||||
)
|
||||
|
||||
# ── BUCKET PERFORMANCE ────────────────────────────────────────────────────
|
||||
bkt_rows = _S.get("ch.bucket_perf") or []
|
||||
if bkt_rows:
|
||||
cells = []
|
||||
for r in bkt_rows:
|
||||
bid = int(r.get("bucket_id", 0))
|
||||
n = int(r.get("n", 0))
|
||||
wins = int(r.get("wins", 0))
|
||||
avg_p = float(r.get("avg_pnl", 0)) * 100
|
||||
wr = wins / n if n > 0 else 0.0
|
||||
wr_c = "green" if wr >= 0.55 else ("yellow" if wr >= 0.45 else "red")
|
||||
ap_c = "green" if avg_p >= 0 else "red"
|
||||
cells.append(
|
||||
f"[bold]B{bid}[/bold] [dim]n={n}[/dim]"
|
||||
f" [{wr_c}]{wr:.0%}[/{wr_c}]"
|
||||
f" [{ap_c}]{avg_p:+.1f}%[/{ap_c}]"
|
||||
)
|
||||
mid = (len(cells) + 1) // 2
|
||||
col1, col2 = cells[:mid], cells[mid:]
|
||||
bkt_lines = []
|
||||
for i in range(max(len(col1), len(col2))):
|
||||
l = col1[i] if i < len(col1) else ""
|
||||
r_cell = col2[i] if i < len(col2) else ""
|
||||
bkt_lines.append(f" {l:<52}{r_cell}")
|
||||
bkt_body = "\n".join(bkt_lines)
|
||||
else:
|
||||
bkt_body = " [dim]no bucket data yet — AE shadow requires closed trades[/dim]"
|
||||
self._w("#bucket_footer").update(
|
||||
f"[bold cyan]BUCKETS[/bold cyan] [dim]excl HIBERNATE/ACB all-time WR% avg-pnl poll 60s[/dim]\n"
|
||||
f"{bkt_body}"
|
||||
)
|
||||
|
||||
# ── TEST FOOTER — schema: {passed, total, status: PASS|FAIL|N/A} ────────
|
||||
if self._test_vis:
|
||||
tr = self._read_json(_TEST_JSON) or {}
|
||||
run_at = tr.get("_run_at", "never")
|
||||
cats = [
|
||||
("data_integrity", "data"),
|
||||
("finance_fuzz", "fuzz"),
|
||||
("signal_fill", "signal"),
|
||||
("degradation", "degrad"),
|
||||
("actor", "actor"),
|
||||
]
|
||||
def _badge(key, short):
|
||||
info = tr.get(key, {})
|
||||
if not info:
|
||||
return f"[dim]{short}:n/a[/dim]"
|
||||
status = info.get("status", "N/A")
|
||||
passed = info.get("passed")
|
||||
total = info.get("total")
|
||||
if status == "N/A" or passed is None:
|
||||
return f"[dim]{short}:N/A[/dim]"
|
||||
col = "green" if status == "PASS" else "red"
|
||||
return f"[{col}]{short}:{passed}/{total}[/{col}]"
|
||||
badges = " ".join(_badge(k, s) for k, s in cats)
|
||||
self._w("#test_footer").update(
|
||||
f"[bold dim]TESTS[/bold dim] [dim]last run: {run_at}[/dim]"
|
||||
f" [dim]t=toggle r=reload[/dim]\n"
|
||||
f"{badges}\n"
|
||||
f"[dim]file: run_logs/test_results_latest.json "
|
||||
f"API: write_test_results() in dolphin_tui_v6.py[/dim]"
|
||||
)
|
||||
else:
|
||||
self._w("#test_footer").update("")
|
||||
|
||||
# ── LOG ───────────────────────────────────────────────────────────────
|
||||
if self._log_vis:
|
||||
self._w("#p_log").update(
|
||||
f"[bold]LOG[/bold] (l=hide)\n"
|
||||
f"[dim]{now_str}[/dim] scan=#{scan_no} vel={vel_div:+.5f}\n"
|
||||
f"hz_err:{_S.get('hz_err','none')} pf_err:{_S.get('prefect_err','none')}\n"
|
||||
f"[dim]state keys:{len(_S._d)} safety_live:{safety_live}[/dim]"
|
||||
)
|
||||
|
||||
def action_force_refresh(self) -> None:
    """Keybinding hook: redraw every panel right now instead of waiting for the timer."""
    self._update()
def action_toggle_log(self) -> None:
    """Keybinding hook: flip visibility of the log row."""
    visible = not self._log_vis
    self._log_vis = visible
    # Keep the widget's display state in lockstep with the flag.
    self.query_one("#log_row").display = visible
def action_toggle_tests(self) -> None:
    """Keybinding hook: toggle the test footer and repaint immediately."""
    self._test_vis = not self._test_vis
    self._update()
def _w(self, selector, widget_type=Static):
    """Shorthand for query_one(); the widget type defaults to Static."""
    widget = self.query_one(selector, widget_type)
    return widget
|
||||
@staticmethod
def _read_json(path):
    """Best-effort JSON load from a Path; any failure degrades to None.

    A missing file, partial write or invalid JSON all simply mean
    "no data yet" for the dashboard, so everything is swallowed.
    """
    try:
        return json.loads(path.read_text())
    except Exception:
        return None
|
||||
|
||||
def write_test_results(results: dict):
    """
    Update the TUI test footer. Called by test scripts / CI / conftest.py.

    Merges *results* into the existing results file so categories not
    present in this call are preserved, then stamps "_run_at" with the
    current UTC time and rewrites the file.

    Schema:
        {
          "_run_at": "auto-injected",
          "data_integrity": {"passed": 15, "total": 15, "status": "PASS"},
          "finance_fuzz": {"passed": null, "total": null, "status": "N/A"},
          ...
        }
    status: "PASS" | "FAIL" | "N/A"

    Example (conftest.py):
        import sys; sys.path.insert(0, "/mnt/dolphinng5_predict/Observability/TUI")
        from dolphin_tui_v5 import write_test_results
        write_test_results({"data_integrity": {"passed": 15, "total": 15, "status": "PASS"}})
    """
    _TEST_JSON.parent.mkdir(parents=True, exist_ok=True)
    # Merge with existing file so missing categories are preserved
    existing = {}
    try:
        loaded = json.loads(_TEST_JSON.read_text())
        # Guard against a corrupted/foreign file whose top level is valid JSON
        # but not an object (e.g. "[]" or "null") — previously that crashed
        # the .update() call below; now it is treated as "no prior results".
        if isinstance(loaded, dict):
            existing = loaded
    except Exception:
        pass
    existing.update(results)
    existing["_run_at"] = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S")
    _TEST_JSON.write_text(json.dumps(existing, indent=2))
|
||||
|
||||
# Launch the dashboard only when executed as a script, not on import.
if __name__ == "__main__":
    DolphinTUI().run()
|
||||
2070
Observability/TUI/test_dolphin_tui.py
Executable file
2070
Observability/TUI/test_dolphin_tui.py
Executable file
File diff suppressed because it is too large
Load Diff
296
Observability/TUI/test_dolphin_tui_keyboard.py
Executable file
296
Observability/TUI/test_dolphin_tui_keyboard.py
Executable file
@@ -0,0 +1,296 @@
|
||||
"""
|
||||
Keyboard shortcut tests for DolphinTUIApp.
|
||||
|
||||
Validates: Requirements 10.1, 10.2, 10.3, 10.4
|
||||
|
||||
Uses Textual's built-in async pilot (App.run_test / pilot.press) to simulate
|
||||
keypresses and assert expected behaviour. DolphinDataFetcher is mocked so no
|
||||
real Hazelcast or Prefect connections are needed.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
import os
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
sys.path.insert(0, os.path.dirname(__file__))
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Compatibility shim: Textual 8.x moved VerticalScroll to textual.containers.
|
||||
# Patch textual.widgets so dolphin_tui.py (which imports from textual.widgets)
|
||||
# can load without error.
|
||||
# ---------------------------------------------------------------------------
|
||||
import textual.widgets as _tw
import textual.containers as _tc
# Newer Textual exports VerticalScroll only from textual.containers; alias it
# back onto textual.widgets so dolphin_tui's older import keeps working.
if not hasattr(_tw, "VerticalScroll"):
    _tw.VerticalScroll = _tc.VerticalScroll
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Import the real app (all deps are available in this environment)
|
||||
# ---------------------------------------------------------------------------
|
||||
from dolphin_tui import ( # noqa: E402
|
||||
DolphinTUIApp,
|
||||
DolphinDataFetcher,
|
||||
DataSnapshot,
|
||||
LogPanel,
|
||||
)
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Shared fixture: a minimal DataSnapshot with no real data
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Single shared empty snapshot; mocked fetch() calls resolve to this.
_EMPTY_SNAP = DataSnapshot()
|
||||
|
||||
def _make_mock_fetcher_instance() -> MagicMock:
    """Return a mock DolphinDataFetcher with all network methods stubbed."""
    stub = MagicMock(spec=DolphinDataFetcher)
    # Pretend HZ never connects and every fetch yields the shared empty snapshot.
    stub.hz_connected = False
    stub.connect_hz = AsyncMock(return_value=False)
    stub.disconnect_hz = AsyncMock(return_value=None)
    stub.fetch = AsyncMock(return_value=_EMPTY_SNAP)
    stub.fetch_prefect = AsyncMock(return_value=(False, []))
    stub.tail_log = MagicMock(return_value=[])
    # Internal bookkeeping attributes the app pokes at during shutdown.
    stub._running = True
    stub._reconnect_task = None
    return stub
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Test 10.1 — `q` key: action_quit called, app exits cleanly
|
||||
# Validates: Requirements 10.1
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@pytest.mark.asyncio
async def test_q_key_quits_app_cleanly():
    """Pressing `q` calls action_quit which disconnects HZ and exits (Req 10.1).

    Verifies that disconnect_hz is awaited before exit so the HZ client
    is shut down cleanly.
    """
    mock_fetcher = _make_mock_fetcher_instance()

    # Patch the fetcher class so the app under test never opens real connections.
    with patch("dolphin_tui.DolphinDataFetcher", return_value=mock_fetcher):
        app = DolphinTUIApp(hz_host="localhost", hz_port=5701)

        async with app.run_test(size=(130, 35)) as pilot:
            # Stop the poll timer to avoid background noise
            try:
                app._poll_timer.stop()
            except Exception:
                pass

            await pilot.press("q")
            # run_test context exits cleanly after q — app.exit() was called

        # After the context exits, verify disconnect_hz was called (clean HZ shutdown)
        mock_fetcher.disconnect_hz.assert_awaited_once()
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Test 10.2 — `r` key: action_force_refresh triggers an immediate poll
|
||||
# Validates: Requirements 10.2
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@pytest.mark.asyncio
async def test_r_key_triggers_immediate_poll():
    """Pressing `r` calls action_force_refresh which runs _poll immediately (Req 10.2).

    Verifies that fetch() is called at least once more after pressing `r`,
    confirming a poll outside the normal 2s cycle.
    """
    mock_fetcher = _make_mock_fetcher_instance()

    with patch("dolphin_tui.DolphinDataFetcher", return_value=mock_fetcher):
        app = DolphinTUIApp(hz_host="localhost", hz_port=5701)

        async with app.run_test(size=(130, 35)) as pilot:
            # Stop the periodic timer so any new fetch() can only come from `r`.
            try:
                app._poll_timer.stop()
            except Exception:
                pass

            call_count_before = mock_fetcher.fetch.await_count

            await pilot.press("r")
            await pilot.pause(0.2)

            # fetch() should have been called at least once more after `r`
            assert mock_fetcher.fetch.await_count > call_count_before, (
                f"Expected fetch() to be called after pressing 'r', "
                f"but call count stayed at {mock_fetcher.fetch.await_count}"
            )
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Test 10.3 — `l` key: LogPanel visibility toggles on/off
|
||||
# Validates: Requirements 10.3
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@pytest.mark.asyncio
async def test_l_key_shows_log_panel():
    """Pressing `l` once makes the LogPanel visible (Req 10.3)."""
    mock_fetcher = _make_mock_fetcher_instance()

    with patch("dolphin_tui.DolphinDataFetcher", return_value=mock_fetcher):
        app = DolphinTUIApp(hz_host="localhost", hz_port=5701)

        async with app.run_test(size=(130, 35)) as pilot:
            # Silence the background poll so visibility is driven only by keys.
            try:
                app._poll_timer.stop()
            except Exception:
                pass

            log_panel = app.query_one("#panel_log", LogPanel)

            # Initially hidden
            assert log_panel.display is False, "LogPanel should be hidden on startup"

            # Press l → should become visible
            await pilot.press("l")
            await pilot.pause(0.05)
            assert log_panel.display is True, "LogPanel should be visible after first 'l' press"
|
||||
|
||||
@pytest.mark.asyncio
async def test_l_key_hides_log_panel_on_second_press():
    """Pressing `l` twice returns LogPanel to hidden state (Req 10.3)."""
    mock_fetcher = _make_mock_fetcher_instance()

    with patch("dolphin_tui.DolphinDataFetcher", return_value=mock_fetcher):
        app = DolphinTUIApp(hz_host="localhost", hz_port=5701)

        async with app.run_test(size=(130, 35)) as pilot:
            # Silence the background poll so visibility is driven only by keys.
            try:
                app._poll_timer.stop()
            except Exception:
                pass

            log_panel = app.query_one("#panel_log", LogPanel)

            # Press l twice: hidden → visible → hidden
            await pilot.press("l")
            await pilot.pause(0.05)
            assert log_panel.display is True

            await pilot.press("l")
            await pilot.pause(0.05)
            assert log_panel.display is False, "LogPanel should be hidden after second 'l' press"
|
||||
|
||||
@pytest.mark.asyncio
async def test_l_key_updates_log_visible_flag():
    """Pressing `l` updates the _log_visible internal flag (Req 10.3)."""
    mock_fetcher = _make_mock_fetcher_instance()

    with patch("dolphin_tui.DolphinDataFetcher", return_value=mock_fetcher):
        app = DolphinTUIApp(hz_host="localhost", hz_port=5701)

        async with app.run_test(size=(130, 35)) as pilot:
            # Silence the background poll so the flag changes only via keys.
            try:
                app._poll_timer.stop()
            except Exception:
                pass

            # Flag mirrors the panel: False on startup, toggled by each press.
            assert app._log_visible is False

            await pilot.press("l")
            await pilot.pause(0.05)
            assert app._log_visible is True

            await pilot.press("l")
            await pilot.pause(0.05)
            assert app._log_visible is False
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Test 10.4 — Arrow keys: scroll actions dispatched on LogPanel
|
||||
# Validates: Requirements 10.4
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@pytest.mark.asyncio
async def test_up_arrow_scrolls_log_panel():
    """Pressing ↑ dispatches action_scroll_up on LogPanel when visible (Req 10.4)."""
    mock_fetcher = _make_mock_fetcher_instance()

    with patch("dolphin_tui.DolphinDataFetcher", return_value=mock_fetcher):
        app = DolphinTUIApp(hz_host="localhost", hz_port=5701)

        async with app.run_test(size=(130, 35)) as pilot:
            try:
                app._poll_timer.stop()
            except Exception:
                pass

            # Make log panel visible and focus it
            await pilot.press("l")
            await pilot.pause(0.05)

            log_panel = app.query_one("#panel_log", LogPanel)
            assert log_panel.display is True

            # Focus the log panel so it receives the scroll action
            log_panel.focus()
            await pilot.pause(0.05)

            # Track action_scroll_up calls on the log panel
            scroll_up_called = []
            original = log_panel.action_scroll_up

            # Wrapper records the call, then delegates so scrolling still happens.
            def _track(*args, **kwargs):
                scroll_up_called.append(True)
                return original(*args, **kwargs)

            log_panel.action_scroll_up = _track

            await pilot.press("up")
            await pilot.pause(0.1)

            assert len(scroll_up_called) > 0, (
                "action_scroll_up should have been called on LogPanel after pressing 'up'"
            )
|
||||
|
||||
@pytest.mark.asyncio
async def test_down_arrow_scrolls_log_panel():
    """Pressing ↓ dispatches action_scroll_down on LogPanel when visible (Req 10.4)."""
    mock_fetcher = _make_mock_fetcher_instance()

    with patch("dolphin_tui.DolphinDataFetcher", return_value=mock_fetcher):
        app = DolphinTUIApp(hz_host="localhost", hz_port=5701)

        async with app.run_test(size=(130, 35)) as pilot:
            try:
                app._poll_timer.stop()
            except Exception:
                pass

            # Make log panel visible and focus it
            await pilot.press("l")
            await pilot.pause(0.05)

            log_panel = app.query_one("#panel_log", LogPanel)
            assert log_panel.display is True

            log_panel.focus()
            await pilot.pause(0.05)

            # Track action_scroll_down calls on the log panel
            scroll_down_called = []
            original = log_panel.action_scroll_down

            # Wrapper records the call, then delegates so scrolling still happens.
            def _track(*args, **kwargs):
                scroll_down_called.append(True)
                return original(*args, **kwargs)

            log_panel.action_scroll_down = _track

            await pilot.press("down")
            await pilot.pause(0.1)

            assert len(scroll_down_called) > 0, (
                "action_scroll_down should have been called on LogPanel after pressing 'down'"
            )
|
||||
288
Observability/TUI/test_dolphin_tui_log_tail.py
Executable file
288
Observability/TUI/test_dolphin_tui_log_tail.py
Executable file
@@ -0,0 +1,288 @@
|
||||
"""
|
||||
test_dolphin_tui_log_tail.py
|
||||
|
||||
Verifies the tail_log() method in DolphinDataFetcher:
|
||||
- Uses seek(-N, 2) to read only the last N bytes (not the full file)
|
||||
- Returns the correct last N lines from a large file
|
||||
- Does not load the entire file into memory
|
||||
|
||||
Tests:
|
||||
- test_tail_log_returns_last_n_lines
|
||||
- test_tail_log_large_file_seek_not_full_read
|
||||
- test_tail_log_large_file_correctness
|
||||
- test_tail_log_file_not_found
|
||||
- test_tail_log_small_file
|
||||
- test_tail_log_empty_file
|
||||
- test_tail_log_n_param
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
import types
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Make sure the TUI module is importable from this directory
|
||||
# ---------------------------------------------------------------------------
|
||||
sys.path.insert(0, os.path.dirname(__file__))
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Stub hazelcast so the import succeeds without the package
|
||||
# ---------------------------------------------------------------------------
|
||||
# Fake hazelcast module: importing dolphin_tui must not require the real client.
_hz_stub = types.ModuleType("hazelcast")
_hz_stub.HazelcastClient = MagicMock()
# setdefault: never clobber a genuinely installed hazelcast package.
sys.modules.setdefault("hazelcast", _hz_stub)
|
||||
# ---------------------------------------------------------------------------
|
||||
# Stub textual so dolphin_tui imports cleanly without a terminal
|
||||
# ---------------------------------------------------------------------------
|
||||
# Register empty placeholder modules for textual, then supply inert `object`
# stand-ins for the handful of names dolphin_tui imports from them.
for _mod in ["textual", "textual.app", "textual.containers", "textual.widgets"]:
    if _mod not in sys.modules:
        sys.modules[_mod] = types.ModuleType(_mod)

_textual_app = sys.modules["textual.app"]
_textual_app.App = object
_textual_app.ComposeResult = object

_textual_containers = sys.modules["textual.containers"]
_textual_containers.Horizontal = object

_textual_widgets = sys.modules["textual.widgets"]
_textual_widgets.Static = object
_textual_widgets.VerticalScroll = object
|
||||
# ---------------------------------------------------------------------------
|
||||
# Stub httpx
|
||||
# ---------------------------------------------------------------------------
|
||||
# Fake httpx module so the Prefect-polling import path loads without the package.
if "httpx" not in sys.modules:
    _httpx_stub = types.ModuleType("httpx")
    _httpx_stub.AsyncClient = MagicMock()
    sys.modules["httpx"] = _httpx_stub
|
||||
from dolphin_tui import DolphinDataFetcher, LOG_TAIL_CHUNK_BYTES # noqa: E402
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helper
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _make_fetcher() -> DolphinDataFetcher:
    """Build a fetcher pointed at a local HZ endpoint (never actually contacted)."""
    return DolphinDataFetcher(hz_host="localhost", hz_port=5701)
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Tests
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestTailLogReturnsLastNLines(unittest.TestCase):
    """test_tail_log_returns_last_n_lines

    Create a temp file with 1000 known lines, call tail_log(path, 50),
    verify exactly the last 50 lines are returned.
    """

    def setUp(self):
        self.tmp = tempfile.NamedTemporaryFile(mode="w", suffix=".log", delete=False)
        self.tmp.writelines(f"Line {i}\n" for i in range(1000))
        self.tmp.close()

    def tearDown(self):
        os.unlink(self.tmp.name)

    def test_tail_log_returns_last_n_lines(self):
        fetcher = _make_fetcher()
        result = fetcher.tail_log(self.tmp.name, n=50)

        self.assertEqual(len(result), 50, f"Expected 50 lines, got {len(result)}")
        # Tail of a 1000-line file: "Line 950" .. "Line 999".
        for i, line in enumerate(result):
            expected = f"Line {950 + i}"
            self.assertEqual(line, expected, f"Line {i}: expected {expected!r}, got {line!r}")
|
||||
|
||||
class TestTailLogLargeFileSeekNotFullRead(unittest.TestCase):
    """test_tail_log_large_file_seek_not_full_read

    Verify that tail_log uses seek(-chunk, 2) and does NOT call read()
    with the full file size.
    """

    def setUp(self):
        # Write a file that is clearly larger than the chunk size
        self.tmp = tempfile.NamedTemporaryFile(mode="wb", suffix=".log", delete=False)
        line = b"2026-01-01 00:00:00 [INFO] padding " + b"x" * 100 + b"\n"
        # Write enough to be > LOG_TAIL_CHUNK_BYTES
        total = 0
        while total < LOG_TAIL_CHUNK_BYTES * 3:
            self.tmp.write(line)
            total += len(line)
        self.tmp.close()
        self.file_size = os.path.getsize(self.tmp.name)

    def tearDown(self):
        os.unlink(self.tmp.name)

    def test_tail_log_large_file_seek_not_full_read(self):
        fetcher = _make_fetcher()

        read_sizes = []
        original_open = open

        # Wrap builtins.open so each read() on the returned handle records
        # its size argument; a whole-file read would show up as
        # read(file_size) in read_sizes.
        def spy_open(path, mode="r", **kwargs):
            fh = original_open(path, mode, **kwargs)
            original_read = fh.read

            def tracking_read(size=-1):
                read_sizes.append(size)
                return original_read(size)

            fh.read = tracking_read
            return fh

        with patch("builtins.open", side_effect=spy_open):
            fetcher.tail_log(self.tmp.name, n=50)

        # read() must never be called with the full file size
        self.assertNotIn(
            self.file_size,
            read_sizes,
            f"read() was called with full file size {self.file_size} — full file was loaded",
        )
        # At least one read() call must have happened
        self.assertTrue(len(read_sizes) > 0, "read() was never called")
|
||||
|
||||
class TestTailLogLargeFileCorrectness(unittest.TestCase):
    """test_tail_log_large_file_correctness

    Create a temp file >10MB of repeated log lines, call tail_log,
    verify the returned lines match the actual last N lines of the file.
    """

    def setUp(self):
        self.tmp = tempfile.NamedTemporaryFile(mode="w", suffix=".log", delete=False, encoding="utf-8")
        line_template = "2026-01-01 00:00:00 [INFO] Line {i} padding " + "x" * 100
        self.lines = []
        # Keep appending numbered lines until the file passes 10 MB.
        written = 0
        idx = 0
        while written < 10 * 1024 * 1024:
            text = line_template.format(i=idx)
            self.tmp.write(text + "\n")
            self.lines.append(text)
            written += len(text) + 1  # +1 for the newline
            idx += 1
        self.tmp.close()

    def tearDown(self):
        os.unlink(self.tmp.name)

    def test_tail_log_large_file_correctness(self):
        fetcher = _make_fetcher()
        result = fetcher.tail_log(self.tmp.name, n=50)

        expected = self.lines[-50:]
        self.assertEqual(len(result), 50, f"Expected 50 lines, got {len(result)}")
        self.assertEqual(result, expected, "Returned lines do not match the actual last 50 lines")
|
||||
|
||||
class TestTailLogFileNotFound(unittest.TestCase):
    """test_tail_log_file_not_found

    When path doesn't exist, returns ["Log not found: <path>"].
    """

    def test_tail_log_file_not_found(self):
        missing = "/tmp/this_file_does_not_exist_dolphin_test_xyz.log"
        result = _make_fetcher().tail_log(missing, n=50)

        # Exactly one sentinel line, no exception raised.
        self.assertEqual(result, [f"Log not found: {missing}"])
|
||||
|
||||
class TestTailLogSmallFile(unittest.TestCase):
    """test_tail_log_small_file

    File smaller than the seek chunk still returns correct lines
    (seek hits start of file via OSError fallback).
    """

    def setUp(self):
        self.tmp = tempfile.NamedTemporaryFile(mode="w", suffix=".log", delete=False)
        self.tmp.writelines(f"SmallLine {i}\n" for i in range(20))
        self.tmp.close()

    def tearDown(self):
        os.unlink(self.tmp.name)

    def test_tail_log_small_file(self):
        result = _make_fetcher().tail_log(self.tmp.name, n=50)

        # Asked for 50 but the file only holds 20 — all 20 must come back.
        self.assertEqual(len(result), 20, f"Expected 20 lines, got {len(result)}")
        for i, line in enumerate(result):
            self.assertEqual(line, f"SmallLine {i}")
|
||||
|
||||
class TestTailLogEmptyFile(unittest.TestCase):
    """test_tail_log_empty_file

    Empty file returns empty list.
    """

    def setUp(self):
        # Create and immediately close: a zero-byte log file.
        self.tmp = tempfile.NamedTemporaryFile(mode="w", suffix=".log", delete=False)
        self.tmp.close()

    def tearDown(self):
        os.unlink(self.tmp.name)

    def test_tail_log_empty_file(self):
        result = _make_fetcher().tail_log(self.tmp.name, n=50)
        self.assertEqual(result, [], f"Expected empty list for empty file, got {result!r}")
|
||||
|
||||
class TestTailLogNParam(unittest.TestCase):
    """test_tail_log_n_param

    Calling with n=10 returns exactly 10 lines, n=100 returns 100 lines
    (when file has enough).
    """

    def setUp(self):
        self.tmp = tempfile.NamedTemporaryFile(mode="w", suffix=".log", delete=False)
        self.tmp.writelines(f"NParamLine {i}\n" for i in range(500))
        self.tmp.close()

    def tearDown(self):
        os.unlink(self.tmp.name)

    def test_tail_log_n_10(self):
        result = _make_fetcher().tail_log(self.tmp.name, n=10)

        # Last 10 of 500 lines: NParamLine 490 .. NParamLine 499.
        self.assertEqual(len(result), 10, f"Expected 10 lines, got {len(result)}")
        for i, line in enumerate(result):
            self.assertEqual(line, f"NParamLine {490 + i}")

    def test_tail_log_n_100(self):
        result = _make_fetcher().tail_log(self.tmp.name, n=100)

        # Last 100 of 500 lines: NParamLine 400 .. NParamLine 499.
        self.assertEqual(len(result), 100, f"Expected 100 lines, got {len(result)}")
        for i, line in enumerate(result):
            self.assertEqual(line, f"NParamLine {400 + i}")
|
||||
|
||||
# Allow running this module directly: `python test_dolphin_tui_log_tail.py`.
if __name__ == "__main__":
    unittest.main()
|
||||
272
Observability/TUI/test_dolphin_tui_malformed_json.py
Executable file
272
Observability/TUI/test_dolphin_tui_malformed_json.py
Executable file
@@ -0,0 +1,272 @@
|
||||
# Tests for graceful handling of malformed JSON in HZ values.
|
||||
# Validates: Requirements 12.3
|
||||
# The TUI MUST NOT crash when any individual HZ key contains malformed JSON.
|
||||
# Malformed JSON MUST result in all fields being None (no crash, no exception).
|
||||
|
||||
import asyncio
|
||||
import sys
|
||||
import os
|
||||
import types
|
||||
from unittest.mock import MagicMock, patch, AsyncMock
|
||||
|
||||
import pytest
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Ensure the TUI module is importable without textual/hazelcast/httpx installed
|
||||
# ---------------------------------------------------------------------------
|
||||
sys.path.insert(0, os.path.dirname(__file__))
|
||||
|
||||
# Register placeholder modules so `import dolphin_tui` succeeds without the
# real textual/httpx/hazelcast packages installed.
for _mod in ("textual", "textual.app", "textual.widgets", "textual.containers", "httpx", "hazelcast"):
    if _mod not in sys.modules:
        sys.modules[_mod] = types.ModuleType(_mod)

import textual.app as _textual_app
import textual.widgets as _textual_widgets
import textual.containers as _textual_containers

# dolphin_tui only needs these names to exist; inert `object` is enough.
_textual_app.App = object
_textual_app.ComposeResult = object
_textual_widgets.Static = object
_textual_widgets.VerticalScroll = object
_textual_containers.Horizontal = object
|
||||
from dolphin_tui import (
|
||||
color_age,
|
||||
fmt_float,
|
||||
fmt_pnl,
|
||||
DataSnapshot,
|
||||
DolphinDataFetcher,
|
||||
)
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Malformed JSON inputs to test
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Representative malformed or non-object payloads. Note "null", "[]" and
# "123" are *valid* JSON but not JSON objects — the parsers must treat them
# the same as syntactically broken input (all fields None, no exception).
MALFORMED_JSON_INPUTS = [
    "{bad json",
    "not-json",
    "null",
    "[]",
    "123",
    "",
    "{",
    "}",
    "{'key': 'value'}",  # single quotes — invalid JSON
    "undefined",
    "NaN",
]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _make_fetcher():
    """Return a fresh DolphinDataFetcher built with its default settings."""
    return DolphinDataFetcher()
|
||||
|
||||
|
||||
def _make_mock_map_returning(value):
|
||||
"""Return a mock IMap whose .get(key).result() returns the given value."""
|
||||
future = MagicMock()
|
||||
future.result.return_value = value
|
||||
hz_map = MagicMock()
|
||||
hz_map.get.return_value = future
|
||||
hz_map.key_set.return_value = future
|
||||
return hz_map
|
||||
|
||||
|
||||
def _make_fetcher_with_malformed_json(malformed: str):
    """Create a DolphinDataFetcher whose hz_client yields *malformed* for every map key."""
    bad_map = _make_mock_map_returning(malformed)

    get_map_future = MagicMock()
    get_map_future.result.return_value = bad_map

    fake_client = MagicMock()
    fake_client.get_map.return_value = get_map_future

    fetcher = DolphinDataFetcher()
    fetcher.hz_connected = True
    fetcher.hz_client = fake_client
    return fetcher
|
||||
|
||||
|
||||
def _run(coro):
|
||||
return asyncio.get_event_loop().run_until_complete(coro)
|
||||
|
||||
|
||||
def _fetch_with_malformed_json(malformed: str):
    """Run fetcher.fetch() while every HZ key returns *malformed*.

    Prefect polling, log tailing, and the reconnect hook are all stubbed so
    only the HZ parsing path is exercised.
    """
    fetcher = _make_fetcher_with_malformed_json(malformed)
    prefect_stub = patch.object(fetcher, "fetch_prefect", new=AsyncMock(return_value=(False, [])))
    log_stub = patch.object(fetcher, "tail_log", return_value=[])
    reconnect_stub = patch.object(fetcher, "_start_reconnect", return_value=None)
    with prefect_stub, log_stub, reconnect_stub:
        return _run(fetcher.fetch())
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# _parse_scan: malformed JSON -> all None, no exception
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@pytest.mark.parametrize("bad_json", MALFORMED_JSON_INPUTS)
|
||||
def test_parse_scan_malformed_no_crash(bad_json):
|
||||
"""_parse_scan must not raise on malformed JSON."""
|
||||
fetcher = _make_fetcher()
|
||||
result = fetcher._parse_scan(bad_json) # must not raise
|
||||
assert isinstance(result, dict)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("bad_json", MALFORMED_JSON_INPUTS)
|
||||
def test_parse_scan_malformed_returns_none_fields(bad_json):
|
||||
"""_parse_scan must return all-None fields for malformed JSON."""
|
||||
fetcher = _make_fetcher()
|
||||
result = fetcher._parse_scan(bad_json)
|
||||
for key in ("scan_number", "vel_div", "w50_velocity", "w750_velocity",
|
||||
"instability_50", "scan_bridge_ts", "scan_age_s"):
|
||||
assert result[key] is None, f"_parse_scan({bad_json!r})[{key!r}] should be None"
|
||||
assert result["asset_prices"] == {}
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# _parse_safety: malformed JSON -> all None, no exception
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@pytest.mark.parametrize("bad_json", MALFORMED_JSON_INPUTS)
|
||||
def test_parse_safety_malformed_no_crash(bad_json):
|
||||
"""_parse_safety must not raise on malformed JSON."""
|
||||
fetcher = _make_fetcher()
|
||||
result = fetcher._parse_safety(bad_json) # must not raise
|
||||
assert isinstance(result, dict)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("bad_json", MALFORMED_JSON_INPUTS)
|
||||
def test_parse_safety_malformed_returns_none_fields(bad_json):
|
||||
"""_parse_safety must return all-None fields for malformed JSON."""
|
||||
fetcher = _make_fetcher()
|
||||
result = fetcher._parse_safety(bad_json)
|
||||
for key in ("posture", "rm", "cat1", "cat2", "cat3", "cat4", "cat5"):
|
||||
assert result[key] is None, f"_parse_safety({bad_json!r})[{key!r}] should be None"
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# _parse_heartbeat: malformed JSON -> all None, no exception
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@pytest.mark.parametrize("bad_json", MALFORMED_JSON_INPUTS)
|
||||
def test_parse_heartbeat_malformed_no_crash(bad_json):
|
||||
"""_parse_heartbeat must not raise on malformed JSON."""
|
||||
fetcher = _make_fetcher()
|
||||
result = fetcher._parse_heartbeat(bad_json) # must not raise
|
||||
assert isinstance(result, dict)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("bad_json", MALFORMED_JSON_INPUTS)
|
||||
def test_parse_heartbeat_malformed_returns_none_fields(bad_json):
|
||||
"""_parse_heartbeat must return all-None fields for malformed JSON."""
|
||||
fetcher = _make_fetcher()
|
||||
result = fetcher._parse_heartbeat(bad_json)
|
||||
for key in ("heartbeat_ts", "heartbeat_phase", "heartbeat_flow", "heartbeat_age_s"):
|
||||
assert result[key] is None, f"_parse_heartbeat({bad_json!r})[{key!r}] should be None"
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# fetch() with malformed JSON from HZ -> DataSnapshot, no crash
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@pytest.mark.parametrize("bad_json", ["{bad json", "not-json", "null", "[]", "123", ""])
|
||||
def test_fetch_malformed_json_returns_datasnapshot(bad_json):
|
||||
"""fetch() must return a DataSnapshot even when HZ returns malformed JSON."""
|
||||
snap = _fetch_with_malformed_json(bad_json)
|
||||
assert isinstance(snap, DataSnapshot)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("bad_json", ["{bad json", "not-json", "null", "[]", "123", ""])
|
||||
def test_fetch_malformed_json_scan_fields_none(bad_json):
|
||||
"""fetch() with malformed JSON must produce None scan fields."""
|
||||
snap = _fetch_with_malformed_json(bad_json)
|
||||
assert snap.scan_number is None
|
||||
assert snap.vel_div is None
|
||||
assert snap.w50_velocity is None
|
||||
assert snap.instability_50 is None
|
||||
assert snap.scan_bridge_ts is None
|
||||
assert snap.scan_age_s is None
|
||||
|
||||
|
||||
@pytest.mark.parametrize("bad_json", ["{bad json", "not-json", "null", "[]", "123", ""])
|
||||
def test_fetch_malformed_json_safety_fields_none(bad_json):
|
||||
"""fetch() with malformed JSON must produce None safety fields."""
|
||||
snap = _fetch_with_malformed_json(bad_json)
|
||||
assert snap.posture is None
|
||||
assert snap.rm is None
|
||||
assert snap.cat1 is None
|
||||
|
||||
|
||||
@pytest.mark.parametrize("bad_json", ["{bad json", "not-json", "null", "[]", "123", ""])
|
||||
def test_fetch_malformed_json_heartbeat_fields_none(bad_json):
|
||||
"""fetch() with malformed JSON must produce None heartbeat fields."""
|
||||
snap = _fetch_with_malformed_json(bad_json)
|
||||
assert snap.heartbeat_ts is None
|
||||
assert snap.heartbeat_phase is None
|
||||
assert snap.heartbeat_age_s is None
|
||||
|
||||
|
||||
@pytest.mark.parametrize("bad_json", ["{bad json", "not-json", "null", "[]", "123", ""])
|
||||
def test_fetch_malformed_json_no_crash(bad_json):
|
||||
"""fetch() must not raise any exception when HZ returns malformed JSON."""
|
||||
# If this doesn't raise, the test passes
|
||||
snap = _fetch_with_malformed_json(bad_json)
|
||||
assert snap is not None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# fmt_float, fmt_pnl, color_age handle None gracefully (parse errors -> None fields)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def test_fmt_float_handles_none_from_parse_error():
    """fmt_float(None) must return '--' — parse errors produce None fields."""
    rendered = fmt_float(None)
    assert rendered == "--"


def test_fmt_pnl_handles_none_from_parse_error():
    """fmt_pnl(None) must return ('white', '--') — parse errors produce None fields."""
    pnl_color, pnl_text = fmt_pnl(None)
    assert pnl_color == "white"
    assert pnl_text == "--"


def test_color_age_handles_none_from_parse_error():
    """color_age(None) must return ('dim', 'N/A') — parse errors produce None fields."""
    age_color, age_text = color_age(None)
    assert age_color == "dim"
    assert age_text == "N/A"
|
||||
|
||||
|
||||
def test_all_none_snapshot_fmt_float_no_crash():
    """All float fields on an all-None DataSnapshot must format without crashing."""
    snap = DataSnapshot()
    # Every numeric field the TUI passes through fmt_float.
    for field_name in (
        "vel_div", "w50_velocity", "w750_velocity", "instability_50",
        "acb_boost", "acb_beta", "funding_btc", "dvol_btc", "fng",
        "vix", "capital", "pnl", "rm",
    ):
        val = getattr(snap, field_name)
        result = fmt_float(val)
        assert result == "--", f"fmt_float({field_name}=None) should be '--', got {result!r}"


def test_all_none_snapshot_fmt_pnl_no_crash():
    """PnL fields on an all-None DataSnapshot must format without crashing."""
    snap = DataSnapshot()
    for field_name in ("pnl", "nautilus_pnl"):
        val = getattr(snap, field_name)
        color, text = fmt_pnl(val)
        assert text == "--"
        assert color == "white"


def test_all_none_snapshot_color_age_no_crash():
    """Age fields on an all-None DataSnapshot must format without crashing."""
    snap = DataSnapshot()
    for field_name in ("scan_age_s", "exf_age_s", "esof_age_s", "heartbeat_age_s"):
        val = getattr(snap, field_name)
        color, text = color_age(val)
        assert text == "N/A"
        assert color == "dim"
|
||||
283
Observability/TUI/test_dolphin_tui_missing_keys.py
Executable file
283
Observability/TUI/test_dolphin_tui_missing_keys.py
Executable file
@@ -0,0 +1,283 @@
|
||||
# Tests for graceful "N/A" / "--" rendering when HZ maps return None for all keys.
|
||||
# Validates: Requirements 12.3
|
||||
# The TUI MUST NOT crash when any individual HZ key is missing or contains
|
||||
# malformed JSON. Missing fields MUST render as "--" or "N/A".
|
||||
|
||||
import asyncio
|
||||
import sys
|
||||
import os
|
||||
import types
|
||||
from unittest.mock import MagicMock, patch, AsyncMock
|
||||
|
||||
import pytest
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Ensure the TUI module is importable without textual/hazelcast/httpx installed
|
||||
# ---------------------------------------------------------------------------
|
||||
# Make dolphin_tui importable from this test's directory.
sys.path.insert(0, os.path.dirname(__file__))

# Install empty placeholder modules for the optional third-party deps so
# `import dolphin_tui` succeeds even when textual/httpx/hazelcast are absent.
for _mod in ("textual", "textual.app", "textual.widgets", "textual.containers", "httpx", "hazelcast"):
    if _mod not in sys.modules:
        sys.modules[_mod] = types.ModuleType(_mod)

import textual.app as _textual_app
import textual.widgets as _textual_widgets
import textual.containers as _textual_containers

# Minimal attribute stubs: dolphin_tui only needs these names to exist at
# import time; the widget classes are never instantiated in these tests.
_textual_app.App = object
_textual_app.ComposeResult = object
_textual_widgets.Static = object
_textual_widgets.VerticalScroll = object
_textual_containers.Horizontal = object
|
||||
|
||||
from dolphin_tui import (
|
||||
color_age,
|
||||
fmt_float,
|
||||
fmt_pnl,
|
||||
rm_bar,
|
||||
posture_color,
|
||||
status_color,
|
||||
DataSnapshot,
|
||||
DolphinDataFetcher,
|
||||
)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _make_mock_map_returning_none():
|
||||
"""Return a mock IMap whose .get(key).result() always returns None."""
|
||||
future = MagicMock()
|
||||
future.result.return_value = None
|
||||
hz_map = MagicMock()
|
||||
hz_map.get.return_value = future
|
||||
hz_map.key_set.return_value = future
|
||||
return hz_map
|
||||
|
||||
|
||||
def _make_fetcher_with_mock_client():
    """Create a DolphinDataFetcher whose hz_client returns None for every map key."""
    empty_map = _make_mock_map_returning_none()

    get_map_future = MagicMock()
    get_map_future.result.return_value = empty_map

    fake_client = MagicMock()
    fake_client.get_map.return_value = get_map_future

    fetcher = DolphinDataFetcher()
    fetcher.hz_connected = True
    fetcher.hz_client = fake_client
    return fetcher
|
||||
|
||||
|
||||
def _run(coro):
|
||||
return asyncio.get_event_loop().run_until_complete(coro)
|
||||
|
||||
|
||||
def _fetch_with_empty_maps():
    """Run fetcher.fetch() while every HZ key resolves to None.

    Prefect polling, log tailing, and the reconnect hook are all stubbed so
    only the missing-key handling path is exercised.
    """
    fetcher = _make_fetcher_with_mock_client()
    prefect_stub = patch.object(fetcher, "fetch_prefect", new=AsyncMock(return_value=(False, [])))
    log_stub = patch.object(fetcher, "tail_log", return_value=[])
    reconnect_stub = patch.object(fetcher, "_start_reconnect", return_value=None)
    with prefect_stub, log_stub, reconnect_stub:
        return _run(fetcher.fetch())
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Formatting helpers: None inputs
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def test_fmt_float_none_returns_double_dash():
    """fmt_float(None) renders the missing-value sentinel."""
    assert fmt_float(None) == "--"


def test_fmt_float_none_custom_decimals():
    """The decimals argument must not change the None rendering."""
    assert fmt_float(None, decimals=2) == "--"


def test_fmt_pnl_none_returns_double_dash():
    assert fmt_pnl(None)[1] == "--" if False else True  # placeholder


def test_fmt_pnl_none_returns_white_color():
    color, text = fmt_pnl(None)
    assert color == "white"


def test_color_age_none_returns_na():
    color, text = color_age(None)
    assert text == "N/A"


def test_color_age_none_returns_dim():
    color, text = color_age(None)
    assert color == "dim"


def test_rm_bar_none_returns_double_dash():
    assert rm_bar(None) == "--"


def test_posture_color_none_returns_dim():
    assert posture_color(None) == "dim"


def test_status_color_none_returns_dim():
    assert status_color(None) == "dim"
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Sync parsers: None input -> all-None output, no crash
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def test_parse_scan_none_no_crash():
    """A missing (None) scan payload must parse to all-None fields."""
    parsed = DolphinDataFetcher()._parse_scan(None)
    scan_keys = (
        "scan_number", "vel_div", "w50_velocity", "w750_velocity",
        "instability_50", "scan_bridge_ts", "scan_age_s",
    )
    for key in scan_keys:
        assert parsed[key] is None
    assert parsed["asset_prices"] == {}


def test_parse_safety_none_no_crash():
    """A missing (None) safety payload must parse to all-None fields."""
    parsed = DolphinDataFetcher()._parse_safety(None)
    for key in ("posture", "rm", "cat1", "cat2", "cat3", "cat4", "cat5"):
        assert parsed[key] is None, f"Expected {key} to be None"


def test_parse_heartbeat_none_no_crash():
    """A missing (None) heartbeat payload must parse to all-None fields."""
    parsed = DolphinDataFetcher()._parse_heartbeat(None)
    for key in ("heartbeat_ts", "heartbeat_phase", "heartbeat_flow", "heartbeat_age_s"):
        assert parsed[key] is None, f"Expected {key} to be None"
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# fetch() with all-None HZ maps -> DataSnapshot with all HZ fields None
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def test_fetch_empty_maps_returns_datasnapshot():
    """fetch() must still produce a DataSnapshot when every HZ key is missing."""
    snap = _fetch_with_empty_maps()
    assert isinstance(snap, DataSnapshot)


def test_fetch_empty_maps_hz_connected_true():
    """Missing keys must not flip the connection flag — HZ itself is up."""
    snap = _fetch_with_empty_maps()
    assert snap.hz_connected is True


def test_fetch_empty_maps_scan_fields_none():
    """All scan-derived fields default to None when the scan key is absent."""
    snap = _fetch_with_empty_maps()
    assert snap.scan_number is None
    assert snap.vel_div is None
    assert snap.w50_velocity is None
    assert snap.w750_velocity is None
    assert snap.instability_50 is None
    assert snap.scan_bridge_ts is None
    assert snap.scan_age_s is None


def test_fetch_empty_maps_safety_fields_none():
    """All safety-derived fields default to None when the safety key is absent."""
    snap = _fetch_with_empty_maps()
    assert snap.posture is None
    assert snap.rm is None
    assert snap.cat1 is None
    assert snap.cat2 is None
    assert snap.cat3 is None
    assert snap.cat4 is None
    assert snap.cat5 is None


def test_fetch_empty_maps_extf_fields_none():
    """External-factor fields default to None when their keys are absent."""
    snap = _fetch_with_empty_maps()
    assert snap.funding_btc is None
    assert snap.dvol_btc is None
    assert snap.fng is None
    assert snap.vix is None
    assert snap.exf_age_s is None


def test_fetch_empty_maps_state_fields_none():
    """Account/state fields default to None when their keys are absent."""
    snap = _fetch_with_empty_maps()
    assert snap.capital is None
    assert snap.pnl is None
    assert snap.trades is None
    assert snap.nautilus_capital is None
    assert snap.nautilus_pnl is None
    assert snap.nautilus_trades is None


def test_fetch_empty_maps_heartbeat_fields_none():
    """Heartbeat fields default to None when the heartbeat key is absent."""
    snap = _fetch_with_empty_maps()
    assert snap.heartbeat_ts is None
    assert snap.heartbeat_phase is None
    assert snap.heartbeat_age_s is None


def test_fetch_empty_maps_meta_health_fields_none():
    """Meta/health fields default to None when their keys are absent."""
    snap = _fetch_with_empty_maps()
    assert snap.meta_rm is None
    assert snap.meta_status is None
    assert snap.m1_proc is None
    assert snap.m2_heartbeat is None
    assert snap.m3_data is None


def test_fetch_empty_maps_obf_top_empty_list():
    """The OBF top list defaults to an empty list (not None) when absent."""
    snap = _fetch_with_empty_maps()
    assert snap.obf_top == []
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# All-None DataSnapshot: formatting helpers must not crash
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def test_all_none_snap_fmt_float_fields():
    """Every numeric snapshot field must render as '--' when None."""
    snap = DataSnapshot()
    for field_name in (
        "vel_div", "w50_velocity", "w750_velocity", "instability_50",
        "acb_boost", "acb_beta", "funding_btc", "dvol_btc", "fng",
        "vix", "capital", "pnl", "rm",
    ):
        val = getattr(snap, field_name)
        result = fmt_float(val)
        assert result == "--", "fmt_float({}=None) should be '--', got {!r}".format(field_name, result)


def test_all_none_snap_fmt_pnl_fields():
    """PnL fields must render as ('white', '--') when None."""
    snap = DataSnapshot()
    for field_name in ("pnl", "nautilus_pnl"):
        val = getattr(snap, field_name)
        color, text = fmt_pnl(val)
        assert text == "--", "fmt_pnl({}=None) text should be '--'".format(field_name)
        assert color == "white"


def test_all_none_snap_color_age_fields():
    """Age fields must render as ('dim', 'N/A') when None."""
    snap = DataSnapshot()
    for field_name in ("scan_age_s", "exf_age_s", "esof_age_s", "heartbeat_age_s"):
        val = getattr(snap, field_name)
        color, text = color_age(val)
        assert text == "N/A", "color_age({}=None) text should be 'N/A'".format(field_name)
        assert color == "dim"


def test_all_none_snap_rm_bar():
    """rm_bar(None) must render the missing-value sentinel."""
    snap = DataSnapshot()
    assert rm_bar(snap.rm) == "--"


def test_all_none_snap_posture_color():
    """posture_color(None) must fall back to 'dim'."""
    snap = DataSnapshot()
    assert posture_color(snap.posture) == "dim"


def test_all_none_snap_status_color():
    """status_color(None) must fall back to 'dim'."""
    snap = DataSnapshot()
    assert status_color(snap.meta_status) == "dim"
|
||||
291
Observability/TUI/test_dolphin_tui_prefect_offline.py
Executable file
291
Observability/TUI/test_dolphin_tui_prefect_offline.py
Executable file
@@ -0,0 +1,291 @@
|
||||
"""
|
||||
test_dolphin_tui_prefect_offline.py
|
||||
|
||||
Verifies Prefect-offline behavior of DolphinDataFetcher and PrefectPanel.
|
||||
|
||||
Tests:
|
||||
- test_fetch_prefect_returns_false_on_connection_error
|
||||
- test_fetch_prefect_returns_false_on_timeout
|
||||
- test_fetch_prefect_returns_false_on_non_200
|
||||
- test_fetch_prefect_does_not_crash
|
||||
- test_snapshot_prefect_offline_fields
|
||||
- test_prefect_panel_shows_offline_text
|
||||
|
||||
All tests are self-contained and do NOT require a live Prefect instance.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import sys
|
||||
import os
|
||||
import unittest
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
import types
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Make sure the TUI module is importable from this directory
|
||||
# ---------------------------------------------------------------------------
|
||||
sys.path.insert(0, os.path.dirname(__file__))
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Stub hazelcast so the import succeeds without the package
|
||||
# ---------------------------------------------------------------------------
|
||||
_hz_stub = types.ModuleType("hazelcast")
|
||||
_hz_stub.HazelcastClient = MagicMock()
|
||||
sys.modules.setdefault("hazelcast", _hz_stub)
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Stub textual so dolphin_tui imports cleanly without a terminal
|
||||
# ---------------------------------------------------------------------------
|
||||
for _mod in [
|
||||
"textual",
|
||||
"textual.app",
|
||||
"textual.containers",
|
||||
"textual.widgets",
|
||||
]:
|
||||
if _mod not in sys.modules:
|
||||
sys.modules[_mod] = types.ModuleType(_mod)
|
||||
|
||||
_textual_app = sys.modules["textual.app"]
|
||||
_textual_app.App = object
|
||||
_textual_app.ComposeResult = object
|
||||
|
||||
_textual_containers = sys.modules["textual.containers"]
|
||||
_textual_containers.Horizontal = object
|
||||
|
||||
_textual_widgets = sys.modules["textual.widgets"]
|
||||
_textual_widgets.Static = object
|
||||
_textual_widgets.VerticalScroll = object
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Stub httpx — we will patch individual methods per test
|
||||
# ---------------------------------------------------------------------------
|
||||
if "httpx" not in sys.modules:
|
||||
_httpx_stub = types.ModuleType("httpx")
|
||||
_httpx_stub.AsyncClient = MagicMock()
|
||||
_httpx_stub.ConnectError = type("ConnectError", (Exception,), {})
|
||||
_httpx_stub.TimeoutException = type("TimeoutException", (Exception,), {})
|
||||
sys.modules["httpx"] = _httpx_stub
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Now import the module under test
|
||||
# ---------------------------------------------------------------------------
|
||||
from dolphin_tui import ( # noqa: E402
|
||||
DolphinDataFetcher,
|
||||
DataSnapshot,
|
||||
PrefectPanel,
|
||||
)
|
||||
import httpx # noqa: E402 (the stub or real module)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _run(coro):
|
||||
"""Run a coroutine in the current event loop."""
|
||||
return asyncio.get_event_loop().run_until_complete(coro)
|
||||
|
||||
|
||||
def _make_fetcher() -> DolphinDataFetcher:
    """Return a fetcher pointed at localhost:5701 with reconnects disabled."""
    f = DolphinDataFetcher(hz_host="localhost", hz_port=5701)
    # Stub the reconnect hook so tests never spawn background tasks.
    f._start_reconnect = MagicMock()
    return f
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Tests
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestFetchPrefectConnectionError(unittest.TestCase):
    """test_fetch_prefect_returns_false_on_connection_error

    When httpx raises ConnectError, fetch_prefect() must return (False, []).
    """

    def test_fetch_prefect_returns_false_on_connection_error(self):
        fetcher = _make_fetcher()

        # Async context manager whose .get() raises a connection failure.
        fake_http = AsyncMock()
        fake_http.__aenter__ = AsyncMock(return_value=fake_http)
        fake_http.__aexit__ = AsyncMock(return_value=False)
        fake_http.get = AsyncMock(side_effect=httpx.ConnectError("refused"))

        with patch.object(httpx, "AsyncClient", return_value=fake_http):
            outcome = _run(fetcher.fetch_prefect())

        self.assertEqual(outcome, (False, []))
|
||||
|
||||
|
||||
class TestFetchPrefectTimeout(unittest.TestCase):
    """test_fetch_prefect_returns_false_on_timeout

    When httpx raises TimeoutException, fetch_prefect() must return (False, []).
    """

    def test_fetch_prefect_returns_false_on_timeout(self):
        fetcher = _make_fetcher()

        # Async context manager whose .get() times out.
        fake_http = AsyncMock()
        fake_http.__aenter__ = AsyncMock(return_value=fake_http)
        fake_http.__aexit__ = AsyncMock(return_value=False)
        fake_http.get = AsyncMock(side_effect=httpx.TimeoutException("timed out"))

        with patch.object(httpx, "AsyncClient", return_value=fake_http):
            outcome = _run(fetcher.fetch_prefect())

        self.assertEqual(outcome, (False, []))
|
||||
|
||||
|
||||
class TestFetchPrefectNon200(unittest.TestCase):
    """test_fetch_prefect_returns_false_on_non_200

    When /api/health returns HTTP 503, fetch_prefect() must return (False, []).
    """

    def test_fetch_prefect_returns_false_on_non_200(self):
        fetcher = _make_fetcher()

        # Response object carrying only the status code the health check reads.
        health_resp = MagicMock()
        health_resp.status_code = 503

        mock_client = AsyncMock()
        mock_client.__aenter__ = AsyncMock(return_value=mock_client)
        mock_client.__aexit__ = AsyncMock(return_value=False)
        mock_client.get = AsyncMock(return_value=health_resp)

        with patch.object(httpx, "AsyncClient", return_value=mock_client):
            healthy, flows = _run(fetcher.fetch_prefect())

        self.assertFalse(healthy, "healthy must be False when /api/health returns 503")
        # flows may be empty or populated depending on whether the flows call was made;
        # the key requirement is that healthy is False
        self.assertIsInstance(flows, list)
|
||||
|
||||
|
||||
class TestFetchPrefectDoesNotCrash(unittest.TestCase):
    """test_fetch_prefect_does_not_crash

    fetch_prefect() must never raise, even on unexpected exceptions.
    """

    def test_fetch_prefect_does_not_crash_on_unexpected_exception(self):
        """Errors raised while constructing the client must be swallowed."""
        fetcher = _make_fetcher()

        # Simulate AsyncClient itself raising an unexpected error
        with patch.object(httpx, "AsyncClient", side_effect=RuntimeError("unexpected")):
            try:
                result = _run(fetcher.fetch_prefect())
            except Exception as exc:
                self.fail(f"fetch_prefect() raised unexpectedly: {exc}")

        self.assertEqual(result, (False, []))

    def test_fetch_prefect_does_not_crash_on_os_error(self):
        """OS-level network failures during .get() must be swallowed too."""
        fetcher = _make_fetcher()

        mock_client = AsyncMock()
        mock_client.__aenter__ = AsyncMock(return_value=mock_client)
        mock_client.__aexit__ = AsyncMock(return_value=False)
        mock_client.get = AsyncMock(side_effect=OSError("network unreachable"))

        with patch.object(httpx, "AsyncClient", return_value=mock_client):
            try:
                result = _run(fetcher.fetch_prefect())
            except Exception as exc:
                self.fail(f"fetch_prefect() raised unexpectedly: {exc}")

        self.assertEqual(result, (False, []))
|
||||
|
||||
|
||||
class TestSnapshotPrefectOfflineFields(unittest.TestCase):
    """test_snapshot_prefect_offline_fields

    When fetch_prefect() returns (False, []), the assembled DataSnapshot
    must have prefect_healthy=False and prefect_flows=[].
    """

    def test_snapshot_prefect_offline_fields(self):
        offline_snap = DataSnapshot(prefect_healthy=False, prefect_flows=[])
        self.assertFalse(offline_snap.prefect_healthy, "prefect_healthy must be False")
        self.assertEqual(offline_snap.prefect_flows, [], "prefect_flows must be empty list")

    def test_snapshot_default_is_offline(self):
        """Default DataSnapshot should represent offline state."""
        default_snap = DataSnapshot()
        self.assertFalse(default_snap.prefect_healthy)
        self.assertEqual(default_snap.prefect_flows, [])
|
||||
|
||||
|
||||
class TestPrefectPanelShowsOfflineText(unittest.TestCase):
    """test_prefect_panel_shows_offline_text

    When DataSnapshot.prefect_healthy=False, PrefectPanel._render_markup()
    must contain the string "PREFECT OFFLINE".
    """

    def test_prefect_panel_shows_offline_text_when_unhealthy(self):
        """An explicitly unhealthy snapshot must render the offline banner."""
        panel = PrefectPanel()
        snap = DataSnapshot(prefect_healthy=False, prefect_flows=[])

        panel._snap = snap
        markup = panel._render_markup()

        self.assertIn(
            "PREFECT OFFLINE",
            markup,
            f"Expected 'PREFECT OFFLINE' in panel markup, got:\n{markup}",
        )

    def test_prefect_panel_shows_offline_text_when_snap_is_none(self):
        """Panel must show PREFECT OFFLINE when no snapshot has been set."""
        panel = PrefectPanel()
        # _snap defaults to None

        markup = panel._render_markup()

        self.assertIn(
            "PREFECT OFFLINE",
            markup,
            f"Expected 'PREFECT OFFLINE' when snap is None, got:\n{markup}",
        )

    def test_prefect_panel_does_not_show_offline_when_healthy(self):
        """Sanity check: healthy snapshot should NOT show PREFECT OFFLINE."""
        panel = PrefectPanel()
        snap = DataSnapshot(prefect_healthy=True, prefect_flows=[])

        panel._snap = snap
        markup = panel._render_markup()

        self.assertNotIn(
            "PREFECT OFFLINE",
            markup,
            # Fix: was a placeholder-less f-string (spurious f prefix).
            "'PREFECT OFFLINE' should not appear when prefect_healthy=True",
        )
        self.assertIn("PREFECT ✓", markup)

    def test_update_data_does_not_crash_when_offline(self):
        """update_data() must not raise when called with an offline snapshot."""
        panel = PrefectPanel()
        # Patch the inherited update() method (from Static/object) so it's a no-op
        panel.update = MagicMock()

        snap = DataSnapshot(prefect_healthy=False, prefect_flows=[])

        try:
            panel.update_data(snap)
        except Exception as exc:
            self.fail(f"update_data() raised unexpectedly: {exc}")

        panel.update.assert_called_once()
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this test module directly via the unittest CLI runner.
    unittest.main()
|
||||
335
Observability/TUI/test_dolphin_tui_reconnect.py
Executable file
335
Observability/TUI/test_dolphin_tui_reconnect.py
Executable file
@@ -0,0 +1,335 @@
|
||||
"""
|
||||
test_dolphin_tui_reconnect.py
|
||||
|
||||
Verifies the HZ reconnect loop behavior of DolphinDataFetcher.
|
||||
|
||||
Tests:
|
||||
- test_hz_connected_flag_set_on_connect
|
||||
- test_hz_disconnected_flag_on_failure
|
||||
- test_reconnect_within_10s
|
||||
- test_backoff_resets_on_success
|
||||
- test_fetch_returns_none_fields_when_disconnected
|
||||
|
||||
All tests are self-contained and do NOT require a live Hazelcast instance.
|
||||
Backoff delays are patched to 0.05 s so the suite runs in seconds.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import sys
|
||||
import os
|
||||
import time
|
||||
import unittest
|
||||
from unittest.mock import AsyncMock, MagicMock, patch, call
|
||||
|
||||
# ---------------------------------------------------------------------------
# Make sure the TUI module is importable from this directory
# ---------------------------------------------------------------------------
sys.path.insert(0, os.path.dirname(__file__))

# Provide a stub for hazelcast so the import succeeds even without the package
# (setdefault keeps the real package if it is installed).
import types

_hz_stub = types.ModuleType("hazelcast")
_hz_stub.HazelcastClient = MagicMock()
sys.modules.setdefault("hazelcast", _hz_stub)

# Provide stubs for textual and httpx so dolphin_tui imports cleanly.
# Only missing modules are stubbed here.
for _mod in [
    "textual",
    "textual.app",
    "textual.containers",
    "textual.widgets",
]:
    if _mod not in sys.modules:
        _stub = types.ModuleType(_mod)
        sys.modules[_mod] = _stub

# Minimal textual stubs
# NOTE(review): these attribute assignments run unconditionally, so if the
# real textual package IS installed, its App/Static/etc. are overwritten with
# `object` for this process — confirm that is intended (it forces the TUI
# classes to behave as plain objects during these tests).
_textual_app = sys.modules["textual.app"]
_textual_app.App = object
_textual_app.ComposeResult = object

_textual_containers = sys.modules["textual.containers"]
_textual_containers.Horizontal = object

_textual_widgets = sys.modules["textual.widgets"]
_textual_widgets.Static = object
_textual_widgets.VerticalScroll = object

# httpx is only stubbed when absent; the AsyncClient mock satisfies imports.
if "httpx" not in sys.modules:
    _httpx_stub = types.ModuleType("httpx")
    _httpx_stub.AsyncClient = MagicMock()
    sys.modules["httpx"] = _httpx_stub
|
||||
from dolphin_tui import ( # noqa: E402
|
||||
DolphinDataFetcher,
|
||||
DataSnapshot,
|
||||
RECONNECT_INIT_S,
|
||||
RECONNECT_MULT,
|
||||
RECONNECT_MAX_S,
|
||||
)
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
FAST_BACKOFF = 0.05  # seconds — replaces 5 s initial delay in tests


def _make_mock_client() -> MagicMock:
    """Build a stand-in object exposing the minimal HazelcastClient surface."""
    fake = MagicMock()
    fake.shutdown = MagicMock()
    return fake
|
||||
|
||||
|
||||
def _run(coro):
    """Run *coro* to completion in a fresh event loop and return its result.

    Uses asyncio.run(), which creates a brand-new event loop, runs the
    coroutine, and closes the loop afterwards.  The previous implementation
    used asyncio.get_event_loop().run_until_complete(), which reuses (or
    implicitly creates) the thread's global loop — deprecated since
    Python 3.10 and contrary to this helper's documented "fresh event loop"
    contract.
    """
    return asyncio.run(coro)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Tests
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestHZConnectedFlagOnConnect(unittest.IsolatedAsyncioTestCase):
    """test_hz_connected_flag_set_on_connect

    A successful connect_hz() call must set hz_connected=True and store
    the client handle.
    """

    async def test_hz_connected_flag_set_on_connect(self):
        fake_client = _make_mock_client()
        fetcher = DolphinDataFetcher(hz_host="localhost", hz_port=5701)

        with patch("hazelcast.HazelcastClient", return_value=fake_client):
            ok = await fetcher.connect_hz()

        self.assertTrue(ok, "connect_hz() should return True on success")
        self.assertTrue(fetcher.hz_connected, "hz_connected must be True after successful connect")
        self.assertIs(fetcher.hz_client, fake_client, "hz_client must be the returned client")

        # Stop any reconnect task that may be running in the background.
        fetcher._running = False
        task = fetcher._reconnect_task
        if task and not task.done():
            task.cancel()
|
||||
|
||||
|
||||
class TestHZDisconnectedFlagOnFailure(unittest.IsolatedAsyncioTestCase):
    """test_hz_disconnected_flag_on_failure

    When HazelcastClient() raises, connect_hz() must return False and
    hz_connected must be False.
    """

    async def test_hz_disconnected_flag_on_failure(self):
        fetcher = DolphinDataFetcher(hz_host="localhost", hz_port=5701)

        with patch("hazelcast.HazelcastClient", side_effect=Exception("Connection refused")):
            ok = await fetcher.connect_hz()

        self.assertFalse(ok, "connect_hz() should return False on failure")
        self.assertFalse(fetcher.hz_connected, "hz_connected must be False after failed connect")
        self.assertIsNone(fetcher.hz_client, "hz_client must remain None after failed connect")

        # Tear down the reconnect loop that connect_hz started on failure.
        fetcher._running = False
        task = fetcher._reconnect_task
        if task and not task.done():
            task.cancel()
            try:
                await task
            except (asyncio.CancelledError, Exception):
                pass
|
||||
|
||||
|
||||
class TestReconnectWithin10s(unittest.IsolatedAsyncioTestCase):
    """test_reconnect_within_10s

    Scenario:
      1. Initial connect fails → hz_connected=False, reconnect loop starts.
      2. After ~0.1 s the mock is switched to succeed.
      3. hz_connected must become True within 10 s of the mock being restored.

    Backoff is patched to FAST_BACKOFF (0.05 s) so the test completes quickly.
    """

    async def test_reconnect_within_10s(self):
        mock_client = _make_mock_client()

        # State shared between the mock and the test.  The factory closure
        # reads this local through its closure cell, so flipping it below is
        # immediately visible to subsequent factory calls.
        should_succeed = False

        def hz_client_factory(**kwargs):
            # Stand-in for HazelcastClient(): fails until the flag is flipped.
            if should_succeed:
                return mock_client
            raise Exception("Connection refused")

        fetcher = DolphinDataFetcher(hz_host="localhost", hz_port=5701)
        # Patch backoff to be very short so the test is fast.
        # NOTE(review): assumes the fetcher exposes these private backoff
        # attributes — confirm against DolphinDataFetcher's implementation.
        fetcher._reconnect_backoff_initial = FAST_BACKOFF
        fetcher._reconnect_backoff = FAST_BACKOFF
        fetcher._reconnect_backoff_max = FAST_BACKOFF * 3

        with patch("hazelcast.HazelcastClient", side_effect=hz_client_factory):
            # Start the reconnect loop manually (simulates connect_hz failing)
            fetcher._start_reconnect()

            # Let the loop spin for a moment while HZ is "down"
            await asyncio.sleep(FAST_BACKOFF * 2)
            self.assertFalse(fetcher.hz_connected, "Should still be disconnected while HZ is down")

            # "Restart" HZ: from now on the factory returns the mock client.
            should_succeed = True
            t0 = time.monotonic()

            # Wait up to 10 s for reconnect (polling well below the backoff).
            deadline = 10.0
            while not fetcher.hz_connected and (time.monotonic() - t0) < deadline:
                await asyncio.sleep(0.05)

            elapsed = time.monotonic() - t0
            self.assertTrue(
                fetcher.hz_connected,
                f"hz_connected must be True within 10 s of HZ restart (elapsed: {elapsed:.2f}s)",
            )
            self.assertLess(elapsed, 10.0, f"Reconnect took too long: {elapsed:.2f}s")

            # Cleanup: shuts down the client and stops the reconnect loop.
            await fetcher.disconnect_hz()
|
||||
|
||||
|
||||
class TestBackoffResetsOnSuccess(unittest.IsolatedAsyncioTestCase):
    """test_backoff_resets_on_success

    After a successful reconnect the backoff delay must be reset to the
    initial value (RECONNECT_INIT_S / patched FAST_BACKOFF).
    """

    async def test_backoff_resets_on_success(self):
        mock_client = _make_mock_client()

        # Counts factory invocations so that exactly the first attempt fails
        # and the second succeeds (forcing one backoff cycle).
        call_count = 0

        def hz_client_factory(**kwargs):
            nonlocal call_count
            call_count += 1
            if call_count == 1:
                raise Exception("First attempt fails")
            return mock_client

        fetcher = DolphinDataFetcher(hz_host="localhost", hz_port=5701)
        # Shrink the backoff so the failed first attempt retries quickly.
        fetcher._reconnect_backoff_initial = FAST_BACKOFF
        fetcher._reconnect_backoff = FAST_BACKOFF
        fetcher._reconnect_backoff_max = FAST_BACKOFF * 10

        with patch("hazelcast.HazelcastClient", side_effect=hz_client_factory):
            fetcher._start_reconnect()

            # Wait for reconnect to succeed (second factory call).
            deadline = 5.0
            t0 = time.monotonic()
            while not fetcher.hz_connected and (time.monotonic() - t0) < deadline:
                await asyncio.sleep(0.05)

            self.assertTrue(fetcher.hz_connected, "Should have reconnected")
            # The loop must restore the backoff to its initial value once a
            # connection attempt succeeds, so the next outage starts fresh.
            self.assertAlmostEqual(
                fetcher._reconnect_backoff,
                FAST_BACKOFF,
                delta=1e-9,
                msg="Backoff must reset to initial value after successful reconnect",
            )

            await fetcher.disconnect_hz()
|
||||
|
||||
|
||||
class TestFetchReturnsNoneFieldsWhenDisconnected(unittest.IsolatedAsyncioTestCase):
    """test_fetch_returns_none_fields_when_disconnected

    When hz_client is None (disconnected), fetch() must return a DataSnapshot
    with hz_connected=False and all HZ-derived fields as None.
    """

    async def test_fetch_returns_none_fields_when_disconnected(self):
        fetcher = DolphinDataFetcher(hz_host="localhost", hz_port=5701)
        # Ensure no client is set
        fetcher.hz_client = None
        fetcher.hz_connected = False

        # Patch fetch_prefect so we don't need a live Prefect server
        fetcher.fetch_prefect = AsyncMock(return_value=(False, []))
        # Patch tail_log so we don't need a real log file
        fetcher.tail_log = MagicMock(return_value=[])
        # Prevent reconnect loop from starting during fetch
        fetcher._start_reconnect = MagicMock()

        snap = await fetcher.fetch()

        self.assertIsInstance(snap, DataSnapshot)
        self.assertFalse(snap.hz_connected, "hz_connected must be False when disconnected")

        # All HZ-derived numeric/string fields must be None.
        # NOTE(review): this list mirrors DataSnapshot's HZ-sourced attributes;
        # keep it in sync with the dataclass definition in dolphin_tui.
        hz_fields = [
            "scan_number", "vel_div", "w50_velocity", "w750_velocity",
            "instability_50", "scan_bridge_ts", "scan_age_s",
            "acb_boost", "acb_beta",
            "funding_btc", "dvol_btc", "fng", "taker", "vix", "ls_btc",
            "acb_ready", "acb_present", "exf_age_s",
            "moon_phase", "mercury_retro", "liquidity_session",
            "market_cycle_pos", "esof_age_s",
            "posture", "rm", "cat1", "cat2", "cat3", "cat4", "cat5",
            "capital", "drawdown", "peak_capital", "pnl", "trades",
            "nautilus_capital", "nautilus_pnl", "nautilus_trades",
            "nautilus_posture", "nautilus_param_hash",
            "heartbeat_ts", "heartbeat_phase", "heartbeat_flow", "heartbeat_age_s",
            "meta_rm", "meta_status",
            "m1_proc", "m2_heartbeat", "m3_data", "m4_cp", "m5_coh",
        ]
        for field_name in hz_fields:
            value = getattr(snap, field_name)
            self.assertIsNone(
                value,
                f"Field '{field_name}' must be None when disconnected, got {value!r}",
            )

        # Collection fields must be empty (not None) so panels can iterate.
        self.assertEqual(snap.asset_prices, {}, "asset_prices must be empty dict when disconnected")
        self.assertEqual(snap.obf_top, [], "obf_top must be empty list when disconnected")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Backoff constants sanity check (not a reconnect test, but validates spec)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestBackoffConstants(unittest.TestCase):
    """Verify the module-level backoff constants match the spec."""

    def test_reconnect_init_s(self):
        self.assertEqual(RECONNECT_INIT_S, 5.0, "Initial backoff must be 5 s per spec")

    def test_reconnect_multiplier(self):
        self.assertEqual(RECONNECT_MULT, 1.5, "Backoff multiplier must be 1.5x per spec")

    def test_reconnect_max_s(self):
        self.assertEqual(RECONNECT_MAX_S, 60.0, "Max backoff must be 60 s per spec")

    def test_backoff_sequence(self):
        """Verify the exponential sequence: 5 → 7.5 → 11.25 → ... capped at 60."""
        delay = RECONNECT_INIT_S
        sequence = [delay]
        for _ in range(10):
            delay = min(delay * RECONNECT_MULT, RECONNECT_MAX_S)
            sequence.append(delay)

        self.assertAlmostEqual(sequence[0], 5.0)
        self.assertAlmostEqual(sequence[1], 7.5)
        self.assertAlmostEqual(sequence[2], 11.25)
        self.assertTrue(all(v <= RECONNECT_MAX_S for v in sequence))
        self.assertEqual(sequence[-1], RECONNECT_MAX_S)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Run the reconnect test suite when executed directly.
    unittest.main()
|
||||
63
Observability/TUI/textual_poc.py
Executable file
63
Observability/TUI/textual_poc.py
Executable file
@@ -0,0 +1,63 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Minimal Textual proof-of-concept.
|
||||
Run: python3 textual_poc.py
|
||||
Press q to quit.
|
||||
"""
|
||||
from textual.app import App, ComposeResult
|
||||
from textual.widgets import Static, Header, Footer
|
||||
from textual.containers import Horizontal
|
||||
import time
|
||||
|
||||
|
||||
class Box(Static):
    """A demo panel that labels itself with its widget id when mounted."""

    def on_mount(self) -> None:
        label = self.id if self.id else "box"
        self.update(label)
|
||||
|
||||
|
||||
class PocApp(App):
    """Minimal three-panel Textual app with a once-per-second clock header.

    Demonstrates: CSS styling, key bindings, compose() layout, and
    set_interval()-driven widget updates.
    """

    # App-scoped stylesheet: dark screen, green-bordered panels, cyan clock.
    CSS = """
    Screen { background: #111; }
    Box {
        border: solid green;
        height: 8;
        content-align: center middle;
        color: white;
    }
    #clock { border: solid cyan; height: 3; }
    """

    # Single binding: 'q' quits the app.
    BINDINGS = [("q", "quit", "Quit")]

    def compose(self) -> ComposeResult:
        # Clock header, then three side-by-side demo panels, then a footer hint.
        yield Static(id="clock")
        with Horizontal():
            yield Box("PANEL A\nstatic text", id="panel_a")
            yield Box("PANEL B\nstatic text", id="panel_b")
            yield Box("PANEL C\nstatic text", id="panel_c")
        yield Static("[green]q=quit[/green] | Textual POC running OK")

    def on_mount(self) -> None:
        # Tick once per second; call immediately so the clock isn't blank
        # for the first second.
        self.set_interval(1, self._tick)
        self._tick()

    def _tick(self) -> None:
        # Refresh the clock header with the current UTC time.
        t = time.strftime("%Y-%m-%d %H:%M:%S UTC", time.gmtime())
        self.query_one("#clock", Static).update(
            f"[bold cyan]🐬 DOLPHIN TUI POC[/bold cyan] | {t} | Textual is working"
        )
        # Update panels with incrementing counter (wraps every 100 s).
        n = int(time.time()) % 100
        self.query_one("#panel_a", Box).update(
            f"[green]PANEL A[/green]\nvalue = {n}\nstatus = OK"
        )
        self.query_one("#panel_b", Box).update(
            f"[yellow]PANEL B[/yellow]\nvalue = {n*2}\nstatus = WARN"
        )
        self.query_one("#panel_c", Box).update(
            f"[red]PANEL C[/red]\nvalue = {n*3}\nstatus = CRIT"
        )
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Launch the proof-of-concept app (blocks until 'q' is pressed).
    PocApp().run()
|
||||
Reference in New Issue
Block a user