Includes core prod + GREEN/BLUE subsystems: - prod/ (BLUE harness, configs, scripts, docs) - nautilus_dolphin/ (GREEN Nautilus-native impl + dvae/ preserved) - adaptive_exit/ (AEM engine + models/bucket_assignments.pkl) - Observability/ (EsoF advisor, TUI, dashboards) - external_factors/ (EsoF producer) - mc_forewarning_qlabs_fork/ (MC regime/envelope) Excludes runtime caches, logs, backups, and reproducible artifacts per .gitignore.
114 lines
3.7 KiB
Python
Executable File
114 lines
3.7 KiB
Python
Executable File
"""
|
|
Live Monte Carlo Forewarning Service
|
|
====================================
|
|
|
|
Continously monitors the active Nautilus-Dolphin configuration
|
|
against the pre-trained Monte Carlo operational envelope.
|
|
|
|
Logs warnings and generates alerts if the parameters drift near
|
|
the edge of the validated MC envelope, preventing catastrophic swans.
|
|
"""
|
|
|
|
import json
import logging
import os
import sys
import time
from datetime import datetime
from pathlib import Path

# Make this project and the sibling external_factors package importable
# regardless of the current working directory.
PROJECT_ROOT = Path(__file__).resolve().parent
sys.path.insert(0, str(PROJECT_ROOT))
sys.path.insert(0, str(PROJECT_ROOT.parent / 'external_factors'))

# Project-local imports; require the sys.path adjustments above.
from mc.mc_ml import DolphinForewarner
from mc.mc_sampler import MCSampler

# Log both to stdout and to a service log file next to this script.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - [FOREWARNER] - %(levelname)s - %(message)s",
    handlers=[
        logging.StreamHandler(sys.stdout),
        logging.FileHandler(PROJECT_ROOT / "forewarning_service.log"),
    ],
)

# Directory holding the pre-trained MC envelope models.
MODELS_DIR = PROJECT_ROOT / "mc_results" / "models"
CHECK_INTERVAL_SECONDS = 3600 * 4  # Check every 4 hours
def get_current_live_config() -> dict:
    """
    Simulates fetching the active trading system configuration.

    In full production, this would query Nautilus' live dictionary.
    For now, it pulls the baseline champion and applies any overrides.

    Returns:
        The champion trial configuration as a plain dict.
    """
    # Baseline champion config straight from the MC sampler.
    champion_trial = MCSampler().generate_champion_trial()
    # In a fully dynamic environment, we would overlay real-time changes
    # For demonstration, we simply return the dict
    return champion_trial.to_dict()
def determine_risk_level(report) -> str:
    """
    Assess risk level per MONTE_CARLO_SYSTEM_ENVELOPE_SPEC.md mapping.

    Args:
        report: Assessment result exposing ``envelope_score``,
            ``catastrophic_probability`` and ``champion_probability``.

    Returns:
        One of "RED", "ORANGE", "GREEN" or "AMBER".
    """
    env = report.envelope_score
    cat = report.catastrophic_probability
    champ = report.champion_probability

    if cat > 0.25 or env < -1.0:
        return "RED"
    elif env < 0 or cat > 0.10:
        return "ORANGE"
    # BUG FIX: GREEN must be checked before the broader AMBER condition.
    # Previously the AMBER branch (env > 0, champ > 0.4) came first and is a
    # strict superset of the GREEN one, so GREEN was unreachable and healthy
    # configs were always reported as AMBER.
    elif env > 0.5 and champ > 0.6:
        return "GREEN"
    elif env > 0 and champ > 0.4:
        return "AMBER"
    else:
        return "AMBER"  # Default transitional state
def run_service():
    """
    Run the forewarning loop indefinitely.

    Loads the pre-trained MC envelope models, then every
    CHECK_INTERVAL_SECONDS assesses the current live configuration and
    logs a warning (plus any report warnings) when the risk level is
    ORANGE or RED.

    Exits the process with status 1 if the models directory is missing
    or the models fail to load. Assessment errors are logged and the
    loop continues on the next cycle.
    """
    logging.info(f"Starting Monte Carlo Forewarning Service. Checking every {CHECK_INTERVAL_SECONDS} seconds.")
    if not MODELS_DIR.exists():
        logging.error(f"Models directory not found at {MODELS_DIR}. Ensure you've run 'python run_mc_envelope.py --mode train' first.")
        sys.exit(1)

    try:
        forewarner = DolphinForewarner(models_dir=str(MODELS_DIR))
    except Exception as e:
        # Model loading is fatal; log the full traceback before exiting.
        # BUG FIX: logging.exception preserves the stack trace that
        # logging.error silently dropped.
        logging.exception(f"Failed to load ML models: {e}")
        sys.exit(1)

    while True:
        try:
            config_dict = get_current_live_config()
            report = forewarner.assess_config_dict(config_dict)
            level = determine_risk_level(report)

            log_msg = f"Check complete. Risk Level: {level} | Env_Score: {report.envelope_score:.3f} | Cat_Prob: {report.catastrophic_probability:.1%}"

            if level in ['ORANGE', 'RED']:
                logging.warning("!!! HIGH RISK CONFIGURATION DETECTED !!!")
                logging.warning(log_msg)
                if report.warnings:
                    for w in report.warnings:
                        logging.warning(f" -> {w}")
            else:
                logging.info(log_msg)

        except Exception as e:
            # BUG FIX: logging.exception keeps the traceback; logging.error
            # hid the failure site, making loop errors nearly undebuggable.
            logging.exception(f"Error during assessment loop: {e}")

        # Sleep till next cycle
        time.sleep(CHECK_INTERVAL_SECONDS)
|
if __name__ == "__main__":
|
|
try:
|
|
run_service()
|
|
except KeyboardInterrupt:
|
|
logging.info("Forewarning service shutting down.")
|