initial: import DOLPHIN baseline 2026-04-21 from dolphinng5_predict working tree
Includes core prod + GREEN/BLUE subsystems:
- prod/ (BLUE harness, configs, scripts, docs)
- nautilus_dolphin/ (GREEN Nautilus-native impl + dvae/ preserved)
- adaptive_exit/ (AEM engine + models/bucket_assignments.pkl)
- Observability/ (EsoF advisor, TUI, dashboards)
- external_factors/ (EsoF producer)
- mc_forewarning_qlabs_fork/ (MC regime/envelope)

Excludes runtime caches, logs, backups, and reproducible artifacts per .gitignore.
This commit is contained in:
376
prod/tests/test_scan_bridge_prefect_daemon.py
Executable file
376
prod/tests/test_scan_bridge_prefect_daemon.py
Executable file
@@ -0,0 +1,376 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Tests for Scan Bridge Prefect Daemon
|
||||
=====================================
|
||||
Unit and integration tests for the Prefect-managed scan bridge.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import time
|
||||
import json
|
||||
import signal
|
||||
import subprocess
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from unittest.mock import Mock, patch, MagicMock
|
||||
|
||||
# Add paths
|
||||
sys.path.insert(0, '/mnt/dolphinng5_predict')
|
||||
sys.path.insert(0, '/mnt/dolphinng5_predict/prod')
|
||||
|
||||
import pytest
|
||||
|
||||
# Import module under test
|
||||
from scan_bridge_prefect_daemon import (
|
||||
ScanBridgeProcess,
|
||||
check_hazelcast_data_freshness,
|
||||
perform_health_check,
|
||||
HEALTH_CHECK_INTERVAL,
|
||||
DATA_STALE_THRESHOLD,
|
||||
DATA_WARNING_THRESHOLD,
|
||||
)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Fixtures
|
||||
# =============================================================================
|
||||
|
||||
@pytest.fixture
def mock_hazelcast_client():
    """Patch the daemon's ``hazelcast`` module with a MagicMock.

    The mocked client's map chain (``get_map(...).blocking().get(...)``)
    returns a freshly-timestamped scan payload as JSON, so freshness
    checks see live data by default. Yields the module-level mock so
    individual tests can rewire the chain for stale/empty scenarios.
    """
    with patch('scan_bridge_prefect_daemon.hazelcast') as hz_module:
        # Default payload: stamped "now", so data_age_sec is near zero.
        fresh_payload = {
            'scan_number': 9999,
            'file_mtime': time.time(),
            'assets': ['BTCUSDT'] * 50,
            'asset_prices': [70000.0] * 50,
        }

        fake_map = MagicMock()
        fake_map.get.return_value = json.dumps(fresh_payload)

        fake_client = MagicMock()
        fake_client.get_map.return_value.blocking.return_value = fake_map
        hz_module.HazelcastClient.return_value = fake_client

        yield hz_module
|
||||
|
||||
|
||||
@pytest.fixture
def process_manager():
    """Yield a fresh ScanBridgeProcess; stop it on teardown if still running."""
    manager = ScanBridgeProcess()
    yield manager
    # Teardown: never leave a live child process behind after a test.
    if manager.is_running():
        manager.stop()
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Test Class: ScanBridgeProcess
|
||||
# =============================================================================
|
||||
|
||||
class TestScanBridgeProcess:
    """Unit tests for the ScanBridgeProcess lifecycle manager."""

    def test_initialization(self, process_manager):
        """A fresh manager has no process, no start time, zero restarts."""
        assert process_manager.process is None
        assert process_manager.start_time is None
        assert process_manager.restart_count == 0
        assert not process_manager.is_running()

    def test_is_running_false_when_not_started(self, process_manager):
        """is_running() is False before start() is ever called."""
        assert not process_manager.is_running()

    def test_get_exit_code_none_when_not_started(self, process_manager):
        """get_exit_code() is None before start() is ever called."""
        assert process_manager.get_exit_code() is None

    @patch('scan_bridge_prefect_daemon.subprocess.Popen')
    def test_start_success(self, mock_popen, process_manager):
        """start() reports success when the spawned child stays alive."""
        child = MagicMock()
        child.poll.return_value = None  # still running after launch
        child.pid = 12345
        mock_popen.return_value = child

        with patch('scan_bridge_prefect_daemon.time.sleep'):
            started = process_manager.start()

        assert started is True
        assert process_manager.is_running()
        assert process_manager.process.pid == 12345
        assert process_manager.start_time is not None
        mock_popen.assert_called_once()

    @patch('scan_bridge_prefect_daemon.subprocess.Popen')
    def test_start_failure_immediate_exit(self, mock_popen, process_manager):
        """start() reports failure when the child dies immediately."""
        child = MagicMock()
        child.poll.return_value = 1  # non-zero exit code already visible
        mock_popen.return_value = child

        with patch('scan_bridge_prefect_daemon.time.sleep'):
            started = process_manager.start()

        assert started is False
        assert not process_manager.is_running()

    @patch('scan_bridge_prefect_daemon.subprocess.Popen')
    def test_stop_graceful(self, mock_popen, process_manager):
        """stop() sends SIGTERM and waits for the child to exit."""
        child = MagicMock()
        child.poll.return_value = None  # keeps running until stopped
        child.pid = 12345
        child.wait.return_value = None
        mock_popen.return_value = child

        # Start first (sleep and monitor thread are patched out).
        with patch('scan_bridge_prefect_daemon.time.sleep'), \
             patch('scan_bridge_prefect_daemon.threading.Thread'):
            process_manager.start()

        # Then stop gracefully.
        process_manager.stop()

        child.send_signal.assert_called_once_with(signal.SIGTERM)
        child.wait.assert_called_once()

    @patch('scan_bridge_prefect_daemon.subprocess.Popen')
    def test_stop_force_kill(self, mock_popen, process_manager):
        """stop() escalates to kill() when the graceful wait times out."""
        child = MagicMock()
        child.poll.return_value = None
        child.pid = 12345
        # Simulate a child that ignores SIGTERM: wait() always times out.
        child.wait.side_effect = subprocess.TimeoutExpired(cmd='test', timeout=10)
        mock_popen.return_value = child

        with patch('scan_bridge_prefect_daemon.time.sleep'), \
             patch('scan_bridge_prefect_daemon.threading.Thread'):
            process_manager.start()

        # Stop (the mocked wait raises, so this path must force kill).
        process_manager.stop(timeout=1)

        child.kill.assert_called_once()
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Test Class: Hazelcast Data Freshness
|
||||
# =============================================================================
|
||||
|
||||
class TestHazelcastDataFreshness:
    """Tests for the check_hazelcast_data_freshness() probe."""

    @staticmethod
    def _install_payload(hz_module, payload):
        """Route the mocked map's get() to return *payload* as JSON."""
        blocking_map = (
            hz_module.HazelcastClient.return_value
            .get_map.return_value.blocking.return_value
        )
        blocking_map.get.return_value = json.dumps(payload)

    @patch('scan_bridge_prefect_daemon.HAZELCAST_AVAILABLE', True)
    def test_fresh_data(self, mock_hazelcast_client):
        """A just-written payload is reported fresh with no warning."""
        result = check_hazelcast_data_freshness()

        assert result['available'] is True
        assert result['has_data'] is True
        assert result['scan_number'] == 9999
        assert result['asset_count'] == 50
        assert result['data_age_sec'] < 5  # fixture stamps mtime "now"
        assert result['is_fresh'] is True
        assert result['is_warning'] is False

    @patch('scan_bridge_prefect_daemon.HAZELCAST_AVAILABLE', True)
    def test_stale_data(self, mock_hazelcast_client):
        """A payload older than the stale threshold is flagged not fresh."""
        self._install_payload(mock_hazelcast_client, {
            'scan_number': 1000,
            'file_mtime': time.time() - 120,  # 2 minutes ago
            'assets': ['BTCUSDT'],
        })

        result = check_hazelcast_data_freshness()

        assert result['available'] is True
        assert result['has_data'] is True
        assert result['data_age_sec'] > DATA_STALE_THRESHOLD
        assert result['is_fresh'] is False

    @patch('scan_bridge_prefect_daemon.HAZELCAST_AVAILABLE', True)
    def test_warning_data(self, mock_hazelcast_client):
        """A payload past the warning (but not stale) threshold warns only."""
        self._install_payload(mock_hazelcast_client, {
            'scan_number': 1000,
            'file_mtime': time.time() - 45,  # 45 seconds ago
            'assets': ['BTCUSDT'],
        })

        result = check_hazelcast_data_freshness()

        assert result['available'] is True
        assert result['data_age_sec'] > DATA_WARNING_THRESHOLD
        assert result['is_warning'] is True
        assert result['is_fresh'] is True  # warning level, not yet stale

    @patch('scan_bridge_prefect_daemon.HAZELCAST_AVAILABLE', True)
    def test_no_data_in_hz(self, mock_hazelcast_client):
        """An empty map yields has_data=False plus an error message."""
        blocking_map = (
            mock_hazelcast_client.HazelcastClient.return_value
            .get_map.return_value.blocking.return_value
        )
        blocking_map.get.return_value = None

        result = check_hazelcast_data_freshness()

        assert result['available'] is True
        assert result['has_data'] is False
        assert 'error' in result

    def test_hazelcast_not_available(self):
        """With the hazelcast module missing, the probe degrades cleanly."""
        with patch('scan_bridge_prefect_daemon.HAZELCAST_AVAILABLE', False):
            result = check_hazelcast_data_freshness()

        assert result['available'] is False
        assert 'error' in result

    @patch('scan_bridge_prefect_daemon.HAZELCAST_AVAILABLE', True)
    def test_hazelcast_connection_error(self, mock_hazelcast_client):
        """A client construction failure is reported, not raised."""
        mock_hazelcast_client.HazelcastClient.side_effect = Exception("Connection refused")

        result = check_hazelcast_data_freshness()

        assert result['available'] is True  # module importable
        assert result['has_data'] is False
        assert 'error' in result
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Test Class: Health Check Task
|
||||
# =============================================================================
|
||||
|
||||
class TestPerformHealthCheck:
    """Tests for the perform_health_check Prefect task."""

    @staticmethod
    def _running_bridge(mock_pm):
        """Configure *mock_pm* to look like a live bridge process."""
        mock_pm.is_running.return_value = True
        mock_pm.process = MagicMock()
        mock_pm.process.pid = 12345
        mock_pm.start_time = datetime.now(timezone.utc)

    @patch('scan_bridge_prefect_daemon.get_run_logger')
    @patch('scan_bridge_prefect_daemon.HAZELCAST_AVAILABLE', True)
    def test_healthy_state(self, mock_logger, mock_hazelcast_client):
        """Live process + fresh data => healthy, no action required."""
        with patch('scan_bridge_prefect_daemon.bridge_process') as mock_pm:
            self._running_bridge(mock_pm)
            result = perform_health_check()

        assert result['healthy'] is True
        assert result['process_running'] is True
        assert result['action_required'] is None

    @patch('scan_bridge_prefect_daemon.get_run_logger')
    def test_process_not_running(self, mock_logger):
        """Dead process => unhealthy with a restart action."""
        with patch('scan_bridge_prefect_daemon.bridge_process') as mock_pm:
            mock_pm.is_running.return_value = False
            result = perform_health_check()

        assert result['healthy'] is False
        assert result['process_running'] is False
        assert result['action_required'] == 'restart'

    @patch('scan_bridge_prefect_daemon.get_run_logger')
    @patch('scan_bridge_prefect_daemon.HAZELCAST_AVAILABLE', True)
    def test_stale_data_triggers_restart(self, mock_logger, mock_hazelcast_client):
        """Live process but stale data => unhealthy with a restart action."""
        stale_payload = {
            'scan_number': 1000,
            'file_mtime': time.time() - 120,  # well past the stale threshold
            'assets': ['BTCUSDT'],
        }
        blocking_map = (
            mock_hazelcast_client.HazelcastClient.return_value
            .get_map.return_value.blocking.return_value
        )
        blocking_map.get.return_value = json.dumps(stale_payload)

        with patch('scan_bridge_prefect_daemon.bridge_process') as mock_pm:
            self._running_bridge(mock_pm)
            result = perform_health_check()

        assert result['healthy'] is False
        assert result['action_required'] == 'restart'
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Test Class: Integration Tests
|
||||
# =============================================================================
|
||||
|
||||
@pytest.mark.integration
class TestIntegration:
    """Integration tests requiring real infrastructure."""

    def test_real_hazelcast_connection(self):
        """Read the latest scan from a live Hazelcast cluster, if reachable."""
        try:
            import hazelcast

            client = hazelcast.HazelcastClient(
                cluster_name="dolphin",
                cluster_members=["127.0.0.1:5701"],
            )

            # Pull the latest scan payload, then release the connection.
            features_map = client.get_map('DOLPHIN_FEATURES').blocking()
            val = features_map.get('latest_eigen_scan')
            client.shutdown()

            if val:
                data = json.loads(val)
                print(f"\n✓ Real Hz: Scan #{data.get('scan_number')}, {len(data.get('assets', []))} assets")
            else:
                print("\n⚠ Real Hz connected but no data")
        except Exception as e:
            # Any failure (import, connect, read) means no cluster: skip.
            pytest.skip(f"Hazelcast not available: {e}")

    def test_real_process_lifecycle(self):
        """Byte-compile the bridge script instead of actually launching it."""
        script_path = Path('/mnt/dolphinng5_predict/prod/scan_bridge_service.py')
        if not script_path.exists():
            pytest.skip("scan_bridge_service.py not found")

        # Don't start the real bridge in tests; compiling proves the
        # file at least parses as valid Python.
        compile_result = subprocess.run(
            [sys.executable, '-m', 'py_compile', str(script_path)],
            capture_output=True
        )
        assert compile_result.returncode == 0, "Script has syntax errors"
        print("\n✓ Script syntax valid")
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Test Runner
|
||||
# =============================================================================
|
||||
|
||||
if __name__ == '__main__':
    # Direct invocation: run the unit suite through pytest.
    print("=" * 70)
    print("🧪 Scan Bridge Prefect Daemon Tests")
    print("=" * 70)

    # Deselect by marker rather than name substring: the integration tests
    # are tagged @pytest.mark.integration, so `-m` targets exactly them,
    # whereas `-k 'not integration'` would also drop any unmarked test
    # whose name merely contains the word "integration".
    exit_code = pytest.main([
        __file__,
        '-v',
        '--tb=short',
        '-m', 'not integration',  # skip integration tests by default
    ])

    sys.exit(exit_code)
|
||||
Reference in New Issue
Block a user