Files
DOLPHIN/nautilus_dolphin/Tail_Reinement_.Prompt.md
hjnormey 01c19662cb initial: import DOLPHIN baseline 2026-04-21 from dolphinng5_predict working tree
Includes core prod + GREEN/BLUE subsystems:
- prod/ (BLUE harness, configs, scripts, docs)
- nautilus_dolphin/ (GREEN Nautilus-native impl + dvae/ preserved)
- adaptive_exit/ (AEM engine + models/bucket_assignments.pkl)
- Observability/ (EsoF advisor, TUI, dashboards)
- external_factors/ (EsoF producer)
- mc_forewarning_qlabs_fork/ (MC regime/envelope)

Excludes runtime caches, logs, backups, and reproducible artifacts per .gitignore.
2026-04-21 16:58:38 +02:00

253 lines
11 KiB
Markdown
Executable File
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

The Key Test You Haven't Run Yet
Right now you tested:
“Do tail days have high precursor levels?”
What you must test next is:
When precursor spikes occur, how often do tails follow?
This flips the conditional direction.
You need:
P(Tail | v750 spike) vs P(Tail | no spike)
If:
P(Tail | spike) is 35× baseline,
then you truly have a surgical filter.
If:
P(Tail | spike) is only modestly higher, then you're just observing volatility clustering.
Why This Matters
Right now:
Extreme days are ~10% of sample.
If v750 spike happens on, say, 25% of days, and tails occur on 20% of those spike days, that's not a clean dodger.
Because filtering spike days cuts too much μ.
A surgical dodger must:
Trigger infrequently
Contain a disproportionate share of disasters
Preserve most high-μ days
That's the geometric requirement. One table:
Baseline P(Tail) P(Tail | v750 > 75th percentile) P(Tail | v750 > 90th percentile) P(Tail | v750 > 95th percentile)
If the curve explodes upward nonlinearly, you have a true convex hazard zone.
APART FROM THIS, I WILL PASTE SOME sample code prototyping the ESOTERIC_FACTORS. REVIEW THE CODE TO EXTRACT THE FEATURES THAT CAN BE COMPUTED ON THE FLY, and add each of them to the correlation tests you have run. Try and include also the population/weight stats, etc. You are welcome to write any stats you gather to disk, per scan period, right next to the source parquet files; name them ESOTERIC_data_TIMESTAMP, like the original files. Do not overwrite or alter any data files.
import datetime
import json
import math
import time
import zoneinfo
import numpy as np
from astropy.time import Time
import astropy.coordinates as coord
import astropy.units as u
from astropy.coordinates import solar_system_ephemeris, get_body, EarthLocation
class MarketIndicators:
    """Computes calendar, liquidity-session, and "esoteric" (astro/cycle) market features.

    All public methods take timezone-aware UTC datetimes. Expensive ephemeris
    lookups (moon phase, Mercury retrograde) are cached for 6 hours to avoid
    recomputing astropy solutions on every call.
    """

    def __init__(self):
        # Regions defined by NON-OVERLAPPING population clusters for accurate
        # global weighting. Population in millions (approximate); liquidity
        # weight is estimated crypto volume share. This fixes the previous
        # "triple-counting" of Asia.
        self.regions = [
            {'name': 'Americas', 'tz': 'America/New_York', 'pop': 1000, 'liq_weight': 0.35},  # N/S America
            {'name': 'EMEA', 'tz': 'Europe/London', 'pop': 2200, 'liq_weight': 0.30},  # Europe/Africa/Mid-East
            {'name': 'South_Asia', 'tz': 'Asia/Kolkata', 'pop': 1400, 'liq_weight': 0.05},  # India
            {'name': 'East_Asia', 'tz': 'Asia/Shanghai', 'pop': 1600, 'liq_weight': 0.20},  # China/Japan/Korea
            {'name': 'Oceania_SEA', 'tz': 'Asia/Singapore', 'pop': 800, 'liq_weight': 0.10},  # SE Asia/Australia
        ]
        # Market cycle: Bitcoin halving based, ~4 years.
        self.cycle_length_days = 1460
        self.last_halving = datetime.datetime(2024, 4, 20, tzinfo=datetime.timezone.utc)
        # Cache for expensive astro calculations: value + unix timestamp of computation.
        self._cache = {
            'moon': {'val': None, 'ts': 0},
            'mercury': {'val': None, 'ts': 0},
        }
        self.cache_ttl_seconds = 3600 * 6  # refresh astro values every 6 hours

    def get_calendar_items(self, now):
        """Explicit simple calendar outputs for a tz-aware datetime `now`."""
        return {
            'year': now.year,
            'month': now.month,
            'day_of_month': now.day,
            'hour': now.hour,
            'minute': now.minute,
            'day_of_week': now.weekday(),  # 0=Monday
            'week_of_year': now.isocalendar().week,
        }

    def get_regional_times(self, now_utc):
        """Per-region local fractional hour and whether TradFi is open there."""
        times = {}
        for region in self.regions:
            tz = zoneinfo.ZoneInfo(region['tz'])
            local_time = now_utc.astimezone(tz)
            times[region['name']] = {
                'hour': local_time.hour + local_time.minute / 60.0,
                'is_tradfi_open': self.is_tradfi_open(region['name'], local_time),
            }
        return times

    def is_tradfi_open(self, region_name, local_time):
        """Approximate local stock-exchange hours; weekends are always closed.

        NOTE(review): the 'Asia' substring matches both South_Asia and
        East_Asia; Oceania_SEA always returns False — confirm intended.
        """
        day = local_time.weekday()
        if day >= 5:
            return False
        hour = local_time.hour + local_time.minute / 60.0
        if 'Americas' in region_name:
            return 9.5 <= hour < 16.0  # NYSE: 09:30-16:00 local
        elif 'EMEA' in region_name:
            return 8.0 <= hour < 16.5  # LSE: 08:00-16:30 local
        elif 'Asia' in region_name:
            return 9.0 <= hour < 15.0
        return False

    def get_liquidity_session(self, now_utc):
        """Maps time to Crypto Liquidity Sessions (by UTC hour)."""
        utc_hour = now_utc.hour
        if 13 <= utc_hour < 17:
            return "LONDON_NEW_YORK_OVERLAP"
        elif 8 <= utc_hour < 13:
            return "LONDON_MORNING"
        elif 0 <= utc_hour < 8:
            return "ASIA_PACIFIC"
        elif 17 <= utc_hour < 21:
            return "NEW_YORK_AFTERNOON"
        else:
            return "LOW_LIQUIDITY"

    def get_weighted_times(self, now_utc):
        """
        Calculates two types of weighted hours:
        1. Population Weighted: "Global Human Activity Cycle"
        2. Liquidity Weighted: "Global Money Activity Cycle"

        Each region's local hour is mapped onto the unit circle and the
        weighted vector mean is converted back to an hour in [0, 24) —
        circular averaging avoids the midnight wrap-around problem.

        Returns (pop_hour, liq_hour), each rounded to 2 decimals.
        """
        pop_sin, pop_cos = 0, 0
        liq_sin, liq_cos = 0, 0
        total_pop = sum(r['pop'] for r in self.regions)  # ~7000M
        for region in self.regions:
            tz = zoneinfo.ZoneInfo(region['tz'])
            local_time = now_utc.astimezone(tz)
            hour_frac = (local_time.hour + local_time.minute / 60.0) / 24.0
            angle = 2 * math.pi * hour_frac
            # Population-weighted component.
            w_pop = region['pop'] / total_pop
            pop_sin += math.sin(angle) * w_pop
            pop_cos += math.cos(angle) * w_pop
            # Liquidity-weighted component (weights already sum to 1.0).
            w_liq = region['liq_weight']
            liq_sin += math.sin(angle) * w_liq
            liq_cos += math.cos(angle) * w_liq
        # Convert the mean vector back to an hour-of-day.
        pop_angle = math.atan2(pop_sin, pop_cos)
        if pop_angle < 0:
            pop_angle += 2 * math.pi
        pop_hour = (pop_angle / (2 * math.pi)) * 24
        liq_angle = math.atan2(liq_sin, liq_cos)
        if liq_angle < 0:
            liq_angle += 2 * math.pi
        liq_hour = (liq_angle / (2 * math.pi)) * 24
        return round(pop_hour, 2), round(liq_hour, 2)

    def get_market_cycle_position(self, now_utc):
        """Fractional position in [0, 1) within the ~4-year halving cycle."""
        days_since_halving = (now_utc - self.last_halving).days
        position = (days_since_halving % self.cycle_length_days) / self.cycle_length_days
        return position

    def get_fibonacci_time(self, now_utc):
        """Distance of minutes-since-UTC-midnight to the nearest Fibonacci number.

        `harmonic_strength` decays linearly to 0 over a 30-minute distance.
        """
        mins_passed = now_utc.hour * 60 + now_utc.minute
        fib_seq = [1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597]
        closest = min(fib_seq, key=lambda x: abs(x - mins_passed))
        distance = abs(mins_passed - closest)
        strength = 1.0 - min(distance / 30.0, 1.0)
        return {'closest_fib_minute': closest, 'harmonic_strength': round(strength, 3)}

    def get_moon_phase(self, now_utc):
        """Moon illumination fraction and phase name (cached for 6 hours)."""
        now_ts = now_utc.timestamp()
        if self._cache['moon']['val'] is not None and (now_ts - self._cache['moon']['ts'] < self.cache_ttl_seconds):
            return self._cache['moon']['val']
        t = Time(now_utc)
        with solar_system_ephemeris.set('builtin'):
            # get_body('moon', ...) replaces coord.get_moon, which was
            # deprecated in astropy 5.3 and removed in astropy 6.0.
            moon = get_body('moon', t)
            sun = coord.get_sun(t)
        elongation = sun.separation(moon)
        phase_angle = np.arctan2(sun.distance * np.sin(elongation),
                                 moon.distance - sun.distance * np.cos(elongation))
        illumination = (1 + np.cos(phase_angle)) / 2.0
        # NOTE(review): waxing/waning is decided by comparing declinations,
        # which is a rough proxy — ecliptic longitude difference would be the
        # astronomically correct discriminator. Kept as-is; confirm intent.
        if illumination < 0.03:
            phase_name = "NEW_MOON"
        elif illumination > 0.97:
            phase_name = "FULL_MOON"
        elif illumination < 0.5:
            phase_name = "WAXING_CRESCENT" if moon.dec.deg > sun.dec.deg else "WANING_CRESCENT"
        else:
            phase_name = "WAXING_GIBBOUS" if moon.dec.deg > sun.dec.deg else "WANING_GIBBOUS"
        result = {'illumination': float(illumination), 'phase_name': phase_name}
        self._cache['moon'] = {'val': result, 'ts': now_ts}
        return result

    def is_mercury_retrograde(self, now_utc):
        """True if Mercury's geocentric ecliptic longitude is decreasing (cached).

        On ephemeris failure this logs the error and returns (and caches) False —
        deliberate best-effort behavior.
        """
        now_ts = now_utc.timestamp()
        if self._cache['mercury']['val'] is not None and (now_ts - self._cache['mercury']['ts'] < self.cache_ttl_seconds):
            return self._cache['mercury']['val']
        t = Time(now_utc)
        is_retro = False
        try:
            with solar_system_ephemeris.set('builtin'):
                loc = EarthLocation.of_site('greenwich')
                merc_now = get_body('mercury', t, loc)
                merc_later = get_body('mercury', t + 1 * u.day, loc)
            # Use geocentric ecliptic longitude for correct astrological determination.
            lon_now = merc_now.geometrictrueecliptic.lon.deg
            lon_later = merc_later.geometrictrueecliptic.lon.deg
            diff = (lon_later - lon_now) % 360
            is_retro = diff > 180  # apparent motion is "backwards" (wrapping 360)
        except Exception as e:
            print(f"Astro calc error: {e}")
        self._cache['mercury'] = {'val': is_retro, 'ts': now_ts}
        return is_retro

    def get_indicators(self):
        """Assemble the full indicator snapshot for the current UTC instant."""
        now_utc = datetime.datetime.now(datetime.timezone.utc)
        pop_hour, liq_hour = self.get_weighted_times(now_utc)
        moon_data = self.get_moon_phase(now_utc)
        calendar = self.get_calendar_items(now_utc)
        indicators = {
            'timestamp': now_utc.isoformat(),
            'unix': int(now_utc.timestamp()),
            # Simple Calendar
            'calendar': calendar,
            # Temporal & Geometry
            'fibonacci_time': self.get_fibonacci_time(now_utc),
            # Global Activity (Dual Weighted)
            'regional_times': self.get_regional_times(now_utc),
            'population_weighted_hour': pop_hour,  # Human Activity Cycle
            'liquidity_weighted_hour': liq_hour,  # Money Activity Cycle
            'liquidity_session': self.get_liquidity_session(now_utc),
            # Macro Cycles
            'market_cycle_position': round(self.get_market_cycle_position(now_utc), 4),
            # Esoteric
            'moon_illumination': moon_data['illumination'],
            'moon_phase_name': moon_data['phase_name'],
            'mercury_retrograde': self.is_mercury_retrograde(now_utc),
        }
        return indicators
if __name__ == "__main__":
    # Script entry point: print a full indicator snapshot as pretty JSON.
    snapshot = MarketIndicators().get_indicators()
    print(json.dumps(snapshot, indent=2))