WIP: feat(nautilus): initial integration #1

Draft
hjnormey wants to merge 14 commits from feat/nautilus-dolphin-integration into main
Showing only changes of commit ba220f2380
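Reviewer note: the substance of the first hunk below is an ordering change: the throttle limit is now applied before tick sizes are extracted, so in throttle mode self.tick_sizes only holds the selected subset instead of every USDT pair. As a rough aid for reviewing (or unit-testing) that logic without hitting the Binance API, here is a minimal sketch; the helper names (select_symbols, extract_tick_size) and the sample payload are illustrative only and do not come from this PR:

# Illustrative sketch only: select_symbols/extract_tick_size are hypothetical helpers,
# not part of this PR; they mirror STEP 2 and the filter-fallback chain in STEP 3.
def select_symbols(full_symbols: list, throttle_mode: bool, max_symbols: int) -> list:
    """Apply the throttle limit BEFORE any per-symbol processing (STEP 2)."""
    return full_symbols[:max_symbols] if throttle_mode else full_symbols

def extract_tick_size(symbol_info: dict) -> float:
    """PRICE_FILTER first, then TICK_SIZE, then the 1e-8 default (STEP 3)."""
    for filter_type in ("PRICE_FILTER", "TICK_SIZE"):
        for f in symbol_info["filters"]:
            if f["filterType"] == filter_type:
                return float(f["tickSize"])
    return 1e-8  # Default fallback, as in the actor

# Tiny fabricated exchangeInfo-style payload for demonstration
data = {"symbols": [
    {"symbol": "BTCUSDT", "status": "TRADING",
     "filters": [{"filterType": "PRICE_FILTER", "tickSize": "0.01"}]},
    {"symbol": "ETHUSDT", "status": "TRADING", "filters": []},  # exercises the 1e-8 fallback
    {"symbol": "BTCBUSD", "status": "TRADING", "filters": []},  # dropped: not USDT-quoted
]}

full_symbols = [s["symbol"] for s in data["symbols"]
                if s["status"] == "TRADING" and s["symbol"].endswith("USDT")]
selected = set(select_symbols(full_symbols, throttle_mode=True, max_symbols=1))
tick_sizes = {s["symbol"]: extract_tick_size(s)
              for s in data["symbols"] if s["symbol"] in selected}
print(tick_sizes)  # {'BTCUSDT': 0.01} -- only the throttled subset is processed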


@@ -412,61 +412,77 @@ class SILOQYSymbolDiscoveryActor(Actor):
             self.log.error(f"Nautilus ActorExecutor: Failed to complete symbol discovery: {e}")
             # Don't re-raise, let system continue
 
     async def _discover_all_symbols(self):
         """PRESERVED: Original Binance symbol discovery algorithm"""
         self.log.info("Starting dynamic symbol discovery from Binance...")
         url = "https://api.binance.com/api/v3/exchangeInfo"
         async with httpx.AsyncClient() as client:
             self.log.info("Fetching exchange info from Binance API...")
             response = await client.get(url, timeout=10)
             if response.status_code == 200:
                 self.log.info("Successfully received exchange info")
                 data = response.json()
-                # Combined symbol discovery and tick size extraction
-                self.log.info("Processing symbols and extracting tick sizes...")
+                # STEP 1: Collect all symbol names first
+                self.log.info("Collecting all available symbols...")
                 full_symbols = []
                 for symbol_info in data['symbols']:
                     if symbol_info['status'] == 'TRADING' and symbol_info['symbol'].endswith('USDT'):
                         symbol = symbol_info['symbol']
                         full_symbols.append(symbol)
-                        # Extract tick size while processing
-                        tick_size = None
-                        for filter_info in symbol_info['filters']:
-                            if filter_info['filterType'] == 'PRICE_FILTER':
-                                tick_size = float(filter_info['tickSize'])
-                                break
-                        # If no PRICE_FILTER found, try other filter types
-                        if tick_size is None:
-                            for filter_info in symbol_info['filters']:
-                                if filter_info['filterType'] == 'TICK_SIZE':
-                                    tick_size = float(filter_info['tickSize'])
-                                    break
-                        # Fallback to default if still not found
-                        if tick_size is None:
-                            tick_size = 1e-8  # Default fallback
-                            self.log.warning(f"No tick size found for {symbol}, using fallback {tick_size}")
-                        self.tick_sizes[symbol] = tick_size
-                self.log.info(f"Processed {len(full_symbols)} symbols, extracted {len(self.tick_sizes)} tick sizes")
-                # Apply throttle mode symbol limiting
-                if self.throttle_mode:
-                    self.symbols = full_symbols[:self.max_symbols_throttled]
-                    self.log.warning(f"THROTTLE MODE: Limited to {len(self.symbols)} symbols (from {len(full_symbols)} available)")
-                else:
-                    self.symbols = full_symbols
-                self.log.info(f"Discovered {len(self.symbols)} trading symbols")
-                self.log.info(f"First 10 symbols: {self.symbols[:10]}")
+                # Log the full symbols list obtained from exchange
+                self.log.info(f"Full symbols discovered from exchange: {len(full_symbols)} total")
+                self.log.info(f"First 10 symbols: {full_symbols[:10]}")
+                # STEP 2: Apply throttle mode symbol limiting BEFORE processing
+                if self.throttle_mode:
+                    symbols_to_process = full_symbols[:self.max_symbols_throttled]
+                    self.log.warning(f"THROTTLE MODE: Will process {len(symbols_to_process)} symbols (from {len(full_symbols)} available)")
+                else:
+                    symbols_to_process = full_symbols
+                # STEP 3: Process tick sizes only for selected symbols
+                self.log.info("Processing symbols and extracting tick sizes...")
+                symbols_to_process_set = set(symbols_to_process)  # For O(1) lookup
+                for symbol_info in data['symbols']:
+                    symbol = symbol_info['symbol']
+                    # Only process symbols that are in our selected list
+                    if symbol not in symbols_to_process_set:
+                        continue
+                    # Extract tick size while processing
+                    tick_size = None
+                    for filter_info in symbol_info['filters']:
+                        if filter_info['filterType'] == 'PRICE_FILTER':
+                            tick_size = float(filter_info['tickSize'])
+                            break
+                    # If no PRICE_FILTER found, try other filter types
+                    if tick_size is None:
+                        for filter_info in symbol_info['filters']:
+                            if filter_info['filterType'] == 'TICK_SIZE':
+                                tick_size = float(filter_info['tickSize'])
+                                break
+                    # Fallback to default if still not found
+                    if tick_size is None:
+                        tick_size = 1e-8  # Default fallback
+                        self.log.warning(f"No tick size found for {symbol}, using fallback {tick_size}")
+                    self.tick_sizes[symbol] = tick_size
+                # Set final symbols list
+                self.symbols = symbols_to_process
+                self.log.info(f"Processed {len(symbols_to_process)} symbols, extracted {len(self.tick_sizes)} tick sizes")
+                self.log.info(f"Discovered {len(self.symbols)} trading symbols")
+                self.log.info(f"First 10 symbols: {self.symbols[:10]}")
             else:
                 self.log.error(f"Failed to fetch exchange info: {response.status_code}")
                 raise Exception(f"Failed to fetch exchange info: {response.status_code}")
 
     async def _fetch_stats_and_reconstruct_candles(self):
         """PRESERVED: All original rate limiting with Nautilus async patterns"""
@@ -1430,7 +1446,7 @@ def test_siloqy_actors_with_nautilus_process_management():
             "candle_interval_ms": 15 * 60 * 1000,
             "throttle_mode": True,  # ENABLED: Safe for dual instance testing
             "throttle_rate_limit_seconds": 10.0,  # 10s between batches (vs 2.5s)
-            "max_symbols_throttled": 414  # Only 100 symbols (vs 2000+)
+            "max_symbols_throttled": 400  # 400 symbols (vs 2000+)
         }
     )
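As a quick sanity check on what the throttled values above imply for startup time: the per-request batch size is not visible in this diff, so the figure below rests on an assumed batch size and should be read purely as an illustration of the knobs, not as something this PR specifies.

# Back-of-envelope only. ASSUMPTION: 50 symbols per stats request; the real batch
# size is not shown in this diff, so treat the result purely as an illustration.
import math

max_symbols_throttled = 400          # value set in this commit (was 414)
throttle_rate_limit_seconds = 10.0   # from the config above (vs 2.5s normally)
assumed_batch_size = 50              # hypothetical, NOT taken from the PR

batches = math.ceil(max_symbols_throttled / assumed_batch_size)
waiting = (batches - 1) * throttle_rate_limit_seconds
print(f"{batches} batches -> roughly {waiting:.0f}s spent waiting between batches")
# 8 batches -> roughly 70s spent waiting between batches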