Local changes: updated model training, removed debug instrumentation, and improved configuration

kfox
2025-12-26 01:15:43 -05:00
commit cc60da49e7
388 changed files with 57127 additions and 0 deletions


@@ -0,0 +1,153 @@
#!/usr/bin/env python3
"""Utility script to fetch and store historical OHLCV data from the Binance public API.

This script uses the PublicDataAdapter to fetch historical market data without
requiring API keys, making it well suited for populating a database with
historical data for backtesting and analysis.

Usage:
    python scripts/fetch_historical_data.py --symbol BTC/USDT --timeframe 1h --days 30
    python scripts/fetch_historical_data.py --symbol ETH/USDT --timeframe 1d --days 365
"""
import argparse
import sys
import time
from datetime import datetime, timedelta
from pathlib import Path

# Add the repository root to sys.path so the `src` package can be imported.
sys.path.insert(0, str(Path(__file__).parent.parent))

from src.exchanges.public_data import PublicDataAdapter
from src.data.collector import get_data_collector
from src.core.logger import setup_logging, get_logger

setup_logging()
logger = get_logger(__name__)
def fetch_historical_data(
    symbol: str,
    timeframe: str,
    days: int,
    exchange_name: str = "Binance Public",
) -> int:
    """Fetch and store historical OHLCV data.

    Args:
        symbol: Trading symbol (e.g., 'BTC/USDT')
        timeframe: Timeframe (e.g., '1h', '1d', '4h')
        days: Number of days of historical data to fetch
        exchange_name: Exchange name for storage

    Returns:
        Number of candles stored
    """
logger.info(f"Fetching {days} days of {timeframe} data for {symbol}")
# Create public data adapter
adapter = PublicDataAdapter()
if not adapter.connect():
logger.error("Failed to connect to Binance public API")
return 0
# Calculate start date
end_date = datetime.utcnow()
start_date = end_date - timedelta(days=days)
# Fetch data in chunks (Binance limit is 1000 candles per request)
collector = get_data_collector()
total_candles = 0
current_date = start_date
chunk_days = 30 # Fetch 30 days at a time to stay under 1000 candle limit
while current_date < end_date:
chunk_end = min(current_date + timedelta(days=chunk_days), end_date)
logger.info(f"Fetching data from {current_date.date()} to {chunk_end.date()}")
# Fetch OHLCV data
ohlcv = adapter.get_ohlcv(
symbol=symbol,
timeframe=timeframe,
since=current_date,
limit=1000
)
if ohlcv:
# Store in database
collector.store_ohlcv(exchange_name, symbol, timeframe, ohlcv)
total_candles += len(ohlcv)
logger.info(f"Stored {len(ohlcv)} candles (total: {total_candles})")
else:
logger.warning(f"No data returned for period {current_date} to {chunk_end}")
# Move to next chunk
current_date = chunk_end
# Small delay to respect rate limits
import time
time.sleep(1)
adapter.disconnect()
logger.info(f"Completed! Total candles stored: {total_candles}")
return total_candles
def main():
    """Main entry point."""
    parser = argparse.ArgumentParser(
        description="Fetch historical OHLCV data from Binance public API"
    )
    parser.add_argument(
        '--symbol',
        type=str,
        default='BTC/USDT',
        help='Trading symbol (e.g., BTC/USDT, ETH/USDT)'
    )
    parser.add_argument(
        '--timeframe',
        type=str,
        default='1h',
        choices=['1m', '5m', '15m', '30m', '1h', '4h', '1d', '1w'],
        help='Timeframe for candles'
    )
    parser.add_argument(
        '--days',
        type=int,
        default=30,
        help='Number of days of historical data to fetch'
    )
    parser.add_argument(
        '--exchange',
        type=str,
        default='Binance Public',
        help='Exchange name for storage (default: Binance Public)'
    )
    args = parser.parse_args()

    try:
        count = fetch_historical_data(
            symbol=args.symbol,
            timeframe=args.timeframe,
            days=args.days,
            exchange_name=args.exchange
        )
        print(f"\n✓ Successfully fetched and stored {count} candles")
        print(f"  Symbol: {args.symbol}")
        print(f"  Timeframe: {args.timeframe}")
        print(f"  Period: {args.days} days")
        return 0
    except KeyboardInterrupt:
        print("\n\nInterrupted by user")
        return 1
    except Exception as e:
        logger.error(f"Error: {e}", exc_info=True)
        print(f"\n✗ Error: {e}")
        return 1


if __name__ == '__main__':
    sys.exit(main())
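
For readers without the repo's internal modules, here is a rough stand-in for the same chunked fetch written directly against ccxt. This is a sketch under assumptions: PublicDataAdapter presumably wraps a similar client, but this commit does not confirm that, and ccxt is not a dependency shown here. It paginates by the last returned timestamp instead of fixed 30-day windows:

#!/usr/bin/env python3
"""Illustrative only: chunked OHLCV fetch from Binance via ccxt."""
from datetime import datetime, timedelta, timezone

import ccxt  # assumption: not part of this commit; stand-in for PublicDataAdapter

exchange = ccxt.binance()
symbol, timeframe = "BTC/USDT", "1h"
since = datetime.now(timezone.utc) - timedelta(days=30)
since_ms = int(since.timestamp() * 1000)

candles = []
while True:
    # Binance caps klines requests at 1000 candles, so page through by timestamp.
    batch = exchange.fetch_ohlcv(symbol, timeframe=timeframe, since=since_ms, limit=1000)
    if not batch:
        break
    candles.extend(batch)
    since_ms = batch[-1][0] + 1  # advance past the last candle's open time (ms)
    if len(batch) < 1000:
        break

print(f"Fetched {len(candles)} candles for {symbol} ({timeframe})")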