crypto_trader/tests/integration/test_autopilot_workflow.py

"""Integration tests for autopilot workflows."""
import pytest
from unittest.mock import Mock, patch, AsyncMock
from fastapi.testclient import TestClient
from backend.main import app
@pytest.fixture
def client():
"""Test client fixture."""
return TestClient(app)
@pytest.fixture
def mock_intelligent_autopilot():
"""Mock intelligent autopilot."""
autopilot = Mock()
autopilot.symbol = "BTC/USD"
autopilot.is_running = False
autopilot.enable_auto_execution = False
autopilot.get_status.return_value = {
"symbol": "BTC/USD",
"timeframe": "1h",
"running": False,
"selected_strategy": None,
"trades_today": 0,
"max_trades_per_day": 10,
"min_confidence_threshold": 0.75,
"enable_auto_execution": False,
"last_analysis": None,
"model_info": {},
}
return autopilot
@pytest.mark.integration
class TestAutopilotWorkflow:
    """Integration tests for autopilot workflows."""

    @patch('backend.api.autopilot.get_intelligent_autopilot')
    def test_start_intelligent_mode_workflow(
        self, mock_get_intelligent, client, mock_intelligent_autopilot
    ):
        """Test complete workflow for starting intelligent mode autopilot."""
        mock_get_intelligent.return_value = mock_intelligent_autopilot

        # Start autopilot
        response = client.post(
            "/api/autopilot/start-unified",
            json={
                "symbol": "BTC/USD",
                "mode": "intelligent",
                "auto_execute": True,
                "exchange_id": 1,
                "timeframe": "1h",
            },
        )
        assert response.status_code == 200
        assert mock_intelligent_autopilot.enable_auto_execution is True

        # Check status
        response = client.get(
            "/api/autopilot/status-unified/BTC/USD?mode=intelligent&timeframe=1h"
        )
        assert response.status_code == 200
        data = response.json()
        assert data["mode"] == "intelligent"

        # Stop autopilot
        response = client.post(
            "/api/autopilot/stop-unified?symbol=BTC/USD&mode=intelligent&timeframe=1h"
        )
        assert response.status_code == 200
        assert mock_intelligent_autopilot.stop.called


@pytest.mark.integration
class TestTrainingConfigWorkflow:
    """Integration tests for training configuration workflow."""

    def test_configure_and_verify_bootstrap(self, client):
        """Test configuring bootstrap settings and verifying they persist."""
        # Set custom config
        custom_config = {
            "days": 180,
            "timeframe": "4h",
            "min_samples_per_strategy": 25,
            "symbols": ["BTC/USD", "ETH/USD", "SOL/USD", "DOGE/USD"],
        }
        response = client.put("/api/autopilot/bootstrap-config", json=custom_config)
        assert response.status_code == 200

        # Verify it was saved
        response = client.get("/api/autopilot/bootstrap-config")
        assert response.status_code == 200
        data = response.json()
        assert data["days"] == 180
        assert data["timeframe"] == "4h"
        assert data["min_samples_per_strategy"] == 25
        assert len(data["symbols"]) == 4
        assert "DOGE/USD" in data["symbols"]

    @patch('backend.api.autopilot.train_model_task')
    def test_training_uses_configured_symbols(self, mock_task, client):
        """Test that training uses the configured symbols."""
        # Setup mock
        mock_result = Mock()
        mock_result.id = "test-task-123"
        mock_task.delay.return_value = mock_result

        # Configure with specific symbols
        config = {
            "days": 90,
            "timeframe": "1h",
            "min_samples_per_strategy": 10,
            "symbols": ["BTC/USD", "ETH/USD", "XRP/USD"],
        }
        client.put("/api/autopilot/bootstrap-config", json=config)

        # Trigger training
        response = client.post("/api/autopilot/intelligent/retrain")
        assert response.status_code == 200

        # Verify the symbols were passed
        call_kwargs = mock_task.delay.call_args.kwargs
        assert call_kwargs["symbols"] == ["BTC/USD", "ETH/USD", "XRP/USD"]
        assert call_kwargs["days"] == 90
        assert call_kwargs["timeframe"] == "1h"
        assert call_kwargs["min_samples_per_strategy"] == 10