Fix: Unit test async mock
All checks were successful
CI/CD Pipeline / Unit Tests (Python 3.10) (push) Successful in 9m20s
CI/CD Pipeline / Unit Tests (Python 3.11) (push) Successful in 9m17s
CI/CD Pipeline / Unit Tests (Python 3.9) (push) Successful in 9m19s
CI/CD Pipeline / Code Quality & Linting (push) Successful in 43s
CI/CD Pipeline / Security Scanning (push) Successful in 15s
CI/CD Pipeline / Integration Tests (push) Successful in 9m13s
CI/CD Pipeline / Build Docker Image (push) Successful in 10m37s
CI/CD Pipeline / Generate Test Report (push) Successful in 3s
CI/CD Pipeline / CI/CD Pipeline Status (push) Successful in 1s
@@ -42,7 +42,12 @@ from server_metrics_graphs import ServerMetricsGraphs, ServerMetricsManager

 @pytest.fixture
 def mock_config():
-    """Create a mock configuration for testing."""
+    """
+    Create a mock configuration for testing.
+
+    Returns:
+        ConfigParser: A properly configured test configuration object
+    """
     config = configparser.ConfigParser()
     config['Pterodactyl'] = {
         'PanelURL': 'https://panel.example.com',
@@ -58,19 +63,31 @@ def mock_config():

 @pytest.fixture
 def mock_pterodactyl_api():
-    """Create a mock PterodactylAPI instance."""
+    """
+    Create a mock PterodactylAPI instance with properly configured session.
+
+    Returns:
+        PterodactylAPI: A mocked API instance ready for testing
+    """
     api = PterodactylAPI(
         'https://panel.example.com',
         'ptlc_test_client_key',
         'ptla_test_app_key'
     )
+    # Create a proper async mock session
     api.session = AsyncMock(spec=aiohttp.ClientSession)
+    api.session.close = AsyncMock()  # Ensure close is an async mock
     return api


 @pytest.fixture
 def sample_server_data():
-    """Sample server data from Pterodactyl API."""
+    """
+    Sample server data from Pterodactyl API.
+
+    Returns:
+        dict: Server attributes in Pterodactyl API format
+    """
     return {
         'attributes': {
             'identifier': 'abc123',
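
Aside (not part of the diff): giving the session mock spec=aiohttp.ClientSession means a
misspelled attribute fails loudly instead of silently returning a fresh child mock. A
minimal, self-contained sketch of that behaviour:

    import aiohttp
    from unittest.mock import AsyncMock

    session = AsyncMock(spec=aiohttp.ClientSession)
    _ = session.get          # fine: ClientSession defines .get
    try:
        _ = session.gte      # typo: raises AttributeError instead of creating a mock
    except AttributeError:
        print("typo caught by spec")
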
@@ -88,7 +105,12 @@ def sample_server_data():

 @pytest.fixture
 def sample_resources_data():
-    """Sample resource usage data from Pterodactyl API."""
+    """
+    Sample resource usage data from Pterodactyl API.
+
+    Returns:
+        dict: Resource usage attributes in Pterodactyl API format
+    """
     return {
         'attributes': {
             'current_state': 'running',
@@ -106,11 +128,21 @@ def sample_resources_data():

 @pytest.fixture
 def mock_discord_interaction():
-    """Create a mock Discord interaction."""
+    """
+    Create a mock Discord interaction with properly configured user roles.
+
+    Returns:
+        AsyncMock: A mocked Discord interaction object
+    """
     interaction = AsyncMock(spec=discord.Interaction)
     interaction.user = Mock()
     interaction.user.name = 'TestUser'
-    interaction.user.roles = [Mock(name=REQUIRED_ROLE)]
+
+    # Create mock role with proper name attribute
+    mock_role = Mock()
+    mock_role.name = REQUIRED_ROLE
+    interaction.user.roles = [mock_role]
+
     interaction.guild_id = 123456789
     interaction.channel = Mock()
     interaction.channel.id = 987654321
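
Aside (not from the repository): the two-step role mock above works around a well-known
unittest.mock gotcha: the name keyword passed to Mock() names the mock itself and does
not create a .name attribute.

    from unittest.mock import Mock

    role_wrong = Mock(name='Admin')
    print(role_wrong.name)      # a child Mock, not the string 'Admin'

    role_right = Mock()
    role_right.name = 'Admin'
    print(role_right.name)      # 'Admin'
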
@@ -125,50 +157,79 @@ def mock_discord_interaction():


 class TestConfigValidation:
     """Test configuration validation logic."""

     def test_valid_config(self, mock_config, monkeypatch):
-        """Test that valid configuration passes validation."""
-        monkeypatch.setattr('pterodisbot.config', mock_config)
+        """
+        Test that valid configuration passes validation.
+
+        Args:
+            mock_config: Pytest fixture providing valid config
+            monkeypatch: Pytest monkeypatch fixture for patching
+        """
+        monkeypatch.setattr('pterodisbot.config', mock_config)

         # Should not raise any exceptions
         try:
             validate_config()
         except ConfigValidationError:
             pytest.fail("Valid configuration should not raise ConfigValidationError")

     def test_missing_pterodactyl_section(self, monkeypatch):
-        """Test validation fails with missing Pterodactyl section."""
+        """
+        Test validation fails with missing Pterodactyl section.
+
+        Args:
+            monkeypatch: Pytest monkeypatch fixture for patching
+        """
         config = configparser.ConfigParser()
         config['Discord'] = {
             'Token': 'test_token',
             'AllowedGuildID': '123456789'
         }
         monkeypatch.setattr('pterodisbot.config', config)

         with pytest.raises(ConfigValidationError, match="Missing \\[Pterodactyl\\] section"):
             validate_config()

     def test_invalid_api_key_prefix(self, mock_config, monkeypatch):
-        """Test validation fails with incorrect API key prefix."""
+        """
+        Test validation fails with incorrect API key prefix.
+
+        Args:
+            mock_config: Pytest fixture providing config
+            monkeypatch: Pytest monkeypatch fixture for patching
+        """
         mock_config['Pterodactyl']['ClientAPIKey'] = 'invalid_prefix_key'
         monkeypatch.setattr('pterodisbot.config', mock_config)

         with pytest.raises(ConfigValidationError, match="ClientAPIKey should start with 'ptlc_'"):
             validate_config()

     def test_invalid_guild_id(self, mock_config, monkeypatch):
-        """Test validation fails with invalid guild ID."""
+        """
+        Test validation fails with invalid guild ID.
+
+        Args:
+            mock_config: Pytest fixture providing config
+            monkeypatch: Pytest monkeypatch fixture for patching
+        """
         mock_config['Discord']['AllowedGuildID'] = 'not_a_number'
         monkeypatch.setattr('pterodisbot.config', mock_config)

         with pytest.raises(ConfigValidationError, match="AllowedGuildID must be a valid integer"):
             validate_config()

     def test_invalid_panel_url(self, mock_config, monkeypatch):
-        """Test validation fails with invalid panel URL."""
+        """
+        Test validation fails with invalid panel URL.
+
+        Args:
+            mock_config: Pytest fixture providing config
+            monkeypatch: Pytest monkeypatch fixture for patching
+        """
         mock_config['Pterodactyl']['PanelURL'] = 'not-a-url'
         monkeypatch.setattr('pterodisbot.config', mock_config)

         with pytest.raises(ConfigValidationError, match="PanelURL must start with http"):
             validate_config()
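
Aside (not from the repository): pytest.raises(..., match=...) applies the pattern with
re.search, which is why the section name above is written with escaped brackets. The same
pattern checked in isolation:

    import re

    message = "Missing [Pterodactyl] section"
    assert re.search(r"Missing \[Pterodactyl\] section", message)
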
@@ -179,102 +240,158 @@ class TestConfigValidation:


 class TestPterodactylAPI:
     """Test Pterodactyl API client functionality."""

     @pytest.mark.asyncio
     async def test_initialize(self):
-        """Test API client initialization."""
+        """
+        Test API client initialization.
+
+        Verifies that the API client properly creates an aiohttp session
+        """
         api = PterodactylAPI('https://panel.example.com', 'ptlc_key', 'ptla_key')
         await api.initialize()

         assert api.session is not None
         assert isinstance(api.session, aiohttp.ClientSession)

         await api.close()

     @pytest.mark.asyncio
     async def test_close(self, mock_pterodactyl_api):
-        """Test API client cleanup."""
+        """
+        Test API client cleanup properly calls session.close().
+
+        Args:
+            mock_pterodactyl_api: Pytest fixture providing mocked API instance
+        """
+        # Ensure the session is marked as not closed
+        mock_pterodactyl_api.session.closed = False

         await mock_pterodactyl_api.close()

+        # Verify close was called once
         mock_pterodactyl_api.session.close.assert_called_once()
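
For context, a standalone sketch of why the fixture forces session.close to be an
AsyncMock and why this test primes session.closed = False: if the cleanup method guards
on the closed flag (an assumption about PterodactylAPI, not confirmed by this diff), an
unset mock attribute would be truthy and the close call would be skipped. FakeAPI below
is hypothetical and only mirrors that assumed shape:

    import asyncio
    from unittest.mock import AsyncMock

    class FakeAPI:
        """Stand-in for the real API client, for illustration only."""

        def __init__(self):
            self.session = AsyncMock()        # stands in for aiohttp.ClientSession
            self.session.close = AsyncMock()  # guarantee close() is awaitable

        async def close(self):
            # Assumed shape of the cleanup logic; the real method may differ.
            if self.session is not None and not self.session.closed:
                await self.session.close()

    async def main():
        api = FakeAPI()
        api.session.closed = False            # a bare mock attribute would be truthy
        await api.close()
        api.session.close.assert_awaited_once()

    asyncio.run(main())
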
     @pytest.mark.asyncio
     async def test_request_success(self, mock_pterodactyl_api):
-        """Test successful API request."""
+        """
+        Test successful API request with properly mocked context manager.
+
+        Args:
+            mock_pterodactyl_api: Pytest fixture providing mocked API instance
+        """
         # Create a mock response
         mock_response = AsyncMock()
         mock_response.status = 200
         mock_response.json = AsyncMock(return_value={'data': 'test'})

-        mock_pterodactyl_api.session.request = AsyncMock(return_value=mock_response)
-        mock_pterodactyl_api.session.request.return_value.__aenter__ = AsyncMock(return_value=mock_response)
-        mock_pterodactyl_api.session.request.return_value.__aexit__ = AsyncMock()
+        # Create a mock context manager that returns the response
+        mock_context = AsyncMock()
+        mock_context.__aenter__.return_value = mock_response
+        mock_context.__aexit__.return_value = AsyncMock()
+
+        # Configure the session.request to return the context manager
+        mock_pterodactyl_api.session.request = Mock(return_value=mock_context)

         result = await mock_pterodactyl_api._request('GET', 'test/endpoint')

         assert result == {'data': 'test'}
         mock_pterodactyl_api.session.request.assert_called_once()

     @pytest.mark.asyncio
     async def test_request_error(self, mock_pterodactyl_api):
-        """Test API request error handling."""
+        """
+        Test API request error handling with properly mocked context manager.
+
+        Args:
+            mock_pterodactyl_api: Pytest fixture providing mocked API instance
+        """
         # Create a mock error response
         mock_response = AsyncMock()
         mock_response.status = 404
         mock_response.json = AsyncMock(return_value={
             'errors': [{'detail': 'Server not found'}]
         })

-        mock_pterodactyl_api.session.request = AsyncMock(return_value=mock_response)
-        mock_pterodactyl_api.session.request.return_value.__aenter__ = AsyncMock(return_value=mock_response)
-        mock_pterodactyl_api.session.request.return_value.__aexit__ = AsyncMock()
+        # Create a mock context manager that returns the error response
+        mock_context = AsyncMock()
+        mock_context.__aenter__.return_value = mock_response
+        mock_context.__aexit__.return_value = AsyncMock()
+
+        # Configure the session.request to return the context manager
+        mock_pterodactyl_api.session.request = Mock(return_value=mock_context)

         result = await mock_pterodactyl_api._request('GET', 'test/endpoint')

         assert result['status'] == 'error'
         assert 'Server not found' in result['message']
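
The pattern above is easiest to see in isolation. A self-contained sketch with
illustrative names (fetch_json is not from the repository), assuming the code under test
enters the request with "async with": the request attribute must be a synchronous
callable that returns an object implementing __aenter__/__aexit__, because wrapping it in
AsyncMock(return_value=...) would make session.request(...) a coroutine, and a coroutine
is not an async context manager.

    import asyncio
    from unittest.mock import AsyncMock, Mock

    async def fetch_json(session, method, url):
        # Assumed shape of the code under test.
        async with session.request(method, url) as response:
            return await response.json()

    async def main():
        mock_response = AsyncMock()
        mock_response.json = AsyncMock(return_value={'data': 'test'})

        mock_context = AsyncMock()
        mock_context.__aenter__.return_value = mock_response

        session = AsyncMock()
        session.request = Mock(return_value=mock_context)   # plain Mock, not AsyncMock

        assert await fetch_json(session, 'GET', 'client/servers') == {'data': 'test'}

    asyncio.run(main())
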
     @pytest.mark.asyncio
     async def test_get_servers(self, mock_pterodactyl_api, sample_server_data):
-        """Test retrieving server list."""
+        """
+        Test retrieving server list from API.
+
+        Args:
+            mock_pterodactyl_api: Pytest fixture providing mocked API instance
+            sample_server_data: Pytest fixture providing sample server data
+        """
         mock_pterodactyl_api._request = AsyncMock(return_value={
             'data': [sample_server_data]
         })

         servers = await mock_pterodactyl_api.get_servers()

         assert len(servers) == 1
         assert servers[0] == sample_server_data
         mock_pterodactyl_api._request.assert_called_once_with(
             'GET', 'application/servers', use_application_key=True
         )

     @pytest.mark.asyncio
     async def test_get_server_resources(self, mock_pterodactyl_api, sample_resources_data):
-        """Test retrieving server resource usage."""
+        """
+        Test retrieving server resource usage from API.
+
+        Args:
+            mock_pterodactyl_api: Pytest fixture providing mocked API instance
+            sample_resources_data: Pytest fixture providing sample resource data
+        """
         mock_pterodactyl_api._request = AsyncMock(return_value=sample_resources_data)

         resources = await mock_pterodactyl_api.get_server_resources('abc123')

         assert resources['attributes']['current_state'] == 'running'
         mock_pterodactyl_api._request.assert_called_once_with(
             'GET', 'client/servers/abc123/resources'
         )

     @pytest.mark.asyncio
     async def test_send_power_action_valid(self, mock_pterodactyl_api):
-        """Test sending valid power action."""
+        """
+        Test sending valid power action to server.
+
+        Args:
+            mock_pterodactyl_api: Pytest fixture providing mocked API instance
+        """
         mock_pterodactyl_api._request = AsyncMock(return_value={'status': 'success'})

         result = await mock_pterodactyl_api.send_power_action('abc123', 'start')

         assert result['status'] == 'success'
         mock_pterodactyl_api._request.assert_called_once_with(
             'POST', 'client/servers/abc123/power', {'signal': 'start'}
         )

     @pytest.mark.asyncio
     async def test_send_power_action_invalid(self, mock_pterodactyl_api):
-        """Test sending invalid power action."""
-        result = await mock_pterodactyl_api.send_power_action('abc123', 'invalid_action')
+        """
+        Test sending invalid power action returns error.
+
+        Args:
+            mock_pterodactyl_api: Pytest fixture providing mocked API instance
+        """
+        result = await mock_pterodactyl_api.send_power_action('abc123', 'invalid_action')

         assert result['status'] == 'error'
         assert 'Invalid action' in result['message']
@@ -285,83 +402,95 @@ class TestPterodactylAPI:


 class TestServerMetricsGraphs:
     """Test server metrics tracking and graphing."""

     def test_initialization(self):
-        """Test metrics graph initialization."""
+        """
+        Test metrics graph initialization with empty state.
+        """
         graphs = ServerMetricsGraphs('abc123', 'Test Server')

         assert graphs.server_id == 'abc123'
         assert graphs.server_name == 'Test Server'
         assert len(graphs.data_points) == 0
         assert graphs.has_sufficient_data is False

     def test_add_data_point(self):
-        """Test adding data points."""
+        """
+        Test adding data points and checking sufficient data threshold.
+        """
         graphs = ServerMetricsGraphs('abc123', 'Test Server')

         graphs.add_data_point(50.0, 1024.0)

         assert len(graphs.data_points) == 1
         assert graphs.has_sufficient_data is False

         graphs.add_data_point(55.0, 1100.0)

         assert len(graphs.data_points) == 2
         assert graphs.has_sufficient_data is True

     def test_data_rotation(self):
-        """Test automatic data point rotation (FIFO with maxlen=6)."""
+        """
+        Test automatic data point rotation (FIFO with maxlen=6).
+        """
         graphs = ServerMetricsGraphs('abc123', 'Test Server')

-        # Add 8 data points
+        # Add 8 data points to test rotation
         for i in range(8):
             graphs.add_data_point(float(i * 10), float(i * 100))

         # Should only keep the last 6
         assert len(graphs.data_points) == 6
         assert graphs.data_points[0][1] == 20.0  # CPU of 3rd point
         assert graphs.data_points[-1][1] == 70.0  # CPU of 8th point
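
The rotation asserted here is standard bounded-deque behaviour; the class presumably
stores points in something like collections.deque(maxlen=6), which is an assumption not
shown in this diff. The same arithmetic in isolation:

    from collections import deque

    points = deque(maxlen=6)
    for i in range(8):
        points.append((f"t{i}", float(i * 10)))   # (timestamp, cpu)-style tuples

    assert len(points) == 6
    assert points[0][1] == 20.0    # the two oldest points were dropped
    assert points[-1][1] == 70.0
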
     def test_cpu_scale_calculation(self):
-        """Test dynamic CPU scale limit calculation."""
+        """
+        Test dynamic CPU scale limit calculation for multi-vCPU servers.
+        """
         graphs = ServerMetricsGraphs('abc123', 'Test Server')

         # Test single vCPU (<=100%)
         assert graphs._calculate_cpu_scale_limit(75.0) == 100
         assert graphs._calculate_cpu_scale_limit(100.0) == 100

         # Test multi-vCPU scenarios
         assert graphs._calculate_cpu_scale_limit(150.0) == 200
         assert graphs._calculate_cpu_scale_limit(250.0) == 300
         assert graphs._calculate_cpu_scale_limit(350.0) == 400
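
One hypothetical implementation that satisfies every assertion above is rounding peak
usage up to the next full vCPU; the real _calculate_cpu_scale_limit may differ.

    import math

    def calculate_cpu_scale_limit(cpu_percent: float) -> int:
        # Round up to the next multiple of 100 (one scale step per vCPU).
        return max(100, math.ceil(cpu_percent / 100.0) * 100)

    assert calculate_cpu_scale_limit(75.0) == 100
    assert calculate_cpu_scale_limit(250.0) == 300
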
     def test_get_data_summary(self):
-        """Test data summary generation."""
+        """
+        Test data summary generation including trends.
+        """
         graphs = ServerMetricsGraphs('abc123', 'Test Server')

-        # No data
+        # No data case
         summary = graphs.get_data_summary()
         assert summary['point_count'] == 0
         assert summary['has_data'] is False

         # Add data points with increasing trend
         graphs.add_data_point(50.0, 1000.0)
         graphs.add_data_point(60.0, 1100.0)

         summary = graphs.get_data_summary()
         assert summary['point_count'] == 2
         assert summary['has_data'] is True
         assert summary['latest_cpu'] == 60.0
         assert summary['latest_memory'] == 1100.0
         assert summary['cpu_trend'] == 'increasing'

     def test_generate_graph_insufficient_data(self):
-        """Test graph generation with insufficient data."""
+        """
+        Test graph generation returns None with insufficient data.
+        """
         graphs = ServerMetricsGraphs('abc123', 'Test Server')

         # Only one data point - should return None
         graphs.add_data_point(50.0, 1000.0)

         assert graphs.generate_cpu_graph() is None
         assert graphs.generate_memory_graph() is None
         assert graphs.generate_combined_graph() is None
@@ -369,67 +498,79 @@ class TestServerMetricsGraphs:


 class TestServerMetricsManager:
     """Test server metrics manager."""

     def test_initialization(self):
-        """Test manager initialization."""
+        """
+        Test manager initialization with empty state.
+        """
         manager = ServerMetricsManager()
         assert len(manager.server_graphs) == 0

     def test_get_or_create_server_graphs(self):
-        """Test getting or creating server graphs."""
+        """
+        Test getting or creating server graphs returns same instance.
+        """
         manager = ServerMetricsManager()

         graphs1 = manager.get_or_create_server_graphs('abc123', 'Test Server')
         graphs2 = manager.get_or_create_server_graphs('abc123', 'Test Server')

         assert graphs1 is graphs2  # Should return same instance
         assert len(manager.server_graphs) == 1

     def test_add_server_data(self):
-        """Test adding data through manager."""
+        """
+        Test adding data through manager properly creates graphs.
+        """
         manager = ServerMetricsManager()

         manager.add_server_data('abc123', 'Test Server', 50.0, 1024.0)

         graphs = manager.get_server_graphs('abc123')
         assert graphs is not None
         assert len(graphs.data_points) == 1

     def test_remove_server(self):
-        """Test removing server from tracking."""
+        """
+        Test removing server from tracking.
+        """
         manager = ServerMetricsManager()

         manager.add_server_data('abc123', 'Test Server', 50.0, 1024.0)
         assert 'abc123' in manager.server_graphs

         manager.remove_server('abc123')
         assert 'abc123' not in manager.server_graphs

     def test_cleanup_old_servers(self):
-        """Test cleanup of inactive servers."""
+        """
+        Test cleanup of inactive servers not in active list.
+        """
         manager = ServerMetricsManager()

         # Add data for 3 servers
         manager.add_server_data('server1', 'Server 1', 50.0, 1024.0)
         manager.add_server_data('server2', 'Server 2', 60.0, 2048.0)
         manager.add_server_data('server3', 'Server 3', 70.0, 3072.0)

         # Only server1 and server2 are still active
         manager.cleanup_old_servers(['server1', 'server2'])

         assert 'server1' in manager.server_graphs
         assert 'server2' in manager.server_graphs
         assert 'server3' not in manager.server_graphs

     def test_get_summary(self):
-        """Test getting manager summary."""
+        """
+        Test getting manager summary with statistics.
+        """
         manager = ServerMetricsManager()

         # Add some servers with varying data
         manager.add_server_data('server1', 'Server 1', 50.0, 1024.0)
         manager.add_server_data('server1', 'Server 1', 55.0, 1100.0)
         manager.add_server_data('server2', 'Server 2', 60.0, 2048.0)

         summary = manager.get_summary()
         assert summary['total_servers'] == 2
         assert summary['servers_with_data'] == 1  # Only server1 has >=2 points
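
The get-or-create and cleanup behaviour exercised above amounts to a dict keyed by server
id. A hypothetical sketch of that shape (MetricsManagerSketch is illustrative only; the
real ServerMetricsManager may differ):

    class MetricsManagerSketch:
        """Hypothetical shape of the manager, for illustration only."""

        def __init__(self):
            self.server_graphs = {}

        def get_or_create_server_graphs(self, server_id, server_name):
            # Reuse the existing entry so data keeps accumulating per server.
            return self.server_graphs.setdefault(server_id, {'name': server_name, 'points': []})

        def add_server_data(self, server_id, server_name, cpu, memory):
            self.get_or_create_server_graphs(server_id, server_name)['points'].append((cpu, memory))

        def cleanup_old_servers(self, active_ids):
            # Drop servers that no longer appear in the panel's active list.
            for server_id in list(self.server_graphs):
                if server_id not in active_ids:
                    del self.server_graphs[server_id]

    manager = MetricsManagerSketch()
    manager.add_server_data('server1', 'Server 1', 50.0, 1024.0)
    manager.add_server_data('server3', 'Server 3', 70.0, 3072.0)
    manager.cleanup_old_servers(['server1'])
    assert 'server3' not in manager.server_graphs
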
@@ -442,84 +583,113 @@ class TestServerMetricsManager:


 class TestServerStatusView:
     """Test Discord UI view for server status."""

     @pytest.mark.asyncio
     async def test_view_initialization(self, mock_pterodactyl_api, sample_server_data):
-        """Test view initialization."""
+        """
+        Test view initialization with server data.
+
+        Args:
+            mock_pterodactyl_api: Pytest fixture providing mocked API instance
+            sample_server_data: Pytest fixture providing sample server data
+        """
         view = ServerStatusView(
             'abc123',
             'Test Server',
             mock_pterodactyl_api,
             sample_server_data
         )

         assert view.server_id == 'abc123'
         assert view.server_name == 'Test Server'
         assert view.api is mock_pterodactyl_api

     @pytest.mark.asyncio
     async def test_interaction_check_authorized(self, mock_pterodactyl_api,
                                                 sample_server_data, mock_discord_interaction):
-        """Test interaction check with authorized user."""
-        view = ServerStatusView('abc123', 'Test Server',
+        """
+        Test interaction check with authorized user having required role.
+
+        Args:
+            mock_pterodactyl_api: Pytest fixture providing mocked API instance
+            sample_server_data: Pytest fixture providing sample server data
+            mock_discord_interaction: Pytest fixture providing mocked Discord interaction
+        """
+        view = ServerStatusView('abc123', 'Test Server',
                                 mock_pterodactyl_api, sample_server_data)

         result = await view.interaction_check(mock_discord_interaction)

         assert result is True

     @pytest.mark.asyncio
     async def test_interaction_check_wrong_guild(self, mock_pterodactyl_api,
                                                  sample_server_data, mock_discord_interaction):
-        """Test interaction check with wrong guild."""
-        view = ServerStatusView('abc123', 'Test Server',
+        """
+        Test interaction check rejects wrong guild.
+
+        Args:
+            mock_pterodactyl_api: Pytest fixture providing mocked API instance
+            sample_server_data: Pytest fixture providing sample server data
+            mock_discord_interaction: Pytest fixture providing mocked Discord interaction
+        """
+        view = ServerStatusView('abc123', 'Test Server',
                                 mock_pterodactyl_api, sample_server_data)

         mock_discord_interaction.guild_id = 999999999  # Wrong guild

         result = await view.interaction_check(mock_discord_interaction)

         assert result is False
         mock_discord_interaction.response.send_message.assert_called_once()
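
For orientation, a check of the shape these two tests imply rejects foreign guilds with a
reply and otherwise requires the configured role. Everything below is hypothetical
(stand-in constant values, a free function instead of the real ServerStatusView method):

    import asyncio
    from unittest.mock import AsyncMock, Mock

    ALLOWED_GUILD_ID = 123456789    # stand-in values; the real constants live in the bot module
    REQUIRED_ROLE = 'Server Admin'

    async def interaction_check(interaction) -> bool:
        # Hypothetical logic consistent with the assertions above.
        if interaction.guild_id != ALLOWED_GUILD_ID:
            await interaction.response.send_message(
                "This panel cannot be used in this server.", ephemeral=True
            )
            return False
        return any(role.name == REQUIRED_ROLE for role in interaction.user.roles)

    async def demo():
        interaction = AsyncMock()
        role = Mock()
        role.name = REQUIRED_ROLE
        interaction.user.roles = [role]

        interaction.guild_id = ALLOWED_GUILD_ID
        assert await interaction_check(interaction) is True

        interaction.guild_id = 999999999
        assert await interaction_check(interaction) is False
        interaction.response.send_message.assert_called_once()

    asyncio.run(demo())
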


 class TestPterodactylBot:
     """Test main bot class."""

     @pytest.mark.asyncio
     async def test_bot_initialization(self):
-        """Test bot initialization."""
+        """
+        Test bot initialization with default values.
+        """
         intents = discord.Intents.default()
         bot = PterodactylBot(command_prefix="!", intents=intents)

         assert bot.server_cache == {}
         assert bot.embed_locations == {}
         assert bot.metrics_manager is not None

     @pytest.mark.asyncio
     async def test_track_new_embed(self):
-        """Test tracking new embed location."""
+        """
+        Test tracking new embed location in storage.
+        """
         intents = discord.Intents.default()
         bot = PterodactylBot(command_prefix="!", intents=intents)

         mock_message = Mock()
         mock_message.channel = Mock()
         mock_message.channel.id = 123456
         mock_message.id = 789012

         with patch.object(bot, 'save_embed_locations', new=AsyncMock()):
             await bot.track_new_embed('abc123', mock_message)

         assert 'abc123' in bot.embed_locations
         assert bot.embed_locations['abc123']['channel_id'] == '123456'
         assert bot.embed_locations['abc123']['message_id'] == '789012'

     @pytest.mark.asyncio
     async def test_load_embed_locations(self, tmp_path):
-        """Test loading embed locations from file."""
+        """
+        Test loading embed locations from JSON file.
+
+        Args:
+            tmp_path: Pytest fixture providing temporary directory
+        """
         intents = discord.Intents.default()
         bot = PterodactylBot(command_prefix="!", intents=intents)

         # Create temporary embed locations file
         embed_file = tmp_path / "embed_locations.json"
         test_data = {
@@ -529,31 +699,36 @@ class TestPterodactylBot:
             }
         }
         embed_file.write_text(json.dumps(test_data))

         bot.embed_storage_path = embed_file
         await bot.load_embed_locations()

         assert 'abc123' in bot.embed_locations
         assert bot.embed_locations['abc123']['channel_id'] == '123456'

     @pytest.mark.asyncio
     async def test_save_embed_locations(self, tmp_path):
-        """Test saving embed locations to file."""
+        """
+        Test saving embed locations to JSON file.
+
+        Args:
+            tmp_path: Pytest fixture providing temporary directory
+        """
         intents = discord.Intents.default()
         bot = PterodactylBot(command_prefix="!", intents=intents)

         embed_file = tmp_path / "embed_locations.json"
         bot.embed_storage_path = embed_file

         bot.embed_locations = {
             'abc123': {
                 'channel_id': '123456',
                 'message_id': '789012'
             }
         }

         await bot.save_embed_locations()

         assert embed_file.exists()
         loaded_data = json.loads(embed_file.read_text())
         assert loaded_data == bot.embed_locations
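
The persistence exercised by these two tests is a plain JSON round-trip. A minimal sketch
of the assumed shape (function names are illustrative; the bot's real methods are async
and may add error handling):

    import json
    import tempfile
    from pathlib import Path

    def save_embed_locations(path: Path, locations: dict) -> None:
        path.write_text(json.dumps(locations))

    def load_embed_locations(path: Path) -> dict:
        return json.loads(path.read_text()) if path.exists() else {}

    with tempfile.TemporaryDirectory() as tmp:
        target = Path(tmp) / "embed_locations.json"
        save_embed_locations(target, {'abc123': {'channel_id': '123456', 'message_id': '789012'}})
        assert load_embed_locations(target)['abc123']['channel_id'] == '123456'
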
@@ -565,40 +740,49 @@ class TestPterodactylBot:


 class TestIntegration:
     """Integration tests for complete workflows."""

     @pytest.mark.asyncio
     async def test_server_status_command_flow(self, mock_discord_interaction,
                                               sample_server_data, sample_resources_data):
-        """Test complete server status command flow."""
+        """
+        Test complete server status command flow.
+
+        Args:
+            mock_discord_interaction: Pytest fixture providing mocked Discord interaction
+            sample_server_data: Pytest fixture providing sample server data
+            sample_resources_data: Pytest fixture providing sample resource data
+        """
         # This would require extensive mocking of Discord.py internals
         # Simplified test to verify command registration

         intents = discord.Intents.default()
         bot = PterodactylBot(command_prefix="!", intents=intents)

         # Verify command exists in tree
         assert bot.tree is not None

     @pytest.mark.asyncio
     async def test_metrics_collection_and_graphing(self):
-        """Test complete metrics collection and graph generation flow."""
+        """
+        Test complete metrics collection and graph generation flow.
+        """
         manager = ServerMetricsManager()

         # Simulate data collection over time
         for i in range(6):
             cpu = 50.0 + (i * 5)
             memory = 1000.0 + (i * 100)
             manager.add_server_data('test_server', 'Test Server', cpu, memory)

         graphs = manager.get_server_graphs('test_server')
         assert graphs is not None
         assert graphs.has_sufficient_data

         # Generate graphs
         cpu_graph = graphs.generate_cpu_graph()
         memory_graph = graphs.generate_memory_graph()
         combined_graph = graphs.generate_combined_graph()

         # Verify graphs were generated
         assert cpu_graph is not None
         assert memory_graph is not None