Fix: Unit test async mock
All checks were successful
CI/CD Pipeline / Unit Tests (Python 3.10) (push) Successful in 9m20s
CI/CD Pipeline / Unit Tests (Python 3.11) (push) Successful in 9m17s
CI/CD Pipeline / Unit Tests (Python 3.9) (push) Successful in 9m19s
CI/CD Pipeline / Code Quality & Linting (push) Successful in 43s
CI/CD Pipeline / Security Scanning (push) Successful in 15s
CI/CD Pipeline / Integration Tests (push) Successful in 9m13s
CI/CD Pipeline / Build Docker Image (push) Successful in 10m37s
CI/CD Pipeline / Generate Test Report (push) Successful in 3s
CI/CD Pipeline / CI/CD Pipeline Status (push) Successful in 1s

2025-10-24 14:22:00 +00:00
parent 08dee3db99
commit 11ee1447de
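
Why the mock changes: aiohttp's ClientSession.request() is consumed as an async context manager ("async with session.request(...) as resp:"), so patching it with AsyncMock(return_value=mock_response) turns the call itself into a coroutine and __aenter__ is never entered. Below is a minimal sketch of the pattern the diff adopts, assuming pytest-asyncio (already used by this suite); fetch_status and the example URL are hypothetical stand-ins for PterodactylAPI._request.

import aiohttp
import pytest
from unittest.mock import AsyncMock, Mock

async def fetch_status(session: aiohttp.ClientSession, url: str) -> int:
    # Hypothetical helper standing in for PterodactylAPI._request.
    async with session.request('GET', url) as resp:
        return resp.status

@pytest.mark.asyncio
async def test_fetch_status_with_mocked_context_manager():
    mock_response = AsyncMock()
    mock_response.status = 200

    # The object returned by session.request(...) must behave like an
    # async context manager whose __aenter__ yields the response.
    mock_context = AsyncMock()
    mock_context.__aenter__.return_value = mock_response
    mock_context.__aexit__.return_value = False  # do not suppress exceptions

    session = AsyncMock(spec=aiohttp.ClientSession)
    # Plain Mock, not AsyncMock: calling session.request(...) then returns
    # the context manager directly instead of a coroutine.
    session.request = Mock(return_value=mock_context)

    assert await fetch_status(session, 'https://panel.example.com/api') == 200

    # Related gotcha also addressed in this commit: Mock(name='Admin') sets
    # the mock's internal name (used in its repr), not a .name attribute,
    # so role mocks are built first and .name is assigned afterwards.
    mock_role = Mock()
    mock_role.name = 'Admin'  # placeholder for REQUIRED_ROLE
    assert mock_role.name == 'Admin'
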


@@ -42,7 +42,12 @@ from server_metrics_graphs import ServerMetricsGraphs, ServerMetricsManager
@pytest.fixture
def mock_config():
"""Create a mock configuration for testing."""
"""
Create a mock configuration for testing.
Returns:
ConfigParser: A properly configured test configuration object
"""
config = configparser.ConfigParser()
config['Pterodactyl'] = {
'PanelURL': 'https://panel.example.com',
@@ -58,19 +63,31 @@ def mock_config():
@pytest.fixture
def mock_pterodactyl_api():
"""Create a mock PterodactylAPI instance."""
"""
Create a mock PterodactylAPI instance with a properly configured session.
Returns:
PterodactylAPI: A mocked API instance ready for testing
"""
api = PterodactylAPI(
'https://panel.example.com',
'ptlc_test_client_key',
'ptla_test_app_key'
)
# Create a proper async mock session
api.session = AsyncMock(spec=aiohttp.ClientSession)
api.session.close = AsyncMock() # Ensure close is an async mock
return api
@pytest.fixture
def sample_server_data():
"""Sample server data from Pterodactyl API."""
"""
Sample server data from Pterodactyl API.
Returns:
dict: Server attributes in Pterodactyl API format
"""
return {
'attributes': {
'identifier': 'abc123',
@@ -88,7 +105,12 @@ def sample_server_data():
@pytest.fixture
def sample_resources_data():
"""Sample resource usage data from Pterodactyl API."""
"""
Sample resource usage data from Pterodactyl API.
Returns:
dict: Resource usage attributes in Pterodactyl API format
"""
return {
'attributes': {
'current_state': 'running',
@@ -106,11 +128,21 @@ def sample_resources_data():
@pytest.fixture
def mock_discord_interaction():
"""Create a mock Discord interaction."""
"""
Create a mock Discord interaction with properly configured user roles.
Returns:
AsyncMock: A mocked Discord interaction object
"""
interaction = AsyncMock(spec=discord.Interaction)
interaction.user = Mock()
interaction.user.name = 'TestUser'
interaction.user.roles = [Mock(name=REQUIRED_ROLE)]
# Create mock role with proper name attribute
mock_role = Mock()
mock_role.name = REQUIRED_ROLE
interaction.user.roles = [mock_role]
interaction.guild_id = 123456789
interaction.channel = Mock()
interaction.channel.id = 987654321
@@ -127,7 +159,13 @@ class TestConfigValidation:
"""Test configuration validation logic."""
def test_valid_config(self, mock_config, monkeypatch):
"""Test that valid configuration passes validation."""
"""
Test that valid configuration passes validation.
Args:
mock_config: Pytest fixture providing valid config
monkeypatch: Pytest monkeypatch fixture for patching
"""
monkeypatch.setattr('pterodisbot.config', mock_config)
# Should not raise any exceptions
@@ -137,7 +175,12 @@ class TestConfigValidation:
pytest.fail("Valid configuration should not raise ConfigValidationError")
def test_missing_pterodactyl_section(self, monkeypatch):
"""Test validation fails with missing Pterodactyl section."""
"""
Test validation fails with missing Pterodactyl section.
Args:
monkeypatch: Pytest monkeypatch fixture for patching
"""
config = configparser.ConfigParser()
config['Discord'] = {
'Token': 'test_token',
@@ -149,7 +192,13 @@ class TestConfigValidation:
validate_config()
def test_invalid_api_key_prefix(self, mock_config, monkeypatch):
"""Test validation fails with incorrect API key prefix."""
"""
Test validation fails with incorrect API key prefix.
Args:
mock_config: Pytest fixture providing config
monkeypatch: Pytest monkeypatch fixture for patching
"""
mock_config['Pterodactyl']['ClientAPIKey'] = 'invalid_prefix_key'
monkeypatch.setattr('pterodisbot.config', mock_config)
@@ -157,7 +206,13 @@ class TestConfigValidation:
validate_config()
def test_invalid_guild_id(self, mock_config, monkeypatch):
"""Test validation fails with invalid guild ID."""
"""
Test validation fails with invalid guild ID.
Args:
mock_config: Pytest fixture providing config
monkeypatch: Pytest monkeypatch fixture for patching
"""
mock_config['Discord']['AllowedGuildID'] = 'not_a_number'
monkeypatch.setattr('pterodisbot.config', mock_config)
@@ -165,7 +220,13 @@ class TestConfigValidation:
validate_config()
def test_invalid_panel_url(self, mock_config, monkeypatch):
"""Test validation fails with invalid panel URL."""
"""
Test validation fails with invalid panel URL.
Args:
mock_config: Pytest fixture providing config
monkeypatch: Pytest monkeypatch fixture for patching
"""
mock_config['Pterodactyl']['PanelURL'] = 'not-a-url'
monkeypatch.setattr('pterodisbot.config', mock_config)
@@ -182,7 +243,11 @@ class TestPterodactylAPI:
@pytest.mark.asyncio
async def test_initialize(self):
"""Test API client initialization."""
"""
Test API client initialization.
Verifies that the API client properly creates an aiohttp session
"""
api = PterodactylAPI('https://panel.example.com', 'ptlc_key', 'ptla_key')
await api.initialize()
@@ -193,20 +258,40 @@ class TestPterodactylAPI:
@pytest.mark.asyncio
async def test_close(self, mock_pterodactyl_api):
"""Test API client cleanup."""
"""
Test API client cleanup properly calls session.close().
Args:
mock_pterodactyl_api: Pytest fixture providing mocked API instance
"""
# Ensure the session is marked as not closed
mock_pterodactyl_api.session.closed = False
await mock_pterodactyl_api.close()
# Verify close was called once
mock_pterodactyl_api.session.close.assert_called_once()
@pytest.mark.asyncio
async def test_request_success(self, mock_pterodactyl_api):
"""Test successful API request."""
"""
Test a successful API request with a properly mocked context manager.
Args:
mock_pterodactyl_api: Pytest fixture providing mocked API instance
"""
# Create a mock response
mock_response = AsyncMock()
mock_response.status = 200
mock_response.json = AsyncMock(return_value={'data': 'test'})
mock_pterodactyl_api.session.request = AsyncMock(return_value=mock_response)
mock_pterodactyl_api.session.request.return_value.__aenter__ = AsyncMock(return_value=mock_response)
mock_pterodactyl_api.session.request.return_value.__aexit__ = AsyncMock()
# Create a mock context manager that returns the response
mock_context = AsyncMock()
mock_context.__aenter__.return_value = mock_response
mock_context.__aexit__.return_value = AsyncMock()
# Configure the session.request to return the context manager
mock_pterodactyl_api.session.request = Mock(return_value=mock_context)
result = await mock_pterodactyl_api._request('GET', 'test/endpoint')
@@ -215,16 +300,26 @@ class TestPterodactylAPI:
@pytest.mark.asyncio
async def test_request_error(self, mock_pterodactyl_api):
"""Test API request error handling."""
"""
Test API request error handling with a properly mocked context manager.
Args:
mock_pterodactyl_api: Pytest fixture providing mocked API instance
"""
# Create a mock error response
mock_response = AsyncMock()
mock_response.status = 404
mock_response.json = AsyncMock(return_value={
'errors': [{'detail': 'Server not found'}]
})
mock_pterodactyl_api.session.request = AsyncMock(return_value=mock_response)
mock_pterodactyl_api.session.request.return_value.__aenter__ = AsyncMock(return_value=mock_response)
mock_pterodactyl_api.session.request.return_value.__aexit__ = AsyncMock()
# Create a mock context manager that returns the error response
mock_context = AsyncMock()
mock_context.__aenter__.return_value = mock_response
mock_context.__aexit__.return_value = AsyncMock()
# Configure the session.request to return the context manager
mock_pterodactyl_api.session.request = Mock(return_value=mock_context)
result = await mock_pterodactyl_api._request('GET', 'test/endpoint')
@@ -233,7 +328,13 @@ class TestPterodactylAPI:
@pytest.mark.asyncio
async def test_get_servers(self, mock_pterodactyl_api, sample_server_data):
"""Test retrieving server list."""
"""
Test retrieving server list from API.
Args:
mock_pterodactyl_api: Pytest fixture providing mocked API instance
sample_server_data: Pytest fixture providing sample server data
"""
mock_pterodactyl_api._request = AsyncMock(return_value={
'data': [sample_server_data]
})
@@ -248,7 +349,13 @@ class TestPterodactylAPI:
@pytest.mark.asyncio
async def test_get_server_resources(self, mock_pterodactyl_api, sample_resources_data):
"""Test retrieving server resource usage."""
"""
Test retrieving server resource usage from API.
Args:
mock_pterodactyl_api: Pytest fixture providing mocked API instance
sample_resources_data: Pytest fixture providing sample resource data
"""
mock_pterodactyl_api._request = AsyncMock(return_value=sample_resources_data)
resources = await mock_pterodactyl_api.get_server_resources('abc123')
@@ -260,7 +367,12 @@ class TestPterodactylAPI:
@pytest.mark.asyncio
async def test_send_power_action_valid(self, mock_pterodactyl_api):
"""Test sending valid power action."""
"""
Test sending valid power action to server.
Args:
mock_pterodactyl_api: Pytest fixture providing mocked API instance
"""
mock_pterodactyl_api._request = AsyncMock(return_value={'status': 'success'})
result = await mock_pterodactyl_api.send_power_action('abc123', 'start')
@@ -272,7 +384,12 @@ class TestPterodactylAPI:
@pytest.mark.asyncio
async def test_send_power_action_invalid(self, mock_pterodactyl_api):
"""Test sending invalid power action."""
"""
Test sending invalid power action returns error.
Args:
mock_pterodactyl_api: Pytest fixture providing mocked API instance
"""
result = await mock_pterodactyl_api.send_power_action('abc123', 'invalid_action')
assert result['status'] == 'error'
@@ -287,7 +404,9 @@ class TestServerMetricsGraphs:
"""Test server metrics tracking and graphing."""
def test_initialization(self):
"""Test metrics graph initialization."""
"""
Test metrics graph initialization with empty state.
"""
graphs = ServerMetricsGraphs('abc123', 'Test Server')
assert graphs.server_id == 'abc123'
@@ -296,7 +415,9 @@ class TestServerMetricsGraphs:
assert graphs.has_sufficient_data is False
def test_add_data_point(self):
"""Test adding data points."""
"""
Test adding data points and checking sufficient data threshold.
"""
graphs = ServerMetricsGraphs('abc123', 'Test Server')
graphs.add_data_point(50.0, 1024.0)
@@ -310,10 +431,12 @@ class TestServerMetricsGraphs:
assert graphs.has_sufficient_data is True
def test_data_rotation(self):
"""Test automatic data point rotation (FIFO with maxlen=6)."""
"""
Test automatic data point rotation (FIFO with maxlen=6).
"""
graphs = ServerMetricsGraphs('abc123', 'Test Server')
# Add 8 data points
# Add 8 data points to test rotation
for i in range(8):
graphs.add_data_point(float(i * 10), float(i * 100))
@@ -323,7 +446,9 @@ class TestServerMetricsGraphs:
assert graphs.data_points[-1][1] == 70.0 # CPU of 8th point
def test_cpu_scale_calculation(self):
"""Test dynamic CPU scale limit calculation."""
"""
Test dynamic CPU scale limit calculation for multi-vCPU servers.
"""
graphs = ServerMetricsGraphs('abc123', 'Test Server')
# Test single vCPU (<=100%)
@@ -336,10 +461,12 @@ class TestServerMetricsGraphs:
assert graphs._calculate_cpu_scale_limit(350.0) == 400
def test_get_data_summary(self):
"""Test data summary generation."""
"""
Test data summary generation including trends.
"""
graphs = ServerMetricsGraphs('abc123', 'Test Server')
# No data
# No data case
summary = graphs.get_data_summary()
assert summary['point_count'] == 0
assert summary['has_data'] is False
@@ -356,7 +483,9 @@ class TestServerMetricsGraphs:
assert summary['cpu_trend'] == 'increasing'
def test_generate_graph_insufficient_data(self):
"""Test graph generation with insufficient data."""
"""
Test graph generation returns None with insufficient data.
"""
graphs = ServerMetricsGraphs('abc123', 'Test Server')
# Only one data point - should return None
@@ -371,12 +500,16 @@ class TestServerMetricsManager:
"""Test server metrics manager."""
def test_initialization(self):
"""Test manager initialization."""
"""
Test manager initialization with empty state.
"""
manager = ServerMetricsManager()
assert len(manager.server_graphs) == 0
def test_get_or_create_server_graphs(self):
"""Test getting or creating server graphs."""
"""
Test that getting or creating server graphs returns the same instance.
"""
manager = ServerMetricsManager()
graphs1 = manager.get_or_create_server_graphs('abc123', 'Test Server')
@@ -386,7 +519,9 @@ class TestServerMetricsManager:
assert len(manager.server_graphs) == 1
def test_add_server_data(self):
"""Test adding data through manager."""
"""
Test that adding data through the manager creates graphs.
"""
manager = ServerMetricsManager()
manager.add_server_data('abc123', 'Test Server', 50.0, 1024.0)
@@ -396,7 +531,9 @@ class TestServerMetricsManager:
assert len(graphs.data_points) == 1
def test_remove_server(self):
"""Test removing server from tracking."""
"""
Test removing server from tracking.
"""
manager = ServerMetricsManager()
manager.add_server_data('abc123', 'Test Server', 50.0, 1024.0)
@@ -406,7 +543,9 @@ class TestServerMetricsManager:
assert 'abc123' not in manager.server_graphs
def test_cleanup_old_servers(self):
"""Test cleanup of inactive servers."""
"""
Test cleanup of inactive servers not in active list.
"""
manager = ServerMetricsManager()
# Add data for 3 servers
@@ -422,7 +561,9 @@ class TestServerMetricsManager:
assert 'server3' not in manager.server_graphs
def test_get_summary(self):
"""Test getting manager summary."""
"""
Test getting manager summary with statistics.
"""
manager = ServerMetricsManager()
# Add some servers with varying data
@@ -445,7 +586,13 @@ class TestServerStatusView:
@pytest.mark.asyncio
async def test_view_initialization(self, mock_pterodactyl_api, sample_server_data):
"""Test view initialization."""
"""
Test view initialization with server data.
Args:
mock_pterodactyl_api: Pytest fixture providing mocked API instance
sample_server_data: Pytest fixture providing sample server data
"""
view = ServerStatusView(
'abc123',
'Test Server',
@@ -460,7 +607,14 @@ class TestServerStatusView:
@pytest.mark.asyncio
async def test_interaction_check_authorized(self, mock_pterodactyl_api,
sample_server_data, mock_discord_interaction):
"""Test interaction check with authorized user."""
"""
Test interaction check with authorized user having required role.
Args:
mock_pterodactyl_api: Pytest fixture providing mocked API instance
sample_server_data: Pytest fixture providing sample server data
mock_discord_interaction: Pytest fixture providing mocked Discord interaction
"""
view = ServerStatusView('abc123', 'Test Server',
mock_pterodactyl_api, sample_server_data)
@@ -471,7 +625,14 @@ class TestServerStatusView:
@pytest.mark.asyncio
async def test_interaction_check_wrong_guild(self, mock_pterodactyl_api,
sample_server_data, mock_discord_interaction):
"""Test interaction check with wrong guild."""
"""
Test interaction check rejects wrong guild.
Args:
mock_pterodactyl_api: Pytest fixture providing mocked API instance
sample_server_data: Pytest fixture providing sample server data
mock_discord_interaction: Pytest fixture providing mocked Discord interaction
"""
view = ServerStatusView('abc123', 'Test Server',
mock_pterodactyl_api, sample_server_data)
@@ -488,7 +649,9 @@ class TestPterodactylBot:
@pytest.mark.asyncio
async def test_bot_initialization(self):
"""Test bot initialization."""
"""
Test bot initialization with default values.
"""
intents = discord.Intents.default()
bot = PterodactylBot(command_prefix="!", intents=intents)
@@ -498,7 +661,9 @@ class TestPterodactylBot:
@pytest.mark.asyncio
async def test_track_new_embed(self):
"""Test tracking new embed location."""
"""
Test tracking new embed location in storage.
"""
intents = discord.Intents.default()
bot = PterodactylBot(command_prefix="!", intents=intents)
@@ -516,7 +681,12 @@ class TestPterodactylBot:
@pytest.mark.asyncio
async def test_load_embed_locations(self, tmp_path):
"""Test loading embed locations from file."""
"""
Test loading embed locations from JSON file.
Args:
tmp_path: Pytest fixture providing temporary directory
"""
intents = discord.Intents.default()
bot = PterodactylBot(command_prefix="!", intents=intents)
@@ -538,7 +708,12 @@ class TestPterodactylBot:
@pytest.mark.asyncio
async def test_save_embed_locations(self, tmp_path):
"""Test saving embed locations to file."""
"""
Test saving embed locations to JSON file.
Args:
tmp_path: Pytest fixture providing temporary directory
"""
intents = discord.Intents.default()
bot = PterodactylBot(command_prefix="!", intents=intents)
@@ -568,8 +743,15 @@ class TestIntegration:
@pytest.mark.asyncio
async def test_server_status_command_flow(self, mock_discord_interaction,
sample_server_data, sample_resources_data):
"""Test complete server status command flow."""
sample_server_data, sample_resources_data):
"""
Test complete server status command flow.
Args:
mock_discord_interaction: Pytest fixture providing mocked Discord interaction
sample_server_data: Pytest fixture providing sample server data
sample_resources_data: Pytest fixture providing sample resource data
"""
# This would require extensive mocking of Discord.py internals
# Simplified test to verify command registration
@@ -581,7 +763,9 @@ class TestIntegration:
@pytest.mark.asyncio
async def test_metrics_collection_and_graphing(self):
"""Test complete metrics collection and graph generation flow."""
"""
Test complete metrics collection and graph generation flow.
"""
manager = ServerMetricsManager()
# Simulate data collection over time