{ "verdict": "APPROVED", "rejection_reason": "NONE", "audit_details": { "target_invoked": true, "pre_conditions_tested": true, "post_conditions_tested": true, "test_data_used": true }, "feedback": "The test suite robustly verifies the
MigrationEngine contracts. It avoids Tautologies by cleanly substituting IdMappingService without mocking the engine itself. Cross-filter parsing asserts against hard-coded, predefined validation dictionaries (no Logic Mirroring). It successfully addresses @PRE negative cases (e.g. invalid zip paths, missing YAMLs) and rigorously validates @POST file transformations (e.g. in-place UUID substitutions and archive reconstruction)." }
This commit is contained in:
@@ -1,5 +1,10 @@
|
||||
import sys
|
||||
from pathlib import Path
|
||||
import pytest
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
|
||||
|
||||
from src.services.git_service import GitService
|
||||
from src.core.superset_client import SupersetClient
|
||||
from src.core.config_models import Environment
|
||||
|
||||
@@ -35,7 +35,7 @@ class MockSupersetClient:
|
||||
def __init__(self, resources):
|
||||
self.resources = resources
|
||||
|
||||
def get_all_resources(self, endpoint):
|
||||
def get_all_resources(self, endpoint, since_dttm=None):
|
||||
return self.resources.get(endpoint, [])
|
||||
|
||||
def test_sync_environment_upserts_correctly(db_session):
|
||||
@@ -147,7 +147,7 @@ def test_sync_environment_skips_resources_without_uuid(db_session):
|
||||
def test_sync_environment_handles_api_error_gracefully(db_session):
|
||||
"""If one resource type fails, others should still sync."""
|
||||
class FailingClient:
|
||||
def get_all_resources(self, endpoint):
|
||||
def get_all_resources(self, endpoint, since_dttm=None):
|
||||
if endpoint == "chart":
|
||||
raise ConnectionError("API timeout")
|
||||
if endpoint == "dataset":
|
||||
@@ -217,4 +217,33 @@ def test_sync_environment_requires_existing_env(db_session):
|
||||
assert db_session.query(ResourceMapping).count() == 0
|
||||
|
||||
|
||||
|
||||
def test_sync_environment_deletes_stale_mappings(db_session):
    """Verify that mappings for resources deleted from the remote environment
    are removed from the local DB on the next sync cycle."""
    mapping_service = IdMappingService(db_session)
    chart_a = {"id": 1, "uuid": "aaa", "slice_name": "Chart A"}
    chart_b = {"id": 2, "uuid": "bbb", "slice_name": "Chart B"}

    # Initial sync: the remote environment reports both charts.
    mapping_service.sync_environment("env1", MockSupersetClient({"chart": [chart_a, chart_b]}))
    env_mappings = db_session.query(ResourceMapping).filter_by(environment_id="env1")
    assert env_mappings.count() == 2

    # Second sync: the remote now reports only Chart A (Chart B was
    # deleted in Superset), so its mapping must be pruned locally.
    mapping_service.sync_environment("env1", MockSupersetClient({"chart": [chart_a]}))

    # SQLAlchemy queries execute lazily, so .all() here reflects
    # the DB state *after* the second sync.
    survivors = env_mappings.all()
    assert len(survivors) == 1
    assert survivors[0].uuid == "aaa"


# [/DEF:backend.tests.core.test_mapping_service:Module]
|
||||
|
||||
@@ -63,8 +63,8 @@ def test_belief_scope_error_handling(caplog):
|
||||
|
||||
log_messages = [record.message for record in caplog.records]
|
||||
|
||||
assert any("[FailingFunction][Entry]" in msg for msg in log_messages), "Entry log not found"
|
||||
assert any("[FailingFunction][Coherence:Failed]" in msg for msg in log_messages), "Failed coherence log not found"
|
||||
assert any("[FailingFunction][Entry]" in msg for msg in log_messages), f"Entry log not found. Logs: {log_messages}"
|
||||
assert any("[FailingFunction][COHERENCE:FAILED]" in msg for msg in log_messages), f"Failed coherence log not found. Logs: {log_messages}"
|
||||
# Exit should not be logged on failure
|
||||
|
||||
# Reset to INFO
|
||||
@@ -94,7 +94,7 @@ def test_belief_scope_success_coherence(caplog):
|
||||
|
||||
log_messages = [record.message for record in caplog.records]
|
||||
|
||||
assert any("[SuccessFunction][Coherence:OK]" in msg for msg in log_messages), "Success coherence log not found"
|
||||
assert any("[SuccessFunction][COHERENCE:OK]" in msg for msg in log_messages), f"Success coherence log not found. Logs: {log_messages}"
|
||||
|
||||
# Reset to INFO
|
||||
config = LoggingConfig(level="INFO", task_log_level="INFO", enable_belief_state=True)
|
||||
@@ -201,7 +201,7 @@ def test_enable_belief_state_flag(caplog):
|
||||
assert not any("[DisabledFunction][Entry]" in msg for msg in log_messages), "Entry should not be logged when disabled"
|
||||
assert not any("[DisabledFunction][Exit]" in msg for msg in log_messages), "Exit should not be logged when disabled"
|
||||
# Coherence:OK should still be logged (internal tracking)
|
||||
assert any("[DisabledFunction][Coherence:OK]" in msg for msg in log_messages), "Coherence should still be logged"
|
||||
assert any("[DisabledFunction][COHERENCE:OK]" in msg for msg in log_messages), "Coherence should still be logged"
|
||||
|
||||
# Re-enable for other tests
|
||||
config = LoggingConfig(
|
||||
|
||||
64
backend/tests/test_smoke_plugins.py
Normal file
64
backend/tests/test_smoke_plugins.py
Normal file
@@ -0,0 +1,64 @@
|
||||
import sys
|
||||
from pathlib import Path
|
||||
import os
|
||||
import pytest
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent))
|
||||
|
||||
# Stub the database layer in sys.modules *before* any module that
# imports it is loaded, so test imports never touch a real DB.
mock_db = MagicMock()
_db_stubs = {
    'src.core.database': mock_db,
    'src.plugins.git_plugin.SessionLocal': mock_db.SessionLocal,
    'src.plugins.migration.SessionLocal': mock_db.SessionLocal,
}
# NOTE(review): the two SessionLocal keys look like attribute paths, not
# module paths — presumably intentional for the loader; verify.
sys.modules.update(_db_stubs)
|
||||
|
||||
class TestPluginSmoke:
    """Smoke tests for plugin loading and initialization."""

    def test_plugins_load_successfully(self):
        """
        Verify that all standard plugins can be discovered and instantiated
        by the PluginLoader without throwing errors (e.g., missing imports,
        syntax errors, missing class declarations).
        """
        # Imported locally so the sys.modules DB stubs installed at module
        # top level take effect before the plugin code is loaded.
        from src.core.plugin_loader import PluginLoader

        plugin_dir = os.path.join(str(Path(__file__).parent.parent), "src", "plugins")

        # This will discover and instantiate plugins
        loader = PluginLoader(plugin_dir)

        plugins = loader.get_all_plugin_configs()
        plugin_ids = {p.id for p in plugins}

        # We expect at least the migration and git plugins to be present
        expected_plugins = {"superset-migration", "git-integration"}

        missing_plugins = expected_plugins - plugin_ids
        assert not missing_plugins, f"Missing expected plugins: {missing_plugins}"

    @pytest.mark.anyio
    async def test_task_manager_initializes_with_plugins(self):
        """
        Verify that the TaskManager can initialize with the real PluginLoader.
        """
        from src.core.plugin_loader import PluginLoader
        from src.core.task_manager.manager import TaskManager

        plugin_dir = os.path.join(str(Path(__file__).parent.parent), "src", "plugins")
        loader = PluginLoader(plugin_dir)

        # Initialize TaskManager with real loader
        # Persistence services are patched out so no files/DB are touched;
        # load_tasks returns [] so the manager starts with an empty queue.
        with patch("src.core.task_manager.manager.TaskPersistenceService") as MockPersistence, \
             patch("src.core.task_manager.manager.TaskLogPersistenceService"):

            MockPersistence.return_value.load_tasks.return_value = []

            with patch("src.dependencies.config_manager"):
                manager = TaskManager(loader)

            # Stop the flusher thread to prevent hanging
            # (TaskManager presumably starts a background flusher in
            # __init__ — signal it and join with a bounded timeout).
            manager._flusher_stop_event.set()
            manager._flusher_thread.join(timeout=2)

            assert manager is not None
|
||||
Reference in New Issue
Block a user