feat: Enhance ID mapping service robustness, add defensive guards, and expand migration engine and API testing.

This commit is contained in:
2026-02-25 14:44:21 +03:00
parent 33433c3173
commit 2a5b225800
11 changed files with 640 additions and 32 deletions

View File

@@ -0,0 +1,164 @@
# [DEF:backend.src.api.routes.__tests__.test_migration_routes:Module]
#
# @TIER: STANDARD
# @PURPOSE: Unit tests for migration API route handlers.
# @LAYER: API
# @RELATION: VERIFIES -> backend.src.api.routes.migration
#
import pytest
import sys
from pathlib import Path
from unittest.mock import MagicMock, AsyncMock, patch
from datetime import datetime, timezone
# Add backend directory to sys.path
backend_dir = str(Path(__file__).parent.parent.parent.parent.resolve())
if backend_dir not in sys.path:
sys.path.insert(0, backend_dir)
from fastapi import HTTPException
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from src.models.mapping import Base, ResourceMapping, ResourceType
# --- Fixtures ---
@pytest.fixture
def db_session():
    """Yield a Session bound to a fresh in-memory SQLite database.

    The schema is created from ``Base.metadata`` before the test body
    runs. Teardown is wrapped in try/finally so the session is closed
    and the engine disposed even if the consuming test raises; the
    original version never disposed the engine, leaking its connection
    pool across tests.
    """
    engine = create_engine('sqlite:///:memory:')
    Base.metadata.create_all(engine)
    Session = sessionmaker(bind=engine)
    session = Session()
    try:
        yield session
    finally:
        session.close()
        engine.dispose()  # release the pooled in-memory connection
def _make_config_manager(cron="0 2 * * *"):
"""Creates a mock config manager with settable config."""
config = {"migration_sync_cron": cron}
cm = MagicMock()
cm.get_config.return_value = config
cm.save_config = MagicMock()
return cm
# --- get_migration_settings tests ---
@pytest.mark.asyncio
async def test_get_migration_settings_returns_default_cron():
    """The settings endpoint should echo the cron expression stored in config."""
    from src.api.routes.migration import get_migration_settings
    manager = _make_config_manager(cron="0 3 * * *")
    # Invoke the handler directly rather than through FastAPI's DI machinery.
    response = await get_migration_settings(config_manager=manager, _=None)
    assert response == {"cron": "0 3 * * *"}
    manager.get_config.assert_called_once()
@pytest.mark.asyncio
async def test_get_migration_settings_returns_fallback_when_no_cron():
    """An absent 'migration_sync_cron' key should yield the default '0 2 * * *'."""
    from src.api.routes.migration import get_migration_settings
    manager = MagicMock()
    manager.get_config.return_value = {}  # config holds no cron entry
    response = await get_migration_settings(config_manager=manager, _=None)
    assert response == {"cron": "0 2 * * *"}
# --- update_migration_settings tests ---
@pytest.mark.asyncio
async def test_update_migration_settings_saves_cron():
    """A well-formed payload should persist the new cron and report 'updated'."""
    from src.api.routes.migration import update_migration_settings
    manager = _make_config_manager()
    body = {"cron": "0 4 * * *"}
    response = await update_migration_settings(
        payload=body,
        config_manager=manager,
        _=None
    )
    assert response["status"] == "updated"
    assert response["cron"] == "0 4 * * *"
    manager.save_config.assert_called_once()
@pytest.mark.asyncio
async def test_update_migration_settings_rejects_missing_cron():
    """A payload lacking the 'cron' key should raise HTTP 400 mentioning it."""
    from src.api.routes.migration import update_migration_settings
    manager = _make_config_manager()
    with pytest.raises(HTTPException) as excinfo:
        await update_migration_settings(
            payload={"interval": "daily"},  # deliberately missing "cron"
            config_manager=manager,
            _=None
        )
    error = excinfo.value
    assert error.status_code == 400
    assert "cron" in error.detail.lower()
# --- get_resource_mappings tests ---
@pytest.mark.asyncio
async def test_get_resource_mappings_returns_formatted_list(db_session):
    """Each mapping row should be serialized to a dict with the expected keys."""
    from src.api.routes.migration import get_resource_mappings
    # Seed a single mapping to check the serialized shape.
    synced = datetime(2026, 1, 15, 12, 0, 0, tzinfo=timezone.utc)
    db_session.add(ResourceMapping(
        environment_id="prod",
        resource_type=ResourceType.CHART,
        uuid="uuid-1",
        remote_integer_id="42",
        resource_name="Sales Chart",
        last_synced_at=synced
    ))
    db_session.commit()
    rows = await get_resource_mappings(skip=0, limit=100, db=db_session, _=None)
    assert len(rows) == 1
    entry = rows[0]
    assert entry["environment_id"] == "prod"
    assert entry["resource_type"] == "chart"
    assert entry["uuid"] == "uuid-1"
    assert entry["remote_id"] == "42"
    assert entry["resource_name"] == "Sales Chart"
    assert entry["last_synced_at"] is not None
@pytest.mark.asyncio
async def test_get_resource_mappings_respects_pagination(db_session):
    """The skip/limit window should bound the number of rows returned."""
    from src.api.routes.migration import get_resource_mappings
    # Seed five rows, then ask for the page starting at offset 2.
    mappings = [
        ResourceMapping(
            environment_id="prod",
            resource_type=ResourceType.DATASET,
            uuid=f"uuid-{n}",
            remote_integer_id=str(n),
        )
        for n in range(5)
    ]
    db_session.add_all(mappings)
    db_session.commit()
    page = await get_resource_mappings(skip=2, limit=2, db=db_session, _=None)
    assert len(page) == 2
# [/DEF:backend.src.api.routes.__tests__.test_migration_routes:Module]

View File

@@ -101,11 +101,13 @@ class IdMappingService:
for res in resources:
res_uuid = res.get("uuid")
res_id = str(res.get("id")) # Store as string
raw_id = res.get("id")
res_name = res.get(name_field)
if not res_uuid or not res_id:
if not res_uuid or raw_id is None:
continue
res_id = str(raw_id) # Store as string
# Upsert Logic
mapping = self.db.query(ResourceMapping).filter_by(

View File

@@ -400,6 +400,8 @@ class SupersetClient:
# @RETURN: Dict - Ответ API в случае успеха.
def import_dashboard(self, file_name: Union[str, Path], dash_id: Optional[int] = None, dash_slug: Optional[str] = None) -> Dict:
with belief_scope("import_dashboard"):
if file_name is None:
raise ValueError("file_name cannot be None")
file_path = str(file_name)
self._validate_import_file(file_path)
try:

View File

@@ -51,6 +51,8 @@ class GitService:
# @RETURN: str
def _get_repo_path(self, dashboard_id: int) -> str:
    """Return the filesystem path of the repo for *dashboard_id*.

    Joins ``self.base_path`` with the stringified dashboard id.
    Raises ``ValueError`` when *dashboard_id* is ``None`` so the error
    surfaces here instead of as a confusing ``os.path.join`` failure.
    """
    # belief_scope appears to be a project-level tracing/logging context
    # manager — NOTE(review): confirm its semantics against its definition.
    with belief_scope("GitService._get_repo_path"):
        if dashboard_id is None:
            raise ValueError("dashboard_id cannot be None")
        return os.path.join(self.base_path, str(dashboard_id))
# [/DEF:_get_repo_path:Function]