dry run migration

This commit is contained in:
2026-02-27 20:48:18 +03:00
parent 149d230426
commit 8fa951fc93
16 changed files with 12141 additions and 2051 deletions

View File

@@ -0,0 +1,62 @@
# [DEF:backend.tests.core.migration.test_archive_parser:Module]
#
# @TIER: STANDARD
# @PURPOSE: Unit tests for MigrationArchiveParser ZIP extraction contract.
# @LAYER: Domain
# @RELATION: VERIFIES -> backend.src.core.migration.archive_parser
#
import os
import sys
import tempfile
import zipfile
from pathlib import Path
import yaml
# Make the backend package root importable when this test file is executed
# directly (without an installed package or configured pytest rootdir).
backend_dir = str(Path(__file__).parent.parent.parent.parent.resolve())
if backend_dir not in sys.path:
    sys.path.insert(0, backend_dir)
from src.core.migration.archive_parser import MigrationArchiveParser
def test_extract_objects_from_zip_collects_all_types():
    """Build a ZIP holding one dashboard/chart/dataset YAML and verify the
    parser returns each object under its type key with the expected uuid.

    Improvements over the original: pytest-native ``assert`` statements
    (richer failure diffs than manual ``raise AssertionError``), pathlib
    traversal instead of ``os.walk``, and ``yaml.safe_dump`` for plain data.
    """
    parser = MigrationArchiveParser()
    with tempfile.TemporaryDirectory() as td:
        td_path = Path(td)
        zip_path = td_path / "objects.zip"
        src_dir = td_path / "src"
        # One YAML document per object type, mirroring a Superset export layout.
        fixtures = {
            "dashboards/dash.yaml": {"uuid": "dash-u1", "dashboard_title": "D1", "json_metadata": "{}"},
            "charts/chart.yaml": {"uuid": "chart-u1", "slice_name": "C1", "viz_type": "bar"},
            "datasets/dataset.yaml": {"uuid": "ds-u1", "table_name": "orders", "database_uuid": "db-u1"},
        }
        for rel_path, payload in fixtures.items():
            target = src_dir / rel_path
            target.parent.mkdir(parents=True, exist_ok=True)
            # safe_dump is the recommended serializer for plain dicts (no python tags).
            target.write_text(yaml.safe_dump(payload))
        with zipfile.ZipFile(zip_path, "w") as zip_obj:
            # Archive names are relative to src so the parser sees "dashboards/...".
            for file_path in sorted(src_dir.rglob("*")):
                if file_path.is_file():
                    zip_obj.write(file_path, file_path.relative_to(src_dir))
        extracted = parser.extract_objects_from_zip(str(zip_path))
        assert len(extracted["dashboards"]) == 1, "dashboards extraction size mismatch"
        assert extracted["dashboards"][0]["uuid"] == "dash-u1", "dashboard uuid mismatch"
        assert len(extracted["charts"]) == 1, "charts extraction size mismatch"
        assert extracted["charts"][0]["uuid"] == "chart-u1", "chart uuid mismatch"
        assert len(extracted["datasets"]) == 1, "datasets extraction size mismatch"
        assert extracted["datasets"][0]["uuid"] == "ds-u1", "dataset uuid mismatch"
# [/DEF:backend.tests.core.migration.test_archive_parser:Module]

View File

@@ -0,0 +1,110 @@
# [DEF:backend.tests.core.migration.test_dry_run_orchestrator:Module]
#
# @TIER: STANDARD
# @PURPOSE: Unit tests for MigrationDryRunService diff and risk computation contracts.
# @LAYER: Domain
# @RELATION: VERIFIES -> backend.src.core.migration.dry_run_orchestrator
#
import json
import sys
from pathlib import Path
from unittest.mock import MagicMock, patch
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.pool import StaticPool
# Make the backend package root importable when this test file is executed
# directly (without an installed package or configured pytest rootdir).
backend_dir = str(Path(__file__).parent.parent.parent.parent.resolve())
if backend_dir not in sys.path:
    sys.path.insert(0, backend_dir)
from src.core.migration.dry_run_orchestrator import MigrationDryRunService
from src.models.dashboard import DashboardSelection
from src.models.mapping import Base
def _load_fixture() -> dict:
    """Load the shared dry-run fixture JSON from backend/tests/fixtures."""
    fixture_file = Path(__file__).parents[2] / "fixtures" / "migration_dry_run_fixture.json"
    with fixture_file.open() as handle:
        return json.load(handle)
def _make_session():
    """Return a throwaway in-memory SQLite session with the mapping schema created.

    StaticPool + check_same_thread=False keep the single in-memory database
    alive and shareable for the lifetime of the test.
    """
    engine = create_engine(
        "sqlite:///:memory:",
        connect_args={"check_same_thread": False},
        poolclass=StaticPool,
    )
    Base.metadata.create_all(engine)
    return sessionmaker(bind=engine)()
def test_migration_dry_run_service_builds_diff_and_risk():
    """End-to-end dry-run contract: diff summary counts and risk detection.

    Both Superset clients are mocked and the parser returns the fixture's
    pre-transformed ZIP objects, so only the service's diff/risk logic runs.

    Improvements over the original: pytest-native ``assert`` statements and a
    single data-driven loop replacing four copy-pasted ``(len(rows), rows)``
    mock configurations for the target listing endpoints.
    """
    # @TEST_CONTRACT: dry_run_result_contract -> {
    #   required_fields: {diff: object, summary: object, risk: object},
    #   invariants: ["risk.score >= 0", "summary.selected_dashboards == len(selection.selected_ids)"]
    # }
    # @TEST_FIXTURE: migration_dry_run_fixture -> backend/tests/fixtures/migration_dry_run_fixture.json
    # @TEST_EDGE: missing_target_datasource -> fixture.transformed_zip_objects.datasets[0].database_uuid
    # @TEST_EDGE: breaking_reference -> fixture.transformed_zip_objects.charts[0].dataset_uuid
    fixture = _load_fixture()
    db = _make_session()
    selection = DashboardSelection(
        selected_ids=[42],
        source_env_id="src",
        target_env_id="tgt",
        replace_db_config=False,
        fix_cross_filters=True,
    )
    source_client = MagicMock()
    source_client.get_dashboards_summary.return_value = fixture["source_dashboard_summary"]
    # Minimal ZIP magic bytes stand in for a real export payload.
    source_client.export_dashboard.return_value = (b"PK\x03\x04", "source.zip")
    target_client = MagicMock()
    # Every target listing endpoint returns the same (count, rows) tuple shape.
    for method_name, fixture_key in (
        ("get_dashboards", "dashboards"),
        ("get_datasets", "datasets"),
        ("get_charts", "charts"),
        ("get_databases", "databases"),
    ):
        rows = fixture["target"][fixture_key]
        getattr(target_client, method_name).return_value = (len(rows), rows)
    parser = MagicMock()
    parser.extract_objects_from_zip.return_value = fixture["transformed_zip_objects"]
    service = MigrationDryRunService(parser=parser)
    with patch("src.core.migration.dry_run_orchestrator.MigrationEngine") as EngineMock:
        engine = MagicMock()
        engine.transform_zip.return_value = True
        EngineMock.return_value = engine
        result = service.run(selection, source_client, target_client, db)
    assert "summary" in result, "summary is missing in dry-run payload"
    assert result["summary"]["selected_dashboards"] == 1, "selected_dashboards summary mismatch"
    assert result["summary"]["dashboards"]["update"] == 1, "dashboard update count mismatch"
    assert result["summary"]["charts"]["create"] == 1, "chart create count mismatch"
    assert result["summary"]["datasets"]["create"] == 1, "dataset create count mismatch"
    risk_codes = {item["code"] for item in result["risk"]["items"]}
    assert "missing_datasource" in risk_codes, "missing_datasource risk is not detected"
    assert "breaking_reference" in risk_codes, "breaking_reference risk is not detected"
# [/DEF:backend.tests.core.migration.test_dry_run_orchestrator:Module]

View File

@@ -0,0 +1,58 @@
{
"source_dashboard_summary": [
{
"id": 42,
"title": "Sales"
}
],
"target": {
"dashboards": [
{
"uuid": "dash-1",
"dashboard_title": "Sales Old",
"slug": "sales-old",
"position_json": "{}",
"json_metadata": "{}",
"description": "",
"owners": [
{
"username": "owner-a"
}
]
}
],
"datasets": [],
"charts": [],
"databases": []
},
"transformed_zip_objects": {
"dashboards": [
{
"uuid": "dash-1",
"title": "Sales New",
"signature": "{\"title\":\"Sales New\"}",
"owners": [
{
"username": "owner-b"
}
]
}
],
"charts": [
{
"uuid": "chart-1",
"title": "Chart A",
"signature": "{\"title\":\"Chart A\"}",
"dataset_uuid": "dataset-404"
}
],
"datasets": [
{
"uuid": "dataset-1",
"title": "orders",
"signature": "{\"title\":\"orders\"}",
"database_uuid": "db-missing"
}
]
}
}