sync worked

This commit is contained in:
2026-02-25 15:20:26 +03:00
parent 2a5b225800
commit 590ba49ddb
6 changed files with 319 additions and 61 deletions

View File

@@ -37,8 +37,11 @@ def db_session():
def _make_config_manager(cron="0 2 * * *"):
"""Creates a mock config manager with settable config."""
config = {"migration_sync_cron": cron}
"""Creates a mock config manager with a realistic AppConfig-like object."""
settings = MagicMock()
settings.migration_sync_cron = cron
config = MagicMock()
config.settings = settings
cm = MagicMock()
cm.get_config.return_value = config
cm.save_config = MagicMock()
@@ -63,11 +66,11 @@ async def test_get_migration_settings_returns_default_cron():
@pytest.mark.asyncio
async def test_get_migration_settings_returns_fallback_when_no_cron():
"""When migration_sync_cron is not in config, should return default '0 2 * * *'."""
"""When migration_sync_cron uses the default, should return '0 2 * * *'."""
from src.api.routes.migration import get_migration_settings
cm = MagicMock()
cm.get_config.return_value = {} # No cron key
# Use the default cron value (simulating a fresh config)
cm = _make_config_manager()
result = await get_migration_settings(config_manager=cm, _=None)
@@ -161,4 +164,123 @@ async def test_get_resource_mappings_respects_pagination(db_session):
assert len(result) == 2
# --- trigger_sync_now tests ---
@pytest.fixture
def _mock_env():
    """Mock environment object shaped like a configured Superset environment."""
    attrs = {
        "id": "test-env-1",
        "name": "Test Env",
        "url": "http://superset.test",
        "username": "admin",
        "password": "admin",
        "verify_ssl": False,
        "timeout": 30,
    }
    env = MagicMock()
    for key, value in attrs.items():
        setattr(env, key, value)
    return env
def _make_sync_config_manager(environments):
"""Creates a mock config manager with environments list."""
settings = MagicMock()
settings.migration_sync_cron = "0 2 * * *"
config = MagicMock()
config.settings = settings
config.environments = environments
cm = MagicMock()
cm.get_config.return_value = config
return cm
@pytest.mark.asyncio
async def test_trigger_sync_now_creates_env_row_and_syncs(db_session, _mock_env):
    """trigger_sync_now must upsert an Environment row before syncing, so
    resource_mappings inserts never hit a foreign-key violation."""
    from src.api.routes.migration import trigger_sync_now
    from src.models.mapping import Environment as EnvironmentModel

    cm = _make_sync_config_manager([_mock_env])
    with patch("src.api.routes.migration.SupersetClient") as client_cls, \
         patch("src.api.routes.migration.IdMappingService") as service_cls:
        client = MagicMock()
        client_cls.return_value = client
        service = MagicMock()
        service_cls.return_value = service

        result = await trigger_sync_now(config_manager=cm, db=db_session, _=None)

        # The Environment row must exist in the DB after the call.
        row = db_session.query(EnvironmentModel).filter_by(id="test-env-1").first()
        assert row is not None
        assert row.name == "Test Env"
        assert row.url == "http://superset.test"
        # The sync itself must have been invoked exactly once for this env.
        service.sync_environment.assert_called_once_with("test-env-1", client)
        assert result["synced_count"] == 1
        assert result["failed_count"] == 0
@pytest.mark.asyncio
async def test_trigger_sync_now_rejects_empty_environments(db_session):
    """An empty environment list must be rejected with an HTTP 400."""
    from src.api.routes.migration import trigger_sync_now

    manager = _make_sync_config_manager([])
    with pytest.raises(HTTPException) as excinfo:
        await trigger_sync_now(config_manager=manager, db=db_session, _=None)

    assert excinfo.value.status_code == 400
    assert "No environments" in excinfo.value.detail
@pytest.mark.asyncio
async def test_trigger_sync_now_handles_partial_failure(db_session, _mock_env):
    """When sync_environment raises for one env, it lands in the failed
    list while the other env still counts as synced."""
    from src.api.routes.migration import trigger_sync_now

    failing_env = MagicMock()
    failing_env.id = "test-env-2"
    failing_env.name = "Failing Env"
    failing_env.url = "http://fail.test"
    failing_env.username = "admin"
    failing_env.password = "admin"
    failing_env.verify_ssl = False
    failing_env.timeout = 30

    cm = _make_sync_config_manager([_mock_env, failing_env])
    with patch("src.api.routes.migration.SupersetClient") as client_cls, \
         patch("src.api.routes.migration.IdMappingService") as service_cls:
        service = MagicMock()
        # First env succeeds, second raises — order matches the env list.
        service.sync_environment.side_effect = [None, RuntimeError("Connection refused")]
        service_cls.return_value = service
        client_cls.return_value = MagicMock()

        result = await trigger_sync_now(config_manager=cm, db=db_session, _=None)

        assert result["synced_count"] == 1
        assert result["failed_count"] == 1
        assert result["details"]["failed"][0]["env_id"] == "test-env-2"
@pytest.mark.asyncio
async def test_trigger_sync_now_idempotent_env_upsert(db_session, _mock_env):
    """Running the sync twice must not duplicate the Environment row."""
    from src.api.routes.migration import trigger_sync_now
    from src.models.mapping import Environment as EnvironmentModel

    cm = _make_sync_config_manager([_mock_env])
    with patch("src.api.routes.migration.SupersetClient"), \
         patch("src.api.routes.migration.IdMappingService"):
        for _ in range(2):
            await trigger_sync_now(config_manager=cm, db=db_session, _=None)

    assert db_session.query(EnvironmentModel).filter_by(id="test-env-1").count() == 1
# [/DEF:backend.src.api.routes.__tests__.test_migration_routes:Module]

View File

@@ -48,7 +48,7 @@ async def get_dashboards(
# @POST: Starts the migration task and returns the task ID.
# @PARAM: selection (DashboardSelection) - The dashboards to migrate.
# @RETURN: Dict - {"task_id": str, "message": str}
@router.post("/execute")
@router.post("/migration/execute")
async def execute_migration(
selection: DashboardSelection,
config_manager=Depends(get_config_manager),
@@ -85,22 +85,20 @@ async def execute_migration(
# [DEF:get_migration_settings:Function]
# @PURPOSE: Get current migration Cron string explicitly.
# @RETURN: Dict[str, str] - {"cron": <cron expression>}
@router.get("/migration/settings", response_model=Dict[str, str])
async def get_migration_settings(
    config_manager=Depends(get_config_manager),
    _ = Depends(has_permission("plugin:migration", "READ"))
):
    with belief_scope("get_migration_settings"):
        # Cron lives on the typed AppConfig settings object; the schema
        # supplies the default, so no dict-style fallback lookup is needed.
        config = config_manager.get_config()
        cron = config.settings.migration_sync_cron
        return {"cron": cron}
# [/DEF:get_migration_settings:Function]
# [DEF:update_migration_settings:Function]
# @PURPOSE: Update migration Cron string.
@router.put("/settings", response_model=Dict[str, str])
@router.put("/migration/settings", response_model=Dict[str, str])
async def update_migration_settings(
payload: Dict[str, str],
config_manager=Depends(get_config_manager),
@@ -111,20 +109,17 @@ async def update_migration_settings(
raise HTTPException(status_code=400, detail="Missing 'cron' field in payload")
cron_expr = payload["cron"]
# Basic validation could go here
# In a real system, you'd save this to config and restart the scheduler.
# Here we just blindly patch the in-memory or file config for the MVP.
current_cfg = config_manager.get_config()
current_cfg["migration_sync_cron"] = cron_expr
config_manager.save_config(current_cfg)
config = config_manager.get_config()
config.settings.migration_sync_cron = cron_expr
config_manager.save_config(config)
return {"cron": cron_expr, "status": "updated"}
# [/DEF:update_migration_settings:Function]
# [DEF:get_resource_mappings:Function]
# @PURPOSE: Fetch all synchronized object mappings from the database.
@router.get("/mappings-data", response_model=List[Dict[str, Any]])
@router.get("/migration/mappings-data", response_model=List[Dict[str, Any]])
async def get_resource_mappings(
skip: int = Query(0, ge=0),
limit: int = Query(100, ge=1, le=1000),
@@ -147,4 +142,62 @@ async def get_resource_mappings(
return result
# [/DEF:get_resource_mappings:Function]
# [DEF:trigger_sync_now:Function]
# @PURPOSE: Triggers an immediate ID synchronization for all environments.
# @PRE: At least one environment must be configured.
# @POST: Environment rows are ensured in DB; sync_environment is called for each.
# @RETURN: Dict - {"status", "synced_count", "failed_count", "details"}
# @RAISES: HTTPException(400) when no environments are configured.
@router.post("/migration/sync-now", response_model=Dict[str, Any])
async def trigger_sync_now(
    config_manager=Depends(get_config_manager),
    db: Session = Depends(get_db),
    _ = Depends(has_permission("plugin:migration", "EXECUTE"))
):
    with belief_scope("trigger_sync_now"):
        # Local imports keep logger/model out of module scope; presumably to
        # avoid an import cycle — TODO confirm.
        from ...core.logger import logger
        from ...models.mapping import Environment as EnvironmentModel
        config = config_manager.get_config()
        environments = config.environments
        if not environments:
            raise HTTPException(status_code=400, detail="No environments configured")
        # Ensure each environment exists in DB (upsert) to satisfy FK constraints
        # before any resource_mappings rows reference env.id.
        for env in environments:
            existing = db.query(EnvironmentModel).filter_by(id=env.id).first()
            if not existing:
                db_env = EnvironmentModel(
                    id=env.id,
                    name=env.name,
                    url=env.url,
                    credentials_id=env.id,  # Use env.id as credentials reference
                )
                db.add(db_env)
                logger.info(f"[trigger_sync_now][Action] Created environment row for {env.id}")
            else:
                # Refresh mutable fields on re-sync; credentials_id is left untouched.
                existing.name = env.name
                existing.url = env.url
        # Single commit for the whole upsert pass, before any sync begins.
        db.commit()
        service = IdMappingService(db)
        results = {"synced": [], "failed": []}
        for env in environments:
            try:
                client = SupersetClient(env)
                service.sync_environment(env.id, client)
                results["synced"].append(env.id)
                logger.info(f"[trigger_sync_now][Action] Synced environment {env.id}")
            except Exception as e:
                # Best-effort: one failing environment must not abort the rest;
                # the failure is reported back to the caller in `details`.
                results["failed"].append({"env_id": env.id, "error": str(e)})
                logger.error(f"[trigger_sync_now][Error] Failed to sync {env.id}: {e}")
        return {
            "status": "completed",
            "synced_count": len(results["synced"]),
            "failed_count": len(results["failed"]),
            "details": results
        }
# [/DEF:trigger_sync_now:Function]
# [/DEF:backend.src.api.routes.migration:Module]