# [DEF:backend.src.api.routes.migration:Module]
# @TIER: STANDARD
# @SEMANTICS: api, migration, dashboards
# @PURPOSE: API endpoints for migration operations.
# @LAYER: API
# @RELATION: DEPENDS_ON -> backend.src.dependencies
# @RELATION: DEPENDS_ON -> backend.src.models.dashboard

from typing import List, Dict, Any, Optional

from fastapi import APIRouter, Depends, HTTPException, Query
from sqlalchemy.orm import Session

from ...core.database import get_db
from ...core.logger import belief_scope
from ...core.mapping_service import IdMappingService
from ...core.migration.dry_run_orchestrator import MigrationDryRunService
from ...core.superset_client import SupersetClient
from ...dependencies import get_config_manager, get_task_manager, has_permission
from ...models.dashboard import DashboardMetadata, DashboardSelection
from ...models.mapping import ResourceMapping

# Shared router for every migration endpoint; all paths are mounted under /api.
router = APIRouter(prefix="/api", tags=["migration"])

# [DEF:get_dashboards:Function]
# @PURPOSE: Fetch all dashboards from the specified environment for the grid.
# @PRE: Environment ID must be valid.
# @POST: Returns a list of dashboard metadata.
# @PARAM: env_id (str) - The ID of the environment to fetch from.
# @RETURN: List[DashboardMetadata]
@router.get("/environments/{env_id}/dashboards", response_model=List[DashboardMetadata])
async def get_dashboards(
    env_id: str,
    config_manager=Depends(get_config_manager),
    _=Depends(has_permission("plugin:migration", "EXECUTE"))
):
    with belief_scope("get_dashboards", f"env_id={env_id}"):
        # Resolve the target environment by ID; an unknown ID is a 404.
        env = None
        for candidate in config_manager.get_environments():
            if candidate.id == env_id:
                env = candidate
                break
        if not env:
            raise HTTPException(status_code=404, detail="Environment not found")

        # Delegate the actual listing to the Superset API client.
        return SupersetClient(env).get_dashboards_summary()
# [/DEF:get_dashboards:Function]

# [DEF:execute_migration:Function]
# @PURPOSE: Execute the migration of selected dashboards.
# @PRE: Selection must be valid and environments must exist.
# @POST: Starts the migration task and returns the task ID.
# @PARAM: selection (DashboardSelection) - The dashboards to migrate.
# @RETURN: Dict - {"task_id": str, "message": str}
@router.post("/migration/execute")
async def execute_migration(
    selection: DashboardSelection,
    config_manager=Depends(get_config_manager),
    task_manager=Depends(get_task_manager),
    _=Depends(has_permission("plugin:migration", "EXECUTE"))
):
    with belief_scope("execute_migration"):
        # Both endpoints of the migration must refer to configured environments.
        env_ids = {env.id for env in config_manager.get_environments()}
        source_ok = selection.source_env_id in env_ids
        target_ok = selection.target_env_id in env_ids
        if not (source_ok and target_ok):
            raise HTTPException(status_code=400, detail="Invalid source or target environment")

        from ...core.logger import logger

        # Serialize the selection and explicitly carry the two option flags so the
        # task always receives them regardless of the model's export settings.
        task_params = selection.dict()
        task_params['replace_db_config'] = selection.replace_db_config
        task_params['fix_cross_filters'] = selection.fix_cross_filters

        logger.info(f"Creating migration task with params: {task_params}")
        logger.info(f"Available environments: {env_ids}")
        logger.info(f"Source env: {selection.source_env_id}, Target env: {selection.target_env_id}")

        try:
            task = await task_manager.create_task("superset-migration", task_params)
            logger.info(f"Task created successfully: {task.id}")
            return {"task_id": task.id, "message": "Migration initiated"}
        except Exception as e:
            # Surface task-manager failures as a 500 with the underlying reason.
            logger.error(f"Task creation failed: {e}")
            raise HTTPException(status_code=500, detail=f"Failed to create migration task: {str(e)}")
# [/DEF:execute_migration:Function]

# [DEF:dry_run_migration:Function]
# @PURPOSE: Build pre-flight diff and risk summary without applying migration.
# @PRE: Selection and environments are valid.
# @POST: Returns deterministic JSON diff and risk scoring.
@router.post("/migration/dry-run", response_model=Dict[str, Any])
async def dry_run_migration(
    selection: DashboardSelection,
    config_manager=Depends(get_config_manager),
    db: Session = Depends(get_db),
    _=Depends(has_permission("plugin:migration", "EXECUTE"))
):
    with belief_scope("dry_run_migration"):
        env_by_id = {env.id: env for env in config_manager.get_environments()}
        source_env = env_by_id.get(selection.source_env_id)
        target_env = env_by_id.get(selection.target_env_id)

        # Guard clauses: reject malformed selections before any remote calls.
        if not source_env or not target_env:
            raise HTTPException(status_code=400, detail="Invalid source or target environment")
        if selection.source_env_id == selection.target_env_id:
            raise HTTPException(status_code=400, detail="Source and target environments must be different")
        if not selection.selected_ids:
            raise HTTPException(status_code=400, detail="No dashboards selected for dry run")

        try:
            return MigrationDryRunService().run(
                selection=selection,
                source_client=SupersetClient(source_env),
                target_client=SupersetClient(target_env),
                db=db,
            )
        except ValueError as exc:
            # The orchestrator reports unrecoverable input/state problems via ValueError.
            raise HTTPException(status_code=500, detail=str(exc)) from exc
# [/DEF:dry_run_migration:Function]

# [DEF:get_migration_settings:Function]
# @PURPOSE: Get current migration Cron string explicitly.
@router.get("/migration/settings", response_model=Dict[str, str])
async def get_migration_settings(
    config_manager=Depends(get_config_manager),
    _=Depends(has_permission("plugin:migration", "READ"))
):
    with belief_scope("get_migration_settings"):
        # Expose only the sync cron expression from the persisted settings.
        settings = config_manager.get_config().settings
        return {"cron": settings.migration_sync_cron}
# [/DEF:get_migration_settings:Function]

# [DEF:update_migration_settings:Function]
# @PURPOSE: Update migration Cron string.
@router.put("/migration/settings", response_model=Dict[str, str])
async def update_migration_settings(
    payload: Dict[str, str],
    config_manager=Depends(get_config_manager),
    _=Depends(has_permission("plugin:migration", "WRITE"))
):
    with belief_scope("update_migration_settings"):
        try:
            cron_expr = payload["cron"]
        except KeyError:
            raise HTTPException(status_code=400, detail="Missing 'cron' field in payload")

        # NOTE(review): the cron string is persisted as-is without syntax
        # validation — confirm the downstream scheduler tolerates bad input.
        config = config_manager.get_config()
        config.settings.migration_sync_cron = cron_expr
        config_manager.save_config(config)

        return {"cron": cron_expr, "status": "updated"}
# [/DEF:update_migration_settings:Function]

# [DEF:get_resource_mappings:Function]
# @PURPOSE: Fetch synchronized object mappings with search, filtering, and pagination.
@router.get("/migration/mappings-data", response_model=Dict[str, Any])
async def get_resource_mappings(
    skip: int = Query(0, ge=0),
    limit: int = Query(50, ge=1, le=500),
    search: Optional[str] = Query(None, description="Search by resource name or UUID"),
    env_id: Optional[str] = Query(None, description="Filter by environment ID"),
    resource_type: Optional[str] = Query(None, description="Filter by resource type"),
    db: Session = Depends(get_db),
    _=Depends(has_permission("plugin:migration", "READ"))
):
    with belief_scope("get_resource_mappings"):
        # Build the filter chain incrementally; every clause is optional.
        query = db.query(ResourceMapping)
        if env_id:
            query = query.filter(ResourceMapping.environment_id == env_id)
        if resource_type:
            # Stored enum values are upper-case; normalize client input to match.
            query = query.filter(ResourceMapping.resource_type == resource_type.upper())
        if search:
            pattern = f"%{search}%"
            query = query.filter(
                (ResourceMapping.resource_name.ilike(pattern))
                | (ResourceMapping.uuid.ilike(pattern))
            )

        # Count before pagination so clients can page through the full set.
        total = query.count()
        page = (
            query.order_by(ResourceMapping.resource_type, ResourceMapping.resource_name)
            .offset(skip)
            .limit(limit)
            .all()
        )

        items = [
            {
                "id": mapping.id,
                "environment_id": mapping.environment_id,
                "resource_type": mapping.resource_type.value if mapping.resource_type else None,
                "uuid": mapping.uuid,
                "remote_id": mapping.remote_integer_id,
                "resource_name": mapping.resource_name,
                "last_synced_at": mapping.last_synced_at.isoformat() if mapping.last_synced_at else None,
            }
            for mapping in page
        ]
        return {"items": items, "total": total}
# [/DEF:get_resource_mappings:Function]

# [DEF:trigger_sync_now:Function]
# @PURPOSE: Triggers an immediate ID synchronization for all environments.
# @PRE: At least one environment must be configured.
# @POST: Environment rows are ensured in DB; sync_environment is called for each.
@router.post("/migration/sync-now", response_model=Dict[str, Any])
async def trigger_sync_now(
    config_manager=Depends(get_config_manager),
    db: Session = Depends(get_db),
    _=Depends(has_permission("plugin:migration", "EXECUTE"))
):
    with belief_scope("trigger_sync_now"):
        from ...core.logger import logger
        from ...models.mapping import Environment as EnvironmentModel

        environments = config_manager.get_config().environments
        if not environments:
            raise HTTPException(status_code=400, detail="No environments configured")

        # Upsert each configured environment into the DB so that mapping rows
        # written during sync satisfy their foreign-key constraints.
        for env in environments:
            row = db.query(EnvironmentModel).filter_by(id=env.id).first()
            if not row:
                db.add(EnvironmentModel(
                    id=env.id,
                    name=env.name,
                    url=env.url,
                    credentials_id=env.id,  # Use env.id as credentials reference
                ))
                logger.info(f"[trigger_sync_now][Action] Created environment row for {env.id}")
            else:
                # Refresh mutable fields for environments that already exist.
                row.name = env.name
                row.url = env.url
        db.commit()

        # Best-effort sync: one failing environment must not abort the rest.
        service = IdMappingService(db)
        results = {"synced": [], "failed": []}
        for env in environments:
            try:
                service.sync_environment(env.id, SupersetClient(env))
                results["synced"].append(env.id)
                logger.info(f"[trigger_sync_now][Action] Synced environment {env.id}")
            except Exception as e:
                results["failed"].append({"env_id": env.id, "error": str(e)})
                logger.error(f"[trigger_sync_now][Error] Failed to sync {env.id}: {e}")

        return {
            "status": "completed",
            "synced_count": len(results["synced"]),
            "failed_count": len(results["failed"]),
            "details": results,
        }
# [/DEF:trigger_sync_now:Function]

# [/DEF:backend.src.api.routes.migration:Module]
|