# [DEF:backend.src.api.routes.dashboards:Module]
|
|
#
|
|
# @TIER: STANDARD
|
|
# @SEMANTICS: api, dashboards, resources, hub
|
|
# @PURPOSE: API endpoints for the Dashboard Hub - listing dashboards with Git and task status
|
|
# @LAYER: API
|
|
# @RELATION: DEPENDS_ON -> backend.src.dependencies
|
|
# @RELATION: DEPENDS_ON -> backend.src.services.resource_service
|
|
# @RELATION: DEPENDS_ON -> backend.src.core.superset_client
|
|
#
|
|
# @INVARIANT: All dashboard responses include git_status and last_task metadata
|
|
|
|
# [SECTION: IMPORTS]
|
|
from fastapi import APIRouter, Depends, HTTPException
|
|
from typing import List, Optional, Dict
|
|
from pydantic import BaseModel, Field
|
|
from ...dependencies import get_config_manager, get_task_manager, get_resource_service, get_mapping_service, has_permission
|
|
from ...core.logger import logger, belief_scope
|
|
# [/SECTION]
|
|
|
|
# Router for the Dashboard Hub; all endpoints below are mounted under /api/dashboards.
router = APIRouter(prefix="/api/dashboards", tags=["Dashboards"])
|
|
|
|
# [DEF:GitStatus:DataClass]
class GitStatus(BaseModel):
    """Git synchronization metadata attached to a dashboard entry."""

    # Git branch the dashboard export is tracked on, if known.
    branch: Optional[str] = None
    # BUGFIX: the original pattern "^OK|DIFF$" anchored only the first and
    # last alternatives (alternation binds looser than anchors), so values
    # other than exactly "OK"/"DIFF" could slip through. Group the
    # alternation so both values are fully anchored.
    sync_status: Optional[str] = Field(None, pattern="^(OK|DIFF)$")
# [/DEF:GitStatus:DataClass]
|
|
|
|
# [DEF:LastTask:DataClass]
class LastTask(BaseModel):
    """Most recent task associated with a dashboard."""

    # Identifier usable for task-status polling; None when no task exists.
    task_id: Optional[str] = None
    # BUGFIX: "^RUNNING|SUCCESS|ERROR|WAITING_INPUT$" anchored only the
    # outer alternatives; group the alternation so each status value must
    # match the whole string.
    status: Optional[str] = Field(None, pattern="^(RUNNING|SUCCESS|ERROR|WAITING_INPUT)$")
# [/DEF:LastTask:DataClass]
|
|
|
|
# [DEF:DashboardItem:DataClass]
class DashboardItem(BaseModel):
    """A single dashboard entry enriched with Git and task metadata."""

    # Superset-internal numeric dashboard ID.
    id: int
    title: str
    # URL slug; may be absent for dashboards without one.
    slug: Optional[str] = None
    url: Optional[str] = None
    # Last-modified timestamp in string form — presumably as reported by
    # Superset; exact format not established here (TODO confirm upstream).
    last_modified: Optional[str] = None
    # Git sync metadata; None when the dashboard is not tracked in Git.
    git_status: Optional[GitStatus] = None
    # Most recent task touching this dashboard; None when no task history.
    last_task: Optional[LastTask] = None
# [/DEF:DashboardItem:DataClass]
|
|
|
|
# [DEF:DashboardsResponse:DataClass]
class DashboardsResponse(BaseModel):
    """Paginated dashboard listing returned by GET /api/dashboards."""

    # The dashboards on the requested page only.
    dashboards: List[DashboardItem]
    # Total number of dashboards matching the filter, pre-pagination.
    total: int
    # 1-based page number that was returned.
    page: int
    page_size: int
    # Total page count; the endpoint returns at least 1 even for zero results.
    total_pages: int
# [/DEF:DashboardsResponse:DataClass]
|
|
|
|
# [DEF:get_dashboards:Function]
# @PURPOSE: Fetch list of dashboards from a specific environment with Git status and last task status
# @PRE: env_id must be a valid environment ID
# @PRE: page must be >= 1 if provided
# @PRE: page_size must be between 1 and 100 if provided
# @POST: Returns a list of dashboards with enhanced metadata and pagination info
# @POST: Response includes pagination metadata (page, page_size, total, total_pages)
# @PARAM: env_id (str) - The environment ID to fetch dashboards from
# @PARAM: search (Optional[str]) - Filter by title/slug
# @PARAM: page (Optional[int]) - Page number (default: 1)
# @PARAM: page_size (Optional[int]) - Items per page (default: 10, max: 100)
# @RETURN: DashboardsResponse - List of dashboards with status metadata
# @RELATION: CALLS -> ResourceService.get_dashboards_with_status
@router.get("", response_model=DashboardsResponse)
async def get_dashboards(
    env_id: str,
    search: Optional[str] = None,
    page: int = 1,
    page_size: int = 10,
    config_manager=Depends(get_config_manager),
    task_manager=Depends(get_task_manager),
    resource_service=Depends(get_resource_service),
    _ = Depends(has_permission("plugin:migration", "READ"))
):
    """List dashboards for an environment, with status metadata and pagination.

    Raises:
        HTTPException: 400 for invalid pagination, 404 for an unknown
            environment, 503 when the upstream fetch fails.
    """
    with belief_scope("get_dashboards", f"env_id={env_id}, search={search}, page={page}, page_size={page_size}"):
        # Validate pagination parameters before touching any service.
        if page < 1:
            logger.error(f"[get_dashboards][Coherence:Failed] Invalid page: {page}")
            raise HTTPException(status_code=400, detail="Page must be >= 1")
        if page_size < 1 or page_size > 100:
            logger.error(f"[get_dashboards][Coherence:Failed] Invalid page_size: {page_size}")
            raise HTTPException(status_code=400, detail="Page size must be between 1 and 100")

        # Validate environment exists
        environments = config_manager.get_environments()
        env = next((e for e in environments if e.id == env_id), None)
        if not env:
            logger.error(f"[get_dashboards][Coherence:Failed] Environment not found: {env_id}")
            raise HTTPException(status_code=404, detail="Environment not found")

        try:
            # Get all tasks for status lookup
            all_tasks = task_manager.get_all_tasks()

            # Fetch dashboards with status using ResourceService
            dashboards = await resource_service.get_dashboards_with_status(env, all_tasks)

            # Apply case-insensitive title/slug search filter if provided.
            if search:
                search_lower = search.lower()
                # BUGFIX: d.get('slug', '') returns None (not '') when the key
                # is present with an explicit None value, and None.lower()
                # raised AttributeError — surfacing as a spurious 503.
                # Coalesce with `or ''` so None behaves like a missing value.
                dashboards = [
                    d for d in dashboards
                    if search_lower in (d.get('title') or '').lower()
                    or search_lower in (d.get('slug') or '').lower()
                ]

            # Calculate pagination (ceiling division; at least one page).
            total = len(dashboards)
            total_pages = (total + page_size - 1) // page_size if total > 0 else 1
            start_idx = (page - 1) * page_size
            end_idx = start_idx + page_size

            # Slice dashboards for current page
            paginated_dashboards = dashboards[start_idx:end_idx]

            logger.info(f"[get_dashboards][Coherence:OK] Returning {len(paginated_dashboards)} dashboards (page {page}/{total_pages}, total: {total})")

            return DashboardsResponse(
                dashboards=paginated_dashboards,
                total=total,
                page=page,
                page_size=page_size,
                total_pages=total_pages
            )

        except Exception as e:
            # Any upstream failure (Superset, task manager) maps to 503.
            logger.error(f"[get_dashboards][Coherence:Failed] Failed to fetch dashboards: {e}")
            raise HTTPException(status_code=503, detail=f"Failed to fetch dashboards: {str(e)}")
# [/DEF:get_dashboards:Function]
|
|
|
|
# [DEF:MigrateRequest:DataClass]
class MigrateRequest(BaseModel):
    """Request body for POST /api/dashboards/migrate."""

    source_env_id: str = Field(..., description="Source environment ID")
    target_env_id: str = Field(..., description="Target environment ID")
    dashboard_ids: List[int] = Field(..., description="List of dashboard IDs to migrate")
    # Optional source-db -> target-db name mapping applied during migration.
    db_mappings: Optional[Dict[str, str]] = Field(None, description="Database mappings for migration")
    replace_db_config: bool = Field(False, description="Replace database configuration")
# [/DEF:MigrateRequest:DataClass]
|
|
|
|
# [DEF:TaskResponse:DataClass]
class TaskResponse(BaseModel):
    """Response carrying the ID of a newly created background task."""

    # Stringified task identifier usable for progress polling.
    task_id: str
# [/DEF:TaskResponse:DataClass]
|
|
|
|
# [DEF:migrate_dashboards:Function]
# @PURPOSE: Trigger bulk migration of dashboards from source to target environment
# @PRE: User has permission plugin:migration:execute
# @PRE: source_env_id and target_env_id are valid environment IDs
# @PRE: dashboard_ids is a non-empty list
# @POST: Returns task_id for tracking migration progress
# @POST: Task is created and queued for execution
# @PARAM: request (MigrateRequest) - Migration request with source, target, and dashboard IDs
# @RETURN: TaskResponse - Task ID for tracking
# @RELATION: DISPATCHES -> MigrationPlugin
# @RELATION: CALLS -> task_manager.create_task
@router.post("/migrate", response_model=TaskResponse)
async def migrate_dashboards(
    request: MigrateRequest,
    config_manager=Depends(get_config_manager),
    task_manager=Depends(get_task_manager),
    _ = Depends(has_permission("plugin:migration", "EXECUTE"))
):
    """Queue a bulk dashboard migration and return its tracking task ID."""
    with belief_scope("migrate_dashboards", f"source={request.source_env_id}, target={request.target_env_id}, count={len(request.dashboard_ids)}"):
        # Reject an empty selection up front.
        if not request.dashboard_ids:
            logger.error("[migrate_dashboards][Coherence:Failed] No dashboard IDs provided")
            raise HTTPException(status_code=400, detail="At least one dashboard ID must be provided")

        # Resolve both environments via a single pass over the config.
        envs_by_id = {e.id: e for e in config_manager.get_environments()}
        source_env = envs_by_id.get(request.source_env_id)
        target_env = envs_by_id.get(request.target_env_id)

        if not source_env:
            logger.error(f"[migrate_dashboards][Coherence:Failed] Source environment not found: {request.source_env_id}")
            raise HTTPException(status_code=404, detail="Source environment not found")
        if not target_env:
            logger.error(f"[migrate_dashboards][Coherence:Failed] Target environment not found: {request.target_env_id}")
            raise HTTPException(status_code=404, detail="Target environment not found")

        try:
            # Assemble the parameter payload the migration plugin expects.
            migration_params = dict(
                source_env_id=request.source_env_id,
                target_env_id=request.target_env_id,
                selected_ids=request.dashboard_ids,
                replace_db_config=request.replace_db_config,
                db_mappings=request.db_mappings or {},
            )

            created_task = await task_manager.create_task(
                plugin_id='superset-migration',
                params=migration_params,
            )

            logger.info(f"[migrate_dashboards][Coherence:OK] Migration task created: {created_task.id} for {len(request.dashboard_ids)} dashboards")

            return TaskResponse(task_id=str(created_task.id))

        except Exception as e:
            logger.error(f"[migrate_dashboards][Coherence:Failed] Failed to create migration task: {e}")
            raise HTTPException(status_code=503, detail=f"Failed to create migration task: {str(e)}")
# [/DEF:migrate_dashboards:Function]
|
|
|
|
# [DEF:BackupRequest:DataClass]
class BackupRequest(BaseModel):
    """Request body for POST /api/dashboards/backup."""

    env_id: str = Field(..., description="Environment ID")
    dashboard_ids: List[int] = Field(..., description="List of dashboard IDs to backup")
    # Optional cron expression; when set, the backup recurs on schedule.
    schedule: Optional[str] = Field(None, description="Cron schedule for recurring backups (e.g., '0 0 * * *')")
# [/DEF:BackupRequest:DataClass]
|
|
|
|
# [DEF:backup_dashboards:Function]
# @PURPOSE: Trigger bulk backup of dashboards with optional cron schedule
# @PRE: User has permission plugin:backup:execute
# @PRE: env_id is a valid environment ID
# @PRE: dashboard_ids is a non-empty list
# @POST: Returns task_id for tracking backup progress
# @POST: Task is created and queued for execution
# @POST: If schedule is provided, a scheduled task is created
# @PARAM: request (BackupRequest) - Backup request with environment and dashboard IDs
# @RETURN: TaskResponse - Task ID for tracking
# @RELATION: DISPATCHES -> BackupPlugin
# @RELATION: CALLS -> task_manager.create_task
@router.post("/backup", response_model=TaskResponse)
async def backup_dashboards(
    request: BackupRequest,
    config_manager=Depends(get_config_manager),
    task_manager=Depends(get_task_manager),
    _ = Depends(has_permission("plugin:backup", "EXECUTE"))
):
    """Queue a bulk dashboard backup (optionally scheduled) and return its task ID."""
    with belief_scope("backup_dashboards", f"env={request.env_id}, count={len(request.dashboard_ids)}, schedule={request.schedule}"):
        # An empty selection is a client error.
        if not request.dashboard_ids:
            logger.error("[backup_dashboards][Coherence:Failed] No dashboard IDs provided")
            raise HTTPException(status_code=400, detail="At least one dashboard ID must be provided")

        # Look up the target environment by ID.
        environment = None
        for candidate in config_manager.get_environments():
            if candidate.id == request.env_id:
                environment = candidate
                break

        if not environment:
            logger.error(f"[backup_dashboards][Coherence:Failed] Environment not found: {request.env_id}")
            raise HTTPException(status_code=404, detail="Environment not found")

        try:
            # Parameter payload consumed by the backup plugin.
            backup_params = dict(
                env=request.env_id,
                dashboards=request.dashboard_ids,
                schedule=request.schedule,
            )

            created_task = await task_manager.create_task(
                plugin_id='superset-backup',
                params=backup_params,
            )

            logger.info(f"[backup_dashboards][Coherence:OK] Backup task created: {created_task.id} for {len(request.dashboard_ids)} dashboards")

            return TaskResponse(task_id=str(created_task.id))

        except Exception as e:
            logger.error(f"[backup_dashboards][Coherence:Failed] Failed to create backup task: {e}")
            raise HTTPException(status_code=503, detail=f"Failed to create backup task: {str(e)}")
# [/DEF:backup_dashboards:Function]
|
|
|
|
# [DEF:DatabaseMapping:DataClass]
class DatabaseMapping(BaseModel):
    """A suggested source-to-target database mapping with a confidence score."""

    source_db: str
    target_db: str
    # UUIDs are optional — presumably absent when the MappingService cannot
    # resolve them; confirm against MappingService.get_suggestions output.
    source_db_uuid: Optional[str] = None
    target_db_uuid: Optional[str] = None
    # Match confidence as produced by MappingService (callers default to 0.0).
    confidence: float
# [/DEF:DatabaseMapping:DataClass]
|
|
|
|
# [DEF:DatabaseMappingsResponse:DataClass]
class DatabaseMappingsResponse(BaseModel):
    """Response wrapper for GET /api/dashboards/db-mappings."""

    # Suggested mappings; may be empty when no match is found.
    mappings: List[DatabaseMapping]
# [/DEF:DatabaseMappingsResponse:DataClass]
|
|
|
|
# [DEF:get_database_mappings:Function]
# @PURPOSE: Get database mapping suggestions between source and target environments
# @PRE: User has permission plugin:migration:read
# @PRE: source_env_id and target_env_id are valid environment IDs
# @POST: Returns list of suggested database mappings with confidence scores
# @PARAM: source_env_id (str) - Source environment ID
# @PARAM: target_env_id (str) - Target environment ID
# @RETURN: DatabaseMappingsResponse - List of suggested mappings
# @RELATION: CALLS -> MappingService.get_suggestions
@router.get("/db-mappings", response_model=DatabaseMappingsResponse)
async def get_database_mappings(
    source_env_id: str,
    target_env_id: str,
    mapping_service=Depends(get_mapping_service),
    _ = Depends(has_permission("plugin:migration", "READ"))
):
    """Return suggested database mappings between two environments."""
    with belief_scope("get_database_mappings", f"source={source_env_id}, target={target_env_id}"):
        try:
            # Ask the MappingService for raw suggestion dicts.
            suggestions = await mapping_service.get_suggestions(source_env_id, target_env_id)

            # Convert each raw suggestion into a typed DatabaseMapping.
            mappings: List[DatabaseMapping] = []
            for suggestion in suggestions:
                mappings.append(
                    DatabaseMapping(
                        source_db=suggestion.get('source_db', ''),
                        target_db=suggestion.get('target_db', ''),
                        source_db_uuid=suggestion.get('source_db_uuid'),
                        target_db_uuid=suggestion.get('target_db_uuid'),
                        confidence=suggestion.get('confidence', 0.0),
                    )
                )

            logger.info(f"[get_database_mappings][Coherence:OK] Returning {len(mappings)} database mapping suggestions")

            return DatabaseMappingsResponse(mappings=mappings)

        except Exception as e:
            logger.error(f"[get_database_mappings][Coherence:Failed] Failed to get database mappings: {e}")
            raise HTTPException(status_code=503, detail=f"Failed to get database mappings: {str(e)}")
# [/DEF:get_database_mappings:Function]
|
|
|
|
# [/DEF:backend.src.api.routes.dashboards:Module]
|