# NOTE: viewer metadata ("686 lines / 30 KiB / Python") removed from code path — kept as a comment so the module stays importable.
# [DEF:backend.src.api.routes.dashboards:Module]
|
|
#
|
|
# @TIER: CRITICAL
|
|
# @SEMANTICS: api, dashboards, resources, hub
|
|
# @PURPOSE: API endpoints for the Dashboard Hub - listing dashboards with Git and task status
|
|
# @LAYER: API
|
|
# @RELATION: DEPENDS_ON -> backend.src.dependencies
|
|
# @RELATION: DEPENDS_ON -> backend.src.services.resource_service
|
|
# @RELATION: DEPENDS_ON -> backend.src.core.superset_client
|
|
#
|
|
# @INVARIANT: All dashboard responses include git_status and last_task metadata
|
|
#
|
|
# @TEST_CONTRACT: DashboardsAPI -> {
|
|
# required_fields: {env_id: string, page: integer, page_size: integer},
|
|
# optional_fields: {search: string},
|
|
# invariants: ["Pagination must be valid", "Environment must exist"]
|
|
# }
|
|
#
|
|
# @TEST_FIXTURE: dashboard_list_happy -> {
|
|
# "env_id": "prod",
|
|
# "expected_count": 1,
|
|
# "dashboards": [{"id": 1, "title": "Main Revenue"}]
|
|
# }
|
|
#
|
|
# @TEST_EDGE: pagination_zero_page -> {"env_id": "prod", "page": 0, "status": 400}
|
|
# @TEST_EDGE: pagination_oversize -> {"env_id": "prod", "page_size": 101, "status": 400}
|
|
# @TEST_EDGE: missing_env -> {"env_id": "ghost", "status": 404}
|
|
# @TEST_EDGE: empty_dashboards -> {"env_id": "empty_env", "expected_total": 0}
|
|
# @TEST_EDGE: external_superset_failure -> {"env_id": "bad_conn", "status": 503}
|
|
#
|
|
# @TEST_INVARIANT: metadata_consistency -> verifies: [dashboard_list_happy, empty_dashboards]
|
|
#
|
|
|
|
# [SECTION: IMPORTS]
|
|
from fastapi import APIRouter, Depends, HTTPException, Query, Response
|
|
from fastapi.responses import JSONResponse
|
|
from typing import List, Optional, Dict, Any
|
|
import re
|
|
from urllib.parse import urlparse
|
|
from pydantic import BaseModel, Field
|
|
from ...dependencies import get_config_manager, get_task_manager, get_resource_service, get_mapping_service, has_permission
|
|
from ...core.logger import logger, belief_scope
|
|
from ...core.superset_client import SupersetClient
|
|
from ...core.utils.network import DashboardNotFoundError
|
|
from ...services.resource_service import ResourceService
|
|
# [/SECTION]
|
|
|
|
# Router for all Dashboard Hub endpoints; every route below is mounted under /api/dashboards.
router = APIRouter(prefix="/api/dashboards", tags=["Dashboards"])
|
|
|
|
# [DEF:GitStatus:DataClass]
# @PURPOSE: Git repository sync metadata attached to a dashboard list item.
class GitStatus(BaseModel):
    # Current branch of the linked repo, when one exists.
    branch: Optional[str] = None
    # BUGFIX: the previous pattern "^OK|DIFF|NO_REPO|ERROR$" only anchored the
    # first and last alternatives (regex `|` has lower precedence than `^`/`$`),
    # so values like "XDIFFX" were accepted. Group the alternatives so the whole
    # value must be exactly one of the four states.
    sync_status: Optional[str] = Field(None, pattern="^(OK|DIFF|NO_REPO|ERROR)$")
    # Whether a Git repo is associated with this dashboard at all.
    has_repo: Optional[bool] = None
    # Whether the working tree has uncommitted dashboard changes.
    has_changes_for_commit: Optional[bool] = None
# [/DEF:GitStatus:DataClass]
|
|
|
|
# [DEF:LastTask:DataClass]
# @PURPOSE: Summary of the most recent task that touched a dashboard.
class LastTask(BaseModel):
    task_id: Optional[str] = None
    # BUGFIX: both patterns below previously anchored only the first/last
    # alternatives (e.g. "^PENDING|...|AWAITING_MAPPING$" accepted any value
    # merely *containing* "RUNNING"). Alternatives are now grouped so the whole
    # string must match one allowed state.
    status: Optional[str] = Field(
        None,
        pattern="^(PENDING|RUNNING|SUCCESS|FAILED|ERROR|AWAITING_INPUT|WAITING_INPUT|AWAITING_MAPPING)$",
    )
    validation_status: Optional[str] = Field(None, pattern="^(PASS|FAIL|WARN|UNKNOWN)$")
# [/DEF:LastTask:DataClass]
|
|
|
|
# [DEF:DashboardItem:DataClass]
# @PURPOSE: One dashboard row in the hub listing, with optional Git and task metadata.
class DashboardItem(BaseModel):
    # Superset numeric dashboard id.
    id: int
    title: str
    slug: Optional[str] = None
    # Link to the dashboard in Superset, when provided by the upstream payload.
    url: Optional[str] = None
    # Timestamps/names are passed through as strings from the Superset API.
    last_modified: Optional[str] = None
    created_by: Optional[str] = None
    modified_by: Optional[str] = None
    owners: Optional[List[str]] = None
    # Populated by ResourceService; None when Git enrichment is skipped.
    git_status: Optional[GitStatus] = None
    # Most recent related task, if any (see LastTask).
    last_task: Optional[LastTask] = None
# [/DEF:DashboardItem:DataClass]
|
|
|
|
# [DEF:DashboardsResponse:DataClass]
# @PURPOSE: Paginated envelope returned by GET /api/dashboards.
class DashboardsResponse(BaseModel):
    # The current page of dashboards only (not the full result set).
    dashboards: List[DashboardItem]
    # Total matches after search filtering, across all pages.
    total: int
    page: int
    page_size: int
    total_pages: int
# [/DEF:DashboardsResponse:DataClass]
|
|
|
|
# [DEF:DashboardChartItem:DataClass]
# @PURPOSE: Chart summary embedded in the dashboard detail payload.
class DashboardChartItem(BaseModel):
    id: int
    title: str
    # Superset visualization type identifier (e.g. table, line) — passed through as-is.
    viz_type: Optional[str] = None
    dataset_id: Optional[int] = None
    last_modified: Optional[str] = None
    overview: Optional[str] = None
# [/DEF:DashboardChartItem:DataClass]
|
|
|
|
# [DEF:DashboardDatasetItem:DataClass]
# @PURPOSE: Dataset summary embedded in the dashboard detail payload.
class DashboardDatasetItem(BaseModel):
    id: int
    table_name: str
    # NOTE(review): field name "schema" shadows BaseModel.schema (deprecated
    # classmethod in Pydantic v2) — confirm the installed pydantic version
    # accepts this without a protected-namespace warning.
    schema: Optional[str] = None
    database: str
    last_modified: Optional[str] = None
    overview: Optional[str] = None
# [/DEF:DashboardDatasetItem:DataClass]
|
|
|
|
# [DEF:DashboardDetailResponse:DataClass]
# @PURPOSE: Full dashboard overview returned by GET /api/dashboards/{id},
#           built from SupersetClient.get_dashboard_detail.
class DashboardDetailResponse(BaseModel):
    id: int
    title: str
    slug: Optional[str] = None
    url: Optional[str] = None
    description: Optional[str] = None
    last_modified: Optional[str] = None
    published: Optional[bool] = None
    # Related objects resolved by the Superset client.
    charts: List[DashboardChartItem]
    datasets: List[DashboardDatasetItem]
    # Counts are supplied by the upstream payload (not derived here).
    chart_count: int
    dataset_count: int
# [/DEF:DashboardDetailResponse:DataClass]
|
|
|
|
# [DEF:DashboardTaskHistoryItem:DataClass]
# @PURPOSE: One row of a dashboard's backup/validation task history.
class DashboardTaskHistoryItem(BaseModel):
    # Task manager id, stringified.
    id: str
    plugin_id: str
    status: str
    # Result-level status (e.g. validation verdict), when the task result dict carries one.
    validation_status: Optional[str] = None
    # ISO-8601 strings produced from the task's datetime fields.
    started_at: Optional[str] = None
    finished_at: Optional[str] = None
    env_id: Optional[str] = None
    summary: Optional[str] = None
# [/DEF:DashboardTaskHistoryItem:DataClass]
|
|
|
|
# [DEF:DashboardTaskHistoryResponse:DataClass]
# @PURPOSE: Envelope for GET /api/dashboards/{id}/tasks; items are sorted newest first.
class DashboardTaskHistoryResponse(BaseModel):
    dashboard_id: int
    items: List[DashboardTaskHistoryItem]
# [/DEF:DashboardTaskHistoryResponse:DataClass]
|
|
|
|
# [DEF:DatabaseMapping:DataClass]
# @PURPOSE: One suggested source->target database pairing for migration.
class DatabaseMapping(BaseModel):
    source_db: str
    target_db: str
    source_db_uuid: Optional[str] = None
    target_db_uuid: Optional[str] = None
    # Match confidence score produced by MappingService (0.0 default when absent).
    confidence: float
# [/DEF:DatabaseMapping:DataClass]
|
|
|
|
# [DEF:DatabaseMappingsResponse:DataClass]
# @PURPOSE: Envelope for GET /api/dashboards/db-mappings.
class DatabaseMappingsResponse(BaseModel):
    mappings: List[DatabaseMapping]
# [/DEF:DatabaseMappingsResponse:DataClass]
|
|
|
|
# [DEF:_filter_and_paginate_dashboards:Function]
# @PURPOSE: Shared search + pagination for dashboard lists (fallback/compat paths)
# @PRE: page >= 1 and 1 <= page_size <= 100 (validated by the caller)
# @POST: Returns (page_of_dashboards, total_after_filter, total_pages)
# @PARAM: dashboards (List[Dict[str, Any]]) - Raw dashboard dicts from ResourceService
# @PARAM: search (Optional[str]) - Case-insensitive substring filter on title/slug
# @PARAM: page (int) - 1-based page number
# @PARAM: page_size (int) - Items per page
# @RETURN: tuple - (paginated list, total, total_pages)
def _filter_and_paginate_dashboards(
    dashboards: List[Dict[str, Any]],
    search: Optional[str],
    page: int,
    page_size: int,
):
    if search:
        search_lower = search.lower()
        # Use `or ''` rather than .get(key, ''): dict.get only substitutes the
        # default for *missing* keys, so a present-but-None 'title'/'slug'
        # previously crashed with AttributeError on .lower().
        dashboards = [
            d for d in dashboards
            if search_lower in (d.get('title') or '').lower()
            or search_lower in (d.get('slug') or '').lower()
        ]

    total = len(dashboards)
    # An empty result still reports one (empty) page for UI pagination.
    total_pages = (total + page_size - 1) // page_size if total > 0 else 1
    start_idx = (page - 1) * page_size
    return dashboards[start_idx:start_idx + page_size], total, total_pages
# [/DEF:_filter_and_paginate_dashboards:Function]


# [DEF:get_dashboards:Function]
# @PURPOSE: Fetch list of dashboards from a specific environment with Git status and last task status
# @PRE: env_id must be a valid environment ID
# @PRE: page must be >= 1 if provided
# @PRE: page_size must be between 1 and 100 if provided
# @POST: Returns a list of dashboards with enhanced metadata and pagination info
# @POST: Response includes pagination metadata (page, page_size, total, total_pages)
# @PARAM: env_id (str) - The environment ID to fetch dashboards from
# @PARAM: search (Optional[str]) - Filter by title/slug
# @PARAM: page (Optional[int]) - Page number (default: 1)
# @PARAM: page_size (Optional[int]) - Items per page (default: 10, max: 100)
# @RETURN: DashboardsResponse - List of dashboards with status metadata
# @RELATION: CALLS -> ResourceService.get_dashboards_with_status
@router.get("", response_model=DashboardsResponse)
async def get_dashboards(
    env_id: str,
    search: Optional[str] = None,
    page: int = 1,
    page_size: int = 10,
    config_manager=Depends(get_config_manager),
    task_manager=Depends(get_task_manager),
    resource_service=Depends(get_resource_service),
    _ = Depends(has_permission("plugin:migration", "READ"))
):
    with belief_scope("get_dashboards", f"env_id={env_id}, search={search}, page={page}, page_size={page_size}"):
        # Validate pagination parameters (400 on violation, per @TEST_EDGE cases)
        if page < 1:
            logger.error(f"[get_dashboards][Coherence:Failed] Invalid page: {page}")
            raise HTTPException(status_code=400, detail="Page must be >= 1")
        if page_size < 1 or page_size > 100:
            logger.error(f"[get_dashboards][Coherence:Failed] Invalid page_size: {page_size}")
            raise HTTPException(status_code=400, detail="Page size must be between 1 and 100")

        # Validate environment exists (404 on unknown env)
        environments = config_manager.get_environments()
        env = next((e for e in environments if e.id == env_id), None)
        if not env:
            logger.error(f"[get_dashboards][Coherence:Failed] Environment not found: {env_id}")
            raise HTTPException(status_code=404, detail="Environment not found")

        try:
            # Get all tasks for status lookup
            all_tasks = task_manager.get_all_tasks()

            # Fast path: real ResourceService -> one Superset page call per API request.
            if isinstance(resource_service, ResourceService):
                try:
                    page_payload = await resource_service.get_dashboards_page_with_status(
                        env,
                        all_tasks,
                        page=page,
                        page_size=page_size,
                        search=search,
                        include_git_status=False,
                    )
                    paginated_dashboards = page_payload["dashboards"]
                    total = page_payload["total"]
                    total_pages = page_payload["total_pages"]
                except Exception as page_error:
                    # Older ResourceService without the page API: fetch everything
                    # and filter/paginate locally.
                    logger.warning(
                        "[get_dashboards][Action] Page-based fetch failed; using compatibility fallback: %s",
                        page_error,
                    )
                    dashboards = await resource_service.get_dashboards_with_status(
                        env,
                        all_tasks,
                        include_git_status=False,
                    )
                    paginated_dashboards, total, total_pages = _filter_and_paginate_dashboards(
                        dashboards, search, page, page_size
                    )
            else:
                # Compatibility path for mocked services in route tests.
                dashboards = await resource_service.get_dashboards_with_status(
                    env,
                    all_tasks,
                    include_git_status=False,
                )
                paginated_dashboards, total, total_pages = _filter_and_paginate_dashboards(
                    dashboards, search, page, page_size
                )

            logger.info(f"[get_dashboards][Coherence:OK] Returning {len(paginated_dashboards)} dashboards (page {page}/{total_pages}, total: {total})")

            return DashboardsResponse(
                dashboards=paginated_dashboards,
                total=total,
                page=page,
                page_size=page_size,
                total_pages=total_pages
            )

        except Exception as e:
            # Upstream Superset failure maps to 503 per @TEST_EDGE external_superset_failure.
            logger.error(f"[get_dashboards][Coherence:Failed] Failed to fetch dashboards: {e}")
            raise HTTPException(status_code=503, detail=f"Failed to fetch dashboards: {str(e)}")
# [/DEF:get_dashboards:Function]
|
|
|
|
# [DEF:get_database_mappings:Function]
# @PURPOSE: Get database mapping suggestions between source and target environments
# @PRE: User has permission plugin:migration:read
# @PRE: source_env_id and target_env_id are valid environment IDs
# @POST: Returns list of suggested database mappings with confidence scores
# @PARAM: source_env_id (str) - Source environment ID
# @PARAM: target_env_id (str) - Target environment ID
# @RETURN: DatabaseMappingsResponse - List of suggested mappings
# @RELATION: CALLS -> MappingService.get_suggestions
@router.get("/db-mappings", response_model=DatabaseMappingsResponse)
async def get_database_mappings(
    source_env_id: str,
    target_env_id: str,
    config_manager=Depends(get_config_manager),
    mapping_service=Depends(get_mapping_service),
    _ = Depends(has_permission("plugin:migration", "READ"))
):
    with belief_scope("get_database_mappings", f"source={source_env_id}, target={target_env_id}"):
        # Resolve both environments with a single pass over the configured list.
        known_envs = {e.id for e in config_manager.get_environments()}

        if source_env_id not in known_envs:
            logger.error(f"[get_database_mappings][Coherence:Failed] Source environment not found: {source_env_id}")
            raise HTTPException(status_code=404, detail="Source environment not found")
        if target_env_id not in known_envs:
            logger.error(f"[get_database_mappings][Coherence:Failed] Target environment not found: {target_env_id}")
            raise HTTPException(status_code=404, detail="Target environment not found")

        try:
            # Delegate suggestion computation to MappingService.
            suggestions = await mapping_service.get_suggestions(source_env_id, target_env_id)

            # Convert raw suggestion dicts into typed DatabaseMapping objects.
            mappings = []
            for entry in suggestions:
                mappings.append(
                    DatabaseMapping(
                        source_db=entry.get('source_db', ''),
                        target_db=entry.get('target_db', ''),
                        source_db_uuid=entry.get('source_db_uuid'),
                        target_db_uuid=entry.get('target_db_uuid'),
                        confidence=entry.get('confidence', 0.0)
                    )
                )

            logger.info(f"[get_database_mappings][Coherence:OK] Returning {len(mappings)} database mapping suggestions")

            return DatabaseMappingsResponse(mappings=mappings)

        except Exception as e:
            logger.error(f"[get_database_mappings][Coherence:Failed] Failed to get database mappings: {e}")
            raise HTTPException(status_code=503, detail=f"Failed to get database mappings: {str(e)}")
# [/DEF:get_database_mappings:Function]
|
|
|
|
# [DEF:get_dashboard_detail:Function]
# @PURPOSE: Fetch detailed dashboard info with related charts and datasets
# @PRE: env_id must be valid and dashboard_id must exist
# @POST: Returns dashboard detail payload for overview page
# @RELATION: CALLS -> SupersetClient.get_dashboard_detail
@router.get("/{dashboard_id:int}", response_model=DashboardDetailResponse)
async def get_dashboard_detail(
    dashboard_id: int,
    env_id: str,
    config_manager=Depends(get_config_manager),
    _ = Depends(has_permission("plugin:migration", "READ"))
):
    with belief_scope("get_dashboard_detail", f"dashboard_id={dashboard_id}, env_id={env_id}"):
        # Locate the requested environment; 404 when unknown.
        target_env = None
        for candidate in config_manager.get_environments():
            if candidate.id == env_id:
                target_env = candidate
                break

        if not target_env:
            logger.error(f"[get_dashboard_detail][Coherence:Failed] Environment not found: {env_id}")
            raise HTTPException(status_code=404, detail="Environment not found")

        try:
            # Fetch the full detail payload straight from Superset.
            detail = SupersetClient(target_env).get_dashboard_detail(dashboard_id)
            logger.info(
                f"[get_dashboard_detail][Coherence:OK] Dashboard {dashboard_id}: {detail.get('chart_count', 0)} charts, {detail.get('dataset_count', 0)} datasets"
            )
            return DashboardDetailResponse(**detail)
        except HTTPException:
            # Re-raise framework errors untouched (e.g. the 404 above).
            raise
        except Exception as e:
            logger.error(f"[get_dashboard_detail][Coherence:Failed] Failed to fetch dashboard detail: {e}")
            raise HTTPException(status_code=503, detail=f"Failed to fetch dashboard detail: {str(e)}")
# [/DEF:get_dashboard_detail:Function]
|
|
|
|
|
|
# [DEF:_task_matches_dashboard:Function]
|
|
# @PURPOSE: Checks whether task params are tied to a specific dashboard and environment.
|
|
# @PRE: task-like object exposes plugin_id and params fields.
|
|
# @POST: Returns True only for supported task plugins tied to dashboard_id (+optional env_id).
|
|
def _task_matches_dashboard(task: Any, dashboard_id: int, env_id: Optional[str]) -> bool:
|
|
plugin_id = getattr(task, "plugin_id", None)
|
|
if plugin_id not in {"superset-backup", "llm_dashboard_validation"}:
|
|
return False
|
|
|
|
params = getattr(task, "params", {}) or {}
|
|
dashboard_id_str = str(dashboard_id)
|
|
|
|
if plugin_id == "llm_dashboard_validation":
|
|
task_dashboard_id = params.get("dashboard_id")
|
|
if str(task_dashboard_id) != dashboard_id_str:
|
|
return False
|
|
if env_id:
|
|
task_env = params.get("environment_id")
|
|
return str(task_env) == str(env_id)
|
|
return True
|
|
|
|
# superset-backup can pass dashboards as "dashboard_ids" or "dashboards"
|
|
dashboard_ids = params.get("dashboard_ids") or params.get("dashboards") or []
|
|
normalized_ids = {str(item) for item in dashboard_ids}
|
|
if dashboard_id_str not in normalized_ids:
|
|
return False
|
|
if env_id:
|
|
task_env = params.get("environment_id") or params.get("env")
|
|
return str(task_env) == str(env_id)
|
|
return True
|
|
# [/DEF:_task_matches_dashboard:Function]
|
|
|
|
|
|
# [DEF:get_dashboard_tasks_history:Function]
# @PURPOSE: Returns history of backup and LLM validation tasks for a dashboard.
# @PRE: dashboard_id is valid integer.
# @POST: Response contains sorted task history (newest first).
@router.get("/{dashboard_id:int}/tasks", response_model=DashboardTaskHistoryResponse)
async def get_dashboard_tasks_history(
    dashboard_id: int,
    env_id: Optional[str] = None,
    limit: int = Query(20, ge=1, le=100),
    task_manager=Depends(get_task_manager),
    _ = Depends(has_permission("tasks", "READ"))
):
    with belief_scope("get_dashboard_tasks_history", f"dashboard_id={dashboard_id}, env_id={env_id}, limit={limit}"):
        # Keep only tasks that target this dashboard (and env, when given).
        matching = [
            candidate for candidate in task_manager.get_all_tasks()
            if _task_matches_dashboard(candidate, dashboard_id, env_id)
        ]

        def _recency(task_obj: Any) -> str:
            # Sort by started_at, falling back to finished_at when unset.
            return (
                str(getattr(task_obj, "started_at", "") or "")
                or str(getattr(task_obj, "finished_at", "") or "")
            )

        # Newest first, then cap to the requested page size.
        selected = sorted(matching, key=_recency, reverse=True)[:limit]

        items: List[DashboardTaskHistoryItem] = []
        for task in selected:
            outcome = getattr(task, "result", None)
            summary = None
            validation_status = None
            if isinstance(outcome, dict):
                raw_status = outcome.get("status")
                if raw_status is not None:
                    validation_status = str(raw_status)
                # Prefer an explicit summary, then status, then message.
                summary = (
                    outcome.get("summary")
                    or outcome.get("status")
                    or outcome.get("message")
                )
            task_params = getattr(task, "params", {}) or {}
            raw_env = task_params.get("environment_id") or task_params.get("env")
            started = getattr(task, "started_at", None)
            finished = getattr(task, "finished_at", None)
            items.append(
                DashboardTaskHistoryItem(
                    id=str(getattr(task, "id", "")),
                    plugin_id=str(getattr(task, "plugin_id", "")),
                    status=str(getattr(task, "status", "")),
                    validation_status=validation_status,
                    started_at=started.isoformat() if started else None,
                    finished_at=finished.isoformat() if finished else None,
                    env_id=str(raw_env) if raw_env else None,
                    summary=summary,
                )
            )

        logger.info(f"[get_dashboard_tasks_history][Coherence:OK] Found {len(items)} tasks for dashboard {dashboard_id}")
        return DashboardTaskHistoryResponse(dashboard_id=dashboard_id, items=items)
# [/DEF:get_dashboard_tasks_history:Function]
|
|
|
|
|
|
# [DEF:get_dashboard_thumbnail:Function]
# @PURPOSE: Proxies Superset dashboard thumbnail with cache support.
# @PRE: env_id must exist.
# @POST: Returns image bytes or 202 when thumbnail is being prepared by Superset.
# @PARAM: dashboard_id (int) - Superset dashboard id whose thumbnail is requested
# @PARAM: env_id (str) - Environment the dashboard lives in
# @PARAM: force (bool) - Ask Superset to (re)generate the cached screenshot
# @RETURN: Raw image Response, or JSONResponse(202) while the thumbnail is still rendering
@router.get("/{dashboard_id:int}/thumbnail")
async def get_dashboard_thumbnail(
    dashboard_id: int,
    env_id: str,
    force: bool = Query(False),
    config_manager=Depends(get_config_manager),
    _ = Depends(has_permission("plugin:migration", "READ"))
):
    with belief_scope("get_dashboard_thumbnail", f"dashboard_id={dashboard_id}, env_id={env_id}, force={force}"):
        environments = config_manager.get_environments()
        env = next((e for e in environments if e.id == env_id), None)
        if not env:
            logger.error(f"[get_dashboard_thumbnail][Coherence:Failed] Environment not found: {env_id}")
            raise HTTPException(status_code=404, detail="Environment not found")

        try:
            client = SupersetClient(env)
            # digest identifies a cached screenshot; thumb_endpoint is the final GET path.
            digest = None
            thumb_endpoint = None

            # Preferred flow (newer Superset): ask server to cache screenshot and return digest/image_url.
            try:
                screenshot_payload = client.network.request(
                    method="POST",
                    endpoint=f"/dashboard/{dashboard_id}/cache_dashboard_screenshot/",
                    json={"force": force},
                )
                # Some Superset versions nest the payload under "result"; tolerate both shapes.
                payload = screenshot_payload.get("result", screenshot_payload) if isinstance(screenshot_payload, dict) else {}
                image_url = payload.get("image_url", "") if isinstance(payload, dict) else ""
                if isinstance(image_url, str) and image_url:
                    # Extract the digest from ".../dashboard/<id>/thumbnail|screenshot/<digest>/".
                    matched = re.search(r"/dashboard/\d+/(?:thumbnail|screenshot)/([^/]+)/?$", image_url)
                    if matched:
                        digest = matched.group(1)
            except DashboardNotFoundError:
                # Older Superset has no cache endpoint; fall through to the legacy flow.
                logger.warning(
                    "[get_dashboard_thumbnail][Fallback] cache_dashboard_screenshot endpoint unavailable, fallback to dashboard.thumbnail_url"
                )

            # Fallback flow (older Superset): read thumbnail_url from dashboard payload.
            if not digest:
                dashboard_payload = client.network.request(
                    method="GET",
                    endpoint=f"/dashboard/{dashboard_id}",
                )
                dashboard_data = dashboard_payload.get("result", dashboard_payload) if isinstance(dashboard_payload, dict) else {}
                thumbnail_url = dashboard_data.get("thumbnail_url", "") if isinstance(dashboard_data, dict) else ""
                if isinstance(thumbnail_url, str) and thumbnail_url:
                    parsed = urlparse(thumbnail_url)
                    parsed_path = parsed.path or thumbnail_url
                    # The client already prefixes /api/v1, so strip it from the upstream path.
                    if parsed_path.startswith("/api/v1/"):
                        parsed_path = parsed_path[len("/api/v1"):]
                    thumb_endpoint = parsed_path
                    matched = re.search(r"/dashboard/\d+/(?:thumbnail|screenshot)/([^/]+)/?$", parsed_path)
                    if matched:
                        digest = matched.group(1)

            # Last resort: construct the endpoint ("latest" when no digest was found).
            if not thumb_endpoint:
                thumb_endpoint = f"/dashboard/{dashboard_id}/thumbnail/{digest or 'latest'}/"

            thumb_response = client.network.request(
                method="GET",
                endpoint=thumb_endpoint,
                raw_response=True,
                allow_redirects=True,
            )

            # 202 means Superset is still rendering; relay its payload (or a stub) to the caller.
            if thumb_response.status_code == 202:
                payload_202: Dict[str, Any] = {}
                try:
                    payload_202 = thumb_response.json()
                except Exception:
                    payload_202 = {"message": "Thumbnail is being generated"}
                return JSONResponse(status_code=202, content=payload_202)

            # Pass the image bytes through with the upstream content type (png default).
            content_type = thumb_response.headers.get("Content-Type", "image/png")
            return Response(content=thumb_response.content, media_type=content_type)
        except DashboardNotFoundError as e:
            logger.error(f"[get_dashboard_thumbnail][Coherence:Failed] Dashboard not found for thumbnail: {e}")
            raise HTTPException(status_code=404, detail="Dashboard thumbnail not found")
        except HTTPException:
            raise
        except Exception as e:
            logger.error(f"[get_dashboard_thumbnail][Coherence:Failed] Failed to fetch dashboard thumbnail: {e}")
            raise HTTPException(status_code=503, detail=f"Failed to fetch dashboard thumbnail: {str(e)}")
# [/DEF:get_dashboard_thumbnail:Function]
|
|
|
|
# [DEF:MigrateRequest:DataClass]
# @PURPOSE: Request body for POST /api/dashboards/migrate.
class MigrateRequest(BaseModel):
    source_env_id: str = Field(..., description="Source environment ID")
    target_env_id: str = Field(..., description="Target environment ID")
    dashboard_ids: List[int] = Field(..., description="List of dashboard IDs to migrate")
    # Keys/values are database names; forwarded to the migration plugin as-is.
    db_mappings: Optional[Dict[str, str]] = Field(None, description="Database mappings for migration")
    replace_db_config: bool = Field(False, description="Replace database configuration")
# [/DEF:MigrateRequest:DataClass]
|
|
|
|
# [DEF:TaskResponse:DataClass]
# @PURPOSE: Minimal response for task-spawning endpoints: the id to poll for progress.
class TaskResponse(BaseModel):
    task_id: str
# [/DEF:TaskResponse:DataClass]
|
|
|
|
# [DEF:migrate_dashboards:Function]
# @PURPOSE: Trigger bulk migration of dashboards from source to target environment
# @PRE: User has permission plugin:migration:execute
# @PRE: source_env_id and target_env_id are valid environment IDs
# @PRE: dashboard_ids is a non-empty list
# @POST: Returns task_id for tracking migration progress
# @POST: Task is created and queued for execution
# @PARAM: request (MigrateRequest) - Migration request with source, target, and dashboard IDs
# @RETURN: TaskResponse - Task ID for tracking
# @RELATION: DISPATCHES -> MigrationPlugin
# @RELATION: CALLS -> task_manager.create_task
@router.post("/migrate", response_model=TaskResponse)
async def migrate_dashboards(
    request: MigrateRequest,
    config_manager=Depends(get_config_manager),
    task_manager=Depends(get_task_manager),
    _ = Depends(has_permission("plugin:migration", "EXECUTE"))
):
    with belief_scope("migrate_dashboards", f"source={request.source_env_id}, target={request.target_env_id}, count={len(request.dashboard_ids)}"):
        # Guard clause: an empty selection is a client error.
        if not request.dashboard_ids:
            logger.error("[migrate_dashboards][Coherence:Failed] No dashboard IDs provided")
            raise HTTPException(status_code=400, detail="At least one dashboard ID must be provided")

        # Resolve both environments in one pass over the configured list.
        env_index = {e.id for e in config_manager.get_environments()}

        if request.source_env_id not in env_index:
            logger.error(f"[migrate_dashboards][Coherence:Failed] Source environment not found: {request.source_env_id}")
            raise HTTPException(status_code=404, detail="Source environment not found")
        if request.target_env_id not in env_index:
            logger.error(f"[migrate_dashboards][Coherence:Failed] Target environment not found: {request.target_env_id}")
            raise HTTPException(status_code=404, detail="Target environment not found")

        try:
            # Queue the migration under the superset-migration plugin.
            task_obj = await task_manager.create_task(
                plugin_id='superset-migration',
                params={
                    'source_env_id': request.source_env_id,
                    'target_env_id': request.target_env_id,
                    'selected_ids': request.dashboard_ids,
                    'replace_db_config': request.replace_db_config,
                    'db_mappings': request.db_mappings or {}
                }
            )

            logger.info(f"[migrate_dashboards][Coherence:OK] Migration task created: {task_obj.id} for {len(request.dashboard_ids)} dashboards")

            return TaskResponse(task_id=str(task_obj.id))

        except Exception as e:
            logger.error(f"[migrate_dashboards][Coherence:Failed] Failed to create migration task: {e}")
            raise HTTPException(status_code=503, detail=f"Failed to create migration task: {str(e)}")
# [/DEF:migrate_dashboards:Function]
|
|
|
|
# [DEF:BackupRequest:DataClass]
# @PURPOSE: Request body for POST /api/dashboards/backup.
class BackupRequest(BaseModel):
    env_id: str = Field(..., description="Environment ID")
    dashboard_ids: List[int] = Field(..., description="List of dashboard IDs to backup")
    # When set, the backup plugin schedules recurring runs instead of a one-off.
    schedule: Optional[str] = Field(None, description="Cron schedule for recurring backups (e.g., '0 0 * * *')")
# [/DEF:BackupRequest:DataClass]
|
|
|
|
# [DEF:backup_dashboards:Function]
# @PURPOSE: Trigger bulk backup of dashboards with optional cron schedule
# @PRE: User has permission plugin:backup:execute
# @PRE: env_id is a valid environment ID
# @PRE: dashboard_ids is a non-empty list
# @POST: Returns task_id for tracking backup progress
# @POST: Task is created and queued for execution
# @POST: If schedule is provided, a scheduled task is created
# @PARAM: request (BackupRequest) - Backup request with environment and dashboard IDs
# @RETURN: TaskResponse - Task ID for tracking
# @RELATION: DISPATCHES -> BackupPlugin
# @RELATION: CALLS -> task_manager.create_task
@router.post("/backup", response_model=TaskResponse)
async def backup_dashboards(
    request: BackupRequest,
    config_manager=Depends(get_config_manager),
    task_manager=Depends(get_task_manager),
    _ = Depends(has_permission("plugin:backup", "EXECUTE"))
):
    with belief_scope("backup_dashboards", f"env={request.env_id}, count={len(request.dashboard_ids)}, schedule={request.schedule}"):
        # Guard clause: an empty selection is a client error.
        if not request.dashboard_ids:
            logger.error("[backup_dashboards][Coherence:Failed] No dashboard IDs provided")
            raise HTTPException(status_code=400, detail="At least one dashboard ID must be provided")

        # The environment must be configured; otherwise 404.
        if not any(e.id == request.env_id for e in config_manager.get_environments()):
            logger.error(f"[backup_dashboards][Coherence:Failed] Environment not found: {request.env_id}")
            raise HTTPException(status_code=404, detail="Environment not found")

        try:
            # Queue the backup under the superset-backup plugin; schedule may be None.
            task_obj = await task_manager.create_task(
                plugin_id='superset-backup',
                params={
                    'env': request.env_id,
                    'dashboards': request.dashboard_ids,
                    'schedule': request.schedule
                }
            )

            logger.info(f"[backup_dashboards][Coherence:OK] Backup task created: {task_obj.id} for {len(request.dashboard_ids)} dashboards")

            return TaskResponse(task_id=str(task_obj.id))

        except Exception as e:
            logger.error(f"[backup_dashboards][Coherence:Failed] Failed to create backup task: {e}")
            raise HTTPException(status_code=503, detail=f"Failed to create backup task: {str(e)}")
# [/DEF:backup_dashboards:Function]
|
|
|
|
# [/DEF:backend.src.api.routes.dashboards:Module]
|